hip_filename stringlengths 5 84 | hip_content stringlengths 79 9.69M | cuda_filename stringlengths 4 83 | cuda_content stringlengths 19 9.69M |
|---|---|---|---|
918c86051dd73a93bd6f909f5824fdbcfd112937.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/* Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <map> // NOLINT
#include "gtest/gtest.h"
#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/api/lib/utils/allocator.h"
#include "paddle/phi/backends/all_context.h"
#include "paddle/phi/backends/gpu/gpu_context.h"
#include "paddle/phi/common/complex.h"
#include "paddle/phi/common/float16.h"
#include "paddle/phi/common/place.h"
#include "paddle/phi/common/scalar.h"
#include "paddle/phi/core/dense_tensor.h"
#include "paddle/phi/core/kernel_registry.h"
namespace phi {
namespace tests {
using DDim = phi::DDim;
using float16 = phi::dtype::float16;
using complex64 = ::phi::dtype::complex<float>;
using complex128 = ::phi::dtype::complex<double>;
__global__ void FillTensor(float* data) { data[0] = 1; }
TEST(Scalar, ConstructFromDenseTensor1) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::FLOAT16, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<float16>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
ASSERT_NEAR(1, scalar_test.to<float16>(), 1e-6);
}
TEST(Scalar, ConstructFromDenseTensor2) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::INT16, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<int16_t>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
ASSERT_EQ(1, scalar_test.to<int16_t>());
}
TEST(Scalar, ConstructFromDenseTensor3) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::INT8, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<int8_t>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
ASSERT_EQ(1, scalar_test.to<int8_t>());
}
TEST(Scalar, ConstructFromDenseTensor4) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::BOOL, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<bool>(&dense_x);
dense_x_data[0] = true;
phi::Scalar scalar_test(dense_x);
ASSERT_EQ(true, scalar_test.to<bool>());
}
TEST(Scalar, ConstructFromDenseTensor5) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(alloc.get(),
phi::DenseTensorMeta(phi::DataType::COMPLEX64,
phi::make_ddim({1}),
phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<complex64>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
complex64 expected_value(1, 0);
EXPECT_TRUE(expected_value == scalar_test.to<complex64>());
}
TEST(Scalar, ConstructFromDenseTensor6) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(alloc.get(),
phi::DenseTensorMeta(phi::DataType::COMPLEX128,
phi::make_ddim({1}),
phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<complex128>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
complex128 expected_value(1, 0);
EXPECT_TRUE(expected_value == scalar_test.to<complex128>());
}
TEST(Scalar, ConstructFromDenseTensor7) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::GPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::FLOAT32, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::GPUContext*>(pool.Get(phi::GPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<float>(&dense_x);
hipLaunchKernelGGL(( FillTensor), dim3(1), dim3(1), 0, dev_ctx->stream(), dense_x_data);
dev_ctx->Wait();
phi::Scalar scalar_test(dense_x);
ASSERT_NEAR(1, scalar_test.to<float>(), 1e-6);
}
TEST(Scalar, ConstructFromTensor) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::GPUPlace());
auto dense_x = std::make_shared<phi::DenseTensor>(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::FLOAT32, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::GPUContext*>(pool.Get(phi::GPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<float>(dense_x.get());
hipLaunchKernelGGL(( FillTensor), dim3(1), dim3(1), 0, dev_ctx->stream(), dense_x_data);
dev_ctx->Wait();
paddle::experimental::Tensor x(dense_x);
paddle::experimental::Scalar scalar_test(x);
ASSERT_NEAR(1, scalar_test.to<float>(), 1e-6);
}
} // namespace tests
} // namespace phi
| 918c86051dd73a93bd6f909f5824fdbcfd112937.cu | /* Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <map> // NOLINT
#include "gtest/gtest.h"
#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/api/lib/utils/allocator.h"
#include "paddle/phi/backends/all_context.h"
#include "paddle/phi/backends/gpu/gpu_context.h"
#include "paddle/phi/common/complex.h"
#include "paddle/phi/common/float16.h"
#include "paddle/phi/common/place.h"
#include "paddle/phi/common/scalar.h"
#include "paddle/phi/core/dense_tensor.h"
#include "paddle/phi/core/kernel_registry.h"
namespace phi {
namespace tests {
using DDim = phi::DDim;
using float16 = phi::dtype::float16;
using complex64 = ::phi::dtype::complex<float>;
using complex128 = ::phi::dtype::complex<double>;
__global__ void FillTensor(float* data) { data[0] = 1; }
TEST(Scalar, ConstructFromDenseTensor1) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::FLOAT16, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<float16>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
ASSERT_NEAR(1, scalar_test.to<float16>(), 1e-6);
}
TEST(Scalar, ConstructFromDenseTensor2) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::INT16, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<int16_t>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
ASSERT_EQ(1, scalar_test.to<int16_t>());
}
TEST(Scalar, ConstructFromDenseTensor3) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::INT8, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<int8_t>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
ASSERT_EQ(1, scalar_test.to<int8_t>());
}
TEST(Scalar, ConstructFromDenseTensor4) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::BOOL, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<bool>(&dense_x);
dense_x_data[0] = true;
phi::Scalar scalar_test(dense_x);
ASSERT_EQ(true, scalar_test.to<bool>());
}
TEST(Scalar, ConstructFromDenseTensor5) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(alloc.get(),
phi::DenseTensorMeta(phi::DataType::COMPLEX64,
phi::make_ddim({1}),
phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<complex64>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
complex64 expected_value(1, 0);
EXPECT_TRUE(expected_value == scalar_test.to<complex64>());
}
TEST(Scalar, ConstructFromDenseTensor6) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::CPUPlace());
phi::DenseTensor dense_x(alloc.get(),
phi::DenseTensorMeta(phi::DataType::COMPLEX128,
phi::make_ddim({1}),
phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::CPUContext*>(pool.Get(phi::CPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<complex128>(&dense_x);
dense_x_data[0] = 1;
phi::Scalar scalar_test(dense_x);
complex128 expected_value(1, 0);
EXPECT_TRUE(expected_value == scalar_test.to<complex128>());
}
TEST(Scalar, ConstructFromDenseTensor7) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::GPUPlace());
phi::DenseTensor dense_x(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::FLOAT32, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::GPUContext*>(pool.Get(phi::GPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<float>(&dense_x);
FillTensor<<<1, 1, 0, dev_ctx->stream()>>>(dense_x_data);
dev_ctx->Wait();
phi::Scalar scalar_test(dense_x);
ASSERT_NEAR(1, scalar_test.to<float>(), 1e-6);
}
TEST(Scalar, ConstructFromTensor) {
// 1. create tensor
const auto alloc =
std::make_unique<paddle::experimental::DefaultAllocator>(phi::GPUPlace());
auto dense_x = std::make_shared<phi::DenseTensor>(
alloc.get(),
phi::DenseTensorMeta(
phi::DataType::FLOAT32, phi::make_ddim({1}), phi::DataLayout::NCHW));
phi::DeviceContextPool& pool = phi::DeviceContextPool::Instance();
auto* dev_ctx = reinterpret_cast<phi::GPUContext*>(pool.Get(phi::GPUPlace()));
auto* dense_x_data = dev_ctx->Alloc<float>(dense_x.get());
FillTensor<<<1, 1, 0, dev_ctx->stream()>>>(dense_x_data);
dev_ctx->Wait();
paddle::experimental::Tensor x(dense_x);
paddle::experimental::Scalar scalar_test(x);
ASSERT_NEAR(1, scalar_test.to<float>(), 1e-6);
}
} // namespace tests
} // namespace phi
|
a51090d62426c1142aed90a2c6f19de5ab8a8a1e.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "siftDetectorGPU.cuh"
__global__ void gaussianHorizontal(float* image, float* convolved, float* convolvedDoG,
float* kernels, int width, int height, int layer, int kRadius, bool returnDoG)
{
__shared__ float cache[TILE_SIDE + 2 * MAX_KERNEL_RADIUS];
int tIdx = threadIdx.x;
int col = blockIdx.x * TILE_SIDE + tIdx;
int row = blockIdx.y;
int numLayers = (returnDoG) ? (SIZE_OCTAVES + 3) : 1;
int offset = row * width;
int cacheLeftOffset = MAX_KERNEL_RADIUS - kRadius;
if (col < width) {
// The first layer of an octave already has the correct blur from initial blur / downsampling
if (layer == 0 && returnDoG) {
convolved[offset + col] = image[offset + col];
} else {
int layerOffset = offset * numLayers;
float* source = (returnDoG) ? convolvedDoG + (layer - 1) * width : image;
// Copy the part of the image row to the shared memory: each thread takes care of 1 pixel
cache[tIdx + MAX_KERNEL_RADIUS] = source[layerOffset + col];
// If a thread is located at the corners of the tread block, it also takes care of the
// neighboring pixels required for the 1D convolution.
if (col == 0) {
// Mirrow the pixels at the left border of the image
for (int i = 0; i < kRadius; i++)
cache[cacheLeftOffset + i] = source[layerOffset + kRadius - i - 1];
}
else if (tIdx == 0) {
for (int i = 0; i < kRadius; i++) {
int colIdx = (int)fmaxf(0.f, col - kRadius + i);
cache[cacheLeftOffset + i] = source[layerOffset + colIdx];
}
}
if (col == width - 1) {
// Mirrow pixels at the right border of the image
for (int i = 0; i < kRadius; i++)
cache[MAX_KERNEL_RADIUS + tIdx + i + 1] = source[layerOffset + col - i - 1];
}
else if (tIdx == TILE_SIDE - 1) {
for (int i = 0; i < kRadius; i++) {
int colIdx = (int)fminf((float)width - 1.f, col + i + 1);
cache[MAX_KERNEL_RADIUS + tIdx + i + 1] = source[layerOffset + colIdx];
}
}
__syncthreads();
float* data = cache + cacheLeftOffset + tIdx;
// Compute 1D convolution for the current pixel, corresponding to the horizontal blur
float* kernel = kernels + layer * (MAX_KERNEL_RADIUS + 1);
float convResult = kernel[0] * data[kRadius];
for (int i = 1; i <= kRadius; i++)
convResult += kernel[i] * (data[kRadius - i] + data[kRadius + i]);
// Write the result of the convolution for the current pixel
convolved[offset + col] = convResult;
}
}
}
__global__ void gaussianVerticalDoG(float* image, float* convolved, float* convolvedDoG,
float* kernels, int width, int height, int layer, int kRadius, bool returnDoG)
{
__shared__ float cache[(TILE_SIDE + 2 * MAX_KERNEL_RADIUS) * TILE_SIDE];
int tIdx = threadIdx.x;
int tIdy = threadIdx.y;
int col = blockIdx.x * TILE_SIDE + tIdx;
int row = blockIdx.y * TILE_SIDE + tIdy;
int numLayers = (returnDoG) ? (SIZE_OCTAVES + 3) : 1;
int cacheTopOffset = MAX_KERNEL_RADIUS - kRadius;
if (row < height && col < width) {
int curIdx = row * width * numLayers + layer * width + col;
// The first layer already has the correct blur
if (layer == 0 && returnDoG) {
convolvedDoG[curIdx] = image[row * width + col];
} else {
// Copy the part of the image row to the shared memory: each thread takes care of 1 pixel
int rowOffsetCache = (tIdy + MAX_KERNEL_RADIUS) * TILE_SIDE;
cache[rowOffsetCache + tIdx] = convolved[row * width + col];
if (row == 0) {
// Mirrow the pixels at the upper border of the image
for (int i = 0; i < kRadius; i++) {
int convolvedIdx = (kRadius - i - 1) * width + col;
cache[(cacheTopOffset + i) * TILE_SIDE + tIdx] = convolved[convolvedIdx];
}
}
else if (tIdy == 0) {
for (int i = 0; i < kRadius; i++) {
int rowIdx = (int)fmaxf(0.f, row - kRadius + i);
int convolvedIdx = rowIdx * width + col;
cache[(cacheTopOffset + i) * TILE_SIDE + tIdx] = convolved[convolvedIdx];
}
}
if (row == height - 1) {
// Mirrow the pixels at the bottom border of the image
for (int i = 0; i < kRadius; i++) {
int cacheIdx = (MAX_KERNEL_RADIUS + tIdy + i + 1) * TILE_SIDE + tIdx;
int convolvedIdx = (row - i - 1) * width + col;
cache[cacheIdx] = convolved[convolvedIdx];
}
}
else if (tIdy == TILE_SIDE - 1) {
for (int i = 0; i < kRadius; i++) {
int rowIdx = (int)fminf((float)height - 1.f, row + i + 1);
int cacheIdx = (MAX_KERNEL_RADIUS + tIdy + i + 1) * TILE_SIDE + tIdx;
int convolvedIdx = rowIdx * width + col;
cache[cacheIdx] = convolved[convolvedIdx];
}
}
__syncthreads();
// Compute 1D convolution for the current pixel, corresponding to the vertical blur
int cacheOffset = (tIdy + MAX_KERNEL_RADIUS) * TILE_SIDE + tIdx;
float* kernel = kernels + layer * (MAX_KERNEL_RADIUS + 1);
// Compute 1D convolution for the current pixel, corresponding to the vertical blur
float convResult = kernel[0] * cache[cacheOffset];
for (int i = 1; i <= kRadius; i++) {
convResult += kernel[i] * (
cache[cacheOffset - i * TILE_SIDE] + cache[cacheOffset + i * TILE_SIDE]);
}
// Write the result of the convolution for the current pixel
convolvedDoG[curIdx] = convResult;
if (returnDoG) {
// Directly construct the Difference-of-Gaussian for the neighboring layers of an octave
convolvedDoG[curIdx - width] = convolvedDoG[curIdx] - convolvedDoG[curIdx - width];
}
__syncthreads();
// Save the image that will be used for downsampling
if (layer == SIZE_OCTAVES && returnDoG) {
image[row * width + col] = convolvedDoG[curIdx];
}
}
}
}
__global__ void downsampleImage(float* image, int width, int height)
{
int col = blockIdx.x * TILE_SIDE_FIXED + threadIdx.x;
int row = blockIdx.y * TILE_SIDE_FIXED + threadIdx.y;
if (row < height / 2 && col < width / 2) {
float result = image[row * 2 * width + col * 2];
result += image[(row * 2 + 1) * width + col * 2];
result += image[row * 2 * width + col * 2 + 1];
result += image[(row * 2 + 1) * width + col * 2 + 1];
image[row * (width / 2) + col] = 0.25f * result;
}
}
__global__ void upsampleImage(float* image, float* upsampledImage,
int width, int height, const float rRatio, const float cRatio)
{
int col = blockIdx.x * TILE_SIDE_FIXED + threadIdx.x;
int row = blockIdx.y * TILE_SIDE_FIXED + threadIdx.y;
if (row < 2 * height && col < 2 * width) {
int cLow = (int)floorf(cRatio * col), rLow = (int)floorf(rRatio * row);
int cHigh = (int)ceilf(cRatio * col), rHigh = (int)ceilf(rRatio * row);
float cWeight = (cRatio * col) - (float)cLow, rWeight = (rRatio * row) - (float)rLow;
float result = image[rLow * width * 2 + cLow] * (1 - cWeight) * (1 - rWeight);
result += image[rLow * width * 2 + cHigh] * cWeight * (1 - rWeight);
result += image[rHigh * width * 2 + cLow] * (1 - cWeight) * rWeight;
result += image[rHigh * width * 2 + cHigh] * cWeight * rWeight;
upsampledImage[row * width * 2 + col] = result;
}
}
__global__ void locateExtrema(
float* convolvedDoG, Keypoint* keypoints, int* counter, int width, int height, int octave)
{
__shared__ float cache[(TILE_SIDE_FIXED + 2) * (TILE_SIDE_FIXED + 2) * (SIZE_OCTAVES + 2)];
int tIdx = threadIdx.x;
int tIdy = threadIdx.y;
int col = blockIdx.x * TILE_SIDE_FIXED + tIdx;
int row = blockIdx.y * TILE_SIDE_FIXED + tIdy;
// Get the corresponding block of pixels to the shared memory
if (row < height && col < width) {
int cacheOffset = (TILE_SIDE_FIXED + 2) * (SIZE_OCTAVES + 2);
int imageOffset = width * (SIZE_OCTAVES + 3);
for (int layer = 0; layer < SIZE_OCTAVES + 2; layer++) {
int cacheCol = layer * (TILE_SIDE_FIXED + 2) + tIdx + 1;
int imageIdx = row * imageOffset + layer * width + col;
cache[(tIdy + 1) * cacheOffset + cacheCol] = convolvedDoG[imageIdx];
if (tIdx == 0 && col != 0)
cache[(tIdy + 1) * cacheOffset + cacheCol - 1] = convolvedDoG[imageIdx - 1];
if (tIdx == TILE_SIDE_FIXED - 1 && col != width - 1)
cache[(tIdy + 1) * cacheOffset + cacheCol + 1] = convolvedDoG[imageIdx + 1];
if (tIdy == 0 && row != 0)
cache[cacheCol] = convolvedDoG[imageIdx - imageOffset];
if (tIdy == TILE_SIDE_FIXED - 1 && row != height - 1)
cache[(tIdy + 2) * cacheOffset + cacheCol] = convolvedDoG[imageIdx + imageOffset];
}
}
__syncthreads();
// Look for the keypoints in the DoG pyramid
const int N_NEIGHBORS = 27;
float neighbors[N_NEIGHBORS];
if (row > SIFT_BORDER && row < height - SIFT_BORDER && col > SIFT_BORDER && col < width - SIFT_BORDER) {
for (int layer = 1; layer < SIZE_OCTAVES + 1; layer++) {
getPointNeighborhood(neighbors, cache, tIdy, tIdx, layer);
if (isMaximum(neighbors) || isMinimum(neighbors)) {
Keypoint* kp = new Keypoint((float)row, (float)col, layer, octave, layer);
bool adjusted = adjustKeypoint(kp, neighbors, convolvedDoG, width, height);
bool keep = keepKeypoint(kp, neighbors, convolvedDoG, width);
if (adjusted && keep) {
int oldCounter = atomicAdd(&counter[0], 1);
if (oldCounter < MAX_N_KEYPOINTS) {
keypoints[oldCounter].x = kp->x;
keypoints[oldCounter].y = kp->y;
keypoints[oldCounter].xAdj = kp->xAdj;
keypoints[oldCounter].yAdj = kp->yAdj;
keypoints[oldCounter].sigma = kp->sigma;
keypoints[oldCounter].octave = kp->octave;
keypoints[oldCounter].layer = kp->layer;
}
}
delete kp;
}
}
}
}
__device__ void getPointNeighborhood(float* neighbors, float* cache, int tIdy, int tIdx, int layer)
{
int neighIdx = 0;
int cacheIdx = (tIdy + 1) * (TILE_SIDE_FIXED + 2) * (SIZE_OCTAVES + 2) +
(layer - 1) * (TILE_SIDE_FIXED + 2) + tIdx + 1;
int cacheOffset = (TILE_SIDE_FIXED + 2) * (SIZE_OCTAVES + 2);
for (int i = 0; i < 3; i++) {
neighbors[neighIdx++] = cache[cacheIdx - cacheOffset - 1]; // up-left
neighbors[neighIdx++] = cache[cacheIdx - cacheOffset]; // up
neighbors[neighIdx++] = cache[cacheIdx - cacheOffset + 1]; // up-right
neighbors[neighIdx++] = cache[cacheIdx - 1]; // left
neighbors[neighIdx++] = cache[cacheIdx]; // center
neighbors[neighIdx++] = cache[cacheIdx + 1]; // right
neighbors[neighIdx++] = cache[cacheIdx + cacheOffset - 1]; // down-left
neighbors[neighIdx++] = cache[cacheIdx + cacheOffset]; // down
neighbors[neighIdx++] = cache[cacheIdx + cacheOffset + 1]; // down-right
cacheIdx += TILE_SIDE_FIXED + 2;
}
}
__device__ void getPointNeighborhoodFromImage(float* neighbors,
float* convolvedDoG, int row, int col, int layer, int width)
{
int neighIdx = 0;
int imageIdx = row * width * (SIZE_OCTAVES + 3) + (layer - 1) * width + col;
int imageOffset = width * (SIZE_OCTAVES + 3);
for (int i = 0; i < 3; i++) {
neighbors[neighIdx++] = convolvedDoG[imageIdx - imageOffset - 1]; // up-left
neighbors[neighIdx++] = convolvedDoG[imageIdx - imageOffset]; // up
neighbors[neighIdx++] = convolvedDoG[imageIdx - imageOffset + 1]; // up-right
neighbors[neighIdx++] = convolvedDoG[imageIdx - 1]; // left
neighbors[neighIdx++] = convolvedDoG[imageIdx]; // center
neighbors[neighIdx++] = convolvedDoG[imageIdx + 1]; // right
neighbors[neighIdx++] = convolvedDoG[imageIdx + imageOffset - 1]; // down-left
neighbors[neighIdx++] = convolvedDoG[imageIdx + imageOffset]; // down
neighbors[neighIdx++] = convolvedDoG[imageIdx + imageOffset + 1]; // down-right
imageIdx += width;
}
}
__device__ bool isMaximum(float* neighbors)
{
const int CENTER = 13;
for (int i = 0; i < 27; i++) {
if (i == CENTER) continue;
if (neighbors[CENTER] <= neighbors[i]) return false;
}
return true;
}
__device__ bool isMinimum(float* neighbors)
{
const int CENTER = 13;
for (int i = 0; i < 27; i++) {
if (i == CENTER) continue;
if (neighbors[CENTER] >= neighbors[i]) return false;
}
return true;
}
__device__ float getValueAtExtremum(float* neighbors, float* alpha)
{
const int CENTER = 13;
// Perform quadratic interpolation
float g[3] = {
0.5f * (neighbors[22] - neighbors[4]),
0.5f * (neighbors[16] - neighbors[10]),
0.5f * (neighbors[14] - neighbors[12]) };
// Calculate entries of 3x3 Hessian matrix
float h11 = neighbors[22] + neighbors[4] - 2 * neighbors[CENTER];
float h22 = neighbors[16] + neighbors[10] - 2 * neighbors[CENTER];
float h33 = neighbors[14] + neighbors[12] - 2 * neighbors[CENTER];
float h12 = 0.25f * (neighbors[25] - neighbors[19] - neighbors[7] + neighbors[1]);
float h13 = 0.25f * (neighbors[23] - neighbors[21] - neighbors[5] + neighbors[3]);
float h23 = 0.25f * (neighbors[17] - neighbors[15] - neighbors[11] + neighbors[9]);
float h21 = h12, h31 = h13, h32 = h23;
float det = h11 * (h22 * h33 - h32 * h23) - h12 * (h21 * h33 - h23 * h31) + h13 * (h21 * h32 - h22 * h31);
// Calculate the inverse of 3x3 Hessian matrix
float invDet = fabsf(1.f / det);
float h11Inv = (h22 * h33 - h32 * h23) * invDet;
float h12Inv = (h13 * h32 - h12 * h33) * invDet;
float h13Inv = (h12 * h23 - h13 * h22) * invDet;
float h21Inv = (h23 * h31 - h21 * h33) * invDet;
float h22Inv = (h11 * h33 - h13 * h31) * invDet;
float h23Inv = (h21 * h13 - h11 * h23) * invDet;
float h31Inv = (h21 * h32 - h31 * h22) * invDet;
float h32Inv = (h31 * h12 - h11 * h32) * invDet;
float h33Inv = (h11 * h22 - h21 * h12) * invDet;
float invHessian[9] = { h11Inv, h12Inv, h13Inv, h21Inv, h22Inv, h23Inv, h31Inv, h32Inv, h33Inv };
float extremumVal = neighbors[CENTER];
for (int i = 0; i < 3; i++) {
alpha[i] = 0.f;
for (int j = 0; j < 3; j++) {
alpha[i] += -invHessian[3 * i + j] * g[j];
}
extremumVal += 0.5f * alpha[i] * g[i];
}
return extremumVal;
}
__device__ bool adjustKeypoint(Keypoint* kp, float* neighbors, float* convolvedDoG, int width, int height)
{
float l = (float)kp->layer, x = kp->x, y = kp->y;
float maxAlpha = 1.f;
const int CENTER = 13;
// Conservative test for low contrast
if (fabsf(neighbors[CENTER]) < roundf(0.5f * CONTRAST_TH / (float)SIZE_OCTAVES)) return false;
float extremumVal, alpha[3];
// Iterate until the keypoint is adjusted or maximum of iterations is exceeded
for (int i = 0; i < MAX_ADJ_ITER; i++) {
// Calculate adjusted coordinates of the keypoint in the scale-space
extremumVal = getValueAtExtremum(neighbors, alpha);
// Check for the successful keypoint adjustment
maxAlpha = fmaxf(fmaxf(fabsf(alpha[0]), fabsf(alpha[1])), fabsf(alpha[2]));
if (maxAlpha <= 0.5f) break;
// Update interpolating position
l += roundf(alpha[0]);
x += roundf(alpha[1]);
y += roundf(alpha[2]);
if ((l < 1 || l > SIZE_OCTAVES) || (x < SIFT_BORDER) || (y < SIFT_BORDER) ||
(x > height - SIFT_BORDER) || (y > width - SIFT_BORDER)) return false;
// Update the neighborhood for the new extrema position
getPointNeighborhoodFromImage(neighbors, convolvedDoG, (int)x, (int)y, (int)l, width);
}
if (maxAlpha <= 0.5f) {
// The second test for low constrast
if (fabsf(extremumVal) < (CONTRAST_TH / SIZE_OCTAVES)) return false;
// Calculate adjusted coordinates of the keypoint in the scale-space
int octaveMult = (1 << kp->octave);
float sigma_0 = sqrt(SIGMA_INIT * SIGMA_INIT - IMAGE_SIGMA * IMAGE_SIGMA);
kp->sigma = 2 * sigma_0 * octaveMult * powf(2.f, (alpha[0] + l) / SIZE_OCTAVES);
kp->xAdj = MIN_SAMPLING_DIST * octaveMult * (alpha[1] + x);
kp->yAdj = MIN_SAMPLING_DIST * octaveMult * (alpha[2] + y);
kp->x = x; kp->y = y; kp->layer = l;
return true;
}
return false;
}
__device__ bool keepKeypoint(Keypoint* kp, float* neighbors, float* convolvedDoG, int width)
{
const int CENTER = 13;
float l = kp->layer, x = kp->x, y = kp->y;
if ((int)kp->sigma != kp->layer) {
getPointNeighborhoodFromImage(neighbors, convolvedDoG, (int)x, (int)y, (int)l, width);
}
int octaveOffset = kp->octave * (SIZE_OCTAVES + 2);
// Calculate entries of 2x2 Hessian matrix
float h11 = neighbors[16] + neighbors[10] - 2 * neighbors[CENTER];
float h22 = neighbors[14] + neighbors[12] - 2 * neighbors[CENTER];
float h12 = 0.25f * (neighbors[17] - neighbors[15] - neighbors[11] + neighbors[9]);
float trace = h11 + h12;
float det = h11 * h22 - h12 * h12;
if (det <= 0.f) return false;
// Filter based on the edgeness of the keypoint
float edgeness = (trace * trace) / det;
if (edgeness >= powf(EDGENESS_TH + 1.f, 2.f) / EDGENESS_TH) return false;
return true;
} | a51090d62426c1142aed90a2c6f19de5ab8a8a1e.cu | #include "siftDetectorGPU.cuh"
__global__ void gaussianHorizontal(float* image, float* convolved, float* convolvedDoG,
float* kernels, int width, int height, int layer, int kRadius, bool returnDoG)
{
__shared__ float cache[TILE_SIDE + 2 * MAX_KERNEL_RADIUS];
int tIdx = threadIdx.x;
int col = blockIdx.x * TILE_SIDE + tIdx;
int row = blockIdx.y;
int numLayers = (returnDoG) ? (SIZE_OCTAVES + 3) : 1;
int offset = row * width;
int cacheLeftOffset = MAX_KERNEL_RADIUS - kRadius;
if (col < width) {
// The first layer of an octave already has the correct blur from initial blur / downsampling
if (layer == 0 && returnDoG) {
convolved[offset + col] = image[offset + col];
} else {
int layerOffset = offset * numLayers;
float* source = (returnDoG) ? convolvedDoG + (layer - 1) * width : image;
// Copy the part of the image row to the shared memory: each thread takes care of 1 pixel
cache[tIdx + MAX_KERNEL_RADIUS] = source[layerOffset + col];
// If a thread is located at the corners of the tread block, it also takes care of the
// neighboring pixels required for the 1D convolution.
if (col == 0) {
// Mirrow the pixels at the left border of the image
for (int i = 0; i < kRadius; i++)
cache[cacheLeftOffset + i] = source[layerOffset + kRadius - i - 1];
}
else if (tIdx == 0) {
for (int i = 0; i < kRadius; i++) {
int colIdx = (int)fmaxf(0.f, col - kRadius + i);
cache[cacheLeftOffset + i] = source[layerOffset + colIdx];
}
}
if (col == width - 1) {
// Mirrow pixels at the right border of the image
for (int i = 0; i < kRadius; i++)
cache[MAX_KERNEL_RADIUS + tIdx + i + 1] = source[layerOffset + col - i - 1];
}
else if (tIdx == TILE_SIDE - 1) {
for (int i = 0; i < kRadius; i++) {
int colIdx = (int)fminf((float)width - 1.f, col + i + 1);
cache[MAX_KERNEL_RADIUS + tIdx + i + 1] = source[layerOffset + colIdx];
}
}
__syncthreads();
float* data = cache + cacheLeftOffset + tIdx;
// Compute 1D convolution for the current pixel, corresponding to the horizontal blur
float* kernel = kernels + layer * (MAX_KERNEL_RADIUS + 1);
float convResult = kernel[0] * data[kRadius];
for (int i = 1; i <= kRadius; i++)
convResult += kernel[i] * (data[kRadius - i] + data[kRadius + i]);
// Write the result of the convolution for the current pixel
convolved[offset + col] = convResult;
}
}
}
/**
 * Vertical pass of the separable Gaussian blur, optionally building the
 * Difference-of-Gaussians (DoG) stack in the same pass.
 *
 * Block dimensions: TILE_SIDE x TILE_SIDE; one thread per output pixel.
 * Shared memory caches a TILE_SIDE-wide column strip of `convolved` with a
 * MAX_KERNEL_RADIUS halo above and below the tile.
 *
 * Layout note: when returnDoG is true, `convolvedDoG` interleaves layers
 * per image row (stride SIZE_OCTAVES + 3 layers of `width` each).
 */
__global__ void gaussianVerticalDoG(float* image, float* convolved, float* convolvedDoG,
    float* kernels, int width, int height, int layer, int kRadius, bool returnDoG)
{
    __shared__ float cache[(TILE_SIDE + 2 * MAX_KERNEL_RADIUS) * TILE_SIDE];
    int tIdx = threadIdx.x;
    int tIdy = threadIdx.y;
    int col = blockIdx.x * TILE_SIDE + tIdx;
    int row = blockIdx.y * TILE_SIDE + tIdy;
    // Number of layers interleaved per image row in convolvedDoG
    int numLayers = (returnDoG) ? (SIZE_OCTAVES + 3) : 1;
    // When kRadius < MAX_KERNEL_RADIUS, the halo is only partially filled;
    // cacheTopOffset skips the unused rows at the top of the halo.
    int cacheTopOffset = MAX_KERNEL_RADIUS - kRadius;
    if (row < height && col < width) {
        int curIdx = row * width * numLayers + layer * width + col;
        // The first layer already has the correct blur
        if (layer == 0 && returnDoG) {
            convolvedDoG[curIdx] = image[row * width + col];
        } else {
            // Copy the part of the image column to the shared memory: each thread takes care of 1 pixel
            int rowOffsetCache = (tIdy + MAX_KERNEL_RADIUS) * TILE_SIDE;
            cache[rowOffsetCache + tIdx] = convolved[row * width + col];
            if (row == 0) {
                // Mirror the pixels at the upper border of the image
                for (int i = 0; i < kRadius; i++) {
                    int convolvedIdx = (kRadius - i - 1) * width + col;
                    cache[(cacheTopOffset + i) * TILE_SIDE + tIdx] = convolved[convolvedIdx];
                }
            }
            else if (tIdy == 0) {
                // Top halo for an interior tile, clamped to the image
                for (int i = 0; i < kRadius; i++) {
                    int rowIdx = (int)fmaxf(0.f, row - kRadius + i);
                    int convolvedIdx = rowIdx * width + col;
                    cache[(cacheTopOffset + i) * TILE_SIDE + tIdx] = convolved[convolvedIdx];
                }
            }
            if (row == height - 1) {
                // Mirror the pixels at the bottom border of the image
                for (int i = 0; i < kRadius; i++) {
                    int cacheIdx = (MAX_KERNEL_RADIUS + tIdy + i + 1) * TILE_SIDE + tIdx;
                    int convolvedIdx = (row - i - 1) * width + col;
                    cache[cacheIdx] = convolved[convolvedIdx];
                }
            }
            else if (tIdy == TILE_SIDE - 1) {
                // Bottom halo for an interior tile, clamped to the image
                for (int i = 0; i < kRadius; i++) {
                    int rowIdx = (int)fminf((float)height - 1.f, row + i + 1);
                    int cacheIdx = (MAX_KERNEL_RADIUS + tIdy + i + 1) * TILE_SIDE + tIdx;
                    int convolvedIdx = rowIdx * width + col;
                    cache[cacheIdx] = convolved[convolvedIdx];
                }
            }
            // NOTE(review): this barrier sits inside the row/col bounds guard (and the
            // layer != 0 branch); blocks straddling the image edge have threads that
            // never reach it, which is undefined behavior for __syncthreads().
            // Verify image sizes are multiples of TILE_SIDE, or restructure.
            __syncthreads();
            // Compute 1D convolution for the current pixel, corresponding to the vertical blur
            int cacheOffset = (tIdy + MAX_KERNEL_RADIUS) * TILE_SIDE + tIdx;
            // Each layer has its own symmetric half-kernel of MAX_KERNEL_RADIUS + 1 taps
            float* kernel = kernels + layer * (MAX_KERNEL_RADIUS + 1);
            float convResult = kernel[0] * cache[cacheOffset];
            for (int i = 1; i <= kRadius; i++) {
                convResult += kernel[i] * (
                    cache[cacheOffset - i * TILE_SIDE] + cache[cacheOffset + i * TILE_SIDE]);
            }
            // Write the result of the convolution for the current pixel
            convolvedDoG[curIdx] = convResult;
            if (returnDoG) {
                // Replace the previous layer with the difference of this layer and it,
                // building the DoG in place. Assumes the previous layer (curIdx - width)
                // was fully written by an earlier launch for layer - 1 -- TODO confirm.
                convolvedDoG[curIdx - width] = convolvedDoG[curIdx] - convolvedDoG[curIdx - width];
            }
            // NOTE(review): same divergent-barrier concern as above.
            __syncthreads();
            // Save the image that will be used for downsampling
            if (layer == SIZE_OCTAVES && returnDoG) {
                image[row * width + col] = convolvedDoG[curIdx];
            }
        }
    }
}
/**
 * Halves the image resolution in place by averaging each 2x2 pixel block.
 *
 * Block dimensions: TILE_SIDE_FIXED x TILE_SIDE_FIXED; one thread per
 * output (half-resolution) pixel. The result occupies the first
 * (height/2)*(width/2) elements of `image` with stride width/2.
 *
 * NOTE(review): reads (full-resolution stride `width`) and writes
 * (half-resolution stride `width / 2`) target the same buffer, so a thread
 * may overwrite an element another thread has yet to read -- there is no
 * ordering guarantee between blocks. Verify, or use a scratch buffer.
 */
__global__ void downsampleImage(float* image, int width, int height)
{
    int col = blockIdx.x * TILE_SIDE_FIXED + threadIdx.x;
    int row = blockIdx.y * TILE_SIDE_FIXED + threadIdx.y;
    if (row < height / 2 && col < width / 2) {
        // Average the 2x2 neighborhood in the full-resolution image
        float result = image[row * 2 * width + col * 2];
        result += image[(row * 2 + 1) * width + col * 2];
        result += image[row * 2 * width + col * 2 + 1];
        result += image[(row * 2 + 1) * width + col * 2 + 1];
        // Store with the half-resolution row stride
        image[row * (width / 2) + col] = 0.25f * result;
    }
}
/**
 * Doubles the image resolution using bilinear interpolation.
 *
 * Block dimensions: TILE_SIDE_FIXED x TILE_SIDE_FIXED; one thread per
 * output pixel. Both source and destination rows use a stride of 2 * width
 * (so `width` here appears to be the half-resolution width -- the caller
 * fixes the meaning). rRatio/cRatio map destination to source coordinates.
 */
__global__ void upsampleImage(float* image, float* upsampledImage,
    int width, int height, const float rRatio, const float cRatio)
{
    int col = blockIdx.x * TILE_SIDE_FIXED + threadIdx.x;
    int row = blockIdx.y * TILE_SIDE_FIXED + threadIdx.y;
    if (row >= 2 * height || col >= 2 * width)
        return;
    // Real-valued source coordinates of this destination pixel
    const float srcC = cRatio * col;
    const float srcR = rRatio * row;
    // Corners of the enclosing source grid cell
    const int c0 = (int)floorf(srcC);
    const int r0 = (int)floorf(srcR);
    const int c1 = (int)ceilf(srcC);
    const int r1 = (int)ceilf(srcR);
    // Fractional position inside the cell
    const float wc = srcC - (float)c0;
    const float wr = srcR - (float)r0;
    // Bilinear blend of the four surrounding source pixels
    float result = image[r0 * width * 2 + c0] * (1 - wc) * (1 - wr);
    result += image[r0 * width * 2 + c1] * wc * (1 - wr);
    result += image[r1 * width * 2 + c0] * (1 - wc) * wr;
    result += image[r1 * width * 2 + c1] * wc * wr;
    upsampledImage[row * width * 2 + col] = result;
}
/**
 * Scans the DoG pyramid for 3x3x3 local extrema and records surviving
 * candidates (after adjustment and edge-response filtering) as keypoints.
 *
 * Block dimensions: TILE_SIDE_FIXED x TILE_SIDE_FIXED; one thread per pixel.
 * Shared memory stages the tile plus a 1-pixel halo for every DoG layer.
 * `counter[0]` holds the running number of keypoints (presumably zeroed by
 * the host before launch -- confirm); at most MAX_N_KEYPOINTS are stored.
 *
 * NOTE(review): the four diagonal corners of the shared-memory halo are
 * never loaded; only the two corner threads of a tile could read them
 * uninitialized. The SIFT_BORDER guard may exclude those cases -- verify.
 */
__global__ void locateExtrema(
    float* convolvedDoG, Keypoint* keypoints, int* counter, int width, int height, int octave)
{
    __shared__ float cache[(TILE_SIDE_FIXED + 2) * (TILE_SIDE_FIXED + 2) * (SIZE_OCTAVES + 2)];
    int tIdx = threadIdx.x;
    int tIdy = threadIdx.y;
    int col = blockIdx.x * TILE_SIDE_FIXED + tIdx;
    int row = blockIdx.y * TILE_SIDE_FIXED + tIdy;
    // Get the corresponding block of pixels to the shared memory
    if (row < height && col < width) {
        int cacheOffset = (TILE_SIDE_FIXED + 2) * (SIZE_OCTAVES + 2);
        int imageOffset = width * (SIZE_OCTAVES + 3);
        for (int layer = 0; layer < SIZE_OCTAVES + 2; layer++) {
            int cacheCol = layer * (TILE_SIDE_FIXED + 2) + tIdx + 1;
            int imageIdx = row * imageOffset + layer * width + col;
            cache[(tIdy + 1) * cacheOffset + cacheCol] = convolvedDoG[imageIdx];
            // Edge threads also fetch the 1-pixel halo on their side
            if (tIdx == 0 && col != 0)
                cache[(tIdy + 1) * cacheOffset + cacheCol - 1] = convolvedDoG[imageIdx - 1];
            if (tIdx == TILE_SIDE_FIXED - 1 && col != width - 1)
                cache[(tIdy + 1) * cacheOffset + cacheCol + 1] = convolvedDoG[imageIdx + 1];
            if (tIdy == 0 && row != 0)
                cache[cacheCol] = convolvedDoG[imageIdx - imageOffset];
            if (tIdy == TILE_SIDE_FIXED - 1 && row != height - 1)
                cache[(tIdy + 2) * cacheOffset + cacheCol] = convolvedDoG[imageIdx + imageOffset];
        }
    }
    __syncthreads();
    // Look for the keypoints in the DoG pyramid
    const int N_NEIGHBORS = 27;
    float neighbors[N_NEIGHBORS];
    if (row > SIFT_BORDER && row < height - SIFT_BORDER && col > SIFT_BORDER && col < width - SIFT_BORDER) {
        for (int layer = 1; layer < SIZE_OCTAVES + 1; layer++) {
            getPointNeighborhood(neighbors, cache, tIdy, tIdx, layer);
            if (isMaximum(neighbors) || isMinimum(neighbors)) {
                // Use a stack-allocated keypoint instead of device-heap new/delete:
                // device-side allocation is slow and can return NULL, which the old
                // code never checked before dereferencing.
                Keypoint kp((float)row, (float)col, layer, octave, layer);
                bool adjusted = adjustKeypoint(&kp, neighbors, convolvedDoG, width, height);
                bool keep = keepKeypoint(&kp, neighbors, convolvedDoG, width);
                if (adjusted && keep) {
                    // Reserve an output slot atomically; discard beyond capacity
                    int oldCounter = atomicAdd(&counter[0], 1);
                    if (oldCounter < MAX_N_KEYPOINTS) {
                        keypoints[oldCounter].x = kp.x;
                        keypoints[oldCounter].y = kp.y;
                        keypoints[oldCounter].xAdj = kp.xAdj;
                        keypoints[oldCounter].yAdj = kp.yAdj;
                        keypoints[oldCounter].sigma = kp.sigma;
                        keypoints[oldCounter].octave = kp.octave;
                        keypoints[oldCounter].layer = kp.layer;
                    }
                }
            }
        }
    }
}
/**
 * Gathers the 3x3x3 neighborhood of a pixel from the shared-memory tile into
 * `neighbors` (27 values, layer-major; within a layer the order is up-left,
 * up, up-right, left, center, right, down-left, down, down-right, so the
 * center sample lands at index 13).
 */
__device__ void getPointNeighborhood(float* neighbors, float* cache, int tIdy, int tIdx, int layer)
{
    // One shared-memory row spans all cached layers side by side
    const int rowStride = (TILE_SIDE_FIXED + 2) * (SIZE_OCTAVES + 2);
    // Center of the window, starting at the layer below the requested one
    int center = (tIdy + 1) * rowStride + (layer - 1) * (TILE_SIDE_FIXED + 2) + tIdx + 1;
    int n = 0;
    for (int s = 0; s < 3; s++) {
        // 3x3 window around the center within the current layer
        for (int dr = -1; dr <= 1; dr++)
            for (int dc = -1; dc <= 1; dc++)
                neighbors[n++] = cache[center + dr * rowStride + dc];
        // Advance one layer to the right within the cache row
        center += TILE_SIDE_FIXED + 2;
    }
}
/**
 * Gathers the 3x3x3 neighborhood of a pixel directly from the DoG stack in
 * global memory, in the same 27-value order as getPointNeighborhood (center
 * at index 13). Layers are interleaved per image row with layer stride
 * `width` and row stride width * (SIZE_OCTAVES + 3).
 */
__device__ void getPointNeighborhoodFromImage(float* neighbors,
    float* convolvedDoG, int row, int col, int layer, int width)
{
    // Each image row holds SIZE_OCTAVES + 3 layers of `width` pixels
    const int rowStride = width * (SIZE_OCTAVES + 3);
    // Center of the window, starting at the layer below the requested one
    int center = row * rowStride + (layer - 1) * width + col;
    int n = 0;
    for (int s = 0; s < 3; s++) {
        for (int dr = -1; dr <= 1; dr++)
            for (int dc = -1; dc <= 1; dc++)
                neighbors[n++] = convolvedDoG[center + dr * rowStride + dc];
        // Next layer lies one image-width further along the row
        center += width;
    }
}
/** Returns true iff the center sample (index 13) strictly exceeds all 26 neighbors. */
__device__ bool isMaximum(float* neighbors)
{
    const float center = neighbors[13];
    // Check the samples before and after the center, skipping the center itself
    for (int i = 0; i < 13; i++)
        if (center <= neighbors[i]) return false;
    for (int i = 14; i < 27; i++)
        if (center <= neighbors[i]) return false;
    return true;
}
/** Returns true iff the center sample (index 13) is strictly below all 26 neighbors. */
__device__ bool isMinimum(float* neighbors)
{
    const float center = neighbors[13];
    // Check the samples before and after the center, skipping the center itself
    for (int i = 0; i < 13; i++)
        if (center >= neighbors[i]) return false;
    for (int i = 14; i < 27; i++)
        if (center >= neighbors[i]) return false;
    return true;
}
/**
 * Second-order (quadratic) interpolation of the DoG around the center of a
 * 3x3x3 neighborhood. On return, alpha = -H^{-1} g is the sub-sample offset
 * of the interpolated extremum in (layer, row, col) order, and the return
 * value is the interpolated DoG value at that offset.
 */
__device__ float getValueAtExtremum(float* neighbors, float* alpha)
{
    const int CENTER = 13;
    // Gradient by central differences, in (layer, row, col) order
    float g[3] = {
        0.5f * (neighbors[22] - neighbors[4]),
        0.5f * (neighbors[16] - neighbors[10]),
        0.5f * (neighbors[14] - neighbors[12]) };
    // Entries of the symmetric 3x3 Hessian by finite differences
    float h11 = neighbors[22] + neighbors[4] - 2 * neighbors[CENTER];
    float h22 = neighbors[16] + neighbors[10] - 2 * neighbors[CENTER];
    float h33 = neighbors[14] + neighbors[12] - 2 * neighbors[CENTER];
    float h12 = 0.25f * (neighbors[25] - neighbors[19] - neighbors[7] + neighbors[1]);
    float h13 = 0.25f * (neighbors[23] - neighbors[21] - neighbors[5] + neighbors[3]);
    float h23 = 0.25f * (neighbors[17] - neighbors[15] - neighbors[11] + neighbors[9]);
    float h21 = h12, h31 = h13, h32 = h23;
    float det = h11 * (h22 * h33 - h32 * h23) - h12 * (h21 * h33 - h23 * h31) + h13 * (h21 * h32 - h22 * h31);
    // Invert the Hessian via its adjugate divided by the determinant.
    // NOTE(review): fabsf() discards the sign of det, so for det < 0 this is NOT
    // the true inverse (alpha gets the wrong sign); det == 0 also yields inf.
    // Confirm whether the absolute value is intentional.
    float invDet = fabsf(1.f / det);
    float h11Inv = (h22 * h33 - h32 * h23) * invDet;
    float h12Inv = (h13 * h32 - h12 * h33) * invDet;
    float h13Inv = (h12 * h23 - h13 * h22) * invDet;
    float h21Inv = (h23 * h31 - h21 * h33) * invDet;
    float h22Inv = (h11 * h33 - h13 * h31) * invDet;
    float h23Inv = (h21 * h13 - h11 * h23) * invDet;
    float h31Inv = (h21 * h32 - h31 * h22) * invDet;
    float h32Inv = (h31 * h12 - h11 * h32) * invDet;
    float h33Inv = (h11 * h22 - h21 * h12) * invDet;
    float invHessian[9] = { h11Inv, h12Inv, h13Inv, h21Inv, h22Inv, h23Inv, h31Inv, h32Inv, h33Inv };
    float extremumVal = neighbors[CENTER];
    // alpha = -H^{-1} g; interpolated value = D + 0.5 * g . alpha
    for (int i = 0; i < 3; i++) {
        alpha[i] = 0.f;
        for (int j = 0; j < 3; j++) {
            alpha[i] += -invHessian[3 * i + j] * g[j];
        }
        extremumVal += 0.5f * alpha[i] * g[i];
    }
    return extremumVal;
}
/**
 * Refines a candidate keypoint to sub-pixel/sub-scale accuracy by iterating
 * quadratic interpolation of the DoG around it. Returns true and fills kp's
 * adjusted fields (sigma, xAdj, yAdj) if the refinement converges and the
 * interpolated value passes the contrast test; returns false otherwise.
 * Here kp->x indexes rows and kp->y indexes columns.
 */
__device__ bool adjustKeypoint(Keypoint* kp, float* neighbors, float* convolvedDoG, int width, int height)
{
    float l = (float)kp->layer, x = kp->x, y = kp->y;
    float maxAlpha = 1.f;
    const int CENTER = 13;
    // Conservative test for low contrast.
    // NOTE(review): roundf() quantizes the threshold to an integer; for a typical
    // fractional CONTRAST_TH this evaluates to 0 and never rejects -- confirm intent.
    if (fabsf(neighbors[CENTER]) < roundf(0.5f * CONTRAST_TH / (float)SIZE_OCTAVES)) return false;
    float extremumVal, alpha[3];
    // Iterate until the keypoint is adjusted or maximum of iterations is exceeded
    for (int i = 0; i < MAX_ADJ_ITER; i++) {
        // alpha = sub-sample offset of the interpolated extremum (layer, row, col)
        extremumVal = getValueAtExtremum(neighbors, alpha);
        // Converged when the extremum lies within half a sample of the current position
        maxAlpha = fmaxf(fmaxf(fabsf(alpha[0]), fabsf(alpha[1])), fabsf(alpha[2]));
        if (maxAlpha <= 0.5f) break;
        // Step to the nearest sample in the direction of the extremum
        l += roundf(alpha[0]);
        x += roundf(alpha[1]);
        y += roundf(alpha[2]);
        // Abort if the updated position leaves the valid scale-space region
        if ((l < 1 || l > SIZE_OCTAVES) || (x < SIFT_BORDER) || (y < SIFT_BORDER) ||
            (x > height - SIFT_BORDER) || (y > width - SIFT_BORDER)) return false;
        // Update the neighborhood for the new extrema position
        getPointNeighborhoodFromImage(neighbors, convolvedDoG, (int)x, (int)y, (int)l, width);
    }
    if (maxAlpha <= 0.5f) {
        // The second test for low contrast
        if (fabsf(extremumVal) < (CONTRAST_TH / SIZE_OCTAVES)) return false;
        // Map the refined (layer, row, col) back to absolute image coordinates/scale
        int octaveMult = (1 << kp->octave);
        float sigma_0 = sqrt(SIGMA_INIT * SIGMA_INIT - IMAGE_SIGMA * IMAGE_SIGMA);
        kp->sigma = 2 * sigma_0 * octaveMult * powf(2.f, (alpha[0] + l) / SIZE_OCTAVES);
        kp->xAdj = MIN_SAMPLING_DIST * octaveMult * (alpha[1] + x);
        kp->yAdj = MIN_SAMPLING_DIST * octaveMult * (alpha[2] + y);
        kp->x = x; kp->y = y; kp->layer = l;
        return true;
    }
    return false;
}
/**
 * Edge-response filter: rejects keypoints lying on edges by thresholding the
 * ratio trace^2/det of the 2x2 spatial Hessian of the DoG (Lowe's test,
 * keep iff trace^2/det < (r+1)^2/r with r = EDGENESS_TH). Also rejects
 * keypoints whose Hessian determinant is non-positive (saddle points).
 */
__device__ bool keepKeypoint(Keypoint* kp, float* neighbors, float* convolvedDoG, int width)
{
    const int CENTER = 13;
    float l = kp->layer, x = kp->x, y = kp->y;
    // Refresh the neighborhood when adjustKeypoint moved the sample position.
    // NOTE(review): comparing sigma (a scale in pixels) against the layer index
    // looks like a proxy for "was adjusted" -- confirm this condition's intent.
    if ((int)kp->sigma != kp->layer) {
        getPointNeighborhoodFromImage(neighbors, convolvedDoG, (int)x, (int)y, (int)l, width);
    }
    // Entries of the 2x2 spatial Hessian [[h11, h12], [h12, h22]]
    float h11 = neighbors[16] + neighbors[10] - 2 * neighbors[CENTER];
    float h22 = neighbors[14] + neighbors[12] - 2 * neighbors[CENTER];
    float h12 = 0.25f * (neighbors[17] - neighbors[15] - neighbors[11] + neighbors[9]);
    // BUG FIX: the trace of the 2x2 Hessian is h11 + h22 (was h11 + h12),
    // matching Lowe's principal-curvature edge-response test.
    float trace = h11 + h22;
    float det = h11 * h22 - h12 * h12;
    // Curvatures of opposite sign: not an extremum in the image plane
    if (det <= 0.f) return false;
    // Filter based on the edgeness of the keypoint
    float edgeness = (trace * trace) / det;
    if (edgeness >= powf(EDGENESS_TH + 1.f, 2.f) / EDGENESS_TH) return false;
    return true;
}
da4aa3c053f65694574a51c7a33f9acd7b76065e.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
////////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2014-2022, Lawrence Livermore National Security, LLC.
// Produced at the Lawrence Livermore National Laboratory.
// Written by the LBANN Research Team (B. Van Essen, et al.) listed in
// the CONTRIBUTORS file. <lbann-dev@llnl.gov>
//
// LLNL-CODE-697807.
// All rights reserved.
//
// This file is part of LBANN: Livermore Big Artificial Neural Network
// Toolkit. For details, see http://software.llnl.gov/LBANN or
// https://github.com/LLNL/LBANN.
//
// Licensed under the Apache License, Version 2.0 (the "Licensee"); you
// may not use this file except in compliance with the License. You may
// obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the license.
////////////////////////////////////////////////////////////////////////////////
#define LBANN_ENTRYWISE_BATCH_NORMALIZATION_LAYER_INSTANTIATE
#include "lbann/comm_impl.hpp"
#include "lbann/layers/regularizers/entrywise_batch_normalization.hpp"
#include "lbann/weights/weights_helpers.hpp"
#include "lbann/utils/gpu/helpers.hpp"
namespace lbann {
namespace {
/**
 * Accumulates per-row sums and sums of squares over all columns.
 *
 * On input, sums and sqsums are assumed to be filled with zeros.
 *
 * Block dimensions: bsize x 1 x 1
 *
 * Grid dimensions: (height / bsize) x 1 x 1
 */
template <typename TensorDataType>
__global__ void row_sums_kernel(size_t height,
                                size_t width,
                                const TensorDataType* __restrict__ vals,
                                size_t vals_ldim,
                                TensorDataType* __restrict__ sums,
                                TensorDataType* __restrict__ sqsums) {
  const size_t tid = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t stride = blockDim.x * gridDim.x;
  // Grid-stride loop over rows; each thread owns whole rows, so no atomics needed
  for (size_t row = tid; row < height; row += stride) {
    // Accumulate in registers, then write back once per row
    TensorDataType sum = sums[row];
    TensorDataType sqsum = sqsums[row];
    for (size_t col = 0; col < width; ++col) {
      const TensorDataType x = vals[row + col * vals_ldim];
      sum += x;
      sqsum += x * x;
    }
    sums[row] = sum;
    sqsums[row] = sqsum;
  }
}
/**
 * Converts accumulated per-entry sums into mean and unbiased variance, and
 * folds them into the running statistics with exponential decay.
 *
 * On input, batch_mean and batch_var are assumed to contain sums and
 * sums of squares, respectively; both are overwritten in place.
 * Caller guarantees statistics_count > 1 (see compute_batch_statistics).
 *
 * Block dimensions: bsize x 1 x 1
 *
 * Grid dimensions: (size / bsize) x 1 x 1
 */
template <typename TensorDataType>
__global__ void compute_statistics_kernel(size_t size,
                                          unsigned long long statistics_count,
                                          TensorDataType decay,
                                          TensorDataType* __restrict__ batch_mean,
                                          TensorDataType* __restrict__ batch_var,
                                          TensorDataType* __restrict__ running_mean,
                                          TensorDataType* __restrict__ running_var) {
  const size_t tid = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t stride = blockDim.x * gridDim.x;
  for (size_t i = tid; i < size; i += stride) {
    // Read the accumulated sums before overwriting them in place
    const TensorDataType sum = batch_mean[i];
    const TensorDataType sqsum = batch_var[i];
    const TensorDataType count = TensorDataType(statistics_count);
    // mean = sum/n; var = (E[x^2] - mean^2) * n/(n-1) (unbiased estimate)
    const TensorDataType mean = sum / count;
    const TensorDataType sqmean = sqsum / count;
    const TensorDataType var = (sqmean - mean * mean) * count / TensorDataType(statistics_count - 1);
    batch_mean[i] = mean;
    batch_var[i] = var;
    // Exponential moving average of the running statistics
    running_mean[i] = decay * running_mean[i] + (TensorDataType{1.f} - decay) * mean;
    running_var[i] = decay * running_var[i] + (TensorDataType{1.f} - decay) * var;
  }
}
/**
 * Computes per-entry mini-batch statistics and updates running statistics.
 *
 * mean = sum(x_i) / n
 *
 * var = ( sum(x_i^2)/n - mean^2 ) * n/(n-1)
 *
 * On return, column 0 of batch_statistics holds the mean and column 1 the
 * variance; running_mean/running_var are updated with decay.
 */
template <typename TensorDataType>
void compute_batch_statistics(lbann_comm& comm,
                              TensorDataType decay,
                              const El::AbstractDistMatrix<TensorDataType>& input,
                              El::AbstractDistMatrix<TensorDataType>& batch_statistics,
                              El::AbstractDistMatrix<TensorDataType>& running_mean,
                              El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Local matrices (views into the distributed matrices on this process)
  const auto& local_input = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(input.LockedMatrix());
  auto& local_batch_statistics = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(batch_statistics.Matrix());
  auto local_batch_mean = El::View(local_batch_statistics, El::ALL, El::IR(0));
  auto local_batch_var = El::View(local_batch_statistics, El::ALL, El::IR(1));
  auto& local_running_mean = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(running_mean.Matrix());
  auto& local_running_var = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(running_var.Matrix());
  // Dimensions
  const size_t local_height = local_input.Height();
  const size_t local_width = local_input.Width();
  // Compute local sums (row_sums_kernel requires zero-initialized outputs)
  El::Zero(batch_statistics);
  if (local_height > 0) {
    // Order launches after pending work on all involved matrices
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_batch_statistics),
                        gpu::get_sync_info(local_input));
    constexpr size_t block_size = 256;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size;
    grid_dims.x = (local_height + block_size - 1) / block_size;
    hydrogen::gpu::LaunchKernel(
      row_sums_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_batch_mean.Buffer(),
      local_batch_var.Buffer());
  }
  // Accumulate sums between processes
  /// @todo Local statistics
  /// @todo Arbitrary group sizes
  comm.allreduce(batch_statistics,
                 batch_statistics.RedundantComm(),
                 El::mpi::SUM);
  const size_t statistics_count = input.Width();
  // Compute mini-batch statistics from sums
  if (statistics_count <= 1) {
    // With <= 1 sample the unbiased variance is undefined; fall back to 1.
    // local_mean already has correct values (sum of a single sample).
    El::Fill(local_batch_var, El::TypeTraits<TensorDataType>::One());
  } else {
    if (local_height > 0) {
      auto multisync =
        El::MakeMultiSync(gpu::get_sync_info(local_batch_statistics),
                          gpu::get_sync_info(local_running_mean),
                          gpu::get_sync_info(local_running_var));
      constexpr size_t block_size = 256;
      dim3 block_dims, grid_dims;
      block_dims.x = block_size;
      grid_dims.x = (local_height + block_size - 1) / block_size;
      hydrogen::gpu::LaunchKernel(
        compute_statistics_kernel<TensorDataType>,
        grid_dims, block_dims, 0, multisync,
        local_height,
        statistics_count,
        decay,
        local_batch_mean.Buffer(),
        local_batch_var.Buffer(),
        local_running_mean.Buffer(),
        local_running_var.Buffer());
    }
  }
}
/**
 * y(row,col) = (x(row,col) - mean(row)) / sqrt(var(row) + epsilon)
 *
 * Block dimensions: bsizex x bsizey x 1
 *
 * Grid dimensions: (height / bsizex) x (width / bsizey) x 1
 */
template <typename TensorDataType>
__global__ void batchnorm_kernel(size_t height,
                                 size_t width,
                                 TensorDataType epsilon,
                                 const TensorDataType* __restrict__ input,
                                 size_t input_ldim,
                                 TensorDataType* __restrict__ output,
                                 size_t output_ldim,
                                 const TensorDataType* __restrict__ mean,
                                 const TensorDataType* __restrict__ var) {
  const size_t row0 = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t col0 = threadIdx.y + blockIdx.y * blockDim.y;
  const size_t row_stride = blockDim.x * gridDim.x;
  const size_t col_stride = blockDim.y * gridDim.y;
  // 2D grid-stride loops over the row x column index space
  for (size_t row = row0; row < height; row += row_stride) {
    // Per-row statistics are invariant over the inner column loop
    const auto mu = mean[row];
    const auto inv_stdev = gpu_lib::rsqrt(var[row] + epsilon);
    for (size_t col = col0; col < width; col += col_stride) {
      output[row + col * output_ldim] = (input[row + col * input_ldim] - mu) * inv_stdev;
    }
  }
}
/**
 * Launches batchnorm_kernel on the local matrices:
 *
 * y_i = (x_i - mean) / sqrt(var + epsilon)
 *
 * All four matrices must have matching local layouts; a no-op when the
 * local input is empty.
 */
template <typename TensorDataType>
void apply_batchnorm(DataType epsilon,
                     const El::Matrix<TensorDataType, El::Device::GPU>& local_input,
                     El::Matrix<TensorDataType, El::Device::GPU>& local_output,
                     const El::Matrix<TensorDataType, El::Device::GPU>& local_mean,
                     const El::Matrix<TensorDataType, El::Device::GPU>& local_var) {
  if (!local_input.IsEmpty()) {
    // Order the launch after pending work on all involved matrices
    auto multisync = El::MakeMultiSync(gpu::get_sync_info(local_output),
                                       gpu::get_sync_info(local_input),
                                       gpu::get_sync_info(local_mean),
                                       gpu::get_sync_info(local_var));
    const size_t local_height = local_input.Height();
    const size_t local_width = local_input.Width();
    // One thread per row; columns handled by the kernel's inner grid-stride loop
    constexpr size_t block_size_x = 256;
    constexpr size_t block_size_y = 1;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size_x;
    block_dims.y = block_size_y;
    grid_dims.x = (local_height + block_size_x - 1) / block_size_x;
    grid_dims.y = (local_width + block_size_y - 1) / block_size_y;
    hydrogen::gpu::LaunchKernel(
      batchnorm_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_output.Buffer(),
      local_output.LDim(),
      local_mean.LockedBuffer(),
      local_var.LockedBuffer());
  }
}
/**
 * Forward prop: normalizes each entry with mini-batch statistics (training)
 * or running statistics (inference). In training mode this also refreshes
 * batch_statistics and the running statistics as a side effect.
 */
template <typename TensorDataType>
void fp_impl(lbann_comm& comm,
             TensorDataType decay,
             TensorDataType epsilon,
             bool is_training,
             const El::AbstractDistMatrix<TensorDataType>& input,
             El::AbstractDistMatrix<TensorDataType>& output,
             El::AbstractDistMatrix<TensorDataType>& batch_statistics,
             El::AbstractDistMatrix<TensorDataType>& running_mean,
             El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Make sure workspace is aligned with input tensor
  batch_statistics.Empty(false);
  batch_statistics.AlignWith(input);
  // Column 0 holds the mean, column 1 the variance
  batch_statistics.Resize(input.Height(), 2);
  // Local matrices
  const auto& local_input = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(input.LockedMatrix());
  auto& local_output = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(output.Matrix());
  // Batchnorm has different behavior for training and inference
  if (is_training) {
    // For training, normalize with batch statistics
    compute_batch_statistics<TensorDataType>(comm,
                                             decay,
                                             input,
                                             batch_statistics,
                                             running_mean,
                                             running_var);
    const auto& local_batch_statistics
      = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(batch_statistics.LockedMatrix());
    const auto local_batch_mean = El::LockedView(local_batch_statistics,
                                                 El::ALL, El::IR(0));
    const auto local_batch_var = El::LockedView(local_batch_statistics,
                                                El::ALL, El::IR(1));
    apply_batchnorm<TensorDataType>(epsilon,
                                    local_input,
                                    local_output,
                                    local_batch_mean,
                                    local_batch_var);
  }
  else {
    // For inference, normalize with running statistics
    const auto& local_running_mean = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(running_mean.LockedMatrix());
    const auto& local_running_var = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(running_var.LockedMatrix());
    apply_batchnorm<TensorDataType>(epsilon,
                                    local_input,
                                    local_output,
                                    local_running_mean,
                                    local_running_var);
  }
}
/**
 * Accumulates per-row gradients of the loss w.r.t. the batch statistics.
 *
 * On input, gradient_wrt_mean and gradient_wrt_var are assumed to be
 * filled with zeros.
 *
 * dL/dmean = - sum(dL/dy_i) / sqrt(var+epsilon)
 *
 * dL/dvar = - sum(dL/dy_i * (x_i-mean)) * (var+epsilon)^(-3/2) / 2
 *
 * Block dimensions: bsize x 1 x 1
 *
 * Grid dimensions: (height / bsize) x 1 x 1
 */
template <typename TensorDataType>
__global__ void bp_training_stats_gradient_kernel(size_t height,
                                                  size_t width,
                                                  TensorDataType epsilon,
                                                  const TensorDataType* __restrict__ input,
                                                  size_t input_ldim,
                                                  const TensorDataType* __restrict__ gradient_wrt_output,
                                                  size_t gradient_wrt_output_ldim,
                                                  const TensorDataType* __restrict__ mean,
                                                  const TensorDataType* __restrict__ var,
                                                  TensorDataType* __restrict__ gradient_wrt_mean,
                                                  TensorDataType* __restrict__ gradient_wrt_var) {
  const size_t tid = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t stride = blockDim.x * gridDim.x;
  // Grid-stride loop over rows; each thread owns whole rows, so no atomics needed
  for (size_t row = tid; row < height; row += stride) {
    const auto mu = mean[row];
    const auto inv_stdev = gpu_lib::rsqrt(var[row] + epsilon);
    // Accumulate in registers, then write back once per row
    auto dmean = gradient_wrt_mean[row];
    auto dvar = gradient_wrt_var[row];
    for (size_t col = 0; col < width; ++col) {
      const auto x = input[row + col * input_ldim];
      const auto dy = gradient_wrt_output[row + col * gradient_wrt_output_ldim];
      dmean += - dy * inv_stdev;
      dvar += - dy * (x - mu) * inv_stdev*inv_stdev*inv_stdev / TensorDataType(2);
    }
    gradient_wrt_mean[row] = dmean;
    gradient_wrt_var[row] = dvar;
  }
}
/**
 * Error signal for training-mode batchnorm:
 *
 * dL/dx_i = ( dL/dy_i / sqrt(var+epsilon)
 *             + dL/dmean / n
 *             + dL/dvar * (x_i - mean) * 2/(n-1) )
 *
 * Block dimensions: bsizex x bsizey x 1
 *
 * Grid dimensions: (height / bsizex) x (width / bsizey) x 1
 */
template <typename TensorDataType>
__global__ void bp_training_error_signal_kernel(size_t height,
                                                size_t width,
                                                TensorDataType epsilon,
                                                unsigned long long statistics_count,
                                                const TensorDataType* __restrict__ input,
                                                size_t input_ldim,
                                                const TensorDataType* __restrict__ gradient_wrt_output,
                                                size_t gradient_wrt_output_ldim,
                                                TensorDataType* __restrict__ gradient_wrt_input,
                                                size_t gradient_wrt_input_ldim,
                                                const TensorDataType* __restrict__ mean,
                                                const TensorDataType* __restrict__ var,
                                                const TensorDataType* __restrict__ gradient_wrt_mean,
                                                const TensorDataType* __restrict__ gradient_wrt_var) {
  const size_t row0 = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t col0 = threadIdx.y + blockIdx.y * blockDim.y;
  const size_t row_stride = blockDim.x * gridDim.x;
  const size_t col_stride = blockDim.y * gridDim.y;
  // 2D grid-stride loops; per-row quantities hoisted out of the column loop
  for (size_t row = row0; row < height; row += row_stride) {
    const auto mu = mean[row];
    const auto dmean = gradient_wrt_mean[row];
    const auto dvar = gradient_wrt_var[row];
    const auto inv_stdev = gpu_lib::rsqrt(var[row] + epsilon);
    for (size_t col = col0; col < width; col += col_stride) {
      const auto x = input[row + col * input_ldim];
      const auto dy = gradient_wrt_output[row + col * gradient_wrt_output_ldim];
      gradient_wrt_input[row + col * gradient_wrt_input_ldim] =
        (dy * inv_stdev
         + dmean / TensorDataType(statistics_count)
         + dvar * (x - mu) * TensorDataType(2) / TensorDataType(statistics_count - 1));
    }
  }
}
/** @brief Backprop for training.
 *
 * Assumes forward prop uses mini-batch statistics. In other words,
 * statistics are dependent on input, so the error signal includes terms
 * for the gradients w.r.t. mean and variance.
 */
template <typename TensorDataType>
void bp_training_impl(lbann_comm& comm,
                      TensorDataType epsilon,
                      const El::AbstractDistMatrix<TensorDataType>& input,
                      const El::AbstractDistMatrix<TensorDataType>& gradient_wrt_output,
                      El::AbstractDistMatrix<TensorDataType>& gradient_wrt_input,
                      const El::AbstractDistMatrix<TensorDataType>& statistics,
                      El::AbstractDistMatrix<TensorDataType>& gradient_wrt_statistics) {
  // Make sure workspace is aligned with input tensor
  gradient_wrt_statistics.Empty(false);
  gradient_wrt_statistics.AlignWith(input);
  // Column 0 holds dL/dmean, column 1 holds dL/dvar
  gradient_wrt_statistics.Resize(input.Height(), 2);
  // Local matrices
  const auto& local_input = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(input.LockedMatrix());
  const auto& local_gradient_wrt_output = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_output.LockedMatrix());
  auto& local_gradient_wrt_input = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_input.Matrix());
  const auto& local_statistics = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(statistics.LockedMatrix());
  const auto local_mean = El::LockedView(local_statistics, El::ALL, El::IR(0));
  const auto local_var = El::LockedView(local_statistics, El::ALL, El::IR(1));
  auto& local_gradient_wrt_statistics = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_statistics.Matrix());
  auto local_gradient_wrt_mean = El::View(local_gradient_wrt_statistics, El::ALL, El::IR(0));
  auto local_gradient_wrt_var = El::View(local_gradient_wrt_statistics, El::ALL, El::IR(1));
  // Dimensions
  const size_t local_height = local_gradient_wrt_input.Height();
  const size_t local_width = local_gradient_wrt_input.Width();
  // Count for statistics
  // Note: Output is constant if statistics count is <=1, so error
  // signal is zero.
  /// @todo Local statistics
  /// @todo Arbitrary group sizes
  const size_t statistics_count = input.Width();
  if (statistics_count <= 1) {
    El::Zero(local_gradient_wrt_input);
    return;
  }
  // Compute local gradient w.r.t. batch statistics
  // (the kernel accumulates, so the buffers must start at zero)
  El::Zero(gradient_wrt_statistics);
  if (local_height > 0) {
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_gradient_wrt_statistics),
                        gpu::get_sync_info(local_statistics),
                        gpu::get_sync_info(local_gradient_wrt_output),
                        gpu::get_sync_info(local_input));
    constexpr size_t block_size = 256;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size;
    grid_dims.x = (local_height + block_size - 1) / block_size;
    hydrogen::gpu::LaunchKernel(
      bp_training_stats_gradient_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_gradient_wrt_output.LockedBuffer(),
      local_gradient_wrt_output.LDim(),
      local_mean.LockedBuffer(),
      local_var.LockedBuffer(),
      local_gradient_wrt_mean.Buffer(),
      local_gradient_wrt_var.Buffer());
  }
  // Accumulate gradient w.r.t. statistics across processes
  /// @todo Local statistics
  /// @todo Arbitrary group sizes
  comm.allreduce(gradient_wrt_statistics,
                 gradient_wrt_statistics.RedundantComm(),
                 El::mpi::SUM);
  // Compute gradient w.r.t. input
  if (!local_input.IsEmpty()) {
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_gradient_wrt_input),
                        gpu::get_sync_info(local_gradient_wrt_statistics),
                        gpu::get_sync_info(local_statistics),
                        gpu::get_sync_info(local_gradient_wrt_output),
                        gpu::get_sync_info(local_input));
    const size_t local_height = local_input.Height();
    const size_t local_width = local_input.Width();
    constexpr size_t block_size_x = 256;
    constexpr size_t block_size_y = 1;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size_x;
    block_dims.y = block_size_y;
    grid_dims.x = (local_height + block_size_x - 1) / block_size_x;
    grid_dims.y = (local_width + block_size_y - 1) / block_size_y;
    hydrogen::gpu::LaunchKernel(
      bp_training_error_signal_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      statistics_count,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_gradient_wrt_output.LockedBuffer(),
      local_gradient_wrt_output.LDim(),
      local_gradient_wrt_input.Buffer(),
      local_gradient_wrt_input.LDim(),
      local_mean.LockedBuffer(),
      local_var.LockedBuffer(),
      local_gradient_wrt_mean.LockedBuffer(),
      local_gradient_wrt_var.LockedBuffer());
  }
}
/**
 * Error signal for inference-mode batchnorm (statistics treated as constants):
 *
 * dL/dx_i = dL/dy_i / sqrt(var+epsilon)
 *
 * Block dimensions: bsizex x bsizey x 1
 *
 * Grid dimensions: (height / bsizex) x (width / bsizey) x 1
 */
template <typename TensorDataType>
__global__ void bp_inference_kernel(size_t height,
                                    size_t width,
                                    TensorDataType epsilon,
                                    const TensorDataType* __restrict__ gradient_wrt_output,
                                    size_t gradient_wrt_output_ldim,
                                    TensorDataType* __restrict__ gradient_wrt_input,
                                    size_t gradient_wrt_input_ldim,
                                    const TensorDataType* __restrict__ running_var) {
  const size_t row0 = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t col0 = threadIdx.y + blockIdx.y * blockDim.y;
  const size_t row_stride = blockDim.x * gridDim.x;
  const size_t col_stride = blockDim.y * gridDim.y;
  // 2D grid-stride loops; the per-row scale is invariant over columns
  for (size_t row = row0; row < height; row += row_stride) {
    const auto inv_stdev = gpu_lib::rsqrt(running_var[row] + epsilon);
    for (size_t col = col0; col < width; col += col_stride) {
      gradient_wrt_input[row + col * gradient_wrt_input_ldim] =
        gradient_wrt_output[row + col * gradient_wrt_output_ldim] * inv_stdev;
    }
  }
}
/** @brief Backprop for inference.
 *
 * Assumes forward prop uses running statistics. In other words,
 * statistics are independent of input, so the error signal is just the
 * output gradient rescaled by 1/sqrt(var+epsilon).
 */
template <typename TensorDataType>
void bp_inference_impl(DataType epsilon,
                       const El::AbstractDistMatrix<TensorDataType>& gradient_wrt_output,
                       El::AbstractDistMatrix<TensorDataType>& gradient_wrt_input,
                       const El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Local matrices
  const auto& local_gradient_wrt_output = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_output.LockedMatrix());
  auto& local_gradient_wrt_input = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_input.Matrix());
  const auto& local_running_var = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(running_var.LockedMatrix());
  // Compute gradient w.r.t. input (no-op when there is no local data)
  if (!local_gradient_wrt_output.IsEmpty()) {
    // Order the launch after pending work on all involved matrices
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_gradient_wrt_input),
                        gpu::get_sync_info(local_gradient_wrt_output),
                        gpu::get_sync_info(local_running_var));
    const size_t local_height = local_gradient_wrt_output.Height();
    const size_t local_width = local_gradient_wrt_output.Width();
    constexpr size_t block_size_x = 256;
    constexpr size_t block_size_y = 1;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size_x;
    block_dims.y = block_size_y;
    grid_dims.x = (local_height + block_size_x - 1) / block_size_x;
    grid_dims.y = (local_width + block_size_y - 1) / block_size_y;
    hydrogen::gpu::LaunchKernel(
      bp_inference_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      local_gradient_wrt_output.LockedBuffer(),
      local_gradient_wrt_output.LDim(),
      local_gradient_wrt_input.Buffer(),
      local_gradient_wrt_input.LDim(),
      local_running_var.LockedBuffer());
  }
}
/** Dispatch backprop to the training- or inference-mode implementation. */
template <typename TensorDataType>
void bp_impl(lbann_comm& comm,
             TensorDataType epsilon,
             bool is_training,
             const El::AbstractDistMatrix<TensorDataType>& input,
             const El::AbstractDistMatrix<TensorDataType>& gradient_wrt_output,
             El::AbstractDistMatrix<TensorDataType>& gradient_wrt_input,
             const El::AbstractDistMatrix<TensorDataType>& batch_statistics,
             El::AbstractDistMatrix<TensorDataType>& gradient_wrt_batch_statistics,
             const El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Batchnorm behaves differently in training and inference: training
  // uses mini-batch statistics (which depend on the input) while
  // inference uses the fixed running statistics.
  if (!is_training) {
    bp_inference_impl<TensorDataType>(epsilon,
                                      gradient_wrt_output,
                                      gradient_wrt_input,
                                      running_var);
    return;
  }
  bp_training_impl<TensorDataType>(comm,
                                   epsilon,
                                   input,
                                   gradient_wrt_output,
                                   gradient_wrt_input,
                                   batch_statistics,
                                   gradient_wrt_batch_statistics);
}
} // namespace
// Template instantiation
// Forward prop: dispatch to fp_impl with this layer's hyperparameters.
// Weights 0 and 1 hold the running mean and running variance,
// respectively (mutable, because training mode updates them in place).
template <typename TensorDataType, data_layout T_layout, El::Device Dev>
void entrywise_batch_normalization_layer<TensorDataType, T_layout, Dev>::fp_compute() {
  using ValuesGetter = weights_details::SafeWeightsAccessor<TensorDataType>;
  // Batchnorm has different behavior for training and inference
  const auto mode = this->get_model()->get_execution_context().get_execution_mode();
  fp_impl(*this->get_comm(),
          this->m_decay,
          this->m_epsilon,
          mode == execution_mode::training,
          this->get_prev_activations(),
          this->get_activations(),
          *this->m_batch_statistics,
          ValuesGetter::mutable_values(this->get_weights(0)),   // running mean
          ValuesGetter::mutable_values(this->get_weights(1)));  // running variance
}
// Backprop: dispatch to bp_impl. Of the weights, only the running
// variance (weights 1) is needed, and bp_impl only uses it in
// inference mode.
template <typename TensorDataType, data_layout T_layout, El::Device Dev>
void entrywise_batch_normalization_layer<TensorDataType, T_layout, Dev>::bp_compute() {
  const auto mode = this->get_model()->get_execution_context().get_execution_mode();
  bp_impl(*this->get_comm(),
          this->m_epsilon,
          mode == execution_mode::training,
          this->get_prev_activations(),
          this->get_prev_error_signals(),
          this->get_error_signals(),
          *this->m_batch_statistics,
          *this->m_batch_statistics_gradient,
          this->weights_values(1));  // running variance
}
#define PROTO(T) \
template class entrywise_batch_normalization_layer< \
T, data_layout::DATA_PARALLEL, El::Device::GPU>; \
template class entrywise_batch_normalization_layer< \
T, data_layout::MODEL_PARALLEL, El::Device::GPU>
#include "lbann/macros/instantiate.hpp"
} // namespace lbann
| da4aa3c053f65694574a51c7a33f9acd7b76065e.cu | ////////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2014-2022, Lawrence Livermore National Security, LLC.
// Produced at the Lawrence Livermore National Laboratory.
// Written by the LBANN Research Team (B. Van Essen, et al.) listed in
// the CONTRIBUTORS file. <lbann-dev@llnl.gov>
//
// LLNL-CODE-697807.
// All rights reserved.
//
// This file is part of LBANN: Livermore Big Artificial Neural Network
// Toolkit. For details, see http://software.llnl.gov/LBANN or
// https://github.com/LLNL/LBANN.
//
// Licensed under the Apache License, Version 2.0 (the "Licensee"); you
// may not use this file except in compliance with the License. You may
// obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the license.
////////////////////////////////////////////////////////////////////////////////
#define LBANN_ENTRYWISE_BATCH_NORMALIZATION_LAYER_INSTANTIATE
#include "lbann/comm_impl.hpp"
#include "lbann/layers/regularizers/entrywise_batch_normalization.hpp"
#include "lbann/weights/weights_helpers.hpp"
#include "lbann/utils/gpu/helpers.hpp"
namespace lbann {
namespace {
/**
* On input, sums and sqsums are assumed to be filled with zeros.
*
* Block dimensions: bsize x 1 x 1
*
* Grid dimensions: (height / bsize) x 1 x 1
*/
/** Accumulate per-row sums and sums of squares of `vals`.
 *
 *  On input, sums and sqsums are assumed to be filled with zeros (the
 *  results are added into them).
 *
 *  @param height  Number of rows; one thread per row (grid-stride).
 *  @param width   Number of columns summed per row.
 */
template <typename TensorDataType>
__global__ void row_sums_kernel(size_t height,
                                size_t width,
                                const TensorDataType* __restrict__ vals,
                                size_t vals_ldim,
                                TensorDataType* __restrict__ sums,
                                TensorDataType* __restrict__ sqsums) {
  const size_t gid = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t nthreads = blockDim.x * gridDim.x;
  for (size_t row = gid; row < height; row += nthreads) {
    // Accumulate in registers and touch global memory once per row,
    // instead of a global read-modify-write per element.
    TensorDataType sum{}, sqsum{};
    for (size_t col = 0; col < width; ++col) {
      const auto& x = vals[row + col * vals_ldim];
      sum += x;
      sqsum += x * x;
    }
    sums[row] += sum;
    sqsums[row] += sqsum;
  }
}
/**
 * On input, batch_mean and batch_var are assumed to contain sums and
 * sums of squares, respectively; they are overwritten in place with the
 * mean and (unbiased) variance. The running statistics are updated with
 * an exponential moving average controlled by decay.
 *
 * Block dimensions: bsize x 1 x 1
 *
 * Grid dimensions: (size / bsize) x 1 x 1
 */
template <typename TensorDataType>
__global__ void compute_statistics_kernel(size_t size,
                                          unsigned long long statistics_count,
                                          TensorDataType decay,
                                          TensorDataType* __restrict__ batch_mean,
                                          TensorDataType* __restrict__ batch_var,
                                          TensorDataType* __restrict__ running_mean,
                                          TensorDataType* __restrict__ running_var) {
  const size_t gid = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t nthreads = blockDim.x * gridDim.x;
  for (size_t i = gid; i < size; i += nthreads) {
    // References alias the buffers that are updated in place below.
    auto& mean = batch_mean[i];
    auto& var = batch_var[i];
    auto& _running_mean = running_mean[i];
    auto& _running_var = running_var[i];
    // Copy the raw sums by value BEFORE overwriting through the aliases.
    const auto sum = batch_mean[i];
    const auto sqsum = batch_var[i];
    const TensorDataType statistics_count_dt = TensorDataType(statistics_count);
    mean = sum / statistics_count_dt;
    const auto sqmean = sqsum / statistics_count_dt;
    // Unbiased variance: (E[x^2] - E[x]^2) * n/(n-1). The host only
    // launches this kernel when statistics_count > 1, so (n-1) != 0.
    var = (sqmean - mean * mean) * statistics_count_dt / TensorDataType(statistics_count - 1);
    _running_mean = decay * _running_mean + (TensorDataType{1.f} - decay) * mean;
    _running_var = decay * _running_var + (TensorDataType{1.f} - decay) * var;
  }
}
/**
 * mean = sum(x_i) / n
 *
 * var = ( sum(x_i^2)/n - mean^2 ) * n/(n-1)
 *
 * Statistics are computed per row, across the matrix width (the
 * mini-batch), and accumulated over all processes sharing the matrix.
 * Also updates the running statistics (EMA with the given decay).
 */
template <typename TensorDataType>
void compute_batch_statistics(lbann_comm& comm,
                              TensorDataType decay,
                              const El::AbstractDistMatrix<TensorDataType>& input,
                              El::AbstractDistMatrix<TensorDataType>& batch_statistics,
                              El::AbstractDistMatrix<TensorDataType>& running_mean,
                              El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Local matrices (batch_statistics column 0 is the mean, column 1 the variance)
  const auto& local_input = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(input.LockedMatrix());
  auto& local_batch_statistics = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(batch_statistics.Matrix());
  auto local_batch_mean = El::View(local_batch_statistics, El::ALL, El::IR(0));
  auto local_batch_var = El::View(local_batch_statistics, El::ALL, El::IR(1));
  auto& local_running_mean = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(running_mean.Matrix());
  auto& local_running_var = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(running_var.Matrix());
  // Dimensions
  const size_t local_height = local_input.Height();
  const size_t local_width = local_input.Width();
  // Compute local sums (row_sums_kernel requires zero-initialized outputs)
  El::Zero(batch_statistics);
  if (local_height > 0) {
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_batch_statistics),
                        gpu::get_sync_info(local_input));
    constexpr size_t block_size = 256;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size;
    grid_dims.x = (local_height + block_size - 1) / block_size;
    hydrogen::gpu::LaunchKernel(
      row_sums_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_batch_mean.Buffer(),
      local_batch_var.Buffer());
  }
  // Accumulate sums between processes
  /// @todo Local statistics
  /// @todo Arbitrary group sizes
  comm.allreduce(batch_statistics,
                 batch_statistics.RedundantComm(),
                 El::mpi::SUM);
  const size_t statistics_count = input.Width();
  // Compute mini-batch statistics from sums
  if (statistics_count <= 1) {
    // local_mean already has correct values
    // (variance is undefined for n <= 1, so fall back to one)
    El::Fill(local_batch_var, El::TypeTraits<TensorDataType>::One());
  } else {
    if (local_height > 0) {
      auto multisync =
        El::MakeMultiSync(gpu::get_sync_info(local_batch_statistics),
                          gpu::get_sync_info(local_running_mean),
                          gpu::get_sync_info(local_running_var));
      constexpr size_t block_size = 256;
      dim3 block_dims, grid_dims;
      block_dims.x = block_size;
      grid_dims.x = (local_height + block_size - 1) / block_size;
      hydrogen::gpu::LaunchKernel(
        compute_statistics_kernel<TensorDataType>,
        grid_dims, block_dims, 0, multisync,
        local_height,
        statistics_count,
        decay,
        local_batch_mean.Buffer(),
        local_batch_var.Buffer(),
        local_running_mean.Buffer(),
        local_running_var.Buffer());
    }
  }
}
/**
* Block dimensions: bsizex x bsizey x 1
*
* Grid dimensions: (height / bsizex) x (width / bsizey) x 1
*/
template <typename TensorDataType>
__global__ void batchnorm_kernel(size_t height,
                                 size_t width,
                                 TensorDataType epsilon,
                                 const TensorDataType* __restrict__ input,
                                 size_t input_ldim,
                                 TensorDataType* __restrict__ output,
                                 size_t output_ldim,
                                 const TensorDataType* __restrict__ mean,
                                 const TensorDataType* __restrict__ var) {
  // Grid-stride traversal: x dimension covers rows, y dimension columns.
  const size_t row_start = blockIdx.x * blockDim.x + threadIdx.x;
  const size_t col_start = blockIdx.y * blockDim.y + threadIdx.y;
  const size_t row_stride = blockDim.x * gridDim.x;
  const size_t col_stride = blockDim.y * gridDim.y;
  for (size_t row = row_start; row < height; row += row_stride) {
    // Per-row statistics.
    const auto mu = mean[row];
    const auto inv_stdev = gpu_lib::rsqrt(var[row] + epsilon);
    for (size_t col = col_start; col < width; col += col_stride) {
      output[row + col * output_ldim] =
        (input[row + col * input_ldim] - mu) * inv_stdev;
    }
  }
}
/**
 * y_i = (x_i - mean) / sqrt(var + epsilon)
 *
 * Applies the normalization to the local portion of the matrices.
 * No-op when the local input is empty.
 */
template <typename TensorDataType>
void apply_batchnorm(DataType epsilon,
                     const El::Matrix<TensorDataType, El::Device::GPU>& local_input,
                     El::Matrix<TensorDataType, El::Device::GPU>& local_output,
                     const El::Matrix<TensorDataType, El::Device::GPU>& local_mean,
                     const El::Matrix<TensorDataType, El::Device::GPU>& local_var) {
  if (!local_input.IsEmpty()) {
    // Combine sync info so the kernel is ordered with pending work on
    // all four matrices.
    auto multisync = El::MakeMultiSync(gpu::get_sync_info(local_output),
                                       gpu::get_sync_info(local_input),
                                       gpu::get_sync_info(local_mean),
                                       gpu::get_sync_info(local_var));
    const size_t local_height = local_input.Height();
    const size_t local_width = local_input.Width();
    // 2D launch: x covers rows, y covers columns
    constexpr size_t block_size_x = 256;
    constexpr size_t block_size_y = 1;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size_x;
    block_dims.y = block_size_y;
    grid_dims.x = (local_height + block_size_x - 1) / block_size_x;
    grid_dims.y = (local_width + block_size_y - 1) / block_size_y;
    hydrogen::gpu::LaunchKernel(
      batchnorm_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_output.Buffer(),
      local_output.LDim(),
      local_mean.LockedBuffer(),
      local_var.LockedBuffer());
  }
}
/** Forward prop: normalize the input entrywise.
 *
 * In training mode the mini-batch statistics are computed (and the
 * running statistics updated); in inference mode the fixed running
 * statistics are used.
 */
template <typename TensorDataType>
void fp_impl(lbann_comm& comm,
             TensorDataType decay,
             TensorDataType epsilon,
             bool is_training,
             const El::AbstractDistMatrix<TensorDataType>& input,
             El::AbstractDistMatrix<TensorDataType>& output,
             El::AbstractDistMatrix<TensorDataType>& batch_statistics,
             El::AbstractDistMatrix<TensorDataType>& running_mean,
             El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Make sure workspace is aligned with input tensor
  // (column 0 holds the mean, column 1 the variance)
  batch_statistics.Empty(false);
  batch_statistics.AlignWith(input);
  batch_statistics.Resize(input.Height(), 2);
  // Local matrices
  const auto& local_input = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(input.LockedMatrix());
  auto& local_output = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(output.Matrix());
  // Batchnorm has different behavior for training and inference
  if (is_training) {
    // For training, normalize with batch statistics
    compute_batch_statistics<TensorDataType>(comm,
                                             decay,
                                             input,
                                             batch_statistics,
                                             running_mean,
                                             running_var);
    const auto& local_batch_statistics
      = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(batch_statistics.LockedMatrix());
    const auto local_batch_mean = El::LockedView(local_batch_statistics,
                                                 El::ALL, El::IR(0));
    const auto local_batch_var = El::LockedView(local_batch_statistics,
                                                El::ALL, El::IR(1));
    apply_batchnorm<TensorDataType>(epsilon,
                                    local_input,
                                    local_output,
                                    local_batch_mean,
                                    local_batch_var);
  }
  else {
    // For inference, normalize with running statistics
    const auto& local_running_mean = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(running_mean.LockedMatrix());
    const auto& local_running_var = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(running_var.LockedMatrix());
    apply_batchnorm<TensorDataType>(epsilon,
                                    local_input,
                                    local_output,
                                    local_running_mean,
                                    local_running_var);
  }
}
/**
 * Accumulate gradients w.r.t. the mini-batch statistics.
 *
 * On input, gradient_wrt_mean and gradient_wrt_var are assumed to be
 * filled with zeros (the per-column contributions are added in).
 *
 * dL/dmean = - sum(dL/dy_i) / sqrt(var+epsilon)
 *
 * dL/dvar = - sum(dL/dy_i * (x_i-mean)) * (var+epsilon)^(-3/2) / 2
 *
 * Block dimensions: bsize x 1 x 1
 *
 * Grid dimensions: (height / bsize) x 1 x 1
 */
template <typename TensorDataType>
__global__ void bp_training_stats_gradient_kernel(size_t height,
                                                  size_t width,
                                                  TensorDataType epsilon,
                                                  const TensorDataType* __restrict__ input,
                                                  size_t input_ldim,
                                                  const TensorDataType* __restrict__ gradient_wrt_output,
                                                  size_t gradient_wrt_output_ldim,
                                                  const TensorDataType* __restrict__ mean,
                                                  const TensorDataType* __restrict__ var,
                                                  TensorDataType* __restrict__ gradient_wrt_mean,
                                                  TensorDataType* __restrict__ gradient_wrt_var) {
  // One thread per row (grid-stride); each thread walks its whole row.
  const size_t gid = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t nthreads = blockDim.x * gridDim.x;
  for (size_t row = gid; row < height; row += nthreads) {
    const auto& _mean = mean[row];
    const auto& _var = var[row];
    const auto inv_stdev = gpu_lib::rsqrt(_var + epsilon);
    // References into the accumulation buffers.
    auto& dmean = gradient_wrt_mean[row];
    auto& dvar = gradient_wrt_var[row];
    for (size_t col = 0; col < width; ++col) {
      const auto& x = input[row + col * input_ldim];
      const auto& dy = gradient_wrt_output[row + col * gradient_wrt_output_ldim];
      dmean += - dy * inv_stdev;
      dvar += - dy * (x - _mean) * inv_stdev*inv_stdev*inv_stdev / TensorDataType(2);
    }
  }
}
/**
 * Error signal for training mode (statistics depend on the input).
 *
 * dL/dx_i = ( dL/dy_i / sqrt(var+epsilon)
 *             + dL/dmean / n
 *             + dL/dvar * (x_i - mean) * 2/(n-1) )
 *
 * where n = statistics_count. The caller only launches this kernel
 * when statistics_count > 1, so the (n-1) divisor is nonzero.
 *
 * Block dimensions: bsizex x bsizey x 1
 *
 * Grid dimensions: (height / bsizex) x (width / bsizey) x 1
 */
template <typename TensorDataType>
__global__ void bp_training_error_signal_kernel(size_t height,
                                                size_t width,
                                                TensorDataType epsilon,
                                                unsigned long long statistics_count,
                                                const TensorDataType* __restrict__ input,
                                                size_t input_ldim,
                                                const TensorDataType* __restrict__ gradient_wrt_output,
                                                size_t gradient_wrt_output_ldim,
                                                TensorDataType* __restrict__ gradient_wrt_input,
                                                size_t gradient_wrt_input_ldim,
                                                const TensorDataType* __restrict__ mean,
                                                const TensorDataType* __restrict__ var,
                                                const TensorDataType* __restrict__ gradient_wrt_mean,
                                                const TensorDataType* __restrict__ gradient_wrt_var) {
  // Grid-stride 2D traversal: x covers rows, y covers columns.
  const size_t gidx = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t gidy = threadIdx.y + blockIdx.y * blockDim.y;
  const size_t nthreadsx = blockDim.x * gridDim.x;
  const size_t nthreadsy = blockDim.y * gridDim.y;
  for (size_t row = gidx; row < height; row += nthreadsx) {
    // Per-row statistics and statistic gradients.
    const auto& _mean = mean[row];
    const auto& _var = var[row];
    const auto& dmean = gradient_wrt_mean[row];
    const auto& dvar = gradient_wrt_var[row];
    const auto inv_stdev = gpu_lib::rsqrt(_var + epsilon);
    for (size_t col = gidy; col < width; col += nthreadsy) {
      const auto& x = input[row + col * input_ldim];
      const auto& dy = gradient_wrt_output[row + col * gradient_wrt_output_ldim];
      auto& dx = gradient_wrt_input[row + col * gradient_wrt_input_ldim];
      dx = (dy * inv_stdev
            + dmean / TensorDataType(statistics_count)
            + dvar * (x - _mean) * TensorDataType(2) / TensorDataType(statistics_count - 1));
    }
  }
}
/** @brief Backprop for training.
 *
 * Assumes forward prop uses mini-batch statistics. In other words,
 * statistics are dependent on input, so gradients flow through the
 * mean and variance as well as directly through the normalization.
 */
template <typename TensorDataType>
void bp_training_impl(lbann_comm& comm,
                      TensorDataType epsilon,
                      const El::AbstractDistMatrix<TensorDataType>& input,
                      const El::AbstractDistMatrix<TensorDataType>& gradient_wrt_output,
                      El::AbstractDistMatrix<TensorDataType>& gradient_wrt_input,
                      const El::AbstractDistMatrix<TensorDataType>& statistics,
                      El::AbstractDistMatrix<TensorDataType>& gradient_wrt_statistics) {
  // Make sure workspace is aligned with input tensor
  gradient_wrt_statistics.Empty(false);
  gradient_wrt_statistics.AlignWith(input);
  gradient_wrt_statistics.Resize(input.Height(), 2);
  // Local matrices (column 0 is the mean, column 1 the variance)
  const auto& local_input = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(input.LockedMatrix());
  const auto& local_gradient_wrt_output = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_output.LockedMatrix());
  auto& local_gradient_wrt_input = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_input.Matrix());
  const auto& local_statistics = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(statistics.LockedMatrix());
  const auto local_mean = El::LockedView(local_statistics, El::ALL, El::IR(0));
  const auto local_var = El::LockedView(local_statistics, El::ALL, El::IR(1));
  auto& local_gradient_wrt_statistics = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_statistics.Matrix());
  auto local_gradient_wrt_mean = El::View(local_gradient_wrt_statistics, El::ALL, El::IR(0));
  auto local_gradient_wrt_var = El::View(local_gradient_wrt_statistics, El::ALL, El::IR(1));
  // Dimensions
  const size_t local_height = local_gradient_wrt_input.Height();
  const size_t local_width = local_gradient_wrt_input.Width();
  // Count for statistics
  // Note: Output is constant if statistics count is <=1, so error
  // signal is zero.
  /// @todo Local statistics
  /// @todo Arbitrary group sizes
  const size_t statistics_count = input.Width();
  if (statistics_count <= 1) {
    El::Zero(local_gradient_wrt_input);
    return;
  }
  // Compute local gradient w.r.t. batch statistics
  // (the kernel accumulates, so the workspace must be zeroed first)
  El::Zero(gradient_wrt_statistics);
  if (local_height > 0) {
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_gradient_wrt_statistics),
                        gpu::get_sync_info(local_statistics),
                        gpu::get_sync_info(local_gradient_wrt_output),
                        gpu::get_sync_info(local_input));
    constexpr size_t block_size = 256;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size;
    grid_dims.x = (local_height + block_size - 1) / block_size;
    hydrogen::gpu::LaunchKernel(
      bp_training_stats_gradient_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_gradient_wrt_output.LockedBuffer(),
      local_gradient_wrt_output.LDim(),
      local_mean.LockedBuffer(),
      local_var.LockedBuffer(),
      local_gradient_wrt_mean.Buffer(),
      local_gradient_wrt_var.Buffer());
  }
  // Accumulate gradient w.r.t. statistics across processes
  /// @todo Local statistics
  /// @todo Arbitrary group sizes
  comm.allreduce(gradient_wrt_statistics,
                 gradient_wrt_statistics.RedundantComm(),
                 El::mpi::SUM);
  // Compute gradient w.r.t. input
  if (!local_input.IsEmpty()) {
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_gradient_wrt_input),
                        gpu::get_sync_info(local_gradient_wrt_statistics),
                        gpu::get_sync_info(local_statistics),
                        gpu::get_sync_info(local_gradient_wrt_output),
                        gpu::get_sync_info(local_input));
    // NOTE(review): these shadow the function-scope local_height/width
    // (taken from the input rather than the input gradient); presumably
    // the two matrices always have matching local dims -- confirm.
    const size_t local_height = local_input.Height();
    const size_t local_width = local_input.Width();
    constexpr size_t block_size_x = 256;
    constexpr size_t block_size_y = 1;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size_x;
    block_dims.y = block_size_y;
    grid_dims.x = (local_height + block_size_x - 1) / block_size_x;
    grid_dims.y = (local_width + block_size_y - 1) / block_size_y;
    hydrogen::gpu::LaunchKernel(
      bp_training_error_signal_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      statistics_count,
      local_input.LockedBuffer(),
      local_input.LDim(),
      local_gradient_wrt_output.LockedBuffer(),
      local_gradient_wrt_output.LDim(),
      local_gradient_wrt_input.Buffer(),
      local_gradient_wrt_input.LDim(),
      local_mean.LockedBuffer(),
      local_var.LockedBuffer(),
      local_gradient_wrt_mean.LockedBuffer(),
      local_gradient_wrt_var.LockedBuffer());
  }
}
/**
 * dL/dx_i = dL/dy_i / sqrt(var+epsilon)
 *
 * Block dimensions: bsizex x bsizey x 1
 *
 * Grid dimensions: (height / bsizex) x (width / bsizey) x 1
 */
template <typename TensorDataType>
__global__ void bp_inference_kernel(size_t height,
                                    size_t width,
                                    TensorDataType epsilon,
                                    const TensorDataType* __restrict__ gradient_wrt_output,
                                    size_t gradient_wrt_output_ldim,
                                    TensorDataType* __restrict__ gradient_wrt_input,
                                    size_t gradient_wrt_input_ldim,
                                    const TensorDataType* __restrict__ running_var) {
  // Grid-stride 2D traversal: x covers rows, y covers columns.
  const size_t gidx = threadIdx.x + blockIdx.x * blockDim.x;
  const size_t gidy = threadIdx.y + blockIdx.y * blockDim.y;
  const size_t nthreadsx = blockDim.x * gridDim.x;
  const size_t nthreadsy = blockDim.y * gridDim.y;
  for (size_t row = gidx; row < height; row += nthreadsx) {
    // Per-row scale factor from the running variance.
    const auto& var = running_var[row];
    const auto inv_stdev = gpu_lib::rsqrt(var + epsilon);
    for (size_t col = gidy; col < width; col += nthreadsy) {
      const auto& dy = gradient_wrt_output[row + col * gradient_wrt_output_ldim];
      auto& dx = gradient_wrt_input[row + col * gradient_wrt_input_ldim];
      dx = dy * inv_stdev;
    }
  }
}
/** @brief Backprop for inference.
 *
 * Assumes forward prop uses running statistics. In other words,
 * statistics are independent of input, so the only gradient is
 * dL/dx_i = dL/dy_i / sqrt(running_var + epsilon).
 *
 * NOTE(review): epsilon is declared as DataType while the matrices are
 * templated on TensorDataType; the caller (bp_impl) passes a
 * TensorDataType, so this relies on an implicit conversion -- confirm
 * this is intentional.
 */
template <typename TensorDataType>
void bp_inference_impl(DataType epsilon,
                       const El::AbstractDistMatrix<TensorDataType>& gradient_wrt_output,
                       El::AbstractDistMatrix<TensorDataType>& gradient_wrt_input,
                       const El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Local matrices (downcast to the GPU matrix type)
  const auto& local_gradient_wrt_output = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_output.LockedMatrix());
  auto& local_gradient_wrt_input = dynamic_cast<El::Matrix<TensorDataType, El::Device::GPU>&>(gradient_wrt_input.Matrix());
  const auto& local_running_var = dynamic_cast<const El::Matrix<TensorDataType, El::Device::GPU>&>(running_var.LockedMatrix());
  // Compute gradient w.r.t. input (no-op when the local portion is empty)
  if (!local_gradient_wrt_output.IsEmpty()) {
    // Combine sync info so the kernel is ordered with pending work on
    // all three matrices.
    auto multisync =
      El::MakeMultiSync(gpu::get_sync_info(local_gradient_wrt_input),
                        gpu::get_sync_info(local_gradient_wrt_output),
                        gpu::get_sync_info(local_running_var));
    const size_t local_height = local_gradient_wrt_output.Height();
    const size_t local_width = local_gradient_wrt_output.Width();
    // 2D launch: x covers rows, y covers columns
    constexpr size_t block_size_x = 256;
    constexpr size_t block_size_y = 1;
    dim3 block_dims, grid_dims;
    block_dims.x = block_size_x;
    block_dims.y = block_size_y;
    grid_dims.x = (local_height + block_size_x - 1) / block_size_x;
    grid_dims.y = (local_width + block_size_y - 1) / block_size_y;
    hydrogen::gpu::LaunchKernel(
      bp_inference_kernel<TensorDataType>,
      grid_dims, block_dims, 0, multisync,
      local_height,
      local_width,
      epsilon,
      local_gradient_wrt_output.LockedBuffer(),
      local_gradient_wrt_output.LDim(),
      local_gradient_wrt_input.Buffer(),
      local_gradient_wrt_input.LDim(),
      local_running_var.LockedBuffer());
  }
}
/** Dispatch backprop to the training- or inference-mode implementation.
 *
 * Training mode needs the input and mini-batch statistics (gradients
 * flow through the statistics); inference mode only needs the running
 * variance.
 */
template <typename TensorDataType>
void bp_impl(lbann_comm& comm,
             TensorDataType epsilon,
             bool is_training,
             const El::AbstractDistMatrix<TensorDataType>& input,
             const El::AbstractDistMatrix<TensorDataType>& gradient_wrt_output,
             El::AbstractDistMatrix<TensorDataType>& gradient_wrt_input,
             const El::AbstractDistMatrix<TensorDataType>& batch_statistics,
             El::AbstractDistMatrix<TensorDataType>& gradient_wrt_batch_statistics,
             const El::AbstractDistMatrix<TensorDataType>& running_var) {
  // Batchnorm has different behavior for training and inference
  if (is_training) {
    bp_training_impl<TensorDataType>(comm,
                                     epsilon,
                                     input,
                                     gradient_wrt_output,
                                     gradient_wrt_input,
                                     batch_statistics,
                                     gradient_wrt_batch_statistics);
  }
  else {
    bp_inference_impl<TensorDataType>(epsilon,
                                      gradient_wrt_output,
                                      gradient_wrt_input,
                                      running_var);
  }
}
} // namespace
// Template instantiation
// Forward prop: dispatch to fp_impl with this layer's hyperparameters.
// Weights 0 and 1 hold the running mean and running variance,
// respectively (mutable, because training mode updates them in place).
template <typename TensorDataType, data_layout T_layout, El::Device Dev>
void entrywise_batch_normalization_layer<TensorDataType, T_layout, Dev>::fp_compute() {
  using ValuesGetter = weights_details::SafeWeightsAccessor<TensorDataType>;
  // Batchnorm has different behavior for training and inference
  const auto mode = this->get_model()->get_execution_context().get_execution_mode();
  fp_impl(*this->get_comm(),
          this->m_decay,
          this->m_epsilon,
          mode == execution_mode::training,
          this->get_prev_activations(),
          this->get_activations(),
          *this->m_batch_statistics,
          ValuesGetter::mutable_values(this->get_weights(0)),   // running mean
          ValuesGetter::mutable_values(this->get_weights(1)));  // running variance
}
// Backprop: dispatch to bp_impl. Of the weights, only the running
// variance (weights 1) is needed, and bp_impl only uses it in
// inference mode.
template <typename TensorDataType, data_layout T_layout, El::Device Dev>
void entrywise_batch_normalization_layer<TensorDataType, T_layout, Dev>::bp_compute() {
  const auto mode = this->get_model()->get_execution_context().get_execution_mode();
  bp_impl(*this->get_comm(),
          this->m_epsilon,
          mode == execution_mode::training,
          this->get_prev_activations(),
          this->get_prev_error_signals(),
          this->get_error_signals(),
          *this->m_batch_statistics,
          *this->m_batch_statistics_gradient,
          this->weights_values(1));  // running variance
}
#define PROTO(T) \
template class entrywise_batch_normalization_layer< \
T, data_layout::DATA_PARALLEL, El::Device::GPU>; \
template class entrywise_batch_normalization_layer< \
T, data_layout::MODEL_PARALLEL, El::Device::GPU>
#include "lbann/macros/instantiate.hpp"
} // namespace lbann
|
3b6b033b11080f8a0195398f59907eb2443c0ea7.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include<stdio.h>
#include<cuda.h>
# define M 10000
# define N 10000
/** Elementwise sum of two M x N row-major matrices: c = a + b.
 *
 *  i (x dimension) indexes rows, j (y dimension) indexes columns.
 *  One thread per element; the guard handles partial edge blocks.
 */
__global__ void add( int * a, int * b, int * c)
{
    unsigned int i = blockDim.x * blockIdx.x + threadIdx.x;
    unsigned int j = blockDim.y * blockIdx.y + threadIdx.y;
    if (i < M && j < N) {
        // Row-major offset scales the row index by the row width N.
        // (The original i*M+j only worked because M == N.)
        const unsigned int idx = i * N + j;
        c[idx] = a[idx] + b[idx];
    }
}
/** Host-side verification that c == a + b elementwise.
 *
 *  @return 1 on success, 0 at the first mismatch.
 */
int check(int *a, int *b, int *c)
{
    for (int i = 0; i < M; i++)
    {
        for (int j = 0; j < N; j++)
        {
            // Row-major offset: row width is N (i*M+j only worked
            // because M == N).
            const int idx = i * N + j;
            if (c[idx] != a[idx] + b[idx])
                return 0;
        }
    }
    return 1;
}
int main()
{
int *h_a, *h_b, *h_c;
int *d_a, *d_b, *d_c;
// allocating memory on host
h_a = (int *)malloc(M * N * sizeof(int));
h_b = (int *)malloc(M * N * sizeof(int));
h_c = (int *)malloc(M * N * sizeof(int));
//assigning random values to the array elements
for(int i=0;i<M;i++)
{
for(int j=0;j<N;j++)
{
h_a[i*M+j]=1;
h_b[i*M+j]=2;
}
}
//assigning memory on the device
hipMalloc((void **)&d_a, M*N*sizeof(int));
hipMalloc((void **)&d_b, M*N*sizeof(int));
hipMalloc((void **)&d_c, M*N*sizeof(int));
//copying elements from host to device
hipMemcpy(d_a, h_a, M*N*sizeof(int), hipMemcpyHostToDevice);
hipMemcpy(d_b, h_b, M*N*sizeof(int), hipMemcpyHostToDevice);
//declaring the number of blocks and number of threads per block
dim3 threads(32,32);
dim3 blocks(M/32+1, N/32+1);
//calling the function and calculating the sum on device
hipLaunchKernelGGL(( add), dim3(blocks), dim3(threads) , 0, 0, d_a, d_b, d_c);
//copying the result to host memory
hipMemcpy(h_c, d_c, M*N*sizeof(int), hipMemcpyDeviceToHost);
if(check(h_a, h_b, h_c))
printf("Matrix sum is correct\n");
else
printf("Matrix sum is incorrect\n");
hipFree(d_a);
hipFree(d_b);
hipFree(d_c);
free(h_a);
free(h_b);
free(h_c);
} | 3b6b033b11080f8a0195398f59907eb2443c0ea7.cu | #include<stdio.h>
#include<cuda.h>
# define M 10000
# define N 10000
/** Elementwise sum of two M x N row-major matrices: c = a + b.
 *
 *  i (x dimension) indexes rows, j (y dimension) indexes columns.
 *  One thread per element; the guard handles partial edge blocks.
 */
__global__ void add( int * a, int * b, int * c)
{
    unsigned int i = blockDim.x * blockIdx.x + threadIdx.x;
    unsigned int j = blockDim.y * blockIdx.y + threadIdx.y;
    if (i < M && j < N) {
        // Row-major offset scales the row index by the row width N.
        // (The original i*M+j only worked because M == N.)
        const unsigned int idx = i * N + j;
        c[idx] = a[idx] + b[idx];
    }
}
/** Host-side verification that c == a + b elementwise.
 *
 *  @return 1 on success, 0 at the first mismatch.
 */
int check(int *a, int *b, int *c)
{
    for (int i = 0; i < M; i++)
    {
        for (int j = 0; j < N; j++)
        {
            // Row-major offset: row width is N (i*M+j only worked
            // because M == N).
            const int idx = i * N + j;
            if (c[idx] != a[idx] + b[idx])
                return 0;
        }
    }
    return 1;
}
/** Allocates two M x N matrices, sums them on the GPU, and verifies
 *  the result on the host. Returns 0 on success, 1 on any failure.
 */
int main()
{
    const size_t bytes = (size_t)M * N * sizeof(int);
    // allocating memory on host (check for NULL: 3 x 400 MB at M=N=10000)
    int *h_a = (int *)malloc(bytes);
    int *h_b = (int *)malloc(bytes);
    int *h_c = (int *)malloc(bytes);
    if (h_a == NULL || h_b == NULL || h_c == NULL)
    {
        printf("host allocation failed\n");
        return 1;
    }
    // assigning values to the array elements
    for (int i = 0; i < M; i++)
    {
        for (int j = 0; j < N; j++)
        {
            h_a[i * N + j] = 1;
            h_b[i * N + j] = 2;
        }
    }
    // assigning memory on the device, with error checking
    int *d_a = NULL, *d_b = NULL, *d_c = NULL;
    if (cudaMalloc((void **)&d_a, bytes) != cudaSuccess ||
        cudaMalloc((void **)&d_b, bytes) != cudaSuccess ||
        cudaMalloc((void **)&d_c, bytes) != cudaSuccess)
    {
        printf("device allocation failed\n");
        return 1;
    }
    // copying elements from host to device
    if (cudaMemcpy(d_a, h_a, bytes, cudaMemcpyHostToDevice) != cudaSuccess ||
        cudaMemcpy(d_b, h_b, bytes, cudaMemcpyHostToDevice) != cudaSuccess)
    {
        printf("host-to-device copy failed\n");
        return 1;
    }
    // launch configuration: ceil-div instead of M/32+1, which wastes a
    // block when the dimension is a multiple of 32
    dim3 threads(32, 32);
    dim3 blocks((M + threads.x - 1) / threads.x, (N + threads.y - 1) / threads.y);
    // calling the kernel and checking for launch errors
    add<<<blocks, threads>>>(d_a, d_b, d_c);
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess)
    {
        printf("kernel launch failed: %s\n", cudaGetErrorString(err));
        return 1;
    }
    // copying the result to host memory (cudaMemcpy synchronizes)
    if (cudaMemcpy(h_c, d_c, bytes, cudaMemcpyDeviceToHost) != cudaSuccess)
    {
        printf("device-to-host copy failed\n");
        return 1;
    }
    if (check(h_a, h_b, h_c))
        printf("Matrix sum is correct\n");
    else
        printf("Matrix sum is incorrect\n");
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);
    free(h_a);
    free(h_b);
    free(h_c);
    return 0;
}
4e47ef86e88aa726b0f3cf1e0d365cc939f68149.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
* Copyright 1993-2015 NVIDIA Corporation. All rights reserved.
*
* Please refer to the NVIDIA end user license agreement (EULA) associated
* with this source code for terms and conditions that govern your use of
* this software. Any use, reproduction, disclosure, or distribution of
* this software and related documentation outside the terms of the EULA
* is strictly prohibited.
*
*/
//
// This sample demonstrates the use of streams for concurrent execution. It also illustrates how to
// introduce dependencies between CUDA streams with the new hipStreamWaitEvent function introduced
// in CUDA 3.2.
//
// Devices of compute capability 1.x will run the kernels one after another
// Devices of compute capability 2.0 or higher can overlap the kernels
//
#include <stdio.h>
#include <helper_functions.h>
#include <helper_cuda.h>
// This is a kernel that does no real work but runs at least for a specified
// number of clocks. On exit, d_o[0] holds the number of clocks actually
// spun (>= clock_count).
__global__ void clock_block(clock_t *d_o, clock_t clock_count)
{
    unsigned int start_clock = (unsigned int) clock();
    clock_t clock_offset = 0;
    while (clock_offset < clock_count)
    {
        unsigned int end_clock = (unsigned int) clock();
        // The code below should work like
        // this (thanks to modular arithmetics):
        //
        // clock_offset = (clock_t) (end_clock > start_clock ?
        //                           end_clock - start_clock :
        //                           end_clock + (0xffffffffu - start_clock));
        //
        // Indeed, let m = 2^32 then
        // end - start = end + m - start (mod m).
        // Unsigned subtraction gives the elapsed count even across a
        // 32-bit counter wrap-around.
        clock_offset = (clock_t)(end_clock - start_clock);
    }
    d_o[0] = clock_offset;
}
// Single warp reduction kernel: d_clocks[0] receives the sum of the
// first N entries of d_clocks.
// NOTE(review): s_clocks has only 32 entries and the tree reduction
// starts at offset 16, so this assumes blockDim.x == 32 (one full
// warp); more threads would write out of bounds and fewer would read
// uninitialized shared memory -- confirm the launch configuration.
__global__ void sum(clock_t *d_clocks, int N)
{
    __shared__ clock_t s_clocks[32];
    // Per-thread partial sum over a strided slice of d_clocks.
    clock_t my_sum = 0;
    for (int i = threadIdx.x; i < N; i += blockDim.x)
    {
        my_sum += d_clocks[i];
    }
    s_clocks[threadIdx.x] = my_sum;
    __syncthreads();
    // Tree reduction in shared memory. The barrier sits outside the
    // divergent branch so every thread reaches it.
    for (int i = 16; i > 0; i /= 2)
    {
        if (threadIdx.x < i)
        {
            s_clocks[threadIdx.x] += s_clocks[threadIdx.x + i];
        }
        __syncthreads();
    }
    d_clocks[0] = s_clocks[0];
}
int main(int argc, char **argv)
{
int nkernels = 8; // number of concurrent kernels
int nstreams = nkernels + 1; // use one more stream than concurrent kernel
int nbytes = nkernels * sizeof(clock_t); // number of data bytes
float kernel_time = 10; // time the kernel should run in ms
float elapsed_time; // timing variables
int cuda_device = 0;
printf("[%s] - Starting...\n", argv[0]);
// get number of kernels if overridden on the command line
if (checkCmdLineFlag(argc, (const char **)argv, "nkernels"))
{
nkernels = getCmdLineArgumentInt(argc, (const char **)argv, "nkernels");
nstreams = nkernels + 1;
}
// use command-line specified CUDA device, otherwise use device with highest Gflops/s
cuda_device = findCudaDevice(argc, (const char **)argv);
hipDeviceProp_t deviceProp;
checkCudaErrors(hipGetDevice(&cuda_device));
checkCudaErrors(hipGetDeviceProperties(&deviceProp, cuda_device));
if ((deviceProp.concurrentKernels == 0))
{
printf("> GPU does not support concurrent kernel execution\n");
printf(" CUDA kernel runs will be serialized\n");
}
printf("> Detected Compute SM %d.%d hardware with %d multi-processors\n",
deviceProp.major, deviceProp.minor, deviceProp.multiProcessorCount);
// allocate host memory
clock_t *a = 0; // pointer to the array data in host memory
checkCudaErrors(hipHostMalloc((void **)&a, nbytes));
// allocate device memory
clock_t *d_a = 0; // pointers to data and init value in the device memory
checkCudaErrors(hipMalloc((void **)&d_a, nbytes));
// allocate and initialize an array of stream handles
hipStream_t *streams = (hipStream_t *) malloc(nstreams * sizeof(hipStream_t));
for (int i = 0; i < nstreams; i++)
{
checkCudaErrors(hipStreamCreate(&(streams[i])));
}
// create CUDA event handles
hipEvent_t start_event, stop_event;
checkCudaErrors(hipEventCreate(&start_event));
checkCudaErrors(hipEventCreate(&stop_event));
// the events are used for synchronization only and hence do not need to record timings
// this also makes events not introduce global sync points when recorded which is critical to get overlap
hipEvent_t *kernelEvent;
kernelEvent = (hipEvent_t *) malloc(nkernels * sizeof(hipEvent_t));
for (int i = 0; i < nkernels; i++)
{
checkCudaErrors(hipEventCreateWithFlags(&(kernelEvent[i]), hipEventDisableTiming));
}
//////////////////////////////////////////////////////////////////////
// time execution with nkernels streams
clock_t total_clocks = 0;
#if defined(__arm__) || defined(__aarch64__)
// the kernel takes more time than the channel reset time on arm archs, so to prevent hangs reduce time_clocks.
clock_t time_clocks = (clock_t)(kernel_time * (deviceProp.clockRate / 1000));
#else
clock_t time_clocks = (clock_t)(kernel_time * deviceProp.clockRate);
#endif
hipEventRecord(start_event, 0);
// queue nkernels in separate streams and record when they are done
for (int i=0; i<nkernels; ++i)
{
hipLaunchKernelGGL(( clock_block), dim3(1),dim3(1),0,streams[i], &d_a[i], time_clocks);
total_clocks += time_clocks;
checkCudaErrors(hipEventRecord(kernelEvent[i], streams[i]));
// make the last stream wait for the kernel event to be recorded
checkCudaErrors(hipStreamWaitEvent(streams[nstreams-1], kernelEvent[i],0));
}
// queue a sum kernel and a copy back to host in the last stream.
// the commands in this stream get dispatched as soon as all the kernel events have been recorded
hipLaunchKernelGGL(( sum), dim3(1),dim3(32),0,streams[nstreams-1], d_a, nkernels);
checkCudaErrors(hipMemcpyAsync(a, d_a, sizeof(clock_t), hipMemcpyDeviceToHost, streams[nstreams-1]));
// at this point the CPU has dispatched all work for the GPU and can continue processing other tasks in parallel
// in this sample we just wait until the GPU is done
checkCudaErrors(hipEventRecord(stop_event, 0));
checkCudaErrors(hipEventSynchronize(stop_event));
checkCudaErrors(hipEventElapsedTime(&elapsed_time, start_event, stop_event));
printf("Expected time for serial execution of %d kernels = %.3fs\n", nkernels, nkernels * kernel_time/1000.0f);
printf("Expected time for concurrent execution of %d kernels = %.3fs\n", nkernels, kernel_time/1000.0f);
printf("Measured time for sample = %.3fs\n", elapsed_time/1000.0f);
bool bTestResult = (a[0] > total_clocks);
// release resources
for (int i = 0; i < nkernels; i++)
{
hipStreamDestroy(streams[i]);
hipEventDestroy(kernelEvent[i]);
}
free(streams);
free(kernelEvent);
hipEventDestroy(start_event);
hipEventDestroy(stop_event);
hipHostFree(a);
hipFree(d_a);
if (!bTestResult)
{
printf("Test failed!\n");
exit(EXIT_FAILURE);
}
printf("Test passed\n");
exit(EXIT_SUCCESS);
}
| 4e47ef86e88aa726b0f3cf1e0d365cc939f68149.cu | /*
* Copyright 1993-2015 NVIDIA Corporation. All rights reserved.
*
* Please refer to the NVIDIA end user license agreement (EULA) associated
* with this source code for terms and conditions that govern your use of
* this software. Any use, reproduction, disclosure, or distribution of
* this software and related documentation outside the terms of the EULA
* is strictly prohibited.
*
*/
//
// This sample demonstrates the use of streams for concurrent execution. It also illustrates how to
// introduce dependencies between CUDA streams with the new cudaStreamWaitEvent function introduced
// in CUDA 3.2.
//
// Devices of compute capability 1.x will run the kernels one after another
// Devices of compute capability 2.0 or higher can overlap the kernels
//
#include <stdio.h>
#include <helper_functions.h>
#include <helper_cuda.h>
// This is a kernel that does no real work but runs at least for a specified number of clocks
__global__ void clock_block(clock_t *d_o, clock_t clock_count)
{
unsigned int start_clock = (unsigned int) clock();
clock_t clock_offset = 0;
while (clock_offset < clock_count)
{
unsigned int end_clock = (unsigned int) clock();
// The code below should work like
// this (thanks to modular arithmetics):
//
// clock_offset = (clock_t) (end_clock > start_clock ?
// end_clock - start_clock :
// end_clock + (0xffffffffu - start_clock));
//
// Indeed, let m = 2^32 then
// end - start = end + m - start (mod m).
clock_offset = (clock_t)(end_clock - start_clock);
}
d_o[0] = clock_offset;
}
// Single warp reduction kernel
__global__ void sum(clock_t *d_clocks, int N)
{
__shared__ clock_t s_clocks[32];
clock_t my_sum = 0;
for (int i = threadIdx.x; i < N; i+= blockDim.x)
{
my_sum += d_clocks[i];
}
s_clocks[threadIdx.x] = my_sum;
__syncthreads();
for (int i=16; i>0; i/=2)
{
if (threadIdx.x < i)
{
s_clocks[threadIdx.x] += s_clocks[threadIdx.x + i];
}
__syncthreads();
}
d_clocks[0] = s_clocks[0];
}
int main(int argc, char **argv)
{
int nkernels = 8; // number of concurrent kernels
int nstreams = nkernels + 1; // use one more stream than concurrent kernel
int nbytes = nkernels * sizeof(clock_t); // number of data bytes
float kernel_time = 10; // time the kernel should run in ms
float elapsed_time; // timing variables
int cuda_device = 0;
printf("[%s] - Starting...\n", argv[0]);
// get number of kernels if overridden on the command line
if (checkCmdLineFlag(argc, (const char **)argv, "nkernels"))
{
nkernels = getCmdLineArgumentInt(argc, (const char **)argv, "nkernels");
nstreams = nkernels + 1;
}
// use command-line specified CUDA device, otherwise use device with highest Gflops/s
cuda_device = findCudaDevice(argc, (const char **)argv);
cudaDeviceProp deviceProp;
checkCudaErrors(cudaGetDevice(&cuda_device));
checkCudaErrors(cudaGetDeviceProperties(&deviceProp, cuda_device));
if ((deviceProp.concurrentKernels == 0))
{
printf("> GPU does not support concurrent kernel execution\n");
printf(" CUDA kernel runs will be serialized\n");
}
printf("> Detected Compute SM %d.%d hardware with %d multi-processors\n",
deviceProp.major, deviceProp.minor, deviceProp.multiProcessorCount);
// allocate host memory
clock_t *a = 0; // pointer to the array data in host memory
checkCudaErrors(cudaMallocHost((void **)&a, nbytes));
// allocate device memory
clock_t *d_a = 0; // pointers to data and init value in the device memory
checkCudaErrors(cudaMalloc((void **)&d_a, nbytes));
// allocate and initialize an array of stream handles
cudaStream_t *streams = (cudaStream_t *) malloc(nstreams * sizeof(cudaStream_t));
for (int i = 0; i < nstreams; i++)
{
checkCudaErrors(cudaStreamCreate(&(streams[i])));
}
// create CUDA event handles
cudaEvent_t start_event, stop_event;
checkCudaErrors(cudaEventCreate(&start_event));
checkCudaErrors(cudaEventCreate(&stop_event));
// the events are used for synchronization only and hence do not need to record timings
// this also makes events not introduce global sync points when recorded which is critical to get overlap
cudaEvent_t *kernelEvent;
kernelEvent = (cudaEvent_t *) malloc(nkernels * sizeof(cudaEvent_t));
for (int i = 0; i < nkernels; i++)
{
checkCudaErrors(cudaEventCreateWithFlags(&(kernelEvent[i]), cudaEventDisableTiming));
}
//////////////////////////////////////////////////////////////////////
// time execution with nkernels streams
clock_t total_clocks = 0;
#if defined(__arm__) || defined(__aarch64__)
// the kernel takes more time than the channel reset time on arm archs, so to prevent hangs reduce time_clocks.
clock_t time_clocks = (clock_t)(kernel_time * (deviceProp.clockRate / 1000));
#else
clock_t time_clocks = (clock_t)(kernel_time * deviceProp.clockRate);
#endif
cudaEventRecord(start_event, 0);
// queue nkernels in separate streams and record when they are done
for (int i=0; i<nkernels; ++i)
{
clock_block<<<1,1,0,streams[i]>>>(&d_a[i], time_clocks);
total_clocks += time_clocks;
checkCudaErrors(cudaEventRecord(kernelEvent[i], streams[i]));
// make the last stream wait for the kernel event to be recorded
checkCudaErrors(cudaStreamWaitEvent(streams[nstreams-1], kernelEvent[i],0));
}
// queue a sum kernel and a copy back to host in the last stream.
// the commands in this stream get dispatched as soon as all the kernel events have been recorded
sum<<<1,32,0,streams[nstreams-1]>>>(d_a, nkernels);
checkCudaErrors(cudaMemcpyAsync(a, d_a, sizeof(clock_t), cudaMemcpyDeviceToHost, streams[nstreams-1]));
// at this point the CPU has dispatched all work for the GPU and can continue processing other tasks in parallel
// in this sample we just wait until the GPU is done
checkCudaErrors(cudaEventRecord(stop_event, 0));
checkCudaErrors(cudaEventSynchronize(stop_event));
checkCudaErrors(cudaEventElapsedTime(&elapsed_time, start_event, stop_event));
printf("Expected time for serial execution of %d kernels = %.3fs\n", nkernels, nkernels * kernel_time/1000.0f);
printf("Expected time for concurrent execution of %d kernels = %.3fs\n", nkernels, kernel_time/1000.0f);
printf("Measured time for sample = %.3fs\n", elapsed_time/1000.0f);
bool bTestResult = (a[0] > total_clocks);
// release resources
for (int i = 0; i < nkernels; i++)
{
cudaStreamDestroy(streams[i]);
cudaEventDestroy(kernelEvent[i]);
}
free(streams);
free(kernelEvent);
cudaEventDestroy(start_event);
cudaEventDestroy(stop_event);
cudaFreeHost(a);
cudaFree(d_a);
if (!bTestResult)
{
printf("Test failed!\n");
exit(EXIT_FAILURE);
}
printf("Test passed\n");
exit(EXIT_SUCCESS);
}
|
bde04212eb9bf9d86fbc67896801bf52c560359e.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/**
* Copyright 2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "plugin/device/gpu/kernel/cuda_impl/cuda_ops/extract_volume_patches_impl.cuh"
#include "include/hip/hip_fp16.h"
template <typename T>
__global__ void ExtractVolumePatches(size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col,
int64_t output_depth, int64_t output_height, int64_t output_width, bool need_batch,
int64_t d_stride, int64_t h_stride, int64_t w_stride, int64_t patch_stride,
int64_t other_stride, int64_t input_channel, int64_t input_dep_size,
int64_t input_row_size, int64_t input_col_size, int64_t pad_head, int64_t pad_top,
int64_t pad_left, int64_t chan_input_stride, int64_t dep_input_stride,
int64_t row_input_stride, int64_t patch_input_stride, const T *input, T *output) {
size_t pos;
for (size_t w_pos = blockIdx.x * blockDim.x + threadIdx.x; w_pos < output_size / (w_stride * input_channel);
w_pos += blockDim.x * gridDim.x) {
pos = static_cast<size_t>(w_pos / patch_stride) * w_stride * input_channel * patch_stride + (w_pos % patch_stride);
const int64_t batch_index = need_batch ? (static_cast<int64_t>(pos) / other_stride) : 0;
const int64_t inner_index =
need_batch ? (static_cast<int64_t>(pos) - batch_index * other_stride) : static_cast<int64_t>(pos);
// inner index
const int64_t patch_index = inner_index % patch_stride;
const int64_t patch_offset = inner_index / patch_stride / input_channel;
// channel
const int64_t channel = inner_index / patch_stride % input_channel;
// depth
const int64_t dep_index = patch_index / (output_height * output_width);
const int64_t dep_offset = patch_offset / d_stride;
const int64_t input_dep = dep_index * stride_dep + dep_offset - pad_head;
if (input_dep < 0 || input_dep >= input_dep_size) {
continue;
}
// height
const int64_t row_index = patch_index / output_width % output_height;
const int64_t row_offset = patch_offset / w_stride % h_stride;
const int64_t input_row = row_index * stride_row + row_offset - pad_top;
if (input_row < 0 || input_row >= input_row_size) {
continue;
}
// width
const int64_t col_index = patch_index % output_width;
const int64_t col_offset = patch_offset % w_stride;
const int64_t input_col = col_index * stride_col + col_offset - pad_left;
// input index
const int64_t input_index = input_col + input_row * row_input_stride + input_dep * dep_input_stride +
channel * chan_input_stride + batch_index * patch_input_stride;
#pragma unroll
for (int64_t i = 0; i < w_stride; i++) {
if (input_col + i < 0) {
continue;
}
if (input_col + i >= input_col_size) {
break;
}
#pragma unroll
for (int64_t j = 0; j < input_channel; j++) {
output[pos + (i * input_channel + j) * patch_stride] = input[input_index + i + j * chan_input_stride];
}
}
}
return;
}
template <typename T>
void CalExtractVolumePatches(size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col,
int64_t output_depth, int64_t output_height, int64_t output_width, bool need_batch,
int64_t d_stride, int64_t h_stride, int64_t w_stride, int64_t patch_stride,
int64_t other_stride, int64_t input_channel, int64_t input_dep_size,
int64_t input_row_size, int64_t input_col_size, int64_t pad_head, int64_t pad_top,
int64_t pad_left, int64_t chan_input_stride, int64_t dep_input_stride,
int64_t row_input_stride, int64_t patch_input_stride, const T *input, T *output,
hipStream_t stream) {
hipMemsetAsync(output, 0, sizeof(T) * output_size, stream);
hipLaunchKernelGGL(( ExtractVolumePatches), dim3(GET_BLOCKS(output_size / (w_stride * input_channel))), dim3(GET_THREADS), 0, stream,
output_size, stride_dep, stride_row, stride_col, output_depth, output_height, output_width, need_batch, d_stride,
h_stride, w_stride, patch_stride, other_stride, input_channel, input_dep_size, input_row_size, input_col_size,
pad_head, pad_top, pad_left, chan_input_stride, dep_input_stride, row_input_stride, patch_input_stride, input,
output);
}
template CUDA_LIB_EXPORT void CalExtractVolumePatches<double>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const double *input, double *output,
hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<float>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const float *input, float *output,
hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<half>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const half *input, half *output,
hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int64_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int64_t *input, int64_t *output,
hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int32_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int32_t *input, int32_t *output,
hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int16_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int16_t *input, int16_t *output,
hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int8_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int8_t *input, int8_t *output,
hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint64_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint64_t *input,
uint64_t *output, hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint32_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint32_t *input,
uint32_t *output, hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint16_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint16_t *input,
uint16_t *output, hipStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint8_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint8_t *input, uint8_t *output,
hipStream_t stream);
| bde04212eb9bf9d86fbc67896801bf52c560359e.cu | /**
* Copyright 2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "plugin/device/gpu/kernel/cuda_impl/cuda_ops/extract_volume_patches_impl.cuh"
#include "include/cuda_fp16.h"
template <typename T>
__global__ void ExtractVolumePatches(size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col,
int64_t output_depth, int64_t output_height, int64_t output_width, bool need_batch,
int64_t d_stride, int64_t h_stride, int64_t w_stride, int64_t patch_stride,
int64_t other_stride, int64_t input_channel, int64_t input_dep_size,
int64_t input_row_size, int64_t input_col_size, int64_t pad_head, int64_t pad_top,
int64_t pad_left, int64_t chan_input_stride, int64_t dep_input_stride,
int64_t row_input_stride, int64_t patch_input_stride, const T *input, T *output) {
size_t pos;
for (size_t w_pos = blockIdx.x * blockDim.x + threadIdx.x; w_pos < output_size / (w_stride * input_channel);
w_pos += blockDim.x * gridDim.x) {
pos = static_cast<size_t>(w_pos / patch_stride) * w_stride * input_channel * patch_stride + (w_pos % patch_stride);
const int64_t batch_index = need_batch ? (static_cast<int64_t>(pos) / other_stride) : 0;
const int64_t inner_index =
need_batch ? (static_cast<int64_t>(pos) - batch_index * other_stride) : static_cast<int64_t>(pos);
// inner index
const int64_t patch_index = inner_index % patch_stride;
const int64_t patch_offset = inner_index / patch_stride / input_channel;
// channel
const int64_t channel = inner_index / patch_stride % input_channel;
// depth
const int64_t dep_index = patch_index / (output_height * output_width);
const int64_t dep_offset = patch_offset / d_stride;
const int64_t input_dep = dep_index * stride_dep + dep_offset - pad_head;
if (input_dep < 0 || input_dep >= input_dep_size) {
continue;
}
// height
const int64_t row_index = patch_index / output_width % output_height;
const int64_t row_offset = patch_offset / w_stride % h_stride;
const int64_t input_row = row_index * stride_row + row_offset - pad_top;
if (input_row < 0 || input_row >= input_row_size) {
continue;
}
// width
const int64_t col_index = patch_index % output_width;
const int64_t col_offset = patch_offset % w_stride;
const int64_t input_col = col_index * stride_col + col_offset - pad_left;
// input index
const int64_t input_index = input_col + input_row * row_input_stride + input_dep * dep_input_stride +
channel * chan_input_stride + batch_index * patch_input_stride;
#pragma unroll
for (int64_t i = 0; i < w_stride; i++) {
if (input_col + i < 0) {
continue;
}
if (input_col + i >= input_col_size) {
break;
}
#pragma unroll
for (int64_t j = 0; j < input_channel; j++) {
output[pos + (i * input_channel + j) * patch_stride] = input[input_index + i + j * chan_input_stride];
}
}
}
return;
}
template <typename T>
void CalExtractVolumePatches(size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col,
int64_t output_depth, int64_t output_height, int64_t output_width, bool need_batch,
int64_t d_stride, int64_t h_stride, int64_t w_stride, int64_t patch_stride,
int64_t other_stride, int64_t input_channel, int64_t input_dep_size,
int64_t input_row_size, int64_t input_col_size, int64_t pad_head, int64_t pad_top,
int64_t pad_left, int64_t chan_input_stride, int64_t dep_input_stride,
int64_t row_input_stride, int64_t patch_input_stride, const T *input, T *output,
cudaStream_t stream) {
cudaMemsetAsync(output, 0, sizeof(T) * output_size, stream);
ExtractVolumePatches<<<GET_BLOCKS(output_size / (w_stride * input_channel)), GET_THREADS, 0, stream>>>(
output_size, stride_dep, stride_row, stride_col, output_depth, output_height, output_width, need_batch, d_stride,
h_stride, w_stride, patch_stride, other_stride, input_channel, input_dep_size, input_row_size, input_col_size,
pad_head, pad_top, pad_left, chan_input_stride, dep_input_stride, row_input_stride, patch_input_stride, input,
output);
}
template CUDA_LIB_EXPORT void CalExtractVolumePatches<double>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const double *input, double *output,
cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<float>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const float *input, float *output,
cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<half>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const half *input, half *output,
cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int64_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int64_t *input, int64_t *output,
cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int32_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int32_t *input, int32_t *output,
cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int16_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int16_t *input, int16_t *output,
cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<int8_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const int8_t *input, int8_t *output,
cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint64_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint64_t *input,
uint64_t *output, cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint32_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint32_t *input,
uint32_t *output, cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint16_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint16_t *input,
uint16_t *output, cudaStream_t stream);
template CUDA_LIB_EXPORT void CalExtractVolumePatches<uint8_t>(
size_t output_size, int64_t stride_dep, int64_t stride_row, int64_t stride_col, int64_t output_depth,
int64_t output_height, int64_t output_width, bool need_batch, int64_t d_stride, int64_t h_stride, int64_t w_stride,
int64_t patch_stride, int64_t other_stride, int64_t input_channel, int64_t input_dep_size, int64_t input_row_size,
int64_t input_col_size, int64_t pad_head, int64_t pad_top, int64_t pad_left, int64_t chan_input_stride,
int64_t dep_input_stride, int64_t row_input_stride, int64_t patch_input_stride, const uint8_t *input, uint8_t *output,
cudaStream_t stream);
|
7644d37c246332950a49caf46b3c6de6c1dd7cbe.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "helpers.h"
/*
This convolution operation uses constant memory for kernel(filter),with register output
*/
__constant__ float ckernel[81];
// 2D convolution kernel: one thread per output pixel (x = column, y = row),
// one grid z-slice per output kernel k. Filter weights are read from the
// file-scope __constant__ array ckernel; the `kernel` pointer parameter is
// unused and kept only for signature compatibility with existing callers.
// Layout: input is H x W x channels interleaved; output is H x W x kernels
// interleaved. k_width is the filter half-width (filter side = 2*k_width+1).
// Fixes: removed the dead up-front global store `output[output_idx] = 0.0;`
// (a double literal, unconditionally overwritten by the final store) and
// moved the index computation behind the bounds guard.
__global__ void conv_cuda( float *input, float *output, int width, int height, float *kernel, int channels,int k_width,int kernels ){
    int k = blockIdx.z;
    int j = threadIdx.x + blockIdx.x*blockDim.x ;
    int i = threadIdx.y + blockIdx.y*blockDim.y ;
    // Grid tail guard: the launch rounds the grid up, discard extra threads.
    if(i>=height || j>=width){
        return;
    }
    int output_idx = i*width*kernels + j*kernels + k;
    float tmp_output = 0;  // accumulate in a register; single global store at the end
    for (int c = 0; c < channels; c++) {
        for (int k_i = -k_width; k_i <= k_width; k_i++) {
            for (int k_j = -k_width; k_j <= k_width; k_j++) {
                // Zero padding: taps that fall outside the image contribute nothing.
                if (i + k_i >= 0 && i + k_i < height && j + k_j >= 0 &&
                    j + k_j < width) {
                    int input_idx =
                        c + (j + k_j)*channels + (i + k_i)*channels * width;
                    // ckernel is laid out [kernel][channel][row][col], each spatial dim 2*k_width+1.
                    int kernel_index = k*channels*(2*k_width+1)*(2*k_width+1) + c*(2*k_width+1)*(2*k_width+1) + (k_i + k_width)* (2*k_width+1)+k_j + k_width;
                    tmp_output +=
                        input[input_idx] * ckernel[kernel_index];
                }
            }
        }
    }
    output[output_idx] = tmp_output;
}
/*
 * Driver: loads an image, convolves it on the GPU with a 3x3 filter held in
 * __constant__ memory (ckernel), and writes the result to cuda_out.png.
 *
 * Fixes vs. the previous revision:
 *  - h_output comes from malloc(), so it is released with free(); mixing
 *    malloc with delete[] is undefined behavior.
 *  - d_kernel was handed to hipFree() without ever being initialized; it is
 *    now nullptr so the free is a defined no-op.
 *  - the device->host copy now uses output_bytes (matching d_output's
 *    allocation) instead of the coincidentally-equal input_bytes.
 *  - the kernel's unused `kernel` pointer argument is passed as nullptr; a
 *    __constant__ device symbol must not be passed by name from host code.
 */
int main(int argc, char *argv[]) {
    char *outputfile = (char *)"cuda_out.png";
    // Require an input image path on the command line.
    if (argc < 2) {
        std::cout << "No file input" << std::endl;
        return 0;
    }
    // Check if the filename is valid
    char *filename = argv[1];
    std::cout << argv[1] << std::endl;
    // Load Image
    cv::Mat image;
    image = load_image(filename);
    if (image.empty()) {
        std::cout << "File not exist" << std::endl;
        return 0;
    }
    //==================================
    // Define I/O sizes
    //==================================
    int padding = 1;
    int channels = 3;
    int height = image.rows;
    int width = image.cols;
    int kernels = 3;
    std::cout << "Image dims (HxW)is " << height << "x" << width << std::endl;
    int height_padded = height + 2 * padding;
    int width_padded = width + 2 * padding;
    int input_bytes = channels * height * width * sizeof(float);
    int output_bytes = channels * height * width * sizeof(float);
    std::cout << "Padded dims is " << height_padded << "x" << width_padded
              << std::endl;
    float *h_input = (float *)image.data;
    float *h_output = (float *)malloc(output_bytes);  // released with free() below
    float *d_input;
    float *d_output;
    hipMalloc( (void **) &d_input, input_bytes) ;
    hipMalloc( (void **) &d_output, output_bytes) ;
    hipMemcpy( d_input, h_input, input_bytes, hipMemcpyHostToDevice);
    // Launch configuration: one thread per pixel, one z-slice per output kernel.
    int bx = 32;
    int by = 16;
    dim3 block( bx, by ) ;
    dim3 grid( (width + block.x-1)/block.x, (height + block.y-1)/block.y, 3) ;
    printf("Grid : {%d, %d, %d} blocks. Blocks : {%d, %d} threads.\n", grid.x, grid.y,grid.z, block.x, block.y);
    //==================================
    // Define Kernel data
    //==================================
    // Mystery kernel (Laplacian-style), replicated per output kernel/channel.
    const float kernel_template[3][3] = {{1, 1, 1}, {1, -8, 1}, {1, 1, 1}};
    float *d_kernel = nullptr;  // only used by the (disabled) global-memory path
    float h_kernel[3][3][3][3];
    int kernel_bytes = 3*3*3*3*sizeof(float);
    for (int kernel = 0; kernel < 3; ++kernel) {
        for (int channel = 0; channel < 3; ++channel) {
            for (int row = 0; row < 3; ++row) {
                for (int column = 0; column < 3; ++column) {
                    h_kernel[kernel][channel][row][column] = kernel_template[row][column];
                }
            }
        }
    }
    // //Copy kernel to global mem
    // hipMalloc( (void **) &d_kernel, kernel_bytes ) ;
    // hipMemcpy( d_kernel, h_kernel, kernel_bytes, hipMemcpyHostToDevice);
    // Copy filter weights into constant memory.
    hipMemcpyToSymbol(ckernel, &h_kernel,kernel_bytes);
    int k_size = 3;
    int k_width = (k_size - 1) / 2;
    //==================================
    // GPU Convolution
    //==================================
    printf("Start conv\n");
    double timeStampA = getTimeStamp();
    // The kernel reads its weights from ckernel; its pointer argument is unused.
    hipLaunchKernelGGL(( conv_cuda), dim3(grid), dim3(block), 0, 0, d_input, d_output, width, height, nullptr, 3,k_width,kernels );
    hipDeviceSynchronize();
    double timeStampB = getTimeStamp();
    hipMemcpy( h_output, d_output, output_bytes, hipMemcpyDeviceToHost);
    //==================================
    // Collect data
    //==================================
    std::cout << "Total convolution time: " << timeStampB - timeStampA
              << std::endl;
    std::cout << "Save Output to " << outputfile << std::endl;
    save_image(outputfile, h_output, height, width);
    hipFree( d_input ) ;
    hipFree( d_output ) ;
    hipFree( d_kernel ) ;  // nullptr: defined no-op, kept for symmetry with the disabled path
    hipDeviceReset() ;
    free(h_output);  // matches malloc(); delete[] here was undefined behavior
    return 0;
} | 7644d37c246332950a49caf46b3c6de6c1dd7cbe.cu | #include "helpers.h"
/*
This convolution operation uses constant memory for kernel(filter),with register output
*/
__constant__ float ckernel[81];
// 2D convolution kernel: one thread per output pixel (x = column, y = row),
// one grid z-slice per output kernel k. Filter weights are read from the
// file-scope __constant__ array ckernel; the `kernel` pointer parameter is
// unused and kept only for signature compatibility with existing callers.
// Layout: input is H x W x channels interleaved; output is H x W x kernels
// interleaved. k_width is the filter half-width (filter side = 2*k_width+1).
// Fixes: removed the dead up-front global store `output[output_idx] = 0.0;`
// (a double literal, unconditionally overwritten by the final store) and
// moved the index computation behind the bounds guard.
__global__ void conv_cuda( float *input, float *output, int width, int height, float *kernel, int channels,int k_width,int kernels ){
    int k = blockIdx.z;
    int j = threadIdx.x + blockIdx.x*blockDim.x ;
    int i = threadIdx.y + blockIdx.y*blockDim.y ;
    // Grid tail guard: the launch rounds the grid up, discard extra threads.
    if(i>=height || j>=width){
        return;
    }
    int output_idx = i*width*kernels + j*kernels + k;
    float tmp_output = 0;  // accumulate in a register; single global store at the end
    for (int c = 0; c < channels; c++) {
        for (int k_i = -k_width; k_i <= k_width; k_i++) {
            for (int k_j = -k_width; k_j <= k_width; k_j++) {
                // Zero padding: taps that fall outside the image contribute nothing.
                if (i + k_i >= 0 && i + k_i < height && j + k_j >= 0 &&
                    j + k_j < width) {
                    int input_idx =
                        c + (j + k_j)*channels + (i + k_i)*channels * width;
                    // ckernel is laid out [kernel][channel][row][col], each spatial dim 2*k_width+1.
                    int kernel_index = k*channels*(2*k_width+1)*(2*k_width+1) + c*(2*k_width+1)*(2*k_width+1) + (k_i + k_width)* (2*k_width+1)+k_j + k_width;
                    tmp_output +=
                        input[input_idx] * ckernel[kernel_index];
                }
            }
        }
    }
    output[output_idx] = tmp_output;
}
/*
 * Driver: loads an image, convolves it on the GPU with a 3x3 filter held in
 * __constant__ memory (ckernel), and writes the result to cuda_out.png.
 *
 * Fixes vs. the previous revision:
 *  - h_output comes from malloc(), so it is released with free(); mixing
 *    malloc with delete[] is undefined behavior.
 *  - d_kernel was handed to cudaFree() without ever being initialized; it is
 *    now nullptr so the free is a defined no-op.
 *  - the device->host copy now uses output_bytes (matching d_output's
 *    allocation) instead of the coincidentally-equal input_bytes.
 *  - the kernel's unused `kernel` pointer argument is passed as nullptr; a
 *    __constant__ device symbol must not be passed by name from host code.
 */
int main(int argc, char *argv[]) {
    char *outputfile = (char *)"cuda_out.png";
    // Require an input image path on the command line.
    if (argc < 2) {
        std::cout << "No file input" << std::endl;
        return 0;
    }
    // Check if the filename is valid
    char *filename = argv[1];
    std::cout << argv[1] << std::endl;
    // Load Image
    cv::Mat image;
    image = load_image(filename);
    if (image.empty()) {
        std::cout << "File not exist" << std::endl;
        return 0;
    }
    //==================================
    // Define I/O sizes
    //==================================
    int padding = 1;
    int channels = 3;
    int height = image.rows;
    int width = image.cols;
    int kernels = 3;
    std::cout << "Image dims (HxW)is " << height << "x" << width << std::endl;
    int height_padded = height + 2 * padding;
    int width_padded = width + 2 * padding;
    int input_bytes = channels * height * width * sizeof(float);
    int output_bytes = channels * height * width * sizeof(float);
    std::cout << "Padded dims is " << height_padded << "x" << width_padded
              << std::endl;
    float *h_input = (float *)image.data;
    float *h_output = (float *)malloc(output_bytes);  // released with free() below
    float *d_input;
    float *d_output;
    cudaMalloc( (void **) &d_input, input_bytes) ;
    cudaMalloc( (void **) &d_output, output_bytes) ;
    cudaMemcpy( d_input, h_input, input_bytes, cudaMemcpyHostToDevice);
    // Launch configuration: one thread per pixel, one z-slice per output kernel.
    int bx = 32;
    int by = 16;
    dim3 block( bx, by ) ;
    dim3 grid( (width + block.x-1)/block.x, (height + block.y-1)/block.y, 3) ;
    printf("Grid : {%d, %d, %d} blocks. Blocks : {%d, %d} threads.\n", grid.x, grid.y,grid.z, block.x, block.y);
    //==================================
    // Define Kernel data
    //==================================
    // Mystery kernel (Laplacian-style), replicated per output kernel/channel.
    const float kernel_template[3][3] = {{1, 1, 1}, {1, -8, 1}, {1, 1, 1}};
    float *d_kernel = nullptr;  // only used by the (disabled) global-memory path
    float h_kernel[3][3][3][3];
    int kernel_bytes = 3*3*3*3*sizeof(float);
    for (int kernel = 0; kernel < 3; ++kernel) {
        for (int channel = 0; channel < 3; ++channel) {
            for (int row = 0; row < 3; ++row) {
                for (int column = 0; column < 3; ++column) {
                    h_kernel[kernel][channel][row][column] = kernel_template[row][column];
                }
            }
        }
    }
    // //Copy kernel to global mem
    // cudaMalloc( (void **) &d_kernel, kernel_bytes ) ;
    // cudaMemcpy( d_kernel, h_kernel, kernel_bytes, cudaMemcpyHostToDevice);
    // Copy filter weights into constant memory.
    cudaMemcpyToSymbol(ckernel, &h_kernel,kernel_bytes);
    int k_size = 3;
    int k_width = (k_size - 1) / 2;
    //==================================
    // GPU Convolution
    //==================================
    printf("Start conv\n");
    double timeStampA = getTimeStamp();
    // The kernel reads its weights from ckernel; its pointer argument is unused.
    conv_cuda<<<grid, block>>>( d_input, d_output, width, height, nullptr, 3,k_width,kernels );
    cudaDeviceSynchronize();
    double timeStampB = getTimeStamp();
    cudaMemcpy( h_output, d_output, output_bytes, cudaMemcpyDeviceToHost);
    //==================================
    // Collect data
    //==================================
    std::cout << "Total convolution time: " << timeStampB - timeStampA
              << std::endl;
    std::cout << "Save Output to " << outputfile << std::endl;
    save_image(outputfile, h_output, height, width);
    cudaFree( d_input ) ;
    cudaFree( d_output ) ;
    cudaFree( d_kernel ) ;  // nullptr: defined no-op, kept for symmetry with the disabled path
    cudaDeviceReset() ;
    free(h_output);  // matches malloc(); delete[] here was undefined behavior
    return 0;
}
086c375494ab3379cca678db896be006d42c4ecd.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <au_vision/shape_analysis/gpu_util_kernels.h>
namespace au_vision {
// Forward declarations
__global__ void inRange_device(const cv::cuda::PtrStepSz<uchar3> src,
cv::cuda::PtrStepSzb dst, int lbc0, int ubc0,
int lbc1, int ubc1, int lbc2, int ubc2);
__global__ void simpleEdgeDetect_device(unsigned short* grayMask,
unsigned char* binaryMask, int rows,
int cols);
__device__ bool sameColor_device(unsigned short* grayMask,
unsigned char* binaryMask, int rows, int cols,
int idx, int movX, int movY);
__global__ void buildMask_device(unsigned short* dstMask,
unsigned short* valueMap,
unsigned char* imageLab, int size);
// Kernel credits for inRange: https://github.com/opencv/opencv/issues/6295
// Launches inRange_device asynchronously on `stream`: dst(y,x) = 255 where
// src(y,x) lies componentwise inside [lowerb, upperb], else 0. Uses square
// 2D blocks whose edge is min(global_threadsPerBlock, 32) so the block never
// exceeds the 1024-thread device limit.
void callInRange_device(const cv::cuda::GpuMat& src, const cv::Scalar& lowerb,
                        const cv::Scalar& upperb, cv::cuda::GpuMat& dst,
                        hipStream_t stream) {
  // Clamp the square block edge so edge*edge <= 1024.
  const int blockEdge = (global_threadsPerBlock > 32) ? 32 : global_threadsPerBlock;
  const int height = src.rows;
  const int width = src.cols;
  if (height == 0 || width == 0) {
    return;  // nothing to do for an empty image
  }
  // grid.x spans columns and grid.y spans rows (note the reversal vs. (rows, cols)).
  const dim3 blockSize(blockEdge, blockEdge, 1);
  const dim3 gridSize(ceil((float)width / blockEdge), ceil((float)height / blockEdge), 1);
  hipLaunchKernelGGL(( inRange_device), dim3(gridSize), dim3(blockSize), 0, stream,
      src, dst, lowerb[0], upperb[0], lowerb[1], upperb[1], lowerb[2],
      upperb[2]);
}
// Synchronous host wrapper: marks label edges of grayMask into binaryMask
// (one thread per pixel, 32-thread blocks), then blocks until the kernel
// completes and checks for launch/execution errors.
void callSimpleEdgeDetect_device(unsigned short* grayMask,
                                 unsigned char* binaryMask, int rows,
                                 int cols) {
  const int threadsPerBlock = 32;
  const int pixelCount = rows * cols;
  // Ceil-divide so the final partial block still covers the tail pixels.
  const int blocks = ::ceil((double)pixelCount / threadsPerBlock);
  hipLaunchKernelGGL(( simpleEdgeDetect_device), dim3(blocks), dim3(threadsPerBlock), 0, 0, grayMask, binaryMask,
                     rows, cols);
  hipDeviceSynchronize();            // wait for the kernel to finish
  gpuErrorCheck(hipGetLastError());  // surface any launch/execution error
}
// Synchronous host wrapper: labels each Lab pixel through the valueMap
// lookup table (see buildMask_device), then waits for completion and checks
// for launch/execution errors.
void callBuildMask_device(unsigned short* dstMask, unsigned short* valueMap,
                          unsigned char* imageLab, int size) {
  const int threadsPerBlock = 32;
  // Ceil-divide so the final partial block still covers the tail pixels.
  const int blocks = ::ceil((double)size / threadsPerBlock);
  hipLaunchKernelGGL(( buildMask_device), dim3(blocks), dim3(threadsPerBlock), 0, 0, dstMask, valueMap, imageLab,
                     size);
  hipDeviceSynchronize();            // wait for the kernel to finish
  gpuErrorCheck(hipGetLastError());  // surface any launch/execution error
}
// Per-pixel 3-channel range test: dst(y,x) = 255 iff every channel of
// src(y,x) lies inside its inclusive [lb, ub] band, else 0.
__global__ void inRange_device(const cv::cuda::PtrStepSz<uchar3> src,
                               cv::cuda::PtrStepSzb dst, int lbc0, int ubc0,
                               int lbc1, int ubc1, int lbc2, int ubc2) {
  const int col = blockIdx.x * blockDim.x + threadIdx.x;
  const int row = blockIdx.y * blockDim.y + threadIdx.y;
  if (col >= src.cols || row >= src.rows) {
    return;  // grid tail guard
  }
  const uchar3 px = src(row, col);
  const bool inside = px.x >= lbc0 && px.x <= ubc0 &&
                      px.y >= lbc1 && px.y <= ubc1 &&
                      px.z >= lbc2 && px.z <= ubc2;
  dst(row, col) = inside ? 255 : 0;
}
// One thread per pixel of the label image grayMask: writes 255 into
// binaryMask where the pixel's label differs from any 4-connected
// (left/right/up/down) neighbour, else 0. sameColor_device only reads, so
// short-circuit evaluation here matches the original neighbour counting.
__global__ void simpleEdgeDetect_device(unsigned short* grayMask,
                                        unsigned char* binaryMask, int rows,
                                        int cols) {
  const int idx = blockDim.x * blockIdx.x + threadIdx.x;
  if (idx >= rows * cols) {
    return;  // grid tail guard
  }
  const bool edge =
      !sameColor_device(grayMask, binaryMask, rows, cols, idx, -1, 0) ||
      !sameColor_device(grayMask, binaryMask, rows, cols, idx, 1, 0) ||
      !sameColor_device(grayMask, binaryMask, rows, cols, idx, 0, -1) ||
      !sameColor_device(grayMask, binaryMask, rows, cols, idx, 0, 1);
  binaryMask[idx] = edge ? 255 : 0;
}
// Returns true when the pixel at flat index `idx` and its neighbour offset
// by (movX, movY) carry the same label in grayMask; neighbours outside the
// image count as "same" so the border is not marked as an edge.
// (binaryMask is unused; kept to preserve the call signature.)
//
// Fix: the previous flat-index test (idx + movX + cols*movY inside
// [0, rows*cols)) let horizontal moves wrap across row boundaries — e.g.
// moving left from column 0 compared against the last pixel of the row
// above. Row and column are now decomposed and clamped per axis.
__device__ bool sameColor_device(unsigned short* grayMask,
                                 unsigned char* binaryMask, int rows, int cols,
                                 int idx, int movX, int movY) {
  const int row = idx / cols;
  const int col = idx % cols;
  const int newRow = row + movY;
  const int newCol = col + movX;
  if (newRow < 0 || newRow >= rows || newCol < 0 || newCol >= cols) {
    return true;  // neighbour outside the image: treat as identical
  }
  return grayMask[idx] == grayMask[newRow * cols + newCol];
}
// Maps each Lab pixel to a label: reads the (a, b) chroma bytes of pixel
// idx from interleaved 3-byte imageLab and looks the label up in valueMap.
__global__ void buildMask_device(unsigned short* dstMask,
                                 unsigned short* valueMap,
                                 unsigned char* imageLab, int size) {
  int idx = blockDim.x * blockIdx.x + threadIdx.x;
  if (idx < size) {
    int a = imageLab[idx * 3 + 1];
    int b = imageLab[idx * 3 + 2];
    // NOTE(review): stride 255 makes distinct (a,b) pairs collide (e.g.
    // a=1,b=0 and a=0,b=255 both index 255). If valueMap is a 256x256
    // table this should be a * 256 + b — confirm against the host code
    // that fills valueMap before changing.
    dstMask[idx] = valueMap[a * 255 + b];
  }
}
} // namespace au_vision
| 086c375494ab3379cca678db896be006d42c4ecd.cu | #include <au_vision/shape_analysis/gpu_util_kernels.h>
namespace au_vision {
// Forward declarations
__global__ void inRange_device(const cv::cuda::PtrStepSz<uchar3> src,
cv::cuda::PtrStepSzb dst, int lbc0, int ubc0,
int lbc1, int ubc1, int lbc2, int ubc2);
__global__ void simpleEdgeDetect_device(unsigned short* grayMask,
unsigned char* binaryMask, int rows,
int cols);
__device__ bool sameColor_device(unsigned short* grayMask,
unsigned char* binaryMask, int rows, int cols,
int idx, int movX, int movY);
__global__ void buildMask_device(unsigned short* dstMask,
unsigned short* valueMap,
unsigned char* imageLab, int size);
// Kernel credits for inRange: https://github.com/opencv/opencv/issues/6295
// Asynchronously launches inRange_device on `stream`: dst(y,x) = 255 where
// all three channels of src(y,x) fall inside [lowerb, upperb], else 0.
// (CUDA twin of the HIP version in the paired .hip file.)
void callInRange_device(const cv::cuda::GpuMat& src, const cv::Scalar& lowerb,
                        const cv::Scalar& upperb, cv::cuda::GpuMat& dst,
                        cudaStream_t stream) {
  // Max block size of 1024 (as per spec): m*m threads per block, m <= 32
  int m = global_threadsPerBlock;
  if (m > 32) {
    m = 32;
  }
  int numRows = src.rows, numCols = src.cols;
  if (numRows == 0 || numCols == 0) return;
  // Attention! Cols Vs. Rows are reversed: grid.x spans columns, grid.y rows
  const dim3 gridSize(ceil((float)numCols / m), ceil((float)numRows / m), 1);
  const dim3 blockSize(m, m, 1);
  inRange_device<<<gridSize, blockSize, 0, stream>>>(
      src, dst, lowerb[0], upperb[0], lowerb[1], upperb[1], lowerb[2],
      upperb[2]);
}
// Synchronous wrapper: marks 4-connected label edges of grayMask into
// binaryMask (255 = edge, 0 = interior), then waits and checks for errors.
void callSimpleEdgeDetect_device(unsigned short* grayMask,
                                 unsigned char* binaryMask, int rows,
                                 int cols) {
  // One thread per pixel, 32-thread blocks, ceil-divided to cover the tail
  int blocks = std::ceil((double)(rows * cols) / 32);
  int threadsPerBlock = 32;
  simpleEdgeDetect_device<<<blocks, threadsPerBlock>>>(grayMask, binaryMask,
                                                       rows, cols);
  cudaDeviceSynchronize();  // Block until kernel queue finishes
  gpuErrorCheck(cudaGetLastError());  // Verify that all went OK
}
// Synchronous wrapper: labels each Lab pixel of imageLab into dstMask via
// the valueMap lookup table, then waits and checks for errors.
void callBuildMask_device(unsigned short* dstMask, unsigned short* valueMap,
                          unsigned char* imageLab, int size) {
  // One thread per pixel, 32-thread blocks, ceil-divided to cover the tail
  int blocks = std::ceil((double)(size) / 32);
  int threadsPerBlock = 32;
  buildMask_device<<<blocks, threadsPerBlock>>>(dstMask, valueMap, imageLab,
                                                size);
  cudaDeviceSynchronize();  // Block until kernel queue finishes
  gpuErrorCheck(cudaGetLastError());  // Verify that all went OK
}
// Per-pixel 3-channel range test: dst(y,x) = 255 iff every channel of
// src(y,x) lies inside its inclusive [lb, ub] band, else 0.
__global__ void inRange_device(const cv::cuda::PtrStepSz<uchar3> src,
                               cv::cuda::PtrStepSzb dst, int lbc0, int ubc0,
                               int lbc1, int ubc1, int lbc2, int ubc2) {
  int x = blockIdx.x * blockDim.x + threadIdx.x;
  int y = blockIdx.y * blockDim.y + threadIdx.y;
  // Grid tail guard: the grid is rounded up to whole blocks.
  if (x >= src.cols || y >= src.rows) return;
  uchar3 v = src(y, x);
  if (v.x >= lbc0 && v.x <= ubc0 && v.y >= lbc1 && v.y <= ubc1 && v.z >= lbc2 &&
      v.z <= ubc2)
    dst(y, x) = 255;
  else
    dst(y, x) = 0;
}
// One thread per pixel of the label image grayMask: writes 255 into
// binaryMask where the pixel's label differs from any 4-connected
// (left/right/up/down) neighbour, else 0.
__global__ void simpleEdgeDetect_device(unsigned short* grayMask,
                                        unsigned char* binaryMask, int rows,
                                        int cols) {
  int idx = blockDim.x * blockIdx.x + threadIdx.x;
  if (idx < rows * cols) {
    int adj = 0;  // number of neighbours with a different label
    // If the color is not surrounded by the same color, mark it as an edge
    // (left, right, up, down)
    if (!sameColor_device(grayMask, binaryMask, rows, cols, idx, -1, 0)) {
      ++adj;
    }
    if (!sameColor_device(grayMask, binaryMask, rows, cols, idx, 1, 0)) {
      ++adj;
    }
    if (!sameColor_device(grayMask, binaryMask, rows, cols, idx, 0, -1)) {
      ++adj;
    }
    if (!sameColor_device(grayMask, binaryMask, rows, cols, idx, 0, 1)) {
      ++adj;
    }
    // Any differing neighbour makes this pixel an edge pixel.
    if (adj) {
      binaryMask[idx] = 255;
    } else {
      binaryMask[idx] = 0;
    }
  }
}
// Returns true when the pixel at flat index `idx` and its neighbour offset
// by (movX, movY) carry the same label in grayMask; neighbours outside the
// image count as "same" so the border is not marked as an edge.
// (binaryMask is unused; kept to preserve the call signature.)
//
// Fix: the previous flat-index test (idx + movX + cols*movY inside
// [0, rows*cols)) let horizontal moves wrap across row boundaries — e.g.
// moving left from column 0 compared against the last pixel of the row
// above. Row and column are now decomposed and clamped per axis.
__device__ bool sameColor_device(unsigned short* grayMask,
                                 unsigned char* binaryMask, int rows, int cols,
                                 int idx, int movX, int movY) {
  const int row = idx / cols;
  const int col = idx % cols;
  const int newRow = row + movY;
  const int newCol = col + movX;
  if (newRow < 0 || newRow >= rows || newCol < 0 || newCol >= cols) {
    return true;  // neighbour outside the image: treat as identical
  }
  return grayMask[idx] == grayMask[newRow * cols + newCol];
}
// Maps each Lab pixel to a label: reads the (a, b) chroma bytes of pixel
// idx from interleaved 3-byte imageLab and looks the label up in valueMap.
__global__ void buildMask_device(unsigned short* dstMask,
                                 unsigned short* valueMap,
                                 unsigned char* imageLab, int size) {
  int idx = blockDim.x * blockIdx.x + threadIdx.x;
  if (idx < size) {
    int a = imageLab[idx * 3 + 1];
    int b = imageLab[idx * 3 + 2];
    // NOTE(review): stride 255 makes distinct (a,b) pairs collide (e.g.
    // a=1,b=0 and a=0,b=255 both index 255). If valueMap is a 256x256
    // table this should be a * 256 + b — confirm against the host code
    // that fills valueMap before changing.
    dstMask[idx] = valueMap[a * 255 + b];
  }
}
} // namespace au_vision
|
e948b7a6a7df448e1a91326f16dedf175d843b4f.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
__global__ void sorted_mean_per_slice(unsigned int* lower_bounds,
                                      unsigned int* upper_bounds,
                                      double* u, // array of particle quantity sorted by slice
                                      unsigned int n_slices,
                                      double* mean_u) // output array of length n_slices with the mean of u for each slice
/**
    Iterate once through all the particles within the
    slicing region and calculate simultaneously the mean
    value of quantity u for each slice separately.
    Assumes the particle array u to be sorted by slices.
    The index arrays lower_bounds and upper_bounds
    indicate the start and end indices
    within the sorted particle arrays for each slice. The respective
    slice id is identical to the index within lower_bounds and
    upper_bounds.
*/
{
    double sum_u;
    unsigned int n_macroparticles; // in current slice
    // Grid-stride loop over slices: each thread owns whole slices, so the
    // per-slice sum lives in a register and no atomics are needed.
    for (int sid = blockIdx.x * blockDim.x + threadIdx.x;
            sid < n_slices;
            sid += blockDim.x * gridDim.x)
    {
        sum_u = 0;
        n_macroparticles = upper_bounds[sid] - lower_bounds[sid];
        if (n_macroparticles == 0) {
            // Empty slice: define its mean as 0 instead of dividing by zero.
            mean_u[sid] = 0;
        } else {
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                sum_u += u[pid];
            }
            mean_u[sid] = sum_u / n_macroparticles;
        }
    }
}
__global__ void sorted_std_per_slice(unsigned int* lower_bounds,
                                     unsigned int* upper_bounds,
                                     double* u, // array of particle quantity sorted by slice
                                     unsigned int n_slices,
                                     double* cov_u) // output array of length n_slices with the per-slice standard deviation of u
/**
    Iterate once through all the particles within the
    slicing region and calculate simultaneously the
    standard deviation of quantity u for each slice separately.
    Assumes the particle array u to be sorted by slices.
    The index arrays lower_bounds and upper_bounds
    indicate the start and end indices
    within the sorted particle arrays for each slice. The respective
    slice id is identical to the index within lower_bounds and
    upper_bounds.
*/
{
    double sum_u, mean_u, l_cov_u, du;
    unsigned int n_macroparticles; // in current slice
    // Grid-stride loop over slices; each thread owns whole slices.
    for (int sid = blockIdx.x * blockDim.x + threadIdx.x;
            sid < n_slices;
            sid += blockDim.x * gridDim.x)
    {
        sum_u = 0;
        n_macroparticles = upper_bounds[sid] - lower_bounds[sid];
        if (n_macroparticles <= 1) {
            // Fewer than 2 particles: sample std is undefined, report 0.
            cov_u[sid] = 0;
        } else {
            // Two-pass algorithm: first the mean, then the centered squares.
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                sum_u += u[pid];
            }
            mean_u = sum_u / n_macroparticles;
            l_cov_u = 0;
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                du = u[pid] - mean_u;
                l_cov_u += du * du;
            }
            // Bessel correction (n-1): unbiased sample standard deviation.
            cov_u[sid] = sqrt(l_cov_u / (n_macroparticles - 1));
        }
    }
}
__global__ void sorted_cov_per_slice(unsigned int* lower_bounds,
                                     unsigned int* upper_bounds,
                                     double* u, // array of particle quantity sorted by slice
                                     double* v, // 2nd array of particles
                                     unsigned int n_slices,
                                     double* cov_uv) // output array of length n_slices with the per-slice covariance of (u, v)
/**
    Iterate once through all the particles within the
    slicing region and calculate simultaneously the
    covariance of the quantities u,v for each slice separately.
    Assumes the particle array u to be sorted by slices.
    The index arrays lower_bounds and upper_bounds
    indicate the start and end indices
    within the sorted particle arrays for each slice. The respective
    slice id is identical to the index within lower_bounds and
    upper_bounds.
*/
{
    // Grid-stride loop over slices; each thread owns whole slices.
    for (int sid = blockIdx.x * blockDim.x + threadIdx.x;
            sid < n_slices;
            sid += blockDim.x * gridDim.x)
    {
        double sum_u = 0.;
        double sum_v = 0.;
        unsigned int n_macroparticles = upper_bounds[sid] - lower_bounds[sid];
        if (n_macroparticles <= 1) {
            // Fewer than 2 particles: sample covariance is undefined, report 0.
            cov_uv[sid] = 0;
        } else {
            // Two-pass algorithm: means first, then centered cross products.
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                sum_u += u[pid];
                sum_v += v[pid];
            }
            double mean_u = sum_u / n_macroparticles;
            double mean_v = sum_v / n_macroparticles;
            double l_cov_u = 0;
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                l_cov_u += (u[pid] - mean_u) * (v[pid] - mean_v);
            }
            // Bessel correction (n-1): unbiased sample covariance.
            cov_uv[sid] = l_cov_u / (n_macroparticles - 1);
        }
    }
}
| e948b7a6a7df448e1a91326f16dedf175d843b4f.cu | __global__ void sorted_mean_per_slice(unsigned int* lower_bounds,
unsigned int* upper_bounds,
double* u, // array of particle quantity sorted by slice
unsigned int n_slices,
double* mean_u) // output array of length n_slices with mean values for each slice
/**
Iterate once through all the particles within the
slicing region and calculate simultaneously the mean
value of quantity u for each slice separately.
Assumes the particle array u to be sorted by slices.
The index arrays lower_bounds and upper_bounds
indicate the start and end indices
within the sorted particle arrays for each slice. The respective
slice id is identical to the index within lower_bounds and
upper_bounds.
*/
{
double sum_u;
unsigned int n_macroparticles; // in current slice
for (int sid = blockIdx.x * blockDim.x + threadIdx.x;
sid < n_slices;
sid += blockDim.x * gridDim.x)
{
sum_u = 0;
n_macroparticles = upper_bounds[sid] - lower_bounds[sid];
if (n_macroparticles == 0) {
mean_u[sid] = 0;
} else {
for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
{
sum_u += u[pid];
}
mean_u[sid] = sum_u / n_macroparticles;
}
}
}
__global__ void sorted_std_per_slice(unsigned int* lower_bounds,
                                     unsigned int* upper_bounds,
                                     double* u, // array of particle quantity sorted by slice
                                     unsigned int n_slices,
                                     double* cov_u) // output array of length n_slices with the per-slice standard deviation of u
/**
    Iterate once through all the particles within the
    slicing region and calculate simultaneously the
    standard deviation of quantity u for each slice separately.
    Assumes the particle array u to be sorted by slices.
    The index arrays lower_bounds and upper_bounds
    indicate the start and end indices
    within the sorted particle arrays for each slice. The respective
    slice id is identical to the index within lower_bounds and
    upper_bounds.
*/
{
    double sum_u, mean_u, l_cov_u, du;
    unsigned int n_macroparticles; // in current slice
    // Grid-stride loop over slices; each thread owns whole slices.
    for (int sid = blockIdx.x * blockDim.x + threadIdx.x;
            sid < n_slices;
            sid += blockDim.x * gridDim.x)
    {
        sum_u = 0;
        n_macroparticles = upper_bounds[sid] - lower_bounds[sid];
        if (n_macroparticles <= 1) {
            // Fewer than 2 particles: sample std is undefined, report 0.
            cov_u[sid] = 0;
        } else {
            // Two-pass algorithm: first the mean, then the centered squares.
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                sum_u += u[pid];
            }
            mean_u = sum_u / n_macroparticles;
            l_cov_u = 0;
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                du = u[pid] - mean_u;
                l_cov_u += du * du;
            }
            // Bessel correction (n-1): unbiased sample standard deviation.
            cov_u[sid] = sqrt(l_cov_u / (n_macroparticles - 1));
        }
    }
}
__global__ void sorted_cov_per_slice(unsigned int* lower_bounds,
                                     unsigned int* upper_bounds,
                                     double* u, // array of particle quantity sorted by slice
                                     double* v, // 2nd array of particles
                                     unsigned int n_slices,
                                     double* cov_uv) // output array of length n_slices with the per-slice covariance of (u, v)
/**
    Iterate once through all the particles within the
    slicing region and calculate simultaneously the
    covariance of the quantities u,v for each slice separately.
    Assumes the particle array u to be sorted by slices.
    The index arrays lower_bounds and upper_bounds
    indicate the start and end indices
    within the sorted particle arrays for each slice. The respective
    slice id is identical to the index within lower_bounds and
    upper_bounds.
*/
{
    // Grid-stride loop over slices; each thread owns whole slices.
    for (int sid = blockIdx.x * blockDim.x + threadIdx.x;
            sid < n_slices;
            sid += blockDim.x * gridDim.x)
    {
        double sum_u = 0.;
        double sum_v = 0.;
        unsigned int n_macroparticles = upper_bounds[sid] - lower_bounds[sid];
        if (n_macroparticles <= 1) {
            // Fewer than 2 particles: sample covariance is undefined, report 0.
            cov_uv[sid] = 0;
        } else {
            // Two-pass algorithm: means first, then centered cross products.
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                sum_u += u[pid];
                sum_v += v[pid];
            }
            double mean_u = sum_u / n_macroparticles;
            double mean_v = sum_v / n_macroparticles;
            double l_cov_u = 0;
            for (int pid = lower_bounds[sid]; pid < upper_bounds[sid]; pid++)
            {
                l_cov_u += (u[pid] - mean_u) * (v[pid] - mean_v);
            }
            // Bessel correction (n-1): unbiased sample covariance.
            cov_uv[sid] = l_cov_u / (n_macroparticles - 1);
        }
    }
}
|
af1e72d3ba331166242f9a8a3e989ac0ab25b476.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "includes.h"
__global__ void wlcss_cuda_kernel(int32_t *d_mss, int32_t *d_mss_offsets, int32_t *d_ts, int32_t *d_ss, int32_t *d_tlen, int32_t *d_toffsets, int32_t *d_slen, int32_t *d_soffsets, int32_t *d_params, int32_t *d_tmp_windows, int32_t *d_tmp_windows_offsets, int32_t *d_2d_cost_matrix){
int32_t params_idx = threadIdx.x;
int32_t template_idx = threadIdx.x;
int32_t stream_idx = blockIdx.x;
int32_t t_len = d_tlen[template_idx];
int32_t s_len = d_slen[stream_idx];
int32_t t_offset = d_toffsets[template_idx];
int32_t s_offset = d_soffsets[stream_idx];
int32_t d_mss_offset = d_mss_offsets[stream_idx*blockDim.x+template_idx];
int32_t d_tmp_windows_offset = d_tmp_windows_offsets[stream_idx*blockDim.x+template_idx];
int32_t *tmp_window = &d_tmp_windows[d_tmp_windows_offset];
int32_t *mss = &d_mss[d_mss_offset];
int32_t *t = &d_ts[t_offset];
int32_t *s = &d_ss[s_offset];
int32_t reward = d_params[params_idx*3];
int32_t penalty = d_params[params_idx*3+1];
int32_t accepteddist = d_params[params_idx*3+2];
int32_t tmp = 0;
for(int32_t j=0;j<s_len;j++){
for(int32_t i=0;i<t_len;i++){
int32_t distance = d_2d_cost_matrix[s[j]*8 + t[i]];
if (distance <= accepteddist){
tmp = tmp_window[i]+reward;
} else{
tmp = max(tmp_window[i]-penalty*distance,
max(tmp_window[i+1]-penalty*distance,
tmp_window[t_len+1]-penalty*distance));
}
tmp_window[i] = tmp_window[t_len+1];
tmp_window[t_len+1] = tmp;
}
tmp_window[t_len] = tmp_window[t_len+1];
mss[j] = tmp_window[t_len+1];
tmp_window[t_len+1] = 0;
}
} | af1e72d3ba331166242f9a8a3e989ac0ab25b476.cu | #include "includes.h"
// Warping LCSS (WLCSS) matching-score kernel.
// Mapping: blockIdx.x selects the stream; threadIdx.x selects BOTH the
// template and the parameter triple (reward, penalty, accepted distance) —
// params_idx and template_idx are intentionally the same, so template t is
// always scored with parameter set t.
// For each sample j of the stream the thread updates a rolling DP row
// (tmp_window) across the template and records the running score in mss[j].
// NOTE(review): d_2d_cost_matrix is indexed s[j]*8 + t[i], i.e. it assumes
// symbol values in [0, 8) — confirm against the host-side alphabet size.
// NOTE(review): tmp_window appears to use two scratch cells beyond the DP
// row, at t_len and t_len+1 (current best) — presumably allocated with
// length t_len+2 per (stream, template) pair; verify at the allocation site.
__global__ void wlcss_cuda_kernel(int32_t *d_mss, int32_t *d_mss_offsets, int32_t *d_ts, int32_t *d_ss, int32_t *d_tlen, int32_t *d_toffsets, int32_t *d_slen, int32_t *d_soffsets, int32_t *d_params, int32_t *d_tmp_windows, int32_t *d_tmp_windows_offsets, int32_t *d_2d_cost_matrix){
    int32_t params_idx = threadIdx.x;
    int32_t template_idx = threadIdx.x;
    int32_t stream_idx = blockIdx.x;
    int32_t t_len = d_tlen[template_idx];
    int32_t s_len = d_slen[stream_idx];
    int32_t t_offset = d_toffsets[template_idx];
    int32_t s_offset = d_soffsets[stream_idx];
    // Per-(stream, template) offsets into the flattened score/scratch arrays.
    int32_t d_mss_offset = d_mss_offsets[stream_idx*blockDim.x+template_idx];
    int32_t d_tmp_windows_offset = d_tmp_windows_offsets[stream_idx*blockDim.x+template_idx];
    int32_t *tmp_window = &d_tmp_windows[d_tmp_windows_offset];
    int32_t *mss = &d_mss[d_mss_offset];
    int32_t *t = &d_ts[t_offset];
    int32_t *s = &d_ss[s_offset];
    // Parameter triple for this thread's template.
    int32_t reward = d_params[params_idx*3];
    int32_t penalty = d_params[params_idx*3+1];
    int32_t accepteddist = d_params[params_idx*3+2];
    int32_t tmp = 0;
    for(int32_t j=0;j<s_len;j++){
        for(int32_t i=0;i<t_len;i++){
            int32_t distance = d_2d_cost_matrix[s[j]*8 + t[i]];
            if (distance <= accepteddist){
                // Match within tolerance: extend the best score with a reward.
                tmp = tmp_window[i]+reward;
            } else{
                // Mismatch: best of the three DP predecessors, each charged
                // a distance-weighted penalty.
                tmp = max(tmp_window[i]-penalty*distance,
                          max(tmp_window[i+1]-penalty*distance,
                              tmp_window[t_len+1]-penalty*distance));
            }
            tmp_window[i] = tmp_window[t_len+1];
            tmp_window[t_len+1] = tmp;
        }
        // Publish this sample's score and reset the scratch cell for the
        // next stream sample.
        tmp_window[t_len] = tmp_window[t_len+1];
        mss[j] = tmp_window[t_len+1];
        tmp_window[t_len+1] = 0;
    }
}
8c4e119a9845c8f1468634885ea60afe7f1ab55e.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
AIJCUSPARSE methods implemented with Cuda kernels. Uses cuSparse/Thrust maps from AIJCUSPARSE
*/
#define PETSC_SKIP_SPINLOCK
#define PETSC_SKIP_IMMINTRIN_H_CUDAWORKAROUND 1
#include <petscconf.h>
#include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#undef VecType
#include <../src/mat/impls/aij/seq/seqcusparse/cusparsematimpl.h>
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
#include <hip/hip_cooperative_groups.h>
#endif
/*
 * CHECK_LAUNCH_ERROR(): statement macro to place immediately after a kernel
 * launch. hipGetLastError() catches synchronous (pre-launch / bad config)
 * errors; the following hipDeviceSynchronize() then forces any asynchronous
 * in-kernel fault to surface. Either failure SETERRQs out of the calling
 * PETSc function. Note the synchronize is expensive — this is meant for
 * factorization setup / debug paths, not hot loops.
 * (Comments are kept outside the macro body: a // comment before a
 * line-continuation backslash would splice away the following macro line.)
 */
#define CHECK_LAUNCH_ERROR() \
do { \
/* Check synchronous errors, i.e. pre-launch */ \
hipError_t err = hipGetLastError(); \
if (hipSuccess != err) { \
SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cuda error: %s",hipGetErrorString(err)); \
} \
/* Check asynchronous errors, i.e. kernel failed (ULF) */ \
err = hipDeviceSynchronize(); \
if (hipSuccess != err) { \
SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cuda error: %s",hipGetErrorString(err)); \
} \
} while (0)
/*
LU BAND factorization with optimization for block diagonal (Nf blocks) in natural order (-mat_no_inode -pc_factor_mat_ordering_type rcm with Nf>1 fields)
requires:
structurally symmetric: fix with transpose/column meta data
*/
static PetscErrorCode MatLUFactorSymbolic_SeqAIJCUSPARSEBAND(Mat,Mat,IS,IS,const MatFactorInfo*);
static PetscErrorCode MatLUFactorNumeric_SeqAIJCUSPARSEBAND(Mat,Mat,const MatFactorInfo*);
/*
The GPU LU factor kernel
*/
/*
  Fills the CSR row-pointer array bi_csr for an n x n band matrix of
  bandwidth bw, stored densely along the band: bi_csr[r+1] is the closed-form
  count of stored entries in rows 0..r (triangular ramp-up n1L at the
  top-left, full band rows n2L + nug, and the bottom-right corner removed
  via `clip`), with bi_csr[0] = 0.
  Launch mapping: gridDim.x = Nf fields, gridDim.y = blocks per field; rows
  of a field are strided over threadIdx.y and only threadIdx.x == 0 writes.
  NOTE(review): assumes n is divisible by Nf (nloc = n/Nf) — confirm at the
  launch site.
*/
__global__
void __launch_bounds__(1024,1)
mat_lu_factor_band_init_set_i(const PetscInt n, const int bw, int bi_csr[])
{
  const PetscInt Nf = gridDim.x, Nblk = gridDim.y, nloc = n/Nf;
  const PetscInt field = blockIdx.x, blkIdx = blockIdx.y;
  // Row range [start_i, end_i) owned by this block, clamped to its field.
  const PetscInt nloc_i = (nloc/Nblk + !!(nloc%Nblk)), start_i = field*nloc + blkIdx*nloc_i, end_i = (start_i + nloc_i) > (field+1)*nloc ? (field+1)*nloc : (start_i + nloc_i);
  // set i (row+1)
  if (threadIdx.x + threadIdx.y + blockIdx.x + blockIdx.y == 0) bi_csr[0] = 0; // dummy at zero
  for (int rowb = start_i + threadIdx.y; rowb < end_i; rowb += blockDim.y) { // rows in block by thread y
    if (rowb < end_i && threadIdx.x==0) {
      // Closed-form prefix count of band entries through row rowb.
      PetscInt i=rowb+1, ni = (rowb>bw) ? bw+1 : i, n1L = ni*(ni-1)/2, nug= i*bw, n2L = bw*((rowb>bw) ? (rowb-bw) : 0), mi = bw + rowb + 1 - n, clip = (mi>0) ? mi*(mi-1)/2 + mi: 0;
      bi_csr[rowb+1] = n1L + nug - clip + n2L + i;
    }
  }
}
// copy AIJ to AIJ_BAND
// Scatter the permuted AIJ matrix (ai_d/aj_d/aa_d) into the banded CSR
// storage (bi_csr/ba_csr).  Pass 1 zeroes every band entry; pass 2 writes
// A's entries at band offset ic[col] - max(rowb-bw, 0), using the row perm
// r[] and column perm ic[].  Grid: blockIdx.x = field, blockIdx.y = row
// chunk; threadIdx.y strides rows, threadIdx.x strides entries within a row.
// The two passes never touch the same row concurrently from different
// threads, so no synchronization is needed between them within a row.
__global__
void __launch_bounds__(1024,1)
mat_lu_factor_band_copy_aij_aij(const PetscInt n, const int bw, const PetscInt r[], const PetscInt ic[],
                                const int ai_d[], const int aj_d[], const PetscScalar aa_d[],
                                const int bi_csr[], PetscScalar ba_csr[])
{
  const PetscInt Nf = gridDim.x, Nblk = gridDim.y, nloc = n/Nf;
  const PetscInt field = blockIdx.x, blkIdx = blockIdx.y;
  const PetscInt nloc_i = (nloc/Nblk + !!(nloc%Nblk)), start_i = field*nloc + blkIdx*nloc_i, end_i = (start_i + nloc_i) > (field+1)*nloc ? (field+1)*nloc : (start_i + nloc_i);
  // zero B
  if (threadIdx.x + threadIdx.y + blockIdx.x + blockIdx.y == 0) ba_csr[bi_csr[n]] = 0; // flop count at end
  for (int rowb = start_i + threadIdx.y; rowb < end_i; rowb += blockDim.y) { // rows in block by thread y
    if (rowb < end_i) {
      PetscScalar *batmp = ba_csr + bi_csr[rowb];
      const PetscInt nzb = bi_csr[rowb+1] - bi_csr[rowb];
      for (int j=threadIdx.x ; j<nzb ; j += blockDim.x) {
        if (j<nzb) {
          batmp[j] = 0;
        }
      }
    }
  }
  // copy A into B with CSR format -- these two loops can be fused
  for (int rowb = start_i + threadIdx.y; rowb < end_i; rowb += blockDim.y) { // rows in block by thread y
    if (rowb < end_i) {
      const PetscInt rowa = r[rowb], nza = ai_d[rowa+1] - ai_d[rowa];
      const int *ajtmp = aj_d + ai_d[rowa], bjStart = (rowb>bw) ? rowb-bw : 0;
      const PetscScalar *av = aa_d + ai_d[rowa];
      PetscScalar *batmp = ba_csr + bi_csr[rowb];
      /* load in initial (unfactored row) */
      for (int j=threadIdx.x ; j<nza ; j += blockDim.x) {
        if (j<nza) {
          PetscInt colb = ic[ajtmp[j]], idx = colb - bjStart;
          PetscScalar vala = av[j];
          batmp[idx] = vala;
        }
      }
    }
  }
}
// print AIJ_BAND
// Debug helper: a single thread (global thread 0) dumps the whole banded
// factor row by row, printing each stored value and the row-end offset.
__global__
void print_mat_aij_band(const PetscInt n, const int bi_csr[], const PetscScalar ba_csr[])
{
  const bool leader = (threadIdx.x + threadIdx.y + blockIdx.x + blockIdx.y == 0);
  if (!leader) return; // serialize: only one thread prints
  printf("B (AIJ) n=%d:\n",(int)n);
  for (int row = 0; row < n; row++) {
    const PetscInt rstart = bi_csr[row], nnzrow = bi_csr[row+1] - rstart;
    const PetscScalar *vals = ba_csr + rstart;
    for (PetscInt k = 0; k < nnzrow; k++) printf("(%13.6e) ",PetscRealPart(vals[k]));
    printf(" bi=%d\n",bi_csr[row+1]);
  }
}
// Band LU kernel --- ba_csr bi_csr
// Right-looking LU (no pivoting) on the banded storage: one outer iteration
// per pivot glbDD.  Threads with threadIdx.x == 0 first scale the pivot
// column (L update, *Aid /= Bdd), then all threads apply the rank-1 trailing
// update Aij -= Lid * baUd.  Rows are distributed across the Nblk blocks of
// a field via offset/inc.  When several blocks cooperate (use_group_sync
// non-NULL, requires a cooperative launch), a grid-wide barrier replaces
// __syncthreads() so the cross-block pivot dependence is honored.
__global__
void __launch_bounds__(1024,1)
mat_lu_factor_band(const PetscInt n, const PetscInt bw, const int bi_csr[], PetscScalar ba_csr[], int *use_group_sync)
{
  const PetscInt Nf = gridDim.x, Nblk = gridDim.y, nloc = n/Nf;
  const PetscInt field = blockIdx.x, blkIdx = blockIdx.y;
  const PetscInt start = field*nloc, end = start + nloc;
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
  auto g = cooperative_groups::this_grid();
#endif
  // A22 panel update for each row A(1,:) and col A(:,1)
  for (int glbDD=start, locDD = 0; glbDD<end; glbDD++, locDD++) {
    PetscInt tnzUd = bw, maxU = end-1 - glbDD; // we are chopping off the inter ears
    const PetscInt nzUd = (tnzUd>maxU) ? maxU : tnzUd, dOffset = (glbDD > bw) ? bw : glbDD; // global to go past ears after first
    PetscScalar *pBdd = ba_csr + bi_csr[glbDD] + dOffset;
    const PetscScalar *baUd = pBdd + 1; // vector of data U(i,i+1:end)
    const PetscScalar Bdd = *pBdd;
    const PetscInt offset = blkIdx*blockDim.y + threadIdx.y, inc = Nblk*blockDim.y;
    if (threadIdx.x==0) {
      for (int idx = offset, myi = glbDD + offset + 1; idx < nzUd; idx += inc, myi += inc) { /* assuming symmetric structure */
        const PetscInt bwi = myi > bw ? bw : myi, kIdx = bwi - (myi-glbDD); // cuts off just the first (global) block
        PetscScalar *Aid = ba_csr + bi_csr[myi] + kIdx;
        *Aid = *Aid/Bdd;
      }
    }
    __syncthreads(); // synch on threadIdx.x only
    for (int idx = offset, myi = glbDD + offset + 1; idx < nzUd; idx += inc, myi += inc) {
      const PetscInt bwi = myi > bw ? bw : myi, kIdx = bwi - (myi-glbDD); // cuts off just the first (global) block
      PetscScalar *Aid = ba_csr + bi_csr[myi] + kIdx;
      PetscScalar *Aij = Aid + 1;
      const PetscScalar Lid = *Aid;
      for (int jIdx=threadIdx.x ; jIdx<nzUd; jIdx += blockDim.x) {
        Aij[jIdx] -= Lid*baUd[jIdx];
      }
    }
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
    if (use_group_sync) {
      g.sync();
    } else {
      __syncthreads();
    }
#else
    __syncthreads();
#endif
  } /* endof for (i=0; i<n; i++) { */
}
static PetscErrorCode MatSolve_SeqAIJCUSPARSEBAND(Mat,Vec,Vec);
/* Numeric phase of the banded GPU LU factorization.
   Copies the (row/column permuted) AIJ matrix A into the band storage that
   the symbolic phase allocated, then runs the band LU kernel.  Launch
   geometry: Nf fields x Ni blocks, team of team_size x nVec threads; when
   Ni > 1 a cooperative launch is used so blocks can synchronize grid-wide at
   every pivot step.  Nf (and an optional SM-concurrency factor, encoded as
   Nf + 1000*nconcurrent) is read from the "Nf" container attached to A.
   On success B->ops->solve is set to the band solver. */
static PetscErrorCode MatLUFactorNumeric_SeqAIJCUSPARSEBAND(Mat B,Mat A,const MatFactorInfo *info)
{
  Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data;
  Mat_SeqAIJCUSPARSETriFactors *cusparseTriFactors = (Mat_SeqAIJCUSPARSETriFactors*)B->spptr;
  if (!cusparseTriFactors) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_COR,"Missing cusparseTriFactors");
  Mat_SeqAIJCUSPARSE *cusparsestructA = (Mat_SeqAIJCUSPARSE*)A->spptr;
  Mat_SeqAIJCUSPARSEMultStruct *matstructA;
  CsrMatrix *matrixA;
  PetscErrorCode ierr;
  hipError_t cerr;
  const PetscInt n=A->rmap->n, *ic, *r;
  const int *ai_d, *aj_d;
  const PetscScalar *aa_d;
  PetscScalar *ba_t = cusparseTriFactors->a_band_d;
  int *bi_t = cusparseTriFactors->i_band_d;
  PetscContainer container;
  int Ni = 10, team_size=9, Nf, nVec=56, nconcurrent = 1, nsm = -1;
  PetscFunctionBegin;
  if (A->rmap->n == 0) {
    PetscFunctionReturn(0);
  }
  // cusparse setup
  if (!cusparsestructA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_COR,"Missing cusparsestructA");
  matstructA = (Mat_SeqAIJCUSPARSEMultStruct*)cusparsestructA->mat; // matstruct->cprowIndices
  if (!matstructA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing mat struct");
  matrixA = (CsrMatrix*)matstructA->mat;
  if (!matrixA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing matrix cusparsestructA->mat->mat");
  // factor: get Nf if available
  ierr = PetscObjectQuery((PetscObject) A, "Nf", (PetscObject *) &container);CHKERRQ(ierr);
  if (container) {
    PetscInt *pNf=NULL;
    ierr = PetscContainerGetPointer(container, (void **) &pNf);CHKERRQ(ierr);
    Nf = (*pNf)%1000;
    if ((*pNf)/1000>0) nconcurrent = (*pNf)/1000; // number of SMs to use
  } else Nf = 1;
  if (n%Nf) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"n % Nf != 0 %D %D",n,Nf);
  // get data
  ic = thrust::raw_pointer_cast(cusparseTriFactors->cpermIndices->data());
  ai_d = thrust::raw_pointer_cast(matrixA->row_offsets->data());
  aj_d = thrust::raw_pointer_cast(matrixA->column_indices->data());
  aa_d = thrust::raw_pointer_cast(matrixA->values->data().get());
  r = thrust::raw_pointer_cast(cusparseTriFactors->rpermIndices->data());
  cerr = WaitForCUDA();CHKERRCUDA(cerr);
  ierr = PetscLogGpuTimeBegin();CHKERRQ(ierr);
  {
    /* recover the bandwidth from n and nz by inverting the symbolic fill count (quadratic formula) */
    int bw = (int)(2.*(double)n-1. - (double)(PetscSqrtReal(1.+4.*((double)n*(double)n-(double)b->nz))+PETSC_MACHINE_EPSILON))/2, bm1=bw-1,nl=n/Nf;
#if PETSC_PKG_CUDA_VERSION_LT(11,0,0)
    Ni = 1/nconcurrent;
    Ni = 1;
#else
    if (!cusparseTriFactors->init_dev_prop) {
      int gpuid;
      cusparseTriFactors->init_dev_prop = PETSC_TRUE;
      /* check return codes: consistent with the cerr = ...;CHKERRCUDA(cerr); convention used in this file */
      cerr = hipGetDevice(&gpuid);CHKERRCUDA(cerr);
      cerr = hipGetDeviceProperties(&cusparseTriFactors->dev_prop, gpuid);CHKERRCUDA(cerr);
    }
    nsm = cusparseTriFactors->dev_prop.multiProcessorCount;
    Ni = nsm/Nf/nconcurrent;
#endif
    team_size = bw/Ni + !!(bw%Ni);
    nVec = PetscMin(bw, 1024/team_size);
    ierr = PetscInfo7(A,"Matrix Bandwidth = %d, number SMs/block = %d, num concurency = %d, num fields = %d, numSMs/GPU = %d, thread group size = %d,%d\n",bw,Ni,nconcurrent,Nf,nsm,team_size,nVec);CHKERRQ(ierr);
    {
      dim3 dimBlockTeam(nVec,team_size);
      dim3 dimBlockLeague(Nf,Ni);
      hipLaunchKernelGGL(( mat_lu_factor_band_copy_aij_aij), dim3(dimBlockLeague),dim3(dimBlockTeam), 0, 0, n, bw, r, ic, ai_d, aj_d, aa_d, bi_t, ba_t);
      CHECK_LAUNCH_ERROR(); // does a sync
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
      if (Ni > 1) {
        /* multiple blocks per field need a grid-wide barrier each pivot -> cooperative launch */
        void *kernelArgs[] = { (void*)&n, (void*)&bw, (void*)&bi_t, (void*)&ba_t, (void*)&nsm };
        cerr = hipLaunchCooperativeKernel((void*)mat_lu_factor_band, dimBlockLeague, dimBlockTeam, kernelArgs, 0, NULL);CHKERRCUDA(cerr);
      } else {
        hipLaunchKernelGGL(( mat_lu_factor_band), dim3(dimBlockLeague),dim3(dimBlockTeam), 0, 0, n, bw, bi_t, ba_t, NULL);
      }
#else
      hipLaunchKernelGGL(( mat_lu_factor_band), dim3(dimBlockLeague),dim3(dimBlockTeam), 0, 0, n, bw, bi_t, ba_t, NULL);
#endif
      CHECK_LAUNCH_ERROR(); // does a sync
#if defined(PETSC_USE_LOG)
      ierr = PetscLogGpuFlops((PetscLogDouble)Nf*(bm1*(bm1 + 1)*(PetscLogDouble)(2*bm1 + 1)/3 + (PetscLogDouble)2*(nl-bw)*bw*bw + (PetscLogDouble)nl*(nl+1)/2));CHKERRQ(ierr);
#endif
    }
  }
  ierr = PetscLogGpuTimeEnd();CHKERRQ(ierr);
  /* determine which version of MatSolve needs to be used. from MatLUFactorNumeric_AIJ_SeqAIJCUSPARSE */
  B->ops->solve = MatSolve_SeqAIJCUSPARSEBAND;
  B->ops->solvetranspose = NULL; // need transpose
  B->ops->matsolve = NULL;
  B->ops->matsolvetranspose = NULL;
  PetscFunctionReturn(0);
}
/* PetscContainer destructor for the "Nf" attachment: frees the
   heap-allocated field count stored on the factor matrix. */
static PetscErrorCode MatrixNfDestroy(void *ptr)
{
  PetscErrorCode ierr;
  PetscInt       *count = (PetscInt*)ptr;
  PetscFunctionBegin;
  ierr = PetscFree(count);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* Symbolic phase of the banded GPU LU factorization.
   Validates the input (square, no missing diagonal, structurally symmetric),
   propagates the "Nf" field-count container from A to B, measures the matrix
   bandwidth under the given ordering, allocates the dense-band GPU storage
   (values ba_t and row offsets bi_t, offsets filled by a kernel), caches the
   row/column permutations on the GPU, and wires B up as an LU factor whose
   numeric phase is MatLUFactorNumeric_SeqAIJCUSPARSEBAND. */
PetscErrorCode MatLUFactorSymbolic_SeqAIJCUSPARSEBAND(Mat B,Mat A,IS isrow,IS iscol,const MatFactorInfo *info)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data,*b;
  IS isicol;
  PetscErrorCode ierr;
  hipError_t cerr;
  const PetscInt *ic,*ai=a->i,*aj=a->j;
  PetscScalar *ba_t;
  int *bi_t;
  PetscInt i,n=A->rmap->n,Nf;
  PetscInt nzBcsr,bwL,bwU;
  PetscBool missing;
  Mat_SeqAIJCUSPARSETriFactors *cusparseTriFactors = (Mat_SeqAIJCUSPARSETriFactors*)B->spptr;
  PetscContainer container;
  PetscFunctionBegin;
  if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"matrix must be square");
  ierr = MatMissingDiagonal(A,&missing,&i);CHKERRQ(ierr);
  if (missing) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Matrix is missing diagonal entry %D",i);
  if (!cusparseTriFactors) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"!cusparseTriFactors");
  ierr = MatGetOption(A,MAT_STRUCTURALLY_SYMMETRIC,&missing);CHKERRQ(ierr);
  if (!missing) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"only structrally symmetric matrices supported");
  // factor: get Nf if available
  ierr = PetscObjectQuery((PetscObject) A, "Nf", (PetscObject *) &container);CHKERRQ(ierr);
  if (container) {
    PetscInt *pNf=NULL;
    ierr = PetscContainerGetPointer(container, (void **) &pNf);CHKERRQ(ierr);
    Nf = (*pNf)%1000;
    /* re-attach a fresh copy of Nf to the factor B (with its own destructor) */
    ierr = PetscContainerCreate(PETSC_COMM_SELF, &container);CHKERRQ(ierr);
    ierr = PetscMalloc(sizeof(PetscInt), &pNf);CHKERRQ(ierr);
    *pNf = Nf;
    ierr = PetscContainerSetPointer(container, (void *)pNf);CHKERRQ(ierr);
    ierr = PetscContainerSetUserDestroy(container, MatrixNfDestroy);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject)B, "Nf", (PetscObject) container);CHKERRQ(ierr);
    ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
  } else Nf = 1;
  if (n%Nf) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"n % Nf != 0 %D %D",n,Nf);
  ierr = ISInvertPermutation(iscol,PETSC_DECIDE,&isicol);CHKERRQ(ierr);
  ierr = ISGetIndices(isicol,&ic);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation_SeqAIJ(B,MAT_SKIP_ALLOCATION,NULL);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)isicol);CHKERRQ(ierr);
  b = (Mat_SeqAIJ*)(B)->data;
  /* get band widths, MatComputeBandwidth should take a reordering ic and do this */
  bwL = bwU = 0;
  for (int rwb=0; rwb<n; rwb++) {
    const PetscInt rwa = ic[rwb], anz = ai[rwb+1] - ai[rwb], *ajtmp = aj + ai[rwb];
    for (int j=0;j<anz;j++) {
      PetscInt colb = ic[ajtmp[j]];
      if (colb<rwa) { // L
        if (rwa-colb > bwL) bwL = rwa-colb;
      } else {
        if (colb-rwa > bwU) bwU = colb-rwa;
      }
    }
  }
  ierr = ISRestoreIndices(isicol,&ic);CHKERRQ(ierr);
  /* only support structurally symmetric, but it might work */
  if (bwL!=bwU) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Only symmetric structure supported (now) W_L=%D W_U=%D",bwL,bwU);
  ierr = MatSeqAIJCUSPARSETriFactors_Reset(&cusparseTriFactors);CHKERRQ(ierr);
  /* band fill count: full band of width 2*bw+1 minus the two triangular corners */
  nzBcsr = n + (2*n-1)*bwU - bwU*bwU;
  b->maxnz = b->nz = nzBcsr;
  cusparseTriFactors->nnz = b->nz; // only meta data needed: n & nz
  ierr = PetscInfo2(A,"Matrix Bandwidth = %D, nnz = %D\n",bwL,b->nz);CHKERRQ(ierr);
  if (!cusparseTriFactors->workVector) { cusparseTriFactors->workVector = new THRUSTARRAY(n); }
  cerr = hipMalloc(&ba_t,(b->nz+1)*sizeof(PetscScalar));CHKERRCUDA(cerr); // include a place for flops
  cerr = hipMalloc(&bi_t,(n+1)*sizeof(int));CHKERRCUDA(cerr);
  cusparseTriFactors->a_band_d = ba_t;
  cusparseTriFactors->i_band_d = bi_t;
  /* In b structure: Free imax, ilen, old a, old j. Allocate solve_work, new a, new j */
  ierr = PetscLogObjectMemory((PetscObject)B,(nzBcsr+1)*(sizeof(PetscInt)+sizeof(PetscScalar)));CHKERRQ(ierr);
  {
    dim3 dimBlockTeam(1,128);
    dim3 dimBlockLeague(Nf,1);
    hipLaunchKernelGGL(( mat_lu_factor_band_init_set_i), dim3(dimBlockLeague),dim3(dimBlockTeam), 0, 0, n, bwU, bi_t);
  }
  CHECK_LAUNCH_ERROR(); // does a sync
  // setup data
  if (!cusparseTriFactors->rpermIndices) {
    const PetscInt *r;
    ierr = ISGetIndices(isrow,&r);CHKERRQ(ierr);
    cusparseTriFactors->rpermIndices = new THRUSTINTARRAY(n);
    cusparseTriFactors->rpermIndices->assign(r, r+n);
    ierr = ISRestoreIndices(isrow,&r);CHKERRQ(ierr);
    ierr = PetscLogCpuToGpu(n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* upper triangular indices */
  if (!cusparseTriFactors->cpermIndices) {
    const PetscInt *c;
    ierr = ISGetIndices(isicol,&c);CHKERRQ(ierr);
    cusparseTriFactors->cpermIndices = new THRUSTINTARRAY(n);
    cusparseTriFactors->cpermIndices->assign(c, c+n);
    ierr = ISRestoreIndices(isicol,&c);CHKERRQ(ierr);
    ierr = PetscLogCpuToGpu(n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  /* put together the new matrix */
  b->free_a = PETSC_FALSE;
  b->free_ij = PETSC_FALSE;
  b->singlemalloc = PETSC_FALSE;
  b->ilen = NULL;
  b->imax = NULL;
  b->row = isrow;
  b->col = iscol;
  ierr = PetscObjectReference((PetscObject)isrow);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)iscol);CHKERRQ(ierr);
  b->icol = isicol;
  ierr = PetscMalloc1(n+1,&b->solve_work);CHKERRQ(ierr);
  B->factortype = MAT_FACTOR_LU;
  B->info.factor_mallocs = 0;
  B->info.fill_ratio_given = 0;
  if (ai[n]) {
    B->info.fill_ratio_needed = ((PetscReal)(nzBcsr))/((PetscReal)ai[n]);
  } else {
    B->info.fill_ratio_needed = 0.0;
  }
#if defined(PETSC_USE_INFO)
  if (ai[n] != 0) {
    PetscReal af = B->info.fill_ratio_needed;
    ierr = PetscInfo1(A,"Band fill ratio %g\n",(double)af);CHKERRQ(ierr);
  } else {
    ierr = PetscInfo(A,"Empty matrix\n");CHKERRQ(ierr);
  }
#endif
  if (a->inode.size) {
    ierr = PetscInfo(A,"Warning: using inodes in band solver.\n");CHKERRQ(ierr);
  }
  ierr = MatSeqAIJCheckInode_FactorLU(B);CHKERRQ(ierr);
  B->ops->lufactornumeric = MatLUFactorNumeric_SeqAIJCUSPARSEBAND;
  B->offloadmask = PETSC_OFFLOAD_GPU;
  PetscFunctionReturn(0);
}
/* Use -pc_factor_mat_solver_type cusparseband */
/* Query callback: reports this factor package's MatSolverType name. */
PetscErrorCode MatFactorGetSolverType_seqaij_cusparse_band(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERCUSPARSEBAND;
  PetscFunctionReturn(0);
}
/* Factory for the CUSPARSEBAND factor: creates an empty SEQAIJCUSPARSE
   factor matrix B the same size as A, installs the band symbolic LU entry
   point (only MAT_FACTOR_LU is supported) and declares RCM as the preferred
   ordering. */
PETSC_EXTERN PetscErrorCode MatGetFactor_seqaijcusparse_cusparse_band(Mat A,MatFactorType ftype,Mat *B)
{
  PetscErrorCode ierr;
  PetscInt n = A->rmap->n;
  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),B);CHKERRQ(ierr);
  ierr = MatSetSizes(*B,n,n,n,n);CHKERRQ(ierr);
  (*B)->factortype = ftype;
  (*B)->canuseordering = PETSC_TRUE;
  ierr = MatSetType(*B,MATSEQAIJCUSPARSE);CHKERRQ(ierr);
  if (ftype == MAT_FACTOR_LU) {
    ierr = MatSetBlockSizesFromMats(*B,A,A);CHKERRQ(ierr);
    (*B)->ops->ilufactorsymbolic = NULL; // MatILUFactorSymbolic_SeqAIJCUSPARSE;
    (*B)->ops->lufactorsymbolic = MatLUFactorSymbolic_SeqAIJCUSPARSEBAND;
    ierr = PetscStrallocpy(MATORDERINGRCM,(char**)&(*B)->preferredordering[MAT_FACTOR_LU]);CHKERRQ(ierr);
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor type not supported for CUSPARSEBAND Matrix Types");
  ierr = MatSeqAIJSetPreallocation(*B,MAT_SKIP_ALLOCATION,NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)(*B),"MatFactorGetSolverType_C",MatFactorGetSolverType_seqaij_cusparse_band);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#define WARP_SIZE 32
// Warp-level tree sum via __shfl_down_sync: after log2(WARP_SIZE) halving
// steps, lane 0 holds the sum of all WARP_SIZE lanes (other lanes hold
// partial sums).  Assumes the full warp participates (0xffffffff mask).
template <typename T>
__forceinline__ __device__
T wreduce(T a)
{
#pragma unroll
  for (int offset = WARP_SIZE/2; offset > 0; offset >>= 1)
    a += __shfl_down_sync(0xffffffff, a, offset);
  return a;
}
// reduce in a block, returns result in thread 0
// Two-stage block sum: (1) each warp reduces with wreduce and its lane 0
// stores the partial into shared memory, (2) warp 0 loads the per-warp
// partials (zero-padding lanes >= NWARP) and reduces them with shuffles.
// Requires BLOCK_SIZE to be a multiple of WARP_SIZE and NWARP <= WARP_SIZE.
// NOTE(review): when called repeatedly in a loop the caller must have a
// __syncthreads() between calls (the solve loops below do) so a fast warp
// cannot overwrite buf before warp 0 has read it.
template <typename T, int BLOCK_SIZE>
__device__
T breduce(T a)
{
  constexpr int NWARP = BLOCK_SIZE/WARP_SIZE;
  __shared__ T buf[NWARP]; // per-warp partials; use T (was hard-coded double) so single-precision builds do not silently widen/narrow
  int wid = threadIdx.x / WARP_SIZE;
  int laneid = threadIdx.x % WARP_SIZE;
  T b = wreduce<T>(a);
  if (laneid == 0)
    buf[wid] = b;
  __syncthreads();
  if (wid == 0) {
    if (threadIdx.x < NWARP)
      a = buf[threadIdx.x];
    else
      a = 0;
    for (int i = (NWARP+1)/2; i >= 1; i = i >> 1) {
      a += __shfl_down_sync(0xffffffff, a, i);
    }
  }
  return a;
}
// Band triangular solve kernel --- forward L solve then backward U solve on
// the factored band ba_csr; x is overwritten with the solution in place.
// One block per field (gridDim.x = Nf); BLOCK_SIZE threads accumulate the
// dot-product terms, combined with breduce (result valid in thread 0 only).
// pLi walks the band storage directly; the pointer-increment bookkeeping
// accounts for the clipped "ears" at the top/bottom of each field (chopnz /
// blocknz / blocknz_0) -- presumably matching the layout written by
// mat_lu_factor_band_init_set_i; verify against that kernel before changing.
template <int BLOCK_SIZE>
__global__
void __launch_bounds__(256,1)
mat_solve_band(const PetscInt n, const PetscInt bw, const PetscScalar ba_csr[], PetscScalar x[])
{
  const PetscInt Nf = gridDim.x, nloc = n/Nf, field = blockIdx.x, start = field*nloc, end = start + nloc, chopnz = bw*(bw+1)/2, blocknz=(2*bw+1)*nloc, blocknz_0 = blocknz-chopnz;
  const PetscScalar *pLi;
  const int tid = threadIdx.x;
  /* Next, solve L */
  pLi = ba_csr + (field==0 ? 0 : blocknz_0 + (field-1)*blocknz + bw); // diagonal (0,0) in field
  for (int glbDD=start, locDD = 0; glbDD<end; glbDD++, locDD++) {
    const PetscInt col = locDD<bw ? start : (glbDD-bw);
    PetscScalar t = 0;
    for (int j=col+tid,idx=tid;j<glbDD;j+=blockDim.x,idx+=blockDim.x) {
      t += pLi[idx]*x[j];
    }
#if defined(PETSC_USE_COMPLEX)
    PetscReal tr = PetscRealPartComplex(t), ti = PetscImaginaryPartComplex(t);
    PetscScalar tt(breduce<PetscReal,BLOCK_SIZE>(tr), breduce<PetscReal,BLOCK_SIZE>(ti));
    t = tt;
#else
    t = breduce<PetscReal,BLOCK_SIZE>(t);
#endif
    if (threadIdx.x == 0)
      x[glbDD] -= t; // /1.0  (L has unit diagonal)
    __syncthreads();
    // inc
    pLi += glbDD-col; // get to diagonal
    if (glbDD > n-1-bw) pLi += n-1-glbDD; // skip over U, only last block has funny offset
    else pLi += bw;
    pLi += 1; // skip to next row
    if (field>0 && (locDD+1)<bw) pLi += bw-(locDD+1); // skip padding at beginning (ear)
  }
  /* Then, solve U */
  pLi = ba_csr + Nf*blocknz - 2*chopnz - 1; // end of real data on block (diagonal)
  if (field != Nf-1) pLi -= blocknz_0 + (Nf-2-field)*blocknz + bw; // diagonal of last local row
  for (int glbDD=end-1, locDD = 0; glbDD >= start; glbDD--, locDD++) {
    const PetscInt col = (locDD<bw) ? end-1 : glbDD+bw; // end of row in U
    PetscScalar t = 0;
    for (int j=col-tid,idx=tid;j>glbDD;j-=blockDim.x,idx+=blockDim.x) {
      t += pLi[-idx]*x[j];
    }
#if defined(PETSC_USE_COMPLEX)
    PetscReal tr = PetscRealPartComplex(t), ti = PetscImaginaryPartComplex(t);
    PetscScalar tt(breduce<PetscReal,BLOCK_SIZE>(tr), breduce<PetscReal,BLOCK_SIZE>(ti));
    t = tt;
#else
    t = breduce<PetscReal,BLOCK_SIZE>(PetscRealPart(t));
#endif
    pLi -= col-glbDD; // diagonal
    if (threadIdx.x == 0) {
      x[glbDD] -= t;
      x[glbDD] /= pLi[0]; // U diagonal entry
    }
    __syncthreads();
    // inc past L to start of previous U
    pLi -= bw+1;
    if (glbDD<bw) pLi += bw-glbDD; // overshot in top left corner
    if (((locDD+1) < bw) && field != Nf-1) pLi -= (bw - (locDD+1)); // skip past right corner
  }
}
/* Solve A x = b with the banded GPU factor: permute b by the row
   permutation into the work vector, run the band solve kernel (one block
   per field, 128 threads), then permute the result by the column
   permutation into x.  The bandwidth is recovered from n and nnz by
   inverting the symbolic fill formula (quadratic formula). */
static PetscErrorCode MatSolve_SeqAIJCUSPARSEBAND(Mat A,Vec bb,Vec xx)
{
  const PetscScalar *barray;
  PetscScalar *xarray;
  thrust::device_ptr<const PetscScalar> bGPU;
  thrust::device_ptr<PetscScalar> xGPU;
  Mat_SeqAIJCUSPARSETriFactors *cusparseTriFactors = (Mat_SeqAIJCUSPARSETriFactors*)A->spptr;
  THRUSTARRAY *tempGPU = (THRUSTARRAY*)cusparseTriFactors->workVector;
  PetscInt n=A->rmap->n, nz=cusparseTriFactors->nnz, Nf;
  PetscInt bw = (int)(2.*(double)n-1.-(double)(PetscSqrtReal(1.+4.*((double)n*(double)n-(double)nz))+PETSC_MACHINE_EPSILON))/2; // quadric formula for bandwidth
  PetscErrorCode ierr;
  PetscContainer container;
  PetscFunctionBegin;
  if (A->rmap->n == 0) {
    PetscFunctionReturn(0);
  }
  // factor: get Nf if available
  ierr = PetscObjectQuery((PetscObject) A, "Nf", (PetscObject *) &container);CHKERRQ(ierr);
  if (container) {
    PetscInt *pNf=NULL;
    ierr = PetscContainerGetPointer(container, (void **) &pNf);CHKERRQ(ierr);
    Nf = (*pNf)%1000;
  } else Nf = 1;
  if (n%Nf) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"n(%D) % Nf(%D) != 0",n,Nf);
  /* Get the GPU pointers */
  ierr = VecCUDAGetArrayWrite(xx,&xarray);CHKERRQ(ierr);
  ierr = VecCUDAGetArrayRead(bb,&barray);CHKERRQ(ierr);
  xGPU = thrust::device_pointer_cast(xarray);
  bGPU = thrust::device_pointer_cast(barray);
  ierr = PetscLogGpuTimeBegin();CHKERRQ(ierr);
  /* First, reorder with the row permutation */
  thrust::copy(thrust::hip::par.on(PetscDefaultCudaStream),thrust::make_permutation_iterator(bGPU, cusparseTriFactors->rpermIndices->begin()),
               thrust::make_permutation_iterator(bGPU, cusparseTriFactors->rpermIndices->end()),
               tempGPU->begin());
  constexpr int block = 128;
  hipLaunchKernelGGL(( mat_solve_band<block>), dim3(Nf),dim3(block), 0, 0, n,bw,cusparseTriFactors->a_band_d,tempGPU->data().get());
  CHECK_LAUNCH_ERROR(); // does a sync
  /* Last, reorder with the column permutation */
  thrust::copy(thrust::hip::par.on(PetscDefaultCudaStream),thrust::make_permutation_iterator(tempGPU->begin(), cusparseTriFactors->cpermIndices->begin()),
               thrust::make_permutation_iterator(tempGPU->begin(), cusparseTriFactors->cpermIndices->end()),
               xGPU);
  ierr = VecCUDARestoreArrayRead(bb,&barray);CHKERRQ(ierr);
  ierr = VecCUDARestoreArrayWrite(xx,&xarray);CHKERRQ(ierr);
  ierr = PetscLogGpuFlops(2.0*cusparseTriFactors->nnz - A->cmap->n);CHKERRQ(ierr);
  ierr = PetscLogGpuTimeEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
| 8c4e119a9845c8f1468634885ea60afe7f1ab55e.cu | /*
AIJCUSPARSE methods implemented with Cuda kernels. Uses cuSparse/Thrust maps from AIJCUSPARSE
*/
#define PETSC_SKIP_SPINLOCK
#define PETSC_SKIP_IMMINTRIN_H_CUDAWORKAROUND 1
#include <petscconf.h>
#include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#undef VecType
#include <../src/mat/impls/aij/seq/seqcusparse/cusparsematimpl.h>
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
#include <cooperative_groups.h>
#endif
/* Post-launch error check.  cudaGetLastError() catches synchronous errors
   (bad launch configuration), then cudaDeviceSynchronize() surfaces
   asynchronous in-kernel failures.  NOTE: the synchronize makes this a
   blocking check -- use only at coarse boundaries, as the callers below do. */
#define CHECK_LAUNCH_ERROR() \
do { \
  /* Check synchronous errors, i.e. pre-launch */ \
  cudaError_t err = cudaGetLastError(); \
  if (cudaSuccess != err) { \
    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cuda error: %s",cudaGetErrorString(err)); \
  } \
  /* Check asynchronous errors, i.e. kernel failed (ULF) */ \
  err = cudaDeviceSynchronize(); \
  if (cudaSuccess != err) { \
    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cuda error: %s",cudaGetErrorString(err)); \
  } \
} while (0)
/*
LU BAND factorization with optimization for block diagonal (Nf blocks) in natural order (-mat_no_inode -pc_factor_mat_ordering_type rcm with Nf>1 fields)
requires:
structurally symmetric: fix with transpose/column meta data
*/
static PetscErrorCode MatLUFactorSymbolic_SeqAIJCUSPARSEBAND(Mat,Mat,IS,IS,const MatFactorInfo*);
static PetscErrorCode MatLUFactorNumeric_SeqAIJCUSPARSEBAND(Mat,Mat,const MatFactorInfo*);
/*
The GPU LU factor kernel
*/
// Build the CSR row-offset array bi_csr for the banded factor storage.
// Launch layout: gridDim.x = Nf fields (diagonal blocks), gridDim.y = Nblk
// row chunks per field; threadIdx.y strides over rows and only lane
// threadIdx.x == 0 writes.  bi_csr[rowb+1] is a closed-form cumulative count
// of band entries through row rowb -- the n1L/clip terms appear to account
// for the triangular "ears" at the top/bottom of the band (verify against
// the consumers mat_lu_factor_band_copy_aij_aij / mat_lu_factor_band).
__global__
void __launch_bounds__(1024,1)
mat_lu_factor_band_init_set_i(const PetscInt n, const int bw, int bi_csr[])
{
  const PetscInt Nf = gridDim.x, Nblk = gridDim.y, nloc = n/Nf;
  const PetscInt field = blockIdx.x, blkIdx = blockIdx.y;
  const PetscInt nloc_i = (nloc/Nblk + !!(nloc%Nblk)), start_i = field*nloc + blkIdx*nloc_i, end_i = (start_i + nloc_i) > (field+1)*nloc ? (field+1)*nloc : (start_i + nloc_i);
  // set i (row+1)
  if (threadIdx.x + threadIdx.y + blockIdx.x + blockIdx.y == 0) bi_csr[0] = 0; // dummy at zero
  for (int rowb = start_i + threadIdx.y; rowb < end_i; rowb += blockDim.y) { // rows in block by thread y
    if (rowb < end_i && threadIdx.x==0) {
      PetscInt i=rowb+1, ni = (rowb>bw) ? bw+1 : i, n1L = ni*(ni-1)/2, nug= i*bw, n2L = bw*((rowb>bw) ? (rowb-bw) : 0), mi = bw + rowb + 1 - n, clip = (mi>0) ? mi*(mi-1)/2 + mi: 0;
      bi_csr[rowb+1] = n1L + nug - clip + n2L + i;
    }
  }
}
// copy AIJ to AIJ_BAND
// Scatter the permuted AIJ matrix (ai_d/aj_d/aa_d) into the banded CSR
// storage (bi_csr/ba_csr).  Pass 1 zeroes every band entry; pass 2 writes
// A's entries at band offset ic[col] - max(rowb-bw, 0), using the row perm
// r[] and column perm ic[].  Grid: blockIdx.x = field, blockIdx.y = row
// chunk; threadIdx.y strides rows, threadIdx.x strides entries within a row.
// The two passes never touch the same row concurrently from different
// threads, so no synchronization is needed between them within a row.
__global__
void __launch_bounds__(1024,1)
mat_lu_factor_band_copy_aij_aij(const PetscInt n, const int bw, const PetscInt r[], const PetscInt ic[],
                                const int ai_d[], const int aj_d[], const PetscScalar aa_d[],
                                const int bi_csr[], PetscScalar ba_csr[])
{
  const PetscInt Nf = gridDim.x, Nblk = gridDim.y, nloc = n/Nf;
  const PetscInt field = blockIdx.x, blkIdx = blockIdx.y;
  const PetscInt nloc_i = (nloc/Nblk + !!(nloc%Nblk)), start_i = field*nloc + blkIdx*nloc_i, end_i = (start_i + nloc_i) > (field+1)*nloc ? (field+1)*nloc : (start_i + nloc_i);
  // zero B
  if (threadIdx.x + threadIdx.y + blockIdx.x + blockIdx.y == 0) ba_csr[bi_csr[n]] = 0; // flop count at end
  for (int rowb = start_i + threadIdx.y; rowb < end_i; rowb += blockDim.y) { // rows in block by thread y
    if (rowb < end_i) {
      PetscScalar *batmp = ba_csr + bi_csr[rowb];
      const PetscInt nzb = bi_csr[rowb+1] - bi_csr[rowb];
      for (int j=threadIdx.x ; j<nzb ; j += blockDim.x) {
        if (j<nzb) {
          batmp[j] = 0;
        }
      }
    }
  }
  // copy A into B with CSR format -- these two loops can be fused
  for (int rowb = start_i + threadIdx.y; rowb < end_i; rowb += blockDim.y) { // rows in block by thread y
    if (rowb < end_i) {
      const PetscInt rowa = r[rowb], nza = ai_d[rowa+1] - ai_d[rowa];
      const int *ajtmp = aj_d + ai_d[rowa], bjStart = (rowb>bw) ? rowb-bw : 0;
      const PetscScalar *av = aa_d + ai_d[rowa];
      PetscScalar *batmp = ba_csr + bi_csr[rowb];
      /* load in initial (unfactored row) */
      for (int j=threadIdx.x ; j<nza ; j += blockDim.x) {
        if (j<nza) {
          PetscInt colb = ic[ajtmp[j]], idx = colb - bjStart;
          PetscScalar vala = av[j];
          batmp[idx] = vala;
        }
      }
    }
  }
}
// print AIJ_BAND
// Debug helper: a single thread (global thread 0) dumps the whole banded
// factor row by row, printing each stored value and the row-end offset.
__global__
void print_mat_aij_band(const PetscInt n, const int bi_csr[], const PetscScalar ba_csr[])
{
  const bool leader = (threadIdx.x + threadIdx.y + blockIdx.x + blockIdx.y == 0);
  if (!leader) return; // serialize: only one thread prints
  printf("B (AIJ) n=%d:\n",(int)n);
  for (int row = 0; row < n; row++) {
    const PetscInt rstart = bi_csr[row], nnzrow = bi_csr[row+1] - rstart;
    const PetscScalar *vals = ba_csr + rstart;
    for (PetscInt k = 0; k < nnzrow; k++) printf("(%13.6e) ",PetscRealPart(vals[k]));
    printf(" bi=%d\n",bi_csr[row+1]);
  }
}
// Band LU kernel --- ba_csr bi_csr
// Right-looking LU (no pivoting) on the banded storage: one outer iteration
// per pivot glbDD.  Threads with threadIdx.x == 0 first scale the pivot
// column (L update, *Aid /= Bdd), then all threads apply the rank-1 trailing
// update Aij -= Lid * baUd.  Rows are distributed across the Nblk blocks of
// a field via offset/inc.  When several blocks cooperate (use_group_sync
// non-NULL, requires a cooperative launch), a grid-wide barrier replaces
// __syncthreads() so the cross-block pivot dependence is honored.
__global__
void __launch_bounds__(1024,1)
mat_lu_factor_band(const PetscInt n, const PetscInt bw, const int bi_csr[], PetscScalar ba_csr[], int *use_group_sync)
{
  const PetscInt Nf = gridDim.x, Nblk = gridDim.y, nloc = n/Nf;
  const PetscInt field = blockIdx.x, blkIdx = blockIdx.y;
  const PetscInt start = field*nloc, end = start + nloc;
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
  auto g = cooperative_groups::this_grid();
#endif
  // A22 panel update for each row A(1,:) and col A(:,1)
  for (int glbDD=start, locDD = 0; glbDD<end; glbDD++, locDD++) {
    PetscInt tnzUd = bw, maxU = end-1 - glbDD; // we are chopping off the inter ears
    const PetscInt nzUd = (tnzUd>maxU) ? maxU : tnzUd, dOffset = (glbDD > bw) ? bw : glbDD; // global to go past ears after first
    PetscScalar *pBdd = ba_csr + bi_csr[glbDD] + dOffset;
    const PetscScalar *baUd = pBdd + 1; // vector of data U(i,i+1:end)
    const PetscScalar Bdd = *pBdd;
    const PetscInt offset = blkIdx*blockDim.y + threadIdx.y, inc = Nblk*blockDim.y;
    if (threadIdx.x==0) {
      for (int idx = offset, myi = glbDD + offset + 1; idx < nzUd; idx += inc, myi += inc) { /* assuming symmetric structure */
        const PetscInt bwi = myi > bw ? bw : myi, kIdx = bwi - (myi-glbDD); // cuts off just the first (global) block
        PetscScalar *Aid = ba_csr + bi_csr[myi] + kIdx;
        *Aid = *Aid/Bdd;
      }
    }
    __syncthreads(); // synch on threadIdx.x only
    for (int idx = offset, myi = glbDD + offset + 1; idx < nzUd; idx += inc, myi += inc) {
      const PetscInt bwi = myi > bw ? bw : myi, kIdx = bwi - (myi-glbDD); // cuts off just the first (global) block
      PetscScalar *Aid = ba_csr + bi_csr[myi] + kIdx;
      PetscScalar *Aij = Aid + 1;
      const PetscScalar Lid = *Aid;
      for (int jIdx=threadIdx.x ; jIdx<nzUd; jIdx += blockDim.x) {
        Aij[jIdx] -= Lid*baUd[jIdx];
      }
    }
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
    if (use_group_sync) {
      g.sync();
    } else {
      __syncthreads();
    }
#else
    __syncthreads();
#endif
  } /* endof for (i=0; i<n; i++) { */
}
static PetscErrorCode MatSolve_SeqAIJCUSPARSEBAND(Mat,Vec,Vec);
/* Numeric phase of the banded GPU LU factorization.
   Copies the (row/column permuted) AIJ matrix A into the band storage that
   the symbolic phase allocated, then runs the band LU kernel.  Launch
   geometry: Nf fields x Ni blocks, team of team_size x nVec threads; when
   Ni > 1 a cooperative launch is used so blocks can synchronize grid-wide at
   every pivot step.  Nf (and an optional SM-concurrency factor, encoded as
   Nf + 1000*nconcurrent) is read from the "Nf" container attached to A.
   On success B->ops->solve is set to the band solver. */
static PetscErrorCode MatLUFactorNumeric_SeqAIJCUSPARSEBAND(Mat B,Mat A,const MatFactorInfo *info)
{
  Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data;
  Mat_SeqAIJCUSPARSETriFactors *cusparseTriFactors = (Mat_SeqAIJCUSPARSETriFactors*)B->spptr;
  if (!cusparseTriFactors) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_COR,"Missing cusparseTriFactors");
  Mat_SeqAIJCUSPARSE *cusparsestructA = (Mat_SeqAIJCUSPARSE*)A->spptr;
  Mat_SeqAIJCUSPARSEMultStruct *matstructA;
  CsrMatrix *matrixA;
  PetscErrorCode ierr;
  cudaError_t cerr;
  const PetscInt n=A->rmap->n, *ic, *r;
  const int *ai_d, *aj_d;
  const PetscScalar *aa_d;
  PetscScalar *ba_t = cusparseTriFactors->a_band_d;
  int *bi_t = cusparseTriFactors->i_band_d;
  PetscContainer container;
  int Ni = 10, team_size=9, Nf, nVec=56, nconcurrent = 1, nsm = -1;
  PetscFunctionBegin;
  if (A->rmap->n == 0) {
    PetscFunctionReturn(0);
  }
  // cusparse setup
  if (!cusparsestructA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_COR,"Missing cusparsestructA");
  matstructA = (Mat_SeqAIJCUSPARSEMultStruct*)cusparsestructA->mat; // matstruct->cprowIndices
  if (!matstructA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing mat struct");
  matrixA = (CsrMatrix*)matstructA->mat;
  if (!matrixA) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing matrix cusparsestructA->mat->mat");
  // factor: get Nf if available
  ierr = PetscObjectQuery((PetscObject) A, "Nf", (PetscObject *) &container);CHKERRQ(ierr);
  if (container) {
    PetscInt *pNf=NULL;
    ierr = PetscContainerGetPointer(container, (void **) &pNf);CHKERRQ(ierr);
    Nf = (*pNf)%1000;
    if ((*pNf)/1000>0) nconcurrent = (*pNf)/1000; // number of SMs to use
  } else Nf = 1;
  if (n%Nf) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"n % Nf != 0 %D %D",n,Nf);
  // get data
  ic = thrust::raw_pointer_cast(cusparseTriFactors->cpermIndices->data());
  ai_d = thrust::raw_pointer_cast(matrixA->row_offsets->data());
  aj_d = thrust::raw_pointer_cast(matrixA->column_indices->data());
  aa_d = thrust::raw_pointer_cast(matrixA->values->data().get());
  r = thrust::raw_pointer_cast(cusparseTriFactors->rpermIndices->data());
  cerr = WaitForCUDA();CHKERRCUDA(cerr);
  ierr = PetscLogGpuTimeBegin();CHKERRQ(ierr);
  {
    /* recover the bandwidth from n and nz by inverting the symbolic fill count (quadratic formula) */
    int bw = (int)(2.*(double)n-1. - (double)(PetscSqrtReal(1.+4.*((double)n*(double)n-(double)b->nz))+PETSC_MACHINE_EPSILON))/2, bm1=bw-1,nl=n/Nf;
#if PETSC_PKG_CUDA_VERSION_LT(11,0,0)
    Ni = 1/nconcurrent;
    Ni = 1;
#else
    if (!cusparseTriFactors->init_dev_prop) {
      int gpuid;
      cusparseTriFactors->init_dev_prop = PETSC_TRUE;
      /* check return codes: consistent with the cerr = ...;CHKERRCUDA(cerr); convention used in this file */
      cerr = cudaGetDevice(&gpuid);CHKERRCUDA(cerr);
      cerr = cudaGetDeviceProperties(&cusparseTriFactors->dev_prop, gpuid);CHKERRCUDA(cerr);
    }
    nsm = cusparseTriFactors->dev_prop.multiProcessorCount;
    Ni = nsm/Nf/nconcurrent;
#endif
    team_size = bw/Ni + !!(bw%Ni);
    nVec = PetscMin(bw, 1024/team_size);
    ierr = PetscInfo7(A,"Matrix Bandwidth = %d, number SMs/block = %d, num concurency = %d, num fields = %d, numSMs/GPU = %d, thread group size = %d,%d\n",bw,Ni,nconcurrent,Nf,nsm,team_size,nVec);CHKERRQ(ierr);
    {
      dim3 dimBlockTeam(nVec,team_size);
      dim3 dimBlockLeague(Nf,Ni);
      mat_lu_factor_band_copy_aij_aij<<<dimBlockLeague,dimBlockTeam>>>(n, bw, r, ic, ai_d, aj_d, aa_d, bi_t, ba_t);
      CHECK_LAUNCH_ERROR(); // does a sync
#if PETSC_PKG_CUDA_VERSION_GE(11,0,0)
      if (Ni > 1) {
        /* multiple blocks per field need a grid-wide barrier each pivot -> cooperative launch */
        void *kernelArgs[] = { (void*)&n, (void*)&bw, (void*)&bi_t, (void*)&ba_t, (void*)&nsm };
        cerr = cudaLaunchCooperativeKernel((void*)mat_lu_factor_band, dimBlockLeague, dimBlockTeam, kernelArgs, 0, NULL);CHKERRCUDA(cerr);
      } else {
        mat_lu_factor_band<<<dimBlockLeague,dimBlockTeam>>>(n, bw, bi_t, ba_t, NULL);
      }
#else
      mat_lu_factor_band<<<dimBlockLeague,dimBlockTeam>>>(n, bw, bi_t, ba_t, NULL);
#endif
      CHECK_LAUNCH_ERROR(); // does a sync
#if defined(PETSC_USE_LOG)
      ierr = PetscLogGpuFlops((PetscLogDouble)Nf*(bm1*(bm1 + 1)*(PetscLogDouble)(2*bm1 + 1)/3 + (PetscLogDouble)2*(nl-bw)*bw*bw + (PetscLogDouble)nl*(nl+1)/2));CHKERRQ(ierr);
#endif
    }
  }
  ierr = PetscLogGpuTimeEnd();CHKERRQ(ierr);
  /* determine which version of MatSolve needs to be used. from MatLUFactorNumeric_AIJ_SeqAIJCUSPARSE */
  B->ops->solve = MatSolve_SeqAIJCUSPARSEBAND;
  B->ops->solvetranspose = NULL; // need transpose
  B->ops->matsolve = NULL;
  B->ops->matsolvetranspose = NULL;
  PetscFunctionReturn(0);
}
/* Container destructor: frees the heap-allocated field count ("Nf")
   that was attached to the factored matrix via PetscObjectCompose(). */
static PetscErrorCode MatrixNfDestroy(void *ptr)
{
PetscErrorCode ierr;
PetscInt *num_fields = (PetscInt *)ptr;
PetscFunctionBegin;
ierr = PetscFree(num_fields);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
/*
  MatLUFactorSymbolic_SeqAIJCUSPARSEBAND - symbolic phase of the banded LU factorization on the GPU.

  Validates the input (square, full diagonal, structurally symmetric), propagates the optional
  "Nf" (number of fields) container from A to B, measures the matrix bandwidth under the given
  ordering, allocates the band storage on the device, and wires B up to use the band numeric
  factorization and solve.

  B    - the factor matrix being set up (output structure)
  A    - the matrix to be factored
  isrow/iscol - row/column orderings (only structurally symmetric ones are supported)
  info - factorization options (unused here)
*/
PetscErrorCode MatLUFactorSymbolic_SeqAIJCUSPARSEBAND(Mat B,Mat A,IS isrow,IS iscol,const MatFactorInfo *info)
{
Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data,*b;
IS isicol;
PetscErrorCode ierr;
cudaError_t cerr;
const PetscInt *ic,*ai=a->i,*aj=a->j;
PetscScalar *ba_t;
int *bi_t;
PetscInt i,n=A->rmap->n,Nf;
PetscInt nzBcsr,bwL,bwU;
PetscBool missing;
Mat_SeqAIJCUSPARSETriFactors *cusparseTriFactors = (Mat_SeqAIJCUSPARSETriFactors*)B->spptr;
PetscContainer container;
PetscFunctionBegin;
/* input validation: square, no missing diagonal, factors struct present, structural symmetry */
if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"matrix must be square");
ierr = MatMissingDiagonal(A,&missing,&i);CHKERRQ(ierr);
if (missing) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Matrix is missing diagonal entry %D",i);
if (!cusparseTriFactors) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"!cusparseTriFactors");
ierr = MatGetOption(A,MAT_STRUCTURALLY_SYMMETRIC,&missing);CHKERRQ(ierr);
if (!missing) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"only structrally symmetric matrices supported");
// factor: get Nf if available
/* copy A's "Nf" field count into a fresh container on B so the numeric phase and solve see it */
ierr = PetscObjectQuery((PetscObject) A, "Nf", (PetscObject *) &container);CHKERRQ(ierr);
if (container) {
PetscInt *pNf=NULL;
ierr = PetscContainerGetPointer(container, (void **) &pNf);CHKERRQ(ierr);
Nf = (*pNf)%1000;
ierr = PetscContainerCreate(PETSC_COMM_SELF, &container);CHKERRQ(ierr);
ierr = PetscMalloc(sizeof(PetscInt), &pNf);CHKERRQ(ierr);
*pNf = Nf;
ierr = PetscContainerSetPointer(container, (void *)pNf);CHKERRQ(ierr);
ierr = PetscContainerSetUserDestroy(container, MatrixNfDestroy);CHKERRQ(ierr);
ierr = PetscObjectCompose((PetscObject)B, "Nf", (PetscObject) container);CHKERRQ(ierr);
ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
} else Nf = 1;
if (n%Nf) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"n % Nf != 0 %D %D",n,Nf);
ierr = ISInvertPermutation(iscol,PETSC_DECIDE,&isicol);CHKERRQ(ierr);
ierr = ISGetIndices(isicol,&ic);CHKERRQ(ierr);
ierr = MatSeqAIJSetPreallocation_SeqAIJ(B,MAT_SKIP_ALLOCATION,NULL);CHKERRQ(ierr);
ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)isicol);CHKERRQ(ierr);
b = (Mat_SeqAIJ*)(B)->data;
/* get band widths, MatComputeBandwidth should take a reordering ic and do this */
/* scan every nonzero under the permutation to find the lower (bwL) and upper (bwU) half-bandwidths */
bwL = bwU = 0;
for (int rwb=0; rwb<n; rwb++) {
const PetscInt rwa = ic[rwb], anz = ai[rwb+1] - ai[rwb], *ajtmp = aj + ai[rwb];
for (int j=0;j<anz;j++) {
PetscInt colb = ic[ajtmp[j]];
if (colb<rwa) { // L
if (rwa-colb > bwL) bwL = rwa-colb;
} else {
if (colb-rwa > bwU) bwU = colb-rwa;
}
}
}
ierr = ISRestoreIndices(isicol,&ic);CHKERRQ(ierr);
/* only support structurally symmetric, but it might work */
if (bwL!=bwU) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Only symmetric structure supported (now) W_L=%D W_U=%D",bwL,bwU);
ierr = MatSeqAIJCUSPARSETriFactors_Reset(&cusparseTriFactors);CHKERRQ(ierr);
/* dense band nonzero count: full band (2*bw+1 per row) minus the clipped corners */
nzBcsr = n + (2*n-1)*bwU - bwU*bwU;
b->maxnz = b->nz = nzBcsr;
cusparseTriFactors->nnz = b->nz; // only meta data needed: n & nz
ierr = PetscInfo2(A,"Matrix Bandwidth = %D, nnz = %D\n",bwL,b->nz);CHKERRQ(ierr);
if (!cusparseTriFactors->workVector) { cusparseTriFactors->workVector = new THRUSTARRAY(n); }
cerr = cudaMalloc(&ba_t,(b->nz+1)*sizeof(PetscScalar));CHKERRCUDA(cerr); // include a place for flops
cerr = cudaMalloc(&bi_t,(n+1)*sizeof(int));CHKERRCUDA(cerr);
cusparseTriFactors->a_band_d = ba_t;
cusparseTriFactors->i_band_d = bi_t;
/* In b structure: Free imax, ilen, old a, old j. Allocate solve_work, new a, new j */
ierr = PetscLogObjectMemory((PetscObject)B,(nzBcsr+1)*(sizeof(PetscInt)+sizeof(PetscScalar)));CHKERRQ(ierr);
/* fill bi_t (row pointers of the band layout) on the device */
{
dim3 dimBlockTeam(1,128);
dim3 dimBlockLeague(Nf,1);
mat_lu_factor_band_init_set_i<<<dimBlockLeague,dimBlockTeam>>>(n, bwU, bi_t);
}
CHECK_LAUNCH_ERROR(); // does a sync
// setup data
/* cache the row permutation on the device for the solve phase */
if (!cusparseTriFactors->rpermIndices) {
const PetscInt *r;
ierr = ISGetIndices(isrow,&r);CHKERRQ(ierr);
cusparseTriFactors->rpermIndices = new THRUSTINTARRAY(n);
cusparseTriFactors->rpermIndices->assign(r, r+n);
ierr = ISRestoreIndices(isrow,&r);CHKERRQ(ierr);
ierr = PetscLogCpuToGpu(n*sizeof(PetscInt));CHKERRQ(ierr);
}
/* upper triangular indices */
/* cache the inverse column permutation on the device for the solve phase */
if (!cusparseTriFactors->cpermIndices) {
const PetscInt *c;
ierr = ISGetIndices(isicol,&c);CHKERRQ(ierr);
cusparseTriFactors->cpermIndices = new THRUSTINTARRAY(n);
cusparseTriFactors->cpermIndices->assign(c, c+n);
ierr = ISRestoreIndices(isicol,&c);CHKERRQ(ierr);
ierr = PetscLogCpuToGpu(n*sizeof(PetscInt));CHKERRQ(ierr);
}
/* put together the new matrix */
b->free_a = PETSC_FALSE;
b->free_ij = PETSC_FALSE;
b->singlemalloc = PETSC_FALSE;
b->ilen = NULL;
b->imax = NULL;
b->row = isrow;
b->col = iscol;
ierr = PetscObjectReference((PetscObject)isrow);CHKERRQ(ierr);
ierr = PetscObjectReference((PetscObject)iscol);CHKERRQ(ierr);
b->icol = isicol;
ierr = PetscMalloc1(n+1,&b->solve_work);CHKERRQ(ierr);
B->factortype = MAT_FACTOR_LU;
B->info.factor_mallocs = 0;
B->info.fill_ratio_given = 0;
/* estimated fill = band nonzeros / original nonzeros (guard against empty matrix) */
if (ai[n]) {
B->info.fill_ratio_needed = ((PetscReal)(nzBcsr))/((PetscReal)ai[n]);
} else {
B->info.fill_ratio_needed = 0.0;
}
#if defined(PETSC_USE_INFO)
if (ai[n] != 0) {
PetscReal af = B->info.fill_ratio_needed;
ierr = PetscInfo1(A,"Band fill ratio %g\n",(double)af);CHKERRQ(ierr);
} else {
ierr = PetscInfo(A,"Empty matrix\n");CHKERRQ(ierr);
}
#endif
if (a->inode.size) {
ierr = PetscInfo(A,"Warning: using inodes in band solver.\n");CHKERRQ(ierr);
}
ierr = MatSeqAIJCheckInode_FactorLU(B);CHKERRQ(ierr);
/* hand off to the band numeric factorization; data already lives on the GPU */
B->ops->lufactornumeric = MatLUFactorNumeric_SeqAIJCUSPARSEBAND;
B->offloadmask = PETSC_OFFLOAD_GPU;
PetscFunctionReturn(0);
}
/* Use -pc_factor_mat_solver_type cusparseband */
/* Reports the solver-type name under which this banded factorization is registered. */
PetscErrorCode MatFactorGetSolverType_seqaij_cusparse_band(Mat A,MatSolverType *type)
{
PetscFunctionBegin;
*type = MATSOLVERCUSPARSEBAND;
PetscFunctionReturn(0);
}
/*
  Factory routine: creates the factor matrix B for the CUSPARSEBAND solver.
  Only LU factorization is supported; RCM is registered as the preferred
  ordering since it minimizes bandwidth, which this solver depends on.
*/
PETSC_EXTERN PetscErrorCode MatGetFactor_seqaijcusparse_cusparse_band(Mat A,MatFactorType ftype,Mat *B)
{
PetscErrorCode ierr;
PetscInt n = A->rmap->n;
PetscFunctionBegin;
ierr = MatCreate(PetscObjectComm((PetscObject)A),B);CHKERRQ(ierr);
ierr = MatSetSizes(*B,n,n,n,n);CHKERRQ(ierr);
(*B)->factortype = ftype;
(*B)->canuseordering = PETSC_TRUE;
ierr = MatSetType(*B,MATSEQAIJCUSPARSE);CHKERRQ(ierr);
if (ftype == MAT_FACTOR_LU) {
ierr = MatSetBlockSizesFromMats(*B,A,A);CHKERRQ(ierr);
(*B)->ops->ilufactorsymbolic = NULL; // MatILUFactorSymbolic_SeqAIJCUSPARSE;
(*B)->ops->lufactorsymbolic = MatLUFactorSymbolic_SeqAIJCUSPARSEBAND;
ierr = PetscStrallocpy(MATORDERINGRCM,(char**)&(*B)->preferredordering[MAT_FACTOR_LU]);CHKERRQ(ierr);
} else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor type not supported for CUSPARSEBAND Matrix Types");
/* skip allocation: the symbolic phase allocates band storage on the device instead */
ierr = MatSeqAIJSetPreallocation(*B,MAT_SKIP_ALLOCATION,NULL);CHKERRQ(ierr);
ierr = PetscObjectComposeFunction((PetscObject)(*B),"MatFactorGetSolverType_C",MatFactorGetSolverType_seqaij_cusparse_band);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
#define WARP_SIZE 32
// Sum-reduce `val` across the 32 lanes of a warp via shuffle-down;
// lane 0 ends up holding the warp total (other lanes hold partials).
// All 32 lanes must participate (full 0xffffffff mask).
template <typename T>
__forceinline__ __device__
T wreduce(T val)
{
#pragma unroll
for (int offset = WARP_SIZE/2; offset > 0; offset >>= 1) {
val += __shfl_down_sync(0xffffffff, val, offset);
}
return val;
}
// reduce in a block, returns result in thread 0
// Two stages: each warp reduces via shuffles and lane 0 stores its partial in
// shared memory; warp 0 then reduces the per-warp partials. Threads outside
// warp 0 return an unspecified value. Requires BLOCK_SIZE to be a multiple of
// WARP_SIZE (and >= WARP_SIZE).
template <typename T, int BLOCK_SIZE>
__device__
T breduce(T a)
{
constexpr int NWARP = BLOCK_SIZE/WARP_SIZE;
__shared__ T buf[NWARP]; // per-warp partials (was double regardless of T)
int wid = threadIdx.x / WARP_SIZE;
int laneid = threadIdx.x % WARP_SIZE;
T b = wreduce<T>(a);
if (laneid == 0)
buf[wid] = b;
__syncthreads();
if (wid == 0) {
// all 32 lanes of warp 0 take this branch, so the full mask is valid
if (threadIdx.x < NWARP)
a = buf[threadIdx.x];
else
a = 0;
for (int i = (NWARP+1)/2; i >= 1; i = i >> 1) {
a += __shfl_down_sync(0xffffffff, a, i);
}
}
// Barrier before returning: without it, a subsequent breduce call could
// overwrite buf[] while warp 0 is still reading it (back-to-back call race).
__syncthreads();
return a;
}
// Band LU kernel --- ba_csr bi_csr
// Banded triangular solve: one block per field (gridDim.x = Nf), BLOCK_SIZE
// threads cooperate on the dot product of each row. Forward-solves L then
// back-solves U, walking raw pointers through the packed band storage ba_csr
// (corner "ears" of the band are clipped, hence the chopnz adjustments).
// x is overwritten in place with the solution.
template <int BLOCK_SIZE>
__global__
void __launch_bounds__(256,1)
mat_solve_band(const PetscInt n, const PetscInt bw, const PetscScalar ba_csr[], PetscScalar x[])
{
const PetscInt Nf = gridDim.x, nloc = n/Nf, field = blockIdx.x, start = field*nloc, end = start + nloc, chopnz = bw*(bw+1)/2, blocknz=(2*bw+1)*nloc, blocknz_0 = blocknz-chopnz;
const PetscScalar *pLi;
const int tid = threadIdx.x;
/* Next, solve L */
/* pLi starts at this field's (0,0) diagonal; first field's block is smaller by chopnz */
pLi = ba_csr + (field==0 ? 0 : blocknz_0 + (field-1)*blocknz + bw); // diagonal (0,0) in field
for (int glbDD=start, locDD = 0; glbDD<end; glbDD++, locDD++) {
const PetscInt col = locDD<bw ? start : (glbDD-bw);
PetscScalar t = 0;
/* threads partition the row's L entries; partial sums are block-reduced below */
for (int j=col+tid,idx=tid;j<glbDD;j+=blockDim.x,idx+=blockDim.x) {
t += pLi[idx]*x[j];
}
#if defined(PETSC_USE_COMPLEX)
PetscReal tr = PetscRealPartComplex(t), ti = PetscImaginaryPartComplex(t);
PetscScalar tt(breduce<PetscReal,BLOCK_SIZE>(tr), breduce<PetscReal,BLOCK_SIZE>(ti));
t = tt;
#else
t = breduce<PetscReal,BLOCK_SIZE>(t);
#endif
/* unit diagonal in L, so no division */
if (threadIdx.x == 0)
x[glbDD] -= t; // /1.0
__syncthreads();
// inc
pLi += glbDD-col; // get to diagonal
if (glbDD > n-1-bw) pLi += n-1-glbDD; // skip over U, only last block has funny offset
else pLi += bw;
pLi += 1; // skip to next row
if (field>0 && (locDD+1)<bw) pLi += bw-(locDD+1); // skip padding at beginning (ear)
}
/* Then, solve U */
/* walk backwards from the last stored diagonal entry */
pLi = ba_csr + Nf*blocknz - 2*chopnz - 1; // end of real data on block (diagonal)
if (field != Nf-1) pLi -= blocknz_0 + (Nf-2-field)*blocknz + bw; // diagonal of last local row
for (int glbDD=end-1, locDD = 0; glbDD >= start; glbDD--, locDD++) {
const PetscInt col = (locDD<bw) ? end-1 : glbDD+bw; // end of row in U
PetscScalar t = 0;
/* U entries are read with negative offsets from the row's end */
for (int j=col-tid,idx=tid;j>glbDD;j-=blockDim.x,idx+=blockDim.x) {
t += pLi[-idx]*x[j];
}
#if defined(PETSC_USE_COMPLEX)
PetscReal tr = PetscRealPartComplex(t), ti = PetscImaginaryPartComplex(t);
PetscScalar tt(breduce<PetscReal,BLOCK_SIZE>(tr), breduce<PetscReal,BLOCK_SIZE>(ti));
t = tt;
#else
t = breduce<PetscReal,BLOCK_SIZE>(PetscRealPart(t));
#endif
pLi -= col-glbDD; // diagonal
/* U has a real (non-unit) diagonal: subtract then divide */
if (threadIdx.x == 0) {
x[glbDD] -= t;
x[glbDD] /= pLi[0];
}
__syncthreads();
// inc past L to start of previous U
pLi -= bw+1;
if (glbDD<bw) pLi += bw-glbDD; // overshot in top left corner
if (((locDD+1) < bw) && field != Nf-1) pLi -= (bw - (locDD+1)); // skip past right corner
}
}
/*
  MatSolve_SeqAIJCUSPARSEBAND - triangular solve with the banded LU factors, entirely on the GPU.

  Applies the row permutation to b (into a work vector), runs the band solve kernel
  (one block per field), then applies the column permutation into x. The bandwidth is
  recovered from n and nnz by inverting nnz = n + (2n-1)bw - bw^2.
*/
static PetscErrorCode MatSolve_SeqAIJCUSPARSEBAND(Mat A,Vec bb,Vec xx)
{
const PetscScalar *barray;
PetscScalar *xarray;
thrust::device_ptr<const PetscScalar> bGPU;
thrust::device_ptr<PetscScalar> xGPU;
Mat_SeqAIJCUSPARSETriFactors *cusparseTriFactors = (Mat_SeqAIJCUSPARSETriFactors*)A->spptr;
THRUSTARRAY *tempGPU = (THRUSTARRAY*)cusparseTriFactors->workVector;
PetscInt n=A->rmap->n, nz=cusparseTriFactors->nnz, Nf;
PetscInt bw = (int)(2.*(double)n-1.-(double)(PetscSqrtReal(1.+4.*((double)n*(double)n-(double)nz))+PETSC_MACHINE_EPSILON))/2; // quadric formula for bandwidth
PetscErrorCode ierr;
PetscContainer container;
PetscFunctionBegin;
if (A->rmap->n == 0) {
PetscFunctionReturn(0);
}
// factor: get Nf if available
ierr = PetscObjectQuery((PetscObject) A, "Nf", (PetscObject *) &container);CHKERRQ(ierr);
if (container) {
PetscInt *pNf=NULL;
ierr = PetscContainerGetPointer(container, (void **) &pNf);CHKERRQ(ierr);
Nf = (*pNf)%1000;
} else Nf = 1;
if (n%Nf) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"n(%D) % Nf(%D) != 0",n,Nf);
/* Get the GPU pointers */
ierr = VecCUDAGetArrayWrite(xx,&xarray);CHKERRQ(ierr);
ierr = VecCUDAGetArrayRead(bb,&barray);CHKERRQ(ierr);
xGPU = thrust::device_pointer_cast(xarray);
bGPU = thrust::device_pointer_cast(barray);
ierr = PetscLogGpuTimeBegin();CHKERRQ(ierr);
/* First, reorder with the row permutation */
thrust::copy(thrust::cuda::par.on(PetscDefaultCudaStream),thrust::make_permutation_iterator(bGPU, cusparseTriFactors->rpermIndices->begin()),
thrust::make_permutation_iterator(bGPU, cusparseTriFactors->rpermIndices->end()),
tempGPU->begin());
/* band solve in place on the work vector: one block per field, 128 threads per block */
constexpr int block = 128;
mat_solve_band<block><<<Nf,block>>>(n,bw,cusparseTriFactors->a_band_d,tempGPU->data().get());
CHECK_LAUNCH_ERROR(); // does a sync
/* Last, reorder with the column permutation */
thrust::copy(thrust::cuda::par.on(PetscDefaultCudaStream),thrust::make_permutation_iterator(tempGPU->begin(), cusparseTriFactors->cpermIndices->begin()),
thrust::make_permutation_iterator(tempGPU->begin(), cusparseTriFactors->cpermIndices->end()),
xGPU);
ierr = VecCUDARestoreArrayRead(bb,&barray);CHKERRQ(ierr);
ierr = VecCUDARestoreArrayWrite(xx,&xarray);CHKERRQ(ierr);
ierr = PetscLogGpuFlops(2.0*cusparseTriFactors->nnz - A->cmap->n);CHKERRQ(ierr);
ierr = PetscLogGpuTimeEnd();CHKERRQ(ierr);
PetscFunctionReturn(0);
}
|
baded2f6195538931fa8be5748baee0477fbba8f.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
//
// auto-generated by ops.py
//
__constant__ int xdim0_zerores_kernel;
int xdim0_zerores_kernel_h = -1;
int ydim0_zerores_kernel_h = -1;
__constant__ int xdim1_zerores_kernel;
int xdim1_zerores_kernel_h = -1;
int ydim1_zerores_kernel_h = -1;
__constant__ int xdim2_zerores_kernel;
int xdim2_zerores_kernel_h = -1;
int ydim2_zerores_kernel_h = -1;
#undef OPS_ACC0
#undef OPS_ACC1
#undef OPS_ACC2
#define OPS_ACC0(x) (x)
#define OPS_ACC1(x) (x)
#define OPS_ACC2(x) (x)
// user function
__device__
void
/* User kernel (HIP build): zeroes the density, momentum and energy residuals
   at the current 1D grid point. OPS_ACCi(0) resolves to index 0 here. */
zerores_kernel(double *rho_res, double *rhou_res, double *rhoE_res) {
rho_res[OPS_ACC0(0)] = 0.0;
rhou_res[OPS_ACC1(0)] = 0.0;
rhoE_res[OPS_ACC2(0)] = 0.0;
}
#undef OPS_ACC0
#undef OPS_ACC1
#undef OPS_ACC2
/* GPU wrapper (HIP build): one thread per 1D grid point; offsets each dat
   pointer to this thread's element and calls the user kernel. */
__global__ void ops_zerores_kernel(double *__restrict arg0,
double *__restrict arg1,
double *__restrict arg2, int size0) {
int idx_x = blockDim.x * blockIdx.x + threadIdx.x;
/* single component, unit stride: element offset equals idx_x */
arg0 += idx_x * 1 * 1;
arg1 += idx_x * 1 * 1;
arg2 += idx_x * 1 * 1;
/* guard the grid tail: the launch may spawn more threads than points */
if (idx_x < size0) {
zerores_kernel(arg0, arg1, arg2);
}
}
// host stub function
/* Host stub (HIP build, auto-generated by ops.py): computes the local
   iteration range, refreshes __constant__ dims if they changed, sets up
   device base pointers, performs halo exchanges, then launches the kernel. */
void ops_par_loop_zerores_kernel(char const *name, ops_block block, int dim,
int *range, ops_arg arg0, ops_arg arg1,
ops_arg arg2) {
// Timing
double t1, t2, c1, c2;
ops_arg args[3] = {arg0, arg1, arg2};
#ifdef CHECKPOINTING
if (!ops_checkpointing_before(args, 3, range, 2))
return;
#endif
if (OPS_diags > 1) {
ops_timing_realloc(2, "zerores_kernel");
OPS_kernels[2].count++;
ops_timers_core(&c1, &t1);
}
// compute locally allocated range for the sub-block
int start[1];
int end[1];
#ifdef OPS_MPI
/* clip the global range to this rank's owned sub-block */
sub_block_list sb = OPS_sub_block_list[block->index];
if (!sb->owned)
return;
for (int n = 0; n < 1; n++) {
start[n] = sb->decomp_disp[n];
end[n] = sb->decomp_disp[n] + sb->decomp_size[n];
if (start[n] >= range[2 * n]) {
start[n] = 0;
} else {
start[n] = range[2 * n] - start[n];
}
if (sb->id_m[n] == MPI_PROC_NULL && range[2 * n] < 0)
start[n] = range[2 * n];
if (end[n] >= range[2 * n + 1]) {
end[n] = range[2 * n + 1] - sb->decomp_disp[n];
} else {
end[n] = sb->decomp_size[n];
}
if (sb->id_p[n] == MPI_PROC_NULL &&
(range[2 * n + 1] > sb->decomp_disp[n] + sb->decomp_size[n]))
end[n] += (range[2 * n + 1] - sb->decomp_disp[n] - sb->decomp_size[n]);
}
#else
for (int n = 0; n < 1; n++) {
start[n] = range[2 * n];
end[n] = range[2 * n + 1];
}
#endif
int x_size = MAX(0, end[0] - start[0]);
int xdim0 = args[0].dat->size[0];
int xdim1 = args[1].dat->size[0];
int xdim2 = args[2].dat->size[0];
/* only refresh the __constant__ copies when a dimension actually changed */
if (xdim0 != xdim0_zerores_kernel_h || xdim1 != xdim1_zerores_kernel_h ||
xdim2 != xdim2_zerores_kernel_h) {
hipMemcpyToSymbol(xdim0_zerores_kernel, &xdim0, sizeof(int));
xdim0_zerores_kernel_h = xdim0;
hipMemcpyToSymbol(xdim1_zerores_kernel, &xdim1, sizeof(int));
xdim1_zerores_kernel_h = xdim1;
hipMemcpyToSymbol(xdim2_zerores_kernel, &xdim2, sizeof(int));
xdim2_zerores_kernel_h = xdim2;
}
dim3 grid((x_size - 1) / OPS_block_size_x + 1, 1, 1);
dim3 tblock(OPS_block_size_x, 1, 1);
int dat0 = args[0].dat->elem_size;
int dat1 = args[1].dat->elem_size;
int dat2 = args[2].dat->elem_size;
char *p_a[3];
// set up initial pointers
int d_m[OPS_MAX_DIM];
#ifdef OPS_MPI
for (int d = 0; d < dim; d++)
d_m[d] =
args[0].dat->d_m[d] + OPS_sub_dat_list[args[0].dat->index]->d_im[d];
#else
for (int d = 0; d < dim; d++)
d_m[d] = args[0].dat->d_m[d];
#endif
/* byte offset of the first iterated element within the device buffer */
int base0 = dat0 * 1 * (start[0] * args[0].stencil->stride[0] -
args[0].dat->base[0] - d_m[0]);
p_a[0] = (char *)args[0].data_d + base0;
#ifdef OPS_MPI
for (int d = 0; d < dim; d++)
d_m[d] =
args[1].dat->d_m[d] + OPS_sub_dat_list[args[1].dat->index]->d_im[d];
#else
for (int d = 0; d < dim; d++)
d_m[d] = args[1].dat->d_m[d];
#endif
int base1 = dat1 * 1 * (start[0] * args[1].stencil->stride[0] -
args[1].dat->base[0] - d_m[0]);
p_a[1] = (char *)args[1].data_d + base1;
#ifdef OPS_MPI
for (int d = 0; d < dim; d++)
d_m[d] =
args[2].dat->d_m[d] + OPS_sub_dat_list[args[2].dat->index]->d_im[d];
#else
for (int d = 0; d < dim; d++)
d_m[d] = args[2].dat->d_m[d];
#endif
int base2 = dat2 * 1 * (start[0] * args[2].stencil->stride[0] -
args[2].dat->base[0] - d_m[0]);
p_a[2] = (char *)args[2].data_d + base2;
ops_H_D_exchanges_device(args, 3);
ops_halo_exchanges(args, 3, range);
if (OPS_diags > 1) {
ops_timers_core(&c2, &t2);
OPS_kernels[2].mpi_time += t2 - t1;
}
// call kernel wrapper function, passing in pointers to data
hipLaunchKernelGGL(( ops_zerores_kernel), dim3(grid), dim3(tblock), 0, 0, (double *)p_a[0], (double *)p_a[1],
(double *)p_a[2], x_size);
if (OPS_diags > 1) {
/* synchronize only when timing, so the kernel time is attributable */
cutilSafeCall(hipDeviceSynchronize());
ops_timers_core(&c1, &t1);
OPS_kernels[2].time += t1 - t2;
}
ops_set_dirtybit_device(args, 3);
ops_set_halo_dirtybit3(&args[0], range);
ops_set_halo_dirtybit3(&args[1], range);
ops_set_halo_dirtybit3(&args[2], range);
if (OPS_diags > 1) {
// Update kernel record
ops_timers_core(&c2, &t2);
OPS_kernels[2].mpi_time += t2 - t1;
OPS_kernels[2].transfer += ops_compute_transfer(dim, start, end, &arg0);
OPS_kernels[2].transfer += ops_compute_transfer(dim, start, end, &arg1);
OPS_kernels[2].transfer += ops_compute_transfer(dim, start, end, &arg2);
}
}
| baded2f6195538931fa8be5748baee0477fbba8f.cu | //
// auto-generated by ops.py
//
__constant__ int xdim0_zerores_kernel;
int xdim0_zerores_kernel_h = -1;
int ydim0_zerores_kernel_h = -1;
__constant__ int xdim1_zerores_kernel;
int xdim1_zerores_kernel_h = -1;
int ydim1_zerores_kernel_h = -1;
__constant__ int xdim2_zerores_kernel;
int xdim2_zerores_kernel_h = -1;
int ydim2_zerores_kernel_h = -1;
#undef OPS_ACC0
#undef OPS_ACC1
#undef OPS_ACC2
#define OPS_ACC0(x) (x)
#define OPS_ACC1(x) (x)
#define OPS_ACC2(x) (x)
// user function
__device__
void
/* User kernel (CUDA build): zeroes the density, momentum and energy residuals
   at the current 1D grid point. OPS_ACCi(0) resolves to index 0 here. */
zerores_kernel(double *rho_res, double *rhou_res, double *rhoE_res) {
rho_res[OPS_ACC0(0)] = 0.0;
rhou_res[OPS_ACC1(0)] = 0.0;
rhoE_res[OPS_ACC2(0)] = 0.0;
}
#undef OPS_ACC0
#undef OPS_ACC1
#undef OPS_ACC2
/* GPU wrapper (CUDA build): one thread per 1D grid point; offsets each dat
   pointer to this thread's element and calls the user kernel. */
__global__ void ops_zerores_kernel(double *__restrict arg0,
double *__restrict arg1,
double *__restrict arg2, int size0) {
int idx_x = blockDim.x * blockIdx.x + threadIdx.x;
/* single component, unit stride: element offset equals idx_x */
arg0 += idx_x * 1 * 1;
arg1 += idx_x * 1 * 1;
arg2 += idx_x * 1 * 1;
/* guard the grid tail: the launch may spawn more threads than points */
if (idx_x < size0) {
zerores_kernel(arg0, arg1, arg2);
}
}
// host stub function
/* Host stub (CUDA build, auto-generated by ops.py): computes the local
   iteration range, refreshes __constant__ dims if they changed, sets up
   device base pointers, performs halo exchanges, then launches the kernel. */
void ops_par_loop_zerores_kernel(char const *name, ops_block block, int dim,
int *range, ops_arg arg0, ops_arg arg1,
ops_arg arg2) {
// Timing
double t1, t2, c1, c2;
ops_arg args[3] = {arg0, arg1, arg2};
#ifdef CHECKPOINTING
if (!ops_checkpointing_before(args, 3, range, 2))
return;
#endif
if (OPS_diags > 1) {
ops_timing_realloc(2, "zerores_kernel");
OPS_kernels[2].count++;
ops_timers_core(&c1, &t1);
}
// compute locally allocated range for the sub-block
int start[1];
int end[1];
#ifdef OPS_MPI
/* clip the global range to this rank's owned sub-block */
sub_block_list sb = OPS_sub_block_list[block->index];
if (!sb->owned)
return;
for (int n = 0; n < 1; n++) {
start[n] = sb->decomp_disp[n];
end[n] = sb->decomp_disp[n] + sb->decomp_size[n];
if (start[n] >= range[2 * n]) {
start[n] = 0;
} else {
start[n] = range[2 * n] - start[n];
}
if (sb->id_m[n] == MPI_PROC_NULL && range[2 * n] < 0)
start[n] = range[2 * n];
if (end[n] >= range[2 * n + 1]) {
end[n] = range[2 * n + 1] - sb->decomp_disp[n];
} else {
end[n] = sb->decomp_size[n];
}
if (sb->id_p[n] == MPI_PROC_NULL &&
(range[2 * n + 1] > sb->decomp_disp[n] + sb->decomp_size[n]))
end[n] += (range[2 * n + 1] - sb->decomp_disp[n] - sb->decomp_size[n]);
}
#else
for (int n = 0; n < 1; n++) {
start[n] = range[2 * n];
end[n] = range[2 * n + 1];
}
#endif
int x_size = MAX(0, end[0] - start[0]);
int xdim0 = args[0].dat->size[0];
int xdim1 = args[1].dat->size[0];
int xdim2 = args[2].dat->size[0];
/* only refresh the __constant__ copies when a dimension actually changed */
if (xdim0 != xdim0_zerores_kernel_h || xdim1 != xdim1_zerores_kernel_h ||
xdim2 != xdim2_zerores_kernel_h) {
cudaMemcpyToSymbol(xdim0_zerores_kernel, &xdim0, sizeof(int));
xdim0_zerores_kernel_h = xdim0;
cudaMemcpyToSymbol(xdim1_zerores_kernel, &xdim1, sizeof(int));
xdim1_zerores_kernel_h = xdim1;
cudaMemcpyToSymbol(xdim2_zerores_kernel, &xdim2, sizeof(int));
xdim2_zerores_kernel_h = xdim2;
}
dim3 grid((x_size - 1) / OPS_block_size_x + 1, 1, 1);
dim3 tblock(OPS_block_size_x, 1, 1);
int dat0 = args[0].dat->elem_size;
int dat1 = args[1].dat->elem_size;
int dat2 = args[2].dat->elem_size;
char *p_a[3];
// set up initial pointers
int d_m[OPS_MAX_DIM];
#ifdef OPS_MPI
for (int d = 0; d < dim; d++)
d_m[d] =
args[0].dat->d_m[d] + OPS_sub_dat_list[args[0].dat->index]->d_im[d];
#else
for (int d = 0; d < dim; d++)
d_m[d] = args[0].dat->d_m[d];
#endif
/* byte offset of the first iterated element within the device buffer */
int base0 = dat0 * 1 * (start[0] * args[0].stencil->stride[0] -
args[0].dat->base[0] - d_m[0]);
p_a[0] = (char *)args[0].data_d + base0;
#ifdef OPS_MPI
for (int d = 0; d < dim; d++)
d_m[d] =
args[1].dat->d_m[d] + OPS_sub_dat_list[args[1].dat->index]->d_im[d];
#else
for (int d = 0; d < dim; d++)
d_m[d] = args[1].dat->d_m[d];
#endif
int base1 = dat1 * 1 * (start[0] * args[1].stencil->stride[0] -
args[1].dat->base[0] - d_m[0]);
p_a[1] = (char *)args[1].data_d + base1;
#ifdef OPS_MPI
for (int d = 0; d < dim; d++)
d_m[d] =
args[2].dat->d_m[d] + OPS_sub_dat_list[args[2].dat->index]->d_im[d];
#else
for (int d = 0; d < dim; d++)
d_m[d] = args[2].dat->d_m[d];
#endif
int base2 = dat2 * 1 * (start[0] * args[2].stencil->stride[0] -
args[2].dat->base[0] - d_m[0]);
p_a[2] = (char *)args[2].data_d + base2;
ops_H_D_exchanges_device(args, 3);
ops_halo_exchanges(args, 3, range);
if (OPS_diags > 1) {
ops_timers_core(&c2, &t2);
OPS_kernels[2].mpi_time += t2 - t1;
}
// call kernel wrapper function, passing in pointers to data
ops_zerores_kernel<<<grid, tblock>>>((double *)p_a[0], (double *)p_a[1],
(double *)p_a[2], x_size);
if (OPS_diags > 1) {
/* synchronize only when timing, so the kernel time is attributable */
cutilSafeCall(cudaDeviceSynchronize());
ops_timers_core(&c1, &t1);
OPS_kernels[2].time += t1 - t2;
}
ops_set_dirtybit_device(args, 3);
ops_set_halo_dirtybit3(&args[0], range);
ops_set_halo_dirtybit3(&args[1], range);
ops_set_halo_dirtybit3(&args[2], range);
if (OPS_diags > 1) {
// Update kernel record
ops_timers_core(&c2, &t2);
OPS_kernels[2].mpi_time += t2 - t1;
OPS_kernels[2].transfer += ops_compute_transfer(dim, start, end, &arg0);
OPS_kernels[2].transfer += ops_compute_transfer(dim, start, end, &arg1);
OPS_kernels[2].transfer += ops_compute_transfer(dim, start, end, &arg2);
}
}
|
850bf120d52fabd075ef4a12d9ca78a09b7b837b.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
* Copyright 1993-2010 NVIDIA Corporation. All rights reserved.
*
* Please refer to the NVIDIA end user license agreement (EULA) associated
* with this source code for terms and conditions that govern your use of
* this software. Any use, reproduction, disclosure, or distribution of
* this software and related documentation outside the terms of the EULA
* is strictly prohibited.
*
*/
// Simple 3D volume renderer
#ifndef _VOLUMERENDER_KERNEL_CU_
#define _VOLUMERENDER_KERNEL_CU_
#include "volumeRender_kernel.cuh"
#include "reductionMax.hh"
typedef unsigned int uint;
typedef unsigned char uchar;
typedef struct {
float4 m[3];
} float3x4;
typedef unsigned short VolumeType;
//typedef float VolumeType;
__constant__ float3x4 c_invViewMatrix; // inverse view matrix
texture<VolumeType, 3, hipReadModeElementType> tex; // 3D texture
//texture<VolumeType, 3, hipReadModeElementType> tex_cluster; // 3D texture
struct Ray {
float3 o; // origin
float3 d; // direction
};
// intersect ray with a box
// http://www.siggraph.org/education/materials/HyperGraph/raytrace/rtinter3.htm
// Slab method: returns nonzero on hit, writing the entry/exit distances.
__device__
int intersectBox(Ray r, float3 boxmin, float3 boxmax, float *tnear, float *tfar)
{
    // parametric distances to the two planes of each axis-aligned slab
    float3 invDir = make_float3(1.0f) / r.d;
    float3 tLow  = invDir * (boxmin - r.o);
    float3 tHigh = invDir * (boxmax - r.o);
    // per-axis entry (min) and exit (max) distances
    float3 tEnter = fminf(tHigh, tLow);
    float3 tExit  = fmaxf(tHigh, tLow);
    // the ray is inside the box between the latest entry and the earliest exit
    float tN = fmaxf(fmaxf(tEnter.x, tEnter.y), fmaxf(tEnter.x, tEnter.z));
    float tF = fminf(fminf(tExit.x, tExit.y), fminf(tExit.x, tExit.z));
    *tnear = tN;
    *tfar  = tF;
    return tF > tN;
}
// transform vector by matrix (no translation)
// Applies only the 3x3 rotation/scale part of the 3x4 matrix to a direction.
__device__
float3 mul(const float3x4 &M, const float3 &v)
{
    return make_float3(dot(v, make_float3(M.m[0])),
                       dot(v, make_float3(M.m[1])),
                       dot(v, make_float3(M.m[2])));
}
// transform vector by matrix with translation
// Full affine transform of a homogeneous point; the result's w is pinned to 1.
__device__
float4 mul(const float3x4 &M, const float4 &v)
{
    float4 out;
    out.x = dot(v, M.m[0]);
    out.y = dot(v, M.m[1]);
    out.z = dot(v, M.m[2]);
    out.w = 1.0f;
    return out;
}
/*__device__
float4 color_interpolate_cluster(float sample){
// ACCENT
// if(sample <= 1)
// return make_float4((float)0.99215,(float)0.75294, (float)0.52549, 1.0);
// else if(sample <= 2)
// return make_float4( (float)0.498, (float)0.7882, (float)0.498, 0.25);
// else if(sample <= 3)
// return make_float4((float)0.74509,(float)0.68235, (float)0.83137, 1.0);
// else if(sample <= 4)
// return make_float4(1.0,1.0,1.0,1.0);
// Dark2
if(sample <= 1)
return make_float4( 0.8509803921569,0.3725490196078,0.007843137254902, 1.0);
else if(sample <= 2)
return make_float4( 0.1058823529412, 0.6196078431373, 0.4666666666667, 0.25);
else if(sample <= 3)
return make_float4( 0.4588235294118,0.4392156862745,0.7019607843137, 1.0);
else if(sample <= 4)
return make_float4(1.0,1.0,1.0,1.0);
return make_float4(0.0,0.0,0.0,0.0);
}*/
// Piecewise-linear blend of six control colors over a normalized sample.
// Segment breakpoints: 0.2, 0.3, 0.4, 0.5, 1.0; within each segment the two
// bounding colors are lerped. Samples above 1.0 extrapolate past `six`.
__device__
float4 color_interpolate_large(float sample, float4 one, float4 two, float4 three,
                float4 four, float4 five, float4 six){
  float4 retcolor = make_float4(0);
  float percent = 0.0f;
  if(sample <= 0.2f){
    percent = (0.2f - sample) / 0.2f;
    retcolor = (percent)*one + (1.0f-percent) * two;
  }else if(sample > 0.2f && sample <= 0.3f){
    percent = (0.3f - sample) / 0.1f;
    retcolor = (percent)*two + (1.0f-percent) * three;
  }else if(sample > 0.3f && sample <= 0.4f){
    percent = (0.4f - sample) / 0.1f;
    retcolor = (percent)*three + (1.0f-percent) * four;
  }else if(sample > 0.4f && sample <= 0.5f){
    percent = (0.5f - sample) / 0.1f;
    retcolor = (percent)*four + (1.0f-percent) * five;
  }else{
    percent = (1.0f - sample) / 0.5f;  // was `1.0` (double literal): keep the arithmetic in float
    retcolor = (percent)*five + (1.0f-percent) * six;
  }
  return retcolor;
}
// Piecewise-linear transfer function over six control points.
// Each control point packs its scalar breakpoint in .x and its RGB in .yzw;
// the output alpha is a fixed 0.1f. tfsize is unused (kept for the interface).
__device__
float4 color_interpolate_1(float sample, float4 one, float4 two, float4 three,
                float4 four, float4 five, float4 six, uint tfsize){
  float lim[6]  = { one.x, two.x, three.x, four.x, five.x, six.x };
  float4 rgb[6] = { make_float4(one.y,   one.z,   one.w,   0.1f),
                    make_float4(two.y,   two.z,   two.w,   0.1f),
                    make_float4(three.y, three.z, three.w, 0.1f),
                    make_float4(four.y,  four.z,  four.w,  0.1f),
                    make_float4(five.y,  five.z,  five.w,  0.1f),
                    make_float4(six.y,   six.z,   six.w,   0.1f) };
  // find the segment [lim[s], lim[s+1]] containing sample (clamped to the last)
  int s = 4;
  if      (sample <= lim[1]) s = 0;
  else if (sample <= lim[2]) s = 1;
  else if (sample <= lim[3]) s = 2;
  else if (sample <= lim[4]) s = 3;
  // linear interpolation between the segment's two endpoint colors
  float percent = (sample - lim[s]) / (lim[s+1] - lim[s]);
  return rgb[s] + percent * (rgb[s+1] - rgb[s]);
}
__device__
/* Legacy transfer function with hardcoded breakpoints tuned for 16-bit
   (ushort) volume samples: 0, 25500, 26500, 27500, 28500, 65535.
   Blends the six control colors piecewise-linearly across those ranges. */
float4 color_interpolate_old(float sample, float4 one, float4 two, float4 three,
                float4 four, float4 five, float4 six){
  float4 retcolor = make_float4(0);
  float percent = 0.0f;
  if(sample <= 25500.0f){
    percent = (25500.0f - sample) / (25500.0f - 0.0f);
    retcolor = (percent)*one + (1.0f-percent) * two;
  }else if(sample > 25500.0f && sample <= 26500.0f){
    percent = (26500.0f - sample) / (26500.0f - 25500.0f);
    retcolor = (percent)*two + (1.0f-percent) * three;
  }else if(sample > 26500.0f && sample <= 27500.0f){
    percent = (27500.0f - sample) / (27500.0f - 26500.0f);
    retcolor = (percent)*three + (1.0f-percent) * four;
  }else if(sample > 27500.0f && sample <= 28500.0f){
    percent = (28500.0f - sample) / (28500.0f - 27500.0f);
    retcolor = (percent)*four + (1.0f-percent) * five;
  }else{
    percent = (65535.0f - sample) / (65535.0f - 28500.0f);
    retcolor = (percent)*five + (1.0f-percent) * six;
  }
  return retcolor;
}
__device__
/* Active transfer function. NOTE(review): the one..six parameters and tfsize
   are effectively IGNORED — the locals below overwrite them with a hardcoded
   table (labeled "parameters for 3_1"), so callers cannot influence the TF
   through the arguments. The interface is kept for compatibility. */
float4 color_interpolate(float sample, float4 one, float4 two, float4 three,
                float4 four, float4 five, float4 six, uint tfsize){
/*int index = 0;
for(index = 0; index < tfsize; index++)
if (sample < tf[index][0])
break;
float4 low_val = make_float4(tf[index-1][1], tf[index-1][2], tf[index-1][3], 0.1f);
float4 high_val = make_float4(tf[index][1], tf[index][2], tf[index][3], 0.1f);
float percent = (tf[index][0] - sample) / (tf[index][0] - tf[index-1][0]);
float4 retcolor = high_val - percent * (high_val - low_val);*/
//float percent = (sample - tf[index-1][0]) / (tf[index][0] - tf[index-1][0]);
//float4 retcolor = low_val - percent * (high_val - low_val);
float4 retcolor = make_float4(0);
float percent = 0.0f;
/*float one_lim = one.x; one = make_float4(one.y, one.z, one.w, 0.1f);
float two_lim = two.x; two = make_float4(two.y, two.z, two.w, 0.1f);
float three_lim = three.x; three = make_float4(three.y, three.z, three.w, 0.1f);
float four_lim = four.x; four = make_float4(four.y, four.z, four.w, 0.1f);
float five_lim = five.x; five = make_float4(five.y, five.z, five.w, 0.1f);
float six_lim = six.x; six = make_float4(six.y, six.z, six.w, 0.1f);*/
// parameters for 2
/*float one_lim = 0.0f; one = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
float two_lim = 25500.0f; two = make_float4(0.0f, 0.3f, 0.3f, 0.0f);
float three_lim = 26500.0f; three = make_float4(0.5f, 0.5f, 1.0f, 0.1f);
float four_lim = 27500.0f; four = make_float4(1.0f, 1.0f, 1.0f, 0.4f);
float five_lim = 28500.0f; five = make_float4(1.0f, 0.2f, 0.2f, 0.94f);
float six_lim = 65535.0f; six = make_float4(1.0f, 0.0f, 0.0f, 1.0f);*/
// parameters for 3_1
/* hardcoded breakpoints (16-bit sample range) and RGBA control colors */
float one_lim = 0.0f; one = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
float two_lim = 45000.0f; two = make_float4(0.3f, 0.3f, 0.4f, 1.0f);
float three_lim = 46000.0f; three = make_float4(0.6f, 0.6f, 1.0f, 1.0f);
float four_lim = 47000.0f; four = make_float4(1.0f, 1.0f, 1.0f, 0.05f);
float five_lim = 48000.0f; five = make_float4(1.0f, 0.2f, 0.2f, 0.9f);
float six_lim = 65535.0f; six = make_float4(1.0f, 0.0f, 0.0f, 0.1f);
/* NOTE(review): the divisor here is the upper breakpoint itself, not the
   segment width (the commented-out widths suggest that was the earlier form),
   so the blend weight is not a conventional segment-local lerp. */
if(sample <= two_lim){
percent = (two_lim - sample) / two_lim;//(two_lim - one_lim);
retcolor = (percent)*one + (1.0f-percent) * two;
}else if(sample > two_lim && sample <= three_lim){
percent = (three_lim - sample) / three_lim;//(three_lim - two_lim);
retcolor = (percent)*two + (1.0f-percent) * three;
}else if(sample > three_lim && sample <= four_lim){
percent = (four_lim - sample) / four_lim;//(four_lim - three_lim);
retcolor = (percent)*three + (1.0f-percent) * four;
}else if(sample > four_lim && sample <= five_lim){
percent = (five_lim - sample) / five_lim;//(five_lim - four_lim);
retcolor = (percent)*four + (1.0f-percent) * five;
}else{
percent = (six_lim - sample) / six_lim;//(six_lim - five_lim);
retcolor = (percent)*five + (1.0f-percent) * six;
}
return retcolor;
}
/* Packs an accumulated float4 color into a 32-bit ABGR pixel.
   NOTE(review): red/green/blue parameters are unused; alpha is forced to 0.5;
   channels are NOT clamped to [0,1] before the *255 conversion — presumably
   global_max+2 bounds the accumulated sums, but overflow past 255 would
   corrupt neighboring byte lanes; confirm against the accumulation loop. */
__device__ uint rgbaFloatToInt(float4 rgba, float global_max, float red, float green, float blue)
{
/* normalize each channel by the global maximum (+2 headroom) */
rgba.x = rgba.x / (global_max+2);
rgba.y = rgba.y / (global_max+2);
rgba.z = rgba.z / (global_max+2);
rgba.w = 0.5;
return (uint(rgba.w*255)<<24) | (uint(rgba.z*255)<<16) | (uint(rgba.y*255)<<8) | uint(rgba.x*255);
}
// Ray-march the bound 3D volume texture and accumulate one colour per pixel.
// Launch with a 2D grid covering imageW x imageH threads.
// Writes the accumulated colour into d_iColors and its individual components
// into the per-channel buffers (consumed afterwards by a max reduction).
// Note: `data` and `density` are currently unused but kept for the existing
// call sites. (Removed the unused local `col_`, which was dead code.)
__global__ void
d_render(float4 *d_iColors, ushort *data,
float *d_iRed, float *d_iGreen, float *d_iBlue, uint imageW, uint imageH,
float density, float brightness, float4 one, float4 two, float4 three,
float4 four, float4 five, float4 six, uint tfsize/*, int type*/)
{
    const int maxSteps = 500;      // hard cap on samples taken along one ray
    const float tstep = 0.01f;     // step size along the ray
    const float3 boxMin = make_float3(-1.0f, -1.0f, -1.0f);
    const float3 boxMax = make_float3(1.0f, 1.0f, 1.0f);
    uint x = blockIdx.x*blockDim.x + threadIdx.x;
    uint y = blockIdx.y*blockDim.y + threadIdx.y;
    if ((x >= imageW) || (y >= imageH)) return;
    // Map the pixel to [-1, 1] normalised device coordinates.
    float u = (x / (float) imageW)*2.0f-1.0f;
    float v = (y / (float) imageH)*2.0f-1.0f;
    // Calculate the eye ray in world space.
    Ray eyeRay;
    eyeRay.o = make_float3(mul(c_invViewMatrix, make_float4(0.0f, 0.0f, 0.0f, 1.0f)));
    eyeRay.d = normalize(make_float3(u, v, -2.0f));
    eyeRay.d = mul(c_invViewMatrix, eyeRay.d);
    // Find the intersection with the unit box; skip pixels whose ray misses.
    float tnear, tfar;
    int hit = intersectBox(eyeRay, boxMin, boxMax, &tnear, &tfar);
    if (!hit) return;
    if (tnear < 0.0f) tnear = 0.0f;     // clamp to near plane
    // March along the ray from front to back, accumulating colour.
    float4 sum = make_float4(0.0f);
    float t = tnear;
    float3 pos = eyeRay.o + eyeRay.d*tnear;
    float3 step = eyeRay.d*tstep;
    float sample = 0;
    for(int i=0; i<maxSteps; i++) {
        // Remap position from [-1, 1] into [0, 1] texture coordinates.
        sample = tex3D(tex, pos.x*0.5f+0.5f, pos.y*0.5f+0.5f, pos.z*0.5f+0.5f);
        // Look up the transfer-function colour for this sample.
        float4 col = color_interpolate(sample,one,two,three,four,five,six, tfsize);
        // Pre-multiply alpha.
        col.x *= col.w;
        col.y *= col.w;
        col.z *= col.w;
        // Plain additive accumulation (the "over" operator factor is disabled).
        sum = sum + col;//*(1.0f - sum.w);
        t += tstep;
        if (t > tfar) break;
        pos += step;
    }
    sum *= brightness;
    d_iColors[y*imageW + x] = sum;
    d_iRed[y*imageW + x] = sum.x;
    d_iGreen[y*imageW + x] = sum.y;
    d_iBlue[y*imageW + x] = sum.z;
}
// Convert each accumulated float4 colour into a packed 32-bit pixel.
// Fix: the output pixel was indexed with y*imageH + x, which scrambles the
// image (and can index out of bounds) whenever imageW != imageH; rows of
// the output buffer are imageW wide, so index with y*imageW + x.
__global__
void create_image(uint *output, float4 *d_iColors, float global_max, float red, float green, float blue, uint imageW, uint imageH){
    uint x = blockIdx.x*blockDim.x + threadIdx.x;
    uint y = blockIdx.y*blockDim.y + threadIdx.y;
    if ((x >= imageW) || (y >= imageH)) return;
    output[y*imageW+x] = rgbaFloatToInt(d_iColors[y*imageW+x], global_max, red, green, blue);
}
/*void setup_cluster(void *cluster, hipExtent volumeSize, uint image_size, hipArray *d_volumeArray_cluster){
// Cluster setup
// create 3D array
hipChannelFormatDesc channelDesc_cluster = hipCreateChannelDesc<VolumeType>();
cutilSafeCall( hipMalloc3DArray(&d_volumeArray_cluster, &channelDesc_cluster, volumeSize) );
// copy data to 3D array
hipMemcpy3DParms copyParams = {0};
copyParams.srcPtr = make_hipPitchedPtr(cluster, volumeSize.width*sizeof(VolumeType), volumeSize.width, volumeSize.height);
copyParams.dstArray = d_volumeArray_cluster;
copyParams.extent = volumeSize;
copyParams.kind = hipMemcpyHostToDevice;
cutilSafeCall( hipMemcpy3D(©Params) );
// set texture parameters
tex_cluster.normalized = true; // access with normalized texture coordinates
tex_cluster.filterMode = hipFilterModePoint; // linear interpolation
tex_cluster.addressMode[0] = hipAddressModeClamp; // clamp texture coordinates
tex_cluster.addressMode[1] = hipAddressModeClamp;
// bind array to 3D texture
cutilSafeCall(hipBindTextureToArray(tex_cluster, d_volumeArray_cluster, channelDesc_cluster));
}*/
// Upload the host volume into a device 3D array and bind it to the global
// `tex` texture reference used by d_render.
// Fix: the hipMemcpy3D argument had been mangled to "©Params" (an HTML
// entity corruption of "&copyParams"), which does not compile.
// NOTE(review): d_volumeArray is passed by value, so the array handle
// allocated here never reaches the caller — confirm whether the caller
// needs it (e.g. to free it) and switch to hipArray** if so.
void setup_volume(void *h_volume, hipExtent volumeSize, uint image_size, hipArray *d_volumeArray){
    // Create the 3D array.
    hipChannelFormatDesc channelDesc = hipCreateChannelDesc<VolumeType>();
    cutilSafeCall( hipMalloc3DArray(&d_volumeArray, &channelDesc, volumeSize) );
    // Copy the host data into the 3D array.
    hipMemcpy3DParms copyParams = {0};
    copyParams.srcPtr = make_hipPitchedPtr(h_volume, volumeSize.width*sizeof(VolumeType), volumeSize.width, volumeSize.height);
    copyParams.dstArray = d_volumeArray;
    copyParams.extent = volumeSize;
    copyParams.kind = hipMemcpyHostToDevice;
    cutilSafeCall( hipMemcpy3D(&copyParams) );
    // Set texture parameters.
    tex.normalized = true;                      // access with normalized texture coordinates
    tex.filterMode = hipFilterModePoint;        // nearest-neighbour sampling (Point mode, not linear)
    tex.addressMode[0] = hipAddressModeClamp;   // clamp texture coordinates
    tex.addressMode[1] = hipAddressModeClamp;
    // Bind the array to the 3D texture.
    cutilSafeCall(hipBindTextureToArray(tex, d_volumeArray, channelDesc));
}
// Host-side driver for rendering one frame:
//   1. on the first call (set[0] == 0) uploads the volume and binds the 3D
//      texture via setup_volume,
//   2. clears the per-pixel colour buffer and the per-channel scratch buffers,
//   3. launches d_render to ray-march the volume into d_iColors,
//   4. reduces each colour channel to its maximum and launches create_image
//      to normalise/pack the result into d_output.
// gridSize/blockSize must together cover imageW x imageH threads.
// set[0] is a caller-owned "volume already uploaded" flag.
// NOTE(review): the kernel launches are not followed by an error check
// (hipGetLastError), so launch failures would go unnoticed here.
void render_kernel(dim3 gridSize, dim3 blockSize, uint *d_output, /*uint *d_cluster, */float* d_iRed, float* d_oRed,
float* d_iGreen, float* d_oGreen, float* d_iBlue, float* d_oBlue, float4* d_iColors, unsigned short* data,
/*unsigned short *cluster_data, */uint imageW, uint imageH, float density, float brightness,
float4 one, float4 two, float4 three, float4 four, float4 five, float4 six, uint tfsize,
void *h_volume, /*void *cluster, */hipExtent volumeSize, hipArray *d_volumeArray, /*hipArray *d_volumeArray_cluster, */int *set)
{
int size = imageH * imageW;
// One-time volume upload; the texture binding persists across frames.
if(set[0] == 0){
setup_volume(h_volume, volumeSize, size, d_volumeArray);
set[0] = 1;
}
// if(set[1] == 0){
// setup_cluster(cluster, volumeSize, size, d_volumeArray_cluster);
// set[1] = 1;
// }
/* clear colors buffers */
cutilSafeCall(hipMemset(d_iColors, 0, imageH*imageW*sizeof(float4)));
cutilSafeCall(hipMemset(d_iRed, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(hipMemset(d_oRed, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(hipMemset(d_iGreen, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(hipMemset(d_oGreen, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(hipMemset(d_iBlue, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(hipMemset(d_oBlue, 0, imageH*imageW*sizeof(float)));
hipLaunchKernelGGL(( d_render), dim3(gridSize), dim3(blockSize), 0, 0, d_iColors, data, d_iRed, d_iGreen, d_iBlue, imageW, imageH, density, brightness,
one, two, three, four, five, six, tfsize/*, 0*/);
// Per-channel maxima; reduce_max returns a host float, so it presumably
// also synchronises with the launch above — confirm in reductionMax.
float max_red = reduce_max(d_oRed, d_iRed, size);
float max_green = reduce_max(d_oGreen, d_iGreen, size);
float max_blue = reduce_max(d_oBlue, d_iBlue, size);
float global_max = fmax(max_red, max_green);
global_max = fmax(global_max, max_blue);
hipLaunchKernelGGL(( create_image), dim3(gridSize), dim3(blockSize), 0, 0, d_output, d_iColors, global_max, max_red, max_green, max_blue, imageW, imageH);
// render image
//hipLaunchKernelGGL(( d_render), dim3(gridSize), dim3(blockSize), 0, 0, d_iColors, cluster_data, d_iRed, d_iGreen, d_iBlue, imageW, imageH, density, brightness,
// one, two, three, four, five, six, 1);
//
// max_red = reduce_max(d_oRed, d_iRed, size);
// max_green = reduce_max(d_oGreen, d_iGreen, size);
// max_blue = reduce_max(d_oBlue, d_iBlue, size);
//
// global_max = fmax(max_red, max_green);
// global_max = fmax(global_max, max_blue);
//
//hipLaunchKernelGGL(( create_image), dim3(gridSize), dim3(blockSize), 0, 0, d_cluster, d_iColors, global_max, max_red, max_green, max_blue, imageW, imageH);
}
// Upload the inverse view matrix into the c_invViewMatrix constant-memory
// symbol read by d_render. sizeofMatrix is the byte count to copy —
// presumably sizeof(float3x4) (three float4 rows); confirm at the call site.
void copyInvViewMatrix(float *invViewMatrix, size_t sizeofMatrix)
{
cutilSafeCall( hipMemcpyToSymbol(c_invViewMatrix, invViewMatrix, sizeofMatrix) );
}
#endif // #ifndef _VOLUMERENDER_KERNEL_CU_
| 850bf120d52fabd075ef4a12d9ca78a09b7b837b.cu | /*
* Copyright 1993-2010 NVIDIA Corporation. All rights reserved.
*
* Please refer to the NVIDIA end user license agreement (EULA) associated
* with this source code for terms and conditions that govern your use of
* this software. Any use, reproduction, disclosure, or distribution of
* this software and related documentation outside the terms of the EULA
* is strictly prohibited.
*
*/
// Simple 3D volume renderer
#ifndef _VOLUMERENDER_KERNEL_CU_
#define _VOLUMERENDER_KERNEL_CU_
#include "volumeRender_kernel.cuh"
#include "reductionMax.hh"
// Convenience aliases used throughout the renderer.
typedef unsigned int uint;
typedef unsigned char uchar;
// Row-major 3x4 matrix stored as three float4 rows; holds the inverse view
// matrix used to transform eye rays into world space.
typedef struct {
float4 m[3];
} float3x4;
// Element type of the volume texture (16-bit unsigned samples).
typedef unsigned short VolumeType;
//typedef float VolumeType;
__constant__ float3x4 c_invViewMatrix; // inverse view matrix (set via copyInvViewMatrix)
// Legacy texture-reference API binding for the volume data.
texture<VolumeType, 3, cudaReadModeElementType> tex; // 3D texture
//texture<VolumeType, 3, cudaReadModeElementType> tex_cluster; // 3D texture
// A ray in world space.
struct Ray {
float3 o; // origin
float3 d; // direction
};
// intersect ray with a box
// http://www.siggraph.org/education/materials/HyperGraph/raytrace/rtinter3.htm
// Slab-method intersection of ray r with the axis-aligned box
// [boxmin, boxmax]. On return *tnear/*tfar hold the entry/exit distances
// along the ray; the result is nonzero iff the ray hits the box.
__device__
int intersectBox(Ray r, float3 boxmin, float3 boxmax, float *tnear, float *tfar)
{
    // Distances to the six slab planes, in units of the ray direction.
    float3 invDir = make_float3(1.0f) / r.d;
    float3 tLower = invDir * (boxmin - r.o);
    float3 tUpper = invDir * (boxmax - r.o);
    // Per-axis near/far distances (re-ordered so near <= far on each axis).
    float3 tNearV = fminf(tUpper, tLower);
    float3 tFarV = fmaxf(tUpper, tLower);
    // Entry is the latest per-axis entry; exit is the earliest per-axis exit.
    float tEnter = fmaxf(fmaxf(tNearV.x, tNearV.y), fmaxf(tNearV.x, tNearV.z));
    float tExit = fminf(fminf(tFarV.x, tFarV.y), fminf(tFarV.x, tFarV.z));
    *tnear = tEnter;
    *tfar = tExit;
    return tExit > tEnter;
}
// transform vector by matrix (no translation)
// Rotate vector v by the upper-left 3x3 part of M (translation ignored).
__device__
float3 mul(const float3x4 &M, const float3 &v)
{
    return make_float3(dot(v, make_float3(M.m[0])),
                       dot(v, make_float3(M.m[1])),
                       dot(v, make_float3(M.m[2])));
}
// transform vector by matrix with translation
// Transform v by the full 3x4 matrix M (rotation plus translation);
// the result's w component is pinned to 1.
__device__
float4 mul(const float3x4 &M, const float4 &v)
{
    float4 out = make_float4(dot(v, M.m[0]),
                             dot(v, M.m[1]),
                             dot(v, M.m[2]),
                             1.0f);
    return out;
}
/*__device__
float4 color_interpolate_cluster(float sample){
// ACCENT
// if(sample <= 1)
// return make_float4((float)0.99215,(float)0.75294, (float)0.52549, 1.0);
// else if(sample <= 2)
// return make_float4( (float)0.498, (float)0.7882, (float)0.498, 0.25);
// else if(sample <= 3)
// return make_float4((float)0.74509,(float)0.68235, (float)0.83137, 1.0);
// else if(sample <= 4)
// return make_float4(1.0,1.0,1.0,1.0);
// Dark2
if(sample <= 1)
return make_float4( 0.8509803921569,0.3725490196078,0.007843137254902, 1.0);
else if(sample <= 2)
return make_float4( 0.1058823529412, 0.6196078431373, 0.4666666666667, 0.25);
else if(sample <= 3)
return make_float4( 0.4588235294118,0.4392156862745,0.7019607843137, 1.0);
else if(sample <= 4)
return make_float4(1.0,1.0,1.0,1.0);
return make_float4(0.0,0.0,0.0,0.0);
}*/
// Piecewise-linear transfer function over normalised samples in [0, 1].
// Breakpoints at 0.2/0.3/0.4/0.5 blend between the six control colours;
// each segment blends low*percent + high*(1 - percent), where percent
// measures how far the sample sits below the segment's upper breakpoint.
__device__
float4 color_interpolate_large(float sample, float4 one, float4 two, float4 three,
float4 four, float4 five, float4 six){
    float4 retcolor = make_float4(0);
    float percent = 0.0f;
    if(sample <= 0.2f){
        percent = (0.2f - sample) / 0.2f;
        retcolor = (percent)*one + (1.0f-percent) * two;
    }else if(sample > 0.2f && sample <= 0.3f){
        percent = (0.3f - sample) / 0.1f;
        retcolor = (percent)*two + (1.0f-percent) * three;
    }else if(sample > 0.3f && sample <= 0.4f){
        percent = (0.4f - sample) / 0.1f;
        retcolor = (percent)*three + (1.0f-percent) * four;
    }else if(sample > 0.4f && sample <= 0.5f){
        percent = (0.5f - sample) / 0.1f;
        retcolor = (percent)*four + (1.0f-percent) * five;
    }else{
        // Fix: the original `1.0` was a double literal, silently promoting
        // this expression to double arithmetic in a float-only device path.
        percent = (1.0f - sample) / 0.5f;
        retcolor = (percent)*five + (1.0f-percent) * six;
    }
    return retcolor;
}
// Transfer-function variant where each float4 argument packs a breakpoint:
// .x is the sample limit and .yzw the RGB colour (alpha is fixed at 0.1f).
// Within each segment the colour is linearly interpolated from the lower
// control point to the upper one. tfsize is currently unused.
__device__
float4 color_interpolate_1(float sample, float4 one, float4 two, float4 three,
float4 four, float4 five, float4 six, uint tfsize){
float4 retcolor = make_float4(0);
float percent = 0.0f;
// Unpack the limits and repack the arguments as pure RGBA colours.
float one_lim = one.x; one = make_float4(one.y, one.z, one.w, 0.1f);
float two_lim = two.x; two = make_float4(two.y, two.z, two.w, 0.1f);
float three_lim = three.x; three = make_float4(three.y, three.z, three.w, 0.1f);
float four_lim = four.x; four = make_float4(four.y, four.z, four.w, 0.1f);
float five_lim = five.x; five = make_float4(five.y, five.z, five.w, 0.1f);
float six_lim = six.x; six = make_float4(six.y, six.z, six.w, 0.1f);
// Standard lerp: low + t * (high - low), with t in [0, 1] inside a segment.
if(sample <= two_lim){
percent = (sample - one_lim) / (two_lim - one_lim);
retcolor = one + percent * (two - one);
}else if(sample > two_lim && sample <= three_lim){
percent = (sample - two_lim) / (three_lim - two_lim);
retcolor = two + percent * (three - two);
}else if(sample > three_lim && sample <= four_lim){
percent = (sample - three_lim) / (four_lim - three_lim);
retcolor = three + percent * (four - three);
}else if(sample > four_lim && sample <= five_lim){
percent = (sample - four_lim) / (five_lim - four_lim);
retcolor = four + percent * (five - four);
}else{
percent = (sample - five_lim) / (six_lim - five_lim);
retcolor = five + percent * (six - five);
}
return retcolor;
}
// Legacy transfer function for raw 16-bit samples: piecewise-linear blend
// between six control colours with fixed breakpoints at
// 25500/26500/27500/28500/65535. Kept alongside color_interpolate() for
// reference; each segment blends low*p + high*(1 - p), where p measures how
// far the sample sits below the segment's upper breakpoint.
__device__
float4 color_interpolate_old(float sample, float4 one, float4 two, float4 three,
float4 four, float4 five, float4 six){
    if (sample <= 25500.0f) {
        float p = (25500.0f - sample) / (25500.0f - 0.0f);
        return (p)*one + (1.0f-p) * two;
    }
    if (sample <= 26500.0f) {
        float p = (26500.0f - sample) / (26500.0f - 25500.0f);
        return (p)*two + (1.0f-p) * three;
    }
    if (sample <= 27500.0f) {
        float p = (27500.0f - sample) / (27500.0f - 26500.0f);
        return (p)*three + (1.0f-p) * four;
    }
    if (sample <= 28500.0f) {
        float p = (28500.0f - sample) / (28500.0f - 27500.0f);
        return (p)*four + (1.0f-p) * five;
    }
    float p = (65535.0f - sample) / (65535.0f - 28500.0f);
    return (p)*five + (1.0f-p) * six;
}
// Active transfer function used by d_render: maps a raw 16-bit sample onto a
// colour by piecewise-linear blending between six locally hard-coded control
// points (the "parameters for 3_1" set below). The one..six arguments and
// tfsize are accepted but immediately overwritten/ignored here.
__device__
float4 color_interpolate(float sample, float4 one, float4 two, float4 three,
float4 four, float4 five, float4 six, uint tfsize){
// Earlier table-driven implementation, kept for reference.
/*int index = 0;
for(index = 0; index < tfsize; index++)
if (sample < tf[index][0])
break;
float4 low_val = make_float4(tf[index-1][1], tf[index-1][2], tf[index-1][3], 0.1f);
float4 high_val = make_float4(tf[index][1], tf[index][2], tf[index][3], 0.1f);
float percent = (tf[index][0] - sample) / (tf[index][0] - tf[index-1][0]);
float4 retcolor = high_val - percent * (high_val - low_val);*/
//float percent = (sample - tf[index-1][0]) / (tf[index][0] - tf[index-1][0]);
//float4 retcolor = low_val - percent * (high_val - low_val);
float4 retcolor = make_float4(0);
float percent = 0.0f;
// Argument-driven unpacking, disabled in favour of the constants below.
/*float one_lim = one.x; one = make_float4(one.y, one.z, one.w, 0.1f);
float two_lim = two.x; two = make_float4(two.y, two.z, two.w, 0.1f);
float three_lim = three.x; three = make_float4(three.y, three.z, three.w, 0.1f);
float four_lim = four.x; four = make_float4(four.y, four.z, four.w, 0.1f);
float five_lim = five.x; five = make_float4(five.y, five.z, five.w, 0.1f);
float six_lim = six.x; six = make_float4(six.y, six.z, six.w, 0.1f);*/
// parameters for 2
/*float one_lim = 0.0f; one = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
float two_lim = 25500.0f; two = make_float4(0.0f, 0.3f, 0.3f, 0.0f);
float three_lim = 26500.0f; three = make_float4(0.5f, 0.5f, 1.0f, 0.1f);
float four_lim = 27500.0f; four = make_float4(1.0f, 1.0f, 1.0f, 0.4f);
float five_lim = 28500.0f; five = make_float4(1.0f, 0.2f, 0.2f, 0.94f);
float six_lim = 65535.0f; six = make_float4(1.0f, 0.0f, 0.0f, 1.0f);*/
// parameters for 3_1
float one_lim = 0.0f; one = make_float4(0.0f, 0.0f, 0.0f, 0.0f);
float two_lim = 45000.0f; two = make_float4(0.3f, 0.3f, 0.4f, 1.0f);
float three_lim = 46000.0f; three = make_float4(0.6f, 0.6f, 1.0f, 1.0f);
float four_lim = 47000.0f; four = make_float4(1.0f, 1.0f, 1.0f, 0.05f);
float five_lim = 48000.0f; five = make_float4(1.0f, 0.2f, 0.2f, 0.9f);
float six_lim = 65535.0f; six = make_float4(1.0f, 0.0f, 0.0f, 0.1f);
// NOTE(review): each percent divides by the segment's UPPER limit rather
// than its width (the width form survives in the trailing comments). For
// the narrow interior segments this keeps percent close to 1, biasing the
// blend heavily toward the lower colour — it may be deliberate tuning, but
// confirm against the intended transfer function. one_lim is unused.
if(sample <= two_lim){
percent = (two_lim - sample) / two_lim;//(two_lim - one_lim);
retcolor = (percent)*one + (1.0f-percent) * two;
}else if(sample > two_lim && sample <= three_lim){
percent = (three_lim - sample) / three_lim;//(three_lim - two_lim);
retcolor = (percent)*two + (1.0f-percent) * three;
}else if(sample > three_lim && sample <= four_lim){
percent = (four_lim - sample) / four_lim;//(four_lim - three_lim);
retcolor = (percent)*three + (1.0f-percent) * four;
}else if(sample > four_lim && sample <= five_lim){
percent = (five_lim - sample) / five_lim;//(five_lim - four_lim);
retcolor = (percent)*four + (1.0f-percent) * five;
}else{
percent = (six_lim - sample) / six_lim;//(six_lim - five_lim);
retcolor = (percent)*five + (1.0f-percent) * six;
}
return retcolor;
}
// Pack a float4 colour into a 32-bit ABGR pixel. Channels are normalised by
// (global_max + 2) before quantising to 8 bits, and alpha is forced to 0.5.
// NOTE(review): the red/green/blue parameters are unused here — confirm
// whether per-channel normalisation was intended at some point.
__device__ uint rgbaFloatToInt(float4 rgba, float global_max, float red, float green, float blue)
{
rgba.x = rgba.x / (global_max+2);
rgba.y = rgba.y / (global_max+2);
rgba.z = rgba.z / (global_max+2);
rgba.w = 0.5;
// Truncate each channel to 8 bits and pack as A|B|G|R (high to low byte).
return (uint(rgba.w*255)<<24) | (uint(rgba.z*255)<<16) | (uint(rgba.y*255)<<8) | uint(rgba.x*255);
}
// Ray-march the bound 3D volume texture and accumulate one colour per pixel.
// Launch with a 2D grid covering imageW x imageH threads.
// Writes the accumulated colour into d_iColors and its individual components
// into the per-channel buffers (consumed afterwards by a max reduction).
// Note: `data` and `density` are currently unused but kept for the existing
// call sites. (Removed the unused local `col_`, which was dead code.)
__global__ void
d_render(float4 *d_iColors, ushort *data,
float *d_iRed, float *d_iGreen, float *d_iBlue, uint imageW, uint imageH,
float density, float brightness, float4 one, float4 two, float4 three,
float4 four, float4 five, float4 six, uint tfsize/*, int type*/)
{
    const int maxSteps = 500;      // hard cap on samples taken along one ray
    const float tstep = 0.01f;     // step size along the ray
    const float3 boxMin = make_float3(-1.0f, -1.0f, -1.0f);
    const float3 boxMax = make_float3(1.0f, 1.0f, 1.0f);
    uint x = blockIdx.x*blockDim.x + threadIdx.x;
    uint y = blockIdx.y*blockDim.y + threadIdx.y;
    if ((x >= imageW) || (y >= imageH)) return;
    // Map the pixel to [-1, 1] normalised device coordinates.
    float u = (x / (float) imageW)*2.0f-1.0f;
    float v = (y / (float) imageH)*2.0f-1.0f;
    // Calculate the eye ray in world space.
    Ray eyeRay;
    eyeRay.o = make_float3(mul(c_invViewMatrix, make_float4(0.0f, 0.0f, 0.0f, 1.0f)));
    eyeRay.d = normalize(make_float3(u, v, -2.0f));
    eyeRay.d = mul(c_invViewMatrix, eyeRay.d);
    // Find the intersection with the unit box; skip pixels whose ray misses.
    float tnear, tfar;
    int hit = intersectBox(eyeRay, boxMin, boxMax, &tnear, &tfar);
    if (!hit) return;
    if (tnear < 0.0f) tnear = 0.0f;     // clamp to near plane
    // March along the ray from front to back, accumulating colour.
    float4 sum = make_float4(0.0f);
    float t = tnear;
    float3 pos = eyeRay.o + eyeRay.d*tnear;
    float3 step = eyeRay.d*tstep;
    float sample = 0;
    for(int i=0; i<maxSteps; i++) {
        // Remap position from [-1, 1] into [0, 1] texture coordinates.
        sample = tex3D(tex, pos.x*0.5f+0.5f, pos.y*0.5f+0.5f, pos.z*0.5f+0.5f);
        // Look up the transfer-function colour for this sample.
        float4 col = color_interpolate(sample,one,two,three,four,five,six, tfsize);
        // Pre-multiply alpha.
        col.x *= col.w;
        col.y *= col.w;
        col.z *= col.w;
        // Plain additive accumulation (the "over" operator factor is disabled).
        sum = sum + col;//*(1.0f - sum.w);
        t += tstep;
        if (t > tfar) break;
        pos += step;
    }
    sum *= brightness;
    d_iColors[y*imageW + x] = sum;
    d_iRed[y*imageW + x] = sum.x;
    d_iGreen[y*imageW + x] = sum.y;
    d_iBlue[y*imageW + x] = sum.z;
}
// Convert each accumulated float4 colour into a packed 32-bit pixel.
// Fix: the output pixel was indexed with y*imageH + x, which scrambles the
// image (and can index out of bounds) whenever imageW != imageH; rows of
// the output buffer are imageW wide, so index with y*imageW + x.
__global__
void create_image(uint *output, float4 *d_iColors, float global_max, float red, float green, float blue, uint imageW, uint imageH){
    uint x = blockIdx.x*blockDim.x + threadIdx.x;
    uint y = blockIdx.y*blockDim.y + threadIdx.y;
    if ((x >= imageW) || (y >= imageH)) return;
    output[y*imageW+x] = rgbaFloatToInt(d_iColors[y*imageW+x], global_max, red, green, blue);
}
/*void setup_cluster(void *cluster, cudaExtent volumeSize, uint image_size, cudaArray *d_volumeArray_cluster){
// Cluster setup
// create 3D array
cudaChannelFormatDesc channelDesc_cluster = cudaCreateChannelDesc<VolumeType>();
cutilSafeCall( cudaMalloc3DArray(&d_volumeArray_cluster, &channelDesc_cluster, volumeSize) );
// copy data to 3D array
cudaMemcpy3DParms copyParams = {0};
copyParams.srcPtr = make_cudaPitchedPtr(cluster, volumeSize.width*sizeof(VolumeType), volumeSize.width, volumeSize.height);
copyParams.dstArray = d_volumeArray_cluster;
copyParams.extent = volumeSize;
copyParams.kind = cudaMemcpyHostToDevice;
cutilSafeCall( cudaMemcpy3D(©Params) );
// set texture parameters
tex_cluster.normalized = true; // access with normalized texture coordinates
tex_cluster.filterMode = cudaFilterModePoint; // linear interpolation
tex_cluster.addressMode[0] = cudaAddressModeClamp; // clamp texture coordinates
tex_cluster.addressMode[1] = cudaAddressModeClamp;
// bind array to 3D texture
cutilSafeCall(cudaBindTextureToArray(tex_cluster, d_volumeArray_cluster, channelDesc_cluster));
}*/
// Upload the host volume into a device 3D array and bind it to the global
// `tex` texture reference used by d_render.
// Fix: the cudaMemcpy3D argument had been mangled to "©Params" (an HTML
// entity corruption of "&copyParams"), which does not compile.
// NOTE(review): d_volumeArray is passed by value, so the array handle
// allocated here never reaches the caller — confirm whether the caller
// needs it (e.g. to free it) and switch to cudaArray** if so.
void setup_volume(void *h_volume, cudaExtent volumeSize, uint image_size, cudaArray *d_volumeArray){
    // Create the 3D array.
    cudaChannelFormatDesc channelDesc = cudaCreateChannelDesc<VolumeType>();
    cutilSafeCall( cudaMalloc3DArray(&d_volumeArray, &channelDesc, volumeSize) );
    // Copy the host data into the 3D array.
    cudaMemcpy3DParms copyParams = {0};
    copyParams.srcPtr = make_cudaPitchedPtr(h_volume, volumeSize.width*sizeof(VolumeType), volumeSize.width, volumeSize.height);
    copyParams.dstArray = d_volumeArray;
    copyParams.extent = volumeSize;
    copyParams.kind = cudaMemcpyHostToDevice;
    cutilSafeCall( cudaMemcpy3D(&copyParams) );
    // Set texture parameters.
    tex.normalized = true;                       // access with normalized texture coordinates
    tex.filterMode = cudaFilterModePoint;        // nearest-neighbour sampling (Point mode, not linear)
    tex.addressMode[0] = cudaAddressModeClamp;   // clamp texture coordinates
    tex.addressMode[1] = cudaAddressModeClamp;
    // Bind the array to the 3D texture.
    cutilSafeCall(cudaBindTextureToArray(tex, d_volumeArray, channelDesc));
}
// Host-side driver for rendering one frame:
//   1. on the first call (set[0] == 0) uploads the volume and binds the 3D
//      texture via setup_volume,
//   2. clears the per-pixel colour buffer and the per-channel scratch buffers,
//   3. launches d_render to ray-march the volume into d_iColors,
//   4. reduces each colour channel to its maximum and launches create_image
//      to normalise/pack the result into d_output.
// gridSize/blockSize must together cover imageW x imageH threads.
// set[0] is a caller-owned "volume already uploaded" flag.
// NOTE(review): the kernel launches are not followed by an error check
// (cudaGetLastError), so launch failures would go unnoticed here.
void render_kernel(dim3 gridSize, dim3 blockSize, uint *d_output, /*uint *d_cluster, */float* d_iRed, float* d_oRed,
float* d_iGreen, float* d_oGreen, float* d_iBlue, float* d_oBlue, float4* d_iColors, unsigned short* data,
/*unsigned short *cluster_data, */uint imageW, uint imageH, float density, float brightness,
float4 one, float4 two, float4 three, float4 four, float4 five, float4 six, uint tfsize,
void *h_volume, /*void *cluster, */cudaExtent volumeSize, cudaArray *d_volumeArray, /*cudaArray *d_volumeArray_cluster, */int *set)
{
int size = imageH * imageW;
// One-time volume upload; the texture binding persists across frames.
if(set[0] == 0){
setup_volume(h_volume, volumeSize, size, d_volumeArray);
set[0] = 1;
}
// if(set[1] == 0){
// setup_cluster(cluster, volumeSize, size, d_volumeArray_cluster);
// set[1] = 1;
// }
/* clear colors buffers */
cutilSafeCall(cudaMemset(d_iColors, 0, imageH*imageW*sizeof(float4)));
cutilSafeCall(cudaMemset(d_iRed, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(cudaMemset(d_oRed, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(cudaMemset(d_iGreen, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(cudaMemset(d_oGreen, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(cudaMemset(d_iBlue, 0, imageH*imageW*sizeof(float)));
cutilSafeCall(cudaMemset(d_oBlue, 0, imageH*imageW*sizeof(float)));
d_render<<<gridSize, blockSize>>>(d_iColors, data, d_iRed, d_iGreen, d_iBlue, imageW, imageH, density, brightness,
one, two, three, four, five, six, tfsize/*, 0*/);
// Per-channel maxima; reduce_max returns a host float, so it presumably
// also synchronises with the launch above — confirm in reductionMax.
float max_red = reduce_max(d_oRed, d_iRed, size);
float max_green = reduce_max(d_oGreen, d_iGreen, size);
float max_blue = reduce_max(d_oBlue, d_iBlue, size);
float global_max = fmax(max_red, max_green);
global_max = fmax(global_max, max_blue);
create_image<<<gridSize, blockSize>>>(d_output, d_iColors, global_max, max_red, max_green, max_blue, imageW, imageH);
// render image
// d_render<<<gridSize, blockSize>>>(d_iColors, cluster_data, d_iRed, d_iGreen, d_iBlue, imageW, imageH, density, brightness,
// one, two, three, four, five, six, 1);
//
// max_red = reduce_max(d_oRed, d_iRed, size);
// max_green = reduce_max(d_oGreen, d_iGreen, size);
// max_blue = reduce_max(d_oBlue, d_iBlue, size);
//
// global_max = fmax(max_red, max_green);
// global_max = fmax(global_max, max_blue);
//
// create_image<<<gridSize, blockSize>>>(d_cluster, d_iColors, global_max, max_red, max_green, max_blue, imageW, imageH);
}
// Upload the inverse view matrix into the c_invViewMatrix constant-memory
// symbol read by d_render. sizeofMatrix is the byte count to copy —
// presumably sizeof(float3x4) (three float4 rows); confirm at the call site.
void copyInvViewMatrix(float *invViewMatrix, size_t sizeofMatrix)
{
cutilSafeCall( cudaMemcpyToSymbol(c_invViewMatrix, invViewMatrix, sizeofMatrix) );
}
#endif // #ifndef _VOLUMERENDER_KERNEL_CU_
|
33e633284c979ab4be64476ea8dd3b6100497206.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
// Andrew Gloster
// June 2018
// Function declarations for cuPentBatch routine to solve batches of pentadiagonal systems
// Copyright 2018 Andrew Gloster
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ---------------------------------------------------------------------
// Standard Libraries and Headers
// ---------------------------------------------------------------------
// ---------------------------------------------------------------------
// User Libraries and Headers
// ---------------------------------------------------------------------
#include "cuPentBatch.h"
// ---------------------------------------------------------------------
// Function to factorise the LHS matrix
// ---------------------------------------------------------------------
// In-place factorisation of batchCount independent pentadiagonal systems of
// size m. All diagonals use an interleaved (batch-major) layout: row i of
// system j lives at index i*batchCount + j, so consecutive threads touch
// consecutive addresses (coalesced). One thread factorises one whole system.
// The first two rows and last two rows are special-cased, so m >= 4 appears
// to be assumed. The no-op self-assignments from the original derivation
// (d[i] = d[i]; dl[i] = dl[i];) have been removed — they were redundant
// global-memory read/writes with no effect.
__global__ void pentFactorBatch
(
    double* ds, // Array containing the lower diagonal, 2 away from the main diagonal. First two elements are 0. Stored in interleaved format.
    double* dl, // Array containing the lower diagonal, 1 away from the main diagonal. First element is 0. Stored in interleaved format.
    double* d,  // Array containing the main diagonal. Stored in interleaved format.
    double* du, // Array containing the upper diagonal, 1 away from the main diagonal. Last element is 0. Stored in interleaved format.
    double* dw, // Array containing the upper diagonal, 2 away from the main diagonal. Last 2 elements are 0. Stored in interleaved format.
    int m,          // Size of the linear systems, number of unknowns
    int batchCount  // Number of linear systems
)
{
    // Interleaved row indices for the current and preceding rows.
    int rowCurrent;
    int rowPrevious;
    int rowSecondPrevious;

    // Thread index doubles as the system index.
    rowCurrent = blockDim.x * blockIdx.x + threadIdx.x;

    // Only factorise systems that exist.
    if (rowCurrent < batchCount)
    {
        // First row: normalise the two upper diagonals by the pivot.
        du[rowCurrent] = du[rowCurrent] / d[rowCurrent];
        dw[rowCurrent] = dw[rowCurrent] / d[rowCurrent];

        // Second row.
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;
        d[rowCurrent] = d[rowCurrent] - dl[rowCurrent] * du[rowPrevious];
        du[rowCurrent] = (du[rowCurrent] - dl[rowCurrent] * dw[rowPrevious]) / d[rowCurrent];
        dw[rowCurrent] = dw[rowCurrent] / d[rowCurrent];

        // Interior rows - note 0 indexing.
        for (int i = 2; i < m - 2; i++)
        {
            rowSecondPrevious = rowCurrent - batchCount;
            rowPrevious = rowCurrent;
            rowCurrent += batchCount;
            dl[rowCurrent] = dl[rowCurrent] - ds[rowCurrent] * du[rowSecondPrevious];
            d[rowCurrent] = d[rowCurrent] - ds[rowCurrent] * dw[rowSecondPrevious] - dl[rowCurrent] * du[rowPrevious];
            dw[rowCurrent] = dw[rowCurrent] / d[rowCurrent];
            du[rowCurrent] = (du[rowCurrent] - dl[rowCurrent] * dw[rowPrevious]) / d[rowCurrent];
        }

        // Second last row (dw runs off the matrix here, so no dw update).
        rowSecondPrevious = rowCurrent - batchCount;
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;
        dl[rowCurrent] = dl[rowCurrent] - ds[rowCurrent] * du[rowSecondPrevious];
        d[rowCurrent] = d[rowCurrent] - ds[rowCurrent] * dw[rowSecondPrevious] - dl[rowCurrent] * du[rowPrevious];
        du[rowCurrent] = (du[rowCurrent] - dl[rowCurrent] * dw[rowPrevious]) / d[rowCurrent];

        // Last row.
        rowSecondPrevious = rowCurrent - batchCount;
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;
        dl[rowCurrent] = dl[rowCurrent] - ds[rowCurrent] * du[rowSecondPrevious];
        d[rowCurrent] = d[rowCurrent] - ds[rowCurrent] * dw[rowSecondPrevious] - dl[rowCurrent] * du[rowPrevious];
    }
}
// ---------------------------------------------------------------------
// Function to solve the Ax = b system of pentadiagonal matrices
// ---------------------------------------------------------------------
// Solve the batched pentadiagonal systems using the diagonals produced by
// pentFactorBatch: forward substitution down the two lower diagonals, then
// backward substitution up the two upper diagonals. All arrays use the same
// interleaved (batch-major) layout as pentFactorBatch; the solution
// overwrites b in place. One thread solves one system.
// (Fixed the copy-pasted parameter comments, which all described `ds`, and
// removed the no-op b[i] = b[i] self-assignment.)
__global__ void pentSolveBatch
(
    double* ds, // Lower diagonal, 2 below the main, updated by pentFactorBatch
    double* dl, // Lower diagonal, 1 below the main, updated by pentFactorBatch
    double* d,  // Main diagonal, updated by pentFactorBatch
    double* du, // Upper diagonal, 1 above the main, updated by pentFactorBatch
    double* dw, // Upper diagonal, 2 above the main, updated by pentFactorBatch
    double* b,  // Dense array of RHS stored in interleaved format; overwritten with the solution
    int m,          // Size of the linear systems, number of unknowns
    int batchCount  // Number of linear systems
)
{
    // Interleaved row indices relative to the current row.
    int rowCurrent;
    int rowPrevious;
    int rowSecondPrevious;
    int rowAhead;
    int rowSecondAhead;

    // Thread index doubles as the system index.
    rowCurrent = blockDim.x * blockIdx.x + threadIdx.x;

    // Only solve systems that exist.
    if (rowCurrent < batchCount)
    {
        // --------------------------
        // Forward Substitution
        // --------------------------

        // First row.
        b[rowCurrent] = b[rowCurrent] / d[rowCurrent];

        // Second row.
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;
        b[rowCurrent] = (b[rowCurrent] - dl[rowCurrent] * b[rowPrevious]) / d[rowCurrent];

        // Remaining rows - note 0 indexing.
        for (int i = 2; i < m; i++)
        {
            rowSecondPrevious = rowCurrent - batchCount;
            rowPrevious = rowCurrent;
            rowCurrent += batchCount;
            b[rowCurrent] = (b[rowCurrent] - ds[rowCurrent] * b[rowSecondPrevious] - dl[rowCurrent] * b[rowPrevious]) / d[rowCurrent];
        }

        // --------------------------
        // Backward Substitution
        // --------------------------

        // Last row already holds its solution after the forward pass.

        // Second last row.
        rowAhead = rowCurrent;
        rowCurrent -= batchCount;
        b[rowCurrent] = b[rowCurrent] - du[rowCurrent] * b[rowAhead];

        // Remaining rows, walking back to the top.
        for (int i = m - 3; i >= 0; i -= 1)
        {
            rowSecondAhead = rowCurrent + batchCount;
            rowAhead = rowCurrent;
            rowCurrent -= batchCount;
            b[rowCurrent] = b[rowCurrent] - du[rowCurrent] * b[rowAhead] - dw[rowCurrent] * b[rowSecondAhead];
        }
    }
}
// ---------------------------------------------------------------------
// End of file
// ---------------------------------------------------------------------
| 33e633284c979ab4be64476ea8dd3b6100497206.cu | // Andrew Gloster
// June 2018
// Function declarations for cuPentBatch routine to solve batches of pentadiagonal systems
// Copyright 2018 Andrew Gloster
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ---------------------------------------------------------------------
// Standard Libraries and Headers
// ---------------------------------------------------------------------
// ---------------------------------------------------------------------
// User Libraries and Headers
// ---------------------------------------------------------------------
#include "cuPentBatch.h"
// ---------------------------------------------------------------------
// Function to factorise the LHS matrix
// ---------------------------------------------------------------------
// Factorise each pentadiagonal system in the batch in place (LU-style
// elimination without pivoting).  One thread factorises one whole system.
// All arrays use the interleaved (batch-major) layout: the entry for row r
// of system t lives at index r * batchCount + t, so consecutive threads in
// a warp touch consecutive addresses (coalesced access).
// NOTE(review): assumes m >= 5 (distinct first/second/interior/second-last/
// last rows) and nonzero pivots d[.] — neither is checked here.
__global__ void pentFactorBatch
(
    double* ds, // Lower diagonal, 2 away from the main diagonal. First two elements are 0. Interleaved format.
    double* dl, // Lower diagonal, 1 away from the main diagonal. First element is 0. Interleaved format.
    double* d,  // Main diagonal. Interleaved format.
    double* du, // Upper diagonal, 1 away from the main diagonal. Last element is 0. Interleaved format.
    double* dw, // Upper diagonal, 2 away from the main diagonal. Last 2 elements are 0. Interleaved format.
    int m,          // Size of the linear systems, number of unknowns
    int batchCount  // Number of linear systems
)
{
    // Interleaved indices of the current row and the one/two rows above it
    int rowCurrent;
    int rowPrevious;
    int rowSecondPrevious;

    // Starting index: one thread per system
    rowCurrent = blockDim.x * blockIdx.x + threadIdx.x;

    // Only want to factorise systems that exist
    if (rowCurrent < batchCount)
    {
        // First row.  (The redundant self-assignment of d that was here has
        // been removed; d is the pivot as-is.)
        du[rowCurrent] = du[rowCurrent] / d[rowCurrent];
        dw[rowCurrent] = dw[rowCurrent] / d[rowCurrent];

        // Second row index
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;

        // Second row.  (Redundant self-assignment of dl removed.)
        d[rowCurrent] = d[rowCurrent] - dl[rowCurrent] * du[rowPrevious];
        du[rowCurrent] = (du[rowCurrent] - dl[rowCurrent] * dw[rowPrevious]) / d[rowCurrent];
        dw[rowCurrent] = dw[rowCurrent] / d[rowCurrent];

        // Interior rows - Note 0 indexing
        for (int i = 2; i < m - 2; i++)
        {
            rowSecondPrevious = rowCurrent - batchCount;
            rowPrevious = rowCurrent;
            rowCurrent += batchCount;

            dl[rowCurrent] = dl[rowCurrent] - ds[rowCurrent] * du[rowSecondPrevious];
            d[rowCurrent] = d[rowCurrent] - ds[rowCurrent] * dw[rowSecondPrevious] - dl[rowCurrent] * du[rowPrevious];
            dw[rowCurrent] = dw[rowCurrent] / d[rowCurrent];
            du[rowCurrent] = (du[rowCurrent] - dl[rowCurrent] * dw[rowPrevious]) / d[rowCurrent];
        }

        // Second last row indexes
        rowSecondPrevious = rowCurrent - batchCount;
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;

        // Second last row (no dw term: it is 0 on this row)
        dl[rowCurrent] = dl[rowCurrent] - ds[rowCurrent] * du[rowSecondPrevious];
        d[rowCurrent] = d[rowCurrent] - ds[rowCurrent] * dw[rowSecondPrevious] - dl[rowCurrent] * du[rowPrevious];
        du[rowCurrent] = (du[rowCurrent] - dl[rowCurrent] * dw[rowPrevious]) / d[rowCurrent];

        // Last row indexes
        rowSecondPrevious = rowCurrent - batchCount;
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;

        // Last row (no du/dw terms: both are 0 on this row)
        dl[rowCurrent] = dl[rowCurrent] - ds[rowCurrent] * du[rowSecondPrevious];
        d[rowCurrent] = d[rowCurrent] - ds[rowCurrent] * dw[rowSecondPrevious] - dl[rowCurrent] * du[rowPrevious];
    }
}
// ---------------------------------------------------------------------
// Function to solve the Ax = b system of pentadiagonal matrices
// ---------------------------------------------------------------------
// Solve each factorised pentadiagonal system: forward substitution down the
// rows, then backward substitution back up, overwriting b with the solution.
// One thread solves one whole system; all arrays use the interleaved layout
// (row r of system t at index r * batchCount + t) and must already contain
// the output of pentFactorBatch.
// NOTE(review): assumes m >= 3 so the forward loop ends on the last row —
// not checked here.
__global__ void pentSolveBatch
(
    double* ds, // Lower diagonal (2 below main) as updated by pentFactorBatch. Interleaved format.
    double* dl, // Lower diagonal (1 below main) as updated by pentFactorBatch. Interleaved format.
    double* d,  // Main diagonal as updated by pentFactorBatch. Interleaved format.
    double* du, // Upper diagonal (1 above main) as updated by pentFactorBatch. Interleaved format.
    double* dw, // Upper diagonal (2 above main) as updated by pentFactorBatch. Interleaved format.
    double* b,  // Dense array of RHS vectors in interleaved format; overwritten with the solution.
    int m,          // Size of the linear systems, number of unknowns
    int batchCount  // Number of linear systems
)
{
    // Interleaved indices of the current row and its neighbours
    int rowCurrent;
    int rowPrevious;
    int rowSecondPrevious;
    int rowAhead;
    int rowSecondAhead;

    // Starting index: one thread per system
    rowCurrent = blockDim.x * blockIdx.x + threadIdx.x;

    // Only want to solve systems that exist
    if (rowCurrent < batchCount)
    {
        // --------------------------
        // Forward Substitution
        // --------------------------

        // First Row
        b[rowCurrent] = b[rowCurrent] / d[rowCurrent];

        // Second row index
        rowPrevious = rowCurrent;
        rowCurrent += batchCount;

        // Second row
        b[rowCurrent] = (b[rowCurrent] - dl[rowCurrent] * b[rowPrevious]) / d[rowCurrent];

        // Interior rows - Note 0 indexing
        for (int i = 2; i < m; i++)
        {
            rowSecondPrevious = rowCurrent - batchCount;
            rowPrevious = rowCurrent;
            rowCurrent += batchCount;
            b[rowCurrent] = (b[rowCurrent] - ds[rowCurrent] * b[rowSecondPrevious] - dl[rowCurrent] * b[rowPrevious]) / d[rowCurrent];
        }

        // --------------------------
        // Backward Substitution
        // --------------------------
        // rowCurrent now indexes the last row, which is already final after
        // the forward sweep (the redundant self-assignment of b removed).

        // Second last row index
        rowAhead = rowCurrent;
        rowCurrent -= batchCount;

        // Second last row
        b[rowCurrent] = b[rowCurrent] - du[rowCurrent] * b[rowAhead];

        // Interior points - Note row indexing
        for (int i = m - 3; i >= 0; i -= 1)
        {
            rowSecondAhead = rowCurrent + batchCount;
            rowAhead = rowCurrent;
            rowCurrent -= batchCount;
            b[rowCurrent] = b[rowCurrent] - du[rowCurrent] * b[rowAhead] - dw[rowCurrent] * b[rowSecondAhead];
        }
    }
}
// ---------------------------------------------------------------------
// End of file
// ---------------------------------------------------------------------
|
a672e935a628482c84c03522252b0f73654a3732.hip | // !!! This is a file automatically generated by hipify!!!
/**
* Copyright (c) 2017-present, Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "gpu_context.h"
#include "memory_helpers.h"
#include <hip/hip_runtime_api.h>
namespace hvvr {

// Initialise the HIP device for this process.
// Picks device 0 by default; when forceNonTCC is set, scans for the first
// device whose driver is not in TCC mode (graphics interop requires a
// non-TCC device on Windows) and selects that one instead.
// Returns false (after asserting) if the device flags could not be set.
bool GPUContext::cudaInit(bool forceNonTCC) {
    int deviceCount = 0;
    cutilSafeCall(hipGetDeviceCount(&deviceCount));
    int device = 0;
    if (forceNonTCC) {
        hipDeviceProp_t deviceProps = {};
        // if we're on Windows, search for a non-TCC device
        for (int n = 0; n < deviceCount; n++) {
            hipGetDeviceProperties(&deviceProps, n);
            if (deviceProps.tccDriver == 0) {
                device = n;
                break;
            }
        }
    }
    cutilSafeCall(hipSetDevice(device));
    // Allow mapping of pinned host allocations into the device address space
    uint32_t deviceFlags = 0;
    deviceFlags |= hipDeviceMapHost;
    auto error = hipSetDeviceFlags(deviceFlags);
    if (hipSuccess != error) {
        fprintf(stderr, "error %d: cuda call failed with %s\n", error, hipGetErrorString(error));
        assert(false);
        return false;
    }
    return true;
}

// Final device-side teardown step.
void GPUContext::cudaCleanup() {
    cutilSafeCall(hipProfilerStop()); // Flush profiling data for nvprof
}

// Resources start unmapped; cleanup() tears everything down on destruction.
GPUContext::GPUContext() : graphicsResourcesMapped(false) {}
GPUContext::~GPUContext() {
    cleanup();
}

// Collect the graphics-interop resources registered by every camera.
// Cameras without a results resource are skipped.
// NOTE(review): the cudaGraphicsResource_t type name survived hipify
// unconverted — confirm it aliases the HIP resource handle in the headers.
void GPUContext::getCudaGraphicsResources(std::vector<cudaGraphicsResource_t>& resources) {
    for (const auto& c : cameras) {
        if (c->resultsResource) {
            resources.push_back(c->resultsResource);
        }
    }
}

// Map all camera interop resources for device access on the default stream.
// Idempotent: does nothing if already mapped.
void GPUContext::interopMapResources() {
    if (!graphicsResourcesMapped) {
        std::vector<cudaGraphicsResource_t> resources;
        getCudaGraphicsResources(resources);
        hipStream_t stream = 0; // default stream
        if (resources.size() > 0) {
            cutilSafeCall(hipGraphicsMapResources((int)resources.size(), resources.data(), stream));
        }
        graphicsResourcesMapped = true;
    }
}

// Unmap all camera interop resources. Idempotent: does nothing if unmapped.
void GPUContext::interopUnmapResources() {
    if (graphicsResourcesMapped) {
        std::vector<cudaGraphicsResource_t> resources;
        getCudaGraphicsResources(resources);
        hipStream_t stream = 0; // default stream
        if (resources.size() > 0) {
            cutilSafeCall(hipGraphicsUnmapResources((int)resources.size(), resources.data(), stream));
        }
        graphicsResourcesMapped = false;
    }
}

// Release everything owned through the cameras: unmap and unregister interop
// resources, drop result images and device buffers, destroy per-camera
// streams, then clear the camera list.
void GPUContext::cleanup() {
    interopUnmapResources();
    for (auto& c : cameras) {
        if (c->resultsResource) {
            cutilSafeCall(hipGraphicsUnregisterResource(c->resultsResource));
        }
        c->resultImage.reset();
        // Replace the buffer with an empty one to free device memory
        c->d_sampleResults = GPUBuffer<uint32_t>();
        safeCudaStreamDestroy(c->stream);
    }
    cameras.clear();
}

// Return the GPUCamera bound to cameraPtr, creating it if none exists.
// `created` reports whether a new camera was constructed by this call.
GPUCamera* GPUContext::getCreateCamera(const Camera* cameraPtr, bool& created) {
    created = false;
    for (size_t i = 0; i < cameras.size(); ++i) {
        if (cameras[i]->cameraPtr == cameraPtr) {
            return cameras[i].get();
        }
    }
    cameras.emplace_back(std::make_unique<GPUCamera>(cameraPtr));
    created = true;
    return (cameras.end() - 1)->get();
}
} // namespace hvvr
| a672e935a628482c84c03522252b0f73654a3732.cu | /**
* Copyright (c) 2017-present, Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "gpu_context.h"
#include "memory_helpers.h"
#include <cuda_profiler_api.h>
namespace hvvr {

// Initialise the CUDA device for this process.
// Picks device 0 by default; when forceNonTCC is set, scans for the first
// device whose driver is not in TCC mode (graphics interop requires a
// non-TCC device on Windows) and selects that one instead.
// Returns false (after asserting) if the device flags could not be set.
bool GPUContext::cudaInit(bool forceNonTCC) {
    int deviceCount = 0;
    cutilSafeCall(cudaGetDeviceCount(&deviceCount));
    int device = 0;
    if (forceNonTCC) {
        cudaDeviceProp deviceProps = {};
        // if we're on Windows, search for a non-TCC device
        for (int n = 0; n < deviceCount; n++) {
            cudaGetDeviceProperties(&deviceProps, n);
            if (deviceProps.tccDriver == 0) {
                device = n;
                break;
            }
        }
    }
    cutilSafeCall(cudaSetDevice(device));
    // Allow mapping of pinned host allocations into the device address space
    uint32_t deviceFlags = 0;
    deviceFlags |= cudaDeviceMapHost;
    auto error = cudaSetDeviceFlags(deviceFlags);
    if (cudaSuccess != error) {
        fprintf(stderr, "error %d: cuda call failed with %s\n", error, cudaGetErrorString(error));
        assert(false);
        return false;
    }
    return true;
}

// Final device-side teardown step.
void GPUContext::cudaCleanup() {
    cutilSafeCall(cudaProfilerStop()); // Flush profiling data for nvprof
}

// Resources start unmapped; cleanup() tears everything down on destruction.
GPUContext::GPUContext() : graphicsResourcesMapped(false) {}
GPUContext::~GPUContext() {
    cleanup();
}

// Collect the graphics-interop resources registered by every camera.
// Cameras without a results resource are skipped.
void GPUContext::getCudaGraphicsResources(std::vector<cudaGraphicsResource_t>& resources) {
    for (const auto& c : cameras) {
        if (c->resultsResource) {
            resources.push_back(c->resultsResource);
        }
    }
}

// Map all camera interop resources for device access on the default stream.
// Idempotent: does nothing if already mapped.
void GPUContext::interopMapResources() {
    if (!graphicsResourcesMapped) {
        std::vector<cudaGraphicsResource_t> resources;
        getCudaGraphicsResources(resources);
        cudaStream_t stream = 0; // default stream
        if (resources.size() > 0) {
            cutilSafeCall(cudaGraphicsMapResources((int)resources.size(), resources.data(), stream));
        }
        graphicsResourcesMapped = true;
    }
}

// Unmap all camera interop resources. Idempotent: does nothing if unmapped.
void GPUContext::interopUnmapResources() {
    if (graphicsResourcesMapped) {
        std::vector<cudaGraphicsResource_t> resources;
        getCudaGraphicsResources(resources);
        cudaStream_t stream = 0; // default stream
        if (resources.size() > 0) {
            cutilSafeCall(cudaGraphicsUnmapResources((int)resources.size(), resources.data(), stream));
        }
        graphicsResourcesMapped = false;
    }
}

// Release everything owned through the cameras: unmap and unregister interop
// resources, drop result images and device buffers, destroy per-camera
// streams, then clear the camera list.
void GPUContext::cleanup() {
    interopUnmapResources();
    for (auto& c : cameras) {
        if (c->resultsResource) {
            cutilSafeCall(cudaGraphicsUnregisterResource(c->resultsResource));
        }
        c->resultImage.reset();
        // Replace the buffer with an empty one to free device memory
        c->d_sampleResults = GPUBuffer<uint32_t>();
        safeCudaStreamDestroy(c->stream);
    }
    cameras.clear();
}

// Return the GPUCamera bound to cameraPtr, creating it if none exists.
// `created` reports whether a new camera was constructed by this call.
GPUCamera* GPUContext::getCreateCamera(const Camera* cameraPtr, bool& created) {
    created = false;
    for (size_t i = 0; i < cameras.size(); ++i) {
        if (cameras[i]->cameraPtr == cameraPtr) {
            return cameras[i].get();
        }
    }
    cameras.emplace_back(std::make_unique<GPUCamera>(cameraPtr));
    created = true;
    return (cameras.end() - 1)->get();
}
} // namespace hvvr
|
82043d4e4e8f5f06b322ffe73e0724d64d039bbc.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <math.h>
// Euclidean distance between the points (x1, y1) and (x2, y2).
__device__ double dist(double x1, double y1, double x2, double y2){
    double dx = x1 - x2;
    double dy = y1 - y2;
    return sqrt(dx*dx + dy*dy);
}
// Sanity-check kernel: one block per output element.
// NOTE(review): the argument order means this computes
// dist((xs[i], 1), (ys[i], 1)) = |xs[i] - ys[i]|; confirm whether
// dist(xs[i], ys[i], 1.0, 1.0) (distance from (xs,ys) to (1,1)) was intended.
__global__ void testKernel(double *xs, double *ys, double *b){
    b[blockIdx.x] = dist(xs[blockIdx.x], 1.0, ys[blockIdx.x], 1.0);
}
/* Radial basis function phi(r) = r^3 evaluated between two points. */
__device__ double rbf(double x1, double y1, double x2, double y2){
    double r = dist(x1, y1, x2, y2);
    return pow(r, 3);
}
/* Second radial derivative of phi(r) = r^3, i.e. 6*r, between two points. */
__device__ double rbfd2(double x1, double y1, double x2, double y2){
    double r = dist(x1, y1, x2, y2);
    return 6*r;
}
/* Gaussian Elimination
 *
 * Solves the dense n x n row-major system A x = b in place: A is reduced to
 * upper-triangular form, b is overwritten with the transformed RHS, and the
 * solution is written to x.
 *
 * NOTE(review): rows 0 and 1 are swapped unconditionally before elimination —
 * presumably because A[0][0] = rbf(centre, centre) = 0 for the stencil
 * systems built here, so the swap avoids a zero pivot; confirm. There is no
 * pivoting beyond this, so a zero pivot elsewhere produces inf/nan.
 */
__device__ void gauss_elim(double *A, double *b, double *x, int n){
    int i, j, k;
    int idxi, idxj, idxij, idxik, idxjk;    // flattened row-major indices
    double m, diff;
    // Swap first and second rows
    int r1 = 0;
    int r2 = 1;
    double mtemp, vtemp;
    int idx1;
    int idx2;
    for (i = 0; i < n; ++i)
    {
        // matrix swap
        idx1 = r1*n + i;
        idx2 = r2*n + i;
        mtemp = A[idx1];
        A[idx1] = A[idx2];
        A[idx2] = mtemp;
    }
    // RHS vector swap
    vtemp = b[1];
    b[1] = b[0];
    b[0] = vtemp;
    // Gauss-Jordan Forward Elimination to Upper triangular matrix
    for (j = 0; j < n-1; j++){
        for (i = j+1; i < n; i++){
            idxij = i*n + j;
            idxj = j*n + j;
            m = A[idxij]/A[idxj];   // elimination multiplier for row i
            for (k = 0; k < n; k++){
                idxik = i*n + k;
                idxjk = j*n + k;
                A[idxik] = A[idxik] - m*A[idxjk];
            }
            b[i] = b[i] - m*b[j];
        }
    }
    // Back substitution
    for (i = n-1; i >= 0; i--){
        diff = b[i];
        for (j = i+1; j < n; j++){
            idxij = i*n + j;
            diff = diff - x[j]*A[idxij];
        }
        idxi = i*n + i;
        x[i] = diff/A[idxi];
    }
}
// Assemble one RBF-FD saddle-point stencil system and its RHS for stencil
// centre k:
//     [ A   P ]
//     [ P^T 0 ]
// where A(i,j) = phi(||x_i - x_j||) with phi(r) = r^3 over the l nearest
// neighbours of node k (row k of nn), and P holds the bivariate monomials of
// total degree <= deg evaluated at the neighbours, shifted by the stencil
// centre nn[k*l_max + 0].
//
// RHS entries 0..l-1 come from rbfd2 (6*r) at the centre; entries l+3 and
// l+5 are set to 2.0 — NOTE(review): under the exponent ordering generated
// below these positions are the x^2 and y^2 monomial rows only when
// deg == 2; confirm before using a different degree.
//
// BUG FIX: the P block previously read the centre's y-coordinate from
// nn[k + l_max + 0] instead of nn[k*l_max + 0], shifting y by the wrong node.
//
// xs, ys           - node coordinates
// nn               - nearest-neighbour table; row k holds l_max node indices
// full_mat1, RHS1  - output (l+pdim) x (l+pdim) row-major matrix and RHS
// l_max            - row stride of nn; l - stencil size; deg - poly degree
// k                - index of the stencil/system being built
__device__ void build_stencil_matrix(double* xs, double* ys,
                                     int* nn, double* full_mat1, double* RHS1,
                                     int l_max, int l, int deg, int k){
    int pdim = (deg+1)*(deg+2)/2;   // dimension of the polynomial space
    int i, j;
    // Make matrix 0
    for(i = 0; i < l + pdim; i++){
        for(j = 0; j < l + pdim; j++){
            full_mat1[i*(l+pdim) + j] = 0.0;
        }
    }
    // Build A (RBF block) and O (zero block)
    for(i = 0; i < l + pdim; i++){
        for(j = 0; j < l + pdim; j++){
            if(i < l && j < l){
                full_mat1[i*(l+pdim)+j] = rbf(
                    xs[nn[k*l_max+i]], ys[nn[k*l_max+i]],
                    xs[nn[k*l_max+j]], ys[nn[k*l_max+j]]);
            }
            else if(i >= l && j >= l){
                full_mat1[i*(l+pdim) + j] = 0.0;
            }
        }
    }
    // Build P matrix: columns walk the exponent pairs (xp, yp) with
    // xp + yp == d, from degree deg down to 0
    int d = deg;
    int xp = 0;
    int yp = d;
    for(j = l+pdim - 1; j >= l; j--){
        for(i = 0; i < l; i++){
            full_mat1[i*(l+pdim) + j] =
                pow(xs[nn[k*l_max+i]] - xs[nn[k*l_max+0]], xp) *
                pow(ys[nn[k*l_max+i]] - ys[nn[k*l_max+0]], yp);  // bug fix: was nn[k+l_max+0]
        }
        if(yp - 1 < 0){
            // exhausted this degree: drop to the next lower total degree
            --d;
            yp = d;
            xp = 0;
        }
        else{
            xp++;
            yp--;
        }
    }
    // Build P transpose matrix by mirroring the P block
    d = deg;
    xp = 0;
    yp = d;
    for(i = l+pdim - 1; i >= l; i--){
        for(j = 0; j < l; j++){
            full_mat1[i*(l+pdim) + j] = full_mat1[j*(l+pdim) + i];
        }
        if(yp - 1 < 0){
            --d;
            yp = d;
            xp = 0;
        }
        else{
            xp++;
            yp--;
        }
    }
    // RHS vector
    for(i = 0; i < l + pdim; i++){
        if(i < l){
            RHS1[i] = rbfd2(
                xs[nn[k*l_max+0]], ys[nn[k*l_max+0]],
                xs[nn[k*l_max+i]], ys[nn[k*l_max+i]]);
        }
        else if(i==l+3 || i==l+5){
            RHS1[i] = 2.0;
        }
        else{
            RHS1[i] = 0.0;
        }
    }
}
// Build and solve one RBF-FD stencil system per thread: assemble the local
// saddle-point matrix and RHS, then solve for the stencil weights.
// n                - number of stencils/systems
// weights_root / full_mat1_root / RHS1_root - global scratch; system t owns
//                    the slice starting at t*(l+pdim) (matrix: t*(l+pdim)^2)
// l_max            - row stride of nn; l - stencil size; deg - poly degree
__global__ void genDMatrix(int n, double* xs, double* ys,
                           int* nn, double* weights_root,
                           double* full_mat1_root, double* RHS1_root,
                           int l_max, int l, int deg){
    int my_id = blockDim.x*blockIdx.x + threadIdx.x;  // one thread per stencil
    int pdim = (deg+1)*(deg+2)/2;                     // polynomial-space dimension
    if(my_id <n){
        // Per-thread views into the global scratch arrays
        double* full_mat1 = &full_mat1_root[my_id * (l+pdim)*(l+pdim)];
        double* RHS1 = &RHS1_root[my_id * (l+pdim)];
        double* weights = &weights_root[my_id * (l+pdim)];
        build_stencil_matrix(xs, ys, nn, full_mat1, RHS1, l_max, l, deg, my_id);
        gauss_elim(full_mat1, RHS1, weights, l+pdim);
    }
}
| 82043d4e4e8f5f06b322ffe73e0724d64d039bbc.cu | #include <math.h>
// Euclidean distance between the points (x1, y1) and (x2, y2).
__device__ double dist(double x1, double y1, double x2, double y2){
    double dx = x1 - x2;
    double dy = y1 - y2;
    return sqrt(dx*dx + dy*dy);
}
// Sanity-check kernel: one block per output element.
// NOTE(review): the argument order means this computes
// dist((xs[i], 1), (ys[i], 1)) = |xs[i] - ys[i]|; confirm whether
// dist(xs[i], ys[i], 1.0, 1.0) (distance from (xs,ys) to (1,1)) was intended.
__global__ void testKernel(double *xs, double *ys, double *b){
    b[blockIdx.x] = dist(xs[blockIdx.x], 1.0, ys[blockIdx.x], 1.0);
}
/* Radial basis function phi(r) = r^3 evaluated between two points. */
__device__ double rbf(double x1, double y1, double x2, double y2){
    double r = dist(x1, y1, x2, y2);
    return pow(r, 3);
}
/* Second radial derivative of phi(r) = r^3, i.e. 6*r, between two points. */
__device__ double rbfd2(double x1, double y1, double x2, double y2){
    double r = dist(x1, y1, x2, y2);
    return 6*r;
}
/* Gaussian Elimination
 *
 * Solves the dense n x n row-major system A x = b in place: A is reduced to
 * upper-triangular form, b is overwritten with the transformed RHS, and the
 * solution is written to x.
 *
 * NOTE(review): rows 0 and 1 are swapped unconditionally before elimination —
 * presumably because A[0][0] = rbf(centre, centre) = 0 for the stencil
 * systems built here, so the swap avoids a zero pivot; confirm. There is no
 * pivoting beyond this, so a zero pivot elsewhere produces inf/nan.
 */
__device__ void gauss_elim(double *A, double *b, double *x, int n){
    int i, j, k;
    int idxi, idxj, idxij, idxik, idxjk;    // flattened row-major indices
    double m, diff;
    // Swap first and second rows
    int r1 = 0;
    int r2 = 1;
    double mtemp, vtemp;
    int idx1;
    int idx2;
    for (i = 0; i < n; ++i)
    {
        // matrix swap
        idx1 = r1*n + i;
        idx2 = r2*n + i;
        mtemp = A[idx1];
        A[idx1] = A[idx2];
        A[idx2] = mtemp;
    }
    // RHS vector swap
    vtemp = b[1];
    b[1] = b[0];
    b[0] = vtemp;
    // Gauss-Jordan Forward Elimination to Upper triangular matrix
    for (j = 0; j < n-1; j++){
        for (i = j+1; i < n; i++){
            idxij = i*n + j;
            idxj = j*n + j;
            m = A[idxij]/A[idxj];   // elimination multiplier for row i
            for (k = 0; k < n; k++){
                idxik = i*n + k;
                idxjk = j*n + k;
                A[idxik] = A[idxik] - m*A[idxjk];
            }
            b[i] = b[i] - m*b[j];
        }
    }
    // Back substitution
    for (i = n-1; i >= 0; i--){
        diff = b[i];
        for (j = i+1; j < n; j++){
            idxij = i*n + j;
            diff = diff - x[j]*A[idxij];
        }
        idxi = i*n + i;
        x[i] = diff/A[idxi];
    }
}
// Assemble one RBF-FD saddle-point stencil system and its RHS for stencil
// centre k:
//     [ A   P ]
//     [ P^T 0 ]
// where A(i,j) = phi(||x_i - x_j||) with phi(r) = r^3 over the l nearest
// neighbours of node k (row k of nn), and P holds the bivariate monomials of
// total degree <= deg evaluated at the neighbours, shifted by the stencil
// centre nn[k*l_max + 0].
//
// RHS entries 0..l-1 come from rbfd2 (6*r) at the centre; entries l+3 and
// l+5 are set to 2.0 — NOTE(review): under the exponent ordering generated
// below these positions are the x^2 and y^2 monomial rows only when
// deg == 2; confirm before using a different degree.
//
// BUG FIX: the P block previously read the centre's y-coordinate from
// nn[k + l_max + 0] instead of nn[k*l_max + 0], shifting y by the wrong node.
//
// xs, ys           - node coordinates
// nn               - nearest-neighbour table; row k holds l_max node indices
// full_mat1, RHS1  - output (l+pdim) x (l+pdim) row-major matrix and RHS
// l_max            - row stride of nn; l - stencil size; deg - poly degree
// k                - index of the stencil/system being built
__device__ void build_stencil_matrix(double* xs, double* ys,
                                     int* nn, double* full_mat1, double* RHS1,
                                     int l_max, int l, int deg, int k){
    int pdim = (deg+1)*(deg+2)/2;   // dimension of the polynomial space
    int i, j;
    // Make matrix 0
    for(i = 0; i < l + pdim; i++){
        for(j = 0; j < l + pdim; j++){
            full_mat1[i*(l+pdim) + j] = 0.0;
        }
    }
    // Build A (RBF block) and O (zero block)
    for(i = 0; i < l + pdim; i++){
        for(j = 0; j < l + pdim; j++){
            if(i < l && j < l){
                full_mat1[i*(l+pdim)+j] = rbf(
                    xs[nn[k*l_max+i]], ys[nn[k*l_max+i]],
                    xs[nn[k*l_max+j]], ys[nn[k*l_max+j]]);
            }
            else if(i >= l && j >= l){
                full_mat1[i*(l+pdim) + j] = 0.0;
            }
        }
    }
    // Build P matrix: columns walk the exponent pairs (xp, yp) with
    // xp + yp == d, from degree deg down to 0
    int d = deg;
    int xp = 0;
    int yp = d;
    for(j = l+pdim - 1; j >= l; j--){
        for(i = 0; i < l; i++){
            full_mat1[i*(l+pdim) + j] =
                pow(xs[nn[k*l_max+i]] - xs[nn[k*l_max+0]], xp) *
                pow(ys[nn[k*l_max+i]] - ys[nn[k*l_max+0]], yp);  // bug fix: was nn[k+l_max+0]
        }
        if(yp - 1 < 0){
            // exhausted this degree: drop to the next lower total degree
            --d;
            yp = d;
            xp = 0;
        }
        else{
            xp++;
            yp--;
        }
    }
    // Build P transpose matrix by mirroring the P block
    d = deg;
    xp = 0;
    yp = d;
    for(i = l+pdim - 1; i >= l; i--){
        for(j = 0; j < l; j++){
            full_mat1[i*(l+pdim) + j] = full_mat1[j*(l+pdim) + i];
        }
        if(yp - 1 < 0){
            --d;
            yp = d;
            xp = 0;
        }
        else{
            xp++;
            yp--;
        }
    }
    // RHS vector
    for(i = 0; i < l + pdim; i++){
        if(i < l){
            RHS1[i] = rbfd2(
                xs[nn[k*l_max+0]], ys[nn[k*l_max+0]],
                xs[nn[k*l_max+i]], ys[nn[k*l_max+i]]);
        }
        else if(i==l+3 || i==l+5){
            RHS1[i] = 2.0;
        }
        else{
            RHS1[i] = 0.0;
        }
    }
}
// Build and solve one RBF-FD stencil system per thread: assemble the local
// saddle-point matrix and RHS, then solve for the stencil weights.
// n                - number of stencils/systems
// weights_root / full_mat1_root / RHS1_root - global scratch; system t owns
//                    the slice starting at t*(l+pdim) (matrix: t*(l+pdim)^2)
// l_max            - row stride of nn; l - stencil size; deg - poly degree
__global__ void genDMatrix(int n, double* xs, double* ys,
                           int* nn, double* weights_root,
                           double* full_mat1_root, double* RHS1_root,
                           int l_max, int l, int deg){
    int my_id = blockDim.x*blockIdx.x + threadIdx.x;  // one thread per stencil
    int pdim = (deg+1)*(deg+2)/2;                     // polynomial-space dimension
    if(my_id <n){
        // Per-thread views into the global scratch arrays
        double* full_mat1 = &full_mat1_root[my_id * (l+pdim)*(l+pdim)];
        double* RHS1 = &RHS1_root[my_id * (l+pdim)];
        double* weights = &weights_root[my_id * (l+pdim)];
        build_stencil_matrix(xs, ys, nn, full_mat1, RHS1, l_max, l, deg, my_id);
        gauss_elim(full_mat1, RHS1, weights, l+pdim);
    }
}
|
7f4c518b504d6c8d08bc70292bc5a145b98db844.hip | // !!! This is a file automatically generated by hipify!!!
#ifndef __GQD_SIN_COS_CU__
#define __GQD_SIN_COS_CU__
#include "gqd_real.h"
//#include "common.hip"
extern __device__ __constant__ double qd_inv_fact[n_qd_inv_fact][4];
// Table of sin(k * pi/1024) and cos(k * pi/1024).
static __device__ __constant__ double qd_sin_tbl[256][4] = {
{ 3.0679567629659761e-03, 1.2690279085455925e-19, 5.2879464245328389e-36, -1.7820334081955298e-52},
{ 6.1358846491544753e-03, 9.0545257482474933e-20, 1.6260113133745320e-37, -9.7492001208767410e-55},
{ 9.2037547820598194e-03, -1.2136591693535934e-19, 5.5696903949425567e-36, 1.2505635791936951e-52},
{ 1.2271538285719925e-02, 6.9197907640283170e-19, -4.0203726713435555e-36, -2.0688703606952816e-52},
{ 1.5339206284988102e-02, -8.4462578865401696e-19, 4.6535897505058629e-35, -1.3923682978570467e-51},
{ 1.8406729905804820e-02, 7.4195533812833160e-19, 3.9068476486787607e-35, 3.6393321292898614e-52},
{ 2.1474080275469508e-02, -4.5407960207688566e-19, -2.2031770119723005e-35, 1.2709814654833741e-51},
{ 2.4541228522912288e-02, -9.1868490125778782e-20, 4.8706148704467061e-36, -2.8153947855469224e-52},
{ 2.7608145778965743e-02, -1.5932358831389269e-18, -7.0475416242776030e-35, -2.7518494176602744e-51},
{ 3.0674803176636626e-02, -1.6936054844107918e-20, -2.0039543064442544e-36, -1.6267505108658196e-52},
{ 3.3741171851377587e-02, -2.0096074292368340e-18, -1.3548237016537134e-34, 6.5554881875899973e-51},
{ 3.6807222941358832e-02, 6.1060088803529842e-19, -4.0448721259852727e-35, -2.1111056765671495e-51},
{ 3.9872927587739811e-02, 4.6657453481183289e-19, 3.4119333562288684e-35, 2.4007534726187511e-51},
{ 4.2938256934940820e-02, 2.8351940588660907e-18, 1.6991309601186475e-34, 6.8026536098672629e-51},
{ 4.6003182130914630e-02, -1.1182813940157788e-18, 7.5235020270378946e-35, 4.1187304955493722e-52},
{ 4.9067674327418015e-02, -6.7961037205182801e-19, -4.4318868124718325e-35, -9.9376628132525316e-52},
{ 5.2131704680283324e-02, -2.4243695291953779e-18, -1.3675405320092298e-34, -8.3938137621145070e-51},
{ 5.5195244349689941e-02, -1.3340299860891103e-18, -3.4359574125665608e-35, 1.1911462755409369e-51},
{ 5.8258264500435759e-02, 2.3299905496077492e-19, 1.9376108990628660e-36, -5.1273775710095301e-53},
{ 6.1320736302208578e-02, -5.1181134064638108e-19, -4.2726335866706313e-35, 2.6368495557440691e-51},
{ 6.4382630929857465e-02, -4.2325997000052705e-18, 3.3260117711855937e-35, 1.4736267706718352e-51},
{ 6.7443919563664065e-02, -6.9221796556983636e-18, 1.5909286358911040e-34, -7.8828946891835218e-51},
{ 7.0504573389613870e-02, -6.8552791107342883e-18, -1.9961177630841580e-34, 2.0127129580485300e-50},
{ 7.3564563599667426e-02, -2.7784941506273593e-18, -9.1240375489852821e-35, -1.9589752023546795e-51},
{ 7.6623861392031492e-02, 2.3253700287958801e-19, -1.3186083921213440e-36, -4.9927872608099673e-53},
{ 7.9682437971430126e-02, -4.4867664311373041e-18, 2.8540789143650264e-34, 2.8491348583262741e-51},
{ 8.2740264549375692e-02, 1.4735983530877760e-18, 3.7284093452233713e-35, 2.9024430036724088e-52},
{ 8.5797312344439894e-02, -3.3881893830684029e-18, -1.6135529531508258e-34, 7.7294651620588049e-51},
{ 8.8853552582524600e-02, -3.7501775830290691e-18, 3.7543606373911573e-34, 2.2233701854451859e-50},
{ 9.1908956497132724e-02, 4.7631594854274564e-18, 1.5722874642939344e-34, -4.8464145447831456e-51},
{ 9.4963495329639006e-02, -6.5885886400417564e-18, -2.1371116991641965e-34, 1.3819370559249300e-50},
{ 9.8017140329560604e-02, -1.6345823622442560e-18, -1.3209238810006454e-35, -3.5691060049117942e-52},
{ 1.0106986275482782e-01, 3.3164325719308656e-18, -1.2004224885132282e-34, 7.2028828495418631e-51},
{ 1.0412163387205457e-01, 6.5760254085385100e-18, 1.7066246171219214e-34, -4.9499340996893514e-51},
{ 1.0717242495680884e-01, 6.4424044279026198e-18, -8.3956976499698139e-35, -4.0667730213318321e-51},
{ 1.1022220729388306e-01, -5.6789503537823233e-19, 1.0380274792383233e-35, 1.5213997918456695e-52},
{ 1.1327095217756435e-01, 2.7100481012132900e-18, 1.5323292999491619e-35, 4.9564432810360879e-52},
{ 1.1631863091190477e-01, 1.0294914877509705e-18, -9.3975734948993038e-35, 1.3534827323719708e-52},
{ 1.1936521481099137e-01, -3.9500089391898506e-18, 3.5317349978227311e-34, 1.8856046807012275e-51},
{ 1.2241067519921620e-01, 2.8354501489965335e-18, 1.8151655751493305e-34, -2.8716592177915192e-51},
{ 1.2545498341154623e-01, 4.8686751763148235e-18, 5.9878105258097936e-35, -3.3534629098722107e-51},
{ 1.2849811079379317e-01, 3.8198603954988802e-18, -1.8627501455947798e-34, -2.4308161133527791e-51},
{ 1.3154002870288312e-01, -5.0039708262213813e-18, -1.2983004159245552e-34, -4.6872034915794122e-51},
{ 1.3458070850712620e-01, -9.1670359171480699e-18, 1.5916493007073973e-34, 4.0237002484366833e-51},
{ 1.3762012158648604e-01, 6.6253255866774482e-18, -2.3746583031401459e-34, -9.3703876173093250e-52},
{ 1.4065823933284924e-01, -7.9193932965524741e-18, 6.0972464202108397e-34, 2.4566623241035797e-50},
{ 1.4369503315029444e-01, 1.1472723016618666e-17, -5.1884954557576435e-35, -4.2220684832186607e-51},
{ 1.4673047445536175e-01, 3.7269471470465677e-18, 3.7352398151250827e-34, -4.0881822289508634e-51},
{ 1.4976453467732151e-01, 8.0812114131285151e-18, 1.2979142554917325e-34, 9.9380667487736254e-51},
{ 1.5279718525844344e-01, -7.6313573938416838e-18, 5.7714690450284125e-34, -3.7731132582986687e-50},
{ 1.5582839765426523e-01, 3.0351307187678221e-18, -1.0976942315176184e-34, 7.8734647685257867e-51},
{ 1.5885814333386145e-01, -4.0163200573859079e-18, -9.2840580257628812e-35, -2.8567420029274875e-51},
{ 1.6188639378011183e-01, 1.1850519643573528e-17, -5.0440990519162957e-34, 3.0510028707928009e-50},
{ 1.6491312048996992e-01, -7.0405288319166738e-19, 3.3211107491245527e-35, 8.6663299254686031e-52},
{ 1.6793829497473117e-01, 5.4284533721558139e-18, -3.3263339336181369e-34, -1.8536367335123848e-50},
{ 1.7096188876030122e-01, 9.1919980181759094e-18, -6.7688743940982606e-34, -1.0377711384318389e-50},
{ 1.7398387338746382e-01, 5.8151994618107928e-18, -1.6751014298301606e-34, -6.6982259797164963e-51},
{ 1.7700422041214875e-01, 6.7329300635408167e-18, 2.8042736644246623e-34, 3.6786888232793599e-51},
{ 1.8002290140569951e-01, 7.9701826047392143e-18, -7.0765920110524977e-34, 1.9622512608461784e-50},
{ 1.8303988795514095e-01, 7.7349918688637383e-18, -4.4803769968145083e-34, 1.1201148793328890e-50},
{ 1.8605515166344666e-01, -1.2564893007679552e-17, 7.5953844248530810e-34, -3.8471695132415039e-51},
{ 1.8906866414980622e-01, -7.6208955803527778e-18, -4.4792298656662981e-34, -4.4136824096645007e-50},
{ 1.9208039704989244e-01, 4.3348343941174903e-18, -2.3404121848139937e-34, 1.5789970962611856e-50},
{ 1.9509032201612828e-01, -7.9910790684617313e-18, 6.1846270024220713e-34, -3.5840270918032937e-50},
{ 1.9809841071795359e-01, -1.8434411800689445e-18, 1.4139031318237285e-34, 1.0542811125343809e-50},
{ 2.0110463484209190e-01, 1.1010032669300739e-17, -3.9123576757413791e-34, 2.4084852500063531e-51},
{ 2.0410896609281687e-01, 6.0941297773957752e-18, -2.8275409970449641e-34, 4.6101008563532989e-51},
{ 2.0711137619221856e-01, -1.0613362528971356e-17, 2.2456805112690884e-34, 1.3483736125280904e-50},
{ 2.1011183688046961e-01, 1.1561548476512844e-17, 6.0355905610401254e-34, 3.3329909618405675e-50},
{ 2.1311031991609136e-01, 1.2031873821063860e-17, -3.4142699719695635e-34, -1.2436262780241778e-50},
{ 2.1610679707621952e-01, -1.0111196082609117e-17, 7.2789545335189643e-34, -2.9347540365258610e-50},
{ 2.1910124015686980e-01, -3.6513812299150776e-19, -2.3359499418606442e-35, 3.1785298198458653e-52},
{ 2.2209362097320354e-01, -3.0337210995812162e-18, 6.6654668033632998e-35, 2.0110862322656942e-51},
{ 2.2508391135979283e-01, 3.9507040822556510e-18, 2.4287993958305375e-35, 5.6662797513020322e-52},
{ 2.2807208317088573e-01, 8.2361837339258012e-18, 6.9786781316397937e-34, -6.4122962482639504e-51},
{ 2.3105810828067111e-01, 1.0129787149761869e-17, -6.9359234615816044e-34, -2.8877355604883782e-50},
{ 2.3404195858354343e-01, -6.9922402696101173e-18, -5.7323031922750280e-34, 5.3092579966872727e-51},
{ 2.3702360599436720e-01, 8.8544852285039918e-18, 1.3588480826354134e-34, 1.0381022520213867e-50},
{ 2.4000302244874150e-01, -1.2137758975632164e-17, -2.6448807731703891e-34, -1.9929733800670473e-51},
{ 2.4298017990326390e-01, -8.7514315297196632e-18, -6.5723260373079431e-34, -1.0333158083172177e-50},
{ 2.4595505033579462e-01, -1.1129044052741832e-17, 4.3805998202883397e-34, 1.2219399554686291e-50},
{ 2.4892760574572018e-01, -8.1783436100020990e-18, 5.5666875261111840e-34, 3.8080473058748167e-50},
{ 2.5189781815421697e-01, -1.7591436032517039e-17, -1.0959681232525285e-33, 5.6209426020232456e-50},
{ 2.5486565960451457e-01, -1.3602299806901461e-19, -6.0073844642762535e-36, -3.0072751311893878e-52},
{ 2.5783110216215899e-01, 1.8480038630879957e-17, 3.3201664714047599e-34, -5.5547819290576764e-51},
{ 2.6079411791527551e-01, 4.2721420983550075e-18, 5.6782126934777920e-35, 3.1428338084365397e-51},
{ 2.6375467897483140e-01, -1.8837947680038700e-17, 1.3720129045754794e-33, -8.2763406665966033e-50},
{ 2.6671275747489837e-01, 2.0941222578826688e-17, -1.1303466524727989e-33, 1.9954224050508963e-50},
{ 2.6966832557291509e-01, 1.5765657618133259e-17, -6.9696142173370086e-34, -4.0455346879146776e-50},
{ 2.7262135544994898e-01, 7.8697166076387850e-18, 6.6179388602933372e-35, -2.7642903696386267e-51},
{ 2.7557181931095814e-01, 1.9320328962556582e-17, 1.3932094180100280e-33, 1.3617253920018116e-50},
{ 2.7851968938505312e-01, -1.0030273719543544e-17, 7.2592115325689254e-34, -1.0068516296655851e-50},
{ 2.8146493792575800e-01, -1.2322299641274009e-17, -1.0564788706386435e-34, 7.5137424251265885e-51},
{ 2.8440753721127182e-01, 2.2209268510661475e-17, -9.1823095629523708e-34, -5.2192875308892218e-50},
{ 2.8734745954472951e-01, 1.5461117367645717e-17, -6.3263973663444076e-34, -2.2982538416476214e-50},
{ 2.9028467725446239e-01, -1.8927978707774251e-17, 1.1522953157142315e-33, 7.4738655654716596e-50},
{ 2.9321916269425863e-01, 2.2385430811901833e-17, 1.3662484646539680e-33, -4.2451325253996938e-50},
{ 2.9615088824362384e-01, -2.0220736360876938e-17, -7.9252212533920413e-35, -2.8990577729572470e-51},
{ 2.9907982630804048e-01, 1.6701181609219447e-18, 8.6091151117316292e-35, 3.9931286230012102e-52},
{ 3.0200594931922808e-01, -1.7167666235262474e-17, 2.3336182149008069e-34, 8.3025334555220004e-51},
{ 3.0492922973540243e-01, -2.2989033898191262e-17, -1.4598901099661133e-34, 3.7760487693121827e-51},
{ 3.0784964004153487e-01, 2.7074088527245185e-17, 1.2568858206899284e-33, 7.2931815105901645e-50},
{ 3.1076715274961147e-01, 2.0887076364048513e-17, -3.0130590791065942e-34, 1.3876739009935179e-51},
{ 3.1368174039889146e-01, 1.4560447299968912e-17, 3.6564186898011595e-34, 1.1654264734999375e-50},
{ 3.1659337555616585e-01, 2.1435292512726283e-17, 1.2338169231377316e-33, 3.3963542100989293e-50},
{ 3.1950203081601569e-01, -1.3981562491096626e-17, 8.1730000697411350e-34, -7.7671096270210952e-50},
{ 3.2240767880106985e-01, -4.0519039937959398e-18, 3.7438302780296796e-34, 8.7936731046639195e-51},
{ 3.2531029216226293e-01, 7.9171249463765892e-18, -6.7576622068146391e-35, 2.3021655066929538e-51},
{ 3.2820984357909255e-01, -2.6693140719641896e-17, 7.8928851447534788e-34, 2.5525163821987809e-51},
{ 3.3110630575987643e-01, -2.7469465474778694e-17, -1.3401245916610206e-33, 6.5531762489976163e-50},
{ 3.3399965144200938e-01, 2.2598986806288142e-17, 7.8063057192586115e-34, 2.0427600895486683e-50},
{ 3.3688985339222005e-01, -4.2000940033475092e-19, -2.9178652969985438e-36, -1.1597376437036749e-52},
{ 3.3977688440682685e-01, 6.6028679499418282e-18, 1.2575009988669683e-34, 2.5569067699008304e-51},
{ 3.4266071731199438e-01, 1.9261518449306319e-17, -9.2754189135990867e-34, 8.5439996687390166e-50},
{ 3.4554132496398904e-01, 2.7251143672916123e-17, 7.0138163601941737e-34, -1.4176292197454015e-50},
{ 3.4841868024943456e-01, 3.6974420514204918e-18, 3.5532146878499996e-34, 1.9565462544501322e-50},
{ 3.5129275608556715e-01, -2.2670712098795844e-17, -1.6994216673139631e-34, -1.2271556077284517e-50},
{ 3.5416352542049040e-01, -1.6951763305764860e-17, 1.2772331777814617e-33, -3.3703785435843310e-50},
{ 3.5703096123343003e-01, -4.8218191137919166e-19, -4.1672436994492361e-35, -7.1531167149364352e-52},
{ 3.5989503653498817e-01, -1.7601687123839282e-17, 1.3375125473046791e-33, 7.9467815593584340e-50},
{ 3.6275572436739723e-01, -9.1668352663749849e-18, -7.4317843956936735e-34, -2.0199582511804564e-50},
{ 3.6561299780477385e-01, 1.6217898770457546e-17, 1.1286970151961055e-33, -7.1825287318139010e-50},
{ 3.6846682995337232e-01, 1.0463640796159268e-17, 2.0554984738517304e-35, 1.0441861305618769e-51},
{ 3.7131719395183754e-01, 3.4749239648238266e-19, -7.5151053042866671e-37, -2.8153468438650851e-53},
{ 3.7416406297145799e-01, 8.0114103761962118e-18, 5.3429599813406052e-34, 1.0351378796539210e-50},
{ 3.7700741021641826e-01, -2.7255302041956930e-18, 6.3646586445018137e-35, 8.3048657176503559e-52},
{ 3.7984720892405116e-01, 9.9151305855172370e-18, 4.8761409697224886e-34, 1.4025084000776705e-50},
{ 3.8268343236508978e-01, -1.0050772696461588e-17, -2.0605316302806695e-34, -1.2717724698085205e-50},
{ 3.8551605384391885e-01, 1.5177665396472313e-17, 1.4198230518016535e-33, 5.8955167159904235e-50},
{ 3.8834504669882630e-01, -1.0053770598398717e-17, 7.5942999255057131e-34, -3.1967974046654219e-50},
{ 3.9117038430225387e-01, 1.7997787858243995e-17, -1.0613482402609856e-33, -5.4582148817791032e-50},
{ 3.9399204006104810e-01, 9.7649241641239336e-18, -2.1233599441284617e-34, -5.5529836795340819e-51},
{ 3.9680998741671031e-01, 2.0545063670840126e-17, 6.1347058801922842e-34, 1.0733788150636430e-50},
{ 3.9962419984564684e-01, -1.5065497476189372e-17, -9.9653258881867298e-34, -5.7524323712725355e-50},
{ 4.0243465085941843e-01, 1.0902619339328270e-17, 7.3998528125989765e-34, 2.2745784806823499e-50},
{ 4.0524131400498986e-01, 9.9111401942899884e-18, -2.5169070895434648e-34, 9.2772984818436573e-53},
{ 4.0804416286497869e-01, -7.0006015137351311e-18, -1.4108207334268228e-34, 1.5175546997577136e-52},
{ 4.1084317105790397e-01, -2.4219835190355499e-17, -1.1418902925313314e-33, -2.0996843165093468e-50},
{ 4.1363831223843456e-01, -1.0393984940597871e-17, -1.1481681174503880e-34, -2.0281052851028680e-51},
{ 4.1642956009763721e-01, -2.5475580413131732e-17, -3.4482678506112824e-34, 7.1788619351865480e-51},
{ 4.1921688836322396e-01, -4.2232463750110590e-18, -3.6053023045255790e-34, -2.2209673210025631e-50},
{ 4.2200027079979968e-01, 4.3543266994128527e-18, 3.1734310272251190e-34, -1.3573247980738668e-50},
{ 4.2477968120910881e-01, 2.7462312204277281e-17, -4.6552847802111948e-34, 6.5961781099193122e-51},
{ 4.2755509343028208e-01, 9.4111898162954726e-18, -1.7446682426598801e-34, -2.2054492626480169e-51},
{ 4.3032648134008261e-01, 2.2259686974092690e-17, 8.5972591314085075e-34, -2.9420897889003020e-50},
{ 4.3309381885315196e-01, 1.1224283329847517e-17, 5.3223748041075651e-35, 5.3926192627014212e-51},
{ 4.3585707992225547e-01, 1.6230515450644527e-17, -6.4371449063579431e-35, -6.9102436481386757e-51},
{ 4.3861623853852766e-01, -2.0883315831075090e-17, -1.4259583540891877e-34, 6.3864763590657077e-52},
{ 4.4137126873171667e-01, 2.2360783886964969e-17, 1.1864769603515770e-34, -3.8087003266189232e-51},
{ 4.4412214457042926e-01, -2.4218874422178315e-17, 2.2205230838703907e-34, 9.2133035911356258e-51},
{ 4.4686884016237421e-01, -1.9222136142309382e-17, -4.4425678589732049e-35, -1.3673609292149535e-51},
{ 4.4961132965460660e-01, 4.8831924232035243e-18, 2.7151084498191381e-34, -1.5653993171613154e-50},
{ 4.5234958723377089e-01, -1.4827977472196122e-17, -7.6947501088972324e-34, 1.7656856882031319e-50},
{ 4.5508358712634384e-01, -1.2379906758116472e-17, 5.5289688955542643e-34, -8.5382312840209386e-51},
{ 4.5781330359887723e-01, -8.4554254922295949e-18, -6.3770394246764263e-34, 3.1778253575564249e-50},
{ 4.6053871095824001e-01, 1.8488777492177872e-17, -1.0527732154209725e-33, 3.3235593490947102e-50},
{ 4.6325978355186020e-01, -7.3514924533231707e-18, 6.7175396881707035e-34, 3.9594127612123379e-50},
{ 4.6597649576796618e-01, -3.3023547778235135e-18, 3.4904677050476886e-35, 3.4483855263874246e-51},
{ 4.6868882203582796e-01, -2.2949251681845054e-17, -1.1364757641823658e-33, 6.8840522501918612e-50},
{ 4.7139673682599764e-01, 6.5166781360690130e-18, 2.9457546966235984e-34, -6.2159717738836630e-51},
{ 4.7410021465055002e-01, -8.1451601548978075e-18, -3.4789448555614422e-34, -1.1681943974658508e-50},
{ 4.7679923006332214e-01, -1.0293515338305794e-17, -3.6582045008369952e-34, 1.7424131479176475e-50},
{ 4.7949375766015301e-01, 1.8419999662684771e-17, -1.3040838621273312e-33, 1.0977131822246471e-50},
{ 4.8218377207912277e-01, -2.5861500925520442e-17, -6.2913197606500007e-36, 4.0802359808684726e-52},
{ 4.8486924800079112e-01, -1.8034004203262245e-17, -3.5244276906958044e-34, -1.7138318654749246e-50},
{ 4.8755016014843594e-01, 1.4231090931273653e-17, -1.8277733073262697e-34, -1.5208291790429557e-51},
{ 4.9022648328829116e-01, -5.1496145643440404e-18, -3.6903027405284104e-34, 1.5172940095151304e-50},
{ 4.9289819222978404e-01, -1.0257831676562186e-18, 6.9520817760885069e-35, -2.4260961214090389e-51},
{ 4.9556526182577254e-01, -9.4323241942365362e-18, 3.1212918657699143e-35, 4.2009072375242736e-52},
{ 4.9822766697278187e-01, -1.6126383830540798e-17, -1.5092897319298871e-33, 1.1049298890895917e-50},
{ 5.0088538261124083e-01, -3.9604015147074639e-17, -2.2208395201898007e-33, 1.3648202735839417e-49},
{ 5.0353838372571758e-01, -1.6731308204967497e-17, -1.0140233644074786e-33, 4.0953071937671477e-50},
{ 5.0618664534515534e-01, -4.8321592986493711e-17, 9.2858107226642252e-34, 4.2699802401037005e-50},
{ 5.0883014254310699e-01, 4.7836968268014130e-17, -1.0727022928806035e-33, 2.7309374513672757e-50},
{ 5.1146885043797041e-01, -1.3088001221007579e-17, 4.0929033363366899e-34, -3.7952190153477926e-50},
{ 5.1410274419322177e-01, -4.5712707523615624e-17, 1.5488279442238283e-33, -2.5853959305521130e-50},
{ 5.1673179901764987e-01, 8.3018617233836515e-18, 5.8251027467695202e-34, -2.2812397190535076e-50},
{ 5.1935599016558964e-01, -5.5331248144171145e-17, -3.1628375609769026e-35, -2.4091972051188571e-51},
{ 5.2197529293715439e-01, -4.6555795692088883e-17, 4.6378980936850430e-34, -3.3470542934689532e-51},
{ 5.2458968267846895e-01, -4.3068869040082345e-17, -4.2013155291932055e-34, -1.5096069926700274e-50},
{ 5.2719913478190139e-01, -4.2202983480560619e-17, 8.5585916184867295e-34, 7.9974339336732307e-50},
{ 5.2980362468629472e-01, -4.8067841706482342e-17, 5.8309721046630296e-34, -8.9740761521756660e-51},
{ 5.3240312787719801e-01, -4.1020306135800895e-17, -1.9239996374230821e-33, -1.5326987913812184e-49},
{ 5.3499761988709726e-01, -5.3683132708358134e-17, -1.3900569918838112e-33, 2.7154084726474092e-50},
{ 5.3758707629564551e-01, -2.2617365388403054e-17, -5.9787279033447075e-34, 3.1204419729043625e-51},
{ 5.4017147272989285e-01, 2.7072447965935839e-17, 1.1698799709213829e-33, -5.9094668515881500e-50},
{ 5.4275078486451589e-01, 1.7148261004757101e-17, -1.3525905925200870e-33, 4.9724411290727323e-50},
{ 5.4532498842204646e-01, -4.1517817538384258e-17, -1.5318930219385941e-33, 6.3629921101413974e-50},
{ 5.4789405917310019e-01, -2.4065878297113363e-17, -3.5639213669362606e-36, -2.6013270854271645e-52},
{ 5.5045797293660481e-01, -8.3319903015807663e-18, -2.3058454035767633e-34, -2.1611290432369010e-50},
{ 5.5301670558002758e-01, -4.7061536623798204e-17, -1.0617111545918056e-33, -1.6196316144407379e-50},
{ 5.5557023301960218e-01, 4.7094109405616768e-17, -2.0640520383682921e-33, 1.2290163188567138e-49},
{ 5.5811853122055610e-01, 1.3481176324765226e-17, -5.5016743873011438e-34, -2.3484822739335416e-50},
{ 5.6066157619733603e-01, -7.3956418153476152e-18, 3.9680620611731193e-34, 3.1995952200836223e-50},
{ 5.6319934401383409e-01, 2.3835775146854829e-17, 1.3511793173769814e-34, 9.3201311581248143e-51},
{ 5.6573181078361323e-01, -3.4096079596590466e-17, -1.7073289744303546e-33, 8.9147089975404507e-50},
{ 5.6825895267013160e-01, -5.0935673642769248e-17, -1.6274356351028249e-33, 9.8183151561702966e-51},
{ 5.7078074588696726e-01, 2.4568151455566208e-17, -1.2844481247560350e-33, -1.8037634376936261e-50},
{ 5.7329716669804220e-01, 8.5176611669306400e-18, -6.4443208788026766e-34, 2.2546105543273003e-50},
{ 5.7580819141784534e-01, -3.7909495458942734e-17, -2.7433738046854309e-33, 1.1130841524216795e-49},
{ 5.7831379641165559e-01, -2.6237691512372831e-17, 1.3679051680738167e-33, -3.1409808935335900e-50},
{ 5.8081395809576453e-01, 1.8585338586613408e-17, 2.7673843114549181e-34, 1.9605349619836937e-50},
{ 5.8330865293769829e-01, 3.4516601079044858e-18, 1.8065977478946306e-34, -6.3953958038544646e-51},
{ 5.8579785745643886e-01, -3.7485501964311294e-18, 2.7965403775536614e-34, -7.1816936024157202e-51},
{ 5.8828154822264533e-01, -2.9292166725006846e-17, -2.3744954603693934e-33, -1.1571631191512480e-50},
{ 5.9075970185887428e-01, -4.7013584170659542e-17, 2.4808417611768356e-33, 1.2598907673643198e-50},
{ 5.9323229503979980e-01, 1.2892320944189053e-17, 5.3058364776359583e-34, 4.1141674699390052e-50},
{ 5.9569930449243336e-01, -1.3438641936579467e-17, -6.7877687907721049e-35, -5.6046937531684890e-51},
{ 5.9816070699634227e-01, 3.8801885783000657e-17, -1.2084165858094663e-33, -4.0456610843430061e-50},
{ 6.0061647938386897e-01, -4.6398198229461932e-17, -1.6673493003710801e-33, 5.1982824378491445e-50},
{ 6.0306659854034816e-01, 3.7323357680559650e-17, 2.7771920866974305e-33, -1.6194229649742458e-49},
{ 6.0551104140432555e-01, -3.1202672493305677e-17, 1.2761267338680916e-33, -4.0859368598379647e-50},
{ 6.0794978496777363e-01, 3.5160832362096660e-17, -2.5546242776778394e-34, -1.4085313551220694e-50},
{ 6.1038280627630948e-01, -2.2563265648229169e-17, 1.3185575011226730e-33, 8.2316691420063460e-50},
{ 6.1281008242940971e-01, -4.2693476568409685e-18, 2.5839965886650320e-34, 1.6884412005622537e-50},
{ 6.1523159058062682e-01, 2.6231417767266950e-17, -1.4095366621106716e-33, 7.2058690491304558e-50},
{ 6.1764730793780398e-01, -4.7478594510902452e-17, -7.2986558263123996e-34, -3.0152327517439154e-50},
{ 6.2005721176328921e-01, -2.7983410837681118e-17, 1.1649951056138923e-33, -5.4539089117135207e-50},
{ 6.2246127937414997e-01, 5.2940728606573002e-18, -4.8486411215945827e-35, 1.2696527641980109e-52},
{ 6.2485948814238634e-01, 3.3671846037243900e-17, -2.7846053391012096e-33, 5.6102718120012104e-50},
{ 6.2725181549514408e-01, 3.0763585181253225e-17, 2.7068930273498138e-34, -1.1172240309286484e-50},
{ 6.2963823891492698e-01, 4.1115334049626806e-17, -1.9167473580230747e-33, 1.1118424028161730e-49},
{ 6.3201873593980906e-01, -4.0164942296463612e-17, -7.2208643641736723e-34, 3.7828920470544344e-50},
{ 6.3439328416364549e-01, 1.0420901929280035e-17, 4.1174558929280492e-34, -1.4464152986630705e-51},
{ 6.3676186123628420e-01, 3.1419048711901611e-17, -2.2693738415126449e-33, -1.6023584204297388e-49},
{ 6.3912444486377573e-01, 1.2416796312271043e-17, -6.2095419626356605e-34, 2.7762065999506603e-50},
{ 6.4148101280858316e-01, -9.9883430115943310e-18, 4.1969230376730128e-34, 5.6980543799257597e-51},
{ 6.4383154288979150e-01, -3.2084798795046886e-17, -1.2595311907053305e-33, -4.0205885230841536e-50},
{ 6.4617601298331639e-01, -2.9756137382280815e-17, -1.0275370077518259e-33, 8.0852478665893014e-51},
{ 6.4851440102211244e-01, 3.9870270313386831e-18, 1.9408388509540788e-34, -5.1798420636193190e-51},
{ 6.5084668499638088e-01, 3.9714670710500257e-17, 2.9178546787002963e-34, 3.8140635508293278e-51},
{ 6.5317284295377676e-01, 8.5695642060026238e-18, -6.9165322305070633e-34, 2.3873751224185395e-50},
{ 6.5549285299961535e-01, 3.5638734426385005e-17, 1.2695365790889811e-33, 4.3984952865412050e-50},
{ 6.5780669329707864e-01, 1.9580943058468545e-17, -1.1944272256627192e-33, 2.8556402616436858e-50},
{ 6.6011434206742048e-01, -1.3960054386823638e-19, 6.1515777931494047e-36, 5.3510498875622660e-52},
{ 6.6241577759017178e-01, -2.2615508885764591e-17, 5.0177050318126862e-34, 2.9162532399530762e-50},
{ 6.6471097820334490e-01, -3.6227793598034367e-17, -9.0607934765540427e-34, 3.0917036342380213e-50},
{ 6.6699992230363747e-01, 3.5284364997428166e-17, -1.0382057232458238e-33, 7.3812756550167626e-50},
{ 6.6928258834663612e-01, -5.4592652417447913e-17, -2.5181014709695152e-33, -1.6867875999437174e-49},
{ 6.7155895484701844e-01, -4.0489037749296692e-17, 3.1995835625355681e-34, -1.4044414655670960e-50},
{ 6.7382900037875604e-01, 2.3091901236161086e-17, 5.7428037192881319e-34, 1.1240668354625977e-50},
{ 6.7609270357531592e-01, 3.7256902248049466e-17, 1.7059417895764375e-33, 9.7326347795300652e-50},
{ 6.7835004312986147e-01, 1.8302093041863122e-17, 9.5241675746813072e-34, 5.0328101116133503e-50},
{ 6.8060099779545302e-01, 2.8473293354522047e-17, 4.1331805977270903e-34, 4.2579030510748576e-50},
{ 6.8284554638524808e-01, -1.2958058061524531e-17, 1.8292386959330698e-34, 3.4536209116044487e-51},
{ 6.8508366777270036e-01, 2.5948135194645137e-17, -8.5030743129500702e-34, -6.9572086141009930e-50},
{ 6.8731534089175916e-01, -5.5156158714917168e-17, 1.1896489854266829e-33, -7.8505896218220662e-51},
{ 6.8954054473706694e-01, -1.5889323294806790e-17, 9.1242356240205712e-34, 3.8315454152267638e-50},
{ 6.9175925836415775e-01, 2.7406078472410668e-17, 1.3286508943202092e-33, 1.0651869129580079e-51},
{ 6.9397146088965400e-01, 7.4345076956280137e-18, 7.5061528388197460e-34, -1.5928000240686583e-50},
{ 6.9617713149146299e-01, -4.1224081213582889e-17, -3.1838716762083291e-35, -3.9625587412119131e-51},
{ 6.9837624940897280e-01, 4.8988282435667768e-17, 1.9134010413244152e-33, 2.6161153243793989e-50},
{ 7.0056879394324834e-01, 3.1027960192992922e-17, 9.5638250509179997e-34, 4.5896916138107048e-51},
{ 7.0275474445722530e-01, 2.5278294383629822e-18, -8.6985561210674942e-35, -5.6899862307812990e-51},
{ 7.0493408037590488e-01, 2.7608725585748502e-17, 2.9816599471629137e-34, 1.1533044185111206e-50},
{ 7.0710678118654757e-01, -4.8336466567264567e-17, 2.0693376543497068e-33, 2.4677734957341755e-50},
};
static __device__ __constant__ double qd_cos_tbl[256][4] = {
{ 9.9999529380957619e-01, -1.9668064285322189e-17, -6.3053955095883481e-34, 5.3266110855726731e-52},
{ 9.9998117528260111e-01, 3.3568103522895585e-17, -1.4740132559368063e-35, 9.8603097594755596e-52},
{ 9.9995764455196390e-01, -3.1527836866647287e-17, 2.6363251186638437e-33, 1.0007504815488399e-49},
{ 9.9992470183914450e-01, 3.7931082512668012e-17, -8.5099918660501484e-35, -4.9956973223295153e-51},
{ 9.9988234745421256e-01, -3.5477814872408538e-17, 1.7102001035303974e-33, -1.0725388519026542e-49},
{ 9.9983058179582340e-01, 1.8825140517551119e-17, -5.1383513457616937e-34, -3.8378827995403787e-50},
{ 9.9976940535121528e-01, 4.2681177032289012e-17, 1.9062302359737099e-33, -6.0221153262881160e-50},
{ 9.9969881869620425e-01, -2.9851486403799753e-17, -1.9084787370733737e-33, 5.5980260344029202e-51},
{ 9.9961882249517864e-01, -4.1181965521424734e-17, 2.0915365593699916e-33, 8.1403390920903734e-50},
{ 9.9952941750109314e-01, 2.0517917823755591e-17, -4.7673802585706520e-34, -2.9443604198656772e-50},
{ 9.9943060455546173e-01, 3.9644497752257798e-17, -2.3757223716722428e-34, -1.2856759011361726e-51},
{ 9.9932238458834954e-01, -4.2858538440845682e-17, 3.3235101605146565e-34, -8.3554272377057543e-51},
{ 9.9920475861836389e-01, 9.1796317110385693e-18, 5.5416208185868570e-34, 8.0267046717615311e-52},
{ 9.9907772775264536e-01, 2.1419007653587032e-17, -7.9048203318529618e-34, -5.3166296181112712e-50},
{ 9.9894129318685687e-01, -2.0610641910058638e-17, -1.2546525485913485e-33, -7.5175888806157064e-50},
{ 9.9879545620517241e-01, -1.2291693337075465e-17, 2.4468446786491271e-34, 1.0723891085210268e-50},
{ 9.9864021818026527e-01, -4.8690254312923302e-17, -2.9470881967909147e-34, -1.3000650761346907e-50},
{ 9.9847558057329477e-01, -2.2002931182778795e-17, -1.2371509454944992e-33, -2.4911225131232065e-50},
{ 9.9830154493389289e-01, -5.1869402702792278e-17, 1.0480195493633452e-33, -2.8995649143155511e-50},
{ 9.9811811290014918e-01, 2.7935487558113833e-17, 2.4423341255830345e-33, -6.7646699175334417e-50},
{ 9.9792528619859600e-01, 1.7143659778886362e-17, 5.7885840902887460e-34, -9.2601432603894597e-51},
{ 9.9772306664419164e-01, -2.6394475274898721e-17, -1.6176223087661783e-34, -9.9924942889362281e-51},
{ 9.9751145614030345e-01, 5.6007205919806937e-18, -5.9477673514685690e-35, -1.4166807162743627e-54},
{ 9.9729045667869021e-01, 9.1647695371101735e-18, 6.7824134309739296e-34, -8.6191392795543357e-52},
{ 9.9706007033948296e-01, 1.6734093546241963e-17, -1.3169951440780028e-33, 1.0311048767952477e-50},
{ 9.9682029929116567e-01, 4.7062820708615655e-17, 2.8412041076474937e-33, -8.0006155670263622e-50},
{ 9.9657114579055484e-01, 1.1707179088390986e-17, -7.5934413263024663e-34, 2.8474848436926008e-50},
{ 9.9631261218277800e-01, 1.1336497891624735e-17, 3.4002458674414360e-34, 7.7419075921544901e-52},
{ 9.9604470090125197e-01, 2.2870031707670695e-17, -3.9184839405013148e-34, -3.7081260416246375e-50},
{ 9.9576741446765982e-01, -2.3151908323094359e-17, -1.6306512931944591e-34, -1.5925420783863192e-51},
{ 9.9548075549192694e-01, 3.2084621412226554e-18, -4.9501292146013023e-36, -2.7811428850878516e-52},
{ 9.9518472667219693e-01, -4.2486913678304410e-17, 1.3315510772504614e-33, 6.7927987417051888e-50},
{ 9.9487933079480562e-01, 4.2130813284943662e-18, -4.2062597488288452e-35, 2.5157064556087620e-51},
{ 9.9456457073425542e-01, 3.6745069641528058e-17, -3.0603304105471010e-33, 1.0397872280487526e-49},
{ 9.9424044945318790e-01, 4.4129423472462673e-17, -3.0107231708238066e-33, 7.4201582906861892e-50},
{ 9.9390697000235606e-01, -1.8964849471123746e-17, -1.5980853777937752e-35, -8.5374807150597082e-52},
{ 9.9356413552059530e-01, 2.9752309927797428e-17, -4.5066707331134233e-34, -3.3548191633805036e-50},
{ 9.9321194923479450e-01, 3.3096906261272262e-17, 1.5592811973249567e-33, 1.4373977733253592e-50},
{ 9.9285041445986510e-01, -1.4094517733693302e-17, -1.1954558131616916e-33, 1.8761873742867983e-50},
{ 9.9247953459870997e-01, 3.1093055095428906e-17, -1.8379594757818019e-33, -3.9885758559381314e-51},
{ 9.9209931314219180e-01, -3.9431926149588778e-17, -6.2758062911047230e-34, -1.2960929559212390e-50},
{ 9.9170975366909953e-01, -2.3372891311883661e-18, 2.7073298824968591e-35, -1.2569459441802872e-51},
{ 9.9131085984611544e-01, -2.5192111583372105e-17, -1.2852471567380887e-33, 5.2385212584310483e-50},
{ 9.9090263542778001e-01, 1.5394565094566704e-17, -1.0799984133184567e-33, 2.7451115960133595e-51},
{ 9.9048508425645709e-01, -5.5411437553780867e-17, -1.4614017210753585e-33, -3.8339374397387620e-50},
{ 9.9005821026229712e-01, -1.7055485906233963e-17, 1.3454939685758777e-33, 7.3117589137300036e-50},
{ 9.8962201746320089e-01, -5.2398217968132530e-17, 1.3463189211456219e-33, 5.8021640554894872e-50},
{ 9.8917650996478101e-01, -4.0987309937047111e-17, -4.4857560552048437e-34, -3.9414504502871125e-50},
{ 9.8872169196032378e-01, -1.0976227206656125e-17, 3.2311342577653764e-34, 9.6367946583575041e-51},
{ 9.8825756773074946e-01, 2.7030607784372632e-17, 7.7514866488601377e-35, 2.1019644956864938e-51},
{ 9.8778414164457218e-01, -2.3600693397159021e-17, -1.2323283769707861e-33, 3.0130900716803339e-50},
{ 9.8730141815785843e-01, -5.2332261255715652e-17, -2.7937644333152473e-33, 1.2074160567958408e-49},
{ 9.8680940181418553e-01, -5.0287214351061075e-17, -2.2681526238144461e-33, 4.4003694320169133e-50},
{ 9.8630809724459867e-01, -2.1520877103013341e-17, 1.1866528054187716e-33, -7.8532199199813836e-50},
{ 9.8579750916756748e-01, -5.1439452979953012e-17, 2.6276439309996725e-33, 7.5423552783286347e-50},
{ 9.8527764238894122e-01, 2.3155637027900207e-17, -7.5275971545764833e-34, 1.0582231660456094e-50},
{ 9.8474850180190421e-01, 1.0548144061829957e-17, 2.8786145266267306e-34, -3.6782210081466112e-51},
{ 9.8421009238692903e-01, 4.7983922627050691e-17, 2.2597419645070588e-34, 1.7573875814863400e-50},
{ 9.8366241921173025e-01, 1.9864948201635255e-17, -1.0743046281211033e-35, 1.7975662796558100e-52},
{ 9.8310548743121629e-01, 4.2170007522888628e-17, 8.2396265656440904e-34, -8.0803700139096561e-50},
{ 9.8253930228744124e-01, 1.5149580813777224e-17, -4.1802771422186237e-34, -2.2150174326226160e-50},
{ 9.8196386910955524e-01, 2.1108443711513084e-17, -1.5253013442896054e-33, -6.8388082079337969e-50},
{ 9.8137919331375456e-01, 1.3428163260355633e-17, -6.5294290469962986e-34, 2.7965412287456268e-51},
{ 9.8078528040323043e-01, 1.8546939997825006e-17, -1.0696564445530757e-33, 6.6668174475264961e-50},
{ 9.8018213596811743e-01, -3.6801786963856159e-17, 6.3245171387992842e-34, 1.8600176137175971e-50},
{ 9.7956976568544052e-01, 1.5573991584990420e-17, -1.3401066029782990e-33, -1.7263702199862149e-50},
{ 9.7894817531906220e-01, -2.3817727961148053e-18, -1.0694750370381661e-34, -8.2293047196087462e-51},
{ 9.7831737071962765e-01, -2.1623082233344895e-17, 1.0970403012028032e-33, 7.7091923099369339e-50},
{ 9.7767735782450993e-01, 5.0514136167059628e-17, -1.3254751701428788e-33, 7.0161254312124538e-50},
{ 9.7702814265775439e-01, -4.3353875751555997e-17, 5.4948839831535478e-34, -9.2755263105377306e-51},
{ 9.7636973133002114e-01, 9.3093931526213780e-18, -4.1184949155685665e-34, -3.1913926031393690e-50},
{ 9.7570213003852857e-01, -2.5572556081259686e-17, -9.3174244508942223e-34, -8.3675863211646863e-51},
{ 9.7502534506699412e-01, 2.6642660651899135e-17, 1.7819392739353853e-34, -3.3159625385648947e-51},
{ 9.7433938278557586e-01, 2.3041221476151512e-18, 1.0758686005031430e-34, 5.1074116432809478e-51},
{ 9.7364424965081198e-01, -5.1729808691005871e-17, -1.5508473005989887e-33, -1.6505125917675401e-49},
{ 9.7293995220556018e-01, -3.1311211122281800e-17, -2.6874087789006141e-33, -2.1652434818822145e-51},
{ 9.7222649707893627e-01, 3.6461169785938221e-17, 3.0309636883883133e-33, -1.2702716907967306e-51},
{ 9.7150389098625178e-01, -7.9865421122289046e-18, -4.3628417211263380e-34, 3.4307517798759352e-51},
{ 9.7077214072895035e-01, -4.7992163325114922e-17, 3.0347528910975783e-33, 8.5989199506479701e-50},
{ 9.7003125319454397e-01, 1.8365300348428844e-17, -1.4311097571944918e-33, 8.5846781998740697e-51},
{ 9.6928123535654853e-01, -4.5663660261927896e-17, 9.6147526917239387e-34, 8.1267605207871330e-51},
{ 9.6852209427441727e-01, 4.9475074918244771e-17, 2.8558738351911241e-33, 6.2948422316507461e-50},
{ 9.6775383709347551e-01, -4.5512132825515820e-17, -1.4127617988719093e-33, -8.4620609089704578e-50},
{ 9.6697647104485207e-01, 3.8496228837337864e-17, -5.3881631542745647e-34, -3.5221863171458959e-50},
{ 9.6619000344541250e-01, 5.1298840401665493e-17, 1.4564075904769808e-34, 1.0095973971377432e-50},
{ 9.6539444169768940e-01, -2.3745389918392156e-17, 5.9221515590053862e-34, -3.8811192556231094e-50},
{ 9.6458979328981276e-01, -3.4189470735959786e-17, 2.2982074155463522e-33, -4.5128791045607634e-50},
{ 9.6377606579543984e-01, 2.6463950561220029e-17, -2.9073234590199323e-36, -1.2938328629395601e-52},
{ 9.6295326687368388e-01, 8.9341960404313634e-18, -3.9071244661020126e-34, 1.6212091116847394e-50},
{ 9.6212140426904158e-01, 1.5236770453846305e-17, -1.3050173525597142e-33, 7.9016122394092666e-50},
{ 9.6128048581132064e-01, 2.0933955216674039e-18, 1.0768607469015692e-34, -5.9453639304361774e-51},
{ 9.6043051941556579e-01, 2.4653904815317185e-17, -1.3792169410906322e-33, -4.7726598378506903e-51},
{ 9.5957151308198452e-01, 1.1000640085000957e-17, -4.2036030828223975e-34, 4.0023704842606573e-51},
{ 9.5870347489587160e-01, -4.3685014392372053e-17, 2.2001800662729131e-33, -1.0553721324358075e-49},
{ 9.5782641302753291e-01, -1.7696710075371263e-17, 1.9164034110382190e-34, 8.1489235071754813e-51},
{ 9.5694033573220882e-01, 4.0553869861875701e-17, -1.7147013364302149e-33, 2.5736745295329455e-50},
{ 9.5604525134999641e-01, 3.7705045279589067e-17, 1.9678699997347571e-33, 8.5093177731230180e-50},
{ 9.5514116830577067e-01, 5.0088652955014668e-17, -2.6983181838059211e-33, 1.0102323575596493e-49},
{ 9.5422809510910567e-01, -3.7545901690626874e-17, 1.4951619241257764e-33, -8.2717333151394973e-50},
{ 9.5330604035419386e-01, -2.5190738779919934e-17, -1.4272239821134379e-33, -4.6717286809283155e-50},
{ 9.5237501271976588e-01, -2.0269300462299272e-17, -1.0635956887246246e-33, -3.5514537666487619e-50},
{ 9.5143502096900834e-01, 3.1350584123266695e-17, -2.4824833452737813e-33, 9.5450335525380613e-51},
{ 9.5048607394948170e-01, 1.9410097562630436e-17, -8.1559393949816789e-34, -1.0501209720164562e-50},
{ 9.4952818059303667e-01, -7.5544151928043298e-18, -5.1260245024046686e-34, 1.8093643389040406e-50},
{ 9.4856134991573027e-01, 2.0668262262333232e-17, -5.9440730243667306e-34, 1.4268853111554300e-50},
{ 9.4758559101774109e-01, 4.3417993852125991e-17, -2.7728667889840373e-34, 5.5709160196519968e-51},
{ 9.4660091308328353e-01, 3.5056800210680730e-17, 9.8578536940318117e-34, 6.6035911064585197e-50},
{ 9.4560732538052128e-01, 4.6019102478523738e-17, -6.2534384769452059e-34, 1.5758941215779961e-50},
{ 9.4460483726148026e-01, 8.8100545476641165e-18, 5.2291695602757842e-34, -3.3487256018407123e-50},
{ 9.4359345816196039e-01, -2.4093127844404214e-17, 1.0283279856803939e-34, -2.3398232614531355e-51},
{ 9.4257319760144687e-01, 1.3235564806436886e-17, -5.7048262885386911e-35, 3.9947050442753744e-51},
{ 9.4154406518302081e-01, -2.7896379547698341e-17, 1.6273236356733898e-33, -5.3075944708471203e-51},
{ 9.4050607059326830e-01, 2.8610421567116268e-17, 2.9261501147538827e-33, -2.6849867690896925e-50},
{ 9.3945922360218992e-01, -7.0152867943098655e-18, -5.6395693818011210e-34, 3.5568142678987651e-50},
{ 9.3840353406310806e-01, 5.4242545044795490e-17, -1.9039966607859759e-33, -1.5627792988341215e-49},
{ 9.3733901191257496e-01, -3.6570926284362776e-17, -1.1902940071273247e-33, -1.1215082331583223e-50},
{ 9.3626566717027826e-01, -1.3013766145497654e-17, 5.2229870061990595e-34, -3.3972777075634108e-51},
{ 9.3518350993894761e-01, -3.2609395302485065e-17, -8.1813015218875245e-34, 5.5642140024928139e-50},
{ 9.3409255040425887e-01, 4.4662824360767511e-17, -2.5903243047396916e-33, 8.1505209004343043e-50},
{ 9.3299279883473885e-01, 4.2041415555384355e-17, 9.0285896495521276e-34, 5.3019984977661259e-50},
{ 9.3188426558166815e-01, -4.0785944377318095e-17, 1.7631450298754169e-33, 2.5776403305507453e-50},
{ 9.3076696107898371e-01, 1.9703775102838329e-17, 6.5657908718278205e-34, -1.9480347966259524e-51},
{ 9.2964089584318121e-01, 5.1282530016864107e-17, 2.3719739891916261e-34, -1.7230065426917127e-50},
{ 9.2850608047321559e-01, -2.3306639848485943e-17, -7.7799084333208503e-34, -5.8597558009300305e-50},
{ 9.2736252565040111e-01, -2.7677111692155437e-17, 2.2110293450199576e-34, 2.0349190819680613e-50},
{ 9.2621024213831138e-01, -3.7303754586099054e-17, 2.0464457809993405e-33, 1.3831799631231817e-49},
{ 9.2504924078267758e-01, 6.0529447412576159e-18, -8.8256517760278541e-35, 1.8285462122388328e-51},
{ 9.2387953251128674e-01, 1.7645047084336677e-17, -5.0442537321586818e-34, -4.0478677716823890e-50},
{ 9.2270112833387852e-01, 5.2963798918539814e-17, -5.7135699628876685e-34, 3.0163671797219087e-50},
{ 9.2151403934204190e-01, 4.1639843390684644e-17, 1.1891485604702356e-33, 2.0862437594380324e-50},
{ 9.2031827670911059e-01, -2.7806888779036837e-17, 2.7011013677071274e-33, 1.1998578792455499e-49},
{ 9.1911385169005777e-01, -2.6496484622344718e-17, 6.5403604763461920e-34, -2.8997180201186078e-50},
{ 9.1790077562139050e-01, -3.9074579680849515e-17, 2.3004636541490264e-33, 3.9851762744443107e-50},
{ 9.1667905992104270e-01, -4.1733978698287568e-17, 1.2094444804381172e-33, 4.9356916826097816e-50},
{ 9.1544871608826783e-01, -1.3591056692900894e-17, 5.9923027475594735e-34, 2.1403295925962879e-50},
{ 9.1420975570353069e-01, -3.6316182527814423e-17, -1.9438819777122554e-33, 2.8340679287728316e-50},
{ 9.1296219042839821e-01, -4.7932505228039469e-17, -1.7753551889428638e-33, 4.0607782903868160e-51},
{ 9.1170603200542988e-01, -2.6913273175034130e-17, -5.1928101916162528e-35, 1.1338175936090630e-51},
{ 9.1044129225806725e-01, -5.0433041673313820e-17, 1.0938746257404305e-33, 9.5378272084170731e-51},
{ 9.0916798309052238e-01, -3.6878564091359894e-18, 2.9951330310507693e-34, -1.2225666136919926e-50},
{ 9.0788611648766626e-01, -4.9459964301225840e-17, -1.6599682707075313e-33, -5.1925202712634716e-50},
{ 9.0659570451491533e-01, 3.0506718955442023e-17, -1.4478836557141204e-33, 1.8906373784448725e-50},
{ 9.0529675931811882e-01, -4.1153099826889901e-17, 2.9859368705184223e-33, 5.1145293917439211e-50},
{ 9.0398929312344334e-01, -6.6097544687484308e-18, 1.2728013034680357e-34, -4.3026097234014823e-51},
{ 9.0267331823725883e-01, -1.9250787033961483e-17, 1.3242128993244527e-33, -5.2971030688703665e-50},
{ 9.0134884704602203e-01, -1.3524789367698682e-17, 6.3605353115880091e-34, 3.6227400654573828e-50},
{ 9.0001589201616028e-01, -5.0639618050802273e-17, 1.0783525384031576e-33, 2.8130016326515111e-50},
{ 8.9867446569395382e-01, 2.6316906461033013e-17, 3.7003137047796840e-35, -2.3447719900465938e-51},
{ 8.9732458070541832e-01, -3.6396283314867290e-17, -2.3611649895474815e-33, 1.1837247047900082e-49},
{ 8.9596624975618511e-01, 4.9025099114811813e-17, -1.9440489814795326e-33, -1.7070486667767033e-49},
{ 8.9459948563138270e-01, -1.7516226396814919e-17, -1.3200670047246923e-33, -1.5953009884324695e-50},
{ 8.9322430119551532e-01, -4.1161239151908913e-18, 2.5380253805715999e-34, 4.2849455510516192e-51},
{ 8.9184070939234272e-01, 4.6690228137124547e-18, 1.6150254286841982e-34, -3.9617448820725012e-51},
{ 8.9044872324475788e-01, 1.1781931459051803e-17, -1.3346142209571930e-34, -9.4982373530733431e-51},
{ 8.8904835585466457e-01, -1.1164514966766675e-17, -3.4797636107798736e-34, -1.5605079997040631e-50},
{ 8.8763962040285393e-01, 1.2805091918587960e-17, 3.9948742059584459e-35, 3.8940716325338136e-51},
{ 8.8622253014888064e-01, -6.7307369600274315e-18, 1.2385593432917413e-34, 2.0364014759133320e-51},
{ 8.8479709843093779e-01, -9.4331469628972690e-18, -5.7106541478701439e-34, 1.8260134111907397e-50},
{ 8.8336333866573158e-01, 1.5822643380255127e-17, -7.8921320007588250e-34, -1.4782321016179836e-50},
{ 8.8192126434835505e-01, -1.9843248405890562e-17, -7.0412114007673834e-34, -1.0636770169389104e-50},
{ 8.8047088905216075e-01, 1.6311096602996350e-17, -5.7541360594724172e-34, -4.0128611862170021e-50},
{ 8.7901222642863353e-01, -4.7356837291118011e-17, 1.4388771297975192e-33, -2.9085554304479134e-50},
{ 8.7754529020726124e-01, 5.0113311846499550e-17, 2.8382769008739543e-34, 1.5550640393164140e-50},
{ 8.7607009419540660e-01, 5.8729024235147677e-18, 2.7941144391738458e-34, -1.8536073846509828e-50},
{ 8.7458665227817611e-01, -5.7216617730397065e-19, -2.9705811503689596e-35, 8.7389593969796752e-52},
{ 8.7309497841829009e-01, 7.8424672990129903e-18, -4.8685015839797165e-34, -2.2815570587477527e-50},
{ 8.7159508665595109e-01, -5.5272998038551050e-17, -2.2104090204984907e-33, -9.7749763187643172e-50},
{ 8.7008699110871146e-01, -4.1888510868549968e-17, 7.0900185861878415e-34, 3.7600251115157260e-50},
{ 8.6857070597134090e-01, 2.7192781689782903e-19, -1.6710140396932428e-35, -1.2625514734637969e-51},
{ 8.6704624551569265e-01, 3.0267859550930567e-18, -1.1559438782171572e-34, -5.3580556397808012e-52},
{ 8.6551362409056909e-01, -6.3723113549628899e-18, 2.3725520321746832e-34, 1.5911880348395175e-50},
{ 8.6397285612158670e-01, 4.1486355957361607e-17, 2.2709976932210266e-33, -8.1228385659479984e-50},
{ 8.6242395611104050e-01, 3.7008992527383130e-17, 5.2128411542701573e-34, 2.6945600081026861e-50},
{ 8.6086693863776731e-01, -3.0050048898573656e-17, -8.8706183090892111e-34, 1.5005320558097301e-50},
{ 8.5930181835700836e-01, 4.2435655816850687e-17, 7.6181814059912025e-34, -3.9592127850658708e-50},
{ 8.5772861000027212e-01, -4.8183447936336620e-17, -1.1044130517687532e-33, -8.7400233444645562e-50},
{ 8.5614732837519447e-01, 9.1806925616606261e-18, 5.6328649785951470e-34, 2.3326646113217378e-51},
{ 8.5455798836540053e-01, -1.2991124236396092e-17, 1.2893407722948080e-34, -3.6506925747583053e-52},
{ 8.5296060493036363e-01, 2.7152984251981370e-17, 7.4336483283120719e-34, 4.2162417622350668e-50},
{ 8.5135519310526520e-01, -5.3279874446016209e-17, 2.2281156380919942e-33, -4.0281886404138477e-50},
{ 8.4974176800085244e-01, 5.1812347659974015e-17, 3.0810626087331275e-33, -2.5931308201994965e-50},
{ 8.4812034480329723e-01, 1.8762563415239981e-17, 1.4048773307919617e-33, -2.4915221509958691e-50},
{ 8.4649093877405213e-01, -4.7969419958569345e-17, -2.7518267097886703e-33, -7.3518959727313350e-50},
{ 8.4485356524970712e-01, -4.3631360296879637e-17, -2.0307726853367547e-33, 4.3097229819851761e-50},
{ 8.4320823964184544e-01, 9.6536707005959077e-19, 2.8995142431556364e-36, 9.6715076811480284e-53},
{ 8.4155497743689844e-01, -3.4095465391321557e-17, -8.4130208607579595e-34, -4.9447283960568686e-50},
{ 8.3989379419599952e-01, -1.6673694881511411e-17, -1.4759184141750289e-33, -7.5795098161914058e-50},
{ 8.3822470555483808e-01, -3.5560085052855026e-17, 1.1689791577022643e-33, -5.8627347359723411e-50},
{ 8.3654772722351201e-01, -2.0899059027066533e-17, -9.8104097821002585e-35, -3.1609177868229853e-51},
{ 8.3486287498638001e-01, 4.6048430609159657e-17, -5.1827423265239912e-34, -7.0505343435504109e-51},
{ 8.3317016470191319e-01, 1.3275129507229764e-18, 4.8589164115370863e-35, 4.5422281300506859e-51},
{ 8.3146961230254524e-01, 1.4073856984728024e-18, 4.6951315383980830e-35, 5.1431906049905658e-51},
{ 8.2976123379452305e-01, -2.9349109376485597e-18, 1.1496917934149818e-34, 3.5186665544980233e-51},
{ 8.2804504525775580e-01, -4.4196593225871532e-17, 2.7967864855211251e-33, 1.0030777287393502e-49},
{ 8.2632106284566353e-01, -5.3957485453612902e-17, 6.8976896130138550e-34, 3.8106164274199196e-50},
{ 8.2458930278502529e-01, -2.6512360488868275e-17, 1.6916964350914386e-34, 6.7693974813562649e-51},
{ 8.2284978137582632e-01, 1.5193019034505495e-17, 9.6890547246521685e-34, 5.6994562923653264e-50},
{ 8.2110251499110465e-01, 3.0715131609697682e-17, -1.7037168325855879e-33, -1.1149862443283853e-49},
{ 8.1934752007679701e-01, -4.8200736995191133e-17, -1.5574489646672781e-35, -9.5647853614522216e-53},
{ 8.1758481315158371e-01, -1.4883149812426772e-17, -7.8273262771298917e-34, 4.1332149161031594e-50},
{ 8.1581441080673378e-01, 8.2652693782130871e-18, -2.3028778135179471e-34, 1.5102071387249843e-50},
{ 8.1403632970594841e-01, -5.2127351877042624e-17, -1.9047670611316360e-33, -1.6937269585941507e-49},
{ 8.1225058658520388e-01, 3.1054545609214803e-17, 2.2649541922707251e-34, -7.4221684154649405e-51},
{ 8.1045719825259477e-01, 2.3520367349840499e-17, -7.7530070904846341e-34, -7.2792616357197140e-50},
{ 8.0865618158817498e-01, 9.3251597879721674e-18, -7.1823301933068394e-34, 2.3925440846132106e-50},
{ 8.0684755354379922e-01, 4.9220603766095546e-17, 2.9796016899903487e-33, 1.5220754223615788e-49},
{ 8.0503133114296355e-01, 5.1368289568212149e-17, 6.3082807402256524e-34, 7.3277646085129827e-51},
{ 8.0320753148064494e-01, -3.3060609804814910e-17, -1.2242726252420433e-33, 2.8413673268630117e-50},
{ 8.0137617172314024e-01, -2.0958013413495834e-17, -4.3798162198006931e-34, 2.0235690497752515e-50},
{ 7.9953726910790501e-01, 2.0356723822005431e-17, -9.7448513696896360e-34, 5.3608109599696008e-52},
{ 7.9769084094339116e-01, -4.6730759884788944e-17, 2.3075897077191757e-33, 3.1605567774640253e-51},
{ 7.9583690460888357e-01, -3.0062724851910721e-17, -2.2496210832042235e-33, -6.5881774117183040e-50},
{ 7.9397547755433717e-01, -7.4194631759921416e-18, 2.4124341304631069e-34, -4.9956808616244972e-51},
{ 7.9210657730021239e-01, -3.7087850202326467e-17, -1.4874457267228264e-33, 2.9323097289153505e-50},
{ 7.9023022143731003e-01, 2.3056905954954492e-17, 1.4481080533260193e-33, -7.6725237057203488e-50},
{ 7.8834642762660623e-01, 3.4396993154059708e-17, 1.7710623746737170e-33, 1.7084159098417402e-49},
{ 7.8645521359908577e-01, -9.7841429939305265e-18, 3.3906063272445472e-34, 5.7269505320382577e-51},
{ 7.8455659715557524e-01, -8.5627965423173476e-18, -2.1106834459001849e-34, -1.6890322182469603e-50},
{ 7.8265059616657573e-01, 9.0745866975808825e-18, 6.7623847404278666e-34, -1.7173237731987271e-50},
{ 7.8073722857209449e-01, -9.9198782066678806e-18, -2.1265794012162715e-36, 3.0772165598957647e-54},
{ 7.7881651238147598e-01, -2.4891385579973807e-17, 6.7665497024807980e-35, -6.5218594281701332e-52},
{ 7.7688846567323244e-01, 7.7418602570672864e-18, -5.9986517872157897e-34, 3.0566548232958972e-50},
{ 7.7495310659487393e-01, -5.2209083189826433e-17, -9.6653593393686612e-34, 3.7027750076562569e-50},
{ 7.7301045336273699e-01, -3.2565907033649772e-17, 1.3860807251523929e-33, -3.9971329917586022e-50},
{ 7.7106052426181382e-01, -4.4558442347769265e-17, -2.9863565614083783e-33, -6.8795262083596236e-50},
{ 7.6910333764557959e-01, 5.1546455184564817e-17, 2.6142829553524292e-33, -1.6199023632773298e-49},
{ 7.6713891193582040e-01, -1.8885903683750782e-17, -1.3659359331495433e-33, -2.2538834962921934e-50},
{ 7.6516726562245896e-01, -3.2707225612534598e-17, 1.1177117747079528e-33, -3.7005182280175715e-50},
{ 7.6318841726338127e-01, 2.6314748416750748e-18, 1.4048039063095910e-34, 8.9601886626630321e-52},
{ 7.6120238548426178e-01, 3.5315510881690551e-17, 1.2833566381864357e-33, 8.6221435180890613e-50},
{ 7.5920918897838807e-01, -3.8558842175523123e-17, 2.9720241208332759e-34, -1.2521388928220163e-50},
{ 7.5720884650648457e-01, -1.9909098777335502e-17, 3.9409283266158482e-34, 2.0744254207802976e-50},
{ 7.5520137689653655e-01, -1.9402238001823017e-17, -3.7756206444727573e-34, -2.1212242308178287e-50},
{ 7.5318679904361252e-01, -3.7937789838736540e-17, -6.7009539920231559e-34, -6.7128562115050214e-51},
{ 7.5116513190968637e-01, 4.3499761158645868e-17, 2.5227718971102212e-33, -6.5969709212757102e-50},
{ 7.4913639452345937e-01, -4.4729078447011889e-17, -2.4206025249983768e-33, 1.1336681351116422e-49},
{ 7.4710060598018013e-01, 1.1874824875965430e-17, 2.1992523849833518e-34, 1.1025018564644483e-50},
{ 7.4505778544146595e-01, 1.5078686911877863e-17, 8.0898987212942471e-34, 8.2677958765323532e-50},
{ 7.4300795213512172e-01, -2.5144629669719265e-17, 7.1128989512526157e-34, 3.0181629077821220e-50},
{ 7.4095112535495911e-01, -1.4708616952297345e-17, -4.9550433827142032e-34, 3.1434132533735671e-50},
{ 7.3888732446061511e-01, 3.4324874808225091e-17, -1.3706639444717610e-33, -3.3520827530718938e-51},
{ 7.3681656887736990e-01, -2.8932468101656295e-17, -3.4649887126202378e-34, -1.8484474476291476e-50},
{ 7.3473887809596350e-01, -3.4507595976263941e-17, -2.3718000676666409e-33, -3.9696090387165402e-50},
{ 7.3265427167241282e-01, 1.8918673481573520e-17, -1.5123719544119886e-33, -9.7922152011625728e-51},
{ 7.3056276922782759e-01, -2.9689959904476928e-17, -1.1276871244239744e-33, -3.0531520961539007e-50},
{ 7.2846439044822520e-01, 1.1924642323370718e-19, 5.9001892316611011e-36, 1.2178089069502704e-52},
{ 7.2635915508434601e-01, -3.1917502443460542e-17, 7.7047912412039396e-34, 4.1455880160182123e-50},
{ 7.2424708295146689e-01, 2.9198471334403004e-17, 2.3027324968739464e-33, -1.2928820533892183e-51},
{ 7.2212819392921535e-01, -2.3871262053452047e-17, 1.0636125432862273e-33, -4.4598638837802517e-50},
{ 7.2000250796138165e-01, -2.5689658854462333e-17, -9.1492566948567925e-34, 4.4403780801267786e-50},
{ 7.1787004505573171e-01, 2.7006476062511453e-17, -2.2854956580215348e-34, 9.1726903890287867e-51},
{ 7.1573082528381871e-01, -5.1581018476410262e-17, -1.3736271349300259e-34, -1.2734611344111297e-50},
{ 7.1358486878079364e-01, -4.2342504403133584e-17, -4.2690366101617268e-34, -2.6352370883066522e-50},
{ 7.1143219574521643e-01, 7.9643298613856813e-18, 2.9488239510721469e-34, 1.6985236437666356e-50},
{ 7.0927282643886569e-01, -3.7597359110245730e-17, 1.0613125954645119e-34, 8.9465480185486032e-51},
{ 7.0710678118654757e-01, -4.8336466567264567e-17, 2.0693376543497068e-33, 2.4677734957341755e-50}
};
// Computes sin(a) and cos(a) simultaneously from the sine Taylor series.
// Assumes the argument is already reduced so |a| <= pi/2048; cos(a) is then
// recovered as sqrt(1 - sin(a)^2), which is well-conditioned for tiny |a|.
__device__
static void sincos_taylor(const gqd_real &a, gqd_real &sin_a, gqd_real &cos_a) {
    const double thresh = 0.5 * _qd_eps * fabs(to_double(a));
    gqd_real pow_term, sum, term, neg_sqr;
    if (is_zero(a)) {
        // sin(0) = 0, cos(0) = 1; fill all four limbs explicitly.
        sin_a[0] = sin_a[1] = sin_a[2] = sin_a[3] = 0.0;
        cos_a[0] = 1.0;
        cos_a[1] = cos_a[2] = cos_a[3] = 0.0;
        return;
    }
    neg_sqr = negative( sqr(a) );   // -a^2: successive powers alternate sign
    sum = a;
    pow_term = a;
    int idx = 0;
    do {
        pow_term = pow_term * neg_sqr;
        // Every other entry of the inverse-factorial table supplies the
        // next odd-order series coefficient.
        term = pow_term * gqd_real(qd_inv_fact[idx][0], qd_inv_fact[idx][1],
                                   qd_inv_fact[idx][2], qd_inv_fact[idx][3]);
        sum = sum + term;
        idx += 2;
    } while (idx < n_qd_inv_fact && fabs(to_double(term)) > thresh);
    sin_a = sum;
    cos_a = sqrt(1.0 - sqr(sum));
}
// Taylor-series sine for a reduced argument (caller guarantees |a| is tiny,
// on the order of pi/2048, so only a handful of terms are needed).
__device__
static gqd_real sin_taylor(const gqd_real &a) {
    const double thresh = 0.5 * _qd_eps * fabs(to_double(a));
    gqd_real sum, term, pow_term, neg_sqr;
    if (is_zero(a)) {
        // sin(0) = 0; fill all four limbs explicitly.
        sum[0] = sum[1] = sum[2] = sum[3] = 0.0;
        return sum;
    }
    neg_sqr = negative(sqr(a));   // -a^2: successive powers alternate sign
    sum = a;
    pow_term = a;
    int idx = 0;
    do {
        pow_term = pow_term * neg_sqr;
        term = pow_term * gqd_real(qd_inv_fact[idx][0], qd_inv_fact[idx][1],
                                   qd_inv_fact[idx][2], qd_inv_fact[idx][3]);
        sum = sum + term;
        idx += 2;   // step to the next odd-order coefficient
    } while (idx < n_qd_inv_fact && fabs(to_double(term)) > thresh);
    return sum;
}
// Taylor-series cosine for a reduced argument (caller guarantees |a| is tiny,
// on the order of pi/2048).
__device__
static gqd_real cos_taylor(const gqd_real &a) {
    const double thresh = 0.5 * _qd_eps;
    gqd_real sum, term, pow_term, neg_sqr;
    if (is_zero(a)) {
        // cos(0) = 1; fill all four limbs explicitly.
        sum[0] = 1.0;
        sum[1] = sum[2] = sum[3] = 0.0;
        return sum;
    }
    neg_sqr = negative(sqr(a));            // -a^2: powers alternate sign
    sum = 1.0 + mul_pwr2(neg_sqr, 0.5);    // first two terms: 1 - a^2/2
    pow_term = neg_sqr;
    int idx = 1;
    do {
        pow_term = pow_term * neg_sqr;
        term = pow_term * gqd_real(qd_inv_fact[idx][0], qd_inv_fact[idx][1],
                                   qd_inv_fact[idx][2], qd_inv_fact[idx][3]);
        sum = sum + term;
        idx += 2;   // step to the next even-order coefficient
    } while (idx < n_qd_inv_fact && fabs(to_double(term)) > thresh);
    return sum;
}
__device__
gqd_real sin(const gqd_real &a) {
/* Strategy:
To compute sin(x), we choose integers a, b so that
x = s + a * (pi/2) + b * (pi/1024)
and |s| <= pi/2048. Using a precomputed table of
sin(k pi / 1024) and cos(k pi / 1024), we can compute
sin(x) from sin(s) and cos(s). This greatly increases the
convergence of the sine Taylor series.
*/
gqd_real z, r;
if (is_zero(a)) {
// sin(0) = 0; write all four limbs of the quad-double explicitly.
//return gqd_real(0.0);
r[0] = r[1] = r[2] = r[3] = 0.0;
return r;
}
// approximately reduce modulo 2*pi
z = nint(a / _qd_2pi);
r = a - _qd_2pi * z;
// approximately reduce modulo pi/2 and then modulo pi/1024
double q = floor(r[0] / _qd_pi2[0] + 0.5);
gqd_real t = r - _qd_pi2 * q;
int j = (int)(q); // multiples of pi/2 removed; expected in [-2, 2]
q = floor(t[0] / _qd_pi1024[0] + 0.5);
t = t - _qd_pi1024 * q;
int k = (int)(q); // multiples of pi/1024 removed; expected in [-256, 256]
int abs_k = abs(k);
if (j < -2 || j > 2) {
// Reduction modulo pi/2 failed; the host library's error/NaN path
// (commented out below) is unavailable in device code, so return 0.
//gqd_real::error("(gqd_real::sin): Cannot reduce modulo pi/2.");
//return gqd_real::_nan;
//return gqd_real(0.0);
r[0] = r[1] = r[2] = r[3] = 0.0;
return r;
}
if (abs_k > 256) {
// Reduction modulo pi/1024 failed; same stubbed-out error handling.
//gqd_real::error("(gqd_real::sin): Cannot reduce modulo pi/1024.");
//return gqd_real::_nan;
//return gqd_real( 0.0 );
r[0] = r[1] = r[2] = r[3] = 0.0;
return r;
}
if (k == 0) {
// No pi/1024 offset: only the quadrant j matters.
switch (j) {
case 0:
return sin_taylor(t);
case 1:
return cos_taylor(t); // sin(t + pi/2) = cos(t)
case -1:
return negative(cos_taylor(t)); // sin(t - pi/2) = -cos(t)
default:
return negative(sin_taylor(t)); // sin(t +/- pi) = -sin(t)
}
}
//gqd_real sin_t, cos_t;
//gqd_real u = qd_cos_tbl[abs_k-1];
//gqd_real v = qd_sin_tbl[abs_k-1];
//sincos_taylor(t, sin_t, cos_t);
/// z and r are reused here to avoid extra local storage:
/// after this call z = sin(t) and r = cos(t).
sincos_taylor( t, z, r );
int i = abs_k - 1;
// Below: angle-addition formulas combine sin(t)/cos(t) with the tabulated
// sin(|k| pi/1024) and cos(|k| pi/1024); quadrant j and the sign of k
// select which combination and signs apply.
if (j == 0) {
z = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * z;
r = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * r;
//z = qd_cos_tbl[abs_k-1] * z;
//r = qd_sin_tbl[abs_k-1] * r;
if (k > 0) {
//z = qd_cos_tbl[abs_k-1] * z;
//r = qd_sin_tbl[abs_k-1] * r;
return z + r;
} else {
//z = qd_cos_tbl[abs_k-1] * z;
//r = qd_sin_tbl[abs_k-1] * r;
return z - r;
}
} else if (j == 1) {
r = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * r;
z = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * z;
//r = qd_cos_tbl[abs_k-1] * r;
//z = qd_sin_tbl[abs_k-1] * z;
if (k > 0) {
//r = qd_cos_tbl[abs_k-1] * r;
//z = qd_sin_tbl[abs_k-1] * z;
return r - z;
} else {
//r = qd_cos_tbl[abs_k-1] * r;
//z = qd_sin_tbl[abs_k-1] * z;
return r + z;
}
} else if (j == -1) {
z = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * z;
r = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * r;
//z = qd_sin_tbl[abs_k-1] * z;
//r = qd_cos_tbl[abs_k-1] * r;
if (k > 0) {
//z = qd_sin_tbl[abs_k-1] * z;
//r = qd_cos_tbl[abs_k-1] * r;
return z - r;
} else {
// Negate r in place (replaces the negative(...) form shown above).
//r = negative(qd_cos_tbl[abs_k-1]) * r;
//r = (qd_cos_tbl[abs_k-1]) * r;
r[0] = -r[0];
r[1] = -r[1];
r[2] = -r[2];
r[3] = -r[3];
//z = qd_sin_tbl[abs_k-1] * z;
return r - z;
}
} else {
r = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * r;
z = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * z;
//r = qd_sin_tbl[abs_k-1] * r ;
//z = qd_cos_tbl[abs_k-1] * z;
if (k > 0) {
// Negate z in place (replaces the negative(...) form shown above).
//z = negative(qd_cos_tbl[abs_k-1]) * z;
//z = qd_cos_tbl[abs_k-1] * z;
z[0] = -z[0];
z[1] = -z[1];
z[2] = -z[2];
z[3] = -z[3];
//r = qd_sin_tbl[abs_k-1] * r;
return z - r;
} else {
//r = qd_sin_tbl[abs_k-1] * r ;
//z = qd_cos_tbl[abs_k-1] * z;
return r - z;
}
}
}
// cos(a) by the same strategy as sin(): reduce modulo 2*pi, then pi/2 (j),
// then pi/1024 (k), and recombine via tabulated sin/cos of k*pi/1024.
__device__
gqd_real cos(const gqd_real &a) {
if (is_zero(a)) {
// cos(0) = 1
return gqd_real(1.0);
}
// approximately reduce modulo 2*pi
gqd_real z = nint(a / _qd_2pi);
gqd_real r = a - _qd_2pi * z;
// approximately reduce modulo pi/2 and then modulo pi/1024
double q = floor(r[0] / _qd_pi2[0] + 0.5);
gqd_real t = r - _qd_pi2 * q;
int j = (int)(q); // multiples of pi/2 removed; expected in [-2, 2]
q = floor(t[0] / _qd_pi1024[0] + 0.5);
t = t - _qd_pi1024 * q;
int k = (int)(q); // multiples of pi/1024 removed; expected in [-256, 256]
int abs_k = abs(k);
if (j < -2 || j > 2) {
// Reduction failed; error/NaN reporting is stubbed out on the device.
//qd_real::error("(qd_real::cos): Cannot reduce modulo pi/2.");
//return qd_real::_nan;
return gqd_real(0.0);
}
if (abs_k > 256) {
// Reduction failed; error/NaN reporting is stubbed out on the device.
//qd_real::error("(qd_real::cos): Cannot reduce modulo pi/1024.");
//return qd_real::_nan;
return gqd_real(0.0);
}
if (k == 0) {
// No pi/1024 offset: only the quadrant j matters.
switch (j) {
case 0:
return cos_taylor(t);
case 1:
return negative(sin_taylor(t)); // cos(t + pi/2) = -sin(t)
case -1:
return sin_taylor(t); // cos(t - pi/2) = sin(t)
default:
return negative(cos_taylor(t)); // cos(t +/- pi) = -cos(t)
}
}
gqd_real sin_t, cos_t;
sincos_taylor(t, sin_t, cos_t);
//gqd_real u = qd_cos_tbl[abs_k - 1];
//gqd_real v = qd_sin_tbl[abs_k - 1];
int i = abs_k - 1;
// u = cos(|k| pi/1024), v = sin(|k| pi/1024) from the constant tables.
gqd_real u(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]);
gqd_real v(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]);
// Angle-addition formulas for cos(t + j*pi/2 + k*pi/1024); quadrant j and
// the sign of k select the combination and signs.
if (j == 0) {
if (k > 0) {
r = u * cos_t - v * sin_t;
} else {
r = u * cos_t + v * sin_t;
}
} else if (j == 1) {
if (k > 0) {
r = negative(u * sin_t) - v * cos_t;
} else {
r = v * cos_t - u * sin_t;
}
} else if (j == -1) {
if (k > 0) {
r = u * sin_t + v * cos_t;
} else {
r = u * sin_t - v * cos_t;
}
} else {
if (k > 0) {
r = v * sin_t - u * cos_t;
} else {
r = negative(u * cos_t) - v * sin_t;
}
}
return r;
}
// Computes sin(a) and cos(a) together with a single argument reduction
// (same reduction scheme as sin()/cos()).
__device__
void sincos(const gqd_real &a, gqd_real &sin_a, gqd_real &cos_a) {
if (is_zero(a)) {
// sin(0) = 0, cos(0) = 1
sin_a = gqd_real(0.0);
cos_a = gqd_real(1.0);
return;
}
// approximately reduce by 2*pi
gqd_real z = nint(a / _qd_2pi);
gqd_real t = a - _qd_2pi * z;
// approximately reduce by pi/2 and then by pi/1024.
double q = floor(t[0] / _qd_pi2[0] + 0.5);
t = t - _qd_pi2 * q;
int j = (int)(q); // multiples of pi/2 removed; expected in [-2, 2]
q = floor(t[0] / _qd_pi1024[0] + 0.5);
t = t - _qd_pi1024 * q;
int k = (int)(q); // multiples of pi/1024 removed; expected in [-256, 256]
int abs_k = abs(k);
if (j < -2 || j > 2) {
// Reduction failed; error/NaN reporting is stubbed out on the device.
//qd_real::error("(qd_real::sincos): Cannot reduce modulo pi/2.");
//cos_a = sin_a = qd_real::_nan;
cos_a = sin_a = gqd_real(0.0);
return;
}
if (abs_k > 256) {
// Reduction failed; error/NaN reporting is stubbed out on the device.
//qd_real::error("(qd_real::sincos): Cannot reduce modulo pi/1024.");
//cos_a = sin_a = qd_real::_nan;
cos_a = sin_a = gqd_real(0.0);
return;
}
gqd_real sin_t, cos_t;
sincos_taylor(t, sin_t, cos_t);
if (k == 0) {
// No pi/1024 offset: rotate (sin_t, cos_t) by j quarter turns.
if (j == 0) {
sin_a = sin_t;
cos_a = cos_t;
} else if (j == 1) {
sin_a = cos_t;
cos_a = negative(sin_t);
} else if (j == -1) {
sin_a = negative(cos_t);
cos_a = sin_t;
} else {
sin_a = negative(sin_t);
cos_a = negative(cos_t);
}
return;
}
//gqd_real u = qd_cos_tbl[abs_k - 1];
//gqd_real v = qd_sin_tbl[abs_k - 1];
int i = abs_k - 1;
// u = cos(|k| pi/1024), v = sin(|k| pi/1024) from the constant tables.
gqd_real u(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]);
gqd_real v(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]);
// Angle-addition formulas for sin/cos of t + j*pi/2 + k*pi/1024; quadrant j
// and the sign of k select the combination and signs.
if (j == 0) {
if (k > 0) {
sin_a = u * sin_t + v * cos_t;
cos_a = u * cos_t - v * sin_t;
} else {
sin_a = u * sin_t - v * cos_t;
cos_a = u * cos_t + v * sin_t;
}
} else if (j == 1) {
if (k > 0) {
cos_a = negative(u * sin_t) - v * cos_t;
sin_a = u * cos_t - v * sin_t;
} else {
cos_a = v * cos_t - u * sin_t;
sin_a = u * cos_t + v * sin_t;
}
} else if (j == -1) {
if (k > 0) {
cos_a = u * sin_t + v * cos_t;
sin_a = v * sin_t - u * cos_t;
} else {
cos_a = u * sin_t - v * cos_t;
sin_a = negative(u * cos_t) - v * sin_t;
}
} else {
if (k > 0) {
sin_a = negative(u * sin_t) - v * cos_t;
cos_a = v * sin_t - u * cos_t;
} else {
sin_a = v * cos_t - u * sin_t;
cos_a = negative(u * cos_t) - v * sin_t;
}
}
}
// tan(a) = sin(a) / cos(a), computed with one shared argument reduction
// via sincos(). Note: no pole handling — cos(a) near zero divides through.
__device__
gqd_real tan(const gqd_real &a) {
    gqd_real sin_a, cos_a;
    sincos(a, sin_a, cos_a);
    return sin_a / cos_a;
}
#ifdef ALL_MATH
// Quadrant-aware arctangent.
// Rather than a Taylor series, Newton's iteration is applied to
//   sin(z) = y/r   via  z' = z + (y - sin z) / cos z   (when |x| > |y|)
//   cos(z) = x/r   via  z' = z - (x - cos z) / sin z   (otherwise)
// where r = sqrt(x^2 + y^2); x and y are normalized so x^2 + y^2 = 1.
// The branch keeps the Newton denominator large.
__device__
gqd_real atan2(const gqd_real &y, const gqd_real &x) {
    if (is_zero(x)) {
        if (is_zero(y)) {
            // atan2(0, 0) is undefined; error/NaN reporting is stubbed out
            // on the device, so 0 is returned.
            //qd_real::error("(qd_real::atan2): Both arguments zero.");
            //return qd_real::_nan;
            return gqd_real(0.0);
        }
        // On the y-axis: +/- pi/2.
        return (is_positive(y)) ? _qd_pi2 : negative(_qd_pi2);
    }
    if (is_zero(y)) {
        // On the x-axis: 0 or pi.
        return (is_positive(x)) ? gqd_real(0.0) : _qd_pi;
    }
    if (x == y) {
        // Exact diagonals: pi/4 or -3pi/4.
        return (is_positive(y)) ? _qd_pi4 : negative(_qd_3pi4);
    }
    if (x == negative(y)) {
        // Exact anti-diagonals: 3pi/4 or -pi/4.
        return (is_positive(y)) ? _qd_3pi4 : negative(_qd_pi4);
    }
    gqd_real r = sqrt(sqr(x) + sqr(y));
    gqd_real xx = x / r;
    gqd_real yy = y / r;
    // Double-precision atan2 seeds the iteration; three Newton steps refine
    // it to full quad-double accuracy.
    gqd_real z = gqd_real(atan2(to_double(y), to_double(x)));
    gqd_real sin_z, cos_z;
    if (abs(xx[0]) > abs(yy[0])) {
        // |x| dominates: solve sin(z) = y/r.
        for (int it = 0; it < 3; it++) {
            sincos(z, sin_z, cos_z);
            z = z + (yy - sin_z) / cos_z;
        }
    } else {
        // |y| dominates: solve cos(z) = x/r.
        for (int it = 0; it < 3; it++) {
            sincos(z, sin_z, cos_z);
            z = z - (xx - cos_z) / sin_z;
        }
    }
    return z;
}
// atan(a) = atan2(a, 1), reusing the Newton-iteration machinery above.
__device__
gqd_real atan(const gqd_real &a) {
    return atan2(a, gqd_real(1.0));
}
// asin(a) = atan2(a, sqrt(1 - a^2)), with domain and endpoint handling.
__device__
gqd_real asin(const gqd_real &a) {
    gqd_real abs_a = abs(a);
    if (abs_a > 1.0) {
        // |a| > 1 is out of domain; error/NaN reporting is stubbed out on
        // the device, so 0 is returned.
        //qd_real::error("(qd_real::asin): Argument out of domain.");
        //return qd_real::_nan;
        return gqd_real(0.0);
    }
    if (is_one(abs_a)) {
        // asin(+/-1) = +/- pi/2
        return is_positive(a) ? _qd_pi2 : negative(_qd_pi2);
    }
    return atan2(a, sqrt(1.0 - sqr(a)));
}
// acos(a) = atan2(sqrt(1 - a^2), a), with domain and endpoint handling.
__device__
gqd_real acos(const gqd_real &a) {
    gqd_real abs_a = abs(a);
    if (abs_a > 1.0) {
        // |a| > 1 is out of domain; error/NaN reporting is stubbed out on
        // the device, so 0 is returned.
        //qd_real::error("(qd_real::acos): Argument out of domain.");
        //return qd_real::_nan;
        return gqd_real(0.0);
    }
    if (is_one(abs_a)) {
        // acos(1) = 0, acos(-1) = pi
        return is_positive(a) ? gqd_real(0.0) : _qd_pi;
    }
    return atan2(sqrt(1.0 - sqr(a)), a);
}
// Hyperbolic sine. For |a| > 0.05 the exponential identity is accurate;
// for smaller arguments it cancels badly, so the Taylor series
// a + a^3/3! + a^5/5! + ... is summed directly instead.
__device__
gqd_real sinh(const gqd_real &a) {
    if (is_zero(a)) {
        return gqd_real(0.0);
    }
    if (abs(a) > 0.05) {
        // sinh(a) = (e^a - e^-a) / 2
        gqd_real ea = exp(a);
        return mul_pwr2(ea - inv(ea), 0.5);
    }
    gqd_real sum = a;
    gqd_real term = a;
    gqd_real a_sqr = sqr(term);
    double k = 1.0;
    double thresh = abs(to_double(a) * _qd_eps);
    do {
        k = k + 2.0;
        // Next odd term: term *= a^2 / ((k-1) * k)
        term = (term * a_sqr);
        term = term / ((k - 1) * k);
        sum = sum + term;
    } while (abs(term) > thresh);
    return sum;
}
// Hyperbolic cosine: cosh(a) = (e^a + e^-a) / 2. The terms add with the
// same sign, so no cancellation-avoiding small-argument path is needed.
__device__
gqd_real cosh(const gqd_real &a) {
    if (is_zero(a)) {
        return gqd_real(1.0);
    }
    gqd_real ea = exp(a);
    return mul_pwr2(ea + inv(ea), 0.5);
}
// Hyperbolic tangent. For |a| > 0.05 the exponential form is accurate;
// for smaller arguments tanh is derived from sinh's Taylor path to avoid
// the cancellation in e^a - e^-a.
__device__
gqd_real tanh(const gqd_real &a) {
    if (is_zero(a)) {
        return gqd_real(0.0);
    }
    if (abs(to_double(a)) > 0.05) {
        gqd_real ea = exp(a);
        gqd_real rec = inv(ea);
        return (ea - rec) / (ea + rec);
    }
    gqd_real s = sinh(a);
    gqd_real c = sqrt(1.0 + sqr(s));   // cosh from the identity cosh^2 - sinh^2 = 1
    return s / c;
}
// Computes sinh(a) and cosh(a) together, sharing one exp() for larger
// arguments and using sinh's cancellation-free Taylor path for small ones.
__device__
void sincosh(const gqd_real &a, gqd_real &s, gqd_real &c) {
    if (abs(to_double(a)) <= 0.05) {
        // Small argument: cosh is recovered from sinh via the identity
        // cosh^2 - sinh^2 = 1.
        s = sinh(a);
        c = sqrt(1.0 + sqr(s));
    } else {
        gqd_real ea = exp(a);
        gqd_real rec = inv(ea);
        s = mul_pwr2(ea - rec, 0.5);
        c = mul_pwr2(ea + rec, 0.5);
    }
}
// Inverse hyperbolic sine: asinh(a) = log(a + sqrt(a^2 + 1)).
__device__
gqd_real asinh(const gqd_real &a) {
    return log(a + sqrt(sqr(a) + 1.0));
}
// Inverse hyperbolic cosine: acosh(a) = log(a + sqrt(a^2 - 1)), a >= 1.
__device__
gqd_real acosh(const gqd_real &a) {
    if (a < 1.0) {
        // Out of domain; error/NaN reporting is stubbed out on the device,
        // so 0 is returned.
        ///qd_real::error("(qd_real::acosh): Argument out of domain.");
        //return qd_real::_nan;
        return gqd_real(0.0);
    }
    return log(a + sqrt(sqr(a) - 1.0));
}
// Inverse hyperbolic tangent: atanh(a) = log((1+a)/(1-a)) / 2, |a| < 1.
__device__
gqd_real atanh(const gqd_real &a) {
    if (abs(a) >= 1.0) {
        // Out of domain; error/NaN reporting is stubbed out on the device,
        // so 0 is returned.
        //qd_real::error("(qd_real::atanh): Argument out of domain.");
        //return qd_real::_nan;
        return gqd_real(0.0);
    }
    return mul_pwr2(log((1.0 + a) / (1.0 - a)), 0.5);
}
#endif /* ALL_MATH */
#endif /* __GQD_SIN_COS_CU__ */
| 7f4c518b504d6c8d08bc70292bc5a145b98db844.cu | #ifndef __GQD_SIN_COS_CU__
#define __GQD_SIN_COS_CU__
#include "gqd_real.h"
//#include "common.cu"
extern __device__ __constant__ double qd_inv_fact[n_qd_inv_fact][4];
// Table of sin(k * pi/1024) and cos(k * pi/1024).
static __device__ __constant__ double qd_sin_tbl[256][4] = {
{ 3.0679567629659761e-03, 1.2690279085455925e-19, 5.2879464245328389e-36, -1.7820334081955298e-52},
{ 6.1358846491544753e-03, 9.0545257482474933e-20, 1.6260113133745320e-37, -9.7492001208767410e-55},
{ 9.2037547820598194e-03, -1.2136591693535934e-19, 5.5696903949425567e-36, 1.2505635791936951e-52},
{ 1.2271538285719925e-02, 6.9197907640283170e-19, -4.0203726713435555e-36, -2.0688703606952816e-52},
{ 1.5339206284988102e-02, -8.4462578865401696e-19, 4.6535897505058629e-35, -1.3923682978570467e-51},
{ 1.8406729905804820e-02, 7.4195533812833160e-19, 3.9068476486787607e-35, 3.6393321292898614e-52},
{ 2.1474080275469508e-02, -4.5407960207688566e-19, -2.2031770119723005e-35, 1.2709814654833741e-51},
{ 2.4541228522912288e-02, -9.1868490125778782e-20, 4.8706148704467061e-36, -2.8153947855469224e-52},
{ 2.7608145778965743e-02, -1.5932358831389269e-18, -7.0475416242776030e-35, -2.7518494176602744e-51},
{ 3.0674803176636626e-02, -1.6936054844107918e-20, -2.0039543064442544e-36, -1.6267505108658196e-52},
{ 3.3741171851377587e-02, -2.0096074292368340e-18, -1.3548237016537134e-34, 6.5554881875899973e-51},
{ 3.6807222941358832e-02, 6.1060088803529842e-19, -4.0448721259852727e-35, -2.1111056765671495e-51},
{ 3.9872927587739811e-02, 4.6657453481183289e-19, 3.4119333562288684e-35, 2.4007534726187511e-51},
{ 4.2938256934940820e-02, 2.8351940588660907e-18, 1.6991309601186475e-34, 6.8026536098672629e-51},
{ 4.6003182130914630e-02, -1.1182813940157788e-18, 7.5235020270378946e-35, 4.1187304955493722e-52},
{ 4.9067674327418015e-02, -6.7961037205182801e-19, -4.4318868124718325e-35, -9.9376628132525316e-52},
{ 5.2131704680283324e-02, -2.4243695291953779e-18, -1.3675405320092298e-34, -8.3938137621145070e-51},
{ 5.5195244349689941e-02, -1.3340299860891103e-18, -3.4359574125665608e-35, 1.1911462755409369e-51},
{ 5.8258264500435759e-02, 2.3299905496077492e-19, 1.9376108990628660e-36, -5.1273775710095301e-53},
{ 6.1320736302208578e-02, -5.1181134064638108e-19, -4.2726335866706313e-35, 2.6368495557440691e-51},
{ 6.4382630929857465e-02, -4.2325997000052705e-18, 3.3260117711855937e-35, 1.4736267706718352e-51},
{ 6.7443919563664065e-02, -6.9221796556983636e-18, 1.5909286358911040e-34, -7.8828946891835218e-51},
{ 7.0504573389613870e-02, -6.8552791107342883e-18, -1.9961177630841580e-34, 2.0127129580485300e-50},
{ 7.3564563599667426e-02, -2.7784941506273593e-18, -9.1240375489852821e-35, -1.9589752023546795e-51},
{ 7.6623861392031492e-02, 2.3253700287958801e-19, -1.3186083921213440e-36, -4.9927872608099673e-53},
{ 7.9682437971430126e-02, -4.4867664311373041e-18, 2.8540789143650264e-34, 2.8491348583262741e-51},
{ 8.2740264549375692e-02, 1.4735983530877760e-18, 3.7284093452233713e-35, 2.9024430036724088e-52},
{ 8.5797312344439894e-02, -3.3881893830684029e-18, -1.6135529531508258e-34, 7.7294651620588049e-51},
{ 8.8853552582524600e-02, -3.7501775830290691e-18, 3.7543606373911573e-34, 2.2233701854451859e-50},
{ 9.1908956497132724e-02, 4.7631594854274564e-18, 1.5722874642939344e-34, -4.8464145447831456e-51},
{ 9.4963495329639006e-02, -6.5885886400417564e-18, -2.1371116991641965e-34, 1.3819370559249300e-50},
{ 9.8017140329560604e-02, -1.6345823622442560e-18, -1.3209238810006454e-35, -3.5691060049117942e-52},
{ 1.0106986275482782e-01, 3.3164325719308656e-18, -1.2004224885132282e-34, 7.2028828495418631e-51},
{ 1.0412163387205457e-01, 6.5760254085385100e-18, 1.7066246171219214e-34, -4.9499340996893514e-51},
{ 1.0717242495680884e-01, 6.4424044279026198e-18, -8.3956976499698139e-35, -4.0667730213318321e-51},
{ 1.1022220729388306e-01, -5.6789503537823233e-19, 1.0380274792383233e-35, 1.5213997918456695e-52},
{ 1.1327095217756435e-01, 2.7100481012132900e-18, 1.5323292999491619e-35, 4.9564432810360879e-52},
{ 1.1631863091190477e-01, 1.0294914877509705e-18, -9.3975734948993038e-35, 1.3534827323719708e-52},
{ 1.1936521481099137e-01, -3.9500089391898506e-18, 3.5317349978227311e-34, 1.8856046807012275e-51},
{ 1.2241067519921620e-01, 2.8354501489965335e-18, 1.8151655751493305e-34, -2.8716592177915192e-51},
{ 1.2545498341154623e-01, 4.8686751763148235e-18, 5.9878105258097936e-35, -3.3534629098722107e-51},
{ 1.2849811079379317e-01, 3.8198603954988802e-18, -1.8627501455947798e-34, -2.4308161133527791e-51},
{ 1.3154002870288312e-01, -5.0039708262213813e-18, -1.2983004159245552e-34, -4.6872034915794122e-51},
{ 1.3458070850712620e-01, -9.1670359171480699e-18, 1.5916493007073973e-34, 4.0237002484366833e-51},
{ 1.3762012158648604e-01, 6.6253255866774482e-18, -2.3746583031401459e-34, -9.3703876173093250e-52},
{ 1.4065823933284924e-01, -7.9193932965524741e-18, 6.0972464202108397e-34, 2.4566623241035797e-50},
{ 1.4369503315029444e-01, 1.1472723016618666e-17, -5.1884954557576435e-35, -4.2220684832186607e-51},
{ 1.4673047445536175e-01, 3.7269471470465677e-18, 3.7352398151250827e-34, -4.0881822289508634e-51},
{ 1.4976453467732151e-01, 8.0812114131285151e-18, 1.2979142554917325e-34, 9.9380667487736254e-51},
{ 1.5279718525844344e-01, -7.6313573938416838e-18, 5.7714690450284125e-34, -3.7731132582986687e-50},
{ 1.5582839765426523e-01, 3.0351307187678221e-18, -1.0976942315176184e-34, 7.8734647685257867e-51},
{ 1.5885814333386145e-01, -4.0163200573859079e-18, -9.2840580257628812e-35, -2.8567420029274875e-51},
{ 1.6188639378011183e-01, 1.1850519643573528e-17, -5.0440990519162957e-34, 3.0510028707928009e-50},
{ 1.6491312048996992e-01, -7.0405288319166738e-19, 3.3211107491245527e-35, 8.6663299254686031e-52},
{ 1.6793829497473117e-01, 5.4284533721558139e-18, -3.3263339336181369e-34, -1.8536367335123848e-50},
{ 1.7096188876030122e-01, 9.1919980181759094e-18, -6.7688743940982606e-34, -1.0377711384318389e-50},
{ 1.7398387338746382e-01, 5.8151994618107928e-18, -1.6751014298301606e-34, -6.6982259797164963e-51},
{ 1.7700422041214875e-01, 6.7329300635408167e-18, 2.8042736644246623e-34, 3.6786888232793599e-51},
{ 1.8002290140569951e-01, 7.9701826047392143e-18, -7.0765920110524977e-34, 1.9622512608461784e-50},
{ 1.8303988795514095e-01, 7.7349918688637383e-18, -4.4803769968145083e-34, 1.1201148793328890e-50},
{ 1.8605515166344666e-01, -1.2564893007679552e-17, 7.5953844248530810e-34, -3.8471695132415039e-51},
{ 1.8906866414980622e-01, -7.6208955803527778e-18, -4.4792298656662981e-34, -4.4136824096645007e-50},
{ 1.9208039704989244e-01, 4.3348343941174903e-18, -2.3404121848139937e-34, 1.5789970962611856e-50},
{ 1.9509032201612828e-01, -7.9910790684617313e-18, 6.1846270024220713e-34, -3.5840270918032937e-50},
{ 1.9809841071795359e-01, -1.8434411800689445e-18, 1.4139031318237285e-34, 1.0542811125343809e-50},
{ 2.0110463484209190e-01, 1.1010032669300739e-17, -3.9123576757413791e-34, 2.4084852500063531e-51},
{ 2.0410896609281687e-01, 6.0941297773957752e-18, -2.8275409970449641e-34, 4.6101008563532989e-51},
{ 2.0711137619221856e-01, -1.0613362528971356e-17, 2.2456805112690884e-34, 1.3483736125280904e-50},
{ 2.1011183688046961e-01, 1.1561548476512844e-17, 6.0355905610401254e-34, 3.3329909618405675e-50},
{ 2.1311031991609136e-01, 1.2031873821063860e-17, -3.4142699719695635e-34, -1.2436262780241778e-50},
{ 2.1610679707621952e-01, -1.0111196082609117e-17, 7.2789545335189643e-34, -2.9347540365258610e-50},
{ 2.1910124015686980e-01, -3.6513812299150776e-19, -2.3359499418606442e-35, 3.1785298198458653e-52},
{ 2.2209362097320354e-01, -3.0337210995812162e-18, 6.6654668033632998e-35, 2.0110862322656942e-51},
{ 2.2508391135979283e-01, 3.9507040822556510e-18, 2.4287993958305375e-35, 5.6662797513020322e-52},
{ 2.2807208317088573e-01, 8.2361837339258012e-18, 6.9786781316397937e-34, -6.4122962482639504e-51},
{ 2.3105810828067111e-01, 1.0129787149761869e-17, -6.9359234615816044e-34, -2.8877355604883782e-50},
{ 2.3404195858354343e-01, -6.9922402696101173e-18, -5.7323031922750280e-34, 5.3092579966872727e-51},
{ 2.3702360599436720e-01, 8.8544852285039918e-18, 1.3588480826354134e-34, 1.0381022520213867e-50},
{ 2.4000302244874150e-01, -1.2137758975632164e-17, -2.6448807731703891e-34, -1.9929733800670473e-51},
{ 2.4298017990326390e-01, -8.7514315297196632e-18, -6.5723260373079431e-34, -1.0333158083172177e-50},
{ 2.4595505033579462e-01, -1.1129044052741832e-17, 4.3805998202883397e-34, 1.2219399554686291e-50},
{ 2.4892760574572018e-01, -8.1783436100020990e-18, 5.5666875261111840e-34, 3.8080473058748167e-50},
{ 2.5189781815421697e-01, -1.7591436032517039e-17, -1.0959681232525285e-33, 5.6209426020232456e-50},
{ 2.5486565960451457e-01, -1.3602299806901461e-19, -6.0073844642762535e-36, -3.0072751311893878e-52},
{ 2.5783110216215899e-01, 1.8480038630879957e-17, 3.3201664714047599e-34, -5.5547819290576764e-51},
{ 2.6079411791527551e-01, 4.2721420983550075e-18, 5.6782126934777920e-35, 3.1428338084365397e-51},
{ 2.6375467897483140e-01, -1.8837947680038700e-17, 1.3720129045754794e-33, -8.2763406665966033e-50},
{ 2.6671275747489837e-01, 2.0941222578826688e-17, -1.1303466524727989e-33, 1.9954224050508963e-50},
{ 2.6966832557291509e-01, 1.5765657618133259e-17, -6.9696142173370086e-34, -4.0455346879146776e-50},
{ 2.7262135544994898e-01, 7.8697166076387850e-18, 6.6179388602933372e-35, -2.7642903696386267e-51},
{ 2.7557181931095814e-01, 1.9320328962556582e-17, 1.3932094180100280e-33, 1.3617253920018116e-50},
{ 2.7851968938505312e-01, -1.0030273719543544e-17, 7.2592115325689254e-34, -1.0068516296655851e-50},
{ 2.8146493792575800e-01, -1.2322299641274009e-17, -1.0564788706386435e-34, 7.5137424251265885e-51},
{ 2.8440753721127182e-01, 2.2209268510661475e-17, -9.1823095629523708e-34, -5.2192875308892218e-50},
{ 2.8734745954472951e-01, 1.5461117367645717e-17, -6.3263973663444076e-34, -2.2982538416476214e-50},
{ 2.9028467725446239e-01, -1.8927978707774251e-17, 1.1522953157142315e-33, 7.4738655654716596e-50},
{ 2.9321916269425863e-01, 2.2385430811901833e-17, 1.3662484646539680e-33, -4.2451325253996938e-50},
{ 2.9615088824362384e-01, -2.0220736360876938e-17, -7.9252212533920413e-35, -2.8990577729572470e-51},
{ 2.9907982630804048e-01, 1.6701181609219447e-18, 8.6091151117316292e-35, 3.9931286230012102e-52},
{ 3.0200594931922808e-01, -1.7167666235262474e-17, 2.3336182149008069e-34, 8.3025334555220004e-51},
{ 3.0492922973540243e-01, -2.2989033898191262e-17, -1.4598901099661133e-34, 3.7760487693121827e-51},
{ 3.0784964004153487e-01, 2.7074088527245185e-17, 1.2568858206899284e-33, 7.2931815105901645e-50},
{ 3.1076715274961147e-01, 2.0887076364048513e-17, -3.0130590791065942e-34, 1.3876739009935179e-51},
{ 3.1368174039889146e-01, 1.4560447299968912e-17, 3.6564186898011595e-34, 1.1654264734999375e-50},
{ 3.1659337555616585e-01, 2.1435292512726283e-17, 1.2338169231377316e-33, 3.3963542100989293e-50},
{ 3.1950203081601569e-01, -1.3981562491096626e-17, 8.1730000697411350e-34, -7.7671096270210952e-50},
{ 3.2240767880106985e-01, -4.0519039937959398e-18, 3.7438302780296796e-34, 8.7936731046639195e-51},
{ 3.2531029216226293e-01, 7.9171249463765892e-18, -6.7576622068146391e-35, 2.3021655066929538e-51},
{ 3.2820984357909255e-01, -2.6693140719641896e-17, 7.8928851447534788e-34, 2.5525163821987809e-51},
{ 3.3110630575987643e-01, -2.7469465474778694e-17, -1.3401245916610206e-33, 6.5531762489976163e-50},
{ 3.3399965144200938e-01, 2.2598986806288142e-17, 7.8063057192586115e-34, 2.0427600895486683e-50},
{ 3.3688985339222005e-01, -4.2000940033475092e-19, -2.9178652969985438e-36, -1.1597376437036749e-52},
{ 3.3977688440682685e-01, 6.6028679499418282e-18, 1.2575009988669683e-34, 2.5569067699008304e-51},
{ 3.4266071731199438e-01, 1.9261518449306319e-17, -9.2754189135990867e-34, 8.5439996687390166e-50},
{ 3.4554132496398904e-01, 2.7251143672916123e-17, 7.0138163601941737e-34, -1.4176292197454015e-50},
{ 3.4841868024943456e-01, 3.6974420514204918e-18, 3.5532146878499996e-34, 1.9565462544501322e-50},
{ 3.5129275608556715e-01, -2.2670712098795844e-17, -1.6994216673139631e-34, -1.2271556077284517e-50},
{ 3.5416352542049040e-01, -1.6951763305764860e-17, 1.2772331777814617e-33, -3.3703785435843310e-50},
{ 3.5703096123343003e-01, -4.8218191137919166e-19, -4.1672436994492361e-35, -7.1531167149364352e-52},
{ 3.5989503653498817e-01, -1.7601687123839282e-17, 1.3375125473046791e-33, 7.9467815593584340e-50},
{ 3.6275572436739723e-01, -9.1668352663749849e-18, -7.4317843956936735e-34, -2.0199582511804564e-50},
{ 3.6561299780477385e-01, 1.6217898770457546e-17, 1.1286970151961055e-33, -7.1825287318139010e-50},
{ 3.6846682995337232e-01, 1.0463640796159268e-17, 2.0554984738517304e-35, 1.0441861305618769e-51},
{ 3.7131719395183754e-01, 3.4749239648238266e-19, -7.5151053042866671e-37, -2.8153468438650851e-53},
{ 3.7416406297145799e-01, 8.0114103761962118e-18, 5.3429599813406052e-34, 1.0351378796539210e-50},
{ 3.7700741021641826e-01, -2.7255302041956930e-18, 6.3646586445018137e-35, 8.3048657176503559e-52},
{ 3.7984720892405116e-01, 9.9151305855172370e-18, 4.8761409697224886e-34, 1.4025084000776705e-50},
{ 3.8268343236508978e-01, -1.0050772696461588e-17, -2.0605316302806695e-34, -1.2717724698085205e-50},
{ 3.8551605384391885e-01, 1.5177665396472313e-17, 1.4198230518016535e-33, 5.8955167159904235e-50},
{ 3.8834504669882630e-01, -1.0053770598398717e-17, 7.5942999255057131e-34, -3.1967974046654219e-50},
{ 3.9117038430225387e-01, 1.7997787858243995e-17, -1.0613482402609856e-33, -5.4582148817791032e-50},
{ 3.9399204006104810e-01, 9.7649241641239336e-18, -2.1233599441284617e-34, -5.5529836795340819e-51},
{ 3.9680998741671031e-01, 2.0545063670840126e-17, 6.1347058801922842e-34, 1.0733788150636430e-50},
{ 3.9962419984564684e-01, -1.5065497476189372e-17, -9.9653258881867298e-34, -5.7524323712725355e-50},
{ 4.0243465085941843e-01, 1.0902619339328270e-17, 7.3998528125989765e-34, 2.2745784806823499e-50},
{ 4.0524131400498986e-01, 9.9111401942899884e-18, -2.5169070895434648e-34, 9.2772984818436573e-53},
{ 4.0804416286497869e-01, -7.0006015137351311e-18, -1.4108207334268228e-34, 1.5175546997577136e-52},
{ 4.1084317105790397e-01, -2.4219835190355499e-17, -1.1418902925313314e-33, -2.0996843165093468e-50},
{ 4.1363831223843456e-01, -1.0393984940597871e-17, -1.1481681174503880e-34, -2.0281052851028680e-51},
{ 4.1642956009763721e-01, -2.5475580413131732e-17, -3.4482678506112824e-34, 7.1788619351865480e-51},
{ 4.1921688836322396e-01, -4.2232463750110590e-18, -3.6053023045255790e-34, -2.2209673210025631e-50},
{ 4.2200027079979968e-01, 4.3543266994128527e-18, 3.1734310272251190e-34, -1.3573247980738668e-50},
{ 4.2477968120910881e-01, 2.7462312204277281e-17, -4.6552847802111948e-34, 6.5961781099193122e-51},
{ 4.2755509343028208e-01, 9.4111898162954726e-18, -1.7446682426598801e-34, -2.2054492626480169e-51},
{ 4.3032648134008261e-01, 2.2259686974092690e-17, 8.5972591314085075e-34, -2.9420897889003020e-50},
{ 4.3309381885315196e-01, 1.1224283329847517e-17, 5.3223748041075651e-35, 5.3926192627014212e-51},
{ 4.3585707992225547e-01, 1.6230515450644527e-17, -6.4371449063579431e-35, -6.9102436481386757e-51},
{ 4.3861623853852766e-01, -2.0883315831075090e-17, -1.4259583540891877e-34, 6.3864763590657077e-52},
{ 4.4137126873171667e-01, 2.2360783886964969e-17, 1.1864769603515770e-34, -3.8087003266189232e-51},
{ 4.4412214457042926e-01, -2.4218874422178315e-17, 2.2205230838703907e-34, 9.2133035911356258e-51},
{ 4.4686884016237421e-01, -1.9222136142309382e-17, -4.4425678589732049e-35, -1.3673609292149535e-51},
{ 4.4961132965460660e-01, 4.8831924232035243e-18, 2.7151084498191381e-34, -1.5653993171613154e-50},
{ 4.5234958723377089e-01, -1.4827977472196122e-17, -7.6947501088972324e-34, 1.7656856882031319e-50},
{ 4.5508358712634384e-01, -1.2379906758116472e-17, 5.5289688955542643e-34, -8.5382312840209386e-51},
{ 4.5781330359887723e-01, -8.4554254922295949e-18, -6.3770394246764263e-34, 3.1778253575564249e-50},
{ 4.6053871095824001e-01, 1.8488777492177872e-17, -1.0527732154209725e-33, 3.3235593490947102e-50},
{ 4.6325978355186020e-01, -7.3514924533231707e-18, 6.7175396881707035e-34, 3.9594127612123379e-50},
{ 4.6597649576796618e-01, -3.3023547778235135e-18, 3.4904677050476886e-35, 3.4483855263874246e-51},
{ 4.6868882203582796e-01, -2.2949251681845054e-17, -1.1364757641823658e-33, 6.8840522501918612e-50},
{ 4.7139673682599764e-01, 6.5166781360690130e-18, 2.9457546966235984e-34, -6.2159717738836630e-51},
{ 4.7410021465055002e-01, -8.1451601548978075e-18, -3.4789448555614422e-34, -1.1681943974658508e-50},
{ 4.7679923006332214e-01, -1.0293515338305794e-17, -3.6582045008369952e-34, 1.7424131479176475e-50},
{ 4.7949375766015301e-01, 1.8419999662684771e-17, -1.3040838621273312e-33, 1.0977131822246471e-50},
{ 4.8218377207912277e-01, -2.5861500925520442e-17, -6.2913197606500007e-36, 4.0802359808684726e-52},
{ 4.8486924800079112e-01, -1.8034004203262245e-17, -3.5244276906958044e-34, -1.7138318654749246e-50},
{ 4.8755016014843594e-01, 1.4231090931273653e-17, -1.8277733073262697e-34, -1.5208291790429557e-51},
{ 4.9022648328829116e-01, -5.1496145643440404e-18, -3.6903027405284104e-34, 1.5172940095151304e-50},
{ 4.9289819222978404e-01, -1.0257831676562186e-18, 6.9520817760885069e-35, -2.4260961214090389e-51},
{ 4.9556526182577254e-01, -9.4323241942365362e-18, 3.1212918657699143e-35, 4.2009072375242736e-52},
{ 4.9822766697278187e-01, -1.6126383830540798e-17, -1.5092897319298871e-33, 1.1049298890895917e-50},
{ 5.0088538261124083e-01, -3.9604015147074639e-17, -2.2208395201898007e-33, 1.3648202735839417e-49},
{ 5.0353838372571758e-01, -1.6731308204967497e-17, -1.0140233644074786e-33, 4.0953071937671477e-50},
{ 5.0618664534515534e-01, -4.8321592986493711e-17, 9.2858107226642252e-34, 4.2699802401037005e-50},
{ 5.0883014254310699e-01, 4.7836968268014130e-17, -1.0727022928806035e-33, 2.7309374513672757e-50},
{ 5.1146885043797041e-01, -1.3088001221007579e-17, 4.0929033363366899e-34, -3.7952190153477926e-50},
{ 5.1410274419322177e-01, -4.5712707523615624e-17, 1.5488279442238283e-33, -2.5853959305521130e-50},
{ 5.1673179901764987e-01, 8.3018617233836515e-18, 5.8251027467695202e-34, -2.2812397190535076e-50},
{ 5.1935599016558964e-01, -5.5331248144171145e-17, -3.1628375609769026e-35, -2.4091972051188571e-51},
{ 5.2197529293715439e-01, -4.6555795692088883e-17, 4.6378980936850430e-34, -3.3470542934689532e-51},
{ 5.2458968267846895e-01, -4.3068869040082345e-17, -4.2013155291932055e-34, -1.5096069926700274e-50},
{ 5.2719913478190139e-01, -4.2202983480560619e-17, 8.5585916184867295e-34, 7.9974339336732307e-50},
{ 5.2980362468629472e-01, -4.8067841706482342e-17, 5.8309721046630296e-34, -8.9740761521756660e-51},
{ 5.3240312787719801e-01, -4.1020306135800895e-17, -1.9239996374230821e-33, -1.5326987913812184e-49},
{ 5.3499761988709726e-01, -5.3683132708358134e-17, -1.3900569918838112e-33, 2.7154084726474092e-50},
{ 5.3758707629564551e-01, -2.2617365388403054e-17, -5.9787279033447075e-34, 3.1204419729043625e-51},
{ 5.4017147272989285e-01, 2.7072447965935839e-17, 1.1698799709213829e-33, -5.9094668515881500e-50},
{ 5.4275078486451589e-01, 1.7148261004757101e-17, -1.3525905925200870e-33, 4.9724411290727323e-50},
{ 5.4532498842204646e-01, -4.1517817538384258e-17, -1.5318930219385941e-33, 6.3629921101413974e-50},
{ 5.4789405917310019e-01, -2.4065878297113363e-17, -3.5639213669362606e-36, -2.6013270854271645e-52},
{ 5.5045797293660481e-01, -8.3319903015807663e-18, -2.3058454035767633e-34, -2.1611290432369010e-50},
{ 5.5301670558002758e-01, -4.7061536623798204e-17, -1.0617111545918056e-33, -1.6196316144407379e-50},
{ 5.5557023301960218e-01, 4.7094109405616768e-17, -2.0640520383682921e-33, 1.2290163188567138e-49},
{ 5.5811853122055610e-01, 1.3481176324765226e-17, -5.5016743873011438e-34, -2.3484822739335416e-50},
{ 5.6066157619733603e-01, -7.3956418153476152e-18, 3.9680620611731193e-34, 3.1995952200836223e-50},
{ 5.6319934401383409e-01, 2.3835775146854829e-17, 1.3511793173769814e-34, 9.3201311581248143e-51},
{ 5.6573181078361323e-01, -3.4096079596590466e-17, -1.7073289744303546e-33, 8.9147089975404507e-50},
{ 5.6825895267013160e-01, -5.0935673642769248e-17, -1.6274356351028249e-33, 9.8183151561702966e-51},
{ 5.7078074588696726e-01, 2.4568151455566208e-17, -1.2844481247560350e-33, -1.8037634376936261e-50},
{ 5.7329716669804220e-01, 8.5176611669306400e-18, -6.4443208788026766e-34, 2.2546105543273003e-50},
{ 5.7580819141784534e-01, -3.7909495458942734e-17, -2.7433738046854309e-33, 1.1130841524216795e-49},
{ 5.7831379641165559e-01, -2.6237691512372831e-17, 1.3679051680738167e-33, -3.1409808935335900e-50},
{ 5.8081395809576453e-01, 1.8585338586613408e-17, 2.7673843114549181e-34, 1.9605349619836937e-50},
{ 5.8330865293769829e-01, 3.4516601079044858e-18, 1.8065977478946306e-34, -6.3953958038544646e-51},
{ 5.8579785745643886e-01, -3.7485501964311294e-18, 2.7965403775536614e-34, -7.1816936024157202e-51},
{ 5.8828154822264533e-01, -2.9292166725006846e-17, -2.3744954603693934e-33, -1.1571631191512480e-50},
{ 5.9075970185887428e-01, -4.7013584170659542e-17, 2.4808417611768356e-33, 1.2598907673643198e-50},
{ 5.9323229503979980e-01, 1.2892320944189053e-17, 5.3058364776359583e-34, 4.1141674699390052e-50},
{ 5.9569930449243336e-01, -1.3438641936579467e-17, -6.7877687907721049e-35, -5.6046937531684890e-51},
{ 5.9816070699634227e-01, 3.8801885783000657e-17, -1.2084165858094663e-33, -4.0456610843430061e-50},
{ 6.0061647938386897e-01, -4.6398198229461932e-17, -1.6673493003710801e-33, 5.1982824378491445e-50},
{ 6.0306659854034816e-01, 3.7323357680559650e-17, 2.7771920866974305e-33, -1.6194229649742458e-49},
{ 6.0551104140432555e-01, -3.1202672493305677e-17, 1.2761267338680916e-33, -4.0859368598379647e-50},
{ 6.0794978496777363e-01, 3.5160832362096660e-17, -2.5546242776778394e-34, -1.4085313551220694e-50},
{ 6.1038280627630948e-01, -2.2563265648229169e-17, 1.3185575011226730e-33, 8.2316691420063460e-50},
{ 6.1281008242940971e-01, -4.2693476568409685e-18, 2.5839965886650320e-34, 1.6884412005622537e-50},
{ 6.1523159058062682e-01, 2.6231417767266950e-17, -1.4095366621106716e-33, 7.2058690491304558e-50},
{ 6.1764730793780398e-01, -4.7478594510902452e-17, -7.2986558263123996e-34, -3.0152327517439154e-50},
{ 6.2005721176328921e-01, -2.7983410837681118e-17, 1.1649951056138923e-33, -5.4539089117135207e-50},
{ 6.2246127937414997e-01, 5.2940728606573002e-18, -4.8486411215945827e-35, 1.2696527641980109e-52},
{ 6.2485948814238634e-01, 3.3671846037243900e-17, -2.7846053391012096e-33, 5.6102718120012104e-50},
{ 6.2725181549514408e-01, 3.0763585181253225e-17, 2.7068930273498138e-34, -1.1172240309286484e-50},
{ 6.2963823891492698e-01, 4.1115334049626806e-17, -1.9167473580230747e-33, 1.1118424028161730e-49},
{ 6.3201873593980906e-01, -4.0164942296463612e-17, -7.2208643641736723e-34, 3.7828920470544344e-50},
{ 6.3439328416364549e-01, 1.0420901929280035e-17, 4.1174558929280492e-34, -1.4464152986630705e-51},
{ 6.3676186123628420e-01, 3.1419048711901611e-17, -2.2693738415126449e-33, -1.6023584204297388e-49},
{ 6.3912444486377573e-01, 1.2416796312271043e-17, -6.2095419626356605e-34, 2.7762065999506603e-50},
{ 6.4148101280858316e-01, -9.9883430115943310e-18, 4.1969230376730128e-34, 5.6980543799257597e-51},
{ 6.4383154288979150e-01, -3.2084798795046886e-17, -1.2595311907053305e-33, -4.0205885230841536e-50},
{ 6.4617601298331639e-01, -2.9756137382280815e-17, -1.0275370077518259e-33, 8.0852478665893014e-51},
{ 6.4851440102211244e-01, 3.9870270313386831e-18, 1.9408388509540788e-34, -5.1798420636193190e-51},
{ 6.5084668499638088e-01, 3.9714670710500257e-17, 2.9178546787002963e-34, 3.8140635508293278e-51},
{ 6.5317284295377676e-01, 8.5695642060026238e-18, -6.9165322305070633e-34, 2.3873751224185395e-50},
{ 6.5549285299961535e-01, 3.5638734426385005e-17, 1.2695365790889811e-33, 4.3984952865412050e-50},
{ 6.5780669329707864e-01, 1.9580943058468545e-17, -1.1944272256627192e-33, 2.8556402616436858e-50},
{ 6.6011434206742048e-01, -1.3960054386823638e-19, 6.1515777931494047e-36, 5.3510498875622660e-52},
{ 6.6241577759017178e-01, -2.2615508885764591e-17, 5.0177050318126862e-34, 2.9162532399530762e-50},
{ 6.6471097820334490e-01, -3.6227793598034367e-17, -9.0607934765540427e-34, 3.0917036342380213e-50},
{ 6.6699992230363747e-01, 3.5284364997428166e-17, -1.0382057232458238e-33, 7.3812756550167626e-50},
{ 6.6928258834663612e-01, -5.4592652417447913e-17, -2.5181014709695152e-33, -1.6867875999437174e-49},
{ 6.7155895484701844e-01, -4.0489037749296692e-17, 3.1995835625355681e-34, -1.4044414655670960e-50},
{ 6.7382900037875604e-01, 2.3091901236161086e-17, 5.7428037192881319e-34, 1.1240668354625977e-50},
{ 6.7609270357531592e-01, 3.7256902248049466e-17, 1.7059417895764375e-33, 9.7326347795300652e-50},
{ 6.7835004312986147e-01, 1.8302093041863122e-17, 9.5241675746813072e-34, 5.0328101116133503e-50},
{ 6.8060099779545302e-01, 2.8473293354522047e-17, 4.1331805977270903e-34, 4.2579030510748576e-50},
{ 6.8284554638524808e-01, -1.2958058061524531e-17, 1.8292386959330698e-34, 3.4536209116044487e-51},
{ 6.8508366777270036e-01, 2.5948135194645137e-17, -8.5030743129500702e-34, -6.9572086141009930e-50},
{ 6.8731534089175916e-01, -5.5156158714917168e-17, 1.1896489854266829e-33, -7.8505896218220662e-51},
{ 6.8954054473706694e-01, -1.5889323294806790e-17, 9.1242356240205712e-34, 3.8315454152267638e-50},
{ 6.9175925836415775e-01, 2.7406078472410668e-17, 1.3286508943202092e-33, 1.0651869129580079e-51},
{ 6.9397146088965400e-01, 7.4345076956280137e-18, 7.5061528388197460e-34, -1.5928000240686583e-50},
{ 6.9617713149146299e-01, -4.1224081213582889e-17, -3.1838716762083291e-35, -3.9625587412119131e-51},
{ 6.9837624940897280e-01, 4.8988282435667768e-17, 1.9134010413244152e-33, 2.6161153243793989e-50},
{ 7.0056879394324834e-01, 3.1027960192992922e-17, 9.5638250509179997e-34, 4.5896916138107048e-51},
{ 7.0275474445722530e-01, 2.5278294383629822e-18, -8.6985561210674942e-35, -5.6899862307812990e-51},
{ 7.0493408037590488e-01, 2.7608725585748502e-17, 2.9816599471629137e-34, 1.1533044185111206e-50},
{ 7.0710678118654757e-01, -4.8336466567264567e-17, 2.0693376543497068e-33, 2.4677734957341755e-50},
};
static __device__ __constant__ double qd_cos_tbl[256][4] = {
{ 9.9999529380957619e-01, -1.9668064285322189e-17, -6.3053955095883481e-34, 5.3266110855726731e-52},
{ 9.9998117528260111e-01, 3.3568103522895585e-17, -1.4740132559368063e-35, 9.8603097594755596e-52},
{ 9.9995764455196390e-01, -3.1527836866647287e-17, 2.6363251186638437e-33, 1.0007504815488399e-49},
{ 9.9992470183914450e-01, 3.7931082512668012e-17, -8.5099918660501484e-35, -4.9956973223295153e-51},
{ 9.9988234745421256e-01, -3.5477814872408538e-17, 1.7102001035303974e-33, -1.0725388519026542e-49},
{ 9.9983058179582340e-01, 1.8825140517551119e-17, -5.1383513457616937e-34, -3.8378827995403787e-50},
{ 9.9976940535121528e-01, 4.2681177032289012e-17, 1.9062302359737099e-33, -6.0221153262881160e-50},
{ 9.9969881869620425e-01, -2.9851486403799753e-17, -1.9084787370733737e-33, 5.5980260344029202e-51},
{ 9.9961882249517864e-01, -4.1181965521424734e-17, 2.0915365593699916e-33, 8.1403390920903734e-50},
{ 9.9952941750109314e-01, 2.0517917823755591e-17, -4.7673802585706520e-34, -2.9443604198656772e-50},
{ 9.9943060455546173e-01, 3.9644497752257798e-17, -2.3757223716722428e-34, -1.2856759011361726e-51},
{ 9.9932238458834954e-01, -4.2858538440845682e-17, 3.3235101605146565e-34, -8.3554272377057543e-51},
{ 9.9920475861836389e-01, 9.1796317110385693e-18, 5.5416208185868570e-34, 8.0267046717615311e-52},
{ 9.9907772775264536e-01, 2.1419007653587032e-17, -7.9048203318529618e-34, -5.3166296181112712e-50},
{ 9.9894129318685687e-01, -2.0610641910058638e-17, -1.2546525485913485e-33, -7.5175888806157064e-50},
{ 9.9879545620517241e-01, -1.2291693337075465e-17, 2.4468446786491271e-34, 1.0723891085210268e-50},
{ 9.9864021818026527e-01, -4.8690254312923302e-17, -2.9470881967909147e-34, -1.3000650761346907e-50},
{ 9.9847558057329477e-01, -2.2002931182778795e-17, -1.2371509454944992e-33, -2.4911225131232065e-50},
{ 9.9830154493389289e-01, -5.1869402702792278e-17, 1.0480195493633452e-33, -2.8995649143155511e-50},
{ 9.9811811290014918e-01, 2.7935487558113833e-17, 2.4423341255830345e-33, -6.7646699175334417e-50},
{ 9.9792528619859600e-01, 1.7143659778886362e-17, 5.7885840902887460e-34, -9.2601432603894597e-51},
{ 9.9772306664419164e-01, -2.6394475274898721e-17, -1.6176223087661783e-34, -9.9924942889362281e-51},
{ 9.9751145614030345e-01, 5.6007205919806937e-18, -5.9477673514685690e-35, -1.4166807162743627e-54},
{ 9.9729045667869021e-01, 9.1647695371101735e-18, 6.7824134309739296e-34, -8.6191392795543357e-52},
{ 9.9706007033948296e-01, 1.6734093546241963e-17, -1.3169951440780028e-33, 1.0311048767952477e-50},
{ 9.9682029929116567e-01, 4.7062820708615655e-17, 2.8412041076474937e-33, -8.0006155670263622e-50},
{ 9.9657114579055484e-01, 1.1707179088390986e-17, -7.5934413263024663e-34, 2.8474848436926008e-50},
{ 9.9631261218277800e-01, 1.1336497891624735e-17, 3.4002458674414360e-34, 7.7419075921544901e-52},
{ 9.9604470090125197e-01, 2.2870031707670695e-17, -3.9184839405013148e-34, -3.7081260416246375e-50},
{ 9.9576741446765982e-01, -2.3151908323094359e-17, -1.6306512931944591e-34, -1.5925420783863192e-51},
{ 9.9548075549192694e-01, 3.2084621412226554e-18, -4.9501292146013023e-36, -2.7811428850878516e-52},
{ 9.9518472667219693e-01, -4.2486913678304410e-17, 1.3315510772504614e-33, 6.7927987417051888e-50},
{ 9.9487933079480562e-01, 4.2130813284943662e-18, -4.2062597488288452e-35, 2.5157064556087620e-51},
{ 9.9456457073425542e-01, 3.6745069641528058e-17, -3.0603304105471010e-33, 1.0397872280487526e-49},
{ 9.9424044945318790e-01, 4.4129423472462673e-17, -3.0107231708238066e-33, 7.4201582906861892e-50},
{ 9.9390697000235606e-01, -1.8964849471123746e-17, -1.5980853777937752e-35, -8.5374807150597082e-52},
{ 9.9356413552059530e-01, 2.9752309927797428e-17, -4.5066707331134233e-34, -3.3548191633805036e-50},
{ 9.9321194923479450e-01, 3.3096906261272262e-17, 1.5592811973249567e-33, 1.4373977733253592e-50},
{ 9.9285041445986510e-01, -1.4094517733693302e-17, -1.1954558131616916e-33, 1.8761873742867983e-50},
{ 9.9247953459870997e-01, 3.1093055095428906e-17, -1.8379594757818019e-33, -3.9885758559381314e-51},
{ 9.9209931314219180e-01, -3.9431926149588778e-17, -6.2758062911047230e-34, -1.2960929559212390e-50},
{ 9.9170975366909953e-01, -2.3372891311883661e-18, 2.7073298824968591e-35, -1.2569459441802872e-51},
{ 9.9131085984611544e-01, -2.5192111583372105e-17, -1.2852471567380887e-33, 5.2385212584310483e-50},
{ 9.9090263542778001e-01, 1.5394565094566704e-17, -1.0799984133184567e-33, 2.7451115960133595e-51},
{ 9.9048508425645709e-01, -5.5411437553780867e-17, -1.4614017210753585e-33, -3.8339374397387620e-50},
{ 9.9005821026229712e-01, -1.7055485906233963e-17, 1.3454939685758777e-33, 7.3117589137300036e-50},
{ 9.8962201746320089e-01, -5.2398217968132530e-17, 1.3463189211456219e-33, 5.8021640554894872e-50},
{ 9.8917650996478101e-01, -4.0987309937047111e-17, -4.4857560552048437e-34, -3.9414504502871125e-50},
{ 9.8872169196032378e-01, -1.0976227206656125e-17, 3.2311342577653764e-34, 9.6367946583575041e-51},
{ 9.8825756773074946e-01, 2.7030607784372632e-17, 7.7514866488601377e-35, 2.1019644956864938e-51},
{ 9.8778414164457218e-01, -2.3600693397159021e-17, -1.2323283769707861e-33, 3.0130900716803339e-50},
{ 9.8730141815785843e-01, -5.2332261255715652e-17, -2.7937644333152473e-33, 1.2074160567958408e-49},
{ 9.8680940181418553e-01, -5.0287214351061075e-17, -2.2681526238144461e-33, 4.4003694320169133e-50},
{ 9.8630809724459867e-01, -2.1520877103013341e-17, 1.1866528054187716e-33, -7.8532199199813836e-50},
{ 9.8579750916756748e-01, -5.1439452979953012e-17, 2.6276439309996725e-33, 7.5423552783286347e-50},
{ 9.8527764238894122e-01, 2.3155637027900207e-17, -7.5275971545764833e-34, 1.0582231660456094e-50},
{ 9.8474850180190421e-01, 1.0548144061829957e-17, 2.8786145266267306e-34, -3.6782210081466112e-51},
{ 9.8421009238692903e-01, 4.7983922627050691e-17, 2.2597419645070588e-34, 1.7573875814863400e-50},
{ 9.8366241921173025e-01, 1.9864948201635255e-17, -1.0743046281211033e-35, 1.7975662796558100e-52},
{ 9.8310548743121629e-01, 4.2170007522888628e-17, 8.2396265656440904e-34, -8.0803700139096561e-50},
{ 9.8253930228744124e-01, 1.5149580813777224e-17, -4.1802771422186237e-34, -2.2150174326226160e-50},
{ 9.8196386910955524e-01, 2.1108443711513084e-17, -1.5253013442896054e-33, -6.8388082079337969e-50},
{ 9.8137919331375456e-01, 1.3428163260355633e-17, -6.5294290469962986e-34, 2.7965412287456268e-51},
{ 9.8078528040323043e-01, 1.8546939997825006e-17, -1.0696564445530757e-33, 6.6668174475264961e-50},
{ 9.8018213596811743e-01, -3.6801786963856159e-17, 6.3245171387992842e-34, 1.8600176137175971e-50},
{ 9.7956976568544052e-01, 1.5573991584990420e-17, -1.3401066029782990e-33, -1.7263702199862149e-50},
{ 9.7894817531906220e-01, -2.3817727961148053e-18, -1.0694750370381661e-34, -8.2293047196087462e-51},
{ 9.7831737071962765e-01, -2.1623082233344895e-17, 1.0970403012028032e-33, 7.7091923099369339e-50},
{ 9.7767735782450993e-01, 5.0514136167059628e-17, -1.3254751701428788e-33, 7.0161254312124538e-50},
{ 9.7702814265775439e-01, -4.3353875751555997e-17, 5.4948839831535478e-34, -9.2755263105377306e-51},
{ 9.7636973133002114e-01, 9.3093931526213780e-18, -4.1184949155685665e-34, -3.1913926031393690e-50},
{ 9.7570213003852857e-01, -2.5572556081259686e-17, -9.3174244508942223e-34, -8.3675863211646863e-51},
{ 9.7502534506699412e-01, 2.6642660651899135e-17, 1.7819392739353853e-34, -3.3159625385648947e-51},
{ 9.7433938278557586e-01, 2.3041221476151512e-18, 1.0758686005031430e-34, 5.1074116432809478e-51},
{ 9.7364424965081198e-01, -5.1729808691005871e-17, -1.5508473005989887e-33, -1.6505125917675401e-49},
{ 9.7293995220556018e-01, -3.1311211122281800e-17, -2.6874087789006141e-33, -2.1652434818822145e-51},
{ 9.7222649707893627e-01, 3.6461169785938221e-17, 3.0309636883883133e-33, -1.2702716907967306e-51},
{ 9.7150389098625178e-01, -7.9865421122289046e-18, -4.3628417211263380e-34, 3.4307517798759352e-51},
{ 9.7077214072895035e-01, -4.7992163325114922e-17, 3.0347528910975783e-33, 8.5989199506479701e-50},
{ 9.7003125319454397e-01, 1.8365300348428844e-17, -1.4311097571944918e-33, 8.5846781998740697e-51},
{ 9.6928123535654853e-01, -4.5663660261927896e-17, 9.6147526917239387e-34, 8.1267605207871330e-51},
{ 9.6852209427441727e-01, 4.9475074918244771e-17, 2.8558738351911241e-33, 6.2948422316507461e-50},
{ 9.6775383709347551e-01, -4.5512132825515820e-17, -1.4127617988719093e-33, -8.4620609089704578e-50},
{ 9.6697647104485207e-01, 3.8496228837337864e-17, -5.3881631542745647e-34, -3.5221863171458959e-50},
{ 9.6619000344541250e-01, 5.1298840401665493e-17, 1.4564075904769808e-34, 1.0095973971377432e-50},
{ 9.6539444169768940e-01, -2.3745389918392156e-17, 5.9221515590053862e-34, -3.8811192556231094e-50},
{ 9.6458979328981276e-01, -3.4189470735959786e-17, 2.2982074155463522e-33, -4.5128791045607634e-50},
{ 9.6377606579543984e-01, 2.6463950561220029e-17, -2.9073234590199323e-36, -1.2938328629395601e-52},
{ 9.6295326687368388e-01, 8.9341960404313634e-18, -3.9071244661020126e-34, 1.6212091116847394e-50},
{ 9.6212140426904158e-01, 1.5236770453846305e-17, -1.3050173525597142e-33, 7.9016122394092666e-50},
{ 9.6128048581132064e-01, 2.0933955216674039e-18, 1.0768607469015692e-34, -5.9453639304361774e-51},
{ 9.6043051941556579e-01, 2.4653904815317185e-17, -1.3792169410906322e-33, -4.7726598378506903e-51},
{ 9.5957151308198452e-01, 1.1000640085000957e-17, -4.2036030828223975e-34, 4.0023704842606573e-51},
{ 9.5870347489587160e-01, -4.3685014392372053e-17, 2.2001800662729131e-33, -1.0553721324358075e-49},
{ 9.5782641302753291e-01, -1.7696710075371263e-17, 1.9164034110382190e-34, 8.1489235071754813e-51},
{ 9.5694033573220882e-01, 4.0553869861875701e-17, -1.7147013364302149e-33, 2.5736745295329455e-50},
{ 9.5604525134999641e-01, 3.7705045279589067e-17, 1.9678699997347571e-33, 8.5093177731230180e-50},
{ 9.5514116830577067e-01, 5.0088652955014668e-17, -2.6983181838059211e-33, 1.0102323575596493e-49},
{ 9.5422809510910567e-01, -3.7545901690626874e-17, 1.4951619241257764e-33, -8.2717333151394973e-50},
{ 9.5330604035419386e-01, -2.5190738779919934e-17, -1.4272239821134379e-33, -4.6717286809283155e-50},
{ 9.5237501271976588e-01, -2.0269300462299272e-17, -1.0635956887246246e-33, -3.5514537666487619e-50},
{ 9.5143502096900834e-01, 3.1350584123266695e-17, -2.4824833452737813e-33, 9.5450335525380613e-51},
{ 9.5048607394948170e-01, 1.9410097562630436e-17, -8.1559393949816789e-34, -1.0501209720164562e-50},
{ 9.4952818059303667e-01, -7.5544151928043298e-18, -5.1260245024046686e-34, 1.8093643389040406e-50},
{ 9.4856134991573027e-01, 2.0668262262333232e-17, -5.9440730243667306e-34, 1.4268853111554300e-50},
{ 9.4758559101774109e-01, 4.3417993852125991e-17, -2.7728667889840373e-34, 5.5709160196519968e-51},
{ 9.4660091308328353e-01, 3.5056800210680730e-17, 9.8578536940318117e-34, 6.6035911064585197e-50},
{ 9.4560732538052128e-01, 4.6019102478523738e-17, -6.2534384769452059e-34, 1.5758941215779961e-50},
{ 9.4460483726148026e-01, 8.8100545476641165e-18, 5.2291695602757842e-34, -3.3487256018407123e-50},
{ 9.4359345816196039e-01, -2.4093127844404214e-17, 1.0283279856803939e-34, -2.3398232614531355e-51},
{ 9.4257319760144687e-01, 1.3235564806436886e-17, -5.7048262885386911e-35, 3.9947050442753744e-51},
{ 9.4154406518302081e-01, -2.7896379547698341e-17, 1.6273236356733898e-33, -5.3075944708471203e-51},
{ 9.4050607059326830e-01, 2.8610421567116268e-17, 2.9261501147538827e-33, -2.6849867690896925e-50},
{ 9.3945922360218992e-01, -7.0152867943098655e-18, -5.6395693818011210e-34, 3.5568142678987651e-50},
{ 9.3840353406310806e-01, 5.4242545044795490e-17, -1.9039966607859759e-33, -1.5627792988341215e-49},
{ 9.3733901191257496e-01, -3.6570926284362776e-17, -1.1902940071273247e-33, -1.1215082331583223e-50},
{ 9.3626566717027826e-01, -1.3013766145497654e-17, 5.2229870061990595e-34, -3.3972777075634108e-51},
{ 9.3518350993894761e-01, -3.2609395302485065e-17, -8.1813015218875245e-34, 5.5642140024928139e-50},
{ 9.3409255040425887e-01, 4.4662824360767511e-17, -2.5903243047396916e-33, 8.1505209004343043e-50},
{ 9.3299279883473885e-01, 4.2041415555384355e-17, 9.0285896495521276e-34, 5.3019984977661259e-50},
{ 9.3188426558166815e-01, -4.0785944377318095e-17, 1.7631450298754169e-33, 2.5776403305507453e-50},
{ 9.3076696107898371e-01, 1.9703775102838329e-17, 6.5657908718278205e-34, -1.9480347966259524e-51},
{ 9.2964089584318121e-01, 5.1282530016864107e-17, 2.3719739891916261e-34, -1.7230065426917127e-50},
{ 9.2850608047321559e-01, -2.3306639848485943e-17, -7.7799084333208503e-34, -5.8597558009300305e-50},
{ 9.2736252565040111e-01, -2.7677111692155437e-17, 2.2110293450199576e-34, 2.0349190819680613e-50},
{ 9.2621024213831138e-01, -3.7303754586099054e-17, 2.0464457809993405e-33, 1.3831799631231817e-49},
{ 9.2504924078267758e-01, 6.0529447412576159e-18, -8.8256517760278541e-35, 1.8285462122388328e-51},
{ 9.2387953251128674e-01, 1.7645047084336677e-17, -5.0442537321586818e-34, -4.0478677716823890e-50},
{ 9.2270112833387852e-01, 5.2963798918539814e-17, -5.7135699628876685e-34, 3.0163671797219087e-50},
{ 9.2151403934204190e-01, 4.1639843390684644e-17, 1.1891485604702356e-33, 2.0862437594380324e-50},
{ 9.2031827670911059e-01, -2.7806888779036837e-17, 2.7011013677071274e-33, 1.1998578792455499e-49},
{ 9.1911385169005777e-01, -2.6496484622344718e-17, 6.5403604763461920e-34, -2.8997180201186078e-50},
{ 9.1790077562139050e-01, -3.9074579680849515e-17, 2.3004636541490264e-33, 3.9851762744443107e-50},
{ 9.1667905992104270e-01, -4.1733978698287568e-17, 1.2094444804381172e-33, 4.9356916826097816e-50},
{ 9.1544871608826783e-01, -1.3591056692900894e-17, 5.9923027475594735e-34, 2.1403295925962879e-50},
{ 9.1420975570353069e-01, -3.6316182527814423e-17, -1.9438819777122554e-33, 2.8340679287728316e-50},
{ 9.1296219042839821e-01, -4.7932505228039469e-17, -1.7753551889428638e-33, 4.0607782903868160e-51},
{ 9.1170603200542988e-01, -2.6913273175034130e-17, -5.1928101916162528e-35, 1.1338175936090630e-51},
{ 9.1044129225806725e-01, -5.0433041673313820e-17, 1.0938746257404305e-33, 9.5378272084170731e-51},
{ 9.0916798309052238e-01, -3.6878564091359894e-18, 2.9951330310507693e-34, -1.2225666136919926e-50},
{ 9.0788611648766626e-01, -4.9459964301225840e-17, -1.6599682707075313e-33, -5.1925202712634716e-50},
{ 9.0659570451491533e-01, 3.0506718955442023e-17, -1.4478836557141204e-33, 1.8906373784448725e-50},
{ 9.0529675931811882e-01, -4.1153099826889901e-17, 2.9859368705184223e-33, 5.1145293917439211e-50},
{ 9.0398929312344334e-01, -6.6097544687484308e-18, 1.2728013034680357e-34, -4.3026097234014823e-51},
{ 9.0267331823725883e-01, -1.9250787033961483e-17, 1.3242128993244527e-33, -5.2971030688703665e-50},
{ 9.0134884704602203e-01, -1.3524789367698682e-17, 6.3605353115880091e-34, 3.6227400654573828e-50},
{ 9.0001589201616028e-01, -5.0639618050802273e-17, 1.0783525384031576e-33, 2.8130016326515111e-50},
{ 8.9867446569395382e-01, 2.6316906461033013e-17, 3.7003137047796840e-35, -2.3447719900465938e-51},
{ 8.9732458070541832e-01, -3.6396283314867290e-17, -2.3611649895474815e-33, 1.1837247047900082e-49},
{ 8.9596624975618511e-01, 4.9025099114811813e-17, -1.9440489814795326e-33, -1.7070486667767033e-49},
{ 8.9459948563138270e-01, -1.7516226396814919e-17, -1.3200670047246923e-33, -1.5953009884324695e-50},
{ 8.9322430119551532e-01, -4.1161239151908913e-18, 2.5380253805715999e-34, 4.2849455510516192e-51},
{ 8.9184070939234272e-01, 4.6690228137124547e-18, 1.6150254286841982e-34, -3.9617448820725012e-51},
{ 8.9044872324475788e-01, 1.1781931459051803e-17, -1.3346142209571930e-34, -9.4982373530733431e-51},
{ 8.8904835585466457e-01, -1.1164514966766675e-17, -3.4797636107798736e-34, -1.5605079997040631e-50},
{ 8.8763962040285393e-01, 1.2805091918587960e-17, 3.9948742059584459e-35, 3.8940716325338136e-51},
{ 8.8622253014888064e-01, -6.7307369600274315e-18, 1.2385593432917413e-34, 2.0364014759133320e-51},
{ 8.8479709843093779e-01, -9.4331469628972690e-18, -5.7106541478701439e-34, 1.8260134111907397e-50},
{ 8.8336333866573158e-01, 1.5822643380255127e-17, -7.8921320007588250e-34, -1.4782321016179836e-50},
{ 8.8192126434835505e-01, -1.9843248405890562e-17, -7.0412114007673834e-34, -1.0636770169389104e-50},
{ 8.8047088905216075e-01, 1.6311096602996350e-17, -5.7541360594724172e-34, -4.0128611862170021e-50},
{ 8.7901222642863353e-01, -4.7356837291118011e-17, 1.4388771297975192e-33, -2.9085554304479134e-50},
{ 8.7754529020726124e-01, 5.0113311846499550e-17, 2.8382769008739543e-34, 1.5550640393164140e-50},
{ 8.7607009419540660e-01, 5.8729024235147677e-18, 2.7941144391738458e-34, -1.8536073846509828e-50},
{ 8.7458665227817611e-01, -5.7216617730397065e-19, -2.9705811503689596e-35, 8.7389593969796752e-52},
{ 8.7309497841829009e-01, 7.8424672990129903e-18, -4.8685015839797165e-34, -2.2815570587477527e-50},
{ 8.7159508665595109e-01, -5.5272998038551050e-17, -2.2104090204984907e-33, -9.7749763187643172e-50},
{ 8.7008699110871146e-01, -4.1888510868549968e-17, 7.0900185861878415e-34, 3.7600251115157260e-50},
{ 8.6857070597134090e-01, 2.7192781689782903e-19, -1.6710140396932428e-35, -1.2625514734637969e-51},
{ 8.6704624551569265e-01, 3.0267859550930567e-18, -1.1559438782171572e-34, -5.3580556397808012e-52},
{ 8.6551362409056909e-01, -6.3723113549628899e-18, 2.3725520321746832e-34, 1.5911880348395175e-50},
{ 8.6397285612158670e-01, 4.1486355957361607e-17, 2.2709976932210266e-33, -8.1228385659479984e-50},
{ 8.6242395611104050e-01, 3.7008992527383130e-17, 5.2128411542701573e-34, 2.6945600081026861e-50},
{ 8.6086693863776731e-01, -3.0050048898573656e-17, -8.8706183090892111e-34, 1.5005320558097301e-50},
{ 8.5930181835700836e-01, 4.2435655816850687e-17, 7.6181814059912025e-34, -3.9592127850658708e-50},
{ 8.5772861000027212e-01, -4.8183447936336620e-17, -1.1044130517687532e-33, -8.7400233444645562e-50},
{ 8.5614732837519447e-01, 9.1806925616606261e-18, 5.6328649785951470e-34, 2.3326646113217378e-51},
{ 8.5455798836540053e-01, -1.2991124236396092e-17, 1.2893407722948080e-34, -3.6506925747583053e-52},
{ 8.5296060493036363e-01, 2.7152984251981370e-17, 7.4336483283120719e-34, 4.2162417622350668e-50},
{ 8.5135519310526520e-01, -5.3279874446016209e-17, 2.2281156380919942e-33, -4.0281886404138477e-50},
{ 8.4974176800085244e-01, 5.1812347659974015e-17, 3.0810626087331275e-33, -2.5931308201994965e-50},
{ 8.4812034480329723e-01, 1.8762563415239981e-17, 1.4048773307919617e-33, -2.4915221509958691e-50},
{ 8.4649093877405213e-01, -4.7969419958569345e-17, -2.7518267097886703e-33, -7.3518959727313350e-50},
{ 8.4485356524970712e-01, -4.3631360296879637e-17, -2.0307726853367547e-33, 4.3097229819851761e-50},
{ 8.4320823964184544e-01, 9.6536707005959077e-19, 2.8995142431556364e-36, 9.6715076811480284e-53},
{ 8.4155497743689844e-01, -3.4095465391321557e-17, -8.4130208607579595e-34, -4.9447283960568686e-50},
{ 8.3989379419599952e-01, -1.6673694881511411e-17, -1.4759184141750289e-33, -7.5795098161914058e-50},
{ 8.3822470555483808e-01, -3.5560085052855026e-17, 1.1689791577022643e-33, -5.8627347359723411e-50},
{ 8.3654772722351201e-01, -2.0899059027066533e-17, -9.8104097821002585e-35, -3.1609177868229853e-51},
{ 8.3486287498638001e-01, 4.6048430609159657e-17, -5.1827423265239912e-34, -7.0505343435504109e-51},
{ 8.3317016470191319e-01, 1.3275129507229764e-18, 4.8589164115370863e-35, 4.5422281300506859e-51},
{ 8.3146961230254524e-01, 1.4073856984728024e-18, 4.6951315383980830e-35, 5.1431906049905658e-51},
{ 8.2976123379452305e-01, -2.9349109376485597e-18, 1.1496917934149818e-34, 3.5186665544980233e-51},
{ 8.2804504525775580e-01, -4.4196593225871532e-17, 2.7967864855211251e-33, 1.0030777287393502e-49},
{ 8.2632106284566353e-01, -5.3957485453612902e-17, 6.8976896130138550e-34, 3.8106164274199196e-50},
{ 8.2458930278502529e-01, -2.6512360488868275e-17, 1.6916964350914386e-34, 6.7693974813562649e-51},
{ 8.2284978137582632e-01, 1.5193019034505495e-17, 9.6890547246521685e-34, 5.6994562923653264e-50},
{ 8.2110251499110465e-01, 3.0715131609697682e-17, -1.7037168325855879e-33, -1.1149862443283853e-49},
{ 8.1934752007679701e-01, -4.8200736995191133e-17, -1.5574489646672781e-35, -9.5647853614522216e-53},
{ 8.1758481315158371e-01, -1.4883149812426772e-17, -7.8273262771298917e-34, 4.1332149161031594e-50},
{ 8.1581441080673378e-01, 8.2652693782130871e-18, -2.3028778135179471e-34, 1.5102071387249843e-50},
{ 8.1403632970594841e-01, -5.2127351877042624e-17, -1.9047670611316360e-33, -1.6937269585941507e-49},
{ 8.1225058658520388e-01, 3.1054545609214803e-17, 2.2649541922707251e-34, -7.4221684154649405e-51},
{ 8.1045719825259477e-01, 2.3520367349840499e-17, -7.7530070904846341e-34, -7.2792616357197140e-50},
{ 8.0865618158817498e-01, 9.3251597879721674e-18, -7.1823301933068394e-34, 2.3925440846132106e-50},
{ 8.0684755354379922e-01, 4.9220603766095546e-17, 2.9796016899903487e-33, 1.5220754223615788e-49},
{ 8.0503133114296355e-01, 5.1368289568212149e-17, 6.3082807402256524e-34, 7.3277646085129827e-51},
{ 8.0320753148064494e-01, -3.3060609804814910e-17, -1.2242726252420433e-33, 2.8413673268630117e-50},
{ 8.0137617172314024e-01, -2.0958013413495834e-17, -4.3798162198006931e-34, 2.0235690497752515e-50},
{ 7.9953726910790501e-01, 2.0356723822005431e-17, -9.7448513696896360e-34, 5.3608109599696008e-52},
{ 7.9769084094339116e-01, -4.6730759884788944e-17, 2.3075897077191757e-33, 3.1605567774640253e-51},
{ 7.9583690460888357e-01, -3.0062724851910721e-17, -2.2496210832042235e-33, -6.5881774117183040e-50},
{ 7.9397547755433717e-01, -7.4194631759921416e-18, 2.4124341304631069e-34, -4.9956808616244972e-51},
{ 7.9210657730021239e-01, -3.7087850202326467e-17, -1.4874457267228264e-33, 2.9323097289153505e-50},
{ 7.9023022143731003e-01, 2.3056905954954492e-17, 1.4481080533260193e-33, -7.6725237057203488e-50},
{ 7.8834642762660623e-01, 3.4396993154059708e-17, 1.7710623746737170e-33, 1.7084159098417402e-49},
{ 7.8645521359908577e-01, -9.7841429939305265e-18, 3.3906063272445472e-34, 5.7269505320382577e-51},
{ 7.8455659715557524e-01, -8.5627965423173476e-18, -2.1106834459001849e-34, -1.6890322182469603e-50},
{ 7.8265059616657573e-01, 9.0745866975808825e-18, 6.7623847404278666e-34, -1.7173237731987271e-50},
{ 7.8073722857209449e-01, -9.9198782066678806e-18, -2.1265794012162715e-36, 3.0772165598957647e-54},
{ 7.7881651238147598e-01, -2.4891385579973807e-17, 6.7665497024807980e-35, -6.5218594281701332e-52},
{ 7.7688846567323244e-01, 7.7418602570672864e-18, -5.9986517872157897e-34, 3.0566548232958972e-50},
{ 7.7495310659487393e-01, -5.2209083189826433e-17, -9.6653593393686612e-34, 3.7027750076562569e-50},
{ 7.7301045336273699e-01, -3.2565907033649772e-17, 1.3860807251523929e-33, -3.9971329917586022e-50},
{ 7.7106052426181382e-01, -4.4558442347769265e-17, -2.9863565614083783e-33, -6.8795262083596236e-50},
{ 7.6910333764557959e-01, 5.1546455184564817e-17, 2.6142829553524292e-33, -1.6199023632773298e-49},
{ 7.6713891193582040e-01, -1.8885903683750782e-17, -1.3659359331495433e-33, -2.2538834962921934e-50},
{ 7.6516726562245896e-01, -3.2707225612534598e-17, 1.1177117747079528e-33, -3.7005182280175715e-50},
{ 7.6318841726338127e-01, 2.6314748416750748e-18, 1.4048039063095910e-34, 8.9601886626630321e-52},
{ 7.6120238548426178e-01, 3.5315510881690551e-17, 1.2833566381864357e-33, 8.6221435180890613e-50},
{ 7.5920918897838807e-01, -3.8558842175523123e-17, 2.9720241208332759e-34, -1.2521388928220163e-50},
{ 7.5720884650648457e-01, -1.9909098777335502e-17, 3.9409283266158482e-34, 2.0744254207802976e-50},
{ 7.5520137689653655e-01, -1.9402238001823017e-17, -3.7756206444727573e-34, -2.1212242308178287e-50},
{ 7.5318679904361252e-01, -3.7937789838736540e-17, -6.7009539920231559e-34, -6.7128562115050214e-51},
{ 7.5116513190968637e-01, 4.3499761158645868e-17, 2.5227718971102212e-33, -6.5969709212757102e-50},
{ 7.4913639452345937e-01, -4.4729078447011889e-17, -2.4206025249983768e-33, 1.1336681351116422e-49},
{ 7.4710060598018013e-01, 1.1874824875965430e-17, 2.1992523849833518e-34, 1.1025018564644483e-50},
{ 7.4505778544146595e-01, 1.5078686911877863e-17, 8.0898987212942471e-34, 8.2677958765323532e-50},
{ 7.4300795213512172e-01, -2.5144629669719265e-17, 7.1128989512526157e-34, 3.0181629077821220e-50},
{ 7.4095112535495911e-01, -1.4708616952297345e-17, -4.9550433827142032e-34, 3.1434132533735671e-50},
{ 7.3888732446061511e-01, 3.4324874808225091e-17, -1.3706639444717610e-33, -3.3520827530718938e-51},
{ 7.3681656887736990e-01, -2.8932468101656295e-17, -3.4649887126202378e-34, -1.8484474476291476e-50},
{ 7.3473887809596350e-01, -3.4507595976263941e-17, -2.3718000676666409e-33, -3.9696090387165402e-50},
{ 7.3265427167241282e-01, 1.8918673481573520e-17, -1.5123719544119886e-33, -9.7922152011625728e-51},
{ 7.3056276922782759e-01, -2.9689959904476928e-17, -1.1276871244239744e-33, -3.0531520961539007e-50},
{ 7.2846439044822520e-01, 1.1924642323370718e-19, 5.9001892316611011e-36, 1.2178089069502704e-52},
{ 7.2635915508434601e-01, -3.1917502443460542e-17, 7.7047912412039396e-34, 4.1455880160182123e-50},
{ 7.2424708295146689e-01, 2.9198471334403004e-17, 2.3027324968739464e-33, -1.2928820533892183e-51},
{ 7.2212819392921535e-01, -2.3871262053452047e-17, 1.0636125432862273e-33, -4.4598638837802517e-50},
{ 7.2000250796138165e-01, -2.5689658854462333e-17, -9.1492566948567925e-34, 4.4403780801267786e-50},
{ 7.1787004505573171e-01, 2.7006476062511453e-17, -2.2854956580215348e-34, 9.1726903890287867e-51},
{ 7.1573082528381871e-01, -5.1581018476410262e-17, -1.3736271349300259e-34, -1.2734611344111297e-50},
{ 7.1358486878079364e-01, -4.2342504403133584e-17, -4.2690366101617268e-34, -2.6352370883066522e-50},
{ 7.1143219574521643e-01, 7.9643298613856813e-18, 2.9488239510721469e-34, 1.6985236437666356e-50},
{ 7.0927282643886569e-01, -3.7597359110245730e-17, 1.0613125954645119e-34, 8.9465480185486032e-51},
{ 7.0710678118654757e-01, -4.8336466567264567e-17, 2.0693376543497068e-33, 2.4677734957341755e-50}
};
// Computes sin(a) and cos(a) using Taylor series.
// Assumes |a| <= pi/2048.
__device__
static void sincos_taylor(const gqd_real &a, gqd_real &sin_a, gqd_real &cos_a) {
    // Stop once a series term falls below half an eps of the result magnitude.
    const double thresh = 0.5 * _qd_eps * fabs(to_double(a));
    gqd_real p, s, t, x;

    if (is_zero(a)) {
        // sin(0) = 0, cos(0) = 1, written component-wise.
        sin_a[0] = sin_a[1] = sin_a[2] = sin_a[3] = 0.0;
        cos_a[0] = 1.0;
        cos_a[1] = cos_a[2] = cos_a[3] = 0.0;
        return;
    }

    // x = -a^2: each iteration advances the odd-power sine series by one term.
    x = negative( sqr(a) );
    s = a;
    p = a;
    int i = 0;
    do {
        p = p * x;
        // Scale by the inverse-factorial table entry, expanded to a quad-double.
        // (Fix: removed a stray empty statement — ';;' — after this call.)
        t = p * gqd_real(qd_inv_fact[i][0], qd_inv_fact[i][1], qd_inv_fact[i][2], qd_inv_fact[i][3]);
        s = s + t;
        i += 2;
    } while (i < n_qd_inv_fact && fabs(to_double(t)) > thresh);

    sin_a = s;
    // Recover cosine from the identity cos = sqrt(1 - sin^2); safe since
    // |a| <= pi/2048 keeps sin well away from 1.
    cos_a = sqrt(1.0 - sqr(s));
}
// Computes sin(a) by Taylor series.
// Assumes |a| <= pi/2048 so the series converges in a few terms.
__device__
static gqd_real sin_taylor(const gqd_real &a) {
    const double thresh = 0.5 * _qd_eps * fabs(to_double(a));
    gqd_real p, s, t, x;

    if (is_zero(a)) {
        // sin(0) = 0, built component-wise (no gqd_real(0.0) temporary).
        s[0] = s[1] = s[2] = s[3] = 0.0;
        return s;
    }

    // x = -a^2: generates successive odd powers of a with alternating sign.
    x = negative(sqr(a));
    s = a;
    p = a;
    int i = 0;
    do {
        p = p * x;
        // Scale by the inverse-factorial table entry, expanded to a quad-double.
        // (Fix: removed a stray empty statement — ';;' — after this call.)
        t = p * gqd_real(qd_inv_fact[i][0], qd_inv_fact[i][1], qd_inv_fact[i][2], qd_inv_fact[i][3]);
        s = s + t;
        i += 2;
    } while (i < n_qd_inv_fact && fabs(to_double(t)) > thresh);

    return s;
}
// Computes cos(a) by Taylor series.
// Assumes |a| <= pi/2048; since cos(a) is then close to 1, the stopping
// threshold carries no |a| factor (unlike sin_taylor above).
__device__
static gqd_real cos_taylor(const gqd_real &a) {
    const double thresh = 0.5 * _qd_eps;
    gqd_real p, s, t, x;

    if (is_zero(a)) {
        // cos(0) = 1, built component-wise.
        s[0] = 1.0;
        s[1] = s[2] = s[3] = 0.0;
        return s;
    }

    // x = -a^2: start from 1 - a^2/2 and add the remaining even-power terms.
    x = negative(sqr(a));
    s = 1.0 + mul_pwr2(x, 0.5);
    p = x;
    int i = 1;
    do {
        p = p * x;
        // Scale by the inverse-factorial table entry, expanded to a quad-double.
        // (Fix: removed a stray empty statement — ';;' — after this call.)
        t = p * gqd_real(qd_inv_fact[i][0], qd_inv_fact[i][1], qd_inv_fact[i][2], qd_inv_fact[i][3]);
        s = s + t;
        i += 2;
    } while (i < n_qd_inv_fact && fabs(to_double(t)) > thresh);

    return s;
}
// Quad-double sine. Reduces the argument modulo 2*pi, then pi/2 (quadrant j),
// then pi/1024 (table index k), leaving a tiny remainder t for the Taylor
// helpers; table values sin(k*pi/1024) / cos(k*pi/1024) are combined with
// sin(t)/cos(t) via angle-sum identities. Error paths return 0 because
// device code cannot raise (the CPU qd library returns NaN there).
__device__
gqd_real sin(const gqd_real &a) {
/* Strategy:
To compute sin(x), we choose integers a, b so that
x = s + a * (pi/2) + b * (pi/1024)
and |s| <= pi/2048. Using a precomputed table of
sin(k pi / 1024) and cos(k pi / 1024), we can compute
sin(x) from sin(s) and cos(s). This greatly increases the
convergence of the sine Taylor series.
*/
gqd_real z, r;
// sin(0) = 0, built component-wise.
if (is_zero(a)) {
//return gqd_real(0.0);
r[0] = r[1] = r[2] = r[3] = 0.0;
return r;
}
// approximately reduce modulo 2*pi
z = nint(a / _qd_2pi);
r = a - _qd_2pi * z;
// approximately reduce modulo pi/2 and then modulo pi/1024
double q = floor(r[0] / _qd_pi2[0] + 0.5);
gqd_real t = r - _qd_pi2 * q;
int j = (int)(q);
q = floor(t[0] / _qd_pi1024[0] + 0.5);
t = t - _qd_pi1024 * q;
int k = (int)(q);
int abs_k = abs(k);
// After reduction j must lie in [-2, 2] and |k| in [0, 256].
if (j < -2 || j > 2) {
//gqd_real::error("(gqd_real::sin): Cannot reduce modulo pi/2.");
//return gqd_real::_nan;
//return gqd_real(0.0);
r[0] = r[1] = r[2] = r[3] = 0.0;
return r;
}
if (abs_k > 256) {
//gqd_real::error("(gqd_real::sin): Cannot reduce modulo pi/1024.");
//return gqd_real::_nan;
//return gqd_real( 0.0 );
r[0] = r[1] = r[2] = r[3] = 0.0;
return r;
}
// k == 0: no table correction, the quadrant j alone decides the result.
if (k == 0) {
switch (j) {
case 0:
return sin_taylor(t);
case 1:
return cos_taylor(t);
case -1:
return negative(cos_taylor(t));
default:
return negative(sin_taylor(t));
}
}
//gqd_real sin_t, cos_t;
//gqd_real u = qd_cos_tbl[abs_k-1];
//gqd_real v = qd_sin_tbl[abs_k-1];
//sincos_taylor(t, sin_t, cos_t);
///use z and r again to avoid allocate additional memory
///z = sin_t, r = cos_t
sincos_taylor( t, z, r );
int i = abs_k - 1;
// Angle-sum identities: the sign of k picks add vs. subtract, the
// quadrant j picks which of sin/cos pairs with each table value.
if (j == 0) {
z = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * z;
r = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * r;
//z = qd_cos_tbl[abs_k-1] * z;
//r = qd_sin_tbl[abs_k-1] * r;
if (k > 0) {
//z = qd_cos_tbl[abs_k-1] * z;
//r = qd_sin_tbl[abs_k-1] * r;
return z + r;
} else {
//z = qd_cos_tbl[abs_k-1] * z;
//r = qd_sin_tbl[abs_k-1] * r;
return z - r;
}
} else if (j == 1) {
r = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * r;
z = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * z;
//r = qd_cos_tbl[abs_k-1] * r;
//z = qd_sin_tbl[abs_k-1] * z;
if (k > 0) {
//r = qd_cos_tbl[abs_k-1] * r;
//z = qd_sin_tbl[abs_k-1] * z;
return r - z;
} else {
//r = qd_cos_tbl[abs_k-1] * r;
//z = qd_sin_tbl[abs_k-1] * z;
return r + z;
}
} else if (j == -1) {
z = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * z;
r = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * r;
//z = qd_sin_tbl[abs_k-1] * z;
//r = qd_cos_tbl[abs_k-1] * r;
if (k > 0) {
//z = qd_sin_tbl[abs_k-1] * z;
//r = qd_cos_tbl[abs_k-1] * r;
return z - r;
} else {
//r = negative(qd_cos_tbl[abs_k-1]) * r;
//r = (qd_cos_tbl[abs_k-1]) * r;
// Negate r in place (cheaper than building a negated table value).
r[0] = -r[0];
r[1] = -r[1];
r[2] = -r[2];
r[3] = -r[3];
//z = qd_sin_tbl[abs_k-1] * z;
return r - z;
}
} else {
r = gqd_real(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]) * r;
z = gqd_real(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]) * z;
//r = qd_sin_tbl[abs_k-1] * r ;
//z = qd_cos_tbl[abs_k-1] * z;
if (k > 0) {
//z = negative(qd_cos_tbl[abs_k-1]) * z;
//z = qd_cos_tbl[abs_k-1] * z;
// Negate z in place (cheaper than building a negated table value).
z[0] = -z[0];
z[1] = -z[1];
z[2] = -z[2];
z[3] = -z[3];
//r = qd_sin_tbl[abs_k-1] * r;
return z - r;
} else {
//r = qd_sin_tbl[abs_k-1] * r ;
//z = qd_cos_tbl[abs_k-1] * z;
return r - z;
}
}
}
// Quad-double cosine. Same reduction scheme as sin() above: reduce modulo
// 2*pi, then pi/2 (quadrant j), then pi/1024 (table index k); combine
// table values with sin/cos of the tiny remainder via angle-sum identities.
__device__
gqd_real cos(const gqd_real &a) {
if (is_zero(a)) {
return gqd_real(1.0);
}
// approximately reduce modulo 2*pi
gqd_real z = nint(a / _qd_2pi);
gqd_real r = a - _qd_2pi * z;
// approximately reduce modulo pi/2 and then modulo pi/1024
double q = floor(r[0] / _qd_pi2[0] + 0.5);
gqd_real t = r - _qd_pi2 * q;
int j = (int)(q);
q = floor(t[0] / _qd_pi1024[0] + 0.5);
t = t - _qd_pi1024 * q;
int k = (int)(q);
int abs_k = abs(k);
// Error paths return 0 since device code cannot raise (CPU qd returns NaN).
if (j < -2 || j > 2) {
//qd_real::error("(qd_real::cos): Cannot reduce modulo pi/2.");
//return qd_real::_nan;
return gqd_real(0.0);
}
if (abs_k > 256) {
//qd_real::error("(qd_real::cos): Cannot reduce modulo pi/1024.");
//return qd_real::_nan;
return gqd_real(0.0);
}
// k == 0: no table correction, the quadrant j alone decides the result.
if (k == 0) {
switch (j) {
case 0:
return cos_taylor(t);
case 1:
return negative(sin_taylor(t));
case -1:
return sin_taylor(t);
default:
return negative(cos_taylor(t));
}
}
gqd_real sin_t, cos_t;
sincos_taylor(t, sin_t, cos_t);
//gqd_real u = qd_cos_tbl[abs_k - 1];
//gqd_real v = qd_sin_tbl[abs_k - 1];
// Expand the table rows for index |k|-1 into quad-double values:
// u = cos(|k| pi/1024), v = sin(|k| pi/1024).
int i = abs_k - 1;
gqd_real u(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]);
gqd_real v(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]);
// Angle-sum identities: quadrant j and the sign of k select the combination.
if (j == 0) {
if (k > 0) {
r = u * cos_t - v * sin_t;
} else {
r = u * cos_t + v * sin_t;
}
} else if (j == 1) {
if (k > 0) {
r = negative(u * sin_t) - v * cos_t;
} else {
r = v * cos_t - u * sin_t;
}
} else if (j == -1) {
if (k > 0) {
r = u * sin_t + v * cos_t;
} else {
r = u * sin_t - v * cos_t;
}
} else {
if (k > 0) {
r = v * sin_t - u * cos_t;
} else {
r = negative(u * cos_t) - v * sin_t;
}
}
return r;
}
// Computes sin(a) and cos(a) simultaneously, sharing one argument
// reduction and one sincos_taylor() evaluation. Same reduction scheme as
// sin()/cos() above (mod 2*pi, then pi/2 -> j, then pi/1024 -> k).
__device__
void sincos(const gqd_real &a, gqd_real &sin_a, gqd_real &cos_a) {
if (is_zero(a)) {
sin_a = gqd_real(0.0);
cos_a = gqd_real(1.0);
return;
}
// approximately reduce by 2*pi
gqd_real z = nint(a / _qd_2pi);
gqd_real t = a - _qd_2pi * z;
// approximately reduce by pi/2 and then by pi/1024.
double q = floor(t[0] / _qd_pi2[0] + 0.5);
t = t - _qd_pi2 * q;
int j = (int)(q);
q = floor(t[0] / _qd_pi1024[0] + 0.5);
t = t - _qd_pi1024 * q;
int k = (int)(q);
int abs_k = abs(k);
// Error paths return 0 since device code cannot raise (CPU qd returns NaN).
if (j < -2 || j > 2) {
//qd_real::error("(qd_real::sincos): Cannot reduce modulo pi/2.");
//cos_a = sin_a = qd_real::_nan;
cos_a = sin_a = gqd_real(0.0);
return;
}
if (abs_k > 256) {
//qd_real::error("(qd_real::sincos): Cannot reduce modulo pi/1024.");
//cos_a = sin_a = qd_real::_nan;
cos_a = sin_a = gqd_real(0.0);
return;
}
gqd_real sin_t, cos_t;
sincos_taylor(t, sin_t, cos_t);
// k == 0: no table correction, quadrant j just permutes/negates sin_t, cos_t.
if (k == 0) {
if (j == 0) {
sin_a = sin_t;
cos_a = cos_t;
} else if (j == 1) {
sin_a = cos_t;
cos_a = negative(sin_t);
} else if (j == -1) {
sin_a = negative(cos_t);
cos_a = sin_t;
} else {
sin_a = negative(sin_t);
cos_a = negative(cos_t);
}
return;
}
//gqd_real u = qd_cos_tbl[abs_k - 1];
//gqd_real v = qd_sin_tbl[abs_k - 1];
// Expand the table rows for index |k|-1 into quad-double values:
// u = cos(|k| pi/1024), v = sin(|k| pi/1024).
int i = abs_k - 1;
gqd_real u(qd_cos_tbl[i][0], qd_cos_tbl[i][1], qd_cos_tbl[i][2], qd_cos_tbl[i][3]);
gqd_real v(qd_sin_tbl[i][0], qd_sin_tbl[i][1], qd_sin_tbl[i][2], qd_sin_tbl[i][3]);
// Angle-sum identities: quadrant j and the sign of k select the combination.
if (j == 0) {
if (k > 0) {
sin_a = u * sin_t + v * cos_t;
cos_a = u * cos_t - v * sin_t;
} else {
sin_a = u * sin_t - v * cos_t;
cos_a = u * cos_t + v * sin_t;
}
} else if (j == 1) {
if (k > 0) {
cos_a = negative(u * sin_t) - v * cos_t;
sin_a = u * cos_t - v * sin_t;
} else {
cos_a = v * cos_t - u * sin_t;
sin_a = u * cos_t + v * sin_t;
}
} else if (j == -1) {
if (k > 0) {
cos_a = u * sin_t + v * cos_t;
sin_a = v * sin_t - u * cos_t;
} else {
cos_a = u * sin_t - v * cos_t;
sin_a = negative(u * cos_t) - v * sin_t;
}
} else {
if (k > 0) {
sin_a = negative(u * sin_t) - v * cos_t;
cos_a = v * sin_t - u * cos_t;
} else {
sin_a = v * cos_t - u * sin_t;
cos_a = negative(u * cos_t) - v * sin_t;
}
}
}
// Quad-double tangent: tan(a) = sin(a) / cos(a), with both values
// obtained in a single shared argument reduction via sincos().
__device__
gqd_real tan(const gqd_real &a) {
    gqd_real sin_a, cos_a;
    sincos(a, sin_a, cos_a);
    return sin_a / cos_a;
}
#ifdef ALL_MATH
__device__
gqd_real atan2(const gqd_real &y, const gqd_real &x) {
/* Strategy:
Instead of using Taylor series to compute arctan,
we instead use Newton's iteration to solve the equation
sin(z) = y/r or cos(z) = x/r
where r = sqrt(x^2 + y^2).
The iteration is given by
z' = z + (y - sin(z)) / cos(z) (for equation 1)
z' = z - (x - cos(z)) / sin(z) (for equation 2)
Here, x and y are normalized so that x^2 + y^2 = 1.
If |x| > |y|, then first iteration is used since the
denominator is larger. Otherwise, the second is used.
*/
// Special cases on the axes and diagonals are answered exactly.
if (is_zero(x)) {
if (is_zero(y)) {
// Both x and y is zero.
//qd_real::error("(qd_real::atan2): Both arguments zero.");
//return qd_real::_nan;
// Undefined; return 0 since device code cannot raise.
return gqd_real(0.0);
}
return (is_positive(y)) ? _qd_pi2 : negative(_qd_pi2);
} else if (is_zero(y)) {
return (is_positive(x)) ? gqd_real(0.0) : _qd_pi;
}
if (x == y) {
return (is_positive(y)) ? _qd_pi4 : negative(_qd_3pi4);
}
if (x == negative(y)) {
return (is_positive(y)) ? _qd_3pi4 : negative(_qd_pi4);
}
// Normalize so that xx^2 + yy^2 = 1.
gqd_real r = sqrt(sqr(x) + sqr(y));
gqd_real xx = x / r;
gqd_real yy = y / r;
// Seed with the double-precision atan2, then refine.
gqd_real z = gqd_real(atan2(to_double(y), to_double(x)));
gqd_real sin_z, cos_z;
// Three Newton iterations; pick the equation with the larger denominator.
if (abs(xx[0]) > abs(yy[0])) {
sincos(z, sin_z, cos_z);
z = z + (yy - sin_z) / cos_z;
sincos(z, sin_z, cos_z);
z = z + (yy - sin_z) / cos_z;
sincos(z, sin_z, cos_z);
z = z + (yy - sin_z) / cos_z;
} else {
sincos(z, sin_z, cos_z);
z = z - (xx - cos_z) / sin_z;
sincos(z, sin_z, cos_z);
z = z - (xx - cos_z) / sin_z;
sincos(z, sin_z, cos_z);
z = z - (xx - cos_z) / sin_z;
}
return z;
}
// Quad-double arctangent: the angle of the point (1, a), so simply
// delegate to atan2 with x fixed at one.
__device__
gqd_real atan(const gqd_real &a) {
    return atan2(a, gqd_real(1.0));
}
// Quad-double arcsine via asin(a) = atan2(a, sqrt(1 - a^2)).
// Out-of-domain inputs (|a| > 1) return 0 since device code cannot raise.
__device__
gqd_real asin(const gqd_real &a) {
    gqd_real abs_a = abs(a);

    if (abs_a > 1.0) {
        // The CPU qd library reports a domain error and returns NaN here.
        return gqd_real(0.0);
    }

    if (is_one(abs_a)) {
        // a == +/-1 maps exactly onto +/- pi/2.
        return is_positive(a) ? _qd_pi2 : negative(_qd_pi2);
    }

    return atan2(a, sqrt(1.0 - sqr(a)));
}
// Quad-double arccosine via acos(a) = atan2(sqrt(1 - a^2), a).
// Out-of-domain inputs (|a| > 1) return 0 since device code cannot raise.
__device__
gqd_real acos(const gqd_real &a) {
    gqd_real abs_a = abs(a);

    if (abs_a > 1.0) {
        // The CPU qd library reports a domain error and returns NaN here.
        return gqd_real(0.0);
    }

    if (is_one(abs_a)) {
        // a == 1 maps to 0 and a == -1 maps to pi, exactly.
        return is_positive(a) ? gqd_real(0.0) : _qd_pi;
    }

    return atan2(sqrt(1.0 - sqr(a)), a);
}
// Quad-double hyperbolic sine. For |a| > 0.05 uses (e^a - e^-a)/2;
// for small a that formula cancels badly, so a Taylor series is used.
__device__
gqd_real sinh(const gqd_real &a) {
if (is_zero(a)) {
return gqd_real(0.0);
}
if (abs(a) > 0.05) {
gqd_real ea = exp(a);
return mul_pwr2(ea - inv(ea), 0.5);
}
// Since a is small, using the above formula gives
// a lot of cancellation. So use Taylor series.
gqd_real s = a;
gqd_real t = a;
gqd_real r = sqr(t);
double m = 1.0;
double thresh = abs(to_double(a) * _qd_eps);
// Each pass turns t = a^(m-2)/(m-2)! into a^m/m! by multiplying by a^2
// and dividing by (m-1)*m, accumulating the odd-power series in s.
do {
m = m + 2.0;
t = (t*r);
t = t/((m-1) * m);
s = s + t;
} while (abs(t) > thresh);
return s;
}
// Quad-double hyperbolic cosine: (e^a + e^-a) / 2. The terms add, so
// there is no cancellation and no small-argument special case is needed.
__device__
gqd_real cosh(const gqd_real &a) {
    if (is_zero(a)) {
        return gqd_real(1.0);
    }

    gqd_real e_a = exp(a);
    return mul_pwr2(e_a + inv(e_a), 0.5);
}
// Quad-double hyperbolic tangent. Uses the exponential form for
// |a| > 0.05; for smaller arguments derives tanh from sinh (which has a
// cancellation-free Taylor path) via cosh = sqrt(1 + sinh^2).
__device__
gqd_real tanh(const gqd_real &a) {
    if (is_zero(a)) {
        return gqd_real(0.0);
    }

    if (abs(to_double(a)) > 0.05) {
        gqd_real e_a = exp(a);
        gqd_real e_neg_a = inv(e_a);
        return (e_a - e_neg_a) / (e_a + e_neg_a);
    }

    gqd_real s = sinh(a);
    gqd_real c = sqrt(1.0 + sqr(s));
    return s / c;
}
// Computes sinh(a) and cosh(a) together. Small arguments route through
// sinh's cancellation-free Taylor path, with cosh recovered from the
// identity cosh^2 - sinh^2 = 1; otherwise one exp() serves both results.
__device__
void sincosh(const gqd_real &a, gqd_real &s, gqd_real &c) {
    if (abs(to_double(a)) <= 0.05) {
        s = sinh(a);
        c = sqrt(1.0 + sqr(s));
    } else {
        gqd_real e_a = exp(a);
        gqd_real e_neg_a = inv(e_a);
        s = mul_pwr2(e_a - e_neg_a, 0.5);
        c = mul_pwr2(e_a + e_neg_a, 0.5);
    }
}
// Quad-double inverse hyperbolic sine: asinh(a) = log(a + sqrt(a^2 + 1)).
__device__
gqd_real asinh(const gqd_real &a) {
    return log(a + sqrt(sqr(a) + 1.0));
}
// Quad-double inverse hyperbolic cosine: acosh(a) = log(a + sqrt(a^2 - 1)),
// defined only for a >= 1. Out-of-domain inputs return 0 since device
// code cannot raise (the CPU qd library returns NaN here).
__device__
gqd_real acosh(const gqd_real &a) {
    if (a < 1.0) {
        return gqd_real(0.0);
    }

    return log(a + sqrt(sqr(a) - 1.0));
}
// Quad-double inverse hyperbolic tangent:
// atanh(a) = log((1 + a) / (1 - a)) / 2, defined for |a| < 1.
// Out-of-domain inputs return 0 since device code cannot raise.
__device__
gqd_real atanh(const gqd_real &a) {
    if (abs(a) >= 1.0) {
        return gqd_real(0.0);
    }

    return mul_pwr2(log((1.0 + a) / (1.0 - a)), 0.5);
}
#endif /* ALL_MATH */
#endif /* __GQD_SIN_COS_CU__ */
|
ee48b83050d19d3e08c44d1c5fd2df0f09af0945.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/* NiuTrans.Tensor - an open-source tensor library
* Copyright (C) 2017, Natural Language Processing Lab, Northeastern University.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Created by: Xu Chen (email: hello_master1954@163.com) 2018-07-31
*/
#include "OnehotAndIndex.cuh"
#include "../../XDevice.h"
namespace nts { // namespace nts(NiuTrans.Tensor)
#ifdef USE_ROCM
/*
convert onehot tensor to index tensor (kernel version)
>> onehotData - the data pointer of the onehot tensor
>> indexData - the data pointer of the index tensor
>> blockNum - the number of block
>> stride - stride of a data block
*/
__global__
void KernelOnehotToIndex(int * onehotData, int * indexData, int blockNum, int stride)
{
/* block id */
int i = blockDim.x * blockIdx.x + threadIdx.x;
/* offset in each block */
int offset = blockDim.y * blockIdx.y + threadIdx.y;
/* guard: the 2-D grid may overshoot both dimensions */
if (i >= blockNum || offset >= stride)
return;
int * od = onehotData + i * stride;
int * id = indexData + i;
/* the position of the (assumed unique) nonzero entry becomes the index */
if (od[offset] != 0)
*id = offset;
}
/*
convert onehot tensor to index tensor (cuda version)
>> onehot - onehot tensor, which value is 0 or 1
>> index - index tensor, which value is an integer num
>> size - the last dimension size of the onehot tensor
*/
void _CudaOnehotToIndex(const XTensor * onehot, XTensor * index, int size)
{
int devID = onehot->devID;
int blockNum = index->unitNum;
int stride = size;
int cudaGrids[3];
int cudaBlocks[3];
/* switch to the tensor's device for the launch; restored at the end */
int devIDBackup;
ProtectCudaDev(devID, devIDBackup);
/* pick a 2-D launch config: x over rows (blockNum), y over the onehot dim */
GDevs.GetCudaThread2D(devID, blockNum, stride, MAX_INT, cudaGrids, cudaBlocks);
dim3 blocks(cudaGrids[0], cudaGrids[1]);
dim3 threads(cudaBlocks[0], cudaBlocks[1]);
int * onehotData = (int *)onehot->data;
int * indexData = (int *)index->data;
hipLaunchKernelGGL(( KernelOnehotToIndex), dim3(blocks), dim3(threads) , 0, 0, onehotData, indexData, blockNum, stride);
BacktoCudaDev(devID, devIDBackup);
}
/*
convert index tensor to onehot tensor (kernel version)
>> onehotData - the data pointer of the onehot tensor
>> indexData - the data pointer of the index tensor
>> blockNum - the number of block
>> stride - stride of a data block
*/
__global__
void KernelIndexToOnehot(DTYPE * onehotData, int * indexData, int blockNum, int stride, float confidence, float lowconfidence)
{
/* block id */
int i = blockDim.x * blockIdx.x + threadIdx.x;
/* offset in each block */
int offset = blockDim.y * blockIdx.y + threadIdx.y;
if (i >= blockNum || offset >= stride)
return;
DTYPE * od = onehotData + i * stride;
int id = indexData[i];
/* only the hot position is written; the label-smoothing fill below is
   disabled, so the lowconfidence parameter is currently unused */
if (offset == id)
od[offset] = confidence;
//else
// od[offset] = lowconfidence;
}
/*
convert index tensor to onehot tensor (cuda version)
>> index - index tensor, which value is an integer num
>> onehot - onehot tensor, which value is 0 or 1
>> size - the last dimension size of the onehot tensor
*/
void _CudaIndexToOnehot(const XTensor * index, XTensor * onehot,
int size, float confidence, float lowconfidence)
{
int devID = onehot->devID;
int blockNum = index->unitNum;
int stride = size;
int cudaGrids[3];
int cudaBlocks[3];
/* switch to the tensor's device for the launch; restored at the end */
int devIDBackup;
ProtectCudaDev(devID, devIDBackup);
/* pick a 2-D launch config: x over rows (blockNum), y over the onehot dim */
GDevs.GetCudaThread2D(devID, blockNum, stride, MAX_INT, cudaGrids, cudaBlocks);
dim3 blocks(cudaGrids[0], cudaGrids[1]);
dim3 threads(cudaBlocks[0], cudaBlocks[1]);
DTYPE * onehotData = (DTYPE *)onehot->data;
int * indexData = (int *)index->data;
hipLaunchKernelGGL(( KernelIndexToOnehot), dim3(blocks), dim3(threads) , 0, 0, onehotData, indexData, blockNum, stride, confidence, lowconfidence);
BacktoCudaDev(devID, devIDBackup);
}
#endif // USE_ROCM
} // namespace nts(NiuTrans.Tensor) | ee48b83050d19d3e08c44d1c5fd2df0f09af0945.cu | /* NiuTrans.Tensor - an open-source tensor library
* Copyright (C) 2017, Natural Language Processing Lab, Northeastern University.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Created by: Xu Chen (email: hello_master1954@163.com) 2018-07-31
*/
#include "OnehotAndIndex.cuh"
#include "../../XDevice.h"
namespace nts { // namespace nts(NiuTrans.Tensor)
#ifdef USE_CUDA
/*
convert onehot tensor to index tensor (kernel version)
>> onehotData - the data pointer of the onehot tensor
>> indexData - the data pointer of the index tensor
>> blockNum - the number of block
>> stride - stride of a data block
*/
__global__
void KernelOnehotToIndex(int * onehotData, int * indexData, int blockNum, int stride)
{
    /* x dimension walks over blocks, y dimension over positions in a block */
    int blockId = blockDim.x * blockIdx.x + threadIdx.x;
    int pos = blockDim.y * blockIdx.y + threadIdx.y;

    /* drop threads that fall outside the tensor in either dimension */
    if (blockId >= blockNum || pos >= stride)
        return;

    /* the position of the (assumed unique) nonzero entry becomes the index */
    if (onehotData[blockId * stride + pos] != 0)
        indexData[blockId] = pos;
}
/*
convert onehot tensor to index tensor (cuda version)
>> onehot - onehot tensor, which value is 0 or 1
>> index - index tensor, which value is an integer num
>> size - the last dimension size of the onehot tensor
*/
void _CudaOnehotToIndex(const XTensor * onehot, XTensor * index, int size)
{
int devID = onehot->devID;
int blockNum = index->unitNum;
int stride = size;
int cudaGrids[3];
int cudaBlocks[3];
/* switch to the tensor's device for the launch; restored at the end */
int devIDBackup;
ProtectCudaDev(devID, devIDBackup);
/* pick a 2-D launch config: x over rows (blockNum), y over the onehot dim */
GDevs.GetCudaThread2D(devID, blockNum, stride, MAX_INT, cudaGrids, cudaBlocks);
dim3 blocks(cudaGrids[0], cudaGrids[1]);
dim3 threads(cudaBlocks[0], cudaBlocks[1]);
int * onehotData = (int *)onehot->data;
int * indexData = (int *)index->data;
KernelOnehotToIndex<<<blocks, threads >>>(onehotData, indexData, blockNum, stride);
BacktoCudaDev(devID, devIDBackup);
}
/*
convert index tensor to onehot tensor (kernel version)
>> onehotData - the data pointer of the onehot tensor
>> indexData - the data pointer of the index tensor
>> blockNum - the number of block
>> stride - stride of a data block
*/
__global__
void KernelIndexToOnehot(DTYPE * onehotData, int * indexData, int blockNum, int stride, float confidence, float lowconfidence)
{
    /* x dimension walks over blocks, y dimension over positions in a block */
    int blockId = blockDim.x * blockIdx.x + threadIdx.x;
    int pos = blockDim.y * blockIdx.y + threadIdx.y;

    if (blockId >= blockNum || pos >= stride)
        return;

    /* only the hot position is written; the label-smoothing fill for the
       remaining positions is disabled, so lowconfidence goes unused */
    if (pos == indexData[blockId])
        onehotData[blockId * stride + pos] = confidence;
}
/*
convert index tensor to onehot tensor (cuda version)
>> index - index tensor, which value is an integer num
>> onehot - onehot tensor, which value is 0 or 1
>> size - the last dimension size of the onehot tensor
*/
void _CudaIndexToOnehot(const XTensor * index, XTensor * onehot,
int size, float confidence, float lowconfidence)
{
int devID = onehot->devID;
int blockNum = index->unitNum;
int stride = size;
int cudaGrids[3];
int cudaBlocks[3];
/* switch to the tensor's device for the launch; restored at the end */
int devIDBackup;
ProtectCudaDev(devID, devIDBackup);
/* pick a 2-D launch config: x over rows (blockNum), y over the onehot dim */
GDevs.GetCudaThread2D(devID, blockNum, stride, MAX_INT, cudaGrids, cudaBlocks);
dim3 blocks(cudaGrids[0], cudaGrids[1]);
dim3 threads(cudaBlocks[0], cudaBlocks[1]);
DTYPE * onehotData = (DTYPE *)onehot->data;
int * indexData = (int *)index->data;
KernelIndexToOnehot<<<blocks, threads >>>(onehotData, indexData, blockNum, stride, confidence, lowconfidence);
BacktoCudaDev(devID, devIDBackup);
}
#endif // USE_CUDA
} // namespace nts(NiuTrans.Tensor) |
0acbdc4721b3f4375d7622594e66063bd2e16d7a.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "caffe2/core/context_gpu.h"
#include "caffe2/operators/reduction_ops.h"
#include "caffe2/utils/conversions.h"
#include <hipcub/hipcub.hpp>
namespace caffe2 {
REGISTER_CUDA_OPERATOR(SumElements, SumElementsOp<float, CUDAContext>);
REGISTER_CUDA_OPERATOR(SumElementsInt, SumElementsIntOp<int, CUDAContext>);
REGISTER_CUDA_OPERATOR(SumSqrElements, SumSqrElementsOp<CUDAContext>);
REGISTER_CUDA_OPERATOR(RowwiseMax, MaxReductionOp<float, CUDAContext, true>);
REGISTER_CUDA_OPERATOR(ColwiseMax, MaxReductionOp<float, CUDAContext, false>);
REGISTER_CUDA_OPERATOR(
RowwiseMaxGradient,
MaxReductionGradientOp<float, CUDAContext, true>)
REGISTER_CUDA_OPERATOR(
ColwiseMaxGradient,
MaxReductionGradientOp<float, CUDAContext, false>)
REGISTER_CUDA_OPERATOR(
SumElementsGradient,
SumElementsGradientOp<float, CUDAContext>);
// Broadcasts the scalar upstream gradient dY into every element of dX;
// when `average` is set (forward op computed a mean) each element
// receives dY / N instead.
template <typename T>
__global__ void
SumElementsGradientKernel(bool average, const int N, const T* dY, T* dX) {
const T value = average ? (*dY) / N : *dY;
CUDA_1D_KERNEL_LOOP(i, N) {
dX[i] = value;
}
}
// Backward pass of a rowwise max over a (batch_size, M, N) tensor: an
// element of dX receives the upstream gradient iff it equals the max
// stored in Y (ties propagate the gradient to every matching element).
__global__ void rowwise_max_gradient_kernel(
const int batch_size,
const int M,
const int N,
const float* X,
const float* Y,
const float* dY,
float* dX) {
const int input_size = M * N;
CUDA_1D_KERNEL_LOOP(i, batch_size * M * N) {
const int b_i = i / input_size;
// NOTE(review): `i / input_size / N` equals b_i / N, not a row index
// within the batch; the row would be (i % input_size) / N (compare the
// colwise kernel below, which uses i % input_size % N). Verify against
// the upstream caffe2 source before relying on this path.
const int b_n = i / input_size / N;
const int y_index = b_i * M + b_n;
if (X[i] == Y[y_index]) {
dX[i] = dY[y_index];
} else {
dX[i] = 0.0;
}
}
}
// Dispatches the sum-of-squares reduction over the supported element
// types (float and half), chosen from the dtype of the first input.
template <>
bool SumSqrElementsOp<CUDAContext>::RunOnDevice() {
return DispatchHelper<TensorTypes<float, at::Half>>::call(this, Input(0));
}
// Backward pass of a colwise max over a (batch_size, M, N) tensor: an
// element of dX receives the upstream gradient iff it equals the column
// maximum stored in Y (Y has shape (batch_size, N)).
__global__ void colwise_max_gradient_kernel(
const int batch_size,
const int M,
const int N,
const float* X,
const float* Y,
const float* dY,
float* dX) {
const int input_size = M * N;
CUDA_1D_KERNEL_LOOP(i, batch_size * M * N) {
const int b_i = i / input_size;
// column index within the batch element
const int b_n = i % input_size % N;
const int y_index = b_i * N + b_n;
if (X[i] == Y[y_index]) {
dX[i] = dY[y_index];
} else {
dX[i] = 0.0;
}
}
}
// Gradient of SumElements: verifies the upstream gradient is a scalar,
// then broadcasts it (divided by the element count when averaging) into
// a tensor shaped like the forward input.
template <>
bool SumElementsGradientOp<float, CUDAContext>::RunOnDevice() {
auto& X = Input(0);
auto& dY = Input(1);
// dY must be the scalar gradient of a scalar forward output.
DCHECK_EQ(dY.numel(), 1);
auto* dX = Output(0, X.sizes(), at::dtype<float>());
hipLaunchKernelGGL(( SumElementsGradientKernel<float>)
, dim3(CAFFE_GET_BLOCKS(X.numel())),
dim3(CAFFE_CUDA_NUM_THREADS),
0,
context_.cuda_stream(),
average_,
X.numel(),
dY.data<float>(),
dX->template mutable_data<float>());
C10_HIP_KERNEL_LAUNCH_CHECK();
return true;
}
// Gradient of the rowwise/colwise max reduction. Expects a 3-D input
// (batch_size, M, N); the ROWWISE template flag selects which gradient
// kernel to launch. Routes the upstream gradient dY to the positions of
// X that match the stored maxima in Y.
template <typename T, class Context, bool ROWWISE>
bool MaxReductionGradientOp<T, Context, ROWWISE>::RunOnDevice() {
auto& X = Input(0);
auto& Y = Input(1);
auto& dY = Input(2);
auto* dX = Output(0, X.sizes(), at::dtype<T>());
CAFFE_ENFORCE_EQ(X.dim(), 3);
const int batch_size = X.dim32(0);
const int M = X.dim32(1);
const int N = X.dim32(2);
const T* Xdata = X.template data<T>();
const T* Ydata = Y.template data<T>();
const T* dYdata = dY.template data<T>();
T* dXdata = dX->template mutable_data<T>();
const int input_size = M * N;
if (ROWWISE) {
hipLaunchKernelGGL(( rowwise_max_gradient_kernel),
dim3(CAFFE_GET_BLOCKS(batch_size * input_size)),
dim3(CAFFE_CUDA_NUM_THREADS),
0,
context_.cuda_stream(),
batch_size, M, N, Xdata, Ydata, dYdata, dXdata);
C10_HIP_KERNEL_LAUNCH_CHECK();
} else {
hipLaunchKernelGGL(( colwise_max_gradient_kernel),
dim3(CAFFE_GET_BLOCKS(batch_size * input_size)),
dim3(CAFFE_CUDA_NUM_THREADS),
0,
context_.cuda_stream(),
batch_size, M, N, Xdata, Ydata, dYdata, dXdata);
C10_HIP_KERNEL_LAUNCH_CHECK();
}
return true;
}
} // namespace caffe2
| 0acbdc4721b3f4375d7622594e66063bd2e16d7a.cu | #include "caffe2/core/context_gpu.h"
#include "caffe2/operators/reduction_ops.h"
#include "caffe2/utils/conversions.h"
#include <cub/cub.cuh>
namespace caffe2 {
REGISTER_CUDA_OPERATOR(SumElements, SumElementsOp<float, CUDAContext>);
REGISTER_CUDA_OPERATOR(SumElementsInt, SumElementsIntOp<int, CUDAContext>);
REGISTER_CUDA_OPERATOR(SumSqrElements, SumSqrElementsOp<CUDAContext>);
REGISTER_CUDA_OPERATOR(RowwiseMax, MaxReductionOp<float, CUDAContext, true>);
REGISTER_CUDA_OPERATOR(ColwiseMax, MaxReductionOp<float, CUDAContext, false>);
REGISTER_CUDA_OPERATOR(
RowwiseMaxGradient,
MaxReductionGradientOp<float, CUDAContext, true>)
REGISTER_CUDA_OPERATOR(
ColwiseMaxGradient,
MaxReductionGradientOp<float, CUDAContext, false>)
REGISTER_CUDA_OPERATOR(
SumElementsGradient,
SumElementsGradientOp<float, CUDAContext>);
// Broadcasts the scalar upstream gradient dY into every element of dX;
// when `average` is set (forward op computed a mean) each element
// receives dY / N instead.
template <typename T>
__global__ void
SumElementsGradientKernel(bool average, const int N, const T* dY, T* dX) {
  T grad = *dY;
  if (average) {
    grad = grad / N;
  }
  CUDA_1D_KERNEL_LOOP(i, N) {
    dX[i] = grad;
  }
}
// Backward pass of a rowwise max over a (batch_size, M, N) tensor: an
// element of dX receives the upstream gradient iff it equals the max
// stored in Y (ties propagate the gradient to every matching element).
__global__ void rowwise_max_gradient_kernel(
    const int batch_size,
    const int M,
    const int N,
    const float* X,
    const float* Y,
    const float* dY,
    float* dX) {
  const int input_size = M * N;
  CUDA_1D_KERNEL_LOOP(i, batch_size * M * N) {
    const int b_i = i / input_size;
    // NOTE(review): `i / input_size / N` equals b_i / N, not a row index
    // within the batch; the row would be (i % input_size) / N (compare the
    // colwise kernel below, which uses i % input_size % N). Verify against
    // the upstream caffe2 source before relying on this path.
    const int b_n = i / input_size / N;
    const int y_index = b_i * M + b_n;
    if (X[i] == Y[y_index]) {
      dX[i] = dY[y_index];
    } else {
      dX[i] = 0.0;
    }
  }
}
// Dispatches the sum-of-squares reduction over the supported element
// types (float and half), chosen from the dtype of the first input.
template <>
bool SumSqrElementsOp<CUDAContext>::RunOnDevice() {
  return DispatchHelper<TensorTypes<float, at::Half>>::call(this, Input(0));
}
// Backward pass of a colwise max over a (batch_size, M, N) tensor: an
// element of dX receives the upstream gradient iff it equals the column
// maximum stored in Y (Y has shape (batch_size, N)).
__global__ void colwise_max_gradient_kernel(
    const int batch_size,
    const int M,
    const int N,
    const float* X,
    const float* Y,
    const float* dY,
    float* dX) {
  const int input_size = M * N;
  CUDA_1D_KERNEL_LOOP(i, batch_size * M * N) {
    const int b_i = i / input_size;
    // column index within the batch element
    const int b_n = i % input_size % N;
    const int y_index = b_i * N + b_n;
    if (X[i] == Y[y_index]) {
      dX[i] = dY[y_index];
    } else {
      dX[i] = 0.0;
    }
  }
}
// Gradient of SumElements: broadcasts the scalar upstream gradient over a
// tensor shaped like the forward input (dividing by the element count when
// the forward op averaged).
template <>
bool SumElementsGradientOp<float, CUDAContext>::RunOnDevice() {
  auto& X = Input(0);   // forward input; only its shape/size is used
  auto& dY = Input(1);  // upstream gradient; must be a single scalar
  DCHECK_EQ(dY.numel(), 1);
  auto* dX = Output(0, X.sizes(), at::dtype<float>());
  SumElementsGradientKernel<float>
      <<<CAFFE_GET_BLOCKS(X.numel()),
         CAFFE_CUDA_NUM_THREADS,
         0,
         context_.cuda_stream()>>>(
          average_,
          X.numel(),
          dY.data<float>(),
          dX->template mutable_data<float>());
  C10_CUDA_KERNEL_LAUNCH_CHECK();
  return true;
}
// Backward pass for RowwiseMax / ColwiseMax (selected by the ROWWISE
// template flag). Expects X as a 3-D (batch, M, N) tensor and routes each
// upstream gradient to the position(s) that attained the max forward.
template <typename T, class Context, bool ROWWISE>
bool MaxReductionGradientOp<T, Context, ROWWISE>::RunOnDevice() {
  auto& X = Input(0);   // forward input
  auto& Y = Input(1);   // forward output (the maxima)
  auto& dY = Input(2);  // upstream gradient, same shape as Y
  auto* dX = Output(0, X.sizes(), at::dtype<T>());
  CAFFE_ENFORCE_EQ(X.dim(), 3);
  const int batch_size = X.dim32(0);
  const int M = X.dim32(1);
  const int N = X.dim32(2);
  const T* Xdata = X.template data<T>();
  const T* Ydata = Y.template data<T>();
  const T* dYdata = dY.template data<T>();
  T* dXdata = dX->template mutable_data<T>();
  const int input_size = M * N;
  // One thread per input element in either branch; only the kernel differs.
  if (ROWWISE) {
    rowwise_max_gradient_kernel<<<
        CAFFE_GET_BLOCKS(batch_size * input_size),
        CAFFE_CUDA_NUM_THREADS,
        0,
        context_.cuda_stream()>>>(
        batch_size, M, N, Xdata, Ydata, dYdata, dXdata);
    C10_CUDA_KERNEL_LAUNCH_CHECK();
  } else {
    colwise_max_gradient_kernel<<<
        CAFFE_GET_BLOCKS(batch_size * input_size),
        CAFFE_CUDA_NUM_THREADS,
        0,
        context_.cuda_stream()>>>(
        batch_size, M, N, Xdata, Ydata, dYdata, dXdata);
    C10_CUDA_KERNEL_LAUNCH_CHECK();
  }
  return true;
}
} // namespace caffe2
|
41a0a8751944162b3bff42edef6b9ab8ef292006.hip | // !!! This is a file automatically generated by hipify!!!
#include "chainerx/cuda/cuda_device.h"
#include <cmath>
#include <cstdint>
#include <hip/hip_runtime.h>
#include "chainerx/array.h"
#include "chainerx/cuda/cuda_runtime.h"
#include "chainerx/cuda/cuda_set_device_scope.h"
#include "chainerx/cuda/elementwise.cuh"
#include "chainerx/cuda/numeric.cuh"
#include "chainerx/cuda/op_regist.h"
#include "chainerx/device.h"
#include "chainerx/dtype.h"
#include "chainerx/routines/math.h"
namespace chainerx {
namespace cuda {
namespace {
template <typename T>
struct SquareImpl {
using CudaType = cuda_internal::DataType<T>;
__device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = x * x; }
};
class CudaSquareOp : public SquareOp {
public:
void Call(const Array& x, const Array& out) override {
Device& device = x.device();
device.CheckDevicesCompatible(x, out);
CudaSetDeviceScope scope{device.index()};
VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
using T = typename decltype(pt)::type;
Elementwise<const T, T>(SquareImpl<T>{}, x, out);
});
}
};
CHAINERX_REGISTER_OP_CUDA(SquareOp, CudaSquareOp);
// Elementwise functor: out = sqrt(x), via the device math wrapper.
template <typename T>
struct SqrtImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = cuda::Sqrt(x); }
};
// Elementwise square root. Inputs whose dtype differs from out's are cast
// first so the kernel runs with matching element types.
// NOTE(review): the cast runs before CudaSetDeviceScope here, whereas
// Ceil/Floor cast inside the scope — presumably harmless; confirm AsType
// binds to x's device regardless of the current device.
class CudaSqrtOp : public SqrtOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        const Array& x_cast = x.dtype() == out.dtype() ? x : x.AsType(out.dtype());
        CudaSetDeviceScope scope{device.index()};
        VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, T>(SqrtImpl<T>{}, x_cast, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(SqrtOp, CudaSqrtOp);
// Elementwise predicate functor: out = (x is NaN).
template <typename T>
struct IsNanImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, bool& out) { out = cuda::IsNan(x); }
};
// Writes a boolean NaN mask of x into out. Dispatches on x's dtype (not
// out's) because the output element type is always bool.
class CudaIsNanOp : public IsNanOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        VisitDtype(x.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, bool>(IsNanImpl<T>{}, x, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(IsNanOp, CudaIsNanOp);
// Elementwise predicate functor: out = (x is +/- infinity).
template <typename T>
struct IsInfImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, bool& out) { out = cuda::IsInf(x); }
};
// Writes a boolean infinity mask of x into out. Dispatches on x's dtype
// (not out's) because the output element type is always bool.
class CudaIsInfOp : public IsInfOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        VisitDtype(x.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, bool>(IsInfImpl<T>{}, x, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(IsInfOp, CudaIsInfOp);
// Elementwise functor: out = ceil(x), via the device math wrapper.
template <typename T>
struct CeilImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = cuda::Ceil(x); }
};
// Elementwise ceiling. Inputs whose dtype differs from out's are cast first
// so the kernel runs with matching element types.
class CudaCeilOp : public CeilOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        const Array& x_cast = x.dtype() == out.dtype() ? x : x.AsType(out.dtype());
        VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, T>(CeilImpl<T>{}, x_cast, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(CeilOp, CudaCeilOp);
// Elementwise functor: out = floor(x), via the device math wrapper.
template <typename T>
struct FloorImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = cuda::Floor(x); }
};
// Elementwise floor. Inputs whose dtype differs from out's are cast first
// so the kernel runs with matching element types.
class CudaFloorOp : public FloorOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        const Array& x_cast = x.dtype() == out.dtype() ? x : x.AsType(out.dtype());
        VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, T>(FloorImpl<T>{}, x_cast, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(FloorOp, CudaFloorOp);
} // namespace
} // namespace cuda
} // namespace chainerx
| 41a0a8751944162b3bff42edef6b9ab8ef292006.cu | #include "chainerx/cuda/cuda_device.h"
#include <cmath>
#include <cstdint>
#include <cuda_runtime.h>
#include "chainerx/array.h"
#include "chainerx/cuda/cuda_runtime.h"
#include "chainerx/cuda/cuda_set_device_scope.h"
#include "chainerx/cuda/elementwise.cuh"
#include "chainerx/cuda/numeric.cuh"
#include "chainerx/cuda/op_regist.h"
#include "chainerx/device.h"
#include "chainerx/dtype.h"
#include "chainerx/routines/math.h"
namespace chainerx {
namespace cuda {
namespace {
// Elementwise functor: out = x * x.
template <typename T>
struct SquareImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = x * x; }
};
// Computes the elementwise square on the device.
class CudaSquareOp : public SquareOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        // Cast the input to the output dtype first, mirroring the sibling ops
        // (Sqrt/Ceil/Floor). Without this, a mixed-dtype call would launch the
        // kernel with x's buffer read as the wrong element type. A no-op alias
        // when dtypes already match.
        const Array& x_cast = x.dtype() == out.dtype() ? x : x.AsType(out.dtype());
        VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, T>(SquareImpl<T>{}, x_cast, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(SquareOp, CudaSquareOp);
// Elementwise functor: out = sqrt(x), via the device math wrapper.
template <typename T>
struct SqrtImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = cuda::Sqrt(x); }
};
// Elementwise square root. Inputs whose dtype differs from out's are cast
// first so the kernel runs with matching element types.
// NOTE(review): the cast runs before CudaSetDeviceScope here, whereas
// Ceil/Floor cast inside the scope — presumably harmless; confirm AsType
// binds to x's device regardless of the current device.
class CudaSqrtOp : public SqrtOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        const Array& x_cast = x.dtype() == out.dtype() ? x : x.AsType(out.dtype());
        CudaSetDeviceScope scope{device.index()};
        VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, T>(SqrtImpl<T>{}, x_cast, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(SqrtOp, CudaSqrtOp);
// Elementwise predicate functor: out = (x is NaN).
template <typename T>
struct IsNanImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, bool& out) { out = cuda::IsNan(x); }
};
// Writes a boolean NaN mask of x into out. Dispatches on x's dtype (not
// out's) because the output element type is always bool.
class CudaIsNanOp : public IsNanOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        VisitDtype(x.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, bool>(IsNanImpl<T>{}, x, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(IsNanOp, CudaIsNanOp);
// Elementwise predicate functor: out = (x is +/- infinity).
template <typename T>
struct IsInfImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, bool& out) { out = cuda::IsInf(x); }
};
// Writes a boolean infinity mask of x into out. Dispatches on x's dtype
// (not out's) because the output element type is always bool.
class CudaIsInfOp : public IsInfOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        VisitDtype(x.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, bool>(IsInfImpl<T>{}, x, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(IsInfOp, CudaIsInfOp);
// Elementwise functor: out = ceil(x), via the device math wrapper.
template <typename T>
struct CeilImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = cuda::Ceil(x); }
};
// Elementwise ceiling. Inputs whose dtype differs from out's are cast first
// so the kernel runs with matching element types.
class CudaCeilOp : public CeilOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        const Array& x_cast = x.dtype() == out.dtype() ? x : x.AsType(out.dtype());
        VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, T>(CeilImpl<T>{}, x_cast, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(CeilOp, CudaCeilOp);
// Elementwise functor: out = floor(x), via the device math wrapper.
template <typename T>
struct FloorImpl {
    using CudaType = cuda_internal::DataType<T>;
    __device__ void operator()(int64_t /*i*/, CudaType x, CudaType& out) { out = cuda::Floor(x); }
};
// Elementwise floor. Inputs whose dtype differs from out's are cast first
// so the kernel runs with matching element types.
class CudaFloorOp : public FloorOp {
public:
    void Call(const Array& x, const Array& out) override {
        Device& device = x.device();
        device.CheckDevicesCompatible(x, out);
        CudaSetDeviceScope scope{device.index()};
        const Array& x_cast = x.dtype() == out.dtype() ? x : x.AsType(out.dtype());
        VisitFloatingPointDtype(out.dtype(), [&](auto pt) {
            using T = typename decltype(pt)::type;
            Elementwise<const T, T>(FloorImpl<T>{}, x_cast, out);
        });
    }
};
CHAINERX_REGISTER_OP_CUDA(FloorOp, CudaFloorOp);
} // namespace
} // namespace cuda
} // namespace chainerx
|
2052d6245ccfcbdd1c880727438cb9a4ef714e43.hip | // !!! This is a file automatically generated by hipify!!!
#include "../include/gpu_errchk.cuh"
// Reports a failed HIP API call (error string plus call site) to stderr.
// When `abort` is set, terminates the process using the error code as the
// exit status. A success code is a no-op.
void gpuAssert(hipError_t code, const char *file, int line, bool abort)
{
    if (code == hipSuccess) return;
    fprintf(stderr, "GPU Assert: %s \nIn file: %s, line: %d \n", hipGetErrorString(code), file, line);
    if (abort) exit(code);
}
| 2052d6245ccfcbdd1c880727438cb9a4ef714e43.cu | #include "../include/gpu_errchk.cuh"
// Reports a failed CUDA API call (error string plus call site) to stderr.
// When `abort` is set, terminates the process using the error code as the
// exit status. A success code is a no-op.
void gpuAssert(cudaError_t code, const char *file, int line, bool abort)
{
    if (code == cudaSuccess) return;
    fprintf(stderr, "GPU Assert: %s \nIn file: %s, line: %d \n", cudaGetErrorString(code), file, line);
    if (abort) exit(code);
}
|
c4635486d1a2be5aad0133684afa7bce7c4e3c99.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <ATen/ATen.h>
#include <ATen/AccumulateType.h>
#include <ATen/NativeFunctions.h>
#include <ATen/TensorUtils.h>
#include <ATen/Utils.h>
#include <ATen/hip/HIPContext.h>
#include <ATen/hip/HIPApplyUtils.cuh>
#include <ATen/native/hip/UpSample.cuh>
namespace at {
namespace native {
namespace {
// Bicubic 2D upsampling, forward. The grid covers only the output spatial
// plane (num_elements == output_height * output_width); each thread handles
// one (y, x) output position and loops over batch and channel internally.
template <typename scalar_t, typename accscalar_t>
#ifdef __HIP_PLATFORM_HCC__
C10_LAUNCH_BOUNDS_1(1024)
#endif
__global__ void upsample_bicubic2d_out_frame(
    const int num_elements,
    const accscalar_t height_scale,
    const accscalar_t width_scale,
    const bool align_corners,
    const PackedTensorAccessor<scalar_t, 4> idata,
    PackedTensorAccessor<scalar_t, 4> odata) {
  int index = threadIdx.x + blockIdx.x * blockDim.x;
  const int batchsize = idata.size(0);
  const int channels = idata.size(1);
  const int input_height = idata.size(2);
  const int input_width = idata.size(3);
  const int output_height = odata.size(2);
  const int output_width = odata.size(3);
  if (index >= num_elements) {
    return;
  }
  // Special case: input and output are the same size, just copy
  const int output_x = index % output_width;
  const int output_y = index / output_width;
  if (input_height == output_height && input_width == output_width) {
    for (int n = 0; n < batchsize; n++) {
      for (int c = 0; c < channels; c++) {
        const scalar_t val = idata[n][c][output_y][output_x];
        odata[n][c][output_y][output_x] = val;
      }
    }
    return;
  }
  // Interpolation kernel: map the output coordinate into input space;
  // t_x/t_y are the fractional offsets fed into the cubic weights.
  accscalar_t real_x = area_pixel_compute_source_index(
      width_scale, output_x, align_corners, /*cubic=*/true);
  int in_x = floorf(real_x);
  accscalar_t t_x = real_x - in_x;
  accscalar_t real_y = area_pixel_compute_source_index(
      height_scale, output_y, align_corners, /*cubic=*/true);
  int in_y = floorf(real_y);
  accscalar_t t_y = real_y - in_y;
  for (int n = 0; n < batchsize; n++) {
    for (int c = 0; c < channels; c++) {
      // Separable interpolation: 4 cubic interpolations along x, then one
      // along y over the 4 intermediate results. Edge taps are clamped by
      // upsample_get_value_bounded.
      accscalar_t coefficients[4];
      for (int k = 0; k < 4; k++) {
        /* TODO: change width and height order in the arguments */
        /* TODO: maybe change x and y order in the arguments */
        /* TODO: maybe change c and n order in the arguments */
        coefficients[k] = cubic_interp1d(
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x - 1, in_y - 1 + k),
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x + 0, in_y - 1 + k),
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x + 1, in_y - 1 + k),
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x + 2, in_y - 1 + k),
            t_x);
      }
      odata[n][c][output_y][output_x] = static_cast<scalar_t>(cubic_interp1d(
          coefficients[0],
          coefficients[1],
          coefficients[2],
          coefficients[3],
          t_y));
    }
  }
}
// Backward (adjoint) operation 1 <- 2 (accumulates)
// Bicubic 2D upsampling, backward. One thread per output-gradient (y, x)
// position; scatters each upstream gradient into the 4x4 input neighborhood
// weighted by the cubic coefficients. Accumulation goes through
// upsample_increment_value_bounded (presumably atomic, since neighborhoods
// of adjacent output pixels overlap — confirm in UpSample.cuh).
template <typename scalar_t, typename accscalar_t>
#ifdef __HIP_PLATFORM_HCC__
C10_LAUNCH_BOUNDS_1(1024)
#endif
__global__ void upsample_bicubic2d_backward_out_frame(
    const int num_elements,
    const accscalar_t height_scale,
    const accscalar_t width_scale,
    const bool align_corners,
    PackedTensorAccessor<scalar_t, 4> idata,
    const PackedTensorAccessor<scalar_t, 4> odata) {
  int index = threadIdx.x + blockIdx.x * blockDim.x;
  const int batchsize = idata.size(0);
  const int channels = idata.size(1);
  const int input_height = idata.size(2);
  const int input_width = idata.size(3);
  const int output_height = odata.size(2);
  const int output_width = odata.size(3);
  if (index >= num_elements) {
    return;
  }
  const int output_x = index % output_width;
  const int output_y = index / output_width;
  // special case: output_xust copy
  if (input_height == output_height && input_width == output_width) {
    for (int n = 0; n < batchsize; n++) {
      for (int c = 0; c < channels; ++c) {
        const scalar_t val = odata[n][c][output_y][output_x];
        idata[n][c][output_y][output_x] = val;
      }
    }
    return;
  }
  // Map the output coordinate back into input space and precompute the
  // 4-tap cubic weights in each direction.
  accscalar_t real_x = area_pixel_compute_source_index(
      width_scale, output_x, align_corners, /*cubic=*/true);
  int input_x = floorf(real_x);
  accscalar_t t_x = real_x - input_x;
  accscalar_t real_y = area_pixel_compute_source_index(
      height_scale, output_y, align_corners, /*cubic=*/true);
  int input_y = floorf(real_y);
  accscalar_t t_y = real_y - input_y;
  accscalar_t x_coeffs[4];
  accscalar_t y_coeffs[4];
  get_cubic_upsampling_coefficients(x_coeffs, t_x);
  get_cubic_upsampling_coefficients(y_coeffs, t_y);
  for (int n = 0; n < batchsize; n++) {
    for (int c = 0; c < channels; ++c) {
      scalar_t out_value = odata[n][c][output_y][output_x];
      for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
          /* TODO: change width and height order in the arguments */
          /* TODO: maybe change x and y order in the arguments */
          upsample_increment_value_bounded<scalar_t, accscalar_t>(
              idata,
              c,
              n,
              input_width,
              input_height,
              input_x - 1 + j,
              input_y - 1 + i,
              out_value * y_coeffs[i] * x_coeffs[j]);
        }
      }
    }
  }
}
// Host driver for the forward kernel: validates shapes, resizes/zeroes the
// output, then launches one thread per output spatial position on the
// current stream for each floating dtype.
static void upsample_bicubic2d_out_cuda_template(
    Tensor& output,
    const Tensor& input,
    IntArrayRef output_size,
    bool align_corners) {
  TensorArg input_arg{input, "input", 1}, output_arg{output, "output", 2};
  checkAllSameGPU("upsample_bicubic2d_out", {input_arg, output_arg});
  AT_CHECK(
      output_size.size() == 2,
      "It is expected output_size equals to 2, but got size ",
      output_size.size());
  int output_height = output_size[0];
  int output_width = output_size[1];
  int nbatch = input.size(0);
  int channels = input.size(1);
  int input_height = input.size(2);
  int input_width = input.size(3);
  upsample_2d_shape_check(
      input,
      Tensor(),
      nbatch,
      channels,
      input_height,
      input_width,
      output_height,
      output_width);
  output.resize_({input.size(0), input.size(1), output_height, output_width});
  output.zero_();
  AT_ASSERT(
      input_height > 0 && input_width > 0 && output_height > 0 &&
      output_width > 0);
  // Grid covers the spatial plane only; the kernel loops over N and C.
  const int num_output_elements = output_height * output_width;
  const int max_threads =
      at::cuda::getCurrentDeviceProperties()->maxThreadsPerBlock / 2;
  // Launch kernel
  hipStream_t stream = at::hip::getCurrentHIPStreamMasqueradingAsCUDA();
  AT_DISPATCH_FLOATING_TYPES_AND_HALF(
      input.scalar_type(), "upsample_bicubic2d_out_frame", [&] {
        using accscalar_t = at::acc_type<scalar_t, true>;
        auto idata = input.packed_accessor<scalar_t, 4>();
        auto odata = output.packed_accessor<scalar_t, 4>();
        // Get scaling factors
        const accscalar_t rheight = area_pixel_compute_scale<accscalar_t>(
            input_height, output_height, align_corners);
        const accscalar_t rwidth = area_pixel_compute_scale<accscalar_t>(
            input_width, output_width, align_corners);
        hipLaunchKernelGGL(( upsample_bicubic2d_out_frame<scalar_t, accscalar_t>)
            , dim3(cuda::ATenCeilDiv(num_output_elements, max_threads)),
            dim3(max_threads),
            0,
            stream,
            num_output_elements,
            rheight,
            rwidth,
            align_corners,
            idata,
            odata);
      });
  // Surfaces launch-configuration errors from the async kernel launch.
  AT_CUDA_CHECK(hipGetLastError());
}
// Host driver for the backward kernel: validates shapes, zeroes grad_input
// (the kernel accumulates into it), and launches one thread per
// output-gradient spatial position.
static void upsample_bicubic2d_backward_out_cuda_template(
    Tensor& grad_input,
    const Tensor& grad_output_,
    IntArrayRef output_size,
    IntArrayRef input_size,
    bool align_corners) {
  TensorArg grad_input_arg{grad_input, "grad_input", 1},
      grad_output_arg{grad_output_, "grad_output_", 2};
  checkAllSameGPU(
      "upsample_bicubic2d_backward_out_cuda",
      {grad_output_arg, grad_input_arg});
  AT_CHECK(
      output_size.size() == 2,
      "It is expected output_size equals to 2, but got size ",
      output_size.size());
  AT_CHECK(
      input_size.size() == 4,
      "It is expected input_size equals to 4, but got size ",
      input_size.size());
  int output_height = output_size[0];
  int output_width = output_size[1];
  int nbatch = input_size[0];
  int channels = input_size[1];
  int input_height = input_size[2];
  int input_width = input_size[3];
  upsample_2d_shape_check(
      Tensor(),
      grad_output_,
      nbatch,
      channels,
      input_height,
      input_width,
      output_height,
      output_width);
  // The kernel indexes grad_output with raw accessors, so force contiguity.
  Tensor grad_output = grad_output_.contiguous();
  grad_input.resize_({nbatch, channels, input_height, input_width});
  grad_input.zero_();
  const int num_kernels = output_height * output_width;
  const int num_threads =
      at::cuda::getCurrentDeviceProperties()->maxThreadsPerBlock / 2;
  hipStream_t stream = at::hip::getCurrentHIPStreamMasqueradingAsCUDA();
  AT_DISPATCH_FLOATING_TYPES_AND_HALF(
      grad_output.scalar_type(), "upsample_bicubic2d_backward_out_frame", [&] {
        using accscalar_t = at::acc_type<scalar_t, true>;
        auto idata = grad_input.packed_accessor<scalar_t, 4>();
        auto odata = grad_output.packed_accessor<scalar_t, 4>();
        const accscalar_t rheight = area_pixel_compute_scale<accscalar_t>(
            input_height, output_height, align_corners);
        const accscalar_t rwidth = area_pixel_compute_scale<accscalar_t>(
            input_width, output_width, align_corners);
        hipLaunchKernelGGL(( upsample_bicubic2d_backward_out_frame<scalar_t, accscalar_t>)
            , dim3(cuda::ATenCeilDiv(num_kernels, num_threads)),
            dim3(num_threads),
            0,
            stream,
            num_kernels, rheight, rwidth, align_corners, idata, odata);
      });
  // Surfaces launch-configuration errors from the async kernel launch.
  AT_CUDA_CHECK(hipGetLastError());
}
} // namespace
// Out-variant entry point: writes the bicubic upsample of `input` into the
// caller-provided `output` (resized by the template) and returns it.
Tensor& upsample_bicubic2d_out_cuda(
    Tensor& output,
    const Tensor& input,
    IntArrayRef output_size,
    bool align_corners) {
  upsample_bicubic2d_out_cuda_template(
      output, input, output_size, align_corners);
  return output;
}
// Functional entry point: allocates the output (the template resizes it to
// the requested spatial size) and delegates to the shared driver.
Tensor upsample_bicubic2d_cuda(
    const Tensor& input,
    IntArrayRef output_size,
    bool align_corners) {
  Tensor output = at::empty_like(input);
  upsample_bicubic2d_out_cuda_template(
      output, input, output_size, align_corners);
  return output;
}
// Out-variant backward entry point: accumulates the input gradient into the
// caller-provided `grad_input` (resized and zeroed by the template).
Tensor& upsample_bicubic2d_backward_out_cuda(
    Tensor& grad_input,
    const Tensor& grad_output,
    IntArrayRef output_size,
    IntArrayRef input_size,
    bool align_corners) {
  upsample_bicubic2d_backward_out_cuda_template(
      grad_input, grad_output, output_size, input_size, align_corners);
  return grad_input;
}
// Functional backward entry point. The initial empty_like(grad_output)
// shape is irrelevant: the template resizes grad_input to input_size.
Tensor upsample_bicubic2d_backward_cuda(
    const Tensor& grad_output,
    IntArrayRef output_size,
    IntArrayRef input_size,
    bool align_corners) {
  Tensor grad_input = at::empty_like(grad_output);
  upsample_bicubic2d_backward_out_cuda_template(
      grad_input, grad_output, output_size, input_size, align_corners);
  return grad_input;
}
} // namespace native
} // namespace at
| c4635486d1a2be5aad0133684afa7bce7c4e3c99.cu | #include <ATen/ATen.h>
#include <ATen/AccumulateType.h>
#include <ATen/NativeFunctions.h>
#include <ATen/TensorUtils.h>
#include <ATen/Utils.h>
#include <ATen/cuda/CUDAContext.h>
#include <ATen/cuda/CUDAApplyUtils.cuh>
#include <ATen/native/cuda/UpSample.cuh>
namespace at {
namespace native {
namespace {
// Bicubic 2D upsampling, forward. The grid covers only the output spatial
// plane (num_elements == output_height * output_width); each thread handles
// one (y, x) output position and loops over batch and channel internally.
template <typename scalar_t, typename accscalar_t>
#ifdef __HIP_PLATFORM_HCC__
C10_LAUNCH_BOUNDS_1(1024)
#endif
__global__ void upsample_bicubic2d_out_frame(
    const int num_elements,
    const accscalar_t height_scale,
    const accscalar_t width_scale,
    const bool align_corners,
    const PackedTensorAccessor<scalar_t, 4> idata,
    PackedTensorAccessor<scalar_t, 4> odata) {
  int index = threadIdx.x + blockIdx.x * blockDim.x;
  const int batchsize = idata.size(0);
  const int channels = idata.size(1);
  const int input_height = idata.size(2);
  const int input_width = idata.size(3);
  const int output_height = odata.size(2);
  const int output_width = odata.size(3);
  if (index >= num_elements) {
    return;
  }
  // Special case: input and output are the same size, just copy
  const int output_x = index % output_width;
  const int output_y = index / output_width;
  if (input_height == output_height && input_width == output_width) {
    for (int n = 0; n < batchsize; n++) {
      for (int c = 0; c < channels; c++) {
        const scalar_t val = idata[n][c][output_y][output_x];
        odata[n][c][output_y][output_x] = val;
      }
    }
    return;
  }
  // Interpolation kernel: map the output coordinate into input space;
  // t_x/t_y are the fractional offsets fed into the cubic weights.
  accscalar_t real_x = area_pixel_compute_source_index(
      width_scale, output_x, align_corners, /*cubic=*/true);
  int in_x = floorf(real_x);
  accscalar_t t_x = real_x - in_x;
  accscalar_t real_y = area_pixel_compute_source_index(
      height_scale, output_y, align_corners, /*cubic=*/true);
  int in_y = floorf(real_y);
  accscalar_t t_y = real_y - in_y;
  for (int n = 0; n < batchsize; n++) {
    for (int c = 0; c < channels; c++) {
      // Separable interpolation: 4 cubic interpolations along x, then one
      // along y over the 4 intermediate results. Edge taps are clamped by
      // upsample_get_value_bounded.
      accscalar_t coefficients[4];
      for (int k = 0; k < 4; k++) {
        /* TODO: change width and height order in the arguments */
        /* TODO: maybe change x and y order in the arguments */
        /* TODO: maybe change c and n order in the arguments */
        coefficients[k] = cubic_interp1d(
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x - 1, in_y - 1 + k),
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x + 0, in_y - 1 + k),
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x + 1, in_y - 1 + k),
            upsample_get_value_bounded<scalar_t>(
                idata, c, n, input_width, input_height, in_x + 2, in_y - 1 + k),
            t_x);
      }
      odata[n][c][output_y][output_x] = static_cast<scalar_t>(cubic_interp1d(
          coefficients[0],
          coefficients[1],
          coefficients[2],
          coefficients[3],
          t_y));
    }
  }
}
// Backward (adjoint) operation 1 <- 2 (accumulates)
// Bicubic 2D upsampling, backward. One thread per output-gradient (y, x)
// position; scatters each upstream gradient into the 4x4 input neighborhood
// weighted by the cubic coefficients. Accumulation goes through
// upsample_increment_value_bounded (presumably atomic, since neighborhoods
// of adjacent output pixels overlap — confirm in UpSample.cuh).
template <typename scalar_t, typename accscalar_t>
#ifdef __HIP_PLATFORM_HCC__
C10_LAUNCH_BOUNDS_1(1024)
#endif
__global__ void upsample_bicubic2d_backward_out_frame(
    const int num_elements,
    const accscalar_t height_scale,
    const accscalar_t width_scale,
    const bool align_corners,
    PackedTensorAccessor<scalar_t, 4> idata,
    const PackedTensorAccessor<scalar_t, 4> odata) {
  int index = threadIdx.x + blockIdx.x * blockDim.x;
  const int batchsize = idata.size(0);
  const int channels = idata.size(1);
  const int input_height = idata.size(2);
  const int input_width = idata.size(3);
  const int output_height = odata.size(2);
  const int output_width = odata.size(3);
  if (index >= num_elements) {
    return;
  }
  const int output_x = index % output_width;
  const int output_y = index / output_width;
  // special case: output_xust copy
  if (input_height == output_height && input_width == output_width) {
    for (int n = 0; n < batchsize; n++) {
      for (int c = 0; c < channels; ++c) {
        const scalar_t val = odata[n][c][output_y][output_x];
        idata[n][c][output_y][output_x] = val;
      }
    }
    return;
  }
  // Map the output coordinate back into input space and precompute the
  // 4-tap cubic weights in each direction.
  accscalar_t real_x = area_pixel_compute_source_index(
      width_scale, output_x, align_corners, /*cubic=*/true);
  int input_x = floorf(real_x);
  accscalar_t t_x = real_x - input_x;
  accscalar_t real_y = area_pixel_compute_source_index(
      height_scale, output_y, align_corners, /*cubic=*/true);
  int input_y = floorf(real_y);
  accscalar_t t_y = real_y - input_y;
  accscalar_t x_coeffs[4];
  accscalar_t y_coeffs[4];
  get_cubic_upsampling_coefficients(x_coeffs, t_x);
  get_cubic_upsampling_coefficients(y_coeffs, t_y);
  for (int n = 0; n < batchsize; n++) {
    for (int c = 0; c < channels; ++c) {
      scalar_t out_value = odata[n][c][output_y][output_x];
      for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
          /* TODO: change width and height order in the arguments */
          /* TODO: maybe change x and y order in the arguments */
          upsample_increment_value_bounded<scalar_t, accscalar_t>(
              idata,
              c,
              n,
              input_width,
              input_height,
              input_x - 1 + j,
              input_y - 1 + i,
              out_value * y_coeffs[i] * x_coeffs[j]);
        }
      }
    }
  }
}
// Host driver for the forward kernel: validates shapes, resizes/zeroes the
// output, then launches one thread per output spatial position on the
// current stream for each floating dtype.
static void upsample_bicubic2d_out_cuda_template(
    Tensor& output,
    const Tensor& input,
    IntArrayRef output_size,
    bool align_corners) {
  TensorArg input_arg{input, "input", 1}, output_arg{output, "output", 2};
  checkAllSameGPU("upsample_bicubic2d_out", {input_arg, output_arg});
  AT_CHECK(
      output_size.size() == 2,
      "It is expected output_size equals to 2, but got size ",
      output_size.size());
  int output_height = output_size[0];
  int output_width = output_size[1];
  int nbatch = input.size(0);
  int channels = input.size(1);
  int input_height = input.size(2);
  int input_width = input.size(3);
  upsample_2d_shape_check(
      input,
      Tensor(),
      nbatch,
      channels,
      input_height,
      input_width,
      output_height,
      output_width);
  output.resize_({input.size(0), input.size(1), output_height, output_width});
  output.zero_();
  AT_ASSERT(
      input_height > 0 && input_width > 0 && output_height > 0 &&
      output_width > 0);
  // Grid covers the spatial plane only; the kernel loops over N and C.
  const int num_output_elements = output_height * output_width;
  const int max_threads =
      at::cuda::getCurrentDeviceProperties()->maxThreadsPerBlock / 2;
  // Launch kernel
  cudaStream_t stream = at::cuda::getCurrentCUDAStream();
  AT_DISPATCH_FLOATING_TYPES_AND_HALF(
      input.scalar_type(), "upsample_bicubic2d_out_frame", [&] {
        using accscalar_t = at::acc_type<scalar_t, true>;
        auto idata = input.packed_accessor<scalar_t, 4>();
        auto odata = output.packed_accessor<scalar_t, 4>();
        // Get scaling factors
        const accscalar_t rheight = area_pixel_compute_scale<accscalar_t>(
            input_height, output_height, align_corners);
        const accscalar_t rwidth = area_pixel_compute_scale<accscalar_t>(
            input_width, output_width, align_corners);
        upsample_bicubic2d_out_frame<scalar_t, accscalar_t>
            <<<cuda::ATenCeilDiv(num_output_elements, max_threads),
               max_threads,
               0,
               stream>>>(
                num_output_elements,
                rheight,
                rwidth,
                align_corners,
                idata,
                odata);
      });
  // Surfaces launch-configuration errors from the async kernel launch.
  AT_CUDA_CHECK(cudaGetLastError());
}
// Host driver for the backward kernel: validates shapes, zeroes grad_input
// (the kernel accumulates into it), and launches one thread per
// output-gradient spatial position.
static void upsample_bicubic2d_backward_out_cuda_template(
    Tensor& grad_input,
    const Tensor& grad_output_,
    IntArrayRef output_size,
    IntArrayRef input_size,
    bool align_corners) {
  TensorArg grad_input_arg{grad_input, "grad_input", 1},
      grad_output_arg{grad_output_, "grad_output_", 2};
  checkAllSameGPU(
      "upsample_bicubic2d_backward_out_cuda",
      {grad_output_arg, grad_input_arg});
  AT_CHECK(
      output_size.size() == 2,
      "It is expected output_size equals to 2, but got size ",
      output_size.size());
  AT_CHECK(
      input_size.size() == 4,
      "It is expected input_size equals to 4, but got size ",
      input_size.size());
  int output_height = output_size[0];
  int output_width = output_size[1];
  int nbatch = input_size[0];
  int channels = input_size[1];
  int input_height = input_size[2];
  int input_width = input_size[3];
  upsample_2d_shape_check(
      Tensor(),
      grad_output_,
      nbatch,
      channels,
      input_height,
      input_width,
      output_height,
      output_width);
  // The kernel indexes grad_output with raw accessors, so force contiguity.
  Tensor grad_output = grad_output_.contiguous();
  grad_input.resize_({nbatch, channels, input_height, input_width});
  grad_input.zero_();
  const int num_kernels = output_height * output_width;
  const int num_threads =
      at::cuda::getCurrentDeviceProperties()->maxThreadsPerBlock / 2;
  cudaStream_t stream = at::cuda::getCurrentCUDAStream();
  AT_DISPATCH_FLOATING_TYPES_AND_HALF(
      grad_output.scalar_type(), "upsample_bicubic2d_backward_out_frame", [&] {
        using accscalar_t = at::acc_type<scalar_t, true>;
        auto idata = grad_input.packed_accessor<scalar_t, 4>();
        auto odata = grad_output.packed_accessor<scalar_t, 4>();
        const accscalar_t rheight = area_pixel_compute_scale<accscalar_t>(
            input_height, output_height, align_corners);
        const accscalar_t rwidth = area_pixel_compute_scale<accscalar_t>(
            input_width, output_width, align_corners);
        upsample_bicubic2d_backward_out_frame<scalar_t, accscalar_t>
            <<<cuda::ATenCeilDiv(num_kernels, num_threads),
               num_threads,
               0,
               stream>>>(
                num_kernels, rheight, rwidth, align_corners, idata, odata);
      });
  // Surfaces launch-configuration errors from the async kernel launch.
  AT_CUDA_CHECK(cudaGetLastError());
}
} // namespace
// Out-variant entry point: writes the bicubic upsample of `input` into the
// caller-provided `output` (resized by the template) and returns it.
Tensor& upsample_bicubic2d_out_cuda(
    Tensor& output,
    const Tensor& input,
    IntArrayRef output_size,
    bool align_corners) {
  upsample_bicubic2d_out_cuda_template(
      output, input, output_size, align_corners);
  return output;
}
// Functional entry point: allocates the output (the template resizes it to
// the requested spatial size) and delegates to the shared driver.
Tensor upsample_bicubic2d_cuda(
    const Tensor& input,
    IntArrayRef output_size,
    bool align_corners) {
  Tensor output = at::empty_like(input);
  upsample_bicubic2d_out_cuda_template(
      output, input, output_size, align_corners);
  return output;
}
// Out-variant backward entry point: accumulates the input gradient into the
// caller-provided `grad_input` (resized and zeroed by the template).
Tensor& upsample_bicubic2d_backward_out_cuda(
    Tensor& grad_input,
    const Tensor& grad_output,
    IntArrayRef output_size,
    IntArrayRef input_size,
    bool align_corners) {
  upsample_bicubic2d_backward_out_cuda_template(
      grad_input, grad_output, output_size, input_size, align_corners);
  return grad_input;
}
// Functional backward entry point. The initial empty_like(grad_output)
// shape is irrelevant: the template resizes grad_input to input_size.
Tensor upsample_bicubic2d_backward_cuda(
    const Tensor& grad_output,
    IntArrayRef output_size,
    IntArrayRef input_size,
    bool align_corners) {
  Tensor grad_input = at::empty_like(grad_output);
  upsample_bicubic2d_backward_out_cuda_template(
      grad_input, grad_output, output_size, input_size, align_corners);
  return grad_input;
}
} // namespace native
} // namespace at
|
af57f5ecb5d3de34d6f4c8e37b81298c8e9fecb3.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
* Copyright 1993-2007 NVIDIA Corporation. All rights reserved.
*
* NOTICE TO USER:
*
* This source code is subject to NVIDIA ownership rights under U.S. and
* international Copyright laws. Users and possessors of this source code
* are hereby granted a nonexclusive, royalty-free license to use this code
* in individual and commercial software.
*
* NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE
* CODE FOR ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR
* IMPLIED WARRANTY OF ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH
* REGARD TO THIS SOURCE CODE, INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
* IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL,
* OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
* OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
* OR PERFORMANCE OF THIS SOURCE CODE.
*
* U.S. Government End Users. This source code is a "commercial item" as
* that term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting of
* "commercial computer software" and "commercial computer software
* documentation" as such terms are used in 48 C.F.R. 12.212 (SEPT 1995)
* and is provided to the U.S. Government only as a commercial end item.
* Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through
* 227.7202-4 (JUNE 1995), all U.S. Government End Users acquire the
* source code with only those rights set forth herein.
*
* Any use of this source code in individual and commercial software must
* include, in the user documentation and internal comments to the code,
* the above Disclaimer and U.S. Government End Users Notice.
*/
//Fast integer multiplication macro
#define IMUL(a, b) __mul24(a, b)
//Input data texture reference
texture<float, 2, hipReadModeElementType> texData;
////////////////////////////////////////////////////////////////////////////////
// Kernel configuration
////////////////////////////////////////////////////////////////////////////////
#define KERNEL_RADIUS 8
#define KERNEL_W (2 * KERNEL_RADIUS + 1)
__device__ __constant__ float d_Kernel[KERNEL_W];
////////////////////////////////////////////////////////////////////////////////
// Loop unrolling templates, needed for best performance
////////////////////////////////////////////////////////////////////////////////
template<int i> __device__ float convolutionRow(float x, float y){
return
tex2D(texData, x + KERNEL_RADIUS - i, y) * d_Kernel[i]
+ convolutionRow<i - 1>(x, y);
}
template<> __device__ float convolutionRow<-1>(float x, float y){
return 0;
}
template<int i> __device__ float convolutionColumn(float x, float y){
return
tex2D(texData, x, y + KERNEL_RADIUS - i) * d_Kernel[i]
+ convolutionColumn<i - 1>(x, y);
}
template<> __device__ float convolutionColumn<-1>(float x, float y){
return 0;
}
////////////////////////////////////////////////////////////////////////////////
// Row convolution filter
////////////////////////////////////////////////////////////////////////////////
__global__ void convolutionRowGPU(
float *d_Result,
int dataW,
int dataH
){
const int ix = IMUL(blockDim.x, blockIdx.x) + threadIdx.x;
const int iy = IMUL(blockDim.y, blockIdx.y) + threadIdx.y;
const float x = (float)ix + 0.5f;
const float y = (float)iy + 0.5f;
if(ix < dataW && iy < dataH){
float sum = 0;
#ifdef UNROLL_INNER
sum = convolutionRow<2 * KERNEL_RADIUS>(x, y);
#else
for(int k = -KERNEL_RADIUS; k <= KERNEL_RADIUS; k++)
sum += tex2D(texData, x + k, y) * d_Kernel[KERNEL_RADIUS - k];
#endif
d_Result[IMUL(iy, dataW) + ix] = sum;
}
}
////////////////////////////////////////////////////////////////////////////////
// Column convolution filter
////////////////////////////////////////////////////////////////////////////////
__global__ void convolutionColumnGPU(
float *d_Result,
int dataW,
int dataH
){
const int ix = IMUL(blockDim.x, blockIdx.x) + threadIdx.x;
const int iy = IMUL(blockDim.y, blockIdx.y) + threadIdx.y;
const float x = (float)ix + 0.5f;
const float y = (float)iy + 0.5f;
if(ix < dataW && iy < dataH){
float sum = 0;
#ifdef UNROLL_INNER
sum = convolutionColumn<2 * KERNEL_RADIUS>(x, y);
#else
for(int k = -KERNEL_RADIUS; k <= KERNEL_RADIUS; k++)
sum += tex2D(texData, x, y + k) * d_Kernel[KERNEL_RADIUS - k];
#endif
d_Result[IMUL(iy, dataW) + ix] = sum;
}
}
| af57f5ecb5d3de34d6f4c8e37b81298c8e9fecb3.cu | /*
* Copyright 1993-2007 NVIDIA Corporation. All rights reserved.
*
* NOTICE TO USER:
*
* This source code is subject to NVIDIA ownership rights under U.S. and
* international Copyright laws. Users and possessors of this source code
* are hereby granted a nonexclusive, royalty-free license to use this code
* in individual and commercial software.
*
* NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE
* CODE FOR ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR
* IMPLIED WARRANTY OF ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH
* REGARD TO THIS SOURCE CODE, INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
* IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL,
* OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
* OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
* OR PERFORMANCE OF THIS SOURCE CODE.
*
* U.S. Government End Users. This source code is a "commercial item" as
* that term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting of
* "commercial computer software" and "commercial computer software
* documentation" as such terms are used in 48 C.F.R. 12.212 (SEPT 1995)
* and is provided to the U.S. Government only as a commercial end item.
* Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through
* 227.7202-4 (JUNE 1995), all U.S. Government End Users acquire the
* source code with only those rights set forth herein.
*
* Any use of this source code in individual and commercial software must
* include, in the user documentation and internal comments to the code,
* the above Disclaimer and U.S. Government End Users Notice.
*/
//Fast integer multiplication macro
#define IMUL(a, b) __mul24(a, b)
//Input data texture reference
texture<float, 2, cudaReadModeElementType> texData;
////////////////////////////////////////////////////////////////////////////////
// Kernel configuration
////////////////////////////////////////////////////////////////////////////////
#define KERNEL_RADIUS 8
#define KERNEL_W (2 * KERNEL_RADIUS + 1)
__device__ __constant__ float d_Kernel[KERNEL_W];
////////////////////////////////////////////////////////////////////////////////
// Loop unrolling templates, needed for best performance
////////////////////////////////////////////////////////////////////////////////
template<int i> __device__ float convolutionRow(float x, float y){
return
tex2D(texData, x + KERNEL_RADIUS - i, y) * d_Kernel[i]
+ convolutionRow<i - 1>(x, y);
}
template<> __device__ float convolutionRow<-1>(float x, float y){
return 0;
}
template<int i> __device__ float convolutionColumn(float x, float y){
return
tex2D(texData, x, y + KERNEL_RADIUS - i) * d_Kernel[i]
+ convolutionColumn<i - 1>(x, y);
}
template<> __device__ float convolutionColumn<-1>(float x, float y){
return 0;
}
////////////////////////////////////////////////////////////////////////////////
// Row convolution filter
////////////////////////////////////////////////////////////////////////////////
__global__ void convolutionRowGPU(
float *d_Result,
int dataW,
int dataH
){
const int ix = IMUL(blockDim.x, blockIdx.x) + threadIdx.x;
const int iy = IMUL(blockDim.y, blockIdx.y) + threadIdx.y;
const float x = (float)ix + 0.5f;
const float y = (float)iy + 0.5f;
if(ix < dataW && iy < dataH){
float sum = 0;
#ifdef UNROLL_INNER
sum = convolutionRow<2 * KERNEL_RADIUS>(x, y);
#else
for(int k = -KERNEL_RADIUS; k <= KERNEL_RADIUS; k++)
sum += tex2D(texData, x + k, y) * d_Kernel[KERNEL_RADIUS - k];
#endif
d_Result[IMUL(iy, dataW) + ix] = sum;
}
}
////////////////////////////////////////////////////////////////////////////////
// Column convolution filter
////////////////////////////////////////////////////////////////////////////////
__global__ void convolutionColumnGPU(
float *d_Result,
int dataW,
int dataH
){
const int ix = IMUL(blockDim.x, blockIdx.x) + threadIdx.x;
const int iy = IMUL(blockDim.y, blockIdx.y) + threadIdx.y;
const float x = (float)ix + 0.5f;
const float y = (float)iy + 0.5f;
if(ix < dataW && iy < dataH){
float sum = 0;
#ifdef UNROLL_INNER
sum = convolutionColumn<2 * KERNEL_RADIUS>(x, y);
#else
for(int k = -KERNEL_RADIUS; k <= KERNEL_RADIUS; k++)
sum += tex2D(texData, x, y + k) * d_Kernel[KERNEL_RADIUS - k];
#endif
d_Result[IMUL(iy, dataW) + ix] = sum;
}
}
|
5064c51b3f0003ac1761dc06af1e0897872bb096.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <ATen/ATen.h>
#include <ATen/hip/HIPApplyUtils.cuh>
#include <ATen/hip/HIPContext.h>
#include <ATen/hip/impl/HIPGuardImplMasqueradingAsCUDA.h>
#include <stdio.h>
#include <utility>
#define CUDA_1D_KERNEL_LOOP(i, n) \
for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n; \
i += blockDim.x * gridDim.x)
template <typename T>
__global__ void nnSearch(const int nthreads, const int M, const T *query,
const T *ref, long *idx, float *dist) {
CUDA_1D_KERNEL_LOOP(index, nthreads) {
float minDist = INFINITY;
int minIdx = -1;
float queryX = query[index * 3 + 0];
float queryY = query[index * 3 + 1];
float queryZ = query[index * 3 + 2];
float refX, refY, refZ, tempDist;
for (int j = 0; j < M; j++) {
refX = ref[j * 3 + 0];
refY = ref[j * 3 + 1];
refZ = ref[j * 3 + 2];
tempDist = (queryX - refX) * (queryX - refX) +
(queryY - refY) * (queryY - refY) +
(queryZ - refZ) * (queryZ - refZ);
if (tempDist < minDist) {
minDist = tempDist;
minIdx = j;
}
} // forj
idx[index] = minIdx;
dist[index] = minDist;
}
}
std::pair<at::Tensor, at::Tensor> nnSearch_cuda(const at::Tensor &query,
const at::Tensor &ref) {
AT_ASSERTM(query.device().is_cuda(),
"query point cloud must be a CUDA tensor");
AT_ASSERTM(ref.device().is_cuda(), "ref point cloud must be a CUDA tensor");
at::TensorArg query_t{query, "query", 1}, ref_t{ref, "ref", 2};
at::CheckedFrom c = "nnSearch_cuda"; // function name for check
at::checkAllSameGPU(c, {query_t, ref_t});
at::checkAllSameType(c, {query_t, ref_t});
at::hip::HIPGuardMasqueradingAsCUDA device_guard(query.device());
auto N = query.size(0);
auto M = ref.size(0);
auto dist = at::empty({N}, query.options());
auto idx = at::empty({N}, query.options().dtype(at::kLong));
hipStream_t stream = at::hip::getCurrentHIPStreamMasqueradingAsCUDA();
dim3 grid(::min(at::cuda::ATenCeilDiv(N, 512L), 4096L));
dim3 block(512);
AT_DISPATCH_FLOATING_TYPES(query.scalar_type(), "nnSearch", [&] {
hipLaunchKernelGGL(( nnSearch<scalar_t>), dim3(grid), dim3(block), 0, stream,
N, M, query.contiguous().data_ptr<scalar_t>(),
ref.contiguous().data_ptr<scalar_t>(),
idx.contiguous().data_ptr<long>(), dist.contiguous().data_ptr<float>());
});
hipDeviceSynchronize();
AT_CUDA_CHECK(hipGetLastError());
return std::make_pair(idx, dist);
}
| 5064c51b3f0003ac1761dc06af1e0897872bb096.cu | #include <ATen/ATen.h>
#include <ATen/cuda/CUDAApplyUtils.cuh>
#include <ATen/cuda/CUDAContext.h>
#include <c10/cuda/CUDAGuard.h>
#include <stdio.h>
#include <utility>
#define CUDA_1D_KERNEL_LOOP(i, n) \
for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n; \
i += blockDim.x * gridDim.x)
template <typename T>
__global__ void nnSearch(const int nthreads, const int M, const T *query,
const T *ref, long *idx, float *dist) {
CUDA_1D_KERNEL_LOOP(index, nthreads) {
float minDist = INFINITY;
int minIdx = -1;
float queryX = query[index * 3 + 0];
float queryY = query[index * 3 + 1];
float queryZ = query[index * 3 + 2];
float refX, refY, refZ, tempDist;
for (int j = 0; j < M; j++) {
refX = ref[j * 3 + 0];
refY = ref[j * 3 + 1];
refZ = ref[j * 3 + 2];
tempDist = (queryX - refX) * (queryX - refX) +
(queryY - refY) * (queryY - refY) +
(queryZ - refZ) * (queryZ - refZ);
if (tempDist < minDist) {
minDist = tempDist;
minIdx = j;
}
} // forj
idx[index] = minIdx;
dist[index] = minDist;
}
}
std::pair<at::Tensor, at::Tensor> nnSearch_cuda(const at::Tensor &query,
const at::Tensor &ref) {
AT_ASSERTM(query.device().is_cuda(),
"query point cloud must be a CUDA tensor");
AT_ASSERTM(ref.device().is_cuda(), "ref point cloud must be a CUDA tensor");
at::TensorArg query_t{query, "query", 1}, ref_t{ref, "ref", 2};
at::CheckedFrom c = "nnSearch_cuda"; // function name for check
at::checkAllSameGPU(c, {query_t, ref_t});
at::checkAllSameType(c, {query_t, ref_t});
at::cuda::CUDAGuard device_guard(query.device());
auto N = query.size(0);
auto M = ref.size(0);
auto dist = at::empty({N}, query.options());
auto idx = at::empty({N}, query.options().dtype(at::kLong));
cudaStream_t stream = at::cuda::getCurrentCUDAStream();
dim3 grid(std::min(at::cuda::ATenCeilDiv(N, 512L), 4096L));
dim3 block(512);
AT_DISPATCH_FLOATING_TYPES(query.scalar_type(), "nnSearch", [&] {
nnSearch<scalar_t><<<grid, block, 0, stream>>>(
N, M, query.contiguous().data_ptr<scalar_t>(),
ref.contiguous().data_ptr<scalar_t>(),
idx.contiguous().data_ptr<long>(), dist.contiguous().data_ptr<float>());
});
cudaDeviceSynchronize();
AT_CUDA_CHECK(cudaGetLastError());
return std::make_pair(idx, dist);
}
|
18859a28e9387a28a50143f3dc9d75c03c501b9b.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
// ----------------------------------------------------------------------------------------------------
// Copyrighted by Marko Rakita.
// Author: Marko Rakita
// File contains: Neural network.
// Created: 12/27/2015.
// ----------------------------------------------------------------------------------------------------
#include "include/neuralnet.cuh"
NeuralNet::NeuralNet(size_t maxNetworkTierSize)
{
m_maxNetworkTierSize = maxNetworkTierSize;
for (size_t tierLayer = 0; tierLayer < m_maxNetworkTierSize; ++tierLayer)
{
CudaAssert(hipSetDevice((int)tierLayer));
// Initialize calculation stream.
hipStream_t deviceCalculationStream;
CudaAssert(hipStreamCreateWithFlags(&deviceCalculationStream, hipStreamNonBlocking));
m_deviceCalculationStreams.push_back(deviceCalculationStream);
// Initialize memory stream.
hipStream_t deviceMemoryStream;
CudaAssert(hipStreamCreateWithFlags(&deviceMemoryStream, hipStreamNonBlocking));
m_deviceMemoryStreams.push_back(deviceMemoryStream);
// Initialize cuBLAS handles.
hipblasHandle_t cublasHandle;
CudaCublasAssert(hipblasCreate(&cublasHandle));
m_cublasHandles.push_back(cublasHandle);
// Initialize cuRAND state buffers.
hiprandState_t* curandStateBuffer;
CudaAssert(hipMalloc<hiprandState_t>(&curandStateBuffer, DropoutLayer::c_numCurandBlocks * DropoutLayer::c_numCurandThreadsPerBlock * sizeof(hiprandState_t)));
InitCurandStatesBuffer(curandStateBuffer, deviceCalculationStream);
m_curandStatesBuffers.push_back(curandStateBuffer);
// We need to sync whole device since cublas uses stream 0 to create handles,
// but this is called once per network so we don't care.
CudaAssert(hipDeviceSynchronize());
}
// Reverting back to default device.
CudaAssert(hipSetDevice(0));
}
/*
Initializes one cuRAND state per thread.
*/
__global__ void InitializeCurandStates(hiprandState_t* curandStatesBuffer, unsigned long long seedValue)
{
const uint c_stateIndex = blockIdx.x * blockDim.x + threadIdx.x;
// Initializing each state with different subsequence, to get more statistically uncorrelated sequences in different cuRAND states.
hiprand_init(seedValue, c_stateIndex, 0, &curandStatesBuffer[c_stateIndex]);
}
void NeuralNet::InitCurandStatesBuffer(hiprandState_t* curandStatesBuffer, hipStream_t deviceCalculationStream)
{
dim3 blockDimensions(DropoutLayer::c_numCurandThreadsPerBlock);
dim3 gridDimensions(DropoutLayer::c_numCurandBlocks);
// Making it less likely for statistically correlated sequences of random values across different cuRAND state buffers,
// since they are all initialized in approximately same time.
unsigned long long seedValue = 2 * chrono::system_clock::now().time_since_epoch().count() + 1;
LAUNCH_KERNEL_ASYNC(InitializeCurandStates, gridDimensions, blockDimensions, deviceCalculationStream)(curandStatesBuffer, seedValue);
CudaAssert(hipGetLastError());
}
NeuralNet::~NeuralNet()
{
// Delete layers.
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
for (size_t layerIndex = 0; layerIndex < m_layersTiers[tier].size(); ++layerIndex)
{
delete m_layersTiers[tier][layerIndex];
}
}
// Destroy streams.
for (size_t stream = 0; stream < m_deviceCalculationStreams.size(); ++stream)
{
CudaAssert(hipStreamDestroy(m_deviceCalculationStreams[stream]));
CudaAssert(hipStreamDestroy(m_deviceMemoryStreams[stream]));
}
// Destroy cuBLAS handles.
for (size_t handle = 0; handle < m_cublasHandles.size(); ++handle)
{
CudaCublasAssert(hipblasDestroy(m_cublasHandles[handle]));
}
// Destroy cuRAND state buffers.
for (size_t buffer = 0; buffer < m_curandStatesBuffers.size(); ++buffer)
{
CudaAssert(hipFree(m_curandStatesBuffers[buffer]));
}
}
void NeuralNet::SaveModel(string modelFile, bool saveUpdateBuffers)
{
ofstream modelStream(modelFile, ofstream::out | ofstream::binary);
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Convolutional)
{
vector<ConvolutionalLayer*> layers;
if (m_layersTiers[tier][0]->GetParallelismMode() == ParallelismMode::Data)
{
layers.push_back(static_cast<ConvolutionalLayer*>(m_layersTiers[tier][0]));
}
else
{
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<ConvolutionalLayer*>(layer));
}
}
// Writing filters.
float* tempFiltersBuffer;
CudaAssert(hipHostMalloc<float>(&tempFiltersBuffer, layers[0]->GetFiltersBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempFiltersBuffer, convLayer->GetFiltersBuffer(), convLayer->GetFiltersBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempFiltersBuffer), convLayer->GetFiltersBufferSize());
}
CudaAssert(hipHostFree(tempFiltersBuffer));
// Writing biases.
float* tempBiasesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempBiasesBuffer, convLayer->GetBiasesBuffer(), convLayer->GetBiasesBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesBuffer), convLayer->GetBiasesBufferSize());
}
CudaAssert(hipHostFree(tempBiasesBuffer));
if (saveUpdateBuffers)
{
// Writing filters update buffers.
float* tempFiltersUpdatesBuffer;
CudaAssert(hipHostMalloc<float>(&tempFiltersUpdatesBuffer, layers[0]->GetFiltersBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempFiltersUpdatesBuffer, convLayer->GetFiltersUpdateBuffer(), convLayer->GetFiltersBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempFiltersUpdatesBuffer), convLayer->GetFiltersBufferSize());
}
CudaAssert(hipHostFree(tempFiltersUpdatesBuffer));
// Writing biases update buffers.
float* tempBiasesUpdatesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesUpdatesBuffer, layers[0]->GetBiasesBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempBiasesUpdatesBuffer, convLayer->GetBiasesUpdateBuffer(), convLayer->GetBiasesBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesUpdatesBuffer), convLayer->GetBiasesBufferSize());
}
CudaAssert(hipHostFree(tempBiasesUpdatesBuffer));
}
}
else if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Standard)
{
vector<StandardLayer*> layers;
if (m_layersTiers[tier][0]->GetParallelismMode() == ParallelismMode::Data)
{
layers.push_back(static_cast<StandardLayer*>(m_layersTiers[tier][0]));
}
else
{
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<StandardLayer*>(layer));
}
}
// Writing weights.
float* tempWeightsBuffer;
CudaAssert(hipHostMalloc<float>(&tempWeightsBuffer, layers[0]->GetWeightsBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempWeightsBuffer, standardLayer->GetWeightsBuffer(), standardLayer->GetWeightsBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempWeightsBuffer), standardLayer->GetWeightsBufferSize());
}
CudaAssert(hipHostFree(tempWeightsBuffer));
// Writing biases.
float* tempBiasesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempBiasesBuffer, standardLayer->GetBiasesBuffer(), standardLayer->GetBiasesBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesBuffer), standardLayer->GetBiasesBufferSize());
}
CudaAssert(hipHostFree(tempBiasesBuffer));
if (saveUpdateBuffers)
{
// Writing weights update buffers.
float* tempWeightsUpdatesBuffer;
CudaAssert(hipHostMalloc<float>(&tempWeightsUpdatesBuffer, layers[0]->GetWeightsBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempWeightsUpdatesBuffer, standardLayer->GetWeightsUpdateBuffer(), standardLayer->GetWeightsBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempWeightsUpdatesBuffer), standardLayer->GetWeightsBufferSize());
}
CudaAssert(hipHostFree(tempWeightsUpdatesBuffer));
// Writing biases update buffers.
float* tempBiasesUpdatesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesUpdatesBuffer, layers[0]->GetBiasesBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(hipMemcpy(tempBiasesUpdatesBuffer, standardLayer->GetBiasesUpdateBuffer(), standardLayer->GetBiasesBufferSize(), hipMemcpyDeviceToHost));
// Need to do synchronize since hipMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(hipDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesUpdatesBuffer), standardLayer->GetBiasesBufferSize());
}
CudaAssert(hipHostFree(tempBiasesUpdatesBuffer));
}
}
}
CudaAssert(hipSetDevice(0));
modelStream.close();
}
void NeuralNet::SaveModelCheckpoint(string modelFile)
{
SaveModel(modelFile, true);
}
void NeuralNet::SaveModelForPrediction(string modelFile)
{
SaveModel(modelFile, false);
}
void NeuralNet::LoadModelCheckpoint(string modelFile)
{
ifstream modelStream(modelFile, ifstream::in | ifstream::binary);
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Convolutional)
{
vector<ConvolutionalLayer*> layers;
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<ConvolutionalLayer*>(layer));
}
// Reading filters.
float* tempFiltersBuffer;
CudaAssert(hipHostMalloc<float>(&tempFiltersBuffer, layers[0]->GetFiltersBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempFiltersBuffer), layers[0]->GetFiltersBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyFiltersFromHost(tempFiltersBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempFiltersBuffer), convLayer->GetFiltersBufferSize());
convLayer->CopyFiltersFromHost(tempFiltersBuffer);
}
}
CudaAssert(hipHostFree(tempFiltersBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), layers[0]->GetBiasesBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), convLayer->GetBiasesBufferSize());
convLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
CudaAssert(hipHostFree(tempBiasesBuffer));
// Reading filters update buffer.
float* tempFiltersUpdateBuffer;
CudaAssert(hipHostMalloc<float>(&tempFiltersUpdateBuffer, layers[0]->GetFiltersBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempFiltersUpdateBuffer), layers[0]->GetFiltersBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyFiltersUpdateFromHost(tempFiltersUpdateBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempFiltersUpdateBuffer), convLayer->GetFiltersBufferSize());
convLayer->CopyFiltersUpdateFromHost(tempFiltersUpdateBuffer);
}
}
CudaAssert(hipHostFree(tempFiltersUpdateBuffer));
// Reading biases update buffer.
float* tempBiasesUpdateBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesUpdateBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), layers[0]->GetBiasesBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(hipSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), convLayer->GetBiasesBufferSize());
convLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
CudaAssert(hipHostFree(tempBiasesUpdateBuffer));
}
else if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Standard)
{
vector<StandardLayer*> layers;
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<StandardLayer*>(layer));
}
// Reading weights.
float* tempWeightsBuffer;
CudaAssert(hipHostMalloc<float>(&tempWeightsBuffer, layers[0]->GetWeightsBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempWeightsBuffer), layers[0]->GetWeightsBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyWeightsFromHost(tempWeightsBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempWeightsBuffer), standardLayer->GetWeightsBufferSize());
standardLayer->CopyWeightsFromHost(tempWeightsBuffer);
}
}
CudaAssert(hipHostFree(tempWeightsBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), layers[0]->GetBiasesBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), standardLayer->GetBiasesBufferSize());
standardLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
CudaAssert(hipHostFree(tempBiasesBuffer));
// Reading weights update buffer.
float* tempWeightsUpdateBuffer;
CudaAssert(hipHostMalloc<float>(&tempWeightsUpdateBuffer, layers[0]->GetWeightsBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempWeightsUpdateBuffer), layers[0]->GetWeightsBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyWeightsUpdateFromHost(tempWeightsUpdateBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempWeightsUpdateBuffer), standardLayer->GetWeightsBufferSize());
standardLayer->CopyWeightsUpdateFromHost(tempWeightsUpdateBuffer);
}
}
CudaAssert(hipHostFree(tempWeightsUpdateBuffer));
// Reading biases update buffer.
float* tempBiasesUpdateBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesUpdateBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), layers[0]->GetBiasesBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(hipSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), standardLayer->GetBiasesBufferSize());
standardLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
CudaAssert(hipHostFree(tempBiasesUpdateBuffer));
}
}
CudaAssert(hipSetDevice(0));
modelStream.close();
}
void NeuralNet::LoadModelForPrediction(string modelFile)
{
ifstream modelStream(modelFile, ifstream::in | ifstream::binary);
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Convolutional)
{
ConvolutionalLayer* convLayer = static_cast<ConvolutionalLayer*>(m_layersTiers[tier][0]);
// Reading filters.
float* tempFiltersBuffer;
CudaAssert(hipHostMalloc<float>(&tempFiltersBuffer, convLayer->GetFiltersBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempFiltersBuffer), convLayer->GetFiltersBufferSize());
convLayer->CopyFiltersFromHost(tempFiltersBuffer);
CudaAssert(hipHostFree(tempFiltersBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesBuffer, convLayer->GetBiasesBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), convLayer->GetBiasesBufferSize());
convLayer->CopyBiasesFromHost(tempBiasesBuffer);
CudaAssert(hipHostFree(tempBiasesBuffer));
}
else if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Standard)
{
StandardLayer* standardLayer = static_cast<StandardLayer*>(m_layersTiers[tier][0]);
// Reading weights.
float* tempWeightsBuffer;
CudaAssert(hipHostMalloc<float>(&tempWeightsBuffer, standardLayer->GetWeightsBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempWeightsBuffer), standardLayer->GetWeightsBufferSize());
standardLayer->CopyWeightsFromHost(tempWeightsBuffer);
CudaAssert(hipHostFree(tempWeightsBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(hipHostMalloc<float>(&tempBiasesBuffer, standardLayer->GetBiasesBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), standardLayer->GetBiasesBufferSize());
standardLayer->CopyBiasesFromHost(tempBiasesBuffer);
CudaAssert(hipHostFree(tempBiasesBuffer));
}
}
modelStream.close();
} | 18859a28e9387a28a50143f3dc9d75c03c501b9b.cu | // ----------------------------------------------------------------------------------------------------
// Copyrighted by Marko Rakita.
// Author: Marko Rakita
// File contains: Neural network.
// Created: 12/27/2015.
// ----------------------------------------------------------------------------------------------------
#include "include/neuralnet.cuh"
NeuralNet::NeuralNet(size_t maxNetworkTierSize)
{
m_maxNetworkTierSize = maxNetworkTierSize;
for (size_t tierLayer = 0; tierLayer < m_maxNetworkTierSize; ++tierLayer)
{
CudaAssert(cudaSetDevice((int)tierLayer));
// Initialize calculation stream.
cudaStream_t deviceCalculationStream;
CudaAssert(cudaStreamCreateWithFlags(&deviceCalculationStream, cudaStreamNonBlocking));
m_deviceCalculationStreams.push_back(deviceCalculationStream);
// Initialize memory stream.
cudaStream_t deviceMemoryStream;
CudaAssert(cudaStreamCreateWithFlags(&deviceMemoryStream, cudaStreamNonBlocking));
m_deviceMemoryStreams.push_back(deviceMemoryStream);
// Initialize cuBLAS handles.
cublasHandle_t cublasHandle;
CudaCublasAssert(cublasCreate(&cublasHandle));
m_cublasHandles.push_back(cublasHandle);
// Initialize cuRAND state buffers.
curandState* curandStateBuffer;
CudaAssert(cudaMalloc<curandState>(&curandStateBuffer, DropoutLayer::c_numCurandBlocks * DropoutLayer::c_numCurandThreadsPerBlock * sizeof(curandState)));
InitCurandStatesBuffer(curandStateBuffer, deviceCalculationStream);
m_curandStatesBuffers.push_back(curandStateBuffer);
// We need to sync whole device since cublas uses stream 0 to create handles,
// but this is called once per network so we don't care.
CudaAssert(cudaDeviceSynchronize());
}
// Reverting back to default device.
CudaAssert(cudaSetDevice(0));
}
/*
Initializes one cuRAND state per thread.
*/
__global__ void InitializeCurandStates(curandState* curandStatesBuffer, unsigned long long seedValue)
{
const uint c_stateIndex = blockIdx.x * blockDim.x + threadIdx.x;
// Initializing each state with different subsequence, to get more statistically uncorrelated sequences in different cuRAND states.
curand_init(seedValue, c_stateIndex, 0, &curandStatesBuffer[c_stateIndex]);
}
void NeuralNet::InitCurandStatesBuffer(curandState* curandStatesBuffer, cudaStream_t deviceCalculationStream)
{
dim3 blockDimensions(DropoutLayer::c_numCurandThreadsPerBlock);
dim3 gridDimensions(DropoutLayer::c_numCurandBlocks);
// Making it less likely for statistically correlated sequences of random values across different cuRAND state buffers,
// since they are all initialized in approximately same time.
unsigned long long seedValue = 2 * chrono::system_clock::now().time_since_epoch().count() + 1;
LAUNCH_KERNEL_ASYNC(InitializeCurandStates, gridDimensions, blockDimensions, deviceCalculationStream)(curandStatesBuffer, seedValue);
CudaAssert(cudaGetLastError());
}
NeuralNet::~NeuralNet()
{
// Delete layers.
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
for (size_t layerIndex = 0; layerIndex < m_layersTiers[tier].size(); ++layerIndex)
{
delete m_layersTiers[tier][layerIndex];
}
}
// Destroy streams.
for (size_t stream = 0; stream < m_deviceCalculationStreams.size(); ++stream)
{
CudaAssert(cudaStreamDestroy(m_deviceCalculationStreams[stream]));
CudaAssert(cudaStreamDestroy(m_deviceMemoryStreams[stream]));
}
// Destroy cuBLAS handles.
for (size_t handle = 0; handle < m_cublasHandles.size(); ++handle)
{
CudaCublasAssert(cublasDestroy(m_cublasHandles[handle]));
}
// Destroy cuRAND state buffers.
for (size_t buffer = 0; buffer < m_curandStatesBuffers.size(); ++buffer)
{
CudaAssert(cudaFree(m_curandStatesBuffers[buffer]));
}
}
void NeuralNet::SaveModel(string modelFile, bool saveUpdateBuffers)
{
ofstream modelStream(modelFile, ofstream::out | ofstream::binary);
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Convolutional)
{
vector<ConvolutionalLayer*> layers;
if (m_layersTiers[tier][0]->GetParallelismMode() == ParallelismMode::Data)
{
layers.push_back(static_cast<ConvolutionalLayer*>(m_layersTiers[tier][0]));
}
else
{
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<ConvolutionalLayer*>(layer));
}
}
// Writing filters.
float* tempFiltersBuffer;
CudaAssert(cudaMallocHost<float>(&tempFiltersBuffer, layers[0]->GetFiltersBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempFiltersBuffer, convLayer->GetFiltersBuffer(), convLayer->GetFiltersBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempFiltersBuffer), convLayer->GetFiltersBufferSize());
}
CudaAssert(cudaFreeHost(tempFiltersBuffer));
// Writing biases.
float* tempBiasesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempBiasesBuffer, convLayer->GetBiasesBuffer(), convLayer->GetBiasesBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesBuffer), convLayer->GetBiasesBufferSize());
}
CudaAssert(cudaFreeHost(tempBiasesBuffer));
if (saveUpdateBuffers)
{
// Writing filters update buffers.
float* tempFiltersUpdatesBuffer;
CudaAssert(cudaMallocHost<float>(&tempFiltersUpdatesBuffer, layers[0]->GetFiltersBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempFiltersUpdatesBuffer, convLayer->GetFiltersUpdateBuffer(), convLayer->GetFiltersBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempFiltersUpdatesBuffer), convLayer->GetFiltersBufferSize());
}
CudaAssert(cudaFreeHost(tempFiltersUpdatesBuffer));
// Writing biases update buffers.
float* tempBiasesUpdatesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesUpdatesBuffer, layers[0]->GetBiasesBufferSize()));
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempBiasesUpdatesBuffer, convLayer->GetBiasesUpdateBuffer(), convLayer->GetBiasesBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesUpdatesBuffer), convLayer->GetBiasesBufferSize());
}
CudaAssert(cudaFreeHost(tempBiasesUpdatesBuffer));
}
}
else if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Standard)
{
vector<StandardLayer*> layers;
if (m_layersTiers[tier][0]->GetParallelismMode() == ParallelismMode::Data)
{
layers.push_back(static_cast<StandardLayer*>(m_layersTiers[tier][0]));
}
else
{
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<StandardLayer*>(layer));
}
}
// Writing weights.
float* tempWeightsBuffer;
CudaAssert(cudaMallocHost<float>(&tempWeightsBuffer, layers[0]->GetWeightsBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempWeightsBuffer, standardLayer->GetWeightsBuffer(), standardLayer->GetWeightsBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempWeightsBuffer), standardLayer->GetWeightsBufferSize());
}
CudaAssert(cudaFreeHost(tempWeightsBuffer));
// Writing biases.
float* tempBiasesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempBiasesBuffer, standardLayer->GetBiasesBuffer(), standardLayer->GetBiasesBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesBuffer), standardLayer->GetBiasesBufferSize());
}
CudaAssert(cudaFreeHost(tempBiasesBuffer));
if (saveUpdateBuffers)
{
// Writing weights update buffers.
float* tempWeightsUpdatesBuffer;
CudaAssert(cudaMallocHost<float>(&tempWeightsUpdatesBuffer, layers[0]->GetWeightsBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempWeightsUpdatesBuffer, standardLayer->GetWeightsUpdateBuffer(), standardLayer->GetWeightsBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempWeightsUpdatesBuffer), standardLayer->GetWeightsBufferSize());
}
CudaAssert(cudaFreeHost(tempWeightsUpdatesBuffer));
// Writing biases update buffers.
float* tempBiasesUpdatesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesUpdatesBuffer, layers[0]->GetBiasesBufferSize()));
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
CudaAssert(cudaMemcpy(tempBiasesUpdatesBuffer, standardLayer->GetBiasesUpdateBuffer(), standardLayer->GetBiasesBufferSize(), cudaMemcpyDeviceToHost));
// Need to do synchronize since cudaMemcpy is asynchronous for memory copy under 64kb.
CudaAssert(cudaDeviceSynchronize());
modelStream.write(reinterpret_cast<const char*>(tempBiasesUpdatesBuffer), standardLayer->GetBiasesBufferSize());
}
CudaAssert(cudaFreeHost(tempBiasesUpdatesBuffer));
}
}
}
CudaAssert(cudaSetDevice(0));
modelStream.close();
}
void NeuralNet::SaveModelCheckpoint(string modelFile)
{
SaveModel(modelFile, true);
}
void NeuralNet::SaveModelForPrediction(string modelFile)
{
SaveModel(modelFile, false);
}
void NeuralNet::LoadModelCheckpoint(string modelFile)
{
ifstream modelStream(modelFile, ifstream::in | ifstream::binary);
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Convolutional)
{
vector<ConvolutionalLayer*> layers;
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<ConvolutionalLayer*>(layer));
}
// Reading filters.
float* tempFiltersBuffer;
CudaAssert(cudaMallocHost<float>(&tempFiltersBuffer, layers[0]->GetFiltersBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempFiltersBuffer), layers[0]->GetFiltersBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyFiltersFromHost(tempFiltersBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempFiltersBuffer), convLayer->GetFiltersBufferSize());
convLayer->CopyFiltersFromHost(tempFiltersBuffer);
}
}
CudaAssert(cudaFreeHost(tempFiltersBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), layers[0]->GetBiasesBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), convLayer->GetBiasesBufferSize());
convLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
CudaAssert(cudaFreeHost(tempBiasesBuffer));
// Reading filters update buffer.
float* tempFiltersUpdateBuffer;
CudaAssert(cudaMallocHost<float>(&tempFiltersUpdateBuffer, layers[0]->GetFiltersBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempFiltersUpdateBuffer), layers[0]->GetFiltersBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyFiltersUpdateFromHost(tempFiltersUpdateBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempFiltersUpdateBuffer), convLayer->GetFiltersBufferSize());
convLayer->CopyFiltersUpdateFromHost(tempFiltersUpdateBuffer);
}
}
CudaAssert(cudaFreeHost(tempFiltersUpdateBuffer));
// Reading biases update buffer.
float* tempBiasesUpdateBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesUpdateBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), layers[0]->GetBiasesBufferSize());
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
convLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
else
{
for (ConvolutionalLayer* convLayer : layers)
{
CudaAssert(cudaSetDevice(convLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), convLayer->GetBiasesBufferSize());
convLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
CudaAssert(cudaFreeHost(tempBiasesUpdateBuffer));
}
else if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Standard)
{
vector<StandardLayer*> layers;
for (Layer* layer : m_layersTiers[tier])
{
layers.push_back(static_cast<StandardLayer*>(layer));
}
// Reading weights.
float* tempWeightsBuffer;
CudaAssert(cudaMallocHost<float>(&tempWeightsBuffer, layers[0]->GetWeightsBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempWeightsBuffer), layers[0]->GetWeightsBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyWeightsFromHost(tempWeightsBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempWeightsBuffer), standardLayer->GetWeightsBufferSize());
standardLayer->CopyWeightsFromHost(tempWeightsBuffer);
}
}
CudaAssert(cudaFreeHost(tempWeightsBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), layers[0]->GetBiasesBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), standardLayer->GetBiasesBufferSize());
standardLayer->CopyBiasesFromHost(tempBiasesBuffer);
}
}
CudaAssert(cudaFreeHost(tempBiasesBuffer));
// Reading weights update buffer.
float* tempWeightsUpdateBuffer;
CudaAssert(cudaMallocHost<float>(&tempWeightsUpdateBuffer, layers[0]->GetWeightsBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempWeightsUpdateBuffer), layers[0]->GetWeightsBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyWeightsUpdateFromHost(tempWeightsUpdateBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempWeightsUpdateBuffer), standardLayer->GetWeightsBufferSize());
standardLayer->CopyWeightsUpdateFromHost(tempWeightsUpdateBuffer);
}
}
CudaAssert(cudaFreeHost(tempWeightsUpdateBuffer));
// Reading biases update buffer.
float* tempBiasesUpdateBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesUpdateBuffer, layers[0]->GetBiasesBufferSize()));
if (layers[0]->GetParallelismMode() == ParallelismMode::Data)
{
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), layers[0]->GetBiasesBufferSize());
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
standardLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
else
{
for (StandardLayer* standardLayer : layers)
{
CudaAssert(cudaSetDevice(standardLayer->GetIndexInTier()));
modelStream.read(reinterpret_cast<char*>(tempBiasesUpdateBuffer), standardLayer->GetBiasesBufferSize());
standardLayer->CopyBiasesUpdateFromHost(tempBiasesUpdateBuffer);
}
}
CudaAssert(cudaFreeHost(tempBiasesUpdateBuffer));
}
}
CudaAssert(cudaSetDevice(0));
modelStream.close();
}
void NeuralNet::LoadModelForPrediction(string modelFile)
{
ifstream modelStream(modelFile, ifstream::in | ifstream::binary);
for (size_t tier = 0; tier < m_layersTiers.size(); ++tier)
{
if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Convolutional)
{
ConvolutionalLayer* convLayer = static_cast<ConvolutionalLayer*>(m_layersTiers[tier][0]);
// Reading filters.
float* tempFiltersBuffer;
CudaAssert(cudaMallocHost<float>(&tempFiltersBuffer, convLayer->GetFiltersBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempFiltersBuffer), convLayer->GetFiltersBufferSize());
convLayer->CopyFiltersFromHost(tempFiltersBuffer);
CudaAssert(cudaFreeHost(tempFiltersBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesBuffer, convLayer->GetBiasesBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), convLayer->GetBiasesBufferSize());
convLayer->CopyBiasesFromHost(tempBiasesBuffer);
CudaAssert(cudaFreeHost(tempBiasesBuffer));
}
else if (m_layersTiers[tier][0]->GetLayerType() == LayerType::Standard)
{
StandardLayer* standardLayer = static_cast<StandardLayer*>(m_layersTiers[tier][0]);
// Reading weights.
float* tempWeightsBuffer;
CudaAssert(cudaMallocHost<float>(&tempWeightsBuffer, standardLayer->GetWeightsBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempWeightsBuffer), standardLayer->GetWeightsBufferSize());
standardLayer->CopyWeightsFromHost(tempWeightsBuffer);
CudaAssert(cudaFreeHost(tempWeightsBuffer));
// Reading biases.
float* tempBiasesBuffer;
CudaAssert(cudaMallocHost<float>(&tempBiasesBuffer, standardLayer->GetBiasesBufferSize()));
modelStream.read(reinterpret_cast<char*>(tempBiasesBuffer), standardLayer->GetBiasesBufferSize());
standardLayer->CopyBiasesFromHost(tempBiasesBuffer);
CudaAssert(cudaFreeHost(tempBiasesBuffer));
}
}
modelStream.close();
} |
41dbf2578364922e77173ada14fd163fe4c5adb1.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
//
// auto-generated by ops.py
//
__constant__ int xdim0_update_halo_kernel2_xvel_plus_2_front;
int xdim0_update_halo_kernel2_xvel_plus_2_front_h = -1;
__constant__ int ydim0_update_halo_kernel2_xvel_plus_2_front;
int ydim0_update_halo_kernel2_xvel_plus_2_front_h = -1;
__constant__ int xdim1_update_halo_kernel2_xvel_plus_2_front;
int xdim1_update_halo_kernel2_xvel_plus_2_front_h = -1;
__constant__ int ydim1_update_halo_kernel2_xvel_plus_2_front;
int ydim1_update_halo_kernel2_xvel_plus_2_front_h = -1;
#define OPS_ACC0(x,y,z) (x+xdim0_update_halo_kernel2_xvel_plus_2_front*(y)+xdim0_update_halo_kernel2_xvel_plus_2_front*ydim0_update_halo_kernel2_xvel_plus_2_front*(z))
#define OPS_ACC1(x,y,z) (x+xdim1_update_halo_kernel2_xvel_plus_2_front*(y)+xdim1_update_halo_kernel2_xvel_plus_2_front*ydim1_update_halo_kernel2_xvel_plus_2_front*(z))
//user function
__device__
inline void update_halo_kernel2_xvel_plus_2_front(double *xvel0, double *xvel1, const int* fields)
{
if(fields[FIELD_XVEL0] == 1) xvel0[OPS_ACC0(0,0,0)] = xvel0[OPS_ACC0(0,0,-2)];
if(fields[FIELD_XVEL1] == 1) xvel1[OPS_ACC1(0,0,0)] = xvel1[OPS_ACC1(0,0,-2)];
}
#undef OPS_ACC0
#undef OPS_ACC1
__global__ void ops_update_halo_kernel2_xvel_plus_2_front(
double* __restrict arg0,
double* __restrict arg1,
const int* __restrict arg2,
int size0,
int size1,
int size2 ){
int idx_z = blockDim.z * blockIdx.z + threadIdx.z;
int idx_y = blockDim.y * blockIdx.y + threadIdx.y;
int idx_x = blockDim.x * blockIdx.x + threadIdx.x;
arg0 += idx_x * 1 + idx_y * 1 * xdim0_update_halo_kernel2_xvel_plus_2_front + idx_z * 1 * xdim0_update_halo_kernel2_xvel_plus_2_front * ydim0_update_halo_kernel2_xvel_plus_2_front;
arg1 += idx_x * 1 + idx_y * 1 * xdim1_update_halo_kernel2_xvel_plus_2_front + idx_z * 1 * xdim1_update_halo_kernel2_xvel_plus_2_front * ydim1_update_halo_kernel2_xvel_plus_2_front;
if (idx_x < size0 && idx_y < size1 && idx_z < size2) {
update_halo_kernel2_xvel_plus_2_front(arg0, arg1, arg2);
}
}
// host stub function
void ops_par_loop_update_halo_kernel2_xvel_plus_2_front(char const *name, ops_block block, int dim, int* range,
ops_arg arg0, ops_arg arg1, ops_arg arg2) {
ops_arg args[3] = { arg0, arg1, arg2};
ops_timing_realloc(64,"update_halo_kernel2_xvel_plus_2_front");
OPS_kernels[64].count++;
//compute locally allocated range for the sub-block
int start[3];
int end[3];
#ifdef OPS_MPI
sub_block_list sb = OPS_sub_block_list[block->index];
if (!sb->owned) return;
for ( int n=0; n<3; n++ ){
start[n] = sb->decomp_disp[n];end[n] = sb->decomp_disp[n]+sb->decomp_size[n];
if (start[n] >= range[2*n]) {
start[n] = 0;
}
else {
start[n] = range[2*n] - start[n];
}
if (sb->id_m[n]==MPI_PROC_NULL && range[2*n] < 0) start[n] = range[2*n];
if (end[n] >= range[2*n+1]) {
end[n] = range[2*n+1] - sb->decomp_disp[n];
}
else {
end[n] = sb->decomp_size[n];
}
if (sb->id_p[n]==MPI_PROC_NULL && (range[2*n+1] > sb->decomp_disp[n]+sb->decomp_size[n]))
end[n] += (range[2*n+1]-sb->decomp_disp[n]-sb->decomp_size[n]);
}
#else //OPS_MPI
for ( int n=0; n<3; n++ ){
start[n] = range[2*n];end[n] = range[2*n+1];
}
#endif //OPS_MPI
int x_size = MAX(0,end[0]-start[0]);
int y_size = MAX(0,end[1]-start[1]);
int z_size = MAX(0,end[2]-start[2]);
int xdim0 = args[0].dat->size[0]*args[0].dat->dim;
int ydim0 = args[0].dat->size[1];
int xdim1 = args[1].dat->size[0]*args[1].dat->dim;
int ydim1 = args[1].dat->size[1];
//Timing
double t1,t2,c1,c2;
ops_timers_core(&c2,&t2);
if (xdim0 != xdim0_update_halo_kernel2_xvel_plus_2_front_h || ydim0 != ydim0_update_halo_kernel2_xvel_plus_2_front_h || xdim1 != xdim1_update_halo_kernel2_xvel_plus_2_front_h || ydim1 != ydim1_update_halo_kernel2_xvel_plus_2_front_h) {
hipMemcpyToSymbol( xdim0_update_halo_kernel2_xvel_plus_2_front, &xdim0, sizeof(int) );
xdim0_update_halo_kernel2_xvel_plus_2_front_h = xdim0;
hipMemcpyToSymbol( ydim0_update_halo_kernel2_xvel_plus_2_front, &ydim0, sizeof(int) );
ydim0_update_halo_kernel2_xvel_plus_2_front_h = ydim0;
hipMemcpyToSymbol( xdim1_update_halo_kernel2_xvel_plus_2_front, &xdim1, sizeof(int) );
xdim1_update_halo_kernel2_xvel_plus_2_front_h = xdim1;
hipMemcpyToSymbol( ydim1_update_halo_kernel2_xvel_plus_2_front, &ydim1, sizeof(int) );
ydim1_update_halo_kernel2_xvel_plus_2_front_h = ydim1;
}
int *arg2h = (int *)arg2.data;
dim3 grid( (x_size-1)/OPS_block_size_x+ 1, (y_size-1)/OPS_block_size_y + 1, z_size);
dim3 tblock(OPS_block_size_x,OPS_block_size_y,1);
int consts_bytes = 0;
consts_bytes += ROUND_UP(NUM_FIELDS*sizeof(int));
reallocConstArrays(consts_bytes);
consts_bytes = 0;
arg2.data = OPS_consts_h + consts_bytes;
arg2.data_d = OPS_consts_d + consts_bytes;
for (int d=0; d<NUM_FIELDS; d++) ((int *)arg2.data)[d] = arg2h[d];
consts_bytes += ROUND_UP(NUM_FIELDS*sizeof(int));
mvConstArraysToDevice(consts_bytes);
int dat0 = args[0].dat->elem_size;
int dat1 = args[1].dat->elem_size;
char *p_a[3];
//set up initial pointers
int d_m[OPS_MAX_DIM];
#ifdef OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[0].dat->d_m[d] + OPS_sub_dat_list[args[0].dat->index]->d_im[d];
#else //OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[0].dat->d_m[d];
#endif //OPS_MPI
int base0 = dat0 * 1 *
(start[0] * args[0].stencil->stride[0] - args[0].dat->base[0] - d_m[0]);
base0 = base0+ dat0 *
args[0].dat->size[0] *
(start[1] * args[0].stencil->stride[1] - args[0].dat->base[1] - d_m[1]);
base0 = base0+ dat0 *
args[0].dat->size[0] *
args[0].dat->size[1] *
(start[2] * args[0].stencil->stride[2] - args[0].dat->base[2] - d_m[2]);
p_a[0] = (char *)args[0].data_d + base0;
#ifdef OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[1].dat->d_m[d] + OPS_sub_dat_list[args[1].dat->index]->d_im[d];
#else //OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[1].dat->d_m[d];
#endif //OPS_MPI
int base1 = dat1 * 1 *
(start[0] * args[1].stencil->stride[0] - args[1].dat->base[0] - d_m[0]);
base1 = base1+ dat1 *
args[1].dat->size[0] *
(start[1] * args[1].stencil->stride[1] - args[1].dat->base[1] - d_m[1]);
base1 = base1+ dat1 *
args[1].dat->size[0] *
args[1].dat->size[1] *
(start[2] * args[1].stencil->stride[2] - args[1].dat->base[2] - d_m[2]);
p_a[1] = (char *)args[1].data_d + base1;
ops_H_D_exchanges_device(args, 3);
ops_halo_exchanges(args,3,range);
ops_timers_core(&c1,&t1);
OPS_kernels[64].mpi_time += t1-t2;
//call kernel wrapper function, passing in pointers to data
hipLaunchKernelGGL(( ops_update_halo_kernel2_xvel_plus_2_front), dim3(grid), dim3(tblock) , 0, 0, (double *)p_a[0], (double *)p_a[1],
(int *)arg2.data_d,x_size, y_size, z_size);
if (OPS_diags>1) {
cutilSafeCall(hipDeviceSynchronize());
}
ops_timers_core(&c2,&t2);
OPS_kernels[64].time += t2-t1;
ops_set_dirtybit_device(args, 3);
ops_set_halo_dirtybit3(&args[0],range);
ops_set_halo_dirtybit3(&args[1],range);
//Update kernel record
OPS_kernels[64].transfer += ops_compute_transfer(dim, range, &arg0);
OPS_kernels[64].transfer += ops_compute_transfer(dim, range, &arg1);
}
| 41dbf2578364922e77173ada14fd163fe4c5adb1.cu | //
// auto-generated by ops.py
//
__constant__ int xdim0_update_halo_kernel2_xvel_plus_2_front;
int xdim0_update_halo_kernel2_xvel_plus_2_front_h = -1;
__constant__ int ydim0_update_halo_kernel2_xvel_plus_2_front;
int ydim0_update_halo_kernel2_xvel_plus_2_front_h = -1;
__constant__ int xdim1_update_halo_kernel2_xvel_plus_2_front;
int xdim1_update_halo_kernel2_xvel_plus_2_front_h = -1;
__constant__ int ydim1_update_halo_kernel2_xvel_plus_2_front;
int ydim1_update_halo_kernel2_xvel_plus_2_front_h = -1;
#define OPS_ACC0(x,y,z) (x+xdim0_update_halo_kernel2_xvel_plus_2_front*(y)+xdim0_update_halo_kernel2_xvel_plus_2_front*ydim0_update_halo_kernel2_xvel_plus_2_front*(z))
#define OPS_ACC1(x,y,z) (x+xdim1_update_halo_kernel2_xvel_plus_2_front*(y)+xdim1_update_halo_kernel2_xvel_plus_2_front*ydim1_update_halo_kernel2_xvel_plus_2_front*(z))
//user function
__device__
inline void update_halo_kernel2_xvel_plus_2_front(double *xvel0, double *xvel1, const int* fields)
{
if(fields[FIELD_XVEL0] == 1) xvel0[OPS_ACC0(0,0,0)] = xvel0[OPS_ACC0(0,0,-2)];
if(fields[FIELD_XVEL1] == 1) xvel1[OPS_ACC1(0,0,0)] = xvel1[OPS_ACC1(0,0,-2)];
}
#undef OPS_ACC0
#undef OPS_ACC1
__global__ void ops_update_halo_kernel2_xvel_plus_2_front(
double* __restrict arg0,
double* __restrict arg1,
const int* __restrict arg2,
int size0,
int size1,
int size2 ){
int idx_z = blockDim.z * blockIdx.z + threadIdx.z;
int idx_y = blockDim.y * blockIdx.y + threadIdx.y;
int idx_x = blockDim.x * blockIdx.x + threadIdx.x;
arg0 += idx_x * 1 + idx_y * 1 * xdim0_update_halo_kernel2_xvel_plus_2_front + idx_z * 1 * xdim0_update_halo_kernel2_xvel_plus_2_front * ydim0_update_halo_kernel2_xvel_plus_2_front;
arg1 += idx_x * 1 + idx_y * 1 * xdim1_update_halo_kernel2_xvel_plus_2_front + idx_z * 1 * xdim1_update_halo_kernel2_xvel_plus_2_front * ydim1_update_halo_kernel2_xvel_plus_2_front;
if (idx_x < size0 && idx_y < size1 && idx_z < size2) {
update_halo_kernel2_xvel_plus_2_front(arg0, arg1, arg2);
}
}
// host stub function
void ops_par_loop_update_halo_kernel2_xvel_plus_2_front(char const *name, ops_block block, int dim, int* range,
ops_arg arg0, ops_arg arg1, ops_arg arg2) {
ops_arg args[3] = { arg0, arg1, arg2};
ops_timing_realloc(64,"update_halo_kernel2_xvel_plus_2_front");
OPS_kernels[64].count++;
//compute locally allocated range for the sub-block
int start[3];
int end[3];
#ifdef OPS_MPI
sub_block_list sb = OPS_sub_block_list[block->index];
if (!sb->owned) return;
for ( int n=0; n<3; n++ ){
start[n] = sb->decomp_disp[n];end[n] = sb->decomp_disp[n]+sb->decomp_size[n];
if (start[n] >= range[2*n]) {
start[n] = 0;
}
else {
start[n] = range[2*n] - start[n];
}
if (sb->id_m[n]==MPI_PROC_NULL && range[2*n] < 0) start[n] = range[2*n];
if (end[n] >= range[2*n+1]) {
end[n] = range[2*n+1] - sb->decomp_disp[n];
}
else {
end[n] = sb->decomp_size[n];
}
if (sb->id_p[n]==MPI_PROC_NULL && (range[2*n+1] > sb->decomp_disp[n]+sb->decomp_size[n]))
end[n] += (range[2*n+1]-sb->decomp_disp[n]-sb->decomp_size[n]);
}
#else //OPS_MPI
for ( int n=0; n<3; n++ ){
start[n] = range[2*n];end[n] = range[2*n+1];
}
#endif //OPS_MPI
int x_size = MAX(0,end[0]-start[0]);
int y_size = MAX(0,end[1]-start[1]);
int z_size = MAX(0,end[2]-start[2]);
int xdim0 = args[0].dat->size[0]*args[0].dat->dim;
int ydim0 = args[0].dat->size[1];
int xdim1 = args[1].dat->size[0]*args[1].dat->dim;
int ydim1 = args[1].dat->size[1];
//Timing
double t1,t2,c1,c2;
ops_timers_core(&c2,&t2);
if (xdim0 != xdim0_update_halo_kernel2_xvel_plus_2_front_h || ydim0 != ydim0_update_halo_kernel2_xvel_plus_2_front_h || xdim1 != xdim1_update_halo_kernel2_xvel_plus_2_front_h || ydim1 != ydim1_update_halo_kernel2_xvel_plus_2_front_h) {
cudaMemcpyToSymbol( xdim0_update_halo_kernel2_xvel_plus_2_front, &xdim0, sizeof(int) );
xdim0_update_halo_kernel2_xvel_plus_2_front_h = xdim0;
cudaMemcpyToSymbol( ydim0_update_halo_kernel2_xvel_plus_2_front, &ydim0, sizeof(int) );
ydim0_update_halo_kernel2_xvel_plus_2_front_h = ydim0;
cudaMemcpyToSymbol( xdim1_update_halo_kernel2_xvel_plus_2_front, &xdim1, sizeof(int) );
xdim1_update_halo_kernel2_xvel_plus_2_front_h = xdim1;
cudaMemcpyToSymbol( ydim1_update_halo_kernel2_xvel_plus_2_front, &ydim1, sizeof(int) );
ydim1_update_halo_kernel2_xvel_plus_2_front_h = ydim1;
}
int *arg2h = (int *)arg2.data;
dim3 grid( (x_size-1)/OPS_block_size_x+ 1, (y_size-1)/OPS_block_size_y + 1, z_size);
dim3 tblock(OPS_block_size_x,OPS_block_size_y,1);
int consts_bytes = 0;
consts_bytes += ROUND_UP(NUM_FIELDS*sizeof(int));
reallocConstArrays(consts_bytes);
consts_bytes = 0;
arg2.data = OPS_consts_h + consts_bytes;
arg2.data_d = OPS_consts_d + consts_bytes;
for (int d=0; d<NUM_FIELDS; d++) ((int *)arg2.data)[d] = arg2h[d];
consts_bytes += ROUND_UP(NUM_FIELDS*sizeof(int));
mvConstArraysToDevice(consts_bytes);
int dat0 = args[0].dat->elem_size;
int dat1 = args[1].dat->elem_size;
char *p_a[3];
//set up initial pointers
int d_m[OPS_MAX_DIM];
#ifdef OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[0].dat->d_m[d] + OPS_sub_dat_list[args[0].dat->index]->d_im[d];
#else //OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[0].dat->d_m[d];
#endif //OPS_MPI
int base0 = dat0 * 1 *
(start[0] * args[0].stencil->stride[0] - args[0].dat->base[0] - d_m[0]);
base0 = base0+ dat0 *
args[0].dat->size[0] *
(start[1] * args[0].stencil->stride[1] - args[0].dat->base[1] - d_m[1]);
base0 = base0+ dat0 *
args[0].dat->size[0] *
args[0].dat->size[1] *
(start[2] * args[0].stencil->stride[2] - args[0].dat->base[2] - d_m[2]);
p_a[0] = (char *)args[0].data_d + base0;
#ifdef OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[1].dat->d_m[d] + OPS_sub_dat_list[args[1].dat->index]->d_im[d];
#else //OPS_MPI
for (int d = 0; d < dim; d++) d_m[d] = args[1].dat->d_m[d];
#endif //OPS_MPI
int base1 = dat1 * 1 *
(start[0] * args[1].stencil->stride[0] - args[1].dat->base[0] - d_m[0]);
base1 = base1+ dat1 *
args[1].dat->size[0] *
(start[1] * args[1].stencil->stride[1] - args[1].dat->base[1] - d_m[1]);
base1 = base1+ dat1 *
args[1].dat->size[0] *
args[1].dat->size[1] *
(start[2] * args[1].stencil->stride[2] - args[1].dat->base[2] - d_m[2]);
p_a[1] = (char *)args[1].data_d + base1;
ops_H_D_exchanges_device(args, 3);
ops_halo_exchanges(args,3,range);
ops_timers_core(&c1,&t1);
OPS_kernels[64].mpi_time += t1-t2;
//call kernel wrapper function, passing in pointers to data
ops_update_halo_kernel2_xvel_plus_2_front<<<grid, tblock >>> ( (double *)p_a[0], (double *)p_a[1],
(int *)arg2.data_d,x_size, y_size, z_size);
if (OPS_diags>1) {
cutilSafeCall(cudaDeviceSynchronize());
}
ops_timers_core(&c2,&t2);
OPS_kernels[64].time += t2-t1;
ops_set_dirtybit_device(args, 3);
ops_set_halo_dirtybit3(&args[0],range);
ops_set_halo_dirtybit3(&args[1],range);
//Update kernel record
OPS_kernels[64].transfer += ops_compute_transfer(dim, range, &arg0);
OPS_kernels[64].transfer += ops_compute_transfer(dim, range, &arg1);
}
|
b581b21001deca1b60a5f8c8c87cc3e679e67ecb.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
//The correct one
#include <stdio.h>
#include "cutil_math.h"
// Ping-pong selector, toggled once per simulation step so each pass reads one
// buffer of a *0/*1 pair and writes the other. VERY IMPORTANT for swapping.
int tick=0;
//---------------------------------------------------------------------------
// Global device data pointers (allocated in cuda_Initialize via
// hipMallocPitch). Buffers suffixed 0/1 are ping-pong pairs.
// NOTE(review): these are declared __device__ but their host-side shadows are
// what hipMallocPitch writes and what host code passes to kernels -- the
// device-side copies are never updated via hipMemcpyToSymbol. This works only
// because kernels receive the pointers as arguments; confirm intent.
//Water
__device__ float *dWaterRainRate0_ptr;
__device__ float *dWaterRainRate1_ptr;
__device__ float *dWaterH0_ptr;
__device__ float *dWaterH1_ptr;
__device__ float4 *dWaterFlux0_ptr;
__device__ float4 *dWaterFlux1_ptr;
__device__ float2 *dWaterVelocity_ptr;
//Sediment
__device__ float *dSedimentCapacity_ptr;
__device__ float *dSedimentAmount0_ptr;
__device__ float *dSedimentAmount1_ptr;
__device__ float *dSedimentAmntAdvect_ptr;
__device__ float *dSedimentAmntAdvectBack_ptr;
//Terrain
__device__ float *dTerrainH0_ptr;
__device__ float *dTerrainH1_ptr;
__device__ float *dHardness0_ptr;
__device__ float *dHardness1_ptr;
//Thermal Erosion
__device__ float *dThermalAmount2Move_ptr;
__device__ float4 *dThermalFlux_ptr;
__device__ float4 *dThermalFluxDiag_ptr;
// Row pitches (in bytes) returned by hipMallocPitch -- STORED ON HOST.
// Note that several buffer pairs intentionally share one pitch variable
// (e.g. hWaterFlux_pitch, hSedimentAmount_pitch) since all allocations use
// identical width/height/element size.
size_t hWaterRainRate0_pitch=0;
size_t hWaterRainRate1_pitch=0;
size_t hWaterH0_pitch=0;
size_t hWaterH1_pitch=0;
size_t hWaterFlux_pitch=0;
size_t hWaterVelocity_pitch=0;
//Sediment
size_t hSedimentCapacity_pitch=0;
size_t hSedimentAmount_pitch=0;
//Terrain
size_t hTerrainH0_pitch=0;
size_t hTerrainH1_pitch=0;
size_t hHardness0_pitch=0;
size_t hHardness1_pitch=0;
//Thermal Erosion
size_t hThermalAmount2Move_pitch=0;
size_t hThermalFlux_pitch=0;
size_t hThermalFluxDiag_pitch=0;
//---------------------------------------------------------------------------
// Texture references used to sample the simulation fields with hardware
// bilinear filtering and wrap addressing (configured in cuda_Initialize).
// For each field there are three items: the texture reference itself, a
// runtime textureReference pointer resolved by symbol name via
// hipGetTextureReference, and the byte offset produced when binding.
// NOTE(review): name-based texture reference lookup is a deprecated API --
// confirm the targeted HIP/CUDA toolkit versions still support it.
texture<float, hipTextureType2D, hipReadModeElementType> dTexWaterRainRate;
textureReference const * dTexWaterRainRatePtr;
size_t dTexWaterRainRateOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexWaterH;
textureReference const *dTexWaterHptr;
size_t dTexWaterHOffset;
texture<float4, hipTextureType2D, hipReadModeElementType> dTexWaterFlux;
textureReference const *dTexWaterFluxPtr;
size_t dTexWaterFluxOffset;
texture<float2, hipTextureType2D, hipReadModeElementType> dTexWaterVelocity;
textureReference const *dTexWaterVelocityPtr;
size_t dTexWaterVelocityOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexSedimentCapacity;
textureReference const *dTexSedimentCapacityPtr;
size_t dTexSedimentCapacityOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexSedimentAmount;
textureReference const *dTexSedimentAmountPtr;
size_t dTexSedimentAmountOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexSedimentAmntAdvect;
textureReference const *dTexSedimentAmntAdvectPtr;
size_t dTexSedimentAmntAdvectOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexSedimentAmntAdvectBack;
textureReference const *dTexSedimentAmntAdvectBackPtr;
size_t dTexSedimentAmntAdvectBackOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexTerrainH;
textureReference const *dTexTerrainHptr;
size_t dTexTerrainHOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexHardness;
textureReference const *dTexHardnessPtr;
size_t dTexHardnessOffset;
texture<float, hipTextureType2D, hipReadModeElementType> dTexThermalAmnt2Move;
textureReference const *dTexThermalAmnt2MovePtr;
size_t dTexThermalAmnt2MoveOffset;
texture<float4, hipTextureType2D, hipReadModeElementType> dTexThermalFlux;
textureReference const *dTexThermalFluxPtr;
size_t dTexThermalFluxOffset;
texture<float4, hipTextureType2D, hipReadModeElementType> dTexThermalFluxDiag;
textureReference const *dTexThermalFluxDiagPtr;
size_t dTexThermalFluxDiagOffset;
// Channel format descriptors for binding the float/float2/float4 textures
// above (filled in by cuda_Initialize).
struct hipChannelFormatDesc texFloatChannelDesc;
struct hipChannelFormatDesc texFloat2ChannelDesc;
struct hipChannelFormatDesc texFloat4ChannelDesc;
//------------------------------------------------------------------------------------
// Simulation coefficients in constant memory. Defaults chosen from
// Balazs Jako's paper.
__constant__ float dCoef_Timestep = 0.02f;            // integration step dt
__constant__ float dCoef_G = 9.81f;                   // gravitational acceleration
__constant__ float dCoef_PipeCrossSection = 40.0f;    // virtual-pipe cross section A
__constant__ float dCoef_PipeLength = 1.0f ;          // virtual-pipe length l (cell size)
__constant__ float dCoef_RainRate = 0.012f;           // rainfall scale (kernelIncWater)
__constant__ float dCoef_talusRatio = 1.2f;           // base talus-angle threshold
__constant__ float dCoef_talusCoef = 0.8f;            // NOTE(review): unused in the visible kernels
__constant__ float dCoef_talusBias = 0.1f;            // additive bias on the talus threshold
__constant__ float dCoef_SedimentCapacityFactor = 1.0f; // NOTE(review): unused in the visible kernels
__constant__ float dCoef_DepthMax = 10.0f;            // water depth where capacity boost vanishes
__constant__ float dCoef_DissolveRate = 0.5f;         // terrain -> sediment rate
__constant__ float dCoef_SedimentDropRate = 1.0f;     // sediment -> terrain rate
__constant__ float dCoef_HardnessMin = 0.5f;          // lower clamp for rock hardness
__constant__ float dCoef_SoftenRate = 5.0f;           // hardness loss per eroding step
__constant__ float dCoef_ThermalErosionRate = 0.15f;  // NOTE(review): unused in the visible kernels
__constant__ float dCoef_EvaporationRate = 0.015f;    // NOTE(review): unused in the visible kernels
extern "C" void cuda_Initialize(int width, int height)
{
printf("\n\n\n %i %i", width, height);
texFloatChannelDesc = hipCreateChannelDesc<float1>();
texFloat2ChannelDesc = hipCreateChannelDesc<float2>();
texFloat4ChannelDesc = hipCreateChannelDesc<float4>();
dTexWaterRainRate.normalized = true;
dTexWaterRainRate.filterMode = hipFilterModeLinear;
dTexWaterRainRate.addressMode[0] = hipAddressModeWrap;
dTexWaterRainRate.addressMode[1] = hipAddressModeWrap;
dTexWaterH.normalized = true;
dTexWaterH.filterMode = hipFilterModeLinear;
dTexWaterH.addressMode[0] = hipAddressModeWrap;
dTexWaterH.addressMode[1] = hipAddressModeWrap;
dTexWaterFlux.normalized = true;
dTexWaterFlux.filterMode = hipFilterModeLinear;
dTexWaterFlux.addressMode[0] = hipAddressModeWrap;
dTexWaterFlux.addressMode[1] = hipAddressModeWrap;
dTexWaterVelocity.normalized = true;
dTexWaterVelocity.filterMode = hipFilterModeLinear;
dTexWaterVelocity.addressMode[0] = hipAddressModeWrap;
dTexWaterVelocity.addressMode[1] = hipAddressModeWrap;
dTexSedimentCapacity.normalized = true;
dTexSedimentCapacity.filterMode = hipFilterModeLinear;
dTexSedimentCapacity.addressMode[0] = hipAddressModeWrap;
dTexSedimentCapacity.addressMode[1] = hipAddressModeWrap;
dTexSedimentAmount.normalized = true;
dTexSedimentAmount.filterMode = hipFilterModeLinear;
dTexSedimentAmount.addressMode[0] = hipAddressModeWrap;
dTexSedimentAmount.addressMode[1] = hipAddressModeWrap;
dTexSedimentAmntAdvect.normalized = true;
dTexSedimentAmntAdvect.filterMode = hipFilterModeLinear;
dTexSedimentAmntAdvect.addressMode[0] = hipAddressModeWrap;
dTexSedimentAmntAdvect.addressMode[1] = hipAddressModeWrap;
dTexSedimentAmntAdvectBack.normalized = true;
dTexSedimentAmntAdvectBack.filterMode = hipFilterModeLinear;
dTexSedimentAmntAdvectBack.addressMode[0] = hipAddressModeWrap;
dTexSedimentAmntAdvectBack.addressMode[1] = hipAddressModeWrap;
dTexTerrainH.normalized = true;
dTexTerrainH.filterMode = hipFilterModeLinear;
dTexTerrainH.addressMode[0] = hipAddressModeWrap;
dTexTerrainH.addressMode[1] = hipAddressModeWrap;
dTexHardness.normalized = true;
dTexHardness.filterMode = hipFilterModeLinear;
dTexHardness.addressMode[0] = hipAddressModeWrap;
dTexHardness.addressMode[1] = hipAddressModeWrap;
dTexThermalAmnt2Move.normalized = true;
dTexThermalAmnt2Move.filterMode = hipFilterModeLinear;
dTexThermalAmnt2Move.addressMode[0] = hipAddressModeWrap;
dTexThermalAmnt2Move.addressMode[1] = hipAddressModeWrap;
dTexThermalFlux.normalized = true;
dTexThermalFlux.filterMode = hipFilterModeLinear;
dTexThermalFlux.addressMode[0] = hipAddressModeWrap;
dTexThermalFlux.addressMode[1] = hipAddressModeWrap;
dTexThermalFluxDiag.normalized = true;
dTexThermalFluxDiag.filterMode = hipFilterModeLinear;
dTexThermalFluxDiag.addressMode[0] = hipAddressModeWrap;
dTexThermalFluxDiag.addressMode[1] = hipAddressModeWrap;
hipGetTextureReference(&dTexWaterRainRatePtr, "dTexWaterRainRate");
hipGetTextureReference(&dTexWaterHptr, "dTexWaterH");
hipGetTextureReference(&dTexWaterFluxPtr, "dTexWaterFlux");
hipGetTextureReference(&dTexWaterVelocityPtr, "dTexWaterVelocity");
hipGetTextureReference(&dTexSedimentCapacityPtr, "dTexSedimentCapacity");
hipGetTextureReference(&dTexSedimentAmountPtr, "dTexSedimentAmount");
hipGetTextureReference(&dTexSedimentAmntAdvectPtr, "dTexSedimentAmntAdvect");
hipGetTextureReference(&dTexSedimentAmntAdvectBackPtr, "dTexSedimentAmntAdvectBack");
hipGetTextureReference(&dTexTerrainHptr, "dTexTerrainH");
hipGetTextureReference(&dTexHardnessPtr, "dTexHardness");
hipGetTextureReference(&dTexThermalAmnt2MovePtr, "dTexThermalAmnt2Move");
hipGetTextureReference(&dTexThermalFluxPtr, "dTexThermalFlux");
hipGetTextureReference(&dTexThermalFluxDiagPtr, "dTexThermalFluxDiag");
//Allocate Device Memory
hipMallocPitch((void**)&dWaterRainRate0_ptr,&hWaterRainRate0_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dWaterRainRate1_ptr,&hWaterRainRate1_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dWaterH0_ptr,&hWaterH0_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dWaterH1_ptr,&hWaterH1_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dWaterFlux0_ptr,&hWaterFlux_pitch,(width*sizeof(float4)),height);
hipMallocPitch((void**)&dWaterFlux1_ptr,&hWaterFlux_pitch,(width*sizeof(float4)),height);
hipMallocPitch((void**)&dWaterVelocity_ptr,&hWaterVelocity_pitch,(width*sizeof(float2)),height);
printf("\n\n\n %i %i", width, height);
hipMallocPitch((void**)&dSedimentCapacity_ptr,&hSedimentCapacity_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dSedimentAmount0_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dSedimentAmount1_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dSedimentAmntAdvect_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dSedimentAmntAdvectBack_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dTerrainH0_ptr,&hTerrainH0_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dTerrainH1_ptr,&hTerrainH1_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dHardness0_ptr,&hHardness0_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dHardness1_ptr,&hHardness1_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dThermalAmount2Move_ptr,&hThermalAmount2Move_pitch,(width*sizeof(float)),height);
hipMallocPitch((void**)&dThermalFlux_ptr,&hThermalFlux_pitch,(width*sizeof(float4)),height);
hipMallocPitch((void**)&dThermalFluxDiag_ptr,&hThermalFluxDiag_pitch,(width*sizeof(float4)),height);
//Memset Device Memory
hipMemset2D(dWaterRainRate0_ptr,hWaterRainRate0_pitch,0,width*sizeof(float),height);
hipMemset2D(dWaterRainRate1_ptr,hWaterRainRate1_pitch,0,width*sizeof(float),height);
hipMemset2D(dWaterH0_ptr,hWaterH0_pitch,0,width*sizeof(float),height);
hipMemset2D(dWaterH1_ptr,hWaterH1_pitch,0,width*sizeof(float),height);
hipMemset2D(dWaterFlux0_ptr,hWaterFlux_pitch,0,width*sizeof(float4),height);
hipMemset2D(dWaterFlux1_ptr,hWaterFlux_pitch,0,width*sizeof(float4),height);
hipMemset2D(dWaterVelocity_ptr,hWaterVelocity_pitch,0,width*sizeof(float2),height);
hipMemset2D(dSedimentCapacity_ptr,hSedimentCapacity_pitch,0,width*sizeof(float),height);
hipMemset2D(dSedimentAmount0_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
hipMemset2D(dSedimentAmount1_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
hipMemset2D(dSedimentAmntAdvect_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
hipMemset2D(dSedimentAmntAdvectBack_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
hipMemset2D(dTerrainH0_ptr,hTerrainH0_pitch,0,width*sizeof(float),height);
hipMemset2D(dTerrainH1_ptr,hTerrainH1_pitch,0,width*sizeof(float),height);
hipMemset2D(dHardness0_ptr,hHardness0_pitch,0,width*sizeof(float),height);
hipMemset2D(dHardness1_ptr,hHardness1_pitch,0,width*sizeof(float),height);
hipMemset2D(dThermalAmount2Move_ptr,hThermalAmount2Move_pitch,0,width*sizeof(float),height);
hipMemset2D(dThermalFlux_ptr,hThermalFlux_pitch,0,width*sizeof(float4),height);
hipMemset2D(dThermalFluxDiag_ptr,hThermalFluxDiag_pitch,0,width*sizeof(float4),height);
}
// Uploads a host-side height field into terrain ping-pong buffer 0.
// 'width' is in bytes, as required by hipMemcpy2D. Returns the raw status
// code (0 == hipSuccess). Original author marked this USELESS.
extern "C" int cuda_SetTerrainHeight(void * src, size_t srcPitch,size_t width, size_t height)
{
    int status = hipMemcpy2D(dTerrainH0_ptr, hTerrainH0_pitch, src, srcPitch, width, height, hipMemcpyHostToDevice);
    return status;
}
// Uploads a host-side hardness map into hardness ping-pong buffer 0.
// 'width' is in bytes, as required by hipMemcpy2D. Returns the raw status
// code (0 == hipSuccess). Original author marked this USELESS.
extern "C" int cuda_SetHardness(void * src, size_t srcPitch,size_t width, size_t height)
{
    int status = hipMemcpy2D(dHardness0_ptr, hHardness0_pitch, src, srcPitch, width, height, hipMemcpyHostToDevice);
    return status;
}
// Uploads a host-side rain-rate map into rain-rate ping-pong buffer 0.
// 'width' is in bytes, as required by hipMemcpy2D. Returns the raw status
// code (0 == hipSuccess). Original author marked this USELESS.
extern "C" int cuda_SetRainRate(void * src, size_t srcPitch,size_t width, size_t height)
{
    int status = hipMemcpy2D(dWaterRainRate0_ptr, hWaterRainRate0_pitch, src, srcPitch, width, height, hipMemcpyHostToDevice);
    return status;
}
// Exposes the terrain-height device buffer (ping-pong slot 0) to host code.
extern "C" float* cuda_fetchTerrainHptr()
{
    float* const buffer = dTerrainH0_ptr;
    return buffer;
}
// Exposes the water-height device buffer (ping-pong slot 0) to host code.
extern "C" float* cuda_fetchWaterHptr()
{
    float* const buffer = dWaterH0_ptr;
    return buffer;
}
// Exposes the rain-rate device buffer (ping-pong slot 0) to host code.
extern "C" float* cuda_fetchRainRateptr()
{
    float* const buffer = dWaterRainRate0_ptr;
    return buffer;
}
// Exposes the hardness device buffer (ping-pong slot 0) to host code.
extern "C" float* cuda_fetchHardnessptr()
{
    float* const buffer = dHardness0_ptr;
    return buffer;
}
// Exposes the water-velocity device buffer to host code, reinterpreted as a
// plain float array (two floats per cell).
extern "C" float* cuda_fetchWaterVelocityPtr()
{
    return reinterpret_cast<float*>(dWaterVelocity_ptr);
}
// Exposes the suspended-sediment device buffer (ping-pong slot 0) to host code.
extern "C" float* cuda_fetchSedimentAmountPtr()
{
    return reinterpret_cast<float*>(dSedimentAmount0_ptr);
}
// Debug/visualization kernel: fills 'heights' with an animated sine pattern
// (.x), the rain-rate sample (.y), the terrain-height sample (.z) and a
// scaled cosine wave (.w), and mirrors the two texture samples into
// 'in'/'in2'. NOTE(review): texture coordinates are normalized by 'height' on
// both axes, like everywhere in this file -- assumes a square grid; confirm.
__global__ void kernelTest(float4 *heights, size_t width, unsigned int height, float dt, float*in, float*in2)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // Guard the grid tail: never write out of bounds when the launch grid
    // does not divide the buffer size exactly.
    if (x >= width || y >= height) return;
    float a = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
    float b = tex2D(dTexWaterRainRate,(x+0.5f)/height,(y+0.5f)/height);
    heights[y*width+x].x = sinf(x*0.1f+dt)*cosf(y*0.1f+dt);
    heights[y*width+x].y = b;
    heights[y*width+x].z = a;
    // Bug fix: the literal was '10.0' (double), silently promoting the whole
    // expression to double precision in an otherwise all-float kernel.
    heights[y*width+x].w = cosf(y*0.1f+dt)*10.0f;
    in[y*width+x] = a;
    in2[y*width+x] = b;
}
// Brush edit: adds (or removes, for negative editValue) material around the
// world-space position (editX, editY). The contribution falls off smoothly
// from the brush centre to 'maxDist', is scaled by dt, and the result is
// clamped at zero. 'pointDistance' is the world-space spacing between cells.
// NOTE(review): the grid is centred using 'height' on both axes -- assumes a
// square grid; confirm.
__global__ void kernelEditBuffer(float* in, float * out, float editX, float editY, float pointDistance, float editValue, size_t width, unsigned int height ,float maxDist, float dt )
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // Guard the grid tail: avoid out-of-bounds writes.
    if (x >= width || y >= height) return;
    // Cell position in world space, centred on the grid.
    float2 pos;
    pos.x = ((float)x-0.5f*height+0.5f)*pointDistance;
    pos.y = ((float)y-0.5f*height+0.5f)*pointDistance;
    float d = sqrtf((pos.x-editX)*(pos.x-editX) + (pos.y-editY)*(pos.y-editY));
    float amount = 0.0f;
    if (d < maxDist)
    {
        // Bug fix: '1.0' was a double literal, promoting the whole falloff
        // expression to double precision; keep the math in float.
        amount = editValue*(1.0f-smoothstep(0.0f,maxDist,d))*dt;
    }
    out[y*width+x] = max(in[y*width+x]+ amount,0.0f);
}
// Reads: dTexWaterH, dTexWaterRainRate.
// Rainfall step: when 'rain' is non-zero each cell gains
// dCoef_Timestep * max(0, dCoef_RainRate * rainRateMap(cell)); otherwise the
// water height is copied through unchanged. 'outBuffer' is an unused debug
// output kept for interface compatibility.
__global__ void kernelIncWater (float* out, float4* outBuffer, size_t width, unsigned int height,int rain)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    float u = (x+0.5f)/height;
    float v = (y+0.5f)/height;
    float water = tex2D(dTexWaterH,u,v);
    if(rain)
    {
        float rainfall = dCoef_RainRate*tex2D(dTexWaterRainRate, u, v);
        water += dCoef_Timestep*max(0.0f,rainfall);
    }
    out[y*width+x] = water;
}
// Reads: dTexWaterH, dTexTerrainH, dTexWaterFlux.
// Virtual-pipe flux update. For each cell the previous outflow flux
// (x=left, y=right, z=top, w=bottom) is accelerated by the water-surface
// drop towards each 4-neighbour, clamped at zero, and finally rescaled so a
// cell never emits more water during one timestep than it actually holds
// (limiter after Mei et al.). 'outBuffer' is an unused debug output kept for
// interface compatibility. Caution with the timestep: 0.05 works well.
__global__ void kernelCalculateFlux(float4* out, float4* outBuffer, size_t width, unsigned int height)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // dt * A * g / l from the pipe model.
    float accel=dCoef_Timestep*dCoef_PipeCrossSection*dCoef_G/dCoef_PipeLength;
    float waterHere = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
    float surfaceHere = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height)+waterHere;
    // Water-surface (terrain + water) drop towards each 4-neighbour.
    float dropL = surfaceHere -tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f)/height)-tex2D(dTexWaterH,(x+0.5f-1.0f)/height,(y+0.5f)/height);
    float dropR = surfaceHere -tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f)/height)-tex2D(dTexWaterH,(x+0.5f+1.0f)/height,(y+0.5f)/height);
    float dropT = surfaceHere -tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f-1.0f)/height)-tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f-1.0f)/height);
    float dropB = surfaceHere -tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f+1.0f)/height)-tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f+1.0f)/height);
    // Previous flux, fetched once instead of component-by-component.
    float4 prevFlux = tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f)/height);
    float fluxL = max(0.0f, prevFlux.x + accel*dropL);
    float fluxR = max(0.0f, prevFlux.y + accel*dropR);
    float fluxT = max(0.0f, prevFlux.z + accel*dropT);
    float fluxB = max(0.0f, prevFlux.w + accel*dropB);
    float outflowVolume=(fluxL+fluxR+fluxT+fluxB)*dCoef_Timestep;
    float cellVolume = waterHere*dCoef_PipeLength*dCoef_PipeLength;
    // Slight damping is applied even when no limiting is required.
    float scale=.999f;
    if(outflowVolume>cellVolume)
    {
        // Mei's limiter: never drain more water than the cell holds.
        scale = min(1.0f, cellVolume/(outflowVolume));
    }
    out[y*width+x].x = fluxL*scale;
    out[y*width+x].y = fluxR*scale;
    out[y*width+x].z = fluxT*scale;
    out[y*width+x].w = fluxB*scale;
}
// Reads: dTexTerrainH, dTexHardness, dTexWaterVelocity, dTexWaterH,
// dTexThermalAmnt2Move.
// Splits the material scheduled for thermal erosion ('amount') into
// per-neighbour fluxes. Material only moves towards neighbours that lie below
// this cell with a slope exceeding the talus threshold; each receiving
// neighbour gets a share proportional to its height difference.
// 'out' holds cardinal fluxes (x=left, y=right, z=top, w=bottom); 'outDiag'
// the diagonal ones (x=(-1,-1), y=(+1,+1), z=(+1,-1), w=(-1,+1)).
// 'outDebugBuffer' is an unused debug output kept for interface compatibility.
__global__ void kernelThermalErosionFlux (float4* out, float4* outDiag, size_t width, unsigned int height, float4* outDebugBuffer)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    float4 talus;
    float4 talusD;
    float4 hDiff;
    float4 hDiffD;
    float4 outTemp;
    float4 outDiagTemp;
    float localH = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
    // Sum of all positive (downhill) height differences; normalises the
    // per-neighbour shares below.
    float total=0.0f;
    hDiff.x = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f)/height);
    total+=max(hDiff.x,0.0f);
    hDiff.y = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f)/height);
    total+=max(hDiff.y,0.0f);
    hDiff.z = localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f-1.0f)/height);
    total+=max(hDiff.z,0.0f);
    hDiff.w = localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f+1.0f)/height);
    total+=max(hDiff.w,0.0f);
    hDiffD.x = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f-1.0f)/height);
    total+=max(hDiffD.x,0.0f);
    hDiffD.y = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f+1.0f)/height);
    total+=max(hDiffD.y,0.0f);
    hDiffD.z = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f-1.0f)/height);
    total+=max(hDiffD.z,0.0f);
    hDiffD.w = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f+1.0f)/height);
    total+=max(hDiffD.w,0.0f);
    // Take hardness into account: inflating the normaliser shrinks every share.
    total = total/(1.0f-max(dCoef_HardnessMin,tex2D(dTexHardness,(x+0.5f)/height,(y+0.5f)/height)));
    talus=hDiff/dCoef_PipeLength;
    float diagDistance = sqrtf(dCoef_PipeLength*dCoef_PipeLength+dCoef_PipeLength*dCoef_PipeLength);
    talusD=hDiffD/diagDistance;
    float coef = dCoef_talusRatio+dCoef_talusBias;
    // Cheap chemical erosion: deep, fast-moving water lowers the talus
    // threshold so submerged slopes crumble more easily.
    float2 velocity = tex2D(dTexWaterVelocity,(x+0.5f)/height,(y+0.5f)/height);
    float waterH = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
    if(waterH>dCoef_DepthMax)
        coef = max(0.1f,coef-max(0.5f*dCoef_talusRatio,length(velocity)/3.5f));
    float amount = tex2D(dTexThermalAmnt2Move,(x+0.5f)/height,(y+0.5f)/height);
    // Cardinal neighbours.
    if(hDiff.x>0.0f && talus.x>coef)
        outTemp.x=amount*(hDiff.x)/total;
    else
        outTemp.x=0.0f;
    if(hDiff.y>0.0f && talus.y>coef)
        outTemp.y=amount*(hDiff.y)/total;
    else
        outTemp.y=0.0f;
    if(hDiff.z>0.0f && talus.z>coef)
        outTemp.z=amount*(hDiff.z)/total;
    else
        outTemp.z=0.0f;
    if(hDiff.w>0.0f && talus.w>coef)
        outTemp.w=amount*(hDiff.w)/total;
    else
        outTemp.w=0.0f;
    // Diagonal neighbours.
    // Bug fix: these guards previously tested the cardinal differences
    // (hDiff.*) instead of the diagonal ones (hDiffD.*), zeroing a diagonal
    // flux whenever the unrelated cardinal neighbour happened to be uphill
    // even though the diagonal slope exceeded the talus angle.
    if(hDiffD.x>0.0f && talusD.x>coef)
        outDiagTemp.x=amount*(hDiffD.x)/total;
    else
        outDiagTemp.x=0.0f;
    if(hDiffD.y>0.0f && talusD.y>coef)
        outDiagTemp.y=amount*(hDiffD.y)/total;
    else
        outDiagTemp.y=0.0f;
    if(hDiffD.z>0.0f && talusD.z>coef)
        outDiagTemp.z=amount*(hDiffD.z)/total;
    else
        outDiagTemp.z=0.0f;
    if(hDiffD.w>0.0f && talusD.w>coef)
        outDiagTemp.w=amount*(hDiffD.w)/total;
    else
        outDiagTemp.w=0.0f;
    out[y*width+x] = outTemp;
    outDiag[y*width+x] = outDiagTemp;
}
// Reads: dTexTerrainH, dTexThermalFlux, dTexThermalFluxDiag.
// Applies the thermal-erosion fluxes to the terrain: each cell gains the
// material its eight neighbours sent towards it and loses everything it sent
// out itself. Component meaning matches kernelThermalErosionFlux: cardinal
// x=left, y=right, z=top, w=bottom; diagonal x=(-1,-1), y=(+1,+1), z=(+1,-1),
// w=(-1,+1). 'outDebugBuffer' is an unused debug output kept for interface
// compatibility.
__global__ void kernelThermalDrop (float* out, size_t width, unsigned int height,float4* outDebugBuffer)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // Material arriving from the four cardinal neighbours.
    float gained=tex2D(dTexThermalFlux,(x+0.5f-1.0f)/height,(y+0.5f)/height).y;
    gained+=tex2D(dTexThermalFlux,(x+0.5f+1.0f)/height,(y+0.5f)/height).x;
    gained+=tex2D(dTexThermalFlux,(x+0.5f)/height,(y+0.5f-1.0f)/height).w;
    gained+=tex2D(dTexThermalFlux,(x+0.5f)/height,(y+0.5f+1.0f)/height).z;
    // Material this cell sent to its cardinal neighbours (single fetch,
    // components subtracted in the original order to keep rounding identical).
    float4 sentCardinal = tex2D(dTexThermalFlux,(x+0.5f)/height,(y+0.5f)/height);
    gained-=sentCardinal.x;
    gained-=sentCardinal.y;
    gained-=sentCardinal.z;
    gained-=sentCardinal.w;
    // Material arriving from the four diagonal neighbours.
    gained+=tex2D(dTexThermalFluxDiag,(x+0.5f-1.0f)/height,(y+0.5f-1.0f)/height).y;
    gained+=tex2D(dTexThermalFluxDiag,(x+0.5f+1.0f)/height,(y+0.5f+1.0f)/height).x;
    gained+=tex2D(dTexThermalFluxDiag,(x+0.5f-1.0f)/height,(y+0.5f+1.0f)/height).z;
    gained+=tex2D(dTexThermalFluxDiag,(x+0.5f+1.0f)/height,(y+0.5f-1.0f)/height).w;
    // Material this cell sent to its diagonal neighbours.
    float4 sentDiag = tex2D(dTexThermalFluxDiag,(x+0.5f)/height,(y+0.5f)/height);
    gained-=sentDiag.x;
    gained-=sentDiag.y;
    gained-=sentDiag.z;
    gained-=sentDiag.w;
    out[y*width+x] = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) + gained;
}
// Reads: dTexWaterFlux, dTexWaterH, dTexTerrainH.
// Applies the water fluxes: writes the per-cell velocity field, the updated
// water height (volume balance) and the sediment transport capacity.
// 'outFlux' is unused; 'outBuffer' only receives the speed for debugging.
__global__ void kernelFlow (float2* outVelocity,float* outSedCap,float* outWH, float4* outFlux,float4* outBuffer, size_t width, unsigned int height)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // Outgoing flux of this cell and the matching incoming components from the
    // four neighbours (x=from left, y=from right, z=from top, w=from bottom).
    float4 fluxOut = tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f)/height);
    float4 fluxIn;
    fluxIn.x = tex2D(dTexWaterFlux,(x+0.5f-1.0f)/height,(y+0.5f)/height).y;
    fluxIn.y = tex2D(dTexWaterFlux,(x+0.5f+1.0f)/height,(y+0.5f)/height).x;
    fluxIn.z = tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f-1.0f)/height).w;
    fluxIn.w = tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f+1.0f)/height).z;
    float netInflow = fluxIn.x + fluxIn.y + fluxIn.z + fluxIn.w -
        fluxOut.x - fluxOut.y - fluxOut.z - fluxOut.w;
    // Averaged horizontal/vertical throughput gives the velocity estimate.
    float2 flow;
    flow.x=(fluxIn.x-fluxIn.y-fluxOut.x+fluxOut.y)*0.5f;
    flow.y=(fluxIn.z-fluxIn.w-fluxOut.z+fluxOut.w)*0.5f;
    outVelocity[y*width+x] = flow;
    float speed=length(flow);
    // New water height from the volume balance.
    float depth = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
    depth = depth + (netInflow*dCoef_Timestep)/(dCoef_PipeLength*dCoef_PipeLength);
    outWH[y*width+x] = depth;
    // Limiter that actually *increases* erosion as the water gets shallower.
    float shallowBoost = max(0.0f,1.0f-depth/dCoef_DepthMax);
    // Sample the terrain one cell downstream along the normalized flow
    // direction; near-zero flow is treated as stationary (this also guards
    // against the NaN that normalize() produces for a zero vector).
    float2 dir = normalize(flow);
    if (speed<0.2f) dir.x = dir.y = 0.0f;
    float terrainHere = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
    float drop = terrainHere - tex2D(dTexTerrainH,(x+0.5f+dir.x)/height,(y+0.5f+dir.y)/height);
    // Mildly sloped cells get a speed-dependent bonus drop.
    if (drop>-.5f && drop < 0.3f)drop+=min(speed*0.5f,0.5f*dCoef_PipeLength);
    // Capacity from the slope angle and the (boosted, capped) flow speed.
    outSedCap[y*width+x] = max(0.0f,0.1f+__sinf(atanf(drop/dCoef_PipeLength))*min(3.0f,speed*(shallowBoost+1.0f))*0.5f);
    outBuffer[y*width+x].w = speed;
}
// Reads: dTexSedimentAmount, dTexSedimentCapacity, dTexHardness,
// dTexTerrainH, dTexWaterH.
// Erosion/deposition step: dissolves terrain into suspended sediment when the
// water can carry more than it holds (softening the rock while doing so), and
// drops sediment back onto the terrain otherwise. Water height passes through
// unchanged; 'outBuffer' receives debug visualisation data.
__global__ void kernelErodeDepose(float* outHardness, float* outWH, float* outSedAmount, float* outTH, float4* outBuffer, size_t width, unsigned int height)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    float sedAmnt = tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height);
    float sedCap = tex2D(dTexSedimentCapacity,(x+0.5f)/height,(y+0.5f)/height);
    float hardness =tex2D(dTexHardness,(x+0.5f)/height,(y+0.5f)/height);
    // Positive factor -> erode terrain into sediment; negative -> deposit.
    float factor = dCoef_Timestep*(sedCap-sedAmnt);
    if(sedCap>sedAmnt)
    {
        // Erode: harder rock dissolves more slowly, and eroding softens it.
        factor *= dCoef_DissolveRate*(1.0f-max(dCoef_HardnessMin,hardness));
        outHardness[y*width+x] = max(dCoef_HardnessMin, hardness - dCoef_Timestep*dCoef_SoftenRate);
    }
    else
    {
        factor *= dCoef_SedimentDropRate;
        // Bug fix: the deposition branch never wrote the hardness output, so
        // the ping-pong target buffer silently kept a stale value from two
        // steps earlier. Carry the current hardness through unchanged.
        outHardness[y*width+x] = hardness;
    }
    // NOTE(review): hardness appears to be kept in [dCoef_HardnessMin, ~1] by
    // the branches above, which would make this reset unreachable -- confirm
    // before removing.
    if (hardness<-2.f)
    {
        outHardness[y*width+x] = .80f;
    }
    // Mass preservation: never erode more material than the terrain has.
    if (factor>0)factor = min(factor,tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height));
    outTH[y*width+x] = max(0.0f,tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) - factor);
    outSedAmount[y*width+x] = max(0.0f, tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height) + factor);
    // Water height is passed through unchanged by this step.
    outWH[y*width+x] = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
    // Debug buffer: eroded terrain (.x), terrain+water surface (.y),
    // suspended sediment before this step (.z).
    outBuffer[y*width+x].x = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) - factor;
    outBuffer[y*width+x].y = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) - factor + max(0.0f, tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height) + factor);
    outBuffer[y*width+x].z = sedAmnt;
}
//__global__ void kernelFixAdvectSediment(float* out, float amountToAdd, size_t width, unsigned int height)
//{
// unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
// unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
//
// out[y*width+x] = tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height) + amountToAdd*0.0f;
//}
// Reads: dTexWaterVelocity, dTexSedimentAmount.
// Semi-Lagrangian advection: samples the sediment field at the position the
// material came from, one timestep backwards along the velocity field.
// 'outDebug' is an unused debug output kept for interface compatibility.
__global__ void kernelAdvectSediment(float* out, float4* outDebug, size_t width, unsigned int height)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    float2 vel = tex2D(dTexWaterVelocity,(x+0.5f)/height,(y+0.5f)/height);
    // Backtracked displacement in cells (hoisted; identical expression trees
    // to the original per-axis computation).
    float dx = vel.x*dCoef_Timestep/dCoef_PipeLength;
    float dy = vel.y*dCoef_Timestep/dCoef_PipeLength;
    out[y*width+x] = tex2D(dTexSedimentAmount,(x+0.5f-dx)/height,(y+0.5f-dy)/height);
}
// Reads: dTexWaterVelocity, dTexSedimentAmount, dTexSedimentAmntAdvect.
// MacCormack-style correction pass: re-advects the already-advected sediment
// field forwards, uses its difference to the original field as the error
// estimate, applies half of it, and clamps the result to the four grid
// samples surrounding the backtracked source position for stability.
// 'outDebug' is an unused debug output kept for interface compatibility.
__global__ void kernelAdvectBackSediment(float* out, float4* outDebug, size_t width, unsigned int height)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    float2 vel = tex2D(dTexWaterVelocity,(x+0.5f)/height,(y+0.5f)/height);
    // Displacement in cells for this timestep (hoisted subexpressions).
    float dx = vel.x*dCoef_Timestep/dCoef_PipeLength;
    float dy = vel.y*dCoef_Timestep/dCoef_PipeLength;
    float error = tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height) -
        tex2D(dTexSedimentAmntAdvect,(x+0.5f+dx)/height,(y+0.5f+dy)/height);
    // The four grid samples around the backtracked source position.
    float4 corner;
    corner.x = tex2D(dTexSedimentAmount,(x+0.5f-ceilf(dx))/height,(y+0.5f-ceilf(dy))/height);
    corner.y = tex2D(dTexSedimentAmount,(x+0.5f-floorf(dx))/height,(y+0.5f-floorf(dy))/height);
    corner.z = tex2D(dTexSedimentAmount,(x+0.5f-floorf(dx))/height,(y+0.5f-ceilf(dy))/height);
    corner.w = tex2D(dTexSedimentAmount,(x+0.5f-ceilf(dx))/height,(y+0.5f-floorf(dy))/height);
    float lo = min( min(corner.x,corner.y),min(corner.w,corner.z));
    float hi = max( max(corner.x,corner.y),max(corner.w,corner.z));
    out[y*width+x] = max(lo,min(hi,tex2D(dTexSedimentAmntAdvect, (x+0.5f)/height,(y+0.5f)/height) + error*0.5f));
}
// DO NOT USE DEPRECATED!!!!!
//Might be useful for the BFECC model if i decide to try it
//Requires dTexWaterVelocity dTexSedimentAmount dTexSedimentAmntAdvect dTexSedimentAmntAdvectBack
//NOTE(review): dead code -- the two tex2D reads below are identical, so
//their difference is always 0.0f and `velocity` is never used; every output
//cell is written as zero. Kept only as a stub for a possible BFECC variant
//(see the deprecation warning above).
__global__ void kernelMoveSediment(float* out, float4* outDebug, size_t width, unsigned int height)
{
	unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
	unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
	float2 velocity = tex2D(dTexWaterVelocity,(x+0.5f)/height,(y+0.5f)/height);
	out[y*width+x] = (tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height)-tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height));
}
//Requires dTexTerrainH dTexHardness
//Computes, per cell, how much material thermal (talus) erosion may move
//this timestep: an erosion budget scaled by (1 - hardness), multiplied by
//the sum of positive height drops towards all 8 neighbors.
//The commented-out variant used the single largest drop instead of the sum.
__global__ void kernelThermalErosionAmnt (float* out,float4* outBuffer, size_t width, unsigned int height)
{
	unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
	unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
	float localH = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
	//Erosion budget: cell area * dt * rate, attenuated by hardness; the /2
	//halves the amount (presumably to split it between give and take --
	//TODO confirm against kernelThermalErosionFlux).
	float amountToMove = dCoef_PipeLength*dCoef_PipeLength*dCoef_Timestep*dCoef_ThermalErosionRate*(1.0f-tex2D(dTexHardness,(x+0.5f)/height,(y+0.5f)/height))/2.0f;
	//float4 hDiff;
	//float4 hDiffD;
	float hDiff=0.0f;
	/*hDiff.x = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f)/height);
	hDiff.y = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f)/height);
	hDiff.z = localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f-1.0f)/height);
	hDiff.w = localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f+1.0f)/height);
	hDiffD.x = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f-1.0f)/height);
	hDiffD.y = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f+1.0f)/height);
	hDiffD.z = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f-1.0f)/height);
	hDiffD.w = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f+1.0f)/height); */
	//Sum of positive height differences to the 4 axis neighbors and the 4
	//diagonal neighbors; uphill neighbors contribute nothing (max with 0).
	hDiff=max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f)/height)) +
		max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f)/height)) +
		max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f-1.0f)/height)) +
		max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f+1.0f)/height)) +
		max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f-1.0f)/height)) +
		max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f+1.0f)/height)) +
		max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f-1.0f)/height)) +
		max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f+1.0f)/height));
	/*float maxHeightDifference = max(
		max(max(hDiff.x,hDiff.y),max(hDiff.w,hDiff.w)),
		max(max(hDiffD.x,hDiffD.y),max(hDiffD.z,hDiffD.w))); */
	/*out[y*width+x]=amountToMove*abs(maxHeightDifference);*/
	out[y*width+x]=amountToMove*hDiff;
	//outBuffer[y*width+x].x = localH;
	//outBuffer[y*width+x].y = 1.0f-tex2D(dTexHardness,(x+0.5f)/height,(y+0.5f)/height);
	//outBuffer[y*width+x].z = abs(maxHeightDifference);
	//outBuffer[y*width+x].w = hDiffD.w;
}
//Requires dTexWaterH
//Removes a constant fraction of the standing water each step (exponential
//decay, clamped at zero so water can never go negative) and refreshes the
//terrain / water-surface heights in the render/debug buffer.
//Requires dTexWaterH AND dTexTerrainH to be bound (the original header
//comment listed only dTexWaterH, but dTexTerrainH is read below too).
__global__ void kernelEvaporate(float* outWater, float4* outBuffer, size_t width, unsigned int height)
{
	unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
	unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
	//1.0f (not 1.0): the old double literal silently promoted the whole
	//product to double precision on every cell.
	float result = max(tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height)*(1.0f-dCoef_EvaporationRate*dCoef_Timestep),0.0f);
	outBuffer[y*width+x].x = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);	//bare terrain height
	outBuffer[y*width+x].y = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) + result;	//water surface height
	outWater[y*width+x] = result;
}
//Swap pointers
//Swap pointers
//Exchanges the two pointers pointed at by ptrA and ptrB. This is the
//generic ping-pong buffer flip performed after every simulation pass.
inline void cuda_exchPtrs(void ** ptrA, void ** ptrB)
{
	void * saved = *ptrB;
	*ptrB = *ptrA;
	*ptrA = saved;
}
//Applies an interactive brush edit at world position (editX,editY).
//mode != 0 edits the water height field; mode == 0 edits the terrain height
//field. Both paths run kernelEditBuffer and then ping-pong the buffer pair.
//NOTE(review): grid = width/8 x height/8 with integer division -- if
//width/height are not multiples of 8 the trailing cells are never edited;
//confirm dimensions are always multiples of 8.
extern "C" void cuda_EditTerrain(float editX, float editY, float pointDistance, float editValue, unsigned int width, unsigned int height ,float maxDist, float dt, int mode )
{
	dim3 block(8, 8, 1);
	dim3 grid(width / block.x, height / block.y, 1);
	if (mode)
	{
		printf("edit A");
		hipLaunchKernelGGL(( kernelEditBuffer), dim3(grid), dim3(block), 0, 0, dWaterH0_ptr, dWaterH1_ptr, editX, editY, pointDistance, editValue, width, height , maxDist, dt );
		cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
	}
	else
	{
		printf("edit B");
		hipLaunchKernelGGL(( kernelEditBuffer), dim3(grid), dim3(block), 0, 0, dTerrainH0_ptr, dTerrainH1_ptr, editX, editY, pointDistance, editValue, width, height , maxDist, dt );
		cuda_exchPtrs((void **)&dTerrainH1_ptr,(void **)&dTerrainH0_ptr);
		/*kernelEditBuffer<<< grid, block>>>(dSedimentAmount0_ptr, dSedimentAmount1_ptr, editX, editY, pointDistance, editValue, width, height , maxDist, dt );
		cuda_exchPtrs((void **)&dSedimentAmount0_ptr,(void **)&dSedimentAmount1_ptr);*/
	}
}
//Rain scheduler state used by cuda_Simulate: rain is on for the first 330
//ticks of each ~380-tick cycle, then off until the counter resets.
int rainCount=0;
int rain=0;
//Host-side scratch buffer hardcoded to 512x512. Not referenced anywhere in
//the code visible here -- presumably for debugging readback; TODO confirm.
float sedimentArray[512*512];
//Runs one full erosion timestep: rain -> flux -> flow/velocity ->
//erode/depose -> sediment advection -> thermal erosion -> evaporation.
//Each pass binds its input textures, launches one kernel, synchronizes,
//unbinds, then ping-pongs buffer pointers so the "0" buffer always holds
//the newest data.
//NOTE(review): every texture bind hardcodes 512x512; the width/height
//parameters only size the launch grid. Verify before supporting resize.
//NOTE(review): ERR captures hipGetLastError() repeatedly but is never
//inspected -- errors are silently dropped.
extern "C" void cuda_Simulate(float4* heights, unsigned int width, unsigned int height, float dt, float *in,float*in2)
{
	//Crude rain scheduler: rain during ticks 1..329, dry 330..380, then the
	//counter resets (with one rainy tick at >380).
	rainCount++;
	if(rainCount<330)
		rain=1;
	else rain=0;
	if(rainCount>380)
	{rain=1;rainCount=0;}
	dim3 block(32, 2, 1);
	dim3 grid(width / block.x, height / block.y, 1);
	hipError_t ERR;
	//Inc Water
	hipBindTexture2D((size_t *)&dTexWaterHOffset,dTexWaterHptr,(void *)dWaterH0_ptr,&texFloatChannelDesc,512,512,hWaterH0_pitch);
	hipBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate0_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);
	hipLaunchKernelGGL(( kernelIncWater), dim3(grid), dim3(block), 0, 0, dWaterH1_ptr,heights,width,height,rain);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexWaterHptr);
	hipUnbindTexture(dTexWaterRainRatePtr);
	cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
	ERR = hipGetLastError();
	//Calc Flux
	hipBindTexture2D((size_t *)&dTexTerrainHOffset,dTexTerrainHptr,(void *)dTerrainH0_ptr,&texFloatChannelDesc,512,512,hTerrainH0_pitch);
	hipBindTexture2D((size_t *)&dTexWaterHOffset,dTexWaterHptr,(void *)dWaterH0_ptr,&texFloatChannelDesc,512,512,hWaterH0_pitch);
	hipBindTexture2D((size_t *)&dTexWaterFluxOffset,dTexWaterFluxPtr,(void *)dWaterFlux0_ptr,&texFloat4ChannelDesc,512,512,hWaterFlux_pitch);
	hipLaunchKernelGGL(( kernelCalculateFlux), dim3(grid),dim3(block), 0, 0, dWaterFlux1_ptr,
						heights,
						width,
						height);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexWaterFluxPtr);
	cuda_exchPtrs((void **)&dWaterFlux0_ptr,(void **)&dWaterFlux1_ptr);
	ERR = hipGetLastError();
	//hipUnbindTexture(dTexTerrainHptr);//barrier
	//hipUnbindTexture(dTexWaterHptr);//barrier
	//Calc Errode Amnt
	//(dTexTerrainH and dTexWaterH stay bound from the previous pass.)
	hipBindTexture2D( (size_t *)&dTexHardnessOffset,
					dTexHardnessPtr,
					(void *)dHardness0_ptr,
					&texFloatChannelDesc,
					512,512,
					hHardness0_pitch);
	hipLaunchKernelGGL(( kernelThermalErosionAmnt), dim3(grid),dim3(block), 0, 0, dThermalAmount2Move_ptr,
							heights,
							width,
							height);
	hipDeviceSynchronize();
	ERR = hipGetLastError();
	//hipUnbindTexture(dTexHardnessPtr);//barrier
	//Calculate Flow
	hipBindTexture2D( (size_t *)&dTexWaterFluxOffset,
					dTexWaterFluxPtr,
					(void *)dWaterFlux0_ptr,
					&texFloat4ChannelDesc,
					512,512,
					hWaterFlux_pitch);
	hipLaunchKernelGGL(( kernelFlow), dim3(grid),dim3(block), 0, 0, dWaterVelocity_ptr,
				dSedimentCapacity_ptr,
				dWaterH1_ptr,
				dWaterFlux1_ptr,
				heights,
				width,
				height);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexWaterHptr);
	hipUnbindTexture(dTexWaterFluxPtr);
	cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
	//cuda_exchPtrs((void **)&dWaterFlux0_ptr,(void **)&dWaterFlux1_ptr);
	ERR = hipGetLastError();
	//hipUnbindTexture(dTexTerrainHptr);//barrier
	//hipUnbindTexture(dTexHardnessPtr);//barrier
	//Calculate ErodeDepose
	hipBindTexture2D(
		(size_t *)&dTexWaterHOffset,
		dTexWaterHptr,
		(void *)dWaterH0_ptr,
		&texFloatChannelDesc,
		512, 512,
		hWaterH0_pitch);
	hipBindTexture2D(
		(size_t *)&dTexSedimentAmountOffset,
		dTexSedimentAmountPtr,
		(void *)dSedimentAmount0_ptr,
		&texFloatChannelDesc,
		512, 512,
		hSedimentAmount_pitch);
	hipBindTexture2D(
		(size_t *)&dTexSedimentCapacityOffset,
		dTexSedimentCapacityPtr,
		(void *)dSedimentCapacity_ptr,
		&texFloatChannelDesc,
		512,512,
		hSedimentCapacity_pitch);
	hipLaunchKernelGGL(( kernelErodeDepose), dim3(grid),dim3(block), 0, 0, dHardness1_ptr,dWaterH1_ptr,dSedimentAmount1_ptr,dTerrainH1_ptr,heights,width, height);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexWaterHptr);
	hipUnbindTexture(dTexHardnessPtr);
	hipUnbindTexture(dTexTerrainHptr);
	hipUnbindTexture(dTexSedimentAmountPtr);
	hipUnbindTexture(dTexSedimentCapacityPtr);
	cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
	cuda_exchPtrs((void **)&dHardness0_ptr,(void **)&dHardness1_ptr);
	cuda_exchPtrs((void **)&dTerrainH0_ptr,(void **)&dTerrainH1_ptr);
	cuda_exchPtrs((void **)&dSedimentAmount0_ptr,(void **)&dSedimentAmount1_ptr);
//	ERR = hipGetLastError();
	//Move Sedmient
	hipBindTexture2D(
		(size_t *)&dTexWaterHOffset,
		dTexWaterHptr,
		(void *)dWaterH0_ptr,
		&texFloatChannelDesc,
		512, 512,
		hWaterH0_pitch);
	hipBindTexture2D(
		(size_t *)&dTexSedimentAmountOffset,
		dTexSedimentAmountPtr,
		(void *)dSedimentAmount0_ptr,
		&texFloatChannelDesc,
		512, 512,
		hSedimentAmount_pitch);
	hipBindTexture2D(
		(size_t *)&dTexWaterVelocityOffset,
		dTexWaterVelocityPtr,
		(void *)dWaterVelocity_ptr,
		&texFloat2ChannelDesc,
		512, 512,
		hWaterVelocity_pitch);
	hipLaunchKernelGGL(( kernelAdvectSediment), dim3(grid),dim3(block), 0, 0, dSedimentAmntAdvect_ptr, heights, width, height);
	hipDeviceSynchronize();
	hipBindTexture2D(
		(size_t *)&dTexSedimentAmntAdvectOffset,
		dTexSedimentAmntAdvectPtr,
		(void *)dSedimentAmntAdvect_ptr,
		&texFloatChannelDesc,
		512, 512,
		hSedimentAmount_pitch);
	hipLaunchKernelGGL(( kernelAdvectBackSediment), dim3(grid),dim3(block), 0, 0, dSedimentAmount1_ptr, heights, width, height);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexSedimentAmountPtr);
	hipUnbindTexture(dTexSedimentAmntAdvectPtr);
	cuda_exchPtrs((void **)&dSedimentAmount0_ptr,(void **)&dSedimentAmount1_ptr);
	ERR = hipGetLastError();
	//Errode thermally
	hipBindTexture2D(
		(size_t *)&dTexHardnessOffset,
		dTexHardnessPtr,
		(void *)dHardness0_ptr,
		&texFloatChannelDesc,
		512, 512,
		hHardness0_pitch);
	hipBindTexture2D(
		(size_t *)&dTexThermalAmnt2MoveOffset,
		dTexThermalAmnt2MovePtr,
		(void *)dThermalAmount2Move_ptr,
		&texFloatChannelDesc,
		512, 512,
		hThermalAmount2Move_pitch);
	hipBindTexture2D(
		(size_t *)&dTexTerrainHOffset,
		dTexTerrainHptr,
		(void *)dTerrainH0_ptr,
		&texFloatChannelDesc,
		512, 512,
		hTerrainH0_pitch);
	hipLaunchKernelGGL(( kernelThermalErosionFlux), dim3(grid),dim3(block), 0, 0, dThermalFlux_ptr,dThermalFluxDiag_ptr,width,height, heights);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexThermalAmnt2MovePtr);
	hipUnbindTexture(dTexHardnessPtr);
	hipUnbindTexture(dTexWaterVelocityPtr);
	ERR = hipGetLastError();
//	//Drop Thermal Eroded
	hipBindTexture2D(
		(size_t *)&dTexThermalFluxOffset,
		dTexThermalFluxPtr,
		(void *)dThermalFlux_ptr,
		&texFloat4ChannelDesc,
		512,512,
		hThermalFlux_pitch);
	hipBindTexture2D(
		(size_t *)&dTexThermalFluxDiagOffset,
		dTexThermalFluxDiagPtr,
		(void *)dThermalFluxDiag_ptr,
		&texFloat4ChannelDesc,
		512,
		512,
		hThermalFluxDiag_pitch);
	hipLaunchKernelGGL(( kernelThermalDrop), dim3(grid),dim3(block), 0, 0, dTerrainH1_ptr,width,height,heights);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexTerrainHptr);
	hipUnbindTexture(dTexThermalFluxPtr);
	hipUnbindTexture(dTexThermalFluxDiagPtr);
	cuda_exchPtrs((void **)&dTerrainH0_ptr,(void **)&dTerrainH1_ptr);
	//ERR = hipGetLastError();
	//Evaporate
	//(kernelEvaporate also reads dTexWaterH; it is still bound to
	//dWaterH0_ptr from the Move Sediment pass above, and no water swap has
	//happened since, so the binding is current -- fragile but correct.)
	hipBindTexture2D(
		(size_t *)&dTexTerrainHOffset,
		dTexTerrainHptr,
		(void *)dTerrainH0_ptr,
		&texFloatChannelDesc,
		512, 512,
		hTerrainH0_pitch);
	//hipBindTexture2D((size_t *)&dTexWaterHOffset,dTexWaterHptr,(void *)dWaterH0_ptr,&texFloatChannelDesc,512,512,hWaterH0_pitch);
	hipLaunchKernelGGL(( kernelEvaporate), dim3(grid),dim3(block), 0, 0, dWaterH1_ptr, heights,width, height);
	hipDeviceSynchronize();
	hipUnbindTexture(dTexWaterHptr);
	hipUnbindTexture(dTexTerrainHptr);
	cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
	ERR = hipGetLastError();
	//hipBindTexture2D((size_t *)&dTexHardnessOffset,dTexTerrainHptr,(void *)dTerrainH0_ptr,&texFloatChannelDesc,512,512,hTerrainH0_pitch);
//	hipBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate0_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);//bound=1;
//	kernelTest<<< grid, block>>>(heights, width, height, dt, dTerrainH1_ptr, dWaterRainRate1_ptr);
//	hipUnbindTexture(dTexTerrainHptr);
//	hipUnbindTexture(dTexWaterRainRatePtr);
//	cuda_exchPtrs((void **)&dTerrainH0_ptr,(void **)&dTerrainH1_ptr);
//	cuda_exchPtrs((void **)&dWaterRainRate0_ptr,(void **)&dWaterRainRate1_ptr);
	//tick^=1;
}
//extern "C" void cuda_Simulate(float4* heights, unsigned int width, unsigned int height, float dt, float *in,float*in2)
//{
//
// dim3 block(8, 8, 1);
// dim3 grid(width / block.x, height / block.y, 1);
// if(!tick)
// {
// hipBindTexture2D((size_t *)&dTexHardnessOffset,dTexTerrainHptr,(void *)dTerrainH0_ptr,&texFloatChannelDesc,512,512,hTerrainH0_pitch);
// hipBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate0_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);//bound=1;
// kernelTest<<< grid, block>>>(heights, width, height, dt, dTerrainH1_ptr, dWaterRainRate1_ptr);
// hipUnbindTexture(dTexTerrainHptr);
// hipUnbindTexture(dTexWaterRainRatePtr);
// }
// else
// {
// hipBindTexture2D((size_t *)&dTexHardnessOffset,dTexTerrainHptr,(void *)dTerrainH1_ptr,&texFloatChannelDesc,512,512,hTerrainH1_pitch);
// hipBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate1_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);
// kernelTest<<< grid, block>>>(heights, width, height, dt, dTerrainH0_ptr,dWaterRainRate0_ptr);
// hipUnbindTexture(dTexTerrainHptr);
// hipUnbindTexture(dTexWaterRainRatePtr);
// }
// tick^=1;
//}
//extern "C" void cuda_Simulate(int* heights, unsigned int width, unsigned int height, float dt)
//{
// //printf("\nCuda Start");
//
// // execute the kernel
// dim3 block(8, 8, 1);
// dim3 grid(width / block.x, height / block.y, 1);
// kernelTest<<< grid, block>>>(heights, width, height, dt);
//
//
// //printf("\nCuda End");
//}
| b581b21001deca1b60a5f8c8c87cc3e679e67ecb.cu |
//The correct one
#include <stdio.h>
#include "cutil_math.h"
//TICK VERY IMPORTANT, used for swapping the ping pong
int tick=0;
//All cuda stuff goes here
//Global Data Pointers
//Each simulated field keeps a 0/1 buffer pair that is ping-ponged between
//passes; the "0" buffer always holds the most recent data.
//NOTE(review): these pointers are filled in by host-side cudaMallocPitch
//and passed to kernels as arguments; the __device__ qualifier makes them
//device symbols, so taking their address from host code relies on the
//compiler's host shadow copy -- confirm this is intended.
//Water
__device__ float *dWaterRainRate0_ptr;
__device__ float *dWaterRainRate1_ptr;
__device__ float *dWaterH0_ptr;
__device__ float *dWaterH1_ptr;
__device__ float4 *dWaterFlux0_ptr;
__device__ float4 *dWaterFlux1_ptr;
__device__ float2 *dWaterVelocity_ptr;
//Sediment
__device__ float *dSedimentCapacity_ptr;
__device__ float *dSedimentAmount0_ptr;
__device__ float *dSedimentAmount1_ptr;
__device__ float *dSedimentAmntAdvect_ptr;
__device__ float *dSedimentAmntAdvectBack_ptr;
//Terrain
__device__ float *dTerrainH0_ptr;
__device__ float *dTerrainH1_ptr;
__device__ float *dHardness0_ptr;
__device__ float *dHardness1_ptr;
//Thermal Erosion
__device__ float *dThermalAmount2Move_ptr;
__device__ float4 *dThermalFlux_ptr;
__device__ float4 *dThermalFluxDiag_ptr;
//Pitches STORED ON HOST!!!!!!
//Row pitches (in bytes) returned by cudaMallocPitch for each buffer above.
size_t hWaterRainRate0_pitch=0;
size_t hWaterRainRate1_pitch=0;
size_t hWaterH0_pitch=0;
size_t hWaterH1_pitch=0;
size_t hWaterFlux_pitch=0;
size_t hWaterVelocity_pitch=0;
//Sediment
size_t hSedimentCapacity_pitch=0;
size_t hSedimentAmount_pitch=0;
//Terrain
size_t hTerrainH0_pitch=0;
size_t hTerrainH1_pitch=0;
size_t hHardness0_pitch=0;
size_t hHardness1_pitch=0;
//Thermal Erosion
size_t hThermalAmount2Move_pitch=0;
size_t hThermalFlux_pitch=0;
size_t hThermalFluxDiag_pitch=0;
//Textures
//For each: texture, pointer to it, offset
//(the textureReference* is looked up once in cuda_Initialize and reused by
//the cudaBindTexture2D calls; the offset receives the bind offset, unused)
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexWaterRainRate;
textureReference const * dTexWaterRainRatePtr;
size_t dTexWaterRainRateOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexWaterH;
textureReference const *dTexWaterHptr;
size_t dTexWaterHOffset;
texture<float4, cudaTextureType2D, cudaReadModeElementType> dTexWaterFlux;
textureReference const *dTexWaterFluxPtr;
size_t dTexWaterFluxOffset;
texture<float2, cudaTextureType2D, cudaReadModeElementType> dTexWaterVelocity;
textureReference const *dTexWaterVelocityPtr;
size_t dTexWaterVelocityOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexSedimentCapacity;
textureReference const *dTexSedimentCapacityPtr;
size_t dTexSedimentCapacityOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexSedimentAmount;
textureReference const *dTexSedimentAmountPtr;
size_t dTexSedimentAmountOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexSedimentAmntAdvect;
textureReference const *dTexSedimentAmntAdvectPtr;
size_t dTexSedimentAmntAdvectOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexSedimentAmntAdvectBack;
textureReference const *dTexSedimentAmntAdvectBackPtr;
size_t dTexSedimentAmntAdvectBackOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexTerrainH;
textureReference const *dTexTerrainHptr;
size_t dTexTerrainHOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexHardness;
textureReference const *dTexHardnessPtr;
size_t dTexHardnessOffset;
texture<float, cudaTextureType2D, cudaReadModeElementType> dTexThermalAmnt2Move;
textureReference const *dTexThermalAmnt2MovePtr;
size_t dTexThermalAmnt2MoveOffset;
texture<float4, cudaTextureType2D, cudaReadModeElementType> dTexThermalFlux;
textureReference const *dTexThermalFluxPtr;
size_t dTexThermalFluxOffset;
texture<float4, cudaTextureType2D, cudaReadModeElementType> dTexThermalFluxDiag;
textureReference const *dTexThermalFluxDiagPtr;
size_t dTexThermalFluxDiagOffset;
//Texture Descriptors
struct cudaChannelFormatDesc texFloatChannelDesc;
struct cudaChannelFormatDesc texFloat2ChannelDesc;
struct cudaChannelFormatDesc texFloat4ChannelDesc;
//------------------------------------------------------------------------------------
//Constants. Defaults chosen from Balazs Jako's Paper
__constant__ float dCoef_Timestep = 0.02f;
__constant__ float dCoef_G = 9.81f;
__constant__ float dCoef_PipeCrossSection = 40.0f;
__constant__ float dCoef_PipeLength = 1.0f ;
__constant__ float dCoef_RainRate = 0.012f;
__constant__ float dCoef_talusRatio = 1.2f;
__constant__ float dCoef_talusCoef = 0.8f;
__constant__ float dCoef_talusBias = 0.1f;
__constant__ float dCoef_SedimentCapacityFactor = 1.0f;
__constant__ float dCoef_DepthMax = 10.0f;
__constant__ float dCoef_DissolveRate = 0.5f;
__constant__ float dCoef_SedimentDropRate = 1.0f;
__constant__ float dCoef_HardnessMin = 0.5f;
__constant__ float dCoef_SoftenRate = 5.0f;
__constant__ float dCoef_ThermalErosionRate = 0.15f;
__constant__ float dCoef_EvaporationRate = 0.015f;
//One-time setup: creates the channel descriptors, configures every texture
//reference (normalized coordinates, bilinear filtering, wrap addressing),
//resolves the textureReference handles, then allocates and zeroes all
//pitched device buffers at width x height.
//NOTE(review): cudaGetTextureReference by symbol-name *string* was
//deprecated in CUDA 5.0 and later removed -- this code targets an old
//toolkit; confirm before upgrading.
//NOTE(review): no return codes are checked; a failed allocation here only
//surfaces later as garbage simulation output.
extern "C" void cuda_Initialize(int width, int height)
{
	printf("\n\n\n %i %i", width, height);
	texFloatChannelDesc = cudaCreateChannelDesc<float1>();
	texFloat2ChannelDesc = cudaCreateChannelDesc<float2>();
	texFloat4ChannelDesc = cudaCreateChannelDesc<float4>();
	dTexWaterRainRate.normalized = true;
	dTexWaterRainRate.filterMode = cudaFilterModeLinear;
	dTexWaterRainRate.addressMode[0] = cudaAddressModeWrap;
	dTexWaterRainRate.addressMode[1] = cudaAddressModeWrap;
	dTexWaterH.normalized = true;
	dTexWaterH.filterMode = cudaFilterModeLinear;
	dTexWaterH.addressMode[0] = cudaAddressModeWrap;
	dTexWaterH.addressMode[1] = cudaAddressModeWrap;
	dTexWaterFlux.normalized = true;
	dTexWaterFlux.filterMode = cudaFilterModeLinear;
	dTexWaterFlux.addressMode[0] = cudaAddressModeWrap;
	dTexWaterFlux.addressMode[1] = cudaAddressModeWrap;
	dTexWaterVelocity.normalized = true;
	dTexWaterVelocity.filterMode = cudaFilterModeLinear;
	dTexWaterVelocity.addressMode[0] = cudaAddressModeWrap;
	dTexWaterVelocity.addressMode[1] = cudaAddressModeWrap;
	dTexSedimentCapacity.normalized = true;
	dTexSedimentCapacity.filterMode = cudaFilterModeLinear;
	dTexSedimentCapacity.addressMode[0] = cudaAddressModeWrap;
	dTexSedimentCapacity.addressMode[1] = cudaAddressModeWrap;
	dTexSedimentAmount.normalized = true;
	dTexSedimentAmount.filterMode = cudaFilterModeLinear;
	dTexSedimentAmount.addressMode[0] = cudaAddressModeWrap;
	dTexSedimentAmount.addressMode[1] = cudaAddressModeWrap;
	dTexSedimentAmntAdvect.normalized = true;
	dTexSedimentAmntAdvect.filterMode = cudaFilterModeLinear;
	dTexSedimentAmntAdvect.addressMode[0] = cudaAddressModeWrap;
	dTexSedimentAmntAdvect.addressMode[1] = cudaAddressModeWrap;
	dTexSedimentAmntAdvectBack.normalized = true;
	dTexSedimentAmntAdvectBack.filterMode = cudaFilterModeLinear;
	dTexSedimentAmntAdvectBack.addressMode[0] = cudaAddressModeWrap;
	dTexSedimentAmntAdvectBack.addressMode[1] = cudaAddressModeWrap;
	dTexTerrainH.normalized = true;
	dTexTerrainH.filterMode = cudaFilterModeLinear;
	dTexTerrainH.addressMode[0] = cudaAddressModeWrap;
	dTexTerrainH.addressMode[1] = cudaAddressModeWrap;
	dTexHardness.normalized = true;
	dTexHardness.filterMode = cudaFilterModeLinear;
	dTexHardness.addressMode[0] = cudaAddressModeWrap;
	dTexHardness.addressMode[1] = cudaAddressModeWrap;
	dTexThermalAmnt2Move.normalized = true;
	dTexThermalAmnt2Move.filterMode = cudaFilterModeLinear;
	dTexThermalAmnt2Move.addressMode[0] = cudaAddressModeWrap;
	dTexThermalAmnt2Move.addressMode[1] = cudaAddressModeWrap;
	dTexThermalFlux.normalized = true;
	dTexThermalFlux.filterMode = cudaFilterModeLinear;
	dTexThermalFlux.addressMode[0] = cudaAddressModeWrap;
	dTexThermalFlux.addressMode[1] = cudaAddressModeWrap;
	dTexThermalFluxDiag.normalized = true;
	dTexThermalFluxDiag.filterMode = cudaFilterModeLinear;
	dTexThermalFluxDiag.addressMode[0] = cudaAddressModeWrap;
	dTexThermalFluxDiag.addressMode[1] = cudaAddressModeWrap;
	//Resolve the textureReference handles used by cudaBindTexture2D later.
	cudaGetTextureReference(&dTexWaterRainRatePtr, "dTexWaterRainRate");
	cudaGetTextureReference(&dTexWaterHptr, "dTexWaterH");
	cudaGetTextureReference(&dTexWaterFluxPtr, "dTexWaterFlux");
	cudaGetTextureReference(&dTexWaterVelocityPtr, "dTexWaterVelocity");
	cudaGetTextureReference(&dTexSedimentCapacityPtr, "dTexSedimentCapacity");
	cudaGetTextureReference(&dTexSedimentAmountPtr, "dTexSedimentAmount");
	cudaGetTextureReference(&dTexSedimentAmntAdvectPtr, "dTexSedimentAmntAdvect");
	cudaGetTextureReference(&dTexSedimentAmntAdvectBackPtr, "dTexSedimentAmntAdvectBack");
	cudaGetTextureReference(&dTexTerrainHptr, "dTexTerrainH");
	cudaGetTextureReference(&dTexHardnessPtr, "dTexHardness");
	cudaGetTextureReference(&dTexThermalAmnt2MovePtr, "dTexThermalAmnt2Move");
	cudaGetTextureReference(&dTexThermalFluxPtr, "dTexThermalFlux");
	cudaGetTextureReference(&dTexThermalFluxDiagPtr, "dTexThermalFluxDiag");
	//Allocate Device Memory
	//NOTE(review): dWaterFlux0/1 share hWaterFlux_pitch and the four
	//sediment buffers share hSedimentAmount_pitch -- this assumes equal row
	//sizes always yield equal pitches, which holds in practice but is not
	//guaranteed by the API.
	cudaMallocPitch((void**)&dWaterRainRate0_ptr,&hWaterRainRate0_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dWaterRainRate1_ptr,&hWaterRainRate1_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dWaterH0_ptr,&hWaterH0_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dWaterH1_ptr,&hWaterH1_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dWaterFlux0_ptr,&hWaterFlux_pitch,(width*sizeof(float4)),height);
	cudaMallocPitch((void**)&dWaterFlux1_ptr,&hWaterFlux_pitch,(width*sizeof(float4)),height);
	cudaMallocPitch((void**)&dWaterVelocity_ptr,&hWaterVelocity_pitch,(width*sizeof(float2)),height);
	printf("\n\n\n %i %i", width, height);
	cudaMallocPitch((void**)&dSedimentCapacity_ptr,&hSedimentCapacity_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dSedimentAmount0_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dSedimentAmount1_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dSedimentAmntAdvect_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dSedimentAmntAdvectBack_ptr,&hSedimentAmount_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dTerrainH0_ptr,&hTerrainH0_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dTerrainH1_ptr,&hTerrainH1_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dHardness0_ptr,&hHardness0_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dHardness1_ptr,&hHardness1_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dThermalAmount2Move_ptr,&hThermalAmount2Move_pitch,(width*sizeof(float)),height);
	cudaMallocPitch((void**)&dThermalFlux_ptr,&hThermalFlux_pitch,(width*sizeof(float4)),height);
	cudaMallocPitch((void**)&dThermalFluxDiag_ptr,&hThermalFluxDiag_pitch,(width*sizeof(float4)),height);
	//Memset Device Memory
	cudaMemset2D(dWaterRainRate0_ptr,hWaterRainRate0_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dWaterRainRate1_ptr,hWaterRainRate1_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dWaterH0_ptr,hWaterH0_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dWaterH1_ptr,hWaterH1_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dWaterFlux0_ptr,hWaterFlux_pitch,0,width*sizeof(float4),height);
	cudaMemset2D(dWaterFlux1_ptr,hWaterFlux_pitch,0,width*sizeof(float4),height);
	cudaMemset2D(dWaterVelocity_ptr,hWaterVelocity_pitch,0,width*sizeof(float2),height);
	cudaMemset2D(dSedimentCapacity_ptr,hSedimentCapacity_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dSedimentAmount0_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dSedimentAmount1_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dSedimentAmntAdvect_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dSedimentAmntAdvectBack_ptr,hSedimentAmount_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dTerrainH0_ptr,hTerrainH0_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dTerrainH1_ptr,hTerrainH1_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dHardness0_ptr,hHardness0_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dHardness1_ptr,hHardness1_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dThermalAmount2Move_ptr,hThermalAmount2Move_pitch,0,width*sizeof(float),height);
	cudaMemset2D(dThermalFlux_ptr,hThermalFlux_pitch,0,width*sizeof(float4),height);
	cudaMemset2D(dThermalFluxDiag_ptr,hThermalFluxDiag_pitch,0,width*sizeof(float4),height);
}
//USELESS
//Uploads a host heightfield into the front terrain buffer.
//`width` is the row size in bytes (cudaMemcpy2D convention); returns the
//cudaError_t status as an int (cudaSuccess == 0).
extern "C" int cuda_SetTerrainHeight(void * src, size_t srcPitch,size_t width, size_t height)
{
	int status = (int)cudaMemcpy2D(dTerrainH0_ptr, hTerrainH0_pitch, src, srcPitch, width, height, cudaMemcpyHostToDevice);
	return status;
}
//USELESS
//Uploads a host hardness map into the front hardness buffer.
//`width` is the row size in bytes (cudaMemcpy2D convention); returns the
//cudaError_t status as an int (cudaSuccess == 0).
extern "C" int cuda_SetHardness(void * src, size_t srcPitch,size_t width, size_t height)
{
	int status = (int)cudaMemcpy2D(dHardness0_ptr, hHardness0_pitch, src, srcPitch, width, height, cudaMemcpyHostToDevice);
	return status;
}
//USELESS
//Uploads a host rain-rate map into the front rain-rate buffer.
//`width` is the row size in bytes (cudaMemcpy2D convention); returns the
//cudaError_t status as an int (cudaSuccess == 0).
extern "C" int cuda_SetRainRate(void * src, size_t srcPitch,size_t width, size_t height)
{
	int status = (int)cudaMemcpy2D(dWaterRainRate0_ptr, hWaterRainRate0_pitch, src, srcPitch, width, height, cudaMemcpyHostToDevice);
	return status;
}
//Raw device-pointer accessors for the host application. Each one hands back
//the current front ("0") buffer of the corresponding simulation field.
extern "C" float* cuda_fetchTerrainHptr()
{
	float* terrain = dTerrainH0_ptr;
	return terrain;
}
extern "C" float* cuda_fetchWaterHptr()
{
	float* water = dWaterH0_ptr;
	return water;
}
extern "C" float* cuda_fetchRainRateptr()
{
	float* rainRate = dWaterRainRate0_ptr;
	return rainRate;
}
extern "C" float* cuda_fetchHardnessptr()
{
	float* hardness = dHardness0_ptr;
	return hardness;
}
//Velocity is stored as float2 but exposed to the caller as a flat float*.
extern "C" float* cuda_fetchWaterVelocityPtr()
{
	float* velocity = (float*)dWaterVelocity_ptr;
	return velocity;
}
extern "C" float* cuda_fetchSedimentAmountPtr()
{
	float* sediment = (float*)dSedimentAmount0_ptr;
	return sediment;
}
//Testing Kernel
//Debug kernel: writes an animated ripple pattern into heights.x/.w and the
//sampled terrain-height / rain-rate textures into .z/.y (and the `in`/`in2`
//readback buffers) so texture bindings can be inspected visually.
//Requires dTexTerrainH and dTexWaterRainRate to be bound.
__global__ void kernelTest(float4 *heights, size_t width, unsigned int height, float dt, float*in, float*in2)
{
	unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
	unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
	float a = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
	float b = tex2D(dTexWaterRainRate,(x+0.5f)/height,(y+0.5f)/height);
	heights[y*width+x].x=sinf(x*0.1f+dt)*cosf(y*0.1f+dt);
	heights[y*width+x].y = b;
	heights[y*width+x].z = a;
	//10.0f (not 10.0): the double literal forced a double-precision multiply.
	heights[y*width+x].w=cosf(y*0.1f+dt)*10.0f;
	in[y*width+x] = a;
	in2[y*width+x] = b;
}
//Applies a user brush edit to a scalar field: adds editValue*dt, smoothly
//faded out towards maxDist around the world-space point (editX,editY),
//reading from `in` and writing to `out`. The result is clamped at zero so
//heights/water can never go negative.
__global__ void kernelEditBuffer(float* in, float * out, float editX, float editY, float pointDistance, float editValue, size_t width, unsigned int height ,float maxDist, float dt )
{
	unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
	unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
	//World-space position of this cell (grid centred on the origin;
	//square grid assumed, hence `height` on both axes).
	float2 pos;
	pos.x = ((float)x-0.5f*height+0.5f)*pointDistance;
	pos.y = ((float)y-0.5f*height+0.5f)*pointDistance;
	float d = sqrtf((pos.x-editX)*(pos.x-editX) + (pos.y-editY)*(pos.y-editY));
	float amount=0.0f;
	if(d<maxDist)
	{
		//Smooth radial falloff. 1.0f (not 1.0) keeps the expression in
		//single precision; the redundant *1.0f factor is gone.
		amount = editValue*(1.0f-smoothstep(0.0f,maxDist,d))*dt;
	}
	out[y*width+x] = max(in[y*width+x]+ amount,0.0f);
}
//Requires dTexWaterH dTexWaterRainRate
//Adds rainfall to the water height field: when `rain` is nonzero each cell
//gains dt * RainRate * its rain-rate texture value (clamped non-negative);
//otherwise the water height is copied through unchanged.
//Requires dTexWaterH and dTexWaterRainRate to be bound.
__global__ void kernelIncWater (float* out, float4* outBuffer, size_t width, unsigned int height,int rain)
{
	unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
	unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
	//out[y*width+x] = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height) + dCoef_Timestep*max(0.0f,dCoef_RainRate*min(tex2D(dTexWaterRainRate,(x+0.5f+sinf(mobileOffset*100.0f)*40.f)/height,(y+0.5f)/height)+mobileOffset*3.2f,1.0f));
	//out[y*width+x] = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height) + dCoef_Timestep*max(0.0f,dCoef_RainRate*tex2D(dTexWaterRainRate, (x+0.5f+50.f*__sinf(0.001f*(float)clock()))/height, (y+0.5f)/height));
	if(rain)
	{
		out[y*width+x] = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height) +
			dCoef_Timestep*max(0.0f,dCoef_RainRate*tex2D(dTexWaterRainRate, (x+0.5f)/height, (y+0.5f)/height));
	}
	else out[y*width+x] = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
	//outBuffer[y*width+x].x=tex2D(dTexWaterRainRate,(x+0.5f)/height,(y+0.5f)/height);
	//outBuffer[y*width+x].y=tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
	//outBuffer[y*width+x].z=tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height) + dCoef_Timestep*dCoef_RainRate*tex2D(dTexWaterRainRate,(x+0.5f)/height,(y+0.5f)/height);
	//outBuffer[y*width+x].w=0.0f;
}
//Requires dTexWaterH dTexTerrainH dTexWaterFlux
// Virtual-pipe flux update (Mei-style hydraulic erosion).
// One thread per cell; out[y*width+x] receives this cell's outflow flux toward
// the left/right/top/bottom neighbours in .x/.y/.z/.w respectively.
// Reads dTexWaterH, dTexTerrainH and the previous flux dTexWaterFlux.
// NOTE(review): both texture coordinates are divided by `height`, so this
// assumes a square (height x height) grid — confirm for non-square maps.
__global__ void kernelCalculateFlux(float4* out, float4* outBuffer, size_t width, unsigned int height)
{
unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
//x - left, y -right, z - top, w - bottom
// dt * pipe cross-section * g / pipe length: converts a head difference into
// a flux increment for one timestep.
float coefficients=dCoef_Timestep*dCoef_PipeCrossSection*dCoef_G/dCoef_PipeLength;
float localWaterH = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
// Hydraulic head of this cell = terrain height + water depth.
float totalLocalH = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height)+localWaterH;
// Head difference toward each of the 4 axis neighbours (positive = downhill).
float hDifferenceL = totalLocalH -tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f)/height)-tex2D(dTexWaterH,(x+0.5f-1.0f)/height,(y+0.5f)/height);
float hDifferenceR = totalLocalH -tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f)/height)-tex2D(dTexWaterH,(x+0.5f+1.0f)/height,(y+0.5f)/height);
float hDifferenceT = totalLocalH -tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f-1.0f)/height)-tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f-1.0f)/height);
float hDifferenceB = totalLocalH -tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f+1.0f)/height)-tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f+1.0f)/height);
//if (abs(hDifferenceL)<0.15f) hDifferenceL =0.0f;
//if (abs(hDifferenceR)<0.15f) hDifferenceR =0.0f;
//if (abs(hDifferenceT)<0.15f) hDifferenceT =0.0f;
//if (abs(hDifferenceB)<0.15f) hDifferenceB =0.0f;
//My Modification.
//Conserves the previous flux, if negative flux for a direction is necessary, subtracts no more than 0.01 THIS COULD BE A PROBLEM!
//Maybe I should subtract 0.01 only if the result would be negative?
//Has problems with paper formula for the factor. Doesnt allow timestep of 0.2. Which is possible with my formula for the factor
/*float fluxL = max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).x + max(-0.01, coefficients*hDifferenceL));
float fluxR = max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).y + max(-0.01, coefficients*hDifferenceR));
float fluxT = max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).z + max(-0.01, coefficients*hDifferenceT));
float fluxB = max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).w + max(-0.01, coefficients*hDifferenceB));*/
//My Modification. Version with ifs.
//Improved to allow diminishing flux, while preventing negative values and too quick decrementation
/*float fluxL = tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).x + coefficients*hDifferenceL;
if (fluxL<0.0) fluxL = max(0.0,tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).x-0.01);
float fluxR = tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).y + coefficients*hDifferenceR;
if (fluxR<0.0) fluxR = max(0.0,tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).y-0.01);
float fluxT = tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).z + coefficients*hDifferenceT;
if (fluxT<0.0) fluxT = max(0.0,tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).z-0.01);
float fluxB = tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).w + coefficients*hDifferenceB;
if (fluxB<0.0) fluxB = max(0.0,tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).w-0.01);*/
//My solution. Doesn't work with the new factor equation. Also results in wrong results!!!
/*float fluxL = max(tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).x,max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).x + coefficients*hDifferenceL));
float fluxR = max(tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).y,max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).y + coefficients*hDifferenceR));
float fluxT = max(tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).z,max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).z + coefficients*hDifferenceT));
float fluxB = max(tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).w,max(0.0, tex2D(dTexWaterFlux,(x+0.5)/height,(y+0.5)/height).w + coefficients*hDifferenceB));*/
//Original version. Best. Caution with the timestep. 0.05 works well
// New flux = previous flux + dt-scaled head difference, clamped non-negative
// (water only flows out toward a lower head).
float fluxL = max(0.0f, tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f)/height).x + coefficients*hDifferenceL);
float fluxR = max(0.0f, tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f)/height).y + coefficients*hDifferenceR);
float fluxT = max(0.0f, tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f)/height).z + coefficients*hDifferenceT);
float fluxB = max(0.0f, tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f)/height).w + coefficients*hDifferenceB);
// Volume that would leave the cell this step vs. the water actually present.
float totalFlux=(fluxL+fluxR+fluxT+fluxB)*dCoef_Timestep;
float localWaterVolume = localWaterH*dCoef_PipeLength*dCoef_PipeLength;
// Default damping factor; note it is applied even when no rescale is needed,
// so outflow is always reduced by 0.1%.
float factor=.999f;
if(totalFlux>localWaterVolume)
{
//Mei's formula for the factor
// Rescale so the cell cannot export more water than it holds.
factor = min(1.0f, localWaterVolume/(totalFlux));
//factor = (localWaterH*dCoef_Timestep/totalFlux);
}
out[y*width+x].x = fluxL*factor;
out[y*width+x].y = fluxR*factor;
out[y*width+x].z = fluxT*factor;
out[y*width+x].w = fluxB*factor;
//outBuffer[y*width+x].x = totalLocalH;
//outBuffer[y*width+x].y = fluxL*factor;
//outBuffer[y*width+x].z = fluxL*factor+fluxR*factor+fluxT*factor+fluxB*factor;
//outBuffer[y*width+x].w = localWaterH;
}
//Requires dTexTerrainH dTexHardness dTexThermalAmnt2Move
// Thermal erosion flux: distributes this cell's erodible amount
// (dTexThermalAmnt2Move) among its 4 axis neighbours (`out`) and 4 diagonal
// neighbours (`outDiag`), proportionally to the positive height drop toward
// each neighbour, but only where the slope exceeds the talus threshold.
// Also reads dTexTerrainH, dTexHardness, dTexWaterVelocity and dTexWaterH.
// NOTE(review): both texture coordinates divide by `height`, so a square
// (height x height) grid is assumed — confirm for non-square maps.
__global__ void kernelThermalErosionFlux (float4* out, float4* outDiag, size_t width, unsigned int height, float4* outDebugBuffer)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    float4 hDiff;   // height drop toward left/right/top/bottom in .x/.y/.z/.w
    float4 hDiffD;  // height drop toward the 4 diagonal neighbours
    float4 outTemp;
    float4 outDiagTemp;
    float localH = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
    // Sum of the positive drops over all 8 neighbours; normalizes the shares.
    float total=0.0f;
    hDiff.x = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f)/height);
    total+=max(hDiff.x,0.0f);
    hDiff.y = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f)/height);
    total+=max(hDiff.y,0.0f);
    hDiff.z = localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f-1.0f)/height);
    total+=max(hDiff.z,0.0f);
    hDiff.w = localH-tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f+1.0f)/height);
    total+=max(hDiff.w,0.0f);
    hDiffD.x = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f-1.0f)/height);
    total+=max(hDiffD.x,0.0f);
    hDiffD.y = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f+1.0f)/height);
    total+=max(hDiffD.y,0.0f);
    hDiffD.z = localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f-1.0f)/height);
    total+=max(hDiffD.z,0.0f);
    hDiffD.w = localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f+1.0f)/height);
    total+=max(hDiffD.w,0.0f);
    //Take hardness into account: harder rock inflates the denominator, which
    //shrinks every per-neighbour share below.
    total = total/(1.0f-max(dCoef_HardnessMin,tex2D(dTexHardness,(x+0.5f)/height,(y+0.5f)/height)));
    // Slopes toward axis neighbours (distance = pipe length) and diagonal
    // neighbours (distance = pipe length * sqrt(2)).
    float4 talus=hDiff/dCoef_PipeLength;
    float diagDistance = sqrtf(dCoef_PipeLength*dCoef_PipeLength+dCoef_PipeLength*dCoef_PipeLength);
    float4 talusD=hDiffD/diagDistance;
    // Slope threshold above which material starts to slide.
    float coef = dCoef_talusRatio+dCoef_talusBias;
    //Cheap chemical erosion: deep, fast-flowing water lowers the threshold.
    float2 velocity = tex2D(dTexWaterVelocity,(x+0.5f)/height,(y+0.5f)/height);
    float waterH = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
    if(waterH>dCoef_DepthMax)
        coef = max(0.1f,coef-max(0.5f*dCoef_talusRatio,length(velocity)/3.5f));
    float amount = tex2D(dTexThermalAmnt2Move,(x+0.5f)/height,(y+0.5f)/height);
    // Axis-aligned shares: only downhill (positive drop) and above threshold.
    if(hDiff.x>0.0f && talus.x>coef)
        outTemp.x=amount*(hDiff.x)/total;
    else
        outTemp.x=0.0f;
    if(hDiff.y>0.0f && talus.y>coef)
        outTemp.y=amount*(hDiff.y)/total;
    else
        outTemp.y=0.0f;
    if(hDiff.z>0.0f && talus.z>coef)
        outTemp.z=amount*(hDiff.z)/total;
    else
        outTemp.z=0.0f;
    if(hDiff.w>0.0f && talus.w>coef)
        outTemp.w=amount*(hDiff.w)/total;
    else
        outTemp.w=0.0f;
    // Diagonal shares.
    // BUG FIX: these four tests previously checked the axis-aligned drops
    // (hDiff.*) instead of the diagonal drops (hDiffD.*), so a diagonal flux
    // could be emitted toward a HIGHER diagonal neighbour (negative share) or
    // be wrongly suppressed when the axis neighbour happened to be higher.
    if(hDiffD.x>0.0f && talusD.x>coef)
        outDiagTemp.x=amount*(hDiffD.x)/total;
    else
        outDiagTemp.x=0.0f;
    if(hDiffD.y>0.0f && talusD.y>coef)
        outDiagTemp.y=amount*(hDiffD.y)/total;
    else
        outDiagTemp.y=0.0f;
    if(hDiffD.z>0.0f && talusD.z>coef)
        outDiagTemp.z=amount*(hDiffD.z)/total;
    else
        outDiagTemp.z=0.0f;
    if(hDiffD.w>0.0f && talusD.w>coef)
        outDiagTemp.w=amount*(hDiffD.w)/total;
    else
        outDiagTemp.w=0.0f;
    out[y*width+x] = outTemp;
    outDiag[y*width+x] = outDiagTemp;
}
//Requires dTexTerrainH dTexThermalFlux dTexThermalFluxDiag
// Applies the thermal-erosion flux: each cell's new terrain height is its old
// height plus everything arriving from the 8 neighbours minus everything it
// sends out (reads dTexTerrainH, dTexThermalFlux, dTexThermalFluxDiag).
__global__ void kernelThermalDrop (float* out, size_t width, unsigned int height,float4* outDebugBuffer)
{
    const unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    const unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // Normalized texture coordinates of this cell and its neighbours.
    const float u  = (x+0.5f)/height;
    const float v  = (y+0.5f)/height;
    const float uL = (x+0.5f-1.0f)/height;
    const float uR = (x+0.5f+1.0f)/height;
    const float vT = (y+0.5f-1.0f)/height;
    const float vB = (y+0.5f+1.0f)/height;
    // Incoming material from the 4 axis neighbours (each neighbour's flux
    // component pointing back at this cell)...
    float delta = tex2D(dTexThermalFlux, uL, v).y;
    delta += tex2D(dTexThermalFlux, uR, v).x;
    delta += tex2D(dTexThermalFlux, u, vT).w;
    delta += tex2D(dTexThermalFlux, u, vB).z;
    // ...minus this cell's own outgoing axis flux...
    delta -= tex2D(dTexThermalFlux, u, v).x;
    delta -= tex2D(dTexThermalFlux, u, v).y;
    delta -= tex2D(dTexThermalFlux, u, v).z;
    delta -= tex2D(dTexThermalFlux, u, v).w;
    // ...plus incoming material from the 4 diagonal neighbours...
    delta += tex2D(dTexThermalFluxDiag, uL, vT).y;
    delta += tex2D(dTexThermalFluxDiag, uR, vB).x;
    delta += tex2D(dTexThermalFluxDiag, uL, vB).z;
    delta += tex2D(dTexThermalFluxDiag, uR, vT).w;
    // ...minus this cell's own outgoing diagonal flux.
    delta -= tex2D(dTexThermalFluxDiag, u, v).x;
    delta -= tex2D(dTexThermalFluxDiag, u, v).y;
    delta -= tex2D(dTexThermalFluxDiag, u, v).z;
    delta -= tex2D(dTexThermalFluxDiag, u, v).w;
    out[y*width+x] = tex2D(dTexTerrainH, u, v) + delta;
}
//Requires dTexWaterFlux dTexWaterH dTexTerrainH
// Applies the water flux: computes the velocity field, the new water height
// and the sediment transport capacity for each cell.
// Reads dTexWaterFlux, dTexWaterH and dTexTerrainH.
// NOTE(review): texture coordinates divide by `height` on both axes — square
// grid assumed.
__global__ void kernelFlow (float2* outVelocity,float* outSedCap,float* outWH, float4* outFlux,float4* outBuffer, size_t width, unsigned int height)
{
unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
//Velocity Field
float2 velocity;
float4 localFlux = tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f)/height);
// Flux components of the 4 neighbours that point back toward this cell.
float4 neighbourFlux;
neighbourFlux.x = tex2D(dTexWaterFlux,(x+0.5f-1.0f)/height,(y+0.5f)/height).y;
neighbourFlux.y = tex2D(dTexWaterFlux,(x+0.5f+1.0f)/height,(y+0.5f)/height).x;
neighbourFlux.z = tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f-1.0f)/height).w;
neighbourFlux.w = tex2D(dTexWaterFlux,(x+0.5f)/height,(y+0.5f+1.0f)/height).z;
// Net volume change = total inflow - total outflow.
float waterDelta = neighbourFlux.x + neighbourFlux.y + neighbourFlux.z + neighbourFlux.w -
localFlux.x - localFlux.y - localFlux.z - localFlux.w;
//Velocity calculations
// Average of the horizontal/vertical throughflow.
velocity.x=(neighbourFlux.x-neighbourFlux.y-localFlux.x+localFlux.y)*0.5f;
velocity.y=(neighbourFlux.z-neighbourFlux.w-localFlux.z+localFlux.w)*0.5f;
outVelocity[y*width+x] = velocity;
float velocityLength=length(velocity);
//Calculate flow (final water height)
float waterH = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
waterH = waterH + (waterDelta*dCoef_Timestep)/(dCoef_PipeLength*dCoef_PipeLength);
outWH[y*width+x] = waterH;
//Sedimemt Capacity
//Limiter, actually increases erosion as depth decreases
// limit is 1 at zero depth and falls to 0 at dCoef_DepthMax.
float limit = max(0.0f,1.0f-waterH/dCoef_DepthMax);
//Solution A
//outSedCap[y*width+x] = max(0.3f,sinf(atanf( max(0.2f,0.5f+tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) - tex2D(dTexTerrainH,(x+0.5f+normalize(velocity).x)/height,(y+0.5f+normalize(velocity).y/height)))/(1.5f*dCoef_PipeLength))))*min(3.f0,velocityLength*(limit+1.0f))*0.2f;
//Solution B
//Restrict velocity to normalized or 0.0
velocity = normalize(velocity);
if (velocityLength<0.2f) velocity.x = velocity.y = 0.0f;
// NOTE(review): `count` is only used by the commented-out variant below.
int count=0;
// Height drop toward the cell one step downstream of the flow direction.
float localH = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
float deltaH = localH - tex2D(dTexTerrainH,(x+0.5f+velocity.x)/height,(y+0.5f+velocity.y)/height);
// Boost near-flat slopes by a velocity-dependent amount.
if (deltaH>-.5f && deltaH < 0.3f)deltaH+=min(velocityLength*0.5f,0.5f*dCoef_PipeLength);
/*if(localH<tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f)/height))count++;
if(localH<tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f)/height))count++;
if(localH<tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f+1.0f)/height))count++;
if(localH<tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f-1.0f)/height))count++;
float factor = 1.0f;
if (count>2) factor = .1f;*/
/*outSedCap[y*width+x] = min(waterH,
(0.1 + __sinf(atanf(deltaH/dCoef_PipeLength))) *
min(3.0f,velocityLength*(limit+1.0f))*0.5f);*/
// Capacity = base 0.1 + sin(slope angle) scaled by a capped velocity term.
outSedCap[y*width+x] = max(0.0f,0.1f+__sinf(atanf(deltaH/dCoef_PipeLength))*min(3.0f,velocityLength*(limit+1.0f))*0.5f);
//outSedCap[y*width+x] = sedimentCapacity;
//outBuffer[y*width+x].z = min(3.0f,velocityLength*(limit+1.0f));
outBuffer[y*width+x].w = velocityLength;
}
//Requires dTexSedimentAmount dTexSedimentCapacity dTexHardness dTexTerrainH dTexWaterH
// Erosion/deposition step: compares sediment capacity against the suspended
// sediment amount. Capacity surplus erodes terrain into sediment (softening
// the rock); capacity deficit deposits sediment back onto the terrain.
// Reads dTexSedimentAmount, dTexSedimentCapacity, dTexHardness, dTexTerrainH,
// dTexWaterH.
__global__ void kernelErodeDepose(float* outHardness, float* outWH, float* outSedAmount, float* outTH, float4* outBuffer, size_t width, unsigned int height)
{
unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
float sedAmnt = tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height);
float sedCap = tex2D(dTexSedimentCapacity,(x+0.5f)/height,(y+0.5f)/height);
float hardness =tex2D(dTexHardness,(x+0.5f)/height,(y+0.5f)/height);
// factor > 0 => erode (terrain -> sediment); factor < 0 => deposit.
float factor = dCoef_Timestep*(sedCap-sedAmnt);
if(sedCap>sedAmnt)
{
//factor = min(0.1*dCoef_Timestep,factor);
// Harder rock erodes more slowly; hardness itself softens over time.
factor *= dCoef_DissolveRate*(1.0f-max(dCoef_HardnessMin,hardness));
//factor = (sedCap-sedAmnt)*0.1f;
outHardness[y*width+x] = max(dCoef_HardnessMin, hardness - dCoef_Timestep*dCoef_SoftenRate);
}
else
{
//factor = max(-0.1f*dCoef_Timestep,factor);
factor *= dCoef_SedimentDropRate;
//factor = -0.2f*sedAmnt;
//factor=1.0f;
//outHardness[y*width+x] = min(0.8f, hardness + dCoef_Timestep*dCoef_SoftenRate*0.2f);
}
// NOTE(review): in the deposition branch outHardness is only written when
// hardness < -2, so most depositing cells leave stale data in the hardness
// output buffer — confirm this is intended before the 0/1 pointer swap.
if (hardness<-2.f)
{
outHardness[y*width+x] = .80f;
}
//Mass preservation!
// Never erode more material than the terrain column actually contains.
if (factor>0)factor = min(factor,tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height));
outTH[y*width+x] = max(0.0f,tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) - factor);
outSedAmount[y*width+x] = max(0.0f, tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height) + factor);
// Water height is passed through unchanged.
outWH[y*width+x] = tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height);
//outWH[y*width+x] = max(0.0f, tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height) + factor);
/*if(sedCap>sedAmnt)
{
if (hardness==dCoef_HardnessMin)
{
outHardness[y*width+x] = .80f;
}
else
{
outHardness[y*width+x] = max(dCoef_HardnessMin, hardness - dCoef_Timestep*dCoef_SoftenRate);
}
}*/
//outHardness[y*width+x] = 0.0f;
// Debug buffer: new terrain height, new water surface, suspended sediment.
outBuffer[y*width+x].x = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) - factor;
outBuffer[y*width+x].y = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) - factor + max(0.0f, tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height) + factor);
//outBuffer[y*width+x].z = hardness;
outBuffer[y*width+x].z = sedAmnt;
//outBuffer[y*width+x].z = sedCap;
//outBuffer[y*width+x].w = dCoef_Timestep*(sedCap-sedAmnt);
}
//__global__ void kernelFixAdvectSediment(float* out, float amountToAdd, size_t width, unsigned int height)
//{
// unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
// unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
//
// out[y*width+x] = tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height) + amountToAdd*0.0f;
//}
//Requires dTexWaterVelocity dTexSedimentAmount
// Semi-Lagrangian advection: samples the sediment field at the position this
// cell's content came from one timestep ago (backtrace along the velocity).
// Reads dTexWaterVelocity and dTexSedimentAmount.
__global__ void kernelAdvectSediment(float* out, float4* outDebug, size_t width, unsigned int height)
{
    const unsigned int col = blockIdx.x*blockDim.x + threadIdx.x;
    const unsigned int row = blockIdx.y*blockDim.y + threadIdx.y;
    const float2 vel = tex2D(dTexWaterVelocity,(col+0.5f)/height,(row+0.5f)/height);
    // Backtraced source position, in normalized texture coordinates.
    const float srcU = (col+0.5f-vel.x*dCoef_Timestep/dCoef_PipeLength)/height;
    const float srcV = (row+0.5f-vel.y*dCoef_Timestep/dCoef_PipeLength)/height;
    out[row*width+col] = tex2D(dTexSedimentAmount, srcU, srcV);
}
//Requires dTexWaterVelocity dTexSedimentAmntAdvect
// MacCormack-style correction pass: advects the already-advected field forward
// again, uses the difference from the original field as an error estimate, and
// clamps the corrected value to the range of the 4 cells surrounding the
// backtraced source position. Reads dTexWaterVelocity, dTexSedimentAmount and
// dTexSedimentAmntAdvect.
__global__ void kernelAdvectBackSediment(float* out, float4* outDebug, size_t width, unsigned int height)
{
    const unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    const unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    const float2 vel = tex2D(dTexWaterVelocity,(x+0.5f)/height,(y+0.5f)/height);
    // Displacement covered during one timestep, measured in cells.
    const float dx = vel.x*dCoef_Timestep/dCoef_PipeLength;
    const float dy = vel.y*dCoef_Timestep/dCoef_PipeLength;
    // Error = original value minus the forward re-advection of the advected field.
    const float error = tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height) -
        tex2D(dTexSedimentAmntAdvect,(x+0.5f+dx)/height,(y+0.5f+dy)/height);
    // Samples at the integer cells bracketing the backtraced position.
    const float sCC = tex2D(dTexSedimentAmount,(x+0.5f-ceilf(dx))/height,(y+0.5f-ceilf(dy))/height);
    const float sFF = tex2D(dTexSedimentAmount,(x+0.5f-floorf(dx))/height,(y+0.5f-floorf(dy))/height);
    const float sFC = tex2D(dTexSedimentAmount,(x+0.5f-floorf(dx))/height,(y+0.5f-ceilf(dy))/height);
    const float sCF = tex2D(dTexSedimentAmount,(x+0.5f-ceilf(dx))/height,(y+0.5f-floorf(dy))/height);
    const float lo = min(min(sCC,sFF),min(sCF,sFC));
    const float hi = max(max(sCC,sFF),max(sCF,sFC));
    // Corrected value, kept inside the source-cell range to avoid overshoot.
    out[y*width+x] = max(lo,min(hi,tex2D(dTexSedimentAmntAdvect, (x+0.5f)/height,(y+0.5f)/height) + error*0.5f));
}
// DO NOT USE DEPRECATED!!!!!
//Might be useful for the BFECC model if i decide to try it
//Requires dTexWaterVelocity dTexSedimentAmount dTexSedimentAmntAdvect dTexSedimentAmntAdvectBack
// Deprecated (see comment above): subtracts the sediment texel from itself at
// identical coordinates, so for finite values it always writes 0.0f.
// `velocity` is fetched but unused. Kept only as a starting point for a BFECC
// variant.
__global__ void kernelMoveSediment(float* out, float4* outDebug, size_t width, unsigned int height)
{
unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
float2 velocity = tex2D(dTexWaterVelocity,(x+0.5f)/height,(y+0.5f)/height);
out[y*width+x] = (tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height)-tex2D(dTexSedimentAmount,(x+0.5f)/height,(y+0.5f)/height));
}
//Requires dTexTerrainH dTexHardness
// Computes how much material each cell may move thermally this step:
// a per-cell budget (scaled by timestep, erosion rate and softness) multiplied
// by the sum of positive height drops toward all 8 neighbours.
// Reads dTexTerrainH and dTexHardness.
__global__ void kernelThermalErosionAmnt (float* out,float4* outBuffer, size_t width, unsigned int height)
{
    const unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    const unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    const float u = (x+0.5f)/height;
    const float v = (y+0.5f)/height;
    const float localH = tex2D(dTexTerrainH, u, v);
    // Per-cell budget; softer rock (lower hardness) yields a larger budget.
    const float amountToMove = dCoef_PipeLength*dCoef_PipeLength*dCoef_Timestep*dCoef_ThermalErosionRate*(1.0f-tex2D(dTexHardness, u, v))/2.0f;
    // Accumulate the positive drops: left, right, top, bottom, then diagonals.
    float drop = max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height, v));
    drop += max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height, v));
    drop += max(0.0f,localH-tex2D(dTexTerrainH, u,(y+0.5f-1.0f)/height));
    drop += max(0.0f,localH-tex2D(dTexTerrainH, u,(y+0.5f+1.0f)/height));
    drop += max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f-1.0f)/height));
    drop += max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f+1.0f)/height));
    drop += max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f+1.0f)/height,(y+0.5f-1.0f)/height));
    drop += max(0.0f,localH-tex2D(dTexTerrainH,(x+0.5f-1.0f)/height,(y+0.5f+1.0f)/height));
    out[y*width+x]=amountToMove*drop;
}
//Requires dTexWaterH
// Evaporation step: shrinks each water column by a fixed fraction per timestep,
// clamped so it never goes negative. Also fills the debug buffer with the
// terrain height (.x) and the resulting water surface (.y).
// Reads dTexWaterH and dTexTerrainH.
__global__ void kernelEvaporate(float* outWater, float4* outBuffer, size_t width, unsigned int height)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // FIX: use a 1.0f literal — the original 1.0 (double) promoted the whole
    // expression to double precision, forcing a needless double multiply per
    // thread in an otherwise all-float kernel.
    float result = max(tex2D(dTexWaterH,(x+0.5f)/height,(y+0.5f)/height)*(1.0f-dCoef_EvaporationRate*dCoef_Timestep),0.0f);
    outBuffer[y*width+x].x = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height);
    outBuffer[y*width+x].y = tex2D(dTexTerrainH,(x+0.5f)/height,(y+0.5f)/height) + result;
    outWater[y*width+x] = result;
}
//Swap pointers
// Exchanges the pointers stored at *ptrA and *ptrB (used to ping-pong the
// 0/1 simulation buffers between passes).
inline void cuda_exchPtrs(void ** ptrA, void ** ptrB)
{
    void * const saved = *ptrA;
    *ptrA = *ptrB;
    *ptrB = saved;
}
// Host entry point for interactive terrain/water editing: launches
// kernelEditBuffer over the whole grid, writing into the "1" buffer of the
// selected field, then swaps the 0/1 pointers.
// mode != 0 edits the water height buffers; mode == 0 edits the terrain.
extern "C" void cuda_EditTerrain(float editX, float editY, float pointDistance, float editValue, unsigned int width, unsigned int height ,float maxDist, float dt, int mode )
{
    // 8x8 threads per block; assumes width/height are multiples of 8.
    const dim3 threadsPerBlock(8, 8, 1);
    const dim3 numBlocks(width / threadsPerBlock.x, height / threadsPerBlock.y, 1);
    if (mode)
    {
        printf("edit A");
        kernelEditBuffer<<< numBlocks, threadsPerBlock>>>(dWaterH0_ptr, dWaterH1_ptr, editX, editY, pointDistance, editValue, width, height , maxDist, dt );
        cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
        return;
    }
    printf("edit B");
    kernelEditBuffer<<< numBlocks, threadsPerBlock>>>(dTerrainH0_ptr, dTerrainH1_ptr, editX, editY, pointDistance, editValue, width, height , maxDist, dt );
    cuda_exchPtrs((void **)&dTerrainH1_ptr,(void **)&dTerrainH0_ptr);
}
// Rain duty-cycle state used by cuda_Simulate: rain is on while
// rainCount < 330 and the counter resets after it passes 380.
int rainCount=0;
int rain=0;
// Host-side scratch buffer sized for the fixed 512x512 grid.
// NOTE(review): not referenced anywhere in this chunk — possibly dead; confirm
// against the rest of the file before removing.
float sedimentArray[512*512];
// Runs one full erosion step on the fixed 512x512 grid.
// Pipeline: rain -> water flux -> thermal budget -> flow/velocity ->
// erode/depose -> sediment advection (two-pass MacCormack) -> thermal
// transport -> evaporation. Each stage binds the textures it reads, launches
// its kernel into the corresponding "1" buffer, unbinds, then ping-pongs the
// 0/1 pointers with cuda_exchPtrs. The bind/launch/unbind/swap ORDER is
// load-bearing — later stages rely on textures left bound by earlier ones.
// NOTE(review): ERR receives cudaGetLastError() repeatedly but is never
// inspected, so kernel failures are silently dropped.
// NOTE(review): dt, in and in2 are unused in this function body — confirm
// against callers before removing.
extern "C" void cuda_Simulate(float4* heights, unsigned int width, unsigned int height, float dt, float *in,float*in2)
{
// Rain cycle: on for ticks < 330, off for 330..380; at >380 the counter
// resets (rain=1 here is immediately overwritten next call's < 330 branch).
rainCount++;
if(rainCount<330)
rain=1;
else rain=0;
if(rainCount>380)
{rain=1;rainCount=0;}
// 32x2 threads per block; assumes width/height divide evenly.
dim3 block(32, 2, 1);
dim3 grid(width / block.x, height / block.y, 1);
cudaError ERR;
//Inc Water
cudaBindTexture2D((size_t *)&dTexWaterHOffset,dTexWaterHptr,(void *)dWaterH0_ptr,&texFloatChannelDesc,512,512,hWaterH0_pitch);
cudaBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate0_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);
kernelIncWater<<<grid, block>>>(dWaterH1_ptr,heights,width,height,rain);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexWaterHptr);
cudaUnbindTexture(dTexWaterRainRatePtr);
cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
ERR = cudaGetLastError();
//Calc Flux
// dTexTerrainH and dTexWaterH stay bound through the next few stages (see
// the "barrier" comments below).
cudaBindTexture2D((size_t *)&dTexTerrainHOffset,dTexTerrainHptr,(void *)dTerrainH0_ptr,&texFloatChannelDesc,512,512,hTerrainH0_pitch);
cudaBindTexture2D((size_t *)&dTexWaterHOffset,dTexWaterHptr,(void *)dWaterH0_ptr,&texFloatChannelDesc,512,512,hWaterH0_pitch);
cudaBindTexture2D((size_t *)&dTexWaterFluxOffset,dTexWaterFluxPtr,(void *)dWaterFlux0_ptr,&texFloat4ChannelDesc,512,512,hWaterFlux_pitch);
kernelCalculateFlux<<<grid,block>>>(dWaterFlux1_ptr,
heights,
width,
height);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexWaterFluxPtr);
cuda_exchPtrs((void **)&dWaterFlux0_ptr,(void **)&dWaterFlux1_ptr);
ERR = cudaGetLastError();
//cudaUnbindTexture(dTexTerrainHptr);//barrier
//cudaUnbindTexture(dTexWaterHptr);//barrier
//Calc Errode Amnt
cudaBindTexture2D( (size_t *)&dTexHardnessOffset,
dTexHardnessPtr,
(void *)dHardness0_ptr,
&texFloatChannelDesc,
512,512,
hHardness0_pitch);
kernelThermalErosionAmnt<<<grid,block>>>( dThermalAmount2Move_ptr,
heights,
width,
height);
cudaDeviceSynchronize();
ERR = cudaGetLastError();
//cudaUnbindTexture(dTexHardnessPtr);//barrier
//Calculate Flow
// Rebind the flux texture to the freshly computed (post-swap) flux buffer.
cudaBindTexture2D( (size_t *)&dTexWaterFluxOffset,
dTexWaterFluxPtr,
(void *)dWaterFlux0_ptr,
&texFloat4ChannelDesc,
512,512,
hWaterFlux_pitch);
kernelFlow<<<grid,block>>>( dWaterVelocity_ptr,
dSedimentCapacity_ptr,
dWaterH1_ptr,
dWaterFlux1_ptr,
heights,
width,
height);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexWaterHptr);
cudaUnbindTexture(dTexWaterFluxPtr);
cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
//cuda_exchPtrs((void **)&dWaterFlux0_ptr,(void **)&dWaterFlux1_ptr);
ERR = cudaGetLastError();
//cudaUnbindTexture(dTexTerrainHptr);//barrier
//cudaUnbindTexture(dTexHardnessPtr);//barrier
//Calculate ErodeDepose
cudaBindTexture2D(
(size_t *)&dTexWaterHOffset,
dTexWaterHptr,
(void *)dWaterH0_ptr,
&texFloatChannelDesc,
512, 512,
hWaterH0_pitch);
cudaBindTexture2D(
(size_t *)&dTexSedimentAmountOffset,
dTexSedimentAmountPtr,
(void *)dSedimentAmount0_ptr,
&texFloatChannelDesc,
512, 512,
hSedimentAmount_pitch);
cudaBindTexture2D(
(size_t *)&dTexSedimentCapacityOffset,
dTexSedimentCapacityPtr,
(void *)dSedimentCapacity_ptr,
&texFloatChannelDesc,
512,512,
hSedimentCapacity_pitch);
kernelErodeDepose<<<grid,block>>>(dHardness1_ptr,dWaterH1_ptr,dSedimentAmount1_ptr,dTerrainH1_ptr,heights,width, height);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexWaterHptr);
cudaUnbindTexture(dTexHardnessPtr);
cudaUnbindTexture(dTexTerrainHptr);
cudaUnbindTexture(dTexSedimentAmountPtr);
cudaUnbindTexture(dTexSedimentCapacityPtr);
// Erode/depose rewrote four fields at once; swap all four buffer pairs.
cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
cuda_exchPtrs((void **)&dHardness0_ptr,(void **)&dHardness1_ptr);
cuda_exchPtrs((void **)&dTerrainH0_ptr,(void **)&dTerrainH1_ptr);
cuda_exchPtrs((void **)&dSedimentAmount0_ptr,(void **)&dSedimentAmount1_ptr);
// ERR = cudaGetLastError();
//Move Sedmient
// Two-pass MacCormack advection: forward advect, then error-corrected pass.
cudaBindTexture2D(
(size_t *)&dTexWaterHOffset,
dTexWaterHptr,
(void *)dWaterH0_ptr,
&texFloatChannelDesc,
512, 512,
hWaterH0_pitch);
cudaBindTexture2D(
(size_t *)&dTexSedimentAmountOffset,
dTexSedimentAmountPtr,
(void *)dSedimentAmount0_ptr,
&texFloatChannelDesc,
512, 512,
hSedimentAmount_pitch);
cudaBindTexture2D(
(size_t *)&dTexWaterVelocityOffset,
dTexWaterVelocityPtr,
(void *)dWaterVelocity_ptr,
&texFloat2ChannelDesc,
512, 512,
hWaterVelocity_pitch);
kernelAdvectSediment<<<grid,block>>>(dSedimentAmntAdvect_ptr, heights, width, height);
cudaDeviceSynchronize();
cudaBindTexture2D(
(size_t *)&dTexSedimentAmntAdvectOffset,
dTexSedimentAmntAdvectPtr,
(void *)dSedimentAmntAdvect_ptr,
&texFloatChannelDesc,
512, 512,
hSedimentAmount_pitch);
kernelAdvectBackSediment<<<grid,block>>>(dSedimentAmount1_ptr, heights, width, height);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexSedimentAmountPtr);
cudaUnbindTexture(dTexSedimentAmntAdvectPtr);
cuda_exchPtrs((void **)&dSedimentAmount0_ptr,(void **)&dSedimentAmount1_ptr);
ERR = cudaGetLastError();
//Errode thermally
cudaBindTexture2D(
(size_t *)&dTexHardnessOffset,
dTexHardnessPtr,
(void *)dHardness0_ptr,
&texFloatChannelDesc,
512, 512,
hHardness0_pitch);
cudaBindTexture2D(
(size_t *)&dTexThermalAmnt2MoveOffset,
dTexThermalAmnt2MovePtr,
(void *)dThermalAmount2Move_ptr,
&texFloatChannelDesc,
512, 512,
hThermalAmount2Move_pitch);
cudaBindTexture2D(
(size_t *)&dTexTerrainHOffset,
dTexTerrainHptr,
(void *)dTerrainH0_ptr,
&texFloatChannelDesc,
512, 512,
hTerrainH0_pitch);
kernelThermalErosionFlux<<<grid,block>>>(dThermalFlux_ptr,dThermalFluxDiag_ptr,width,height, heights);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexThermalAmnt2MovePtr);
cudaUnbindTexture(dTexHardnessPtr);
cudaUnbindTexture(dTexWaterVelocityPtr);
ERR = cudaGetLastError();
// //Drop Thermal Eroded
cudaBindTexture2D(
(size_t *)&dTexThermalFluxOffset,
dTexThermalFluxPtr,
(void *)dThermalFlux_ptr,
&texFloat4ChannelDesc,
512,512,
hThermalFlux_pitch);
cudaBindTexture2D(
(size_t *)&dTexThermalFluxDiagOffset,
dTexThermalFluxDiagPtr,
(void *)dThermalFluxDiag_ptr,
&texFloat4ChannelDesc,
512,
512,
hThermalFluxDiag_pitch);
kernelThermalDrop<<<grid,block>>>(dTerrainH1_ptr,width,height,heights);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexTerrainHptr);
cudaUnbindTexture(dTexThermalFluxPtr);
cudaUnbindTexture(dTexThermalFluxDiagPtr);
cuda_exchPtrs((void **)&dTerrainH0_ptr,(void **)&dTerrainH1_ptr);
//ERR = cudaGetLastError();
//Evaporate
// dTexWaterHptr is still bound from the "Move Sedmient" stage above.
cudaBindTexture2D(
(size_t *)&dTexTerrainHOffset,
dTexTerrainHptr,
(void *)dTerrainH0_ptr,
&texFloatChannelDesc,
512, 512,
hTerrainH0_pitch);
//cudaBindTexture2D((size_t *)&dTexWaterHOffset,dTexWaterHptr,(void *)dWaterH0_ptr,&texFloatChannelDesc,512,512,hWaterH0_pitch);
kernelEvaporate<<<grid,block>>>(dWaterH1_ptr, heights,width, height);
cudaDeviceSynchronize();
cudaUnbindTexture(dTexWaterHptr);
cudaUnbindTexture(dTexTerrainHptr);
cuda_exchPtrs((void **)&dWaterH0_ptr,(void **)&dWaterH1_ptr);
ERR = cudaGetLastError();
//cudaBindTexture2D((size_t *)&dTexHardnessOffset,dTexTerrainHptr,(void *)dTerrainH0_ptr,&texFloatChannelDesc,512,512,hTerrainH0_pitch);
// cudaBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate0_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);//bound=1;
// kernelTest<<< grid, block>>>(heights, width, height, dt, dTerrainH1_ptr, dWaterRainRate1_ptr);
// cudaUnbindTexture(dTexTerrainHptr);
// cudaUnbindTexture(dTexWaterRainRatePtr);
// cuda_exchPtrs((void **)&dTerrainH0_ptr,(void **)&dTerrainH1_ptr);
// cuda_exchPtrs((void **)&dWaterRainRate0_ptr,(void **)&dWaterRainRate1_ptr);
//tick^=1;
}
//extern "C" void cuda_Simulate(float4* heights, unsigned int width, unsigned int height, float dt, float *in,float*in2)
//{
//
// dim3 block(8, 8, 1);
// dim3 grid(width / block.x, height / block.y, 1);
// if(!tick)
// {
// cudaBindTexture2D((size_t *)&dTexHardnessOffset,dTexTerrainHptr,(void *)dTerrainH0_ptr,&texFloatChannelDesc,512,512,hTerrainH0_pitch);
// cudaBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate0_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);//bound=1;
// kernelTest<<< grid, block>>>(heights, width, height, dt, dTerrainH1_ptr, dWaterRainRate1_ptr);
// cudaUnbindTexture(dTexTerrainHptr);
// cudaUnbindTexture(dTexWaterRainRatePtr);
// }
// else
// {
// cudaBindTexture2D((size_t *)&dTexHardnessOffset,dTexTerrainHptr,(void *)dTerrainH1_ptr,&texFloatChannelDesc,512,512,hTerrainH1_pitch);
// cudaBindTexture2D((size_t *)&dTexWaterRainRateOffset, dTexWaterRainRatePtr,(void *)dWaterRainRate1_ptr,&texFloatChannelDesc,512,512,hWaterRainRate0_pitch);
// kernelTest<<< grid, block>>>(heights, width, height, dt, dTerrainH0_ptr,dWaterRainRate0_ptr);
// cudaUnbindTexture(dTexTerrainHptr);
// cudaUnbindTexture(dTexWaterRainRatePtr);
// }
// tick^=1;
//}
//extern "C" void cuda_Simulate(int* heights, unsigned int width, unsigned int height, float dt)
//{
// //printf("\nCuda Start");
//
// // execute the kernel
// dim3 block(8, 8, 1);
// dim3 grid(width / block.x, height / block.y, 1);
// kernelTest<<< grid, block>>>(heights, width, height, dt);
//
//
// //printf("\nCuda End");
//}
|
TensorFactories.hip | // !!! This is a file automatically generated by hipify!!!
#include "ATen/ATen.h"
#include "ATen/NativeFunctions.h"
#include "ATen/hip/HIPTypeConversion.cuh"
#include <THH/THHGeneral.h>
#include <THH/THHThrustAllocator.cuh>
#include <thrust/device_ptr.h>
#include <thrust/sort.h>
#include <thrust/execution_policy.h>
#include <thrust/sequence.h>
#include <algorithm>
#include <sstream>
namespace at {
namespace native {
// Writes an n x m identity matrix into `result` (resizing it) and returns it.
// m <= 0 defaults to a square n x n matrix.
Tensor& eye_out_cuda(Tensor& result, int64_t n, int64_t m) {
  if (n <= 0) {
    std::ostringstream oss;
    oss << "n must be greater than 0, got: " << n;
    // BUG FIX: the original constructed the runtime_error temporary without
    // `throw`, so the invalid argument was silently ignored and execution
    // fell through to resize_ with a non-positive dimension.
    throw std::runtime_error(oss.str());
  }
  if(m <= 0) {
    m = n;
  }
  result.resize_({n, m});
  result.zero_();
  // View the main diagonal as a 1-D strided tensor and fill it with ones.
  int64_t sz = std::min<int64_t>(n, m);
  int64_t stride = result.stride(0) + result.stride(1);
  Tensor diag = result.as_strided({sz}, {stride});
  diag.fill_(1);
  return result;
}
// Fills `result` with a random permutation of [0, n) and returns it.
Tensor& randperm_out_cuda(Tensor& result, int64_t n, Generator* generator) {
if (n < 0) {
std::ostringstream oss;
oss << "n must be non-negative, got " << n;
throw std::runtime_error(oss.str());
}
// Reject dtypes that cannot represent n.
// NOTE(review): AT_CHECK tests the truthiness of the converted value; the
// overflow itself is presumably raised inside to<scalar_t>() — confirm.
if (n > 0) {
AT_DISPATCH_ALL_TYPES_AND_HALF(
result.type(), "randperm_out_cuda", [&] {
AT_CHECK(Scalar(n).to<scalar_t>(),
"n is too large for result tensor type: '", result.type().toString(), "'");
}
);
}
result.resize_({n});
// Half has no direct path below; compute in float and copy back.
if (result.type().scalarType() == at::ScalarType::Half) {
auto result_float = CUDA(kFloat).tensor({n});
result.copy_(randperm_out_cuda(result_float, n, generator));
} else {
if (n < 30000) { // For small inputs, we offload it to CPU instead.
auto result_cpu = result.type().toBackend(kCPU).tensor({n});
randperm_out(result_cpu, n, generator);
result.copy_(result_cpu);
} else {
// Generate random values for the keys array
// Permutation via key sort: fill result with 0..n-1, then reorder it by
// sorting against random keys on the current CUDA stream.
AT_DISPATCH_ALL_TYPES(
result.type(), "randperm_out_cuda", [&] {
using cuda_scalar_t = cuda::into_type<scalar_t>;
auto keys = result.type().tensor(result.sizes()).random_(generator);
auto result_data = thrust::device_ptr<cuda_scalar_t>(result.data<cuda_scalar_t>());
auto keys_data = thrust::device_ptr<cuda_scalar_t>(keys.data<cuda_scalar_t>());
auto state = globalContext().getTHCState();
THCThrustAllocator thrustAlloc(state);
auto policy = thrust::hip::par(thrustAlloc).on(THCState_getCurrentStream(state));
thrust::sequence(policy, result_data, result_data + n);
// Use the sorted order of keys to rearrange the result array
thrust::sort_by_key(policy, keys_data, keys_data + n, result_data);
}
);
}
}
return result;
}
}} // namespace at::native
| TensorFactories.cu | #include "ATen/ATen.h"
#include "ATen/NativeFunctions.h"
#include "ATen/cuda/CUDATypeConversion.cuh"
#include <THC/THCGeneral.h>
#include <THC/THCThrustAllocator.cuh>
#include <thrust/device_ptr.h>
#include <thrust/sort.h>
#include <thrust/execution_policy.h>
#include <thrust/sequence.h>
#include <algorithm>
#include <sstream>
namespace at {
namespace native {
// Fills `result` in-place with an n x m identity matrix (ones on the main
// diagonal, zeros elsewhere). If m <= 0 the matrix is made square (m = n).
// Throws std::runtime_error when n <= 0. Returns `result` for chaining.
Tensor& eye_out_cuda(Tensor& result, int64_t n, int64_t m) {
  if (n <= 0) {
    std::ostringstream oss;
    oss << "n must be greater than 0, got: " << n;
    // BUG FIX: the exception was constructed but never thrown, so invalid
    // input silently fell through to resize_ below.
    throw std::runtime_error(oss.str());
  }
  if (m <= 0) {
    m = n;  // default: square matrix
  }
  result.resize_({n, m});
  result.zero_();
  // View the main diagonal as a 1-D strided tensor and fill it with ones:
  // stepping by stride(0) + stride(1) moves one row down and one column
  // right at each element.
  int64_t sz = std::min<int64_t>(n, m);
  int64_t stride = result.stride(0) + result.stride(1);
  Tensor diag = result.as_strided({sz}, {stride});
  diag.fill_(1);
  return result;
}
// Fills `result` in-place with a random permutation of the integers [0, n).
// Throws std::runtime_error for negative n. Returns `result` for chaining.
Tensor& randperm_out_cuda(Tensor& result, int64_t n, Generator* generator) {
if (n < 0) {
std::ostringstream oss;
oss << "n must be non-negative, got " << n;
throw std::runtime_error(oss.str());
}
// Guard that n is representable in the result's scalar type; the checked
// conversion's value is used by AT_CHECK as a truth test (n > 0 here, so a
// successful conversion is non-zero).
if (n > 0) {
AT_DISPATCH_ALL_TYPES_AND_HALF(
result.type(), "randperm_out_cuda", [&] {
AT_CHECK(Scalar(n).to<scalar_t>(),
"n is too large for result tensor type: '", result.type().toString(), "'");
}
);
}
result.resize_({n});
// Half tensors: compute the permutation in a float tensor on the GPU, then
// narrow via copy_ (the thrust path below only dispatches non-half types).
if (result.type().scalarType() == at::ScalarType::Half) {
auto result_float = CUDA(kFloat).tensor({n});
result.copy_(randperm_out_cuda(result_float, n, generator));
} else {
if (n < 30000) { // For small inputs, we offload it to CPU instead.
auto result_cpu = result.type().toBackend(kCPU).tensor({n});
randperm_out(result_cpu, n, generator);
result.copy_(result_cpu);
} else {
// Generate random values for the keys array
AT_DISPATCH_ALL_TYPES(
result.type(), "randperm_out_cuda", [&] {
using cuda_scalar_t = cuda::into_type<scalar_t>;
auto keys = result.type().tensor(result.sizes()).random_(generator);
auto result_data = thrust::device_ptr<cuda_scalar_t>(result.data<cuda_scalar_t>());
auto keys_data = thrust::device_ptr<cuda_scalar_t>(keys.data<cuda_scalar_t>());
auto state = globalContext().getTHCState();
THCThrustAllocator thrustAlloc(state);
auto policy = thrust::cuda::par(thrustAlloc).on(THCState_getCurrentStream(state));
// Fill result with 0..n-1, then permute it by sorting random keys:
// sort_by_key rearranges `result` into the sorted order of `keys`.
thrust::sequence(policy, result_data, result_data + n);
// Use the sorted order of keys to rearrange the result array
thrust::sort_by_key(policy, keys_data, keys_data + n, result_data);
}
);
}
}
return result;
}
}} // namespace at::native
|
bf41de4d80cdda0b214ce49948c2b8240203dcb1.hip | // !!! This is a file automatically generated by hipify!!!
#include "Particles.h"
#include "ParticlesBatching.h"
#include "ParticlesStreaming.h"
#include <hip/hip_runtime.h>
#include <hip/hip_runtime.h>
#define NUMBER_OF_PARTICLES_PER_BATCH 1024000
#define MAX_NUMBER_OF_STREAMS 5
#define NUMBER_OF_STREAMS_PER_BATCH 4
/** Particle mover on the GPU using mini-batching of particles plus HIP
 *  streams.
 *
 *  Grid and field arrays are copied to the device once up front. The particle
 *  attribute arrays (x,y,z,u,v,w and charge q) are processed in mini-batches
 *  only when they do not fit in the free device memory; each batch is further
 *  split across NUMBER_OF_STREAMS_PER_BATCH streams (plus one remainder
 *  stream) so host<->device copies and kernel execution can overlap.
 *
 *  NOTE(review): part->x etc. look like ordinary (pageable) host arrays;
 *  hipMemcpyAsync only truly overlaps with pinned memory -- confirm how the
 *  particle arrays are allocated.
 *
 *  Returns 0 on completion (device API return codes are not checked).
 */
int mover_GPU_stream(struct particles* part, struct EMfield* field, struct grid* grd, struct parameters* param)
{
    // print species and subcycling info
    std::cout << "***GPU MOVER with SUBCYCLYING "<< param->n_sub_cycles << " - species " << part->species_ID << " ***" << std::endl;

    // auxiliary variables: sub-cycle time step, its half, and q/m * dt/2
    FPpart dt_sub_cycling = (FPpart) param->dt/((double) part->n_sub_cycles);
    FPpart dto2 = .5*dt_sub_cycling, qomdt2 = part->qom*dto2/param->c;

    // device pointers for particle attributes and grid/field arrays.
    // BUG FIX: Byn_flat_dev was the only pointer left uninitialized.
    FPpart *x_dev = NULL, *y_dev = NULL, *z_dev = NULL, *u_dev = NULL, *v_dev = NULL, *w_dev = NULL;
    FPinterp *q_dev = NULL;
    FPfield *XN_flat_dev = NULL, *YN_flat_dev = NULL, *ZN_flat_dev = NULL, *Ex_flat_dev = NULL, *Ey_flat_dev = NULL, *Ez_flat_dev = NULL, *Bxn_flat_dev = NULL, *Byn_flat_dev = NULL, *Bzn_flat_dev = NULL;

    size_t free_bytes = 0;
    int i, start_index_batch, end_index_batch, number_of_batches;
    // BUG FIX: total_size_particles was an int, which overflows for large
    // npmax (tens of millions of particles) and then compares wrongly
    // against the unsigned size_t free_bytes.
    size_t total_size_particles;

    // Bytes of one per-node grid/field array, computed in size_t so the
    // product cannot overflow int for large grids.
    const size_t field_bytes = (size_t)grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield);

    // The grid and field arrays are assumed to fit on the GPU and are copied
    // once; mini-batching below only takes the particles into account.
    hipMalloc(&XN_flat_dev, field_bytes);
    hipMalloc(&YN_flat_dev, field_bytes);
    hipMalloc(&ZN_flat_dev, field_bytes);
    hipMalloc(&Ex_flat_dev, field_bytes);
    hipMalloc(&Ey_flat_dev, field_bytes);
    hipMalloc(&Ez_flat_dev, field_bytes);
    hipMalloc(&Bxn_flat_dev, field_bytes);
    hipMalloc(&Byn_flat_dev, field_bytes);
    hipMalloc(&Bzn_flat_dev, field_bytes);

    hipMemcpy(XN_flat_dev, grd->XN_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(YN_flat_dev, grd->YN_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(ZN_flat_dev, grd->ZN_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Ex_flat_dev, field->Ex_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Ey_flat_dev, field->Ey_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Ez_flat_dev, field->Ez_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Bxn_flat_dev, field->Bxn_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Byn_flat_dev, field->Byn_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Bzn_flat_dev, field->Bzn_flat, field_bytes, hipMemcpyHostToDevice);

    free_bytes = queryFreeMemoryOnGPU();
    // bytes needed to hold all particles at once: x,y,z,u,v,w plus q
    total_size_particles = sizeof(FPpart) * (size_t)part->npmax * 6 + sizeof(FPinterp) * (size_t)part->npmax;
    start_index_batch = 0, end_index_batch = 0;

    // Mini-batching only kicks in when the particles do not all fit in the
    // free device memory.
    if (free_bytes > total_size_particles)
    {
        start_index_batch = 0;
        end_index_batch = part->npmax - 1; // one batch spanning every particle
        number_of_batches = 1;
    }
    else
    {
        start_index_batch = 0;
        end_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH - 1; // NUMBER_OF_PARTICLES_PER_BATCH is a tuning parameter
        if (part->npmax % NUMBER_OF_PARTICLES_PER_BATCH != 0)
        {
            number_of_batches = part->npmax / NUMBER_OF_PARTICLES_PER_BATCH + 1; // round up (integer division)
        }
        else
        {
            number_of_batches = part->npmax / NUMBER_OF_PARTICLES_PER_BATCH;
        }
    }

    hipStream_t cudaStreams[MAX_NUMBER_OF_STREAMS];
    for (i = 0; i < number_of_batches; i++)
    {
        long int number_of_particles_batch = end_index_batch - start_index_batch + 1; // particles in this batch
        size_t batch_size_per_attribute = number_of_particles_batch * sizeof(FPpart); // bytes per attribute (x,y,z,u,v,w)
        long int number_of_particles_stream = 0, stream_size_per_attribute = 0, number_of_streams = 0, stream_offset = 0, offset = 0, start_index_stream = 0, end_index_stream = 0, max_num_particles_per_stream = 0;

        // per-batch device buffers for the particle attributes
        hipMalloc(&x_dev, batch_size_per_attribute);
        hipMalloc(&y_dev, batch_size_per_attribute);
        hipMalloc(&z_dev, batch_size_per_attribute);
        hipMalloc(&u_dev, batch_size_per_attribute);
        hipMalloc(&v_dev, batch_size_per_attribute);
        hipMalloc(&w_dev, batch_size_per_attribute);
        hipMalloc(&q_dev, number_of_particles_batch * sizeof(FPinterp));

        // Split the batch into equal stream slices; a remainder slice is
        // appended when the division is not exact.  number_of_streams can be
        // at most NUMBER_OF_STREAMS_PER_BATCH + 1 == MAX_NUMBER_OF_STREAMS.
        start_index_stream = 0;
        end_index_stream = start_index_stream + (number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH) - 1;
        max_num_particles_per_stream = number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH;
        if (number_of_particles_batch % NUMBER_OF_STREAMS_PER_BATCH != 0) // leftover particles need one extra stream
        {
            number_of_streams = NUMBER_OF_STREAMS_PER_BATCH + 1;
        }
        else
        {
            number_of_streams = NUMBER_OF_STREAMS_PER_BATCH;
        }
        for (int j = 0; j < number_of_streams; j++)
        {
            hipStreamCreate(&cudaStreams[j]);
        }

        for (int stream_idx = 0; stream_idx < number_of_streams; stream_idx++)
        {
            number_of_particles_stream = end_index_stream - start_index_stream + 1;
            stream_size_per_attribute = number_of_particles_stream * sizeof(FPpart); // for x,y,z,u,v,w
            stream_offset = start_index_stream;            // offset inside the batch buffers
            offset = stream_offset + start_index_batch;    // absolute offset into the host arrays

            // stage this stream's slice of every attribute onto the device
            hipMemcpyAsync(&x_dev[stream_offset], &part->x[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&y_dev[stream_offset], &part->y[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&z_dev[stream_offset], &part->z[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&u_dev[stream_offset], &part->u[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&v_dev[stream_offset], &part->v[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&w_dev[stream_offset], &part->w[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&q_dev[stream_offset], &part->q[offset], number_of_particles_stream * sizeof(FPinterp), hipMemcpyHostToDevice, cudaStreams[stream_idx]);

            // start subcycling: one kernel launch per sub-cycle, all enqueued
            // on this stream so they run in order after the copies above
            for (int i_sub=0; i_sub < part->n_sub_cycles; i_sub++){
                hipLaunchKernelGGL(( single_particle_kernel), dim3((number_of_particles_stream + TPB - 1)/TPB), dim3(TPB), 0, cudaStreams[stream_idx],
                    x_dev, y_dev, z_dev, u_dev, v_dev, w_dev, q_dev,
                    XN_flat_dev, YN_flat_dev, ZN_flat_dev,
                    grd->nxn, grd->nyn, grd->nzn,
                    grd->xStart, grd->yStart, grd->zStart,
                    grd->invdx, grd->invdy, grd->invdz,
                    grd->Lx, grd->Ly, grd->Lz, grd->invVOL,
                    Ex_flat_dev, Ey_flat_dev, Ez_flat_dev,
                    Bxn_flat_dev, Byn_flat_dev, Bzn_flat_dev,
                    param->PERIODICX, param->PERIODICY, param->PERIODICZ,
                    dt_sub_cycling, dto2, qomdt2,
                    part->NiterMover, number_of_particles_stream, stream_offset
                );
            } // end of subcycling

            // copy the updated slice back to the host.
            // NOTE(review): q is copied back too, even though the charge is
            // presumably not modified by the mover -- confirm and drop if so.
            hipMemcpyAsync(&part->x[offset], &x_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->y[offset], &y_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->z[offset], &z_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->u[offset], &u_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->v[offset], &v_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->w[offset], &w_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->q[offset], &q_dev[stream_offset], number_of_particles_stream * sizeof(FPinterp), hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipStreamSynchronize(cudaStreams[stream_idx]);

            // advance the stream window; the last slice is clamped to the batch
            start_index_stream = start_index_stream + max_num_particles_per_stream;
            if ( (start_index_stream + max_num_particles_per_stream) > number_of_particles_batch)
            {
                end_index_stream = number_of_particles_batch - 1;
            }
            else
            {
                end_index_stream += max_num_particles_per_stream;
            }
        }

        for (int j = 0; j < number_of_streams; j++)
        {
            hipStreamDestroy(cudaStreams[j]);
        }
        hipFree(x_dev);
        hipFree(y_dev);
        hipFree(z_dev);
        hipFree(u_dev);
        hipFree(v_dev);
        hipFree(w_dev);
        hipFree(q_dev);

        // advance the batch window; the last batch is clamped to npmax
        start_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH;
        if ( (start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH) > part->npmax)
        {
            end_index_batch = part->npmax - 1;
        }
        else
        {
            end_index_batch += NUMBER_OF_PARTICLES_PER_BATCH;
        }
    }

    // Copy the field arrays back to the host.
    // NOTE(review): the mover typically reads the fields without modifying
    // them; these copies may be unnecessary -- confirm against the kernel.
    hipMemcpy(field->Ex_flat, Ex_flat_dev, field_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(field->Ey_flat, Ey_flat_dev, field_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(field->Ez_flat, Ez_flat_dev, field_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(field->Bxn_flat, Bxn_flat_dev, field_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(field->Byn_flat, Byn_flat_dev, field_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(field->Bzn_flat, Bzn_flat_dev, field_bytes, hipMemcpyDeviceToHost);

    // Clean up
    hipFree(XN_flat_dev);
    hipFree(YN_flat_dev);
    hipFree(ZN_flat_dev);
    hipFree(Ex_flat_dev);
    hipFree(Ey_flat_dev);
    hipFree(Ez_flat_dev);
    hipFree(Bxn_flat_dev);
    hipFree(Byn_flat_dev);
    hipFree(Bzn_flat_dev);
    return(0);
}
/** Particle-to-grid interpolation on the GPU using mini-batching of particles
 *  plus HIP streams.
 *
 *  Grid coordinates and the interpolated moment arrays (J, rhon, pressure
 *  tensor) are copied to the device once; particle attributes are staged in
 *  mini-batches split across streams, the interP2G kernel accumulates into
 *  the moment arrays, and the moments are copied back at the end.
 */
void interpP2G_GPU_stream(struct particles* part, struct interpDensSpecies* ids, struct grid* grd)
{
    FPpart *x_dev = NULL, *y_dev = NULL, *z_dev = NULL, *u_dev = NULL, *v_dev = NULL, *w_dev = NULL;
    FPinterp * q_dev = NULL, *Jx_flat_dev = NULL, *Jy_flat_dev = NULL, *Jz_flat_dev = NULL, *rhon_flat_dev = NULL, *pxx_flat_dev = NULL, *pxy_flat_dev = NULL, *pxz_flat_dev = NULL, *pyy_flat_dev = NULL, *pyz_flat_dev = NULL, *pzz_flat_dev = NULL;
    FPfield *XN_flat_dev = NULL, *YN_flat_dev = NULL, *ZN_flat_dev = NULL;

    size_t free_bytes = 0;
    int i, start_index_batch, end_index_batch, number_of_batches;
    // BUG FIX: total_size_particles was an int, which overflows for large
    // npmax and then compares wrongly against the unsigned free_bytes.
    size_t total_size_particles;

    // Per-node array sizes in bytes, computed once in size_t.
    const size_t node_count = (size_t)grd->nxn * grd->nyn * grd->nzn;
    const size_t interp_bytes = node_count * sizeof(FPinterp);
    const size_t field_bytes = node_count * sizeof(FPfield);

    // Grid/moment arrays are assumed to fit on the GPU and are copied once;
    // mini-batching below only takes the particles into account.
    hipMalloc(&Jx_flat_dev, interp_bytes);
    hipMalloc(&Jy_flat_dev, interp_bytes);
    hipMalloc(&Jz_flat_dev, interp_bytes);
    hipMalloc(&rhon_flat_dev, interp_bytes);
    hipMalloc(&pxx_flat_dev, interp_bytes);
    hipMalloc(&pxy_flat_dev, interp_bytes);
    hipMalloc(&pxz_flat_dev, interp_bytes);
    hipMalloc(&pyy_flat_dev, interp_bytes);
    hipMalloc(&pyz_flat_dev, interp_bytes);
    hipMalloc(&pzz_flat_dev, interp_bytes);
    hipMalloc(&XN_flat_dev, field_bytes);
    hipMalloc(&YN_flat_dev, field_bytes);
    hipMalloc(&ZN_flat_dev, field_bytes);

    // BUG FIX: these host-to-device copies of the FPinterp moment arrays used
    // sizeof(FPfield), mismatching the sizeof(FPinterp) allocations above
    // (wrong byte counts whenever the two types differ in size).
    hipMemcpy(Jx_flat_dev, ids->Jx_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Jy_flat_dev, ids->Jy_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(Jz_flat_dev, ids->Jz_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(rhon_flat_dev, ids->rhon_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(pxx_flat_dev, ids->pxx_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(pxy_flat_dev, ids->pxy_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(pxz_flat_dev, ids->pxz_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(pyy_flat_dev, ids->pyy_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(pyz_flat_dev, ids->pyz_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(pzz_flat_dev, ids->pzz_flat, interp_bytes, hipMemcpyHostToDevice);
    hipMemcpy(XN_flat_dev, grd->XN_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(YN_flat_dev, grd->YN_flat, field_bytes, hipMemcpyHostToDevice);
    hipMemcpy(ZN_flat_dev, grd->ZN_flat, field_bytes, hipMemcpyHostToDevice);

    free_bytes = queryFreeMemoryOnGPU();
    // bytes needed to hold all particles at once: x,y,z,u,v,w plus q
    total_size_particles = sizeof(FPpart) * (size_t)part->npmax * 6 + sizeof(FPinterp) * (size_t)part->npmax;
    start_index_batch = 0, end_index_batch = 0;

    // Mini-batching only kicks in when the particles do not all fit in the
    // free device memory.
    if (free_bytes > total_size_particles)
    {
        start_index_batch = 0;
        end_index_batch = part->npmax - 1; // one batch spanning every particle
        number_of_batches = 1;
    }
    else
    {
        start_index_batch = 0;
        end_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH - 1; // NUMBER_OF_PARTICLES_PER_BATCH is a tuning parameter
        // BUG FIX: the batch count unconditionally added 1, creating a
        // spurious empty batch when npmax divides evenly; handle the exact
        // division case like mover_GPU_stream does.
        if (part->npmax % NUMBER_OF_PARTICLES_PER_BATCH != 0)
        {
            number_of_batches = part->npmax / NUMBER_OF_PARTICLES_PER_BATCH + 1; // round up (integer division)
        }
        else
        {
            number_of_batches = part->npmax / NUMBER_OF_PARTICLES_PER_BATCH;
        }
    }

    hipStream_t cudaStreams[MAX_NUMBER_OF_STREAMS];
    for (i = 0; i < number_of_batches; i++)
    {
        long int number_of_particles_batch = end_index_batch - start_index_batch + 1; // particles in this batch
        size_t batch_size = number_of_particles_batch * sizeof(FPpart); // bytes per attribute (x,y,z,u,v,w)
        long int number_of_particles_stream = 0, stream_size_per_attribute = 0, number_of_streams = 0, stream_offset = 0, offset = 0, start_index_stream = 0, end_index_stream = 0, max_num_particles_per_stream = 0;

        // per-batch device buffers for the particle attributes
        hipMalloc(&x_dev, batch_size);
        hipMalloc(&y_dev, batch_size);
        hipMalloc(&z_dev, batch_size);
        hipMalloc(&u_dev, batch_size);
        hipMalloc(&v_dev, batch_size);
        hipMalloc(&w_dev, batch_size);
        hipMalloc(&q_dev, number_of_particles_batch * sizeof(FPinterp));

        // Split the batch into equal stream slices; a remainder slice is
        // appended when the division is not exact.  number_of_streams can be
        // at most NUMBER_OF_STREAMS_PER_BATCH + 1 == MAX_NUMBER_OF_STREAMS.
        start_index_stream = 0;
        end_index_stream = start_index_stream + (number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH) - 1;
        max_num_particles_per_stream = number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH;
        if (number_of_particles_batch % NUMBER_OF_STREAMS_PER_BATCH != 0) // leftover particles need one extra stream
        {
            number_of_streams = NUMBER_OF_STREAMS_PER_BATCH + 1;
        }
        else
        {
            number_of_streams = NUMBER_OF_STREAMS_PER_BATCH;
        }
        for (int j = 0; j < number_of_streams; j++)
        {
            hipStreamCreate(&cudaStreams[j]);
        }

        for (int stream_idx = 0; stream_idx < number_of_streams; stream_idx++)
        {
            number_of_particles_stream = end_index_stream - start_index_stream + 1;
            stream_size_per_attribute = number_of_particles_stream * sizeof(FPpart); // for x,y,z,u,v,w
            stream_offset = start_index_stream;            // offset inside the batch buffers
            offset = stream_offset + start_index_batch;    // absolute offset into the host arrays

            // stage this stream's slice of every attribute onto the device
            hipMemcpyAsync(&x_dev[stream_offset], &part->x[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&y_dev[stream_offset], &part->y[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&z_dev[stream_offset], &part->z[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&u_dev[stream_offset], &part->u[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&v_dev[stream_offset], &part->v[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&w_dev[stream_offset], &part->w[offset], stream_size_per_attribute, hipMemcpyHostToDevice, cudaStreams[stream_idx]);
            hipMemcpyAsync(&q_dev[stream_offset], &part->q[offset], number_of_particles_stream * sizeof(FPinterp), hipMemcpyHostToDevice, cudaStreams[stream_idx]);

            // accumulate this slice's contribution into the moment arrays
            hipLaunchKernelGGL(( interP2G_kernel), dim3((number_of_particles_stream + TPB - 1)/TPB), dim3(TPB), 0, cudaStreams[stream_idx],
                x_dev, y_dev, z_dev, u_dev, v_dev, w_dev, q_dev,
                XN_flat_dev, YN_flat_dev, ZN_flat_dev,
                grd->nxn, grd->nyn, grd->nzn,
                grd->xStart, grd->yStart, grd->zStart,
                grd->invdx, grd->invdy, grd->invdz, grd->invVOL,
                Jx_flat_dev, Jy_flat_dev, Jz_flat_dev, rhon_flat_dev,
                pxx_flat_dev, pxy_flat_dev, pxz_flat_dev, pyy_flat_dev, pyz_flat_dev, pzz_flat_dev,
                number_of_particles_stream, stream_offset
            );

            // NOTE(review): interpolation presumably leaves the particles
            // unchanged, so these device-to-host copies of x..w may be
            // unnecessary -- confirm against the kernel and drop if so.
            hipMemcpyAsync(&part->x[offset], &x_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->y[offset], &y_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->z[offset], &z_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->u[offset], &u_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->v[offset], &v_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipMemcpyAsync(&part->w[offset], &w_dev[stream_offset], stream_size_per_attribute, hipMemcpyDeviceToHost, cudaStreams[stream_idx]);
            hipStreamSynchronize(cudaStreams[stream_idx]);

            // advance the stream window; the last slice is clamped to the batch
            start_index_stream = start_index_stream + max_num_particles_per_stream;
            if ( (start_index_stream + max_num_particles_per_stream) > number_of_particles_batch)
            {
                end_index_stream = number_of_particles_batch - 1;
            }
            else
            {
                end_index_stream += max_num_particles_per_stream;
            }
        }

        for (int j = 0; j < number_of_streams; j++)
        {
            hipStreamDestroy(cudaStreams[j]);
        }
        hipFree(x_dev);
        hipFree(y_dev);
        hipFree(z_dev);
        hipFree(u_dev);
        hipFree(v_dev);
        hipFree(w_dev);
        hipFree(q_dev);

        // advance the batch window; the last batch is clamped to npmax
        start_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH;
        if ((start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH) > part->npmax)
        {
            end_index_batch = part->npmax - 1;
        }
        else
        {
            end_index_batch += NUMBER_OF_PARTICLES_PER_BATCH;
        }
    }

    // Copy memory back to CPU (only the parts that have been modified inside the kernel)
    hipMemcpy(ids->Jx_flat, Jx_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->Jy_flat, Jy_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->Jz_flat, Jz_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->rhon_flat, rhon_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->pxx_flat, pxx_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->pxy_flat, pxy_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->pxz_flat, pxz_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->pyy_flat, pyy_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->pyz_flat, pyz_flat_dev, interp_bytes, hipMemcpyDeviceToHost);
    hipMemcpy(ids->pzz_flat, pzz_flat_dev, interp_bytes, hipMemcpyDeviceToHost);

    // Clean up
    hipFree(Jx_flat_dev);
    hipFree(Jy_flat_dev);
    hipFree(Jz_flat_dev);
    hipFree(XN_flat_dev);
    hipFree(YN_flat_dev);
    hipFree(ZN_flat_dev);
    hipFree(rhon_flat_dev);
    hipFree(pxx_flat_dev);
    hipFree(pxy_flat_dev);
    hipFree(pxz_flat_dev);
    hipFree(pyy_flat_dev);
    hipFree(pyz_flat_dev);
    hipFree(pzz_flat_dev);
} | bf41de4d80cdda0b214ce49948c2b8240203dcb1.cu | #include "Particles.h"
#include "ParticlesBatching.h"
#include "ParticlesStreaming.h"
#include <cuda.h>
#include <cuda_runtime.h>
#define NUMBER_OF_PARTICLES_PER_BATCH 1024000
#define MAX_NUMBER_OF_STREAMS 5
#define NUMBER_OF_STREAMS_PER_BATCH 4
/** Particle mover on the GPU using mini-batching of particles plus CUDA
 *  streams.
 *
 *  Grid and field arrays are copied to the device once up front. The particle
 *  attribute arrays (x,y,z,u,v,w and charge q) are processed in mini-batches
 *  only when they do not fit in the free device memory; each batch is further
 *  split across NUMBER_OF_STREAMS_PER_BATCH streams (plus one remainder
 *  stream) so host<->device copies and kernel execution can overlap.
 *
 *  NOTE(review): part->x etc. look like ordinary (pageable) host arrays;
 *  cudaMemcpyAsync only truly overlaps with pinned memory -- confirm how the
 *  particle arrays are allocated.
 *
 *  Returns 0 on completion (CUDA API return codes are not checked).
 */
int mover_GPU_stream(struct particles* part, struct EMfield* field, struct grid* grd, struct parameters* param)
{
    // print species and subcycling info
    std::cout << "***GPU MOVER with SUBCYCLYING "<< param->n_sub_cycles << " - species " << part->species_ID << " ***" << std::endl;

    // auxiliary variables: sub-cycle time step, its half, and q/m * dt/2
    FPpart dt_sub_cycling = (FPpart) param->dt/((double) part->n_sub_cycles);
    FPpart dto2 = .5*dt_sub_cycling, qomdt2 = part->qom*dto2/param->c;

    // device pointers for particle attributes and grid/field arrays.
    // BUG FIX: Byn_flat_dev was the only pointer left uninitialized.
    FPpart *x_dev = NULL, *y_dev = NULL, *z_dev = NULL, *u_dev = NULL, *v_dev = NULL, *w_dev = NULL;
    FPinterp *q_dev = NULL;
    FPfield *XN_flat_dev = NULL, *YN_flat_dev = NULL, *ZN_flat_dev = NULL, *Ex_flat_dev = NULL, *Ey_flat_dev = NULL, *Ez_flat_dev = NULL, *Bxn_flat_dev = NULL, *Byn_flat_dev = NULL, *Bzn_flat_dev = NULL;

    size_t free_bytes = 0;
    int i, start_index_batch, end_index_batch, number_of_batches;
    // BUG FIX: total_size_particles was an int, which overflows for large
    // npmax (tens of millions of particles) and then compares wrongly
    // against the unsigned size_t free_bytes.
    size_t total_size_particles;

    // Bytes of one per-node grid/field array, computed in size_t so the
    // product cannot overflow int for large grids.
    const size_t field_bytes = (size_t)grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield);

    // The grid and field arrays are assumed to fit on the GPU and are copied
    // once; mini-batching below only takes the particles into account.
    cudaMalloc(&XN_flat_dev, field_bytes);
    cudaMalloc(&YN_flat_dev, field_bytes);
    cudaMalloc(&ZN_flat_dev, field_bytes);
    cudaMalloc(&Ex_flat_dev, field_bytes);
    cudaMalloc(&Ey_flat_dev, field_bytes);
    cudaMalloc(&Ez_flat_dev, field_bytes);
    cudaMalloc(&Bxn_flat_dev, field_bytes);
    cudaMalloc(&Byn_flat_dev, field_bytes);
    cudaMalloc(&Bzn_flat_dev, field_bytes);

    cudaMemcpy(XN_flat_dev, grd->XN_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(YN_flat_dev, grd->YN_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(ZN_flat_dev, grd->ZN_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(Ex_flat_dev, field->Ex_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(Ey_flat_dev, field->Ey_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(Ez_flat_dev, field->Ez_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(Bxn_flat_dev, field->Bxn_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(Byn_flat_dev, field->Byn_flat, field_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(Bzn_flat_dev, field->Bzn_flat, field_bytes, cudaMemcpyHostToDevice);

    free_bytes = queryFreeMemoryOnGPU();
    // bytes needed to hold all particles at once: x,y,z,u,v,w plus q
    total_size_particles = sizeof(FPpart) * (size_t)part->npmax * 6 + sizeof(FPinterp) * (size_t)part->npmax;
    start_index_batch = 0, end_index_batch = 0;

    // Mini-batching only kicks in when the particles do not all fit in the
    // free device memory.
    if (free_bytes > total_size_particles)
    {
        start_index_batch = 0;
        end_index_batch = part->npmax - 1; // one batch spanning every particle
        number_of_batches = 1;
    }
    else
    {
        start_index_batch = 0;
        end_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH - 1; // NUMBER_OF_PARTICLES_PER_BATCH is a tuning parameter
        if (part->npmax % NUMBER_OF_PARTICLES_PER_BATCH != 0)
        {
            number_of_batches = part->npmax / NUMBER_OF_PARTICLES_PER_BATCH + 1; // round up (integer division)
        }
        else
        {
            number_of_batches = part->npmax / NUMBER_OF_PARTICLES_PER_BATCH;
        }
    }

    cudaStream_t cudaStreams[MAX_NUMBER_OF_STREAMS];
    for (i = 0; i < number_of_batches; i++)
    {
        long int number_of_particles_batch = end_index_batch - start_index_batch + 1; // particles in this batch
        size_t batch_size_per_attribute = number_of_particles_batch * sizeof(FPpart); // bytes per attribute (x,y,z,u,v,w)
        long int number_of_particles_stream = 0, stream_size_per_attribute = 0, number_of_streams = 0, stream_offset = 0, offset = 0, start_index_stream = 0, end_index_stream = 0, max_num_particles_per_stream = 0;

        // per-batch device buffers for the particle attributes
        cudaMalloc(&x_dev, batch_size_per_attribute);
        cudaMalloc(&y_dev, batch_size_per_attribute);
        cudaMalloc(&z_dev, batch_size_per_attribute);
        cudaMalloc(&u_dev, batch_size_per_attribute);
        cudaMalloc(&v_dev, batch_size_per_attribute);
        cudaMalloc(&w_dev, batch_size_per_attribute);
        cudaMalloc(&q_dev, number_of_particles_batch * sizeof(FPinterp));

        // Split the batch into equal stream slices; a remainder slice is
        // appended when the division is not exact.  number_of_streams can be
        // at most NUMBER_OF_STREAMS_PER_BATCH + 1 == MAX_NUMBER_OF_STREAMS.
        start_index_stream = 0;
        end_index_stream = start_index_stream + (number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH) - 1;
        max_num_particles_per_stream = number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH;
        if (number_of_particles_batch % NUMBER_OF_STREAMS_PER_BATCH != 0) // leftover particles need one extra stream
        {
            number_of_streams = NUMBER_OF_STREAMS_PER_BATCH + 1;
        }
        else
        {
            number_of_streams = NUMBER_OF_STREAMS_PER_BATCH;
        }
        for (int j = 0; j < number_of_streams; j++)
        {
            cudaStreamCreate(&cudaStreams[j]);
        }

        for (int stream_idx = 0; stream_idx < number_of_streams; stream_idx++)
        {
            number_of_particles_stream = end_index_stream - start_index_stream + 1;
            stream_size_per_attribute = number_of_particles_stream * sizeof(FPpart); // for x,y,z,u,v,w
            stream_offset = start_index_stream;            // offset inside the batch buffers
            offset = stream_offset + start_index_batch;    // absolute offset into the host arrays

            // stage this stream's slice of every attribute onto the device
            cudaMemcpyAsync(&x_dev[stream_offset], &part->x[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&y_dev[stream_offset], &part->y[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&z_dev[stream_offset], &part->z[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&u_dev[stream_offset], &part->u[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&v_dev[stream_offset], &part->v[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&w_dev[stream_offset], &part->w[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&q_dev[stream_offset], &part->q[offset], number_of_particles_stream * sizeof(FPinterp), cudaMemcpyHostToDevice, cudaStreams[stream_idx]);

            // start subcycling: one kernel launch per sub-cycle, all enqueued
            // on this stream so they run in order after the copies above
            for (int i_sub=0; i_sub < part->n_sub_cycles; i_sub++){
                single_particle_kernel<<<(number_of_particles_stream + TPB - 1)/TPB, TPB, 0, cudaStreams[stream_idx]>>>(
                    x_dev, y_dev, z_dev, u_dev, v_dev, w_dev, q_dev,
                    XN_flat_dev, YN_flat_dev, ZN_flat_dev,
                    grd->nxn, grd->nyn, grd->nzn,
                    grd->xStart, grd->yStart, grd->zStart,
                    grd->invdx, grd->invdy, grd->invdz,
                    grd->Lx, grd->Ly, grd->Lz, grd->invVOL,
                    Ex_flat_dev, Ey_flat_dev, Ez_flat_dev,
                    Bxn_flat_dev, Byn_flat_dev, Bzn_flat_dev,
                    param->PERIODICX, param->PERIODICY, param->PERIODICZ,
                    dt_sub_cycling, dto2, qomdt2,
                    part->NiterMover, number_of_particles_stream, stream_offset
                );
            } // end of subcycling

            // copy the updated slice back to the host.
            // NOTE(review): q is copied back too, even though the charge is
            // presumably not modified by the mover -- confirm and drop if so.
            cudaMemcpyAsync(&part->x[offset], &x_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&part->y[offset], &y_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&part->z[offset], &z_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&part->u[offset], &u_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&part->v[offset], &v_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&part->w[offset], &w_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
            cudaMemcpyAsync(&part->q[offset], &q_dev[stream_offset], number_of_particles_stream * sizeof(FPinterp), cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
            cudaStreamSynchronize(cudaStreams[stream_idx]);

            // advance the stream window; the last slice is clamped to the batch
            start_index_stream = start_index_stream + max_num_particles_per_stream;
            if ( (start_index_stream + max_num_particles_per_stream) > number_of_particles_batch)
            {
                end_index_stream = number_of_particles_batch - 1;
            }
            else
            {
                end_index_stream += max_num_particles_per_stream;
            }
        }

        for (int j = 0; j < number_of_streams; j++)
        {
            cudaStreamDestroy(cudaStreams[j]);
        }
        cudaFree(x_dev);
        cudaFree(y_dev);
        cudaFree(z_dev);
        cudaFree(u_dev);
        cudaFree(v_dev);
        cudaFree(w_dev);
        cudaFree(q_dev);

        // advance the batch window; the last batch is clamped to npmax
        start_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH;
        if ( (start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH) > part->npmax)
        {
            end_index_batch = part->npmax - 1;
        }
        else
        {
            end_index_batch += NUMBER_OF_PARTICLES_PER_BATCH;
        }
    }

    // Copy the field arrays back to the host.
    // NOTE(review): the mover typically reads the fields without modifying
    // them; these copies may be unnecessary -- confirm against the kernel.
    cudaMemcpy(field->Ex_flat, Ex_flat_dev, field_bytes, cudaMemcpyDeviceToHost);
    cudaMemcpy(field->Ey_flat, Ey_flat_dev, field_bytes, cudaMemcpyDeviceToHost);
    cudaMemcpy(field->Ez_flat, Ez_flat_dev, field_bytes, cudaMemcpyDeviceToHost);
    cudaMemcpy(field->Bxn_flat, Bxn_flat_dev, field_bytes, cudaMemcpyDeviceToHost);
    cudaMemcpy(field->Byn_flat, Byn_flat_dev, field_bytes, cudaMemcpyDeviceToHost);
    cudaMemcpy(field->Bzn_flat, Bzn_flat_dev, field_bytes, cudaMemcpyDeviceToHost);

    // Clean up
    cudaFree(XN_flat_dev);
    cudaFree(YN_flat_dev);
    cudaFree(ZN_flat_dev);
    cudaFree(Ex_flat_dev);
    cudaFree(Ey_flat_dev);
    cudaFree(Ez_flat_dev);
    cudaFree(Bxn_flat_dev);
    cudaFree(Byn_flat_dev);
    cudaFree(Bzn_flat_dev);
    return(0);
}
/** Interpolation with batching */
/** Interpolation (particle-to-grid) with batching and streaming.
 *
 * The grid node coordinates and the moment arrays are copied to the device
 * once.  Particle data is then processed in mini-batches: if all particles fit
 * in free device memory a single batch is used, otherwise batches of
 * NUMBER_OF_PARTICLES_PER_BATCH particles.  Each batch is further sliced into
 * NUMBER_OF_STREAMS_PER_BATCH stream chunks so host<->device copies can
 * overlap with interP2G_kernel execution.  The accumulated moments are copied
 * back to the host at the end.
 *
 * \param part particle data (positions, velocities, charge)
 * \param ids  interpolated densities / moments that the kernel accumulates into
 * \param grd  computational grid
 */
void interpP2G_GPU_stream(struct particles* part, struct interpDensSpecies* ids, struct grid* grd)
{
	FPpart *x_dev = NULL, *y_dev = NULL, *z_dev = NULL, *u_dev = NULL, *v_dev = NULL, *w_dev = NULL;
	FPinterp * q_dev = NULL, *Jx_flat_dev = NULL, *Jy_flat_dev = NULL, *Jz_flat_dev = NULL, *rhon_flat_dev = NULL, *pxx_flat_dev = NULL, *pxy_flat_dev = NULL, *pxz_flat_dev = NULL, *pyy_flat_dev = NULL, *pyz_flat_dev = NULL, *pzz_flat_dev = NULL;
	FPfield *XN_flat_dev = NULL, *YN_flat_dev = NULL, *ZN_flat_dev = NULL;
	size_t free_bytes = 0;
	int i, start_index_batch, end_index_batch, number_of_batches;
	// size_t (was int): sizeof(FPpart) * npmax * 6 can overflow a 32-bit int
	// for large particle counts
	size_t total_size_particles;
	// Calculation done later to compute free space after allocating space on the GPU for
	// other variables below, the assumption is that these variables fit in the GPU memory
	// and mini batching is implemented only taking into account particles
	cudaMalloc(&Jx_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&Jy_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&Jz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&rhon_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&pxx_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&pxy_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&pxz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&pyy_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&pyz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&pzz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp));
	cudaMalloc(&XN_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield));
	cudaMalloc(&YN_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield));
	cudaMalloc(&ZN_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield));
	// The moment arrays are FPinterp, and the buffers above were sized with
	// sizeof(FPinterp); the copies must use the same element size (the
	// original used sizeof(FPfield) here, which under-copies whenever the two
	// typedefs differ in size).
	cudaMemcpy(Jx_flat_dev, ids->Jx_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(Jy_flat_dev, ids->Jy_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(Jz_flat_dev, ids->Jz_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(rhon_flat_dev, ids->rhon_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(pxx_flat_dev, ids->pxx_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(pxy_flat_dev, ids->pxy_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(pxz_flat_dev, ids->pxz_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(pyy_flat_dev, ids->pyy_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(pyz_flat_dev, ids->pyz_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(pzz_flat_dev, ids->pzz_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyHostToDevice);
	cudaMemcpy(XN_flat_dev, grd->XN_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield), cudaMemcpyHostToDevice);
	cudaMemcpy(YN_flat_dev, grd->YN_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield), cudaMemcpyHostToDevice);
	cudaMemcpy(ZN_flat_dev, grd->ZN_flat, grd->nxn * grd->nyn * grd->nzn * sizeof(FPfield), cudaMemcpyHostToDevice);
	free_bytes = queryFreeMemoryOnGPU();
	total_size_particles = sizeof(FPpart) * part->npmax * 6 + sizeof(FPinterp) * part->npmax; // for x,y,z,u,v,w and q
	start_index_batch = 0, end_index_batch = 0;
	// implement mini-batching only in the case where the free space on the GPU isn't enough
	if(free_bytes > total_size_particles)
	{
		start_index_batch = 0;
		end_index_batch = part->npmax - 1 ; // set end_index to the last particle as we are processing it in one batch
		number_of_batches = 1;
	}
	else
	{
		start_index_batch = 0;
		end_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH - 1; // NUM_PARTICLES_PER_BATCH is a hyperparameter set by tuning
		// ceiling division; the original `npmax / BATCH + 1` produced a
		// spurious empty trailing batch whenever npmax was an exact multiple
		// of the batch size
		number_of_batches = (part->npmax + NUMBER_OF_PARTICLES_PER_BATCH - 1) / NUMBER_OF_PARTICLES_PER_BATCH;
	}
	cudaStream_t cudaStreams[MAX_NUMBER_OF_STREAMS];
	for(i = 0; i < number_of_batches; i++)
	{
		long int number_of_particles_batch = end_index_batch - start_index_batch + 1; // number of particles in a batch
		size_t batch_size = number_of_particles_batch * sizeof(FPpart); // size of the batch in bytes
		long int number_of_particles_stream = 0, stream_size_per_attribute = 0, number_of_streams = 0, stream_offset = 0, offset = 0, start_index_stream = 0, end_index_stream = 0, max_num_particles_per_stream = 0;
		// per-batch particle buffers (freed at the end of the batch)
		cudaMalloc(&x_dev, batch_size);
		cudaMalloc(&y_dev, batch_size);
		cudaMalloc(&z_dev, batch_size);
		cudaMalloc(&u_dev, batch_size);
		cudaMalloc(&v_dev, batch_size);
		cudaMalloc(&w_dev, batch_size);
		cudaMalloc(&q_dev, number_of_particles_batch * sizeof(FPinterp));
		start_index_stream = 0;
		end_index_stream = start_index_stream + (number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH) - 1;
		max_num_particles_per_stream = number_of_particles_batch / NUMBER_OF_STREAMS_PER_BATCH;
		if(number_of_particles_batch % NUMBER_OF_STREAMS_PER_BATCH != 0) // We have some leftover particles
		{
			number_of_streams = NUMBER_OF_STREAMS_PER_BATCH + 1;
		}
		else
		{
			number_of_streams = NUMBER_OF_STREAMS_PER_BATCH;
		}
		for (int j = 0; j < number_of_streams; j++)
		{
			cudaStreamCreate(&cudaStreams[j]);
		}
		for (int stream_idx = 0; stream_idx < number_of_streams; stream_idx++)
		{
			number_of_particles_stream = end_index_stream - start_index_stream + 1;
			stream_size_per_attribute = number_of_particles_stream * sizeof(FPpart); // for x,y,z,u,v,w
			stream_offset = start_index_stream;
			offset = stream_offset + start_index_batch; // batch offset + stream_offset
			// stage this stream's slice of the particle arrays on the device
			cudaMemcpyAsync(&x_dev[stream_offset], &part->x[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&y_dev[stream_offset], &part->y[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&z_dev[stream_offset], &part->z[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&u_dev[stream_offset], &part->u[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&v_dev[stream_offset], &part->v[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&w_dev[stream_offset], &part->w[offset], stream_size_per_attribute, cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&q_dev[stream_offset], &part->q[offset], number_of_particles_stream * sizeof(FPinterp), cudaMemcpyHostToDevice, cudaStreams[stream_idx]);
			// Call GPU kernel
			interP2G_kernel<<<(number_of_particles_stream + TPB - 1)/TPB, TPB, 0, cudaStreams[stream_idx]>>>(
				x_dev, y_dev, z_dev, u_dev, v_dev, w_dev, q_dev,
				XN_flat_dev, YN_flat_dev, ZN_flat_dev,
				grd->nxn, grd->nyn, grd->nzn,
				grd->xStart, grd->yStart, grd->zStart,
				grd->invdx, grd->invdy, grd->invdz, grd->invVOL,
				Jx_flat_dev, Jy_flat_dev, Jz_flat_dev, rhon_flat_dev,
				pxx_flat_dev , pxy_flat_dev, pxz_flat_dev, pyy_flat_dev, pyz_flat_dev, pzz_flat_dev,
				number_of_particles_stream, stream_offset
			);
			// copy the (unchanged here, but kept symmetric with the mover)
			// particle attributes back to the host
			cudaMemcpyAsync(&part->x[offset], &x_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&part->y[offset], &y_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&part->z[offset], &z_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&part->u[offset], &u_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&part->v[offset], &v_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
			cudaMemcpyAsync(&part->w[offset], &w_dev[stream_offset], stream_size_per_attribute, cudaMemcpyDeviceToHost, cudaStreams[stream_idx]);
			cudaStreamSynchronize(cudaStreams[stream_idx]);
			// advance the stream window; the final (leftover) stream is
			// clamped to the end of the batch
			start_index_stream = start_index_stream + max_num_particles_per_stream;
			if( (start_index_stream + max_num_particles_per_stream) > number_of_particles_batch)
			{
				end_index_stream = number_of_particles_batch - 1;
			}
			else
			{
				end_index_stream += max_num_particles_per_stream;
			}
		}
		for(int j = 0; j < number_of_streams; j++)
		{
			cudaStreamDestroy(cudaStreams[j]);
		}
		cudaFree(x_dev);
		cudaFree(y_dev);
		cudaFree(z_dev);
		cudaFree(u_dev);
		cudaFree(v_dev);
		cudaFree(w_dev);
		cudaFree(q_dev);
		// Update indices for next batch (last batch clamped to npmax)
		start_index_batch = start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH;
		if ((start_index_batch + NUMBER_OF_PARTICLES_PER_BATCH) > part->npmax)
		{
			end_index_batch = part->npmax - 1;
		}
		else
		{
			end_index_batch += NUMBER_OF_PARTICLES_PER_BATCH;
		}
	}
	// Copy memory back to CPU (only the parts that have been modified inside the kernel)
	cudaMemcpy(ids->Jx_flat, Jx_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->Jy_flat, Jy_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->Jz_flat, Jz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->rhon_flat, rhon_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->pxx_flat, pxx_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->pxy_flat, pxy_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->pxz_flat, pxz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->pyy_flat, pyy_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->pyz_flat, pyz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	cudaMemcpy(ids->pzz_flat, pzz_flat_dev, grd->nxn * grd->nyn * grd->nzn * sizeof(FPinterp), cudaMemcpyDeviceToHost);
	// Clean up
	cudaFree(Jx_flat_dev);
	cudaFree(Jy_flat_dev);
	cudaFree(Jz_flat_dev);
	cudaFree(XN_flat_dev);
	cudaFree(YN_flat_dev);
	cudaFree(ZN_flat_dev);
	cudaFree(rhon_flat_dev);
	cudaFree(pxx_flat_dev);
	cudaFree(pxy_flat_dev);
	cudaFree(pxz_flat_dev);
	cudaFree(pyy_flat_dev);
	cudaFree(pyz_flat_dev);
	cudaFree(pzz_flat_dev);
} |
65cb82f8932af0763ebd3aca956ffa0f206b1779.hip | // !!! This is a file automatically generated by hipify!!!
//
// Created by songzeceng on 2020/11/26.
//
#include "hip/hip_runtime.h"
#include "kernel.h"
#include <stdio.h>
#define TPB 128
#define RAD 1
// Central-difference second derivative: d_out[i] ~= (in[i-1] - 2*in[i] + in[i+1]) / h^2.
// Expects (blockDim.x + RAD) * sizeof(float) of dynamic shared memory.
__global__ void ddKernel(float *d_out, float *d_in, int size, float h) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    int s_idx = threadIdx.x;
    extern __shared__ float s_in[];
    // Guard the load instead of returning early: an early return would make
    // out-of-range threads skip __syncthreads(), which is undefined behaviour
    // whenever size is not a multiple of blockDim.x.
    if (i < size) {
        s_in[s_idx] = d_in[i];
    }
    // The last thread of each block loads the right halo element; previously
    // s_in[s_idx + 1] was read uninitialized at every block boundary.
    if (s_idx == blockDim.x - 1 && i + 1 < size) {
        s_in[s_idx + 1] = d_in[i + 1];
    }
    __syncthreads();
    // Skip the first thread of the tile (no left neighbour loaded) and the
    // last grid point (no right neighbour exists in d_in).
    if (threadIdx.x > 0 && i < size - 1) {
        float value = (s_in[s_idx - 1] - 2.f * s_in[s_idx] + s_in[s_idx + 1]) / (h * h);
        d_out[i] = value;
    }
}
// Host wrapper: computes the discrete second derivative of `in` (length n,
// grid spacing h) on the GPU and writes it into `out`.
// Allocates two device buffers, copies the input over, launches ddKernel with
// (TPB + RAD) floats of dynamic shared memory (tile + halo slot), and copies
// the result back.  The blocking hipMemcpy at the end also synchronizes the
// kernel launch.
// NOTE(review): none of the HIP return codes are checked -- consider wrapping
// the calls in an error-checking macro.
void ddParallel(float *out, float *in, int n, float h) {
    float *d_in, *d_out;
    int nBytes = n * sizeof(float );
    hipMalloc(&d_in, nBytes);
    hipMalloc(&d_out, nBytes);
    hipMemcpy(d_in, in, nBytes, hipMemcpyHostToDevice);
    hipLaunchKernelGGL(( ddKernel), dim3((n + TPB - 1) / TPB), dim3(TPB), (TPB + RAD) * sizeof(float ), 0, d_out, d_in, n, h);
    hipMemcpy(out, d_out, nBytes, hipMemcpyDeviceToHost);
    hipFree(d_in);
    hipFree(d_out);
} | 65cb82f8932af0763ebd3aca956ffa0f206b1779.cu | //
// Created by songzeceng on 2020/11/26.
//
#include "cuda_runtime.h"
#include "kernel.h"
#include <stdio.h>
#define TPB 128
#define RAD 1
// Central-difference second derivative: d_out[i] ~= (in[i-1] - 2*in[i] + in[i+1]) / h^2.
// Expects (blockDim.x + RAD) * sizeof(float) of dynamic shared memory.
__global__ void ddKernel(float *d_out, float *d_in, int size, float h) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    int s_idx = threadIdx.x;
    extern __shared__ float s_in[];
    // Guard the load instead of returning early: an early return would make
    // out-of-range threads skip __syncthreads(), which is undefined behaviour
    // whenever size is not a multiple of blockDim.x.
    if (i < size) {
        s_in[s_idx] = d_in[i];
    }
    // The last thread of each block loads the right halo element; previously
    // s_in[s_idx + 1] was read uninitialized at every block boundary.
    if (s_idx == blockDim.x - 1 && i + 1 < size) {
        s_in[s_idx + 1] = d_in[i + 1];
    }
    __syncthreads();
    // Skip the first thread of the tile (no left neighbour loaded) and the
    // last grid point (no right neighbour exists in d_in).
    if (threadIdx.x > 0 && i < size - 1) {
        float value = (s_in[s_idx - 1] - 2.f * s_in[s_idx] + s_in[s_idx + 1]) / (h * h);
        d_out[i] = value;
    }
}
// Host wrapper: computes the discrete second derivative of `in` (length n,
// grid spacing h) on the GPU and writes it into `out`.
// Allocates two device buffers, copies the input over, launches ddKernel with
// (TPB + RAD) floats of dynamic shared memory (tile + halo slot), and copies
// the result back.  The blocking cudaMemcpy at the end also synchronizes the
// kernel launch.
// NOTE(review): none of the CUDA return codes are checked, and there is no
// cudaGetLastError() after the launch -- consider an error-checking macro.
void ddParallel(float *out, float *in, int n, float h) {
    float *d_in, *d_out;
    int nBytes = n * sizeof(float );
    cudaMalloc(&d_in, nBytes);
    cudaMalloc(&d_out, nBytes);
    cudaMemcpy(d_in, in, nBytes, cudaMemcpyHostToDevice);
    ddKernel<<<(n + TPB - 1) / TPB, TPB, (TPB + RAD) * sizeof(float )>>>(d_out, d_in, n, h);
    cudaMemcpy(out, d_out, nBytes, cudaMemcpyDeviceToHost);
    cudaFree(d_in);
    cudaFree(d_out);
} |
833ac4ce67d6c70a19a68ec0b7429b10bf332e7a.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/***************************************************************************//**
* \file bodies.cu
* \author Anush Krishnan (anush@bu.edu)
* \author Christopher Minar (minarc@oregonstate.edu)
* \brief Implementation of the methods of the class \c bodies.
*/
#include "bodies.h"
#include <cusp/blas/blas.h>
#include <iomanip>
#include <fstream>
/**
* \brief Sets initial position and velocity of each body.
*
* \param db database that contains all the simulation parameters
* \param D information about the computational grid
*/
void bodies::initialise(parameterDB &db, domain &D)
{
	std::vector<body> *B = db["flow"]["bodies"].get<std::vector<body> *>();
	// number of bodies in the flow
	numBodies = B->size();
	//oscylinder
	// Oscillating-cylinder parameters are taken from the FIRST body only.
	// NOTE(review): (*B)[0] is read before numBodies is checked -- confirm the
	// simulation always declares at least one body before reaching here.
	xCoeff = (*B)[0].xCoefficient;
	yCoeff = (*B)[0].yCoefficient;
	uCoeff = (*B)[0].uCoefficient;
	vCoeff = (*B)[0].vCoefficient;
	xfrequency = (*B)[0].xfrequency;
	yfrequency = (*B)[0].yfrequency;
	xPhase = (*B)[0].xPhase;
	yPhase = (*B)[0].yPhase;
	uPhase = (*B)[0].uPhase;
	vPhase = (*B)[0].vPhase;
	// set the sizes of all the arrays
	// per-body bookkeeping: point counts/offsets, scaled bounding boxes
	// (xmin..ymax, startI/J, numCellsX/Y) and tight bounding boxes
	// (xmin0..ymax0, startI0/J0, numCellsX0/Y0)
	numPoints.resize(numBodies);
	offsets.resize(numBodies);
	startI.resize(numBodies);
	startJ.resize(numBodies);
	numCellsX.resize(numBodies);
	numCellsY.resize(numBodies);
	startI0.resize(numBodies);
	startJ0.resize(numBodies);
	numCellsX0.resize(numBodies);
	numCellsY0.resize(numBodies);
	xmin.resize(numBodies);
	xmax.resize(numBodies);
	ymin.resize(numBodies);
	ymax.resize(numBodies);
	xmin0.resize(numBodies);
	xmax0.resize(numBodies);
	ymin0.resize(numBodies);
	ymax0.resize(numBodies);
	xleft.resize(numBodies);
	xright.resize(numBodies);
	ytop.resize(numBodies);
	ybot.resize(numBodies);
	// calculate offsets, number of points in each body and the total number of points
	totalPoints = 0;
	for(int k=0; k<numBodies; k++)
	{
		offsets[k] = totalPoints;
		numPoints[k] = (*B)[k].numPoints;
		totalPoints += numPoints[k];
	}
	// fill up coordinates of body points
	// X/Y are the reference (undisplaced) coordinates; x/y (below) hold the
	// current, updated positions
	X.resize(totalPoints);
	Y.resize(totalPoints);
	ds.resize(totalPoints);
	ones.resize(totalPoints);
	cusp::blas::fill(ones, 1.0);
	for(int k=0; k<numBodies; k++)
	{
		for(int i=0; i<numPoints[k]; i++)
		{
			X[i+offsets[k]] = (*B)[k].X[i];
			Y[i+offsets[k]] = (*B)[k].Y[i];
		}
	}
	// per-point working arrays: current positions, offsets from the centroid,
	// velocities (current, iterate k, previous step 0), force components, and
	// the interpolation stencil scratch arrays (x1..q4, point*_x/y)
	x.resize(totalPoints);
	y.resize(totalPoints);
	dx.resize(totalPoints);
	dy.resize(totalPoints);
	xk.resize(totalPoints);
	yk.resize(totalPoints);
	uB.resize(totalPoints);
	vB.resize(totalPoints);
	uBk.resize(totalPoints);
	vBk.resize(totalPoints);
	uB0.resize(totalPoints);
	vB0.resize(totalPoints);
	force_dudn.resize(totalPoints);
	force_dvdn.resize(totalPoints);
	force_pressure.resize(totalPoints);
	force_x.resize(totalPoints);
	force_y.resize(totalPoints);
	x1.resize(totalPoints);
	x2.resize(totalPoints);
	x3.resize(totalPoints);
	x4.resize(totalPoints);
	y1.resize(totalPoints);
	y2.resize(totalPoints);
	y3.resize(totalPoints);
	y4.resize(totalPoints);
	q1.resize(totalPoints);
	q2.resize(totalPoints);
	q3.resize(totalPoints);
	q4.resize(totalPoints);
	point_y.resize(totalPoints);
	point_x.resize(totalPoints);
	point2_y.resize(totalPoints);
	point2_x.resize(totalPoints);
	point3_y.resize(totalPoints);
	point3_x.resize(totalPoints);
	centerVelocityU = 0;
	centerVelocityV = 0;
	cusp::blas::fill(vB, 0);
	cusp::blas::fill(uB, 0);
	cusp::blas::fill(vBk, 0);
	cusp::blas::fill(uBk, 0);
	// refresh the raw-pointer mirrors after all resizes (resizing can
	// reallocate the underlying storage)
	cast();
	bodiesMove = false;
	for(int k=0; k<numBodies; k++)
	{
		// assume a closed body (closed loop)
		for(int i=offsets[k], j = offsets[k]+numPoints[k]-1; i<offsets[k]+numPoints[k];)
		{
			// calculate the lengths of the boundary segments
			ds[i] = sqrt( (X[i]-X[j])*(X[i]-X[j]) + (Y[i]-Y[j])*(Y[i]-Y[j]) );
			// j takes the value of i, then i is incremented
			j = i++;
		}
		// if the body is moving, set bodiesMove to true
		bodiesMove = bodiesMove || (*B)[k].moving[0] || (*B)[k].moving[1];
	}
	// set initial position of the body
	update(db, D, 0.0);
	if(numBodies)
	{
		// tight boxes first, then the scaled control-volume boxes
		calculateTightBoundingBoxes(db, D);
		calculateBoundingBoxes(db, D);
		numCellsXHost = numCellsX[0];
		numCellsYHost = numCellsY[0];
	}
	// compute the centroid of all body points and each point's offset from it
	midX=0;
	midY=0;
	midX0=0;
	midY0=0;
	midXk=0;
	midYk=0;
	for (int i=0;i<totalPoints;i++)
	{
		midX += x[i];
		midY += y[i];
	}
	midX /= totalPoints;
	midY /= totalPoints;
	for (int i = 0; i<totalPoints; i++)
	{
		dx[i] = x[i] - midX;
		dy[i] = y[i] - midY;
	}
	midX0=midX;
	midY0=midY;
	// body centre starts at rest
	centerVelocityV = 0;
	centerVelocityU = 0;
	centerVelocityVk = 0;
	centerVelocityUk = 0;
	centerVelocityU0= 0;
	centerVelocityV0= 0;
}
/**
 * \brief Refreshes the raw device pointers that mirror the thrust/cusp arrays.
 *
 * Must be called after any resize, since resizing can reallocate the
 * underlying device storage and invalidate previously cached pointers.
 */
void bodies::cast()
{
	numPoints_r = thrust::raw_pointer_cast ( &(numPoints[0]));
	offsets_r = thrust::raw_pointer_cast ( &(offsets[0]));
	startI_r = thrust::raw_pointer_cast ( &(startI[0]));
	startJ_r = thrust::raw_pointer_cast ( &(startJ[0]));
	numCellsX_r = thrust::raw_pointer_cast ( &(numCellsX[0]));
	numCellsY_r = thrust::raw_pointer_cast ( &(numCellsY[0]));
	startI0_r = thrust::raw_pointer_cast ( &(startI0[0]));
	startJ0_r = thrust::raw_pointer_cast ( &(startJ0[0]));
	numCellsX0_r = thrust::raw_pointer_cast ( &(numCellsX0[0]));
	numCellsY0_r = thrust::raw_pointer_cast ( &(numCellsY0[0]));
	xmin_r = thrust::raw_pointer_cast ( &(xmin[0]));
	xmax_r = thrust::raw_pointer_cast ( &(xmax[0]));
	ymin_r = thrust::raw_pointer_cast ( &(ymin[0]));
	ymax_r = thrust::raw_pointer_cast ( &(ymax[0]));
	xmin0_r = thrust::raw_pointer_cast ( &(xmin0[0]));
	xmax0_r = thrust::raw_pointer_cast ( &(xmax0[0]));
	ymin0_r = thrust::raw_pointer_cast ( &(ymin0[0]));
	// fixed copy-paste bug: ymax0_r previously pointed at xmax0[0]
	ymax0_r = thrust::raw_pointer_cast ( &(ymax0[0]));
	X_r = thrust::raw_pointer_cast ( &(X[0]));
	Y_r = thrust::raw_pointer_cast ( &(Y[0]));
	ds_r = thrust::raw_pointer_cast ( &(ds[0]));
	ones_r = thrust::raw_pointer_cast ( &(ones[0]));
	x_r = thrust::raw_pointer_cast ( &(x[0]));
	y_r = thrust::raw_pointer_cast ( &(y[0]));
	dx_r = thrust::raw_pointer_cast ( &(dx[0]));
	dy_r = thrust::raw_pointer_cast ( &(dy[0]));
	//xk_r = thrust::raw_pointer_cast ( &(xk[0]));
	//yk_r = thrust::raw_pointer_cast ( &(yk[0]));
	uB_r = thrust::raw_pointer_cast ( &(uB[0]));
	vB_r = thrust::raw_pointer_cast ( &(vB[0]));
	uBk_r = thrust::raw_pointer_cast ( &(uBk[0]));
	vBk_r = thrust::raw_pointer_cast ( &(vBk[0]));
	xleft_r = thrust::raw_pointer_cast ( &(xleft[0]));
	xright_r = thrust::raw_pointer_cast ( &(xright[0]));
	ybot_r = thrust::raw_pointer_cast ( &(ybot[0]));
	ytop_r = thrust::raw_pointer_cast ( &(ytop[0]));
	// NOTE(review): `test` is not resized in initialise() -- confirm it is
	// allocated before test_r is ever dereferenced.
	test_r = thrust::raw_pointer_cast ( &(test[0]));
	x1_r = thrust::raw_pointer_cast ( &(x1[0]));
	x2_r = thrust::raw_pointer_cast ( &(x2[0]));
	x3_r = thrust::raw_pointer_cast ( &(x3[0]));
	x4_r = thrust::raw_pointer_cast ( &(x4[0]));
	y1_r = thrust::raw_pointer_cast ( &(y1[0]));
	y2_r = thrust::raw_pointer_cast ( &(y2[0]));
	y3_r = thrust::raw_pointer_cast ( &(y3[0]));
	y4_r = thrust::raw_pointer_cast ( &(y4[0]));
	q1_r = thrust::raw_pointer_cast ( &(q1[0]));
	q2_r = thrust::raw_pointer_cast ( &(q2[0]));
	q3_r = thrust::raw_pointer_cast ( &(q3[0]));
	q4_r = thrust::raw_pointer_cast ( &(q4[0]));
	point_x_r = thrust::raw_pointer_cast ( &(point_x[0]));
	point_y_r = thrust::raw_pointer_cast ( &(point_y[0]));
	point2_x_r = thrust::raw_pointer_cast ( &(point2_x[0]));
	point2_y_r = thrust::raw_pointer_cast ( &(point2_y[0]));
	point3_x_r = thrust::raw_pointer_cast ( &(point3_x[0]));
	point3_y_r = thrust::raw_pointer_cast ( &(point3_y[0]));
	force_pressure_r= thrust::raw_pointer_cast ( &(force_pressure[0]));
	force_dudn_r = thrust::raw_pointer_cast ( &(force_dudn[0]));
	force_dvdn_r = thrust::raw_pointer_cast ( &(force_dvdn[0]));
	force_x_r = thrust::raw_pointer_cast ( &(force_x[0]));
	force_y_r = thrust::raw_pointer_cast ( &(force_y[0]));
}
//flag this isn't setup to work with multiple bodies
//flag this kernel is set up to be called recursively to handle body sizes larger than
//flag this kernel isn't working for smaller body node spacing (0.005 works, 0.004 does not), disabling for now
//the maximum number of points allowable in a block
// Single-thread kernel (only thread 0 does work): scales the body's bounding
// box about its centre by `scale`, stores the scaled extents in
// xmax/xmin/ymax/ymin, and locates the covering grid-cell window
// (startI/startJ + numCellsX/numCellsY) by linear search over the node
// coordinates x[] and y[].
// The *_in arguments are thrust iterators produced by min/max_element on the
// host; dereferencing them here reads the extrema directly from device memory.
// NOTE(review): the while loops assume the scaled box stays strictly inside
// the domain; if xmax/ymax reaches past the last grid node the searches run
// off the end of x[]/y[] -- confirm callers guarantee this.
__global__
void boundingBox(double *x, double *y,
		thrust::device_vector<double>::iterator xmax_in, thrust::device_vector<double>::iterator xmin_in, thrust::device_vector<double>::iterator ymax_in, thrust::device_vector<double>::iterator ymin_in,
		int *startI, int *startJ, int *numCellsX, int *numCellsY,
		double *xmax, double *xmin, double *ymax, double *ymin, double scale)
{
	// only one thread performs the (inherently serial) search
	if (threadIdx.x > 0)
		return;
	xmax[0] = *xmax_in;
	xmin[0] = *xmin_in;
	ymax[0] = *ymax_in;
	ymin[0] = *ymin_in;
	// grow the box symmetrically about its centre by the factor `scale`
	double dx = xmax[0]-xmin[0],
			dy = ymax[0]-ymin[0];
	xmax[0] += 0.5*dx*(scale-1.0);
	xmin[0] -= 0.5*dx*(scale-1.0);
	ymax[0] += 0.5*dy*(scale-1.0);
	ymin[0] -= 0.5*dy*(scale-1.0);
	// find the first cell whose right/top node passes xmin/ymin ...
	int i=0,
		j=0;
	while(x[i+1] < xmin[0])
		i++;
	while(y[j+1] < ymin[0])
		j++;
	startI[0] = i;
	startJ[0] = j;
	// ... then advance to the first node past xmax/ymax to get the cell counts
	while(x[i] < xmax[0])
		i++;
	while(y[j] < ymax[0])
		j++;
	numCellsX[0] = i - startI[0];
	numCellsY[0] = j - startJ[0];
}
/**
* \brief Calculates indices of the bounding box of each body in the flow.
*
* First the bounding box is scaled by a coefficient stored in the database.
* Then, indices of the x-coordinate and y-coordinate of the bottom left cell
* of the bounding box are stored. Finally, the number of cells in the x- and y-
* directions are calculated.
*
* \param db database that contains all the simulation parameters
* \param D information about the computational grid
*/
void bodies::calculateBoundingBoxes(parameterDB &db, domain &D)
{
	// scaling factor applied to the tight box to get the control volume
	double scale = db["simulation"]["scaleCV"].get<double>();
	// raw device pointers to the grid node coordinates
	double *x_r = thrust::raw_pointer_cast( &(D.x[0]) ),
			*y_r = thrust::raw_pointer_cast( &(D.y[0]) );
	thrust::device_vector<double>::iterator iter_xmax,
			iter_xmin,
			iter_ymax,
			iter_ymin;
	// extrema of ALL body points (note: single combined box over all bodies)
	iter_xmax = thrust::max_element(x.begin(),x.end());
	iter_xmin = thrust::min_element(x.begin(),x.end());
	iter_ymax = thrust::max_element(y.begin(),y.end());
	iter_ymin = thrust::min_element(y.begin(),y.end());
	// the boundingBox kernel is intentionally launched with a single thread:
	// the index search it performs is serial
	const int blocksize = 1;
	dim3 grid(1, 1);
	dim3 block(blocksize, 1);
	hipLaunchKernelGGL(( boundingBox), dim3(grid),dim3(block), 0, 0, x_r,y_r,
			iter_xmax, iter_xmin, iter_ymax, iter_ymin,
			startI_r, startJ_r, numCellsX_r, numCellsY_r,
			xmax_r,xmin_r,ymax_r,ymin_r, scale);
}
/**
 * \brief Computes the tight (unscaled) bounding box of every body and the
 * grid-cell index window that covers it.
 *
 * Results are stored in xmin0/xmax0/ymin0/ymax0 and
 * startI0/startJ0/numCellsX0/numCellsY0.
 *
 * \param db database that contains all the simulation parameters (unused here;
 *           kept for interface parity with calculateBoundingBoxes)
 * \param D  information about the computational grid
 */
void bodies::calculateTightBoundingBoxes(parameterDB &db, domain &D) //flag this should be merged into the normal calculate bounding box function
{
	int i, j;
	for(int k=0; k<numBodies; k++)
	{
		// Seed every extremum from the body's first point.  The original
		// seeded xmax0/ymax0 from xmin[k]/ymin[k], which are not yet computed
		// at the only call site (this runs before calculateBoundingBoxes).
		xmin0[k] = x[offsets[k]];
		xmax0[k] = x[offsets[k]];
		ymin0[k] = y[offsets[k]];
		ymax0[k] = y[offsets[k]];
		for(int l=offsets[k]+1; l<offsets[k]+numPoints[k]; l++)
		{
			if(x[l] < xmin0[k]) xmin0[k] = x[l];
			if(x[l] > xmax0[k]) xmax0[k] = x[l];
			if(y[l] < ymin0[k]) ymin0[k] = y[l];
			if(y[l] > ymax0[k]) ymax0[k] = y[l];
		}
		// locate the first cell whose far node passes the box minimum
		i=0; j=0;
		while(D.x[i+1] < xmin0[k])
			i++;
		while(D.y[j+1] < ymin0[k])
			j++;
		startI0[k] = i;
		startJ0[k] = j;
		// advance to the first node past the box maximum to get cell counts.
		// The original compared against xmax[k]/ymax[k] (the scaled box, also
		// not yet computed); the tight box must use xmax0/ymax0.
		while(D.x[i] < xmax0[k])
			i++;
		while(D.y[j] < ymax0[k])
			j++;
		numCellsX0[k] = i - startI0[k];
		numCellsY0[k] = j - startJ0[k];
	}
}
/**
* \brief Updates position, velocity and neighbors of each body.
*
* This is done using the formulae:
*
* \f$ x_{i,m} = X^c_m + (X_{i,m} - X^0_m) \cos\theta - (Y_{i,m} - Y^0_m) \sin\theta \f$
*
* and
*
* \f$ y_{i,m} = Y^c_m + (X_{i,m} - X^0_m) \sin\theta + (Y_{i,m} - Y^0_m) \cos\theta \f$
*
* \param db database that contains all the simulation parameters
* \param D information about the computational grid
* \param Time the time
*/
void bodies::update(parameterDB &db, domain &D, double Time)
{
	typedef typename cusp::array1d<double, cusp::device_memory> Array;
	typedef typename Array::iterator Iterator;
	typedef cusp::array1d_view<Iterator> View;
	// views of the vectors that store the coordinates and velocities of all the body points
	View XView, YView, xView, yView, onesView, uBView, vBView;
	// body data
	std::vector<body> *B = db["flow"]["bodies"].get<std::vector<body> *>();
	for(int l=0; l<numBodies; l++)
	{
		// update the location and velocity of the body
		(*B)[l].update(Time);
		// create the views for the current body
		// (the last body's views extend to the end of the arrays)
		if(l < numBodies-1)
		{
			XView    = View(X.begin()+offsets[l], X.begin()+offsets[l+1]);
			YView    = View(Y.begin()+offsets[l], Y.begin()+offsets[l+1]);
			onesView = View(ones.begin()+offsets[l], ones.begin()+offsets[l+1]);
			uBView   = View(uB.begin()+offsets[l], uB.begin()+offsets[l+1]);
			vBView   = View(vB.begin()+offsets[l], vB.begin()+offsets[l+1]);
			xView    = View(x.begin()+offsets[l], x.begin()+offsets[l+1]);
			yView    = View(y.begin()+offsets[l], y.begin()+offsets[l+1]);
		}
		else
		{
			XView    = View(X.begin()+offsets[l], X.end());
			YView    = View(Y.begin()+offsets[l], Y.end());
			onesView = View(ones.begin()+offsets[l], ones.end());
			xView    = View(x.begin()+offsets[l], x.end());
			yView    = View(y.begin()+offsets[l], y.end());
			uBView   = View(uB.begin()+offsets[l], uB.end());
			vBView   = View(vB.begin()+offsets[l], vB.end());
		}
		// update postitions: rigid-body rotation by Theta about (X0, Y0) plus
		// translation to the current centre Xc, expressed as two chained
		// axpbypcz operations per coordinate (see the formulae in the header
		// comment above).  Statement order matters: the second call reuses the
		// partial result written into xView/yView by the first.
		// x-coordinates
		cusp::blas::axpbypcz( onesView, XView, onesView, xView, (*B)[l].Xc[0], cos((*B)[l].Theta), -(*B)[l].X0[0]*cos((*B)[l].Theta) );
		cusp::blas::axpbypcz( xView, YView, onesView, xView, 1.0, -sin((*B)[l].Theta), (*B)[l].X0[1]*sin((*B)[l].Theta) );
		// y-coordinates
		cusp::blas::axpbypcz( onesView, XView, onesView, yView, (*B)[l].Xc[1], sin((*B)[l].Theta), -(*B)[l].X0[0]*sin((*B)[l].Theta) );
		cusp::blas::axpbypcz( yView, YView, onesView, yView, 1.0, cos((*B)[l].Theta), -(*B)[l].X0[1]*cos((*B)[l].Theta) );
		// update velocities: translation velocity plus the contribution of the
		// angular velocity about the centre Xc
		// x-velocities
		cusp::blas::axpbypcz(onesView, yView, onesView, uBView, (*B)[l].vel[0], -(*B)[l].angVel, (*B)[l].angVel*(*B)[l].Xc[1]);
		// y-velocities
		cusp::blas::axpbypcz(onesView, xView, onesView, vBView, (*B)[l].vel[1], (*B)[l].angVel, -(*B)[l].angVel*(*B)[l].Xc[0]);
	}
}
/**
* \brief Writes body coordinates into a file (using data from the device).
*
* \param caseFolder directory of the simulation
* \param timeStep time-step of the simulation
*/
// Dumps the current body-point coordinates to disk: first transfers the
// device-resident x/y arrays to the host, then delegates the actual file
// writing to the raw-pointer overload.
void bodies::writeToFile(std::string &caseFolder, int timeStep)
{
	// device -> host transfer of the current coordinates
	cusp::array1d<double, cusp::host_memory> hostX = x;
	cusp::array1d<double, cusp::host_memory> hostY = y;
	// hand the raw host pointers to the plain-pointer overload
	writeToFile(thrust::raw_pointer_cast(&(hostX[0])),
	            thrust::raw_pointer_cast(&(hostY[0])),
	            caseFolder, timeStep);
}
/**
* \brief Writes body coordinates into a file called \a bodies.
*
* \param bx x-coordinate of all points of all bodies
* \param by y-coordinate of all points of all bodies
* \param caseFolder directory of the simulation
* \param timeStep time-step of the simulation
*/
/**
 * \brief Writes body coordinates into a file called \a bodies.
 *
 * The file is created at <caseFolder>/<zero-padded timeStep>/bodies and holds
 * one tab-separated (x, y) pair per body point.
 *
 * \param bx x-coordinate of all points of all bodies
 * \param by y-coordinate of all points of all bodies
 * \param caseFolder directory of the simulation
 * \param timeStep time-step of the simulation
 */
void bodies::writeToFile(double *bx, double *by, std::string &caseFolder, int timeStep)
{
	// build the output path (removed an unused `std::string path` local and a
	// stray doubled semicolon from the original)
	std::stringstream out;
	out << caseFolder << '/' << std::setfill('0') << std::setw(7) << timeStep << "/bodies";
	std::ofstream file(out.str().c_str());
	file << '#' << std::setw(19) << "x-coordinate" << std::setw(20) << "y-coordinate" << std::endl;
	for (int l=0; l < totalPoints; l++)
	{
		file << bx[l] << '\t' << by[l] << '\n';
	}
	file.close();
}
| 833ac4ce67d6c70a19a68ec0b7429b10bf332e7a.cu | /***************************************************************************//**
* \file bodies.cu
* \author Anush Krishnan (anush@bu.edu)
* \author Christopher Minar (minarc@oregonstate.edu)
* \brief Implementation of the methods of the class \c bodies.
*/
#include "bodies.h"
#include <cusp/blas/blas.h>
#include <iomanip>
#include <fstream>
/**
* \brief Sets initial position and velocity of each body.
*
* \param db database that contains all the simulation parameters
* \param D information about the computational grid
*/
void bodies::initialise(parameterDB &db, domain &D)
{
	std::vector<body> *B = db["flow"]["bodies"].get<std::vector<body> *>();
	// number of bodies in the flow
	numBodies = B->size();
	//oscylinder
	// oscillating-cylinder parameters come from the first body only
	// NOTE(review): (*B)[0] is read before numBodies is checked -- confirm at
	// least one body is always declared.
	xCoeff = (*B)[0].xCoefficient;
	yCoeff = (*B)[0].yCoefficient;
	uCoeff = (*B)[0].uCoefficient;
	vCoeff = (*B)[0].vCoefficient;
	xfrequency = (*B)[0].xfrequency;
	yfrequency = (*B)[0].yfrequency;
	xPhase = (*B)[0].xPhase;
	yPhase = (*B)[0].yPhase;
	uPhase = (*B)[0].uPhase;
	vPhase = (*B)[0].vPhase;
	// set the sizes of all the arrays
	// per-body bookkeeping (bounding boxes, covering cell windows, offsets)
	numPoints.resize(numBodies);
	offsets.resize(numBodies);
	startI.resize(numBodies);
	startJ.resize(numBodies);
	numCellsX.resize(numBodies);
	numCellsY.resize(numBodies);
	startI0.resize(numBodies);
	startJ0.resize(numBodies);
	numCellsX0.resize(numBodies);
	numCellsY0.resize(numBodies);
	xmin.resize(numBodies);
	xmax.resize(numBodies);
	ymin.resize(numBodies);
	ymax.resize(numBodies);
	xmin0.resize(numBodies);
	xmax0.resize(numBodies);
	ymin0.resize(numBodies);
	ymax0.resize(numBodies);
	xleft.resize(numBodies);
	xright.resize(numBodies);
	ytop.resize(numBodies);
	ybot.resize(numBodies);
	// calculate offsets, number of points in each body and the total number of points
	totalPoints = 0;
	for(int k=0; k<numBodies; k++)
	{
		offsets[k] = totalPoints;
		numPoints[k] = (*B)[k].numPoints;
		totalPoints += numPoints[k];
	}
	// fill up coordinates of body points
	// X/Y hold the reference (undisplaced) coordinates; x/y the current ones
	X.resize(totalPoints);
	Y.resize(totalPoints);
	ds.resize(totalPoints);
	ones.resize(totalPoints);
	cusp::blas::fill(ones, 1.0);
	for(int k=0; k<numBodies; k++)
	{
		for(int i=0; i<numPoints[k]; i++)
		{
			X[i+offsets[k]] = (*B)[k].X[i];
			Y[i+offsets[k]] = (*B)[k].Y[i];
		}
	}
	// per-point working arrays: positions, centroid offsets, velocities
	// (current / iterate k / previous step 0), forces, and stencil scratch
	x.resize(totalPoints);
	y.resize(totalPoints);
	dx.resize(totalPoints);
	dy.resize(totalPoints);
	xk.resize(totalPoints);
	yk.resize(totalPoints);
	uB.resize(totalPoints);
	vB.resize(totalPoints);
	uBk.resize(totalPoints);
	vBk.resize(totalPoints);
	uB0.resize(totalPoints);
	vB0.resize(totalPoints);
	force_dudn.resize(totalPoints);
	force_dvdn.resize(totalPoints);
	force_pressure.resize(totalPoints);
	force_x.resize(totalPoints);
	force_y.resize(totalPoints);
	x1.resize(totalPoints);
	x2.resize(totalPoints);
	x3.resize(totalPoints);
	x4.resize(totalPoints);
	y1.resize(totalPoints);
	y2.resize(totalPoints);
	y3.resize(totalPoints);
	y4.resize(totalPoints);
	q1.resize(totalPoints);
	q2.resize(totalPoints);
	q3.resize(totalPoints);
	q4.resize(totalPoints);
	point_y.resize(totalPoints);
	point_x.resize(totalPoints);
	point2_y.resize(totalPoints);
	point2_x.resize(totalPoints);
	point3_y.resize(totalPoints);
	point3_x.resize(totalPoints);
	centerVelocityU = 0;
	centerVelocityV = 0;
	cusp::blas::fill(vB, 0);
	cusp::blas::fill(uB, 0);
	cusp::blas::fill(vBk, 0);
	cusp::blas::fill(uBk, 0);
	// refresh raw-pointer mirrors after the resizes above
	cast();
	bodiesMove = false;
	for(int k=0; k<numBodies; k++)
	{
		// assume a closed body (closed loop)
		for(int i=offsets[k], j = offsets[k]+numPoints[k]-1; i<offsets[k]+numPoints[k];)
		{
			// calculate the lengths of the boundary segments
			ds[i] = sqrt( (X[i]-X[j])*(X[i]-X[j]) + (Y[i]-Y[j])*(Y[i]-Y[j]) );
			// j takes the value of i, then i is incremented
			j = i++;
		}
		// if the body is moving, set bodiesMove to true
		bodiesMove = bodiesMove || (*B)[k].moving[0] || (*B)[k].moving[1];
	}
	// set initial position of the body
	update(db, D, 0.0);
	if(numBodies)
	{
		// tight boxes first, then the scaled control-volume boxes
		calculateTightBoundingBoxes(db, D);
		calculateBoundingBoxes(db, D);
		numCellsXHost = numCellsX[0];
		numCellsYHost = numCellsY[0];
	}
	// centroid of all body points, and each point's offset from it
	midX=0;
	midY=0;
	midX0=0;
	midY0=0;
	midXk=0;
	midYk=0;
	for (int i=0;i<totalPoints;i++)
	{
		midX += x[i];
		midY += y[i];
	}
	midX /= totalPoints;
	midY /= totalPoints;
	for (int i = 0; i<totalPoints; i++)
	{
		dx[i] = x[i] - midX;
		dy[i] = y[i] - midY;
	}
	midX0=midX;
	midY0=midY;
	// body centre starts at rest
	centerVelocityV = 0;
	centerVelocityU = 0;
	centerVelocityVk = 0;
	centerVelocityUk = 0;
	centerVelocityU0= 0;
	centerVelocityV0= 0;
}
/**
 * \brief Caches the raw device pointer of every device vector of the class.
 *
 * thrust/cusp containers cannot be passed to CUDA kernels directly, so the
 * raw pointer of each vector is stored in the matching *_r member. This must
 * be called again after any vector is resized, since reallocation moves the
 * underlying storage and invalidates every cached pointer.
 *
 * Fix: ymax0_r was cast from xmax0 (copy-paste error); it now points at ymax0.
 */
void bodies::cast()
{
	numPoints_r     = thrust::raw_pointer_cast ( &(numPoints[0]));
	offsets_r       = thrust::raw_pointer_cast ( &(offsets[0]));
	startI_r        = thrust::raw_pointer_cast ( &(startI[0]));
	startJ_r        = thrust::raw_pointer_cast ( &(startJ[0]));
	numCellsX_r     = thrust::raw_pointer_cast ( &(numCellsX[0]));
	numCellsY_r     = thrust::raw_pointer_cast ( &(numCellsY[0]));
	startI0_r       = thrust::raw_pointer_cast ( &(startI0[0]));
	startJ0_r       = thrust::raw_pointer_cast ( &(startJ0[0]));
	numCellsX0_r    = thrust::raw_pointer_cast ( &(numCellsX0[0]));
	numCellsY0_r    = thrust::raw_pointer_cast ( &(numCellsY0[0]));
	xmin_r          = thrust::raw_pointer_cast ( &(xmin[0]));
	xmax_r          = thrust::raw_pointer_cast ( &(xmax[0]));
	ymin_r          = thrust::raw_pointer_cast ( &(ymin[0]));
	ymax_r          = thrust::raw_pointer_cast ( &(ymax[0]));
	xmin0_r         = thrust::raw_pointer_cast ( &(xmin0[0]));
	xmax0_r         = thrust::raw_pointer_cast ( &(xmax0[0]));
	ymin0_r         = thrust::raw_pointer_cast ( &(ymin0[0]));
	ymax0_r         = thrust::raw_pointer_cast ( &(ymax0[0])); // fixed: was &(xmax0[0])
	X_r             = thrust::raw_pointer_cast ( &(X[0]));
	Y_r             = thrust::raw_pointer_cast ( &(Y[0]));
	ds_r            = thrust::raw_pointer_cast ( &(ds[0]));
	ones_r          = thrust::raw_pointer_cast ( &(ones[0]));
	x_r             = thrust::raw_pointer_cast ( &(x[0]));
	y_r             = thrust::raw_pointer_cast ( &(y[0]));
	dx_r            = thrust::raw_pointer_cast ( &(dx[0]));
	dy_r            = thrust::raw_pointer_cast ( &(dy[0]));
	//xk_r          = thrust::raw_pointer_cast ( &(xk[0]));
	//yk_r          = thrust::raw_pointer_cast ( &(yk[0]));
	uB_r            = thrust::raw_pointer_cast ( &(uB[0]));
	vB_r            = thrust::raw_pointer_cast ( &(vB[0]));
	uBk_r           = thrust::raw_pointer_cast ( &(uBk[0]));
	vBk_r           = thrust::raw_pointer_cast ( &(vBk[0]));
	xleft_r         = thrust::raw_pointer_cast ( &(xleft[0]));
	xright_r        = thrust::raw_pointer_cast ( &(xright[0]));
	ybot_r          = thrust::raw_pointer_cast ( &(ybot[0]));
	ytop_r          = thrust::raw_pointer_cast ( &(ytop[0]));
	test_r          = thrust::raw_pointer_cast ( &(test[0]));
	x1_r            = thrust::raw_pointer_cast ( &(x1[0]));
	x2_r            = thrust::raw_pointer_cast ( &(x2[0]));
	x3_r            = thrust::raw_pointer_cast ( &(x3[0]));
	x4_r            = thrust::raw_pointer_cast ( &(x4[0]));
	y1_r            = thrust::raw_pointer_cast ( &(y1[0]));
	y2_r            = thrust::raw_pointer_cast ( &(y2[0]));
	y3_r            = thrust::raw_pointer_cast ( &(y3[0]));
	y4_r            = thrust::raw_pointer_cast ( &(y4[0]));
	q1_r            = thrust::raw_pointer_cast ( &(q1[0]));
	q2_r            = thrust::raw_pointer_cast ( &(q2[0]));
	q3_r            = thrust::raw_pointer_cast ( &(q3[0]));
	q4_r            = thrust::raw_pointer_cast ( &(q4[0]));
	point_x_r       = thrust::raw_pointer_cast ( &(point_x[0]));
	point_y_r       = thrust::raw_pointer_cast ( &(point_y[0]));
	point2_x_r      = thrust::raw_pointer_cast ( &(point2_x[0]));
	point2_y_r      = thrust::raw_pointer_cast ( &(point2_y[0]));
	point3_x_r      = thrust::raw_pointer_cast ( &(point3_x[0]));
	point3_y_r      = thrust::raw_pointer_cast ( &(point3_y[0]));
	force_pressure_r= thrust::raw_pointer_cast ( &(force_pressure[0]));
	force_dudn_r    = thrust::raw_pointer_cast ( &(force_dudn[0]));
	force_dvdn_r    = thrust::raw_pointer_cast ( &(force_dvdn[0]));
	force_x_r       = thrust::raw_pointer_cast ( &(force_x[0]));
	force_y_r       = thrust::raw_pointer_cast ( &(force_y[0]));
}
//flag this isn't set up to work with multiple bodies
//flag this kernel is set up to be called recursively to handle bodies with more points than
//flag this kernel isn't working for smaller body node spacing (0.005 works, 0.004 does not); disabling for now
//the maximum number of points allowable in a block
// Single-thread kernel (only threadIdx.x == 0 does any work) that scales the
// tight bounding box of the body points and converts it to grid-cell indices.
// x, y            : grid-line coordinates (device arrays)
// *_in            : iterators to the extrema of the body points, found with
//                   thrust::min/max_element on the host side
// startI, startJ  : out - indices of the cell containing the bottom-left corner
// numCellsX/Y     : out - extent of the scaled box in cells
// xmax..ymin      : out - coordinates of the scaled box corners
// scale           : factor by which each side of the box is grown about its centre
__global__
void boundingBox(double *x, double *y,
                 thrust::device_vector<double>::iterator xmax_in, thrust::device_vector<double>::iterator xmin_in, thrust::device_vector<double>::iterator ymax_in, thrust::device_vector<double>::iterator ymin_in,
                 int *startI, int *startJ, int *numCellsX, int *numCellsY,
                 double *xmax, double *xmin, double *ymax, double *ymin, double scale)
{
	// serial kernel: every thread except lane 0 exits immediately
	if (threadIdx.x > 0)
		return;
	// copy the tight extrema of the body points into the output box
	xmax[0] = *xmax_in;
	xmin[0] = *xmin_in;
	ymax[0] = *ymax_in;
	ymin[0] = *ymin_in;
	// grow the box symmetrically so that each side length is multiplied by `scale`
	double dx = xmax[0]-xmin[0],
	       dy = ymax[0]-ymin[0];
	xmax[0] += 0.5*dx*(scale-1.0);
	xmin[0] -= 0.5*dx*(scale-1.0);
	ymax[0] += 0.5*dy*(scale-1.0);
	ymin[0] -= 0.5*dy*(scale-1.0);
	// walk the grid lines up to the cell containing the lower-left corner.
	// NOTE(review): these loops have no upper bound check - they assume the
	// scaled box lies strictly inside the grid; confirm for bodies near edges.
	int i=0,
	    j=0;
	while(x[i+1] < xmin[0])
		i++;
	while(y[j+1] < ymin[0])
		j++;
	startI[0] = i;
	startJ[0] = j;
	// continue walking to the upper-right corner to get the extent in cells
	while(x[i] < xmax[0])
		i++;
	while(y[j] < ymax[0])
		j++;
	numCellsX[0] = i - startI[0];
	numCellsY[0] = j - startJ[0];
}
/**
* \brief Calculates indices of the bounding box of each body in the flow.
*
* First the bounding box is scaled by a coefficient stored in the database.
* Then, indices of the x-coordinate and y-coordinate of the bottom left cell
* of the bounding box are stored. Finally, the number of cells in the x- and y-
* directions are calculated.
*
* \param db database that contains all the simulation parameters
* \param D information about the computational grid
*/
void bodies::calculateBoundingBoxes(parameterDB &db, domain &D)
{
	// scaling factor applied to the tight box of the body points
	double scale = db["simulation"]["scaleCV"].get<double>();
	// raw pointers to the grid-line coordinates
	double *xGrid = thrust::raw_pointer_cast( &(D.x[0]) );
	double *yGrid = thrust::raw_pointer_cast( &(D.y[0]) );
	// locate the extrema of the body points on the device
	thrust::device_vector<double>::iterator itXMax = thrust::max_element(x.begin(), x.end()),
	                                        itXMin = thrust::min_element(x.begin(), x.end()),
	                                        itYMax = thrust::max_element(y.begin(), y.end()),
	                                        itYMin = thrust::min_element(y.begin(), y.end());
	// a single thread is enough: the kernel walks the grid lines serially
	dim3 grid(1, 1);
	dim3 block(1, 1);
	boundingBox<<<grid,block>>>(xGrid, yGrid,
	                            itXMax, itXMin, itYMax, itYMin,
	                            startI_r, startJ_r, numCellsX_r, numCellsY_r,
	                            xmax_r, xmin_r, ymax_r, ymin_r, scale);
}
/**
 * \brief Computes the unscaled (tight) bounding-box grid indices of each body.
 *
 * For every body, the min/max of its point coordinates are found, then the
 * grid lines are walked to locate the bottom-left cell of the box (startI0,
 * startJ0) and its extent in cells (numCellsX0, numCellsY0).
 *
 * Fixes relative to the original:
 *  - xmax0/ymax0 were seeded from xmin/ymin (the scaled box of a previous
 *    call, garbage on the first call); they are now seeded from the body's
 *    first point, like xmin0/ymin0.
 *  - the upper-edge grid walks compared against xmax/ymax (the scaled box)
 *    instead of the tight extrema xmax0/ymax0 computed just above.
 *  - the unused local `scale` was removed: tight boxes are not scaled.
 */
void bodies::calculateTightBoundingBoxes(parameterDB &db, domain &D) //flag this should be merged into the normal calculate bounding box function
{
	int i, j;
	for(int k=0; k<numBodies; k++)
	{
		// seed all four extrema with the body's first point
		xmin0[k] = x[offsets[k]];
		xmax0[k] = x[offsets[k]];
		ymin0[k] = y[offsets[k]];
		ymax0[k] = y[offsets[k]];
		for(int l=offsets[k]+1; l<offsets[k]+numPoints[k]; l++)
		{
			if(x[l] < xmin0[k]) xmin0[k] = x[l];
			if(x[l] > xmax0[k]) xmax0[k] = x[l];
			if(y[l] < ymin0[k]) ymin0[k] = y[l];
			if(y[l] > ymax0[k]) ymax0[k] = y[l];
		}
		// walk the grid lines to the cell containing the lower-left corner
		i=0; j=0;
		while(D.x[i+1] < xmin0[k])
			i++;
		while(D.y[j+1] < ymin0[k])
			j++;
		startI0[k] = i;
		startJ0[k] = j;
		// continue to the upper-right corner of the *tight* box
		while(D.x[i] < xmax0[k])
			i++;
		while(D.y[j] < ymax0[k])
			j++;
		numCellsX0[k] = i - startI0[k];
		numCellsY0[k] = j - startJ0[k];
	}
}
/**
* \brief Updates position, velocity and neighbors of each body.
*
* This is done using the formulae:
*
* \f$ x_{i,m} = X^c_m + (X_{i,m} - X^0_m) \cos\theta - (Y_{i,m} - Y^0_m) \sin\theta \f$
*
* and
*
* \f$ y_{i,m} = Y^c_m + (X_{i,m} - X^0_m) \sin\theta + (Y_{i,m} - Y^0_m) \cos\theta \f$
*
* \param db database that contains all the simulation parameters
* \param D information about the computational grid
* \param Time the time
*/
void bodies::update(parameterDB &db, domain &D, double Time)
{
	typedef typename cusp::array1d<double, cusp::device_memory> Array;
	typedef typename Array::iterator Iterator;
	typedef cusp::array1d_view<Iterator> View;
	// views of the vectors that store the coordinates and velocities of all the body points
	View XView, YView, xView, yView, onesView, uBView, vBView;
	// body data
	std::vector<body> *B = db["flow"]["bodies"].get<std::vector<body> *>();
	for(int l=0; l<numBodies; l++)
	{
		// update the location and velocity of the body
		(*B)[l].update(Time);
		// create the views for the current body: each view is the slice
		// [offsets[l], offsets[l+1]) of the concatenated per-point arrays
		if(l < numBodies-1)
		{
			XView    = View(X.begin()+offsets[l], X.begin()+offsets[l+1]);
			YView    = View(Y.begin()+offsets[l], Y.begin()+offsets[l+1]);
			onesView = View(ones.begin()+offsets[l], ones.begin()+offsets[l+1]);
			uBView   = View(uB.begin()+offsets[l], uB.begin()+offsets[l+1]);
			vBView   = View(vB.begin()+offsets[l], vB.begin()+offsets[l+1]);
			xView    = View(x.begin()+offsets[l], x.begin()+offsets[l+1]);
			yView    = View(y.begin()+offsets[l], y.begin()+offsets[l+1]);
		}
		else
		{
			// last body: slice runs to the end of the arrays
			XView    = View(X.begin()+offsets[l], X.end());
			YView    = View(Y.begin()+offsets[l], Y.end());
			onesView = View(ones.begin()+offsets[l], ones.end());
			xView    = View(x.begin()+offsets[l], x.end());
			yView    = View(y.begin()+offsets[l], y.end());
			uBView   = View(uB.begin()+offsets[l], uB.end());
			vBView   = View(vB.begin()+offsets[l], vB.end());
		}
		// update positions using the rotation formulae from the header comment.
		// cusp::blas::axpbypcz(a, b, c, w, alpha, beta, gamma) computes
		// w = alpha*a + beta*b + gamma*c elementwise.
		// x-coordinates: x = Xc + (X - X0)cos(theta)  (first call), then
		// subtract (Y - Y0)sin(theta)                 (second call)
		cusp::blas::axpbypcz( onesView, XView, onesView, xView, (*B)[l].Xc[0], cos((*B)[l].Theta), -(*B)[l].X0[0]*cos((*B)[l].Theta) );
		cusp::blas::axpbypcz( xView, YView, onesView, xView, 1.0, -sin((*B)[l].Theta), (*B)[l].X0[1]*sin((*B)[l].Theta) );
		// y-coordinates: y = Yc + (X - X0)sin(theta) + (Y - Y0)cos(theta)
		cusp::blas::axpbypcz( onesView, XView, onesView, yView, (*B)[l].Xc[1], sin((*B)[l].Theta), -(*B)[l].X0[0]*sin((*B)[l].Theta) );
		cusp::blas::axpbypcz( yView, YView, onesView, yView, 1.0, cos((*B)[l].Theta), -(*B)[l].X0[1]*cos((*B)[l].Theta) );
		// update velocities: rigid-body motion, translation plus rotation
		// x-velocities: uB = vel_x - angVel*(y - Yc)
		cusp::blas::axpbypcz(onesView, yView, onesView, uBView, (*B)[l].vel[0], -(*B)[l].angVel, (*B)[l].angVel*(*B)[l].Xc[1]);
		// y-velocities: vB = vel_y + angVel*(x - Xc)
		cusp::blas::axpbypcz(onesView, xView, onesView, vBView, (*B)[l].vel[1], (*B)[l].angVel, -(*B)[l].angVel*(*B)[l].Xc[0]);
	}
}
/**
* \brief Writes body coordinates into a file (using data from the device).
*
* \param caseFolder directory of the simulation
* \param timeStep time-step of the simulation
*/
void bodies::writeToFile(std::string &caseFolder, int timeStep)
{
	// stage the device-resident coordinates on the host before writing
	cusp::array1d<double, cusp::host_memory> hostX = x;
	cusp::array1d<double, cusp::host_memory> hostY = y;
	// delegate to the raw-pointer overload that does the actual file output
	writeToFile(thrust::raw_pointer_cast(&(hostX[0])),
	            thrust::raw_pointer_cast(&(hostY[0])),
	            caseFolder, timeStep);
}
/**
* \brief Writes body coordinates into a file called \a bodies.
*
* \param bx x-coordinate of all points of all bodies
* \param by y-coordinate of all points of all bodies
* \param caseFolder directory of the simulation
* \param timeStep time-step of the simulation
*/
/**
 * \brief Writes body coordinates into a file called \a bodies.
 *
 * The file is created at <caseFolder>/<timeStep zero-padded to 7 digits>/bodies
 * and contains one tab-separated (x, y) pair per body point.
 * (Cleanup: removed the unused local `path` and a stray double semicolon.)
 *
 * \param bx x-coordinate of all points of all bodies
 * \param by y-coordinate of all points of all bodies
 * \param caseFolder directory of the simulation
 * \param timeStep time-step of the simulation
 */
void bodies::writeToFile(double *bx, double *by, std::string &caseFolder, int timeStep)
{
	std::stringstream out;
	out << caseFolder << '/' << std::setfill('0') << std::setw(7) << timeStep << "/bodies";
	std::ofstream file(out.str().c_str());
	file << '#' << std::setw(19) << "x-coordinate" << std::setw(20) << "y-coordinate" << std::endl;
	for (int l=0; l < totalPoints; l++)
	{
		file << bx[l] << '\t' << by[l] << '\n';
	}
	file.close();
}
|
7648cb6aa77dfd4b398bc346636ec8375eaca145.hip | // !!! This is a file automatically generated by hipify!!!
#include "../common/common.h"
#include <hip/hip_runtime.h>
#include <stdio.h>
#define LEN 1<<22
// Array-of-structures (AoS) element: one 2D point whose x and y members are
// interleaved in memory. Used to measure AoS global-memory access patterns.
struct innerStruct{
    float x;
    float y;
};
// Fills an innerStruct array with pseudo-random coordinates in [0, 2.55].
void initialInnerStruct(innerStruct *ip, int size)
{
    for (int idx = 0; idx < size; ++idx)
    {
        innerStruct p;
        p.x = (float)(rand() & 0xFF) / 100.0f;
        p.y = (float)(rand() & 0xFF) / 100.0f;
        ip[idx] = p;
    }
}
// CPU reference implementation: C[i].x = A[i].x + 10, C[i].y = A[i].y + 20.
void testInnerStructHost(innerStruct *A, innerStruct *C, const int n)
{
    for (int i = 0; i < n; ++i)
    {
        innerStruct v = A[i];
        v.x += 10.f;
        v.y += 20.f;
        C[i] = v;
    }
}
// Compares host and GPU results field-by-field and reports the first mismatch.
// Fix: the original applied bare abs() to a float difference; if that resolves
// to the integer overload, any difference smaller than 1.0 truncates to 0 and
// the mismatch is silently missed. The magnitude is now taken explicitly on a
// float, independent of overload resolution (no new headers required).
void checkInnerStruct(innerStruct *hostRef, innerStruct *gpuRef, const int N)
{
    double epsilon = 1.0E-8;
    bool match = true;
    for (int i = 0; i < N; i++)
    {
        float dx = hostRef[i].x - gpuRef[i].x;
        if (dx < 0.0f) dx = -dx;              // |host.x - gpu.x|
        if (dx > epsilon)
        {
            match = false;
            printf("different on %dth element: host %f gpu %f\n", i,
                   hostRef[i].x, gpuRef[i].x);
            break;
        }
        float dy = hostRef[i].y - gpuRef[i].y;
        if (dy < 0.0f) dy = -dy;              // |host.y - gpu.y|
        if (dy > epsilon)
        {
            match = false;
            printf("different on %dth element: host %f gpu %f\n", i,
                   hostRef[i].y, gpuRef[i].y);
            break;
        }
    }
    if (!match) printf("Arrays do not match.\n\n");
}
// AoS kernel: each thread copies one struct, offsets both fields, writes it back.
__global__ void testInnerStruct(innerStruct *data, innerStruct * result,
                                const int n)
{
    unsigned int gid = blockDim.x * blockIdx.x + threadIdx.x;
    if (gid < n)
    {
        // one struct-sized load per thread (x and y travel together in AoS)
        innerStruct elem = data[gid];
        elem.x = elem.x + 10.f;
        elem.y = elem.y + 20.f;
        result[gid] = elem;
    }
}
// Warm-up kernel: body is identical to testInnerStruct. Its only purpose is
// to absorb one-time device start-up cost so that the timed run of
// testInnerStruct is representative.
__global__ void warmup(innerStruct *data, innerStruct * result, const int n)
{
    unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n)
    {
        // load the full struct, offset both fields, store it back
        innerStruct tmp = data[i];
        tmp.x += 10.f;
        tmp.y += 20.f;
        result[i] = tmp;
    }
}
int main(int argc, char **argv)
{
// set up device
int dev = 0;
hipDeviceProp_t deviceProp;
CHECK(hipGetDeviceProperties(&deviceProp, dev));
printf("%s test struct of array at ", argv[0]);
printf("device %d: %s \n", dev, deviceProp.name);
CHECK(hipSetDevice(dev));
// allocate host memory
int nElem = LEN;
size_t nBytes = nElem * sizeof(innerStruct);
innerStruct *h_A = (innerStruct *)malloc(nBytes);
innerStruct *hostRef = (innerStruct *)malloc(nBytes);
innerStruct *gpuRef = (innerStruct *)malloc(nBytes);
// initialize host array
initialInnerStruct(h_A, nElem);
testInnerStructHost(h_A, hostRef, nElem);
// allocate device memory
innerStruct *d_A, *d_C;
CHECK(hipMalloc((innerStruct**)&d_A, nBytes));
CHECK(hipMalloc((innerStruct**)&d_C, nBytes));
// copy data from host to device
CHECK(hipMemcpy(d_A, h_A, nBytes, hipMemcpyHostToDevice));
// set up offset for summaryAU: It is blocksize not offset. Thanks.CZ
int blocksize = 128;
if (argc > 1) blocksize = atoi(argv[1]);
// execution configuration
dim3 block (blocksize, 1);
dim3 grid ((nElem + block.x - 1) / block.x, 1);
// kernel 1: warmup
double iStart = cpuSecond();
hipLaunchKernelGGL(( warmup), dim3(grid), dim3(block), 0, 0, d_A, d_C, nElem);
CHECK(hipDeviceSynchronize());
double iElaps = cpuSecond() - iStart;
printf("warmup <<< %3d, %3d >>> elapsed %f sec\n", grid.x, block.x,
iElaps);
CHECK(hipMemcpy(gpuRef, d_C, nBytes, hipMemcpyDeviceToHost));
checkInnerStruct(hostRef, gpuRef, nElem);
CHECK(hipGetLastError());
// kernel 2: testInnerStruct
iStart = cpuSecond();
hipLaunchKernelGGL(( testInnerStruct), dim3(grid), dim3(block), 0, 0, d_A, d_C, nElem);
CHECK(hipDeviceSynchronize());
iElaps = cpuSecond() - iStart;
printf("innerstruct <<< %3d, %3d >>> elapsed %f sec\n", grid.x, block.x,
iElaps);
CHECK(hipMemcpy(gpuRef, d_C, nBytes, hipMemcpyDeviceToHost));
checkInnerStruct(hostRef, gpuRef, nElem);
CHECK(hipGetLastError());
// free memories both host and device
CHECK(hipFree(d_A));
CHECK(hipFree(d_C));
free(h_A);
free(hostRef);
free(gpuRef);
// reset device
CHECK(hipDeviceReset());
return EXIT_SUCCESS;
} | 7648cb6aa77dfd4b398bc346636ec8375eaca145.cu | #include "../common/common.h"
#include <cuda_runtime.h>
#include <stdio.h>
#define LEN 1<<22
// Array-of-structures (AoS) element: x and y of one point are interleaved in
// memory; used to benchmark AoS global-memory access.
struct innerStruct{
    float x;
    float y;
};
// Populates `size` elements with pseudo-random coordinates in [0, 2.55].
void initialInnerStruct(innerStruct *ip, int size)
{
    innerStruct *end = ip + size;
    for (innerStruct *p = ip; p != end; ++p)
    {
        p->x = (float)(rand() & 0xFF) / 100.0f;
        p->y = (float)(rand() & 0xFF) / 100.0f;
    }
}
// CPU reference: adds 10 to every x field and 20 to every y field.
void testInnerStructHost(innerStruct *A, innerStruct *C, const int n)
{
    int idx = 0;
    while (idx < n)
    {
        C[idx].x = A[idx].x + 10.f;
        C[idx].y = A[idx].y + 20.f;
        ++idx;
    }
}
// Element-wise comparison of host and GPU results; prints the first mismatch.
// Fix: bare abs() on a float difference may bind to the integer overload,
// truncating sub-1.0 differences to zero and hiding real mismatches. The
// magnitude is now computed explicitly on a float.
void checkInnerStruct(innerStruct *hostRef, innerStruct *gpuRef, const int N)
{
    double epsilon = 1.0E-8;
    bool match = true;
    for (int i = 0; i < N; i++)
    {
        float diffX = hostRef[i].x - gpuRef[i].x;
        if (diffX < 0.0f) diffX = -diffX;      // |host.x - gpu.x|
        if (diffX > epsilon)
        {
            match = false;
            printf("different on %dth element: host %f gpu %f\n", i,
                   hostRef[i].x, gpuRef[i].x);
            break;
        }
        float diffY = hostRef[i].y - gpuRef[i].y;
        if (diffY < 0.0f) diffY = -diffY;      // |host.y - gpu.y|
        if (diffY > epsilon)
        {
            match = false;
            printf("different on %dth element: host %f gpu %f\n", i,
                   hostRef[i].y, gpuRef[i].y);
            break;
        }
    }
    if (!match) printf("Arrays do not match.\n\n");
}
// One thread per element: load the AoS struct, offset both fields, store back.
__global__ void testInnerStruct(innerStruct *data, innerStruct * result,
                                const int n)
{
    unsigned int tid = threadIdx.x + blockIdx.x * blockDim.x;
    if (tid < n)
    {
        innerStruct val = data[tid];
        val.x = val.x + 10.f;
        val.y = val.y + 20.f;
        result[tid] = val;
    }
}
// Warm-up kernel, identical in body to testInnerStruct: it is launched once
// before the timed run so device initialization cost does not pollute timings.
__global__ void warmup(innerStruct *data, innerStruct * result, const int n)
{
    unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n)
    {
        // load one AoS element, offset both fields, store it back
        innerStruct tmp = data[i];
        tmp.x += 10.f;
        tmp.y += 20.f;
        result[i] = tmp;
    }
}
int main(int argc, char **argv)
{
// set up device
int dev = 0;
cudaDeviceProp deviceProp;
CHECK(cudaGetDeviceProperties(&deviceProp, dev));
printf("%s test struct of array at ", argv[0]);
printf("device %d: %s \n", dev, deviceProp.name);
CHECK(cudaSetDevice(dev));
// allocate host memory
int nElem = LEN;
size_t nBytes = nElem * sizeof(innerStruct);
innerStruct *h_A = (innerStruct *)malloc(nBytes);
innerStruct *hostRef = (innerStruct *)malloc(nBytes);
innerStruct *gpuRef = (innerStruct *)malloc(nBytes);
// initialize host array
initialInnerStruct(h_A, nElem);
testInnerStructHost(h_A, hostRef, nElem);
// allocate device memory
innerStruct *d_A, *d_C;
CHECK(cudaMalloc((innerStruct**)&d_A, nBytes));
CHECK(cudaMalloc((innerStruct**)&d_C, nBytes));
// copy data from host to device
CHECK(cudaMemcpy(d_A, h_A, nBytes, cudaMemcpyHostToDevice));
// set up offset for summaryAU: It is blocksize not offset. Thanks.CZ
int blocksize = 128;
if (argc > 1) blocksize = atoi(argv[1]);
// execution configuration
dim3 block (blocksize, 1);
dim3 grid ((nElem + block.x - 1) / block.x, 1);
// kernel 1: warmup
double iStart = cpuSecond();
warmup<<<grid, block>>>(d_A, d_C, nElem);
CHECK(cudaDeviceSynchronize());
double iElaps = cpuSecond() - iStart;
printf("warmup <<< %3d, %3d >>> elapsed %f sec\n", grid.x, block.x,
iElaps);
CHECK(cudaMemcpy(gpuRef, d_C, nBytes, cudaMemcpyDeviceToHost));
checkInnerStruct(hostRef, gpuRef, nElem);
CHECK(cudaGetLastError());
// kernel 2: testInnerStruct
iStart = cpuSecond();
testInnerStruct<<<grid, block>>>(d_A, d_C, nElem);
CHECK(cudaDeviceSynchronize());
iElaps = cpuSecond() - iStart;
printf("innerstruct <<< %3d, %3d >>> elapsed %f sec\n", grid.x, block.x,
iElaps);
CHECK(cudaMemcpy(gpuRef, d_C, nBytes, cudaMemcpyDeviceToHost));
checkInnerStruct(hostRef, gpuRef, nElem);
CHECK(cudaGetLastError());
// free memories both host and device
CHECK(cudaFree(d_A));
CHECK(cudaFree(d_C));
free(h_A);
free(hostRef);
free(gpuRef);
// reset device
CHECK(cudaDeviceReset());
return EXIT_SUCCESS;
} |
2c6cf303c25b9af3c6ece9c9c569016a410b1ce7.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
// ***************************************************************
// cuDPP -- CUDA Data Parallel Primitives library
// -------------------------------------------------------------
// $Revision: 3505 $
// $Date: 2007-07-06 09:26:06 -0700 (Fri, 06 Jul 2007) $
// -------------------------------------------------------------
// This source code is distributed under the terms of license.txt in
// the root directory of this source distribution.
// -------------------------------------------------------------
/**
* @file
* segmented_scan_app.cu
*
* @brief CUDPP application-level scan routines
*/
/** \addtogroup cudpp_app
*
*/
/** @name Segmented Scan Functions
* @{
*/
#include "cudpp.h"
#include "cudpp_util.h"
#include "cudpp_plan.h"
#include "kernel/segmented_scan_kernel.cu"
#include "kernel/vector_kernel.cu"
#include <cutil.h>
#include <cstdlib>
#include <cstdio>
#include <assert.h>
/** @brief Perform recursive scan on arbitrary size arrays
*
* This is the CPU-side workhorse function of the segmented scan
* engine. This function invokes the CUDA kernels which perform the
* segmented scan on individual blocks.
*
* Scans of large arrays must be split (possibly recursively) into a
* hierarchy of block scans, where each block is scanned by a single
* CUDA thread block. At each recursive level of the
* segmentedScanArrayRecursive first invokes a kernel to scan all blocks of
* that level, and if the level has more than one block, it calls
* itself recursively. On returning from each recursive level, the
* total sum of each block from the level below is added to all
* elements of the first segment of the corresponding block in this
* level.
*
* Template parameter T is the data type of the input data.
* Template parameter op is the binary operator of the segmented scan.
* Template parameter isBackward specifies whether the direction is backward
* (not implemented). It is forward if it is false.
* Template parameter isExclusive specifies whether the segmented scan
* is exclusive (true) or inclusive (false).
*
* @param[out] d_out The output array for the segmented scan results
* @param[in] d_idata The input array to be scanned
* @param[in] d_iflags The input flags vector which specifies the
* segments. The first element of a segment is marked by a 1 in the
* corresponding position in d_iflags vector. All other elements of
* d_iflags is 0.
* @param[out] d_blockSums Array of arrays of per-block sums (one
* array per recursive level, allocated
* by allocScanStorage())
* @param[out] d_blockFlags Array of arrays of per-block OR-reductions
* of flags (one array per recursive level, allocated by
* allocScanStorage())
* @param[out] d_blockIndices Array of arrays of per-block
* min-reductions of indices (one array per recursive level, allocated
* by allocSegmentedScanStorage()). An index for a particular position \c i in
* a block is calculated as - if \c d_iflags[i] is set then it is the
* 1-based index of that position (i.e if \c d_iflags[10] is set then
* index is \c 11) otherwise the index is \c INT_MAX (the identity
* element of a min operator)
* @param[in] numElements The number of elements in the array to scan
* @param[in] level The current recursive level of the scan
*/
template <typename T, class Op, bool isBackward, bool isExclusive, bool doShiftFlagsLeft>
void segmentedScanArrayRecursive(T *d_out,
                                 const T *d_idata,
                                 const unsigned int *d_iflags,
                                 T **d_blockSums,
                                 unsigned int **d_blockFlags,
                                 unsigned int **d_blockIndices,
                                 int numElements,
                                 int level)
{
    // number of CTAs needed to cover the input at this recursion level
    unsigned int numBlocks =
        max(1, (int)ceil((double)numElements /
                         ((double)SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE)));
    // This is the number of elements per block that the
    // CTA level API is aware of
    unsigned int numEltsPerBlock = SCAN_CTA_SIZE * 2;
    // Space to store flags - we need two sets. One gets modified and the
    // other doesn't
    unsigned int flagSpace = numEltsPerBlock * sizeof(unsigned int);
    // Space to store indices
    unsigned int idxSpace = numEltsPerBlock * sizeof(unsigned int);
    // Total shared memory space
    unsigned int sharedMemSize =
        sizeof(T) * (numEltsPerBlock) + idxSpace + flagSpace;
    // setup execution parameters
    dim3 grid(max(1, numBlocks), 1, 1);
    dim3 threads(SCAN_CTA_SIZE, 1, 1);
    // make sure there are no CUDA errors before we start
    CUT_CHECK_ERROR("segmentedScanArrayRecursive before kernels");
    // true when the input exactly fills every block (no partial-block path)
    bool fullBlock = (numElements ==
                      (numBlocks * SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE));
    // NOTE(review): the device is queried on every recursion level - this could
    // be cached by the caller. Also, only the *minor* compute revision is
    // tested (assumes sm 1.x hardware); confirm for newer architectures.
    bool sm12OrBetterHw;
    hipDeviceProp_t deviceProp;
    int dev;
    CUDA_SAFE_CALL(hipGetDevice(&dev));
    CUDA_SAFE_CALL(hipGetDeviceProperties(&deviceProp, dev));
    if (deviceProp.minor >= 2)
        sm12OrBetterHw = true;
    else
        sm12OrBetterHw = false;
    // traitsCode encodes the launch variant as a 3-bit value:
    //   bit 0 - more than one block (per-block sums/flags/indices are written)
    //   bit 1 - last block is full
    //   bit 2 - sm1.2-or-better hardware path
    unsigned int traitsCode = 0;
    if (numBlocks > 1) traitsCode |= 1;
    if (fullBlock) traitsCode |= 2;
    if (sm12OrBetterHw) traitsCode |= 4;
    switch(traitsCode)
    {
    case 0: // single block, single row, non-full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, false,
            false> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 1: // multi block, single row, non-full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, true,
            false> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements,
            d_blockSums[level], d_blockFlags[level],
            d_blockIndices[level]);
        break;
    case 2: // single block, single row, full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, false,
            false> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 3: // multi block, single row, full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, true,
            false> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements,
            d_blockSums[level], d_blockFlags[level],
            d_blockIndices[level]);
        break;
    case 4: // single block, single row, non-full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, false,
            true> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 5: // multi block, single row, non-full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, true,
            true> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements,
            d_blockSums[level], d_blockFlags[level],
            d_blockIndices[level]);
        break;
    case 6: // single block, single row, full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, false,
            true> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 7: // multi block, single row, full last block
        hipLaunchKernelGGL(( segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, true,
            true> >)
            , dim3(grid), dim3(threads), sharedMemSize , 0,
            d_out, d_idata, d_iflags, numElements,
            d_blockSums[level], d_blockFlags[level],
            d_blockIndices[level]);
        break;
    }
    CUT_CHECK_ERROR("segmentedScanArrayRecursive after block level scans");
    if (numBlocks > 1)
    {
        // After scanning all the sub-blocks, we are mostly done. But
        // now we need to take all of the last values of the
        // sub-blocks and segment scan those. This will give us a new value
        // that must be added to the first segment of each block to get
        // the final results.
        segmentedScanArrayRecursive<T, Op, isBackward, false, false>
            ((T*)d_blockSums[level], (const T*)d_blockSums[level],
             d_blockFlags[level], (T **)d_blockSums,
             d_blockFlags, d_blockIndices,
             numBlocks, level + 1);
        // propagate the per-block totals into the first segment of each block;
        // backward scans propagate to the right, forward scans to the left
        if (isBackward)
        {
            if (fullBlock)
                hipLaunchKernelGGL(( vectorSegmentedAddUniformToRight4<T, Op, true>), dim3(grid), dim3(threads), 0, 0,
                    d_out, d_blockSums[level], d_blockIndices[level],
                    numElements, 0, 0);
            else
                hipLaunchKernelGGL(( vectorSegmentedAddUniformToRight4<T, Op, false>), dim3(grid), dim3(threads), 0, 0,
                    d_out, d_blockSums[level], d_blockIndices[level],
                    numElements, 0, 0);
        }
        else
        {
            if (fullBlock)
                hipLaunchKernelGGL(( vectorSegmentedAddUniform4<T, Op, true>), dim3(grid), dim3(threads), 0, 0,
                    d_out, d_blockSums[level], d_blockIndices[level],
                    numElements, 0, 0);
            else
                hipLaunchKernelGGL(( vectorSegmentedAddUniform4<T, Op, false>), dim3(grid), dim3(threads), 0, 0,
                    d_out, d_blockSums[level], d_blockIndices[level],
                    numElements, 0, 0);
        }
        CUT_CHECK_ERROR("vectorSegmentedAddUniform4");
    }
}
#ifdef __cplusplus
extern "C"
{
#endif
// file scope
/** @brief Allocate intermediate block sums, block flags and block
* indices arrays in a CUDPPSegmentedScanPlan class.
*
* Segmented scans of large arrays must be split (possibly
* recursively) into a hierarchy of block segmented scans, where each
* block is scanned by a single CUDA thread block. At each recursive
* level of the scan, we need an array in which to store the total
* sums of all blocks in that level. Also at this level we have two
* more arrays - one which contains the OR-reductions of flags of all
* blocks at that level and the second which contains the
* min-reductions of indices of all blocks at that levels This
* function computes the amount of storage needed and allocates it.
*
* @param[in] plan Pointer to CUDPPSegmentedScanPlan object containing segmented scan
* options and number of elements, which is used to compute storage
* requirements.
*/
void allocSegmentedScanStorage(CUDPPSegmentedScanPlan *plan)
{
    plan->m_numEltsAllocated = plan->m_numElements;
    // First pass: count how many recursion levels need intermediate per-block
    // arrays (a level needs storage only when it spans more than one block).
    size_t numElts = plan->m_numElements;
    size_t level = 0;
    do
    {
        size_t numBlocks =
            max(1, (unsigned int)ceil
                ((double)numElts /
                 ((double)SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE)));
        if (numBlocks > 1)
        {
            level++;
        }
        numElts = numBlocks;
    } while (numElts > 1);
    // Element size of the per-level block-sum arrays depends on the datatype.
    size_t elementSize = 0;
    switch(plan->m_config.datatype)
    {
    case CUDPP_INT:
        plan->m_blockSums = (void**) malloc(level * sizeof(int*));
        elementSize = sizeof(int);
        break;
    case CUDPP_UINT:
        plan->m_blockSums = (void**) malloc(level * sizeof(unsigned int*));
        elementSize = sizeof(unsigned int);
        break;
    case CUDPP_FLOAT:
        plan->m_blockSums = (void**) malloc(level * sizeof(float*));
        elementSize = sizeof(float);
        break;
    default:
        // NOTE(review): any other datatype leaves m_blockSums unset and
        // elementSize == 0; confirm callers validate the datatype earlier.
        break;
    }
    plan->m_blockFlags =
        (unsigned int**) malloc(level * sizeof(unsigned int*));
    plan->m_blockIndices =
        (unsigned int**) malloc(level * sizeof(unsigned int*));
    plan->m_numLevelsAllocated = level;
    // Second pass: repeat the same level walk, this time allocating the device
    // arrays (block sums, OR-reduced flags, min-reduced indices) per level.
    numElts = plan->m_numElements;
    level = 0;
    do
    {
        size_t numBlocks =
            max(1,
                (unsigned int)ceil((double)numElts /
                                   ((double)SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE)));
        if (numBlocks > 1)
        {
            CUDA_SAFE_CALL(hipMalloc((void**) &(plan->m_blockSums[level]),
                                     numBlocks * elementSize));
            CUDA_SAFE_CALL(hipMalloc((void**) &(plan->m_blockFlags[level]),
                                     numBlocks * sizeof(unsigned int)));
            CUDA_SAFE_CALL(hipMalloc((void**) &(plan->m_blockIndices[level]),
                                     numBlocks * sizeof(unsigned int)));
            level++;
        }
        numElts = numBlocks;
    } while (numElts > 1);
    CUT_CHECK_ERROR("allocSegmentedScanStorage");
}
/** @brief Deallocate intermediate block sums, block flags and block
* indices arrays in a CUDPPSegmentedScanPlan class.
*
* These arrays must have been allocated by allocSegmentedScanStorage(),
* which is called by the constructor of CUDPPSegmentedScanPlan.
*
* @param[in] plan CUDPPSegmentedScanPlan class initialized by its constructor.
*/
void freeSegmentedScanStorage(CUDPPSegmentedScanPlan *plan)
{
    // release the per-level device scratch arrays
    for (unsigned int lvl = 0; lvl < plan->m_numLevelsAllocated; ++lvl)
    {
        hipFree(plan->m_blockSums[lvl]);
        hipFree(plan->m_blockFlags[lvl]);
        hipFree(plan->m_blockIndices[lvl]);
    }
    CUT_CHECK_ERROR("freeSegmentedScanStorage");
    // free the host-side arrays of device pointers
    free((void**)plan->m_blockSums);
    free((void**)plan->m_blockFlags);
    free((void**)plan->m_blockIndices);
    // reset the bookkeeping so the plan reads as deallocated
    plan->m_blockSums          = 0;
    plan->m_blockFlags         = 0;
    plan->m_blockIndices       = 0;
    plan->m_numEltsAllocated   = 0;
    plan->m_numLevelsAllocated = 0;
}
#ifdef __cplusplus
}
#endif
// Second-level dispatch: selects the scan operator at runtime and instantiates
// segmentedScanArrayRecursive with the matching functor. The fifth template
// argument (doShiftFlagsLeft) is isBackward: backward segmented scans need
// the head flags shifted by one element.
template <typename T, bool isBackward, bool isExclusive>
void cudppSegmentedScanDispatchOperator(void *d_out,
                                        const void *d_in,
                                        const unsigned int *d_iflags,
                                        int numElements,
                                        const CUDPPSegmentedScanPlan *plan
                                        )
{
    switch(plan->m_config.op)
    {
    case CUDPP_MAX:
        segmentedScanArrayRecursive<T, OperatorMax<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    case CUDPP_ADD:
        segmentedScanArrayRecursive<T, OperatorAdd<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    case CUDPP_MULTIPLY:
        segmentedScanArrayRecursive<T, OperatorMultiply<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    case CUDPP_MIN:
        segmentedScanArrayRecursive<T, OperatorMin<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    default:
        // unsupported operator: silently a no-op (NOTE(review): consider
        // surfacing an error to the caller)
        break;
    }
}
// First-level type dispatch: maps the plan's runtime CUDPP datatype onto the
// concrete element type T before handing off to the operator dispatch.
template <bool isBackward, bool isExclusive>
void cudppSegmentedScanDispatchType(void *d_out,
                                    const void *d_in,
                                    const unsigned int *d_iflags,
                                    int numElements,
                                    const CUDPPSegmentedScanPlan *plan
                                    )
{
    switch(plan->m_config.datatype)
    {
    case CUDPP_INT:
        cudppSegmentedScanDispatchOperator<int, isBackward, isExclusive>
            (d_out, d_in, d_iflags, numElements, plan);
        break;
    case CUDPP_UINT:
        cudppSegmentedScanDispatchOperator<unsigned int, isBackward, isExclusive>
            (d_out, d_in, d_iflags, numElements, plan);
        break;
    case CUDPP_FLOAT:
        cudppSegmentedScanDispatchOperator<float, isBackward, isExclusive>
            (d_out, d_in, d_iflags, numElements, plan);
        break;
    default:
        // unsupported datatype: silently a no-op (mirrors the operator dispatch)
        break;
    }
}
#ifdef __cplusplus
extern "C"
{
#endif
/** @brief Dispatch function to perform a scan (prefix sum) on an
* array with the specified configuration.
*
* This is the dispatch routine which calls segmentedScanArrayRecursive() with
* appropriate template parameters and arguments to achieve the scan as
* specified in \a plan.
*
* @param[in] numElements The number of elements to scan
* @param[in] plan Segmented Scan configuration (plan), initialized
* by CUDPPSegmentedScanPlan constructor
* @param[in] d_in The input array
* @param[in] d_iflags The input flags array
* @param[out] d_out The output array of segmented scan results
*/
void cudppSegmentedScanDispatch (void *d_out,
                                 const void *d_in,
                                 const unsigned int *d_iflags,
                                 int numElements,
                                 const CUDPPSegmentedScanPlan *plan
                                 )
{
    // Decode the two option bits once, then map them onto the four
    // compile-time template instantiations <isBackward, isExclusive>.
    const bool exclusive = (CUDPP_OPTION_EXCLUSIVE & plan->m_config.options) != 0;
    const bool backward  = (CUDPP_OPTION_BACKWARD  & plan->m_config.options) != 0;

    if (exclusive)
    {
        if (backward)
            cudppSegmentedScanDispatchType<true, true>(d_out, d_in, d_iflags, numElements, plan);
        else
            cudppSegmentedScanDispatchType<false, true>(d_out, d_in, d_iflags, numElements, plan);
    }
    else
    {
        if (backward)
            cudppSegmentedScanDispatchType<true, false>(d_out, d_in, d_iflags, numElements, plan);
        else
            cudppSegmentedScanDispatchType<false, false>(d_out, d_in, d_iflags, numElements, plan);
    }
}
#ifdef __cplusplus
}
#endif
/** @} */ // end segmented scan functions
/** @} */ // end cudpp_app
| 2c6cf303c25b9af3c6ece9c9c569016a410b1ce7.cu | // ***************************************************************
// cuDPP -- CUDA Data Parallel Primitives library
// -------------------------------------------------------------
// $Revision: 3505 $
// $Date: 2007-07-06 09:26:06 -0700 (Fri, 06 Jul 2007) $
// -------------------------------------------------------------
// This source code is distributed under the terms of license.txt in
// the root directory of this source distribution.
// -------------------------------------------------------------
/**
* @file
* segmented_scan_app.cu
*
* @brief CUDPP application-level scan routines
*/
/** \addtogroup cudpp_app
*
*/
/** @name Segmented Scan Functions
* @{
*/
#include "cudpp.h"
#include "cudpp_util.h"
#include "cudpp_plan.h"
#include "kernel/segmented_scan_kernel.cu"
#include "kernel/vector_kernel.cu"
#include <cutil.h>
#include <cstdlib>
#include <cstdio>
#include <assert.h>
/** @brief Perform recursive scan on arbitrary size arrays
*
* This is the CPU-side workhorse function of the segmented scan
* engine. This function invokes the CUDA kernels which perform the
* segmented scan on individual blocks.
*
* Scans of large arrays must be split (possibly recursively) into a
* hierarchy of block scans, where each block is scanned by a single
* CUDA thread block. At each recursive level of the
* segmentedScanArrayRecursive first invokes a kernel to scan all blocks of
* that level, and if the level has more than one block, it calls
* itself recursively. On returning from each recursive level, the
* total sum of each block from the level below is added to all
* elements of the first segment of the corresponding block in this
* level.
*
* Template parameter T is the data type of the input data.
* Template parameter op is the binary operator of the segmented scan.
* Template parameter isBackward specifies whether the direction is backward
* (not implemented). It is forward if it is false.
* Template parameter isExclusive specifies whether the segmented scan
* is exclusive (true) or inclusive (false).
*
* @param[out] d_out The output array for the segmented scan results
* @param[in] d_idata The input array to be scanned
* @param[in] d_iflags The input flags vector which specifies the
* segments. The first element of a segment is marked by a 1 in the
* corresponding position in d_iflags vector. All other elements of
* d_iflags is 0.
* @param[out] d_blockSums Array of arrays of per-block sums (one
* array per recursive level, allocated
* by allocScanStorage())
* @param[out] d_blockFlags Array of arrays of per-block OR-reductions
* of flags (one array per recursive level, allocated by
* allocScanStorage())
* @param[out] d_blockIndices Array of arrays of per-block
* min-reductions of indices (one array per recursive level, allocated
* by allocSegmentedScanStorage()). An index for a particular position \c i in
* a block is calculated as - if \c d_iflags[i] is set then it is the
* 1-based index of that position (i.e if \c d_iflags[10] is set then
* index is \c 11) otherwise the index is \c INT_MAX (the identity
* element of a min operator)
* @param[in] numElements The number of elements in the array to scan
* @param[in] level The current recursive level of the scan
*/
template <typename T, class Op, bool isBackward, bool isExclusive, bool doShiftFlagsLeft>
void segmentedScanArrayRecursive(T *d_out,
                                 const T *d_idata,
                                 const unsigned int *d_iflags,
                                 T **d_blockSums,
                                 unsigned int **d_blockFlags,
                                 unsigned int **d_blockIndices,
                                 int numElements,
                                 int level)
{
    // One CUDA block handles SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE elements.
    unsigned int numBlocks =
        max(1, (int)ceil((double)numElements /
                         ((double)SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE)));

    // This is the number of elements per block that the
    // CTA level API is aware of
    unsigned int numEltsPerBlock = SCAN_CTA_SIZE * 2;

    // Space to store flags - we need two sets. One gets modified and the
    // other doesn't
    unsigned int flagSpace = numEltsPerBlock * sizeof(unsigned int);

    // Space to store indices
    unsigned int idxSpace = numEltsPerBlock * sizeof(unsigned int);

    // Total shared memory space: data tile + index tile + flag tile.
    unsigned int sharedMemSize =
        sizeof(T) * (numEltsPerBlock) + idxSpace + flagSpace;

    // setup execution parameters
    dim3 grid(max(1, numBlocks), 1, 1);
    dim3 threads(SCAN_CTA_SIZE, 1, 1);

    // make sure there are no CUDA errors before we start
    CUT_CHECK_ERROR("segmentedScanArrayRecursive before kernels");

    // True when the last block is exactly full; selects kernel variants
    // that can skip the tail bounds checks.
    bool fullBlock = (numElements ==
                      (numBlocks * SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE));

    bool sm12OrBetterHw;
    cudaDeviceProp deviceProp;
    int dev;
    CUDA_SAFE_CALL(cudaGetDevice(&dev));
    CUDA_SAFE_CALL(cudaGetDeviceProperties(&deviceProp, dev));
    // NOTE(review): only the minor revision is tested here, so a device with
    // compute capability 2.0 (minor == 0) would take the non-sm12 path even
    // though it is newer than 1.2 — presumably this predates SM 2.x hardware.
    // Verify against the kernel traits before changing.
    if (deviceProp.minor >= 2)
        sm12OrBetterHw = true;
    else
        sm12OrBetterHw = false;

    // Pack the three runtime choices into a 3-bit code:
    //   bit 0 = multi-block, bit 1 = full last block, bit 2 = sm12+ hardware.
    unsigned int traitsCode = 0;
    if (numBlocks > 1) traitsCode |= 1;
    if (fullBlock) traitsCode |= 2;
    if (sm12OrBetterHw) traitsCode |= 4;

    switch(traitsCode)
    {
    case 0: // single block, non-full last block, pre-sm12 hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, false,
            false> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 1: // multi block, non-full last block, pre-sm12 hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, true,
            false> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements,
             d_blockSums[level], d_blockFlags[level],
             d_blockIndices[level]);
        break;
    case 2: // single block, full last block, pre-sm12 hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, false,
            false> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 3: // multi block, full last block, pre-sm12 hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, true,
            false> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements,
             d_blockSums[level], d_blockFlags[level],
             d_blockIndices[level]);
        break;
    case 4: // single block, non-full last block, sm12+ hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, false,
            true> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 5: // multi block, non-full last block, sm12+ hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, false, true,
            true> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements,
             d_blockSums[level], d_blockFlags[level],
             d_blockIndices[level]);
        break;
    case 6: // single block, full last block, sm12+ hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, false,
            true> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements, 0, 0, 0);
        break;
    case 7: // multi block, full last block, sm12+ hw
        segmentedScan4<T, SegmentedScanTraits<T, Op, isBackward, isExclusive, doShiftFlagsLeft, true, true,
            true> >
            <<< grid, threads, sharedMemSize >>>
            (d_out, d_idata, d_iflags, numElements,
             d_blockSums[level], d_blockFlags[level],
             d_blockIndices[level]);
        break;
    }

    CUT_CHECK_ERROR("segmentedScanArrayRecursive after block level scans");

    if (numBlocks > 1)
    {
        // After scanning all the sub-blocks, we are mostly done. But
        // now we need to take all of the last values of the
        // sub-blocks and segment scan those. This will give us a new value
        // that must be added to the first segment of each block to get
        // the final results.
        // The recursive scan of block sums is always inclusive
        // (isExclusive = false) and never shifts flags.
        segmentedScanArrayRecursive<T, Op, isBackward, false, false>
            ((T*)d_blockSums[level], (const T*)d_blockSums[level],
             d_blockFlags[level], (T **)d_blockSums,
             d_blockFlags, d_blockIndices,
             numBlocks, level + 1);

        // Propagate each block's carry into the open (first/last) segment of
        // the next block; direction follows the scan direction.
        if (isBackward)
        {
            if (fullBlock)
                vectorSegmentedAddUniformToRight4<T, Op, true><<<grid, threads>>>
                    (d_out, d_blockSums[level], d_blockIndices[level],
                     numElements, 0, 0);
            else
                vectorSegmentedAddUniformToRight4<T, Op, false><<<grid, threads>>>
                    (d_out, d_blockSums[level], d_blockIndices[level],
                     numElements, 0, 0);
        }
        else
        {
            if (fullBlock)
                vectorSegmentedAddUniform4<T, Op, true><<<grid, threads>>>
                    (d_out, d_blockSums[level], d_blockIndices[level],
                     numElements, 0, 0);
            else
                vectorSegmentedAddUniform4<T, Op, false><<<grid, threads>>>
                    (d_out, d_blockSums[level], d_blockIndices[level],
                     numElements, 0, 0);
        }

        CUT_CHECK_ERROR("vectorSegmentedAddUniform4");
    }
}
#ifdef __cplusplus
extern "C"
{
#endif
// file scope
/** @brief Allocate intermediate block sums, block flags and block
* indices arrays in a CUDPPSegmentedScanPlan class.
*
* Segmented scans of large arrays must be split (possibly
* recursively) into a hierarchy of block segmented scans, where each
* block is scanned by a single CUDA thread block. At each recursive
* level of the scan, we need an array in which to store the total
* sums of all blocks in that level. Also at this level we have two
* more arrays - one which contains the OR-reductions of flags of all
* blocks at that level and the second which contains the
* min-reductions of indices of all blocks at that levels This
* function computes the amount of storage needed and allocates it.
*
* @param[in] plan Pointer to CUDPPSegmentedScanPlan object containing segmented scan
* options and number of elements, which is used to compute storage
* requirements.
*/
void allocSegmentedScanStorage(CUDPPSegmentedScanPlan *plan)
{
    plan->m_numEltsAllocated = plan->m_numElements;

    size_t numElts = plan->m_numElements;
    size_t level = 0;

    // First pass: count the number of recursion levels that actually need
    // intermediate storage (levels where more than one block is launched).
    do
    {
        size_t numBlocks =
            max(1, (unsigned int)ceil
                ((double)numElts /
                 ((double)SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE)));
        if (numBlocks > 1)
        {
            level++;
        }
        numElts = numBlocks;
    } while (numElts > 1);

    // Allocate the host-side arrays of per-level device pointers, sized
    // by the element type of the scan.
    size_t elementSize = 0;

    switch(plan->m_config.datatype)
    {
    case CUDPP_INT:
        plan->m_blockSums = (void**) malloc(level * sizeof(int*));
        elementSize = sizeof(int);
        break;
    case CUDPP_UINT:
        plan->m_blockSums = (void**) malloc(level * sizeof(unsigned int*));
        elementSize = sizeof(unsigned int);
        break;
    case CUDPP_FLOAT:
        plan->m_blockSums = (void**) malloc(level * sizeof(float*));
        elementSize = sizeof(float);
        break;
    default:
        // NOTE(review): for an unsupported datatype m_blockSums stays
        // unset and elementSize stays 0 — presumably the plan was
        // validated earlier; confirm before relying on this path.
        break;
    }

    plan->m_blockFlags =
        (unsigned int**) malloc(level * sizeof(unsigned int*));
    plan->m_blockIndices =
        (unsigned int**) malloc(level * sizeof(unsigned int*));

    plan->m_numLevelsAllocated = level;

    // Second pass: walk the same recursion again and allocate one device
    // buffer per level for block sums, OR-reduced flags and min-reduced
    // indices (one entry per block at that level).
    numElts = plan->m_numElements;
    level = 0;

    do
    {
        size_t numBlocks =
            max(1,
                (unsigned int)ceil((double)numElts /
                                   ((double)SEGSCAN_ELTS_PER_THREAD * SCAN_CTA_SIZE)));
        if (numBlocks > 1)
        {
            CUDA_SAFE_CALL(cudaMalloc((void**) &(plan->m_blockSums[level]),
                                      numBlocks * elementSize));
            CUDA_SAFE_CALL(cudaMalloc((void**) &(plan->m_blockFlags[level]),
                                      numBlocks * sizeof(unsigned int)));
            CUDA_SAFE_CALL(cudaMalloc((void**) &(plan->m_blockIndices[level]),
                                      numBlocks * sizeof(unsigned int)));
            level++;
        }
        numElts = numBlocks;
    } while (numElts > 1);

    CUT_CHECK_ERROR("allocSegmentedScanStorage");
}
/** @brief Deallocate intermediate block sums, block flags and block
* indices arrays in a CUDPPSegmentedScanPlan class.
*
* These arrays must have been allocated by allocSegmentedScanStorage(),
* which is called by the constructor of CUDPPSegmentedScanPlan.
*
* @param[in] plan CUDPPSegmentedScanPlan class initialized by its constructor.
*/
void freeSegmentedScanStorage(CUDPPSegmentedScanPlan *plan)
{
    // Release the per-level device buffers allocated by
    // allocSegmentedScanStorage().
    for (unsigned int lvl = 0; lvl < plan->m_numLevelsAllocated; ++lvl)
    {
        cudaFree(plan->m_blockSums[lvl]);
        cudaFree(plan->m_blockFlags[lvl]);
        cudaFree(plan->m_blockIndices[lvl]);
    }

    CUT_CHECK_ERROR("freeSegmentedScanStorage");

    // Release the host-side pointer arrays themselves.
    free(plan->m_blockSums);
    free(plan->m_blockFlags);
    free(plan->m_blockIndices);

    // Reset the plan so a double-free or stale reuse is detectable.
    plan->m_blockSums = 0;
    plan->m_blockFlags = 0;
    plan->m_blockIndices = 0;
    plan->m_numEltsAllocated = 0;
    plan->m_numLevelsAllocated = 0;
}
#ifdef __cplusplus
}
#endif
/** @brief Dispatch on the binary operator stored in the plan.
 *
 *  Casts the untyped pointers to the element type T and forwards to
 *  segmentedScanArrayRecursive() with the matching operator functor.
 *  The fifth template argument (doShiftFlagsLeft) is passed isBackward.
 *  Unrecognized operators are a no-op.
 */
template <typename T, bool isBackward, bool isExclusive>
void cudppSegmentedScanDispatchOperator(void *d_out,
                                        const void *d_in,
                                        const unsigned int *d_iflags,
                                        int numElements,
                                        const CUDPPSegmentedScanPlan *plan
                                        )
{
    switch(plan->m_config.op)
    {
    case CUDPP_MAX:
        segmentedScanArrayRecursive<T, OperatorMax<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    case CUDPP_ADD:
        segmentedScanArrayRecursive<T, OperatorAdd<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    case CUDPP_MULTIPLY:
        segmentedScanArrayRecursive<T, OperatorMultiply<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    case CUDPP_MIN:
        segmentedScanArrayRecursive<T, OperatorMin<T>, isBackward, isExclusive, isBackward>
            ((T *)d_out, (const T *)d_in, d_iflags, (T **)plan->m_blockSums, plan->m_blockFlags,
             plan->m_blockIndices, numElements, 0);
        break;
    default:
        break;
    }
}
/** @brief Dispatch on the element datatype stored in the plan.
 *
 *  Instantiates cudppSegmentedScanDispatchOperator() for int, unsigned
 *  int or float as selected by plan->m_config.datatype. Any other
 *  datatype is a no-op.
 */
template <bool isBackward, bool isExclusive>
void cudppSegmentedScanDispatchType(void *d_out,
                                    const void *d_in,
                                    const unsigned int *d_iflags,
                                    int numElements,
                                    const CUDPPSegmentedScanPlan *plan
                                    )
{
    switch(plan->m_config.datatype)
    {
    case CUDPP_INT:
        cudppSegmentedScanDispatchOperator<int, isBackward, isExclusive>
            (d_out, d_in, d_iflags, numElements, plan);
        break;
    case CUDPP_UINT:
        cudppSegmentedScanDispatchOperator<unsigned int, isBackward, isExclusive>
            (d_out, d_in, d_iflags, numElements, plan);
        break;
    case CUDPP_FLOAT:
        cudppSegmentedScanDispatchOperator<float, isBackward, isExclusive>
            (d_out, d_in, d_iflags, numElements, plan);
        break;
    default:
        break;
    }
}
#ifdef __cplusplus
extern "C"
{
#endif
/** @brief Dispatch function to perform a scan (prefix sum) on an
* array with the specified configuration.
*
* This is the dispatch routine which calls segmentedScanArrayRecursive() with
* appropriate template parameters and arguments to achieve the scan as
* specified in \a plan.
*
* @param[in] numElements The number of elements to scan
* @param[in] plan Segmented Scan configuration (plan), initialized
* by CUDPPSegmentedScanPlan constructor
* @param[in] d_in The input array
* @param[in] d_iflags The input flags array
* @param[out] d_out The output array of segmented scan results
*/
void cudppSegmentedScanDispatch (void *d_out,
                                 const void *d_in,
                                 const unsigned int *d_iflags,
                                 int numElements,
                                 const CUDPPSegmentedScanPlan *plan
                                 )
{
    // Map the two runtime option bits (exclusive, backward) onto the four
    // compile-time instantiations <isBackward, isExclusive>.
    if (CUDPP_OPTION_EXCLUSIVE & plan->m_config.options)
    {
        if (CUDPP_OPTION_BACKWARD & plan->m_config.options)
        {
            cudppSegmentedScanDispatchType<true, true>(d_out, d_in, d_iflags, numElements, plan);
        }
        else
        {
            cudppSegmentedScanDispatchType<false, true>(d_out, d_in, d_iflags, numElements, plan);
        }
    }
    else
    {
        if (CUDPP_OPTION_BACKWARD & plan->m_config.options)
        {
            cudppSegmentedScanDispatchType<true, false>(d_out, d_in, d_iflags, numElements, plan);
        }
        else
        {
            cudppSegmentedScanDispatchType<false, false>(d_out, d_in, d_iflags, numElements, plan);
        }
    }
}
#ifdef __cplusplus
}
#endif
/** @} */ // end segmented scan functions
/** @} */ // end cudpp_app
|
17e8db38ebd85eaa87adb2307692f6093ca04a38.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <wb.h>
#define wbCheck(stmt) \
do { \
hipError_t err = stmt; \
if (err != hipSuccess) { \
wbLog(ERROR, "Failed to run stmt ", #stmt); \
wbLog(ERROR, "Got CUDA error ... ", hipGetErrorString(err)); \
return -1; \
} \
} while (0)
#define TILE_WIDTH 16
// Compute C = A * B
// Compute C = A * B
//
// Tiled shared-memory matrix multiply. Expects a 2D launch where
// blockDim.x == blockDim.y == TILE_WIDTH; handles arbitrary matrix sizes
// by zero-padding out-of-range tile entries.
__global__ void matrixMultiplyShared(float *A, float *B, float *C,
                                     int numARows, int numAColumns,
                                     int numBRows, int numBColumns,
                                     int numCRows, int numCColumns) {
  //@@ Insert code to implement matrix multiplication here
  //@@ You have to use tiling with shared memory for arbitrary size
  float CValue = 0;
  // Output element this thread is responsible for.
  int row = blockIdx.y * TILE_WIDTH + threadIdx.y;
  int col = blockIdx.x * TILE_WIDTH + threadIdx.x;
  __shared__ float As[TILE_WIDTH][TILE_WIDTH];
  __shared__ float Bs[TILE_WIDTH][TILE_WIDTH];
  // March the tiles along the shared dimension (numAColumns == numBRows).
  for (int i = 0; i < (TILE_WIDTH + numAColumns - 1)/TILE_WIDTH; i++) {
     // Stage one tile of A and one of B; zero-fill past the matrix edge
     // so the inner product below needs no bounds checks.
     if (i * TILE_WIDTH + threadIdx.x < numAColumns && row < numARows)
       As[threadIdx.y][threadIdx.x] = A[row * numAColumns + i*TILE_WIDTH + threadIdx.x];
     else
       As[threadIdx.y][threadIdx.x] = 0.0;
     if (i * TILE_WIDTH + threadIdx.y < numBRows && col < numBColumns)
       Bs[threadIdx.y][threadIdx.x] = B[(i * TILE_WIDTH + threadIdx.y) * numBColumns + col];
     else
       Bs[threadIdx.y][threadIdx.x] = 0.0;
     // Barrier: all threads must finish writing the tiles before any reads.
     __syncthreads();
     for (int j = 0; j < TILE_WIDTH; ++j)
       CValue += As[threadIdx.y][j] * Bs[j][threadIdx.x];
     // Barrier: tiles must not be overwritten while still being read.
     __syncthreads();
  }
  // Guarded store — the grid may overhang the matrix at the edges.
  if (row < numCRows && col < numCColumns)
    C[((blockIdx.y * blockDim.y + threadIdx.y) * numCColumns) + (blockIdx.x * blockDim.x) + threadIdx.x] = CValue;
}
// Host driver: imports two matrices, multiplies them on the GPU with the
// tiled kernel, and checks the result against the expected solution.
//
// Fixes over the original: every HIP API call is now routed through the
// wbCheck macro (it was defined but never used, so errors were silently
// dropped), the kernel launch is followed by hipGetLastError(), and the
// grid is sized with a proper ceiling division instead of "n/T + 1",
// which launched a wasted extra block row/column whenever a dimension
// was an exact multiple of TILE_WIDTH.
int main(int argc, char **argv) {
  wbArg_t args;
  float *hostA; // The A matrix
  float *hostB; // The B matrix
  float *hostC; // The output C matrix
  float *deviceA; // A matrix on device
  float *deviceB; // B matrix on device
  float *deviceC; // C matrix on device
  int numARows;    // number of rows in the matrix A
  int numAColumns; // number of columns in the matrix A
  int numBRows;    // number of rows in the matrix B
  int numBColumns; // number of columns in the matrix B
  int numCRows;    // number of rows in the matrix C
  int numCColumns; // number of columns in the matrix C

  args = wbArg_read(argc, argv);

  wbTime_start(Generic, "Importing data and creating memory on host");
  hostA = (float *)wbImport(wbArg_getInputFile(args, 0), &numARows,
                            &numAColumns);
  hostB = (float *)wbImport(wbArg_getInputFile(args, 1), &numBRows,
                            &numBColumns);
  // C inherits A's rows and B's columns.
  numCRows = numARows;
  numCColumns = numBColumns;
  hostC = (float *) malloc(sizeof(float) * numCRows * numCColumns);
  wbTime_stop(Generic, "Importing data and creating memory on host");

  wbLog(TRACE, "The dimensions of A are ", numARows, " x ", numAColumns);
  wbLog(TRACE, "The dimensions of B are ", numBRows, " x ", numBColumns);
  wbLog(TRACE, "The dimensions of C are ", numCRows, " x ", numCColumns);

  wbTime_start(GPU, "Allocating GPU memory.");
  wbCheck(hipMalloc((void **) &deviceA, sizeof(float)*numARows*numAColumns));
  wbCheck(hipMalloc((void **) &deviceB, sizeof(float)*numBRows*numBColumns));
  wbCheck(hipMalloc((void **) &deviceC, sizeof(float)*numCRows*numCColumns));
  wbTime_stop(GPU, "Allocating GPU memory.");

  wbTime_start(GPU, "Copying input memory to the GPU.");
  wbCheck(hipMemcpy(deviceA, hostA, sizeof(float)*numARows*numAColumns, hipMemcpyHostToDevice));
  wbCheck(hipMemcpy(deviceB, hostB, sizeof(float)*numBRows*numBColumns, hipMemcpyHostToDevice));
  wbTime_stop(GPU, "Copying input memory to the GPU.");

  // Ceiling division: exactly enough TILE_WIDTH x TILE_WIDTH blocks to
  // cover C, with no superfluous extra row/column of blocks.
  dim3 dimBlock(TILE_WIDTH, TILE_WIDTH, 1);
  dim3 dimGrid((numCColumns + TILE_WIDTH - 1) / TILE_WIDTH,
               (numCRows + TILE_WIDTH - 1) / TILE_WIDTH, 1);

  wbTime_start(Compute, "Performing CUDA computation");
  hipLaunchKernelGGL(( matrixMultiplyShared), dim3(dimGrid), dim3(dimBlock), 0, 0, deviceA, deviceB, deviceC, numARows, numAColumns, numBRows, numBColumns, numCRows, numCColumns);
  wbCheck(hipGetLastError());        // catch launch-configuration errors
  wbCheck(hipDeviceSynchronize());   // catch asynchronous execution errors
  wbTime_stop(Compute, "Performing CUDA computation");

  wbTime_start(Copy, "Copying output memory to the CPU");
  wbCheck(hipMemcpy(hostC, deviceC, sizeof(float)*numCRows*numCColumns, hipMemcpyDeviceToHost));
  wbTime_stop(Copy, "Copying output memory to the CPU");

  wbTime_start(GPU, "Freeing GPU Memory");
  wbCheck(hipFree(deviceA));
  wbCheck(hipFree(deviceB));
  wbCheck(hipFree(deviceC));
  wbTime_stop(GPU, "Freeing GPU Memory");

  wbSolution(args, hostC, numCRows, numCColumns);

  free(hostA);
  free(hostB);
  free(hostC);

  return 0;
}
| 17e8db38ebd85eaa87adb2307692f6093ca04a38.cu | #include <wb.h>
#define wbCheck(stmt) \
do { \
cudaError_t err = stmt; \
if (err != cudaSuccess) { \
wbLog(ERROR, "Failed to run stmt ", #stmt); \
wbLog(ERROR, "Got CUDA error ... ", cudaGetErrorString(err)); \
return -1; \
} \
} while (0)
#define TILE_WIDTH 16
// Compute C = A * B
// Compute C = A * B using shared-memory tiling.
//
// Launch contract: blockDim.x == blockDim.y == TILE_WIDTH (the host sets
// dimBlock to exactly that). Arbitrary matrix sizes are supported by
// zero-padding tile entries that fall outside the matrices.
__global__ void matrixMultiplyShared(float *A, float *B, float *C,
                                     int numARows, int numAColumns,
                                     int numBRows, int numBColumns,
                                     int numCRows, int numCColumns) {
  const int tx = threadIdx.x;
  const int ty = threadIdx.y;
  const int row = blockIdx.y * TILE_WIDTH + ty;   // C row this thread owns
  const int col = blockIdx.x * TILE_WIDTH + tx;   // C column this thread owns

  __shared__ float tileA[TILE_WIDTH][TILE_WIDTH];
  __shared__ float tileB[TILE_WIDTH][TILE_WIDTH];

  float acc = 0;
  const int numPhases = (TILE_WIDTH + numAColumns - 1) / TILE_WIDTH;

  for (int phase = 0; phase < numPhases; ++phase) {
    const int aCol = phase * TILE_WIDTH + tx;
    const int bRow = phase * TILE_WIDTH + ty;

    // Stage the tiles, zero-filling past the matrix edges so the inner
    // product below needs no bounds checks.
    tileA[ty][tx] = (aCol < numAColumns && row < numARows)
                        ? A[row * numAColumns + aCol] : 0.0;
    tileB[ty][tx] = (bRow < numBRows && col < numBColumns)
                        ? B[bRow * numBColumns + col] : 0.0;

    __syncthreads();  // tiles fully written before anyone reads

    for (int k = 0; k < TILE_WIDTH; ++k)
      acc += tileA[ty][k] * tileB[k][tx];

    __syncthreads();  // all reads done before the next phase overwrites
  }

  // Guarded store — the grid may overhang the matrix at the edges.
  // (row/col equal the original's store index under the blockDim ==
  // TILE_WIDTH launch contract.)
  if (row < numCRows && col < numCColumns)
    C[row * numCColumns + col] = acc;
}
// Host driver: imports two matrices, multiplies them on the GPU with the
// tiled kernel, and checks the result against the expected solution.
//
// Fixes over the original: every CUDA API call is now routed through the
// wbCheck macro (it was defined but never used, so errors were silently
// dropped), the kernel launch is followed by cudaGetLastError(), and the
// grid is sized with a proper ceiling division instead of "n/T + 1",
// which launched a wasted extra block row/column whenever a dimension
// was an exact multiple of TILE_WIDTH.
int main(int argc, char **argv) {
  wbArg_t args;
  float *hostA; // The A matrix
  float *hostB; // The B matrix
  float *hostC; // The output C matrix
  float *deviceA; // A matrix on device
  float *deviceB; // B matrix on device
  float *deviceC; // C matrix on device
  int numARows;    // number of rows in the matrix A
  int numAColumns; // number of columns in the matrix A
  int numBRows;    // number of rows in the matrix B
  int numBColumns; // number of columns in the matrix B
  int numCRows;    // number of rows in the matrix C
  int numCColumns; // number of columns in the matrix C

  args = wbArg_read(argc, argv);

  wbTime_start(Generic, "Importing data and creating memory on host");
  hostA = (float *)wbImport(wbArg_getInputFile(args, 0), &numARows,
                            &numAColumns);
  hostB = (float *)wbImport(wbArg_getInputFile(args, 1), &numBRows,
                            &numBColumns);
  // C inherits A's rows and B's columns.
  numCRows = numARows;
  numCColumns = numBColumns;
  hostC = (float *) malloc(sizeof(float) * numCRows * numCColumns);
  wbTime_stop(Generic, "Importing data and creating memory on host");

  wbLog(TRACE, "The dimensions of A are ", numARows, " x ", numAColumns);
  wbLog(TRACE, "The dimensions of B are ", numBRows, " x ", numBColumns);
  wbLog(TRACE, "The dimensions of C are ", numCRows, " x ", numCColumns);

  wbTime_start(GPU, "Allocating GPU memory.");
  wbCheck(cudaMalloc((void **) &deviceA, sizeof(float)*numARows*numAColumns));
  wbCheck(cudaMalloc((void **) &deviceB, sizeof(float)*numBRows*numBColumns));
  wbCheck(cudaMalloc((void **) &deviceC, sizeof(float)*numCRows*numCColumns));
  wbTime_stop(GPU, "Allocating GPU memory.");

  wbTime_start(GPU, "Copying input memory to the GPU.");
  wbCheck(cudaMemcpy(deviceA, hostA, sizeof(float)*numARows*numAColumns, cudaMemcpyHostToDevice));
  wbCheck(cudaMemcpy(deviceB, hostB, sizeof(float)*numBRows*numBColumns, cudaMemcpyHostToDevice));
  wbTime_stop(GPU, "Copying input memory to the GPU.");

  // Ceiling division: exactly enough TILE_WIDTH x TILE_WIDTH blocks to
  // cover C, with no superfluous extra row/column of blocks.
  dim3 dimBlock(TILE_WIDTH, TILE_WIDTH, 1);
  dim3 dimGrid((numCColumns + TILE_WIDTH - 1) / TILE_WIDTH,
               (numCRows + TILE_WIDTH - 1) / TILE_WIDTH, 1);

  wbTime_start(Compute, "Performing CUDA computation");
  matrixMultiplyShared<<<dimGrid, dimBlock>>>(deviceA, deviceB, deviceC, numARows, numAColumns, numBRows, numBColumns, numCRows, numCColumns);
  wbCheck(cudaGetLastError());        // catch launch-configuration errors
  wbCheck(cudaDeviceSynchronize());   // catch asynchronous execution errors
  wbTime_stop(Compute, "Performing CUDA computation");

  wbTime_start(Copy, "Copying output memory to the CPU");
  wbCheck(cudaMemcpy(hostC, deviceC, sizeof(float)*numCRows*numCColumns, cudaMemcpyDeviceToHost));
  wbTime_stop(Copy, "Copying output memory to the CPU");

  wbTime_start(GPU, "Freeing GPU Memory");
  wbCheck(cudaFree(deviceA));
  wbCheck(cudaFree(deviceB));
  wbCheck(cudaFree(deviceC));
  wbTime_stop(GPU, "Freeing GPU Memory");

  wbSolution(args, hostC, numCRows, numCColumns);

  free(hostA);
  free(hostB);
  free(hostC);

  return 0;
}
|
4d5647c27cb991f463140a02f5181e15a2ead5d6.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#ifndef THC_GENERIC_FILE
#define THC_GENERIC_FILE "generic/VolumetricMaxUnpooling.cu"
#else
// Validates the shapes of input, indices and (optionally) gradOutput for
// volumetric max-unpooling. Raises via THArgCheck/THError/AT_ERROR on any
// mismatch; returns normally otherwise. Pass gradOutput == NULL for the
// forward pass.
static inline void THNN_(VolumetricMaxUnpooling_shapeCheck)(
                         THCState *state,
                         THCTensor *input,
                         THCTensor *gradOutput,
                         THCIndexTensor *indices,
                         int oT,
                         int oW,
                         int oH,
                         int dT,
                         int dW,
                         int dH,
                         int pT,
                         int pW,
                         int pH) {
  int inputSlices = 0;

  // indices must have the same shape as input.
  THCUNN_check_shape_indices(state, indices, input);

  THArgCheck(dT > 0 && dW > 0 && dH > 0, 10,
             "stride should be greater than zero, but got dT: %d dH: %d dW: %d",
             dT, dH, dW);

  // 4D tensor: (slices, time, height, width); 5D adds a leading batch dim.
  if (THCTensor_(nDimension)(state, input) == 4)
  {
    inputSlices = THCTensor_(size)(state, input, 0);
  }
  else if (THCTensor_(nDimension)(state, input) == 5)
  {
    inputSlices = THCTensor_(size)(state, input, 1);
  }
  else
  {
    AT_ERROR("non-empty 4D or 5D tensor expected, got size: ",
             input->sizes());
  }

  // Dimension indices for (batch,) slices, time, height, width; shifted
  // by one when a batch dimension is present.
  int dimw = 3;
  int dimh = 2;
  int dimt = 1;
  int dimn = 0;

  if (input->dim() == 5)
  {
    dimt++;
    dimw++;
    dimh++;
    dimn++;
  }

  // For the backward pass, gradOutput's spatial extents must match the
  // requested output size and its slice count must match the input's.
  if (gradOutput != NULL) {
    if (oT != gradOutput->size[dimt] || oW != gradOutput->size[dimw] || oH != gradOutput->size[dimh])
    {
      THError(
        "Inconsistent gradOutput size. oT= %d, oH= %d, oW= %d, gradOutput: %dx%dx%d",
        oT, oH, oW, gradOutput->size[dimt], gradOutput->size[dimh], gradOutput->size[dimw]);
    }
    THCUNN_check_dim_size(state, gradOutput, input->dim(), dimn, inputSlices);
  }
}
// Forward pass of volumetric max-unpooling: scatters each input value to
// the output position recorded in `indices` (output is zero elsewhere).
// Accepts 4D (C,T,H,W) or 5D (N,C,T,H,W) input; a 5D batch is folded into
// the channel dimension before launching the kernels.
void THNN_(VolumetricMaxUnpooling_updateOutput)(
           THCState *state,
           THCTensor *input,
           THCTensor *output,
           THCIndexTensor *indices,
           int outputTime, int outputWidth, int outputHeight,
           int dT, int dW, int dH,
           int padT, int padW, int padH)
{
  int batchSize = 0;
  int inputSlices = 0;
  int inputTime = 0;
  int inputHeight = 0;
  int inputWidth = 0;

  THNN_(VolumetricMaxUnpooling_shapeCheck)(
        state, input, NULL, indices,
        outputTime, outputWidth, outputHeight,
        dT, dW, dH, padT, padW, padH);
  THCUNN_assertSameGPU(state, 3, input, indices, output);

  int fiveDimensionalInput = THCTensor_(nDimension)(state, input) == 5;

  if (THCTensor_(nDimension)(state, input) == 4)
  {
    /* sizes */
    batchSize = 1;
    inputSlices = THCTensor_(size)(state, input, 0);
    inputTime = THCTensor_(size)(state, input, 1);
    inputHeight = THCTensor_(size)(state, input, 2);
    inputWidth = THCTensor_(size)(state, input, 3);
  }
  else if (fiveDimensionalInput)
  {
    /* sizes */
    batchSize = THCTensor_(size)(state, input, 0);
    inputSlices = THCTensor_(size)(state, input, 1);
    inputTime = THCTensor_(size)(state, input, 2);
    inputHeight = THCTensor_(size)(state, input, 3);
    inputWidth = THCTensor_(size)(state, input, 4);
  }

  if (!fiveDimensionalInput) /* 4D */
  {
    /* resize output */
    THCTensor_(resize4d)(state, output, inputSlices,
                         outputTime, outputHeight, outputWidth);
  }
  else
  { /* 5D */
    THCTensor_(resize5d)(state, output, batchSize, inputSlices,
                         outputTime, outputHeight, outputWidth);
  }

  input = THCTensor_(newContiguous)(state, input);
  indices = THCIndexTensor_(newContiguous)(state, indices);
  output = THCTensor_(newContiguous)(state, output);
  // Unpooled positions that receive no value stay zero.
  THCTensor_(zero)(state, output);

  if (fiveDimensionalInput) {
    // Collapse batch and feature dimensions
    // newFoldBatchDim assumes contiguity so the newContiguous calls must
    // preceed this
    THCTensor *old_output = output;
    output = THCTensor_(newFoldBatchDim)(state, output);
    THCTensor_(free)(state, old_output);

    THCTensor *old_input = input;
    input = THCTensor_(newFoldBatchDim)(state, input);
    THCTensor_(free)(state, old_input);

    THCIndexTensor *old_indices = indices;
    indices = THCIndexTensor_(newFoldBatchDim)(state, indices);
    THCIndexTensor_(free)(state, old_indices);
  }

  real* outputData = THCTensor_(data)(state, output);

  THCDeviceTensor<real, 4> cudaInput;
  THCDeviceTensor<THCIndex_t, 4> cudaIndices;

  cudaInput = toDeviceTensor<real, 4>(state, input);
  cudaIndices = toDeviceTensor<THCIndex_t, 4>(state, indices);

  // Chunk the combined (time * slices * batch) extent through grid.z,
  // which is capped at 65535; offsetZ tells the kernel where to resume.
  int totalZ = inputTime * inputSlices * batchSize;
  int offsetZ = 0;
  dim3 block(32, 8);

  while (totalZ > 0) {
    dim3 grid(THCCeilDiv(inputWidth, static_cast<int>(block.x)),
              THCCeilDiv(inputHeight, static_cast<int>(block.y)),
              totalZ > 65535 ? 65535 : totalZ);

    hipLaunchKernelGGL(( cuda_VolumetricMaxUnpooling_updateOutput), dim3(grid), dim3(block),
      0, THCState_getCurrentStream(state),
        cudaInput, cudaIndices, outputData,
        outputTime, outputHeight, outputWidth,
        dT, dH, dW,
        padT, padH, padW, offsetZ);
    THCudaCheck(hipGetLastError());

    totalZ -= 65535;
    offsetZ += 65535;
  }

  // Drop the local references taken by newContiguous/newFoldBatchDim.
  THCTensor_(free)(state, input);
  THCTensor_(free)(state, output);
  THCIndexTensor_(free)(state, indices);
}
// Backward pass of volumetric max-unpooling: gathers each gradOutput value
// from the position recorded in `indices` back into gradInput. Mirrors the
// forward pass's 4D/5D handling and batch-dimension folding.
void THNN_(VolumetricMaxUnpooling_updateGradInput)(
           THCState *state,
           THCTensor *input,
           THCTensor *gradOutput,
           THCTensor *gradInput,
           THCIndexTensor *indices,
           int outputTime, int outputWidth, int outputHeight,
           int dT, int dW, int dH,
           int padT, int padW, int padH)
{
  int batchSize = 0;
  int inputSlices = 0;
  int inputTime = 0;
  int inputHeight = 0;
  int inputWidth = 0;

  THNN_(VolumetricMaxUnpooling_shapeCheck)(
        state, input, gradOutput, indices,
        outputTime, outputWidth, outputHeight,
        dT, dW, dH, padT, padW, padH);
  THCUNN_assertSameGPU(state, 4, input, indices, gradOutput, gradInput);

  int fiveDimensionalInput = THCTensor_(nDimension)(state, input) == 5;

  if (!fiveDimensionalInput) /* 4D */
  {
    batchSize = 1;
    inputSlices = THCTensor_(size)(state, input, 0);
    inputTime = THCTensor_(size)(state, input, 1);
    inputHeight = THCTensor_(size)(state, input, 2);
    inputWidth = THCTensor_(size)(state, input, 3);
  }
  else
  {
    batchSize = THCTensor_(size)(state, input, 0);
    inputSlices = THCTensor_(size)(state, input, 1);
    inputTime = THCTensor_(size)(state, input, 2);
    inputHeight = THCTensor_(size)(state, input, 3);
    inputWidth = THCTensor_(size)(state, input, 4);
  }

  input = THCTensor_(newContiguous)(state, input);
  THCTensor_(resizeAs)(state, gradInput, input);
  // Positions not referenced by indices receive zero gradient.
  THCTensor_(zero)(state, gradInput);

  indices = THCIndexTensor_(newContiguous)(state, indices);
  gradOutput = THCTensor_(newContiguous)(state, gradOutput);

  // Collapse batch and feature dimensions
  if (fiveDimensionalInput) {
    gradInput = THCTensor_(newFoldBatchDim)(state, gradInput);

    THCIndexTensor *old_indices = indices;
    indices = THCIndexTensor_(newFoldBatchDim)(state, indices);
    THCIndexTensor_(free)(state, old_indices);

    THCTensor *old_gradOutput = gradOutput;
    gradOutput = THCTensor_(newFoldBatchDim)(state, gradOutput);
    THCTensor_(free)(state, old_gradOutput);
  } else {
    // Balance the unconditional free of gradInput below (the 5D path gets
    // a fresh reference from newFoldBatchDim instead).
    THCTensor_(retain)(state, gradInput);
  }

  real* gradOutputData = THCTensor_(data)(state, gradOutput);

  THCDeviceTensor<real, 4> cudaGradInput;
  THCDeviceTensor<THCIndex_t, 4> cudaIndices;

  cudaGradInput = toDeviceTensor<real, 4>(state, gradInput);
  cudaIndices = toDeviceTensor<THCIndex_t, 4>(state, indices);

  // Chunk the combined (time * slices * batch) extent through grid.z,
  // which is capped at 65535; offsetZ tells the kernel where to resume.
  int totalZ = inputTime * inputSlices * batchSize;
  int offsetZ = 0;
  dim3 block(32, 8);

  while (totalZ > 0) {
    dim3 grid(THCCeilDiv(inputWidth, static_cast<int>(block.x)),
              THCCeilDiv(inputHeight, static_cast<int>(block.y)),
              totalZ > 65535 ? 65535 : totalZ);

    hipLaunchKernelGGL(( cuda_VolumetricMaxUnpooling_updateGradInput), dim3(grid), dim3(block),
      0, THCState_getCurrentStream(state),
        gradOutputData,
        outputTime, outputHeight, outputWidth,
        cudaIndices,
        cudaGradInput,
        dT, dH, dW,
        padT, padH, padW, offsetZ);
    THCudaCheck(hipGetLastError());

    totalZ -= 65535;
    offsetZ += 65535;
  }

  // cleanup
  THCTensor_(free)(state, gradOutput);
  THCTensor_(free)(state, gradInput);
  THCIndexTensor_(free)(state, indices);
  THCTensor_(free)(state, input);
}
#endif
| 4d5647c27cb991f463140a02f5181e15a2ead5d6.cu | #ifndef THC_GENERIC_FILE
#define THC_GENERIC_FILE "generic/VolumetricMaxUnpooling.cu"
#else
// Shared argument validation for the forward and backward passes.
// Checks that `indices` matches `input`, that the strides are positive,
// that `input` is 4D or 5D, and — when gradOutput is supplied (backward
// pass only; forward passes NULL) — that gradOutput has the expected
// (oT, oH, oW) spatial extents and channel count.
static inline void THNN_(VolumetricMaxUnpooling_shapeCheck)(
                         THCState *state,
                         THCTensor *input,
                         THCTensor *gradOutput,
                         THCIndexTensor *indices,
                         int oT,
                         int oW,
                         int oH,
                         int dT,
                         int dW,
                         int dH,
                         int pT,
                         int pW,
                         int pH) {
  int inputSlices = 0;
  THCUNN_check_shape_indices(state, indices, input);
  THArgCheck(dT > 0 && dW > 0 && dH > 0, 10,
             "stride should be greater than zero, but got dT: %d dH: %d dW: %d",
             dT, dH, dW);
  if (THCTensor_(nDimension)(state, input) == 4)
  {
    // 4D (C,T,H,W): channels are dim 0.
    inputSlices = THCTensor_(size)(state, input, 0);
  }
  else if (THCTensor_(nDimension)(state, input) == 5)
  {
    // 5D (N,C,T,H,W): channels are dim 1.
    inputSlices = THCTensor_(size)(state, input, 1);
  }
  else
  {
    AT_ERROR("non-empty 4D or 5D tensor expected, got size: ",
             input->sizes());
  }
  // Dimension indices for the 4D layout; shifted by one when batched.
  int dimw = 3;
  int dimh = 2;
  int dimt = 1;
  int dimn = 0;
  if (input->dim() == 5)
  {
    dimt++;
    dimw++;
    dimh++;
    dimn++;
  }
  if (gradOutput != NULL) {
    if (oT != gradOutput->size[dimt] || oW != gradOutput->size[dimw] || oH != gradOutput->size[dimh])
    {
      THError(
        "Inconsistent gradOutput size. oT= %d, oH= %d, oW= %d, gradOutput: %dx%dx%d",
        oT, oH, oW, gradOutput->size[dimt], gradOutput->size[dimh], gradOutput->size[dimw]);
    }
    THCUNN_check_dim_size(state, gradOutput, input->dim(), dimn, inputSlices);
  }
}
// Forward pass of 3D max-unpooling (CUDA build).
// Writes each input value into the output at the position recorded in
// `indices` (produced by a previous max-pooling). Output is zero-filled
// first; accepts 4D (C,T,H,W) or 5D (N,C,T,H,W) input.
void THNN_(VolumetricMaxUnpooling_updateOutput)(
           THCState *state,
           THCTensor *input,
           THCTensor *output,
           THCIndexTensor *indices,
           int outputTime, int outputWidth, int outputHeight,
           int dT, int dW, int dH,
           int padT, int padW, int padH)
{
  int batchSize = 0;
  int inputSlices = 0;
  int inputTime = 0;
  int inputHeight = 0;
  int inputWidth = 0;
  // gradOutput == NULL: only the input/indices checks apply here.
  THNN_(VolumetricMaxUnpooling_shapeCheck)(
        state, input, NULL, indices,
        outputTime, outputWidth, outputHeight,
        dT, dW, dH, padT, padW, padH);
  THCUNN_assertSameGPU(state, 3, input, indices, output);
  int fiveDimensionalInput = THCTensor_(nDimension)(state, input) == 5;
  if (THCTensor_(nDimension)(state, input) == 4)
  {
    /* sizes */
    batchSize = 1;
    inputSlices = THCTensor_(size)(state, input, 0);
    inputTime = THCTensor_(size)(state, input, 1);
    inputHeight = THCTensor_(size)(state, input, 2);
    inputWidth = THCTensor_(size)(state, input, 3);
  }
  else if (fiveDimensionalInput)
  {
    /* sizes */
    batchSize = THCTensor_(size)(state, input, 0);
    inputSlices = THCTensor_(size)(state, input, 1);
    inputTime = THCTensor_(size)(state, input, 2);
    inputHeight = THCTensor_(size)(state, input, 3);
    inputWidth = THCTensor_(size)(state, input, 4);
  }
  if (!fiveDimensionalInput) /* 4D */
  {
    /* resize output */
    THCTensor_(resize4d)(state, output, inputSlices,
                          outputTime, outputHeight, outputWidth);
  }
  else
  { /* 5D */
    THCTensor_(resize5d)(state, output, batchSize, inputSlices,
                          outputTime, outputHeight, outputWidth);
  }
  input = THCTensor_(newContiguous)(state, input);
  indices = THCIndexTensor_(newContiguous)(state, indices);
  output = THCTensor_(newContiguous)(state, output);
  // Unselected positions must stay zero.
  THCTensor_(zero)(state, output);
  if (fiveDimensionalInput) {
    // Collapse batch and feature dimensions
    // newFoldBatchDim assumes contiguity so the newContiguous calls must
    // precede this
    THCTensor *old_output = output;
    output = THCTensor_(newFoldBatchDim)(state, output);
    THCTensor_(free)(state, old_output);
    THCTensor *old_input = input;
    input = THCTensor_(newFoldBatchDim)(state, input);
    THCTensor_(free)(state, old_input);
    THCIndexTensor *old_indices = indices;
    indices = THCIndexTensor_(newFoldBatchDim)(state, indices);
    THCIndexTensor_(free)(state, old_indices);
  }
  real* outputData = THCTensor_(data)(state, output);
  THCDeviceTensor<real, 4> cudaInput;
  THCDeviceTensor<THCIndex_t, 4> cudaIndices;
  cudaInput = toDeviceTensor<real, 4>(state, input);
  cudaIndices = toDeviceTensor<THCIndex_t, 4>(state, indices);
  int totalZ = inputTime * inputSlices * batchSize;
  int offsetZ = 0;
  dim3 block(32, 8);
  // grid.z is capped at 65535; large extents are processed in chunks with
  // offsetZ telling the kernel where to resume.
  while (totalZ > 0) {
    dim3 grid(THCCeilDiv(inputWidth, static_cast<int>(block.x)),
              THCCeilDiv(inputHeight, static_cast<int>(block.y)),
              totalZ > 65535 ? 65535 : totalZ);
    cuda_VolumetricMaxUnpooling_updateOutput<<<grid, block,
      0, THCState_getCurrentStream(state)>>>(
        cudaInput, cudaIndices, outputData,
        outputTime, outputHeight, outputWidth,
        dT, dH, dW,
        padT, padH, padW, offsetZ);
    THCudaCheck(cudaGetLastError());
    totalZ -= 65535;
    offsetZ += 65535;
  }
  THCTensor_(free)(state, input);
  THCTensor_(free)(state, output);
  THCIndexTensor_(free)(state, indices);
}
// Backward pass of 3D max-unpooling (CUDA build).
// Routes each gradOutput value back to the input position recorded in
// `indices` during the forward pass. Accepts 4D (C,T,H,W) or 5D
// (N,C,T,H,W) input; gradInput is resized to match input and zero-filled
// before scattering.
void THNN_(VolumetricMaxUnpooling_updateGradInput)(
           THCState *state,
           THCTensor *input,
           THCTensor *gradOutput,
           THCTensor *gradInput,
           THCIndexTensor *indices,
           int outputTime, int outputWidth, int outputHeight,
           int dT, int dW, int dH,
           int padT, int padW, int padH)
{
  int batchSize = 0;
  int inputSlices = 0;
  int inputTime = 0;
  int inputHeight = 0;
  int inputWidth = 0;
  // Validates strides, indices shape, and that gradOutput matches the
  // expected (outputTime, outputHeight, outputWidth) dimensions.
  THNN_(VolumetricMaxUnpooling_shapeCheck)(
        state, input, gradOutput, indices,
        outputTime, outputWidth, outputHeight,
        dT, dW, dH, padT, padW, padH);
  THCUNN_assertSameGPU(state, 4, input, indices, gradOutput, gradInput);
  int fiveDimensionalInput = THCTensor_(nDimension)(state, input) == 5;
  if (!fiveDimensionalInput) /* 4D */
  {
    // Unbatched: treat as a single sample.
    batchSize = 1;
    inputSlices = THCTensor_(size)(state, input, 0);
    inputTime = THCTensor_(size)(state, input, 1);
    inputHeight = THCTensor_(size)(state, input, 2);
    inputWidth = THCTensor_(size)(state, input, 3);
  }
  else
  {
    batchSize = THCTensor_(size)(state, input, 0);
    inputSlices = THCTensor_(size)(state, input, 1);
    inputTime = THCTensor_(size)(state, input, 2);
    inputHeight = THCTensor_(size)(state, input, 3);
    inputWidth = THCTensor_(size)(state, input, 4);
  }
  input = THCTensor_(newContiguous)(state, input);
  THCTensor_(resizeAs)(state, gradInput, input);
  THCTensor_(zero)(state, gradInput);
  indices = THCIndexTensor_(newContiguous)(state, indices);
  gradOutput = THCTensor_(newContiguous)(state, gradOutput);
  // Collapse batch and feature dimensions
  if (fiveDimensionalInput) {
    // newFoldBatchDim returns a new (owned) view; old handles for indices
    // and gradOutput are released immediately.
    gradInput = THCTensor_(newFoldBatchDim)(state, gradInput);
    THCIndexTensor *old_indices = indices;
    indices = THCIndexTensor_(newFoldBatchDim)(state, indices);
    THCIndexTensor_(free)(state, old_indices);
    THCTensor *old_gradOutput = gradOutput;
    gradOutput = THCTensor_(newFoldBatchDim)(state, gradOutput);
    THCTensor_(free)(state, old_gradOutput);
  } else {
    // Extra retain so the unconditional free at the end of this function
    // is balanced in the 4D path as well.
    THCTensor_(retain)(state, gradInput);
  }
  real* gradOutputData = THCTensor_(data)(state, gradOutput);
  THCDeviceTensor<real, 4> cudaGradInput;
  THCDeviceTensor<THCIndex_t, 4> cudaIndices;
  cudaGradInput = toDeviceTensor<real, 4>(state, gradInput);
  cudaIndices = toDeviceTensor<THCIndex_t, 4>(state, indices);
  int totalZ = inputTime * inputSlices * batchSize;
  int offsetZ = 0;
  dim3 block(32, 8);
  // grid.z is capped at 65535; large extents are processed in chunks with
  // offsetZ telling the kernel where to resume.
  while (totalZ > 0) {
    dim3 grid(THCCeilDiv(inputWidth, static_cast<int>(block.x)),
              THCCeilDiv(inputHeight, static_cast<int>(block.y)),
              totalZ > 65535 ? 65535 : totalZ);
    cuda_VolumetricMaxUnpooling_updateGradInput<<<grid, block,
      0, THCState_getCurrentStream(state)>>>(
        gradOutputData,
        outputTime, outputHeight, outputWidth,
        cudaIndices,
        cudaGradInput,
        dT, dH, dW,
        padT, padH, padW, offsetZ);
    THCudaCheck(cudaGetLastError());
    totalZ -= 65535;
    offsetZ += 65535;
  }
  // cleanup
  THCTensor_(free)(state, gradOutput);
  THCTensor_(free)(state, gradInput);
  THCIndexTensor_(free)(state, indices);
  THCTensor_(free)(state, input);
}
#endif
|
17e36b7055974a0345f28b6dd959f9b8dd13cf6b.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#if GOOGLE_CUDA
#include "ew_op_gpu.h"
#include <stdio.h>
#include <type_traits>
// Lazy Adam update (HIP build), intended for embedding-style parameters.
// Grid layout: blockIdx.x = row index c (C rows), blockIdx.y * blockDim.x
// covers the K elements of a row. The whole block first max-reduces |grad|;
// if the maximum is zero the row is assumed untouched this step and the
// Adam update is skipped entirely (moments are not decayed).
template <typename TG, typename TR>
__global__ void apply_lazy_adam(
          float* Param,
             TR* Mean,
             TR* Var,
    const TG* __restrict__ Grad,
    float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint K, uint zero_nans)
{
    uint tid = threadIdx.x;
    uint c = blockIdx.x;
    uint k = blockIdx.y*blockDim.x + tid;
    uint offset = c*K + k;
    // Out-of-range lanes load 0 so they don't affect the max reduction.
    float g = load(add_ptr_u(Grad, offset), 0, k < K);
    // Nans => zero (PTX testp.number tests for "is a number", i.e. not NaN)
    if (zero_nans)
        asm("{                               \n\t"
            ".reg .pred is_number;           \n\t"
            "testp.number.f32 is_number, %0; \n\t"
            "selp.f32 %0, %0, 0.0, is_number;\n\t"
            "}" : "+f"(g) :);
    // Saturate fp16 infinity values (65504 is the largest finite half)
    if (std::is_same<TG, ehalf>::value)
        g = fmaxf(fminf(g, 65504.0f), -65504.0f);
    // max reduce gradient within this block.
    // If the whole block is zero that means that this embedding vector was not selected.
    // If the emb vector is bigger than the block then at least the probability is high of non-selection.
    // Make Adam a no-op in this case.
    float gmax = fabsf(g);
    // Intra-warp xor-shuffle max reduction.
    for (int i = 16; i > 0; i >>= 1)
        gmax = fmaxf(gmax, shfl_xor(gmax, i));
    if (blockDim.x > 32)
    {
        __shared__ float Share[32];
        // first thread of each warp store to shared
        if ((tid & 31) == 0)
            Share[tid/32] = gmax;
        __syncthreads();
        if (tid < 32)
        {
            // first warp loads all prior reductions
            // NOTE(review): lanes >= blockDim.x/32 read Share slots that were
            // never written; the reduction strides below never fold those
            // lanes into lane 0, so the final result is unaffected, but
            // initcheck-style tools will flag the read.
            gmax = Share[tid];
            // reduce within this last warp
            #pragma unroll 1
            for (int i = blockDim.x/64; i > 0; i >>= 1)
                gmax = fmaxf(gmax, shfl_xor(gmax, i));
            // final reduction to shared
            Share[tid] = gmax;
        }
        __syncthreads();
        gmax = Share[0];
    }
    if (k < K && gmax > 0.0f)
    {
        // Standard Adam step with optional sigma-based gradient clipping.
        float v = load(add_ptr_u((const TR*)Var, offset));
        float m = load(add_ptr_u((const TR*)Mean, offset));
        float p = load(add_ptr_u((const float*)Param, offset));
        g *= grad_scale;
        v = decay_var * v + (1.0f - decay_var) * g*g;
        float sigma = sqrtf(v);
        if (clip_sigma != 0.0f)
        {
            // Clip gradient to +/- clip_sigma standard deviations.
            float clip = clip_sigma * sigma;
            g = fmaxf(g, -clip);
            g = fminf(g, clip);
        }
        m = decay_mean * m + (1.0f - decay_mean) * g;
        p -= lr * m / (sigma + epsilon);
        store(add_ptr_u(Mean, offset), m);
        store(add_ptr_u(Var, offset), v);
        store(add_ptr_u(Param, offset), p);
    }
}
// Dense Adam update over a flat parameter buffer (HIP build).
// Each thread walks the buffer with a grid-stride loop, so any launch
// configuration covers all `size` elements. Moments are kept in TR
// precision; parameters are always float.
template <typename TG, typename TR>
__global__ void apply_adam(
    float* Param,
    TR* Mean,
    TR* Var,
    const TG* __restrict__ Grad,
    float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint zero_nans)
{
    uint stride = gridDim.x * blockDim.x;
    uint start  = blockIdx.x * blockDim.x + threadIdx.x;

    for (uint i = start; i < size; i += stride)
    {
        float grad  = load(add_ptr_u( Grad, i));
        float var   = load(add_ptr_u((const TR*)Var, i));
        float mean  = load(add_ptr_u((const TR*)Mean, i));
        float param = load(add_ptr_u((const float*)Param, i));

        // Replace NaN gradients with zero so one bad value cannot poison
        // the optimizer state (PTX testp.number is false only for NaN).
        if (zero_nans)
            asm("{                               \n\t"
                ".reg .pred is_number;           \n\t"
                "testp.number.f32 is_number, %0; \n\t"
                "selp.f32 %0, %0, 0.0, is_number;\n\t"
                "}" : "+f"(grad) :);

        // fp16 grads can carry +/-inf; saturate to the largest finite half.
        if (std::is_same<TG, ehalf>::value)
            grad = fmaxf(fminf(grad, 65504.0f), -65504.0f);

        grad *= grad_scale;
        var = decay_var * var + (1.0f - decay_var) * grad * grad;
        float sigma = sqrtf(var);
        if (clip_sigma != 0.0f)
        {
            // Clip gradient to +/- clip_sigma standard deviations.
            float clip = clip_sigma * sigma;
            grad = fminf(fmaxf(grad, -clip), clip);
        }
        mean   = decay_mean * mean + (1.0f - decay_mean) * grad;
        param -= lr * mean / (sigma + epsilon);

        store(add_ptr_u(Mean,  i), mean);
        store(add_ptr_u(Var,   i), var);
        store(add_ptr_u(Param, i), param);
    }
}
// Host launcher for the Adam kernels (HIP build).
// lazy_update != 0 selects the embedding-style lazy kernel (lazy_update is
// the row length K, size the row count C); otherwise the dense grid-stride
// kernel is launched with a thread count scaled to the problem size.
template <typename TG, typename TR>
bool ApplyAdam(
  hipStream_t stream, uint SMs,
  const TG* grad,
  float* param,
  TR* mean,
  TR* var,
  float lr,
  float decay_mean,
  float decay_var,
  float epsilon,
  float grad_scale,
  float clip_sigma,
  uint size,
  uint lazy_update,
  bool zero_nans)
{
    if (lazy_update)
    {
        // Lazy path: one block column per row; grid.y tiles the K axis.
        uint K = lazy_update;
        uint C = size;
        uint threads, gridK;
        if (K <= 1024)
        {
            // Whole row fits one block: round threads to a power of two.
            threads = THREAD_POW2(K);
            gridK   = 1;
        }
        else
        {
            threads = 256;
            gridK   = CEIL_DIV(K, 256);
        }
        hipLaunchKernelGGL(( apply_lazy_adam<TG,TR>), dim3(dim3(C,gridK,1)),dim3(threads),0,stream, param, mean, var, grad, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma, K, zero_nans);
        return true;
    }

    // Dense path: the kernel is grid-stride, so the launch shape only
    // affects occupancy, not correctness.
    uint grid = SMs, threads = 64;
    if      (size > SMs*1024) { threads = 1024; grid *= 2; }
    else if (size > SMs* 512) { threads = 1024; }
    else if (size > SMs* 256) { threads =  512; }
    else if (size > SMs* 128) { threads =  256; }
    else if (size > SMs*  64) { threads =  128; }
    hipLaunchKernelGGL(( apply_adam<TG,TR>), dim3(grid),dim3(threads),0,stream, param, mean, var, grad, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma, size, zero_nans);
    return true;
}
template bool ApplyAdam<float,float>(hipStream_t stream, uint SMs, const float* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint lazy_update, bool zero_nans);
template bool ApplyAdam<ehalf,float>(hipStream_t stream, uint SMs, const ehalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint lazy_update, bool zero_nans);
template bool ApplyAdam<bhalf,float>(hipStream_t stream, uint SMs, const bhalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint lazy_update, bool zero_nans);
// Gated Adam update for blocksparse parameters (HIP build).
// One CUDA block per BSIZE x BSIZE parameter block; the entire update is
// skipped when the block's gate is zero. Each thread handles
// U = BSIZE*BSIZE/THREADS elements, staged through registers with
// separate load / compute / store phases.
template <typename TG, typename TR, uint BSIZE, uint THREADS>
__global__ void __launch_bounds__(THREADS) apply_adam_gated(
    float* Param,
    TR* Mean,
    TR* Var,
    const TG* __restrict__ Grad,
    const float* __restrict__ Gate,
    float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma)
{
    // Elements per thread.
    const uint U = BSIZE*BSIZE/THREADS;
    uint bid = blockIdx.x;
    uint tid = threadIdx.x;
    if (Gate[bid] != 0.0f)
    {
        uint offset = bid*BSIZE*BSIZE + tid;
        Grad += offset;
        Mean += offset;
        Var += offset;
        Param += offset;
        float g[U], m[U], v[U], p[U];
        // Batched register loads before any math.
        for (uint j = 0; j < U; j++) g[j] = load((const TG*)Grad, j*THREADS);
        for (uint j = 0; j < U; j++) m[j] = load((const TR*)Mean, j*THREADS);
        for (uint j = 0; j < U; j++) v[j] = load((const TR*)Var, j*THREADS);
        for (uint j = 0; j < U; j++) p[j] = load((const float*)Param, j*THREADS);
        for (uint j = 0; j < U; j++)
        {
            // Standard Adam step with optional sigma-based clipping.
            g[j] *= grad_scale;
            v[j] = decay_var * v[j] + (1.0f - decay_var ) * g[j] * g[j];
            float sig = sqrtf(v[j]);
            if (clip_sigma != 0.0f)
            {
                float clip = clip_sigma * sig;
                g[j] = fmaxf(g[j], -clip);
                g[j] = fminf(g[j], clip);
            }
            m[j] = decay_mean * m[j] + (1.0f - decay_mean) * g[j];
            p[j] -= lr * m[j] / (sqrtf(v[j]) + epsilon);
        }
        // Write back updated moments and parameters.
        for (uint j = 0; j < U; j++) store(Mean, m[j], j*THREADS);
        for (uint j = 0; j < U; j++) store(Var, v[j], j*THREADS);
        for (uint j = 0; j < U; j++) store(Param, p[j], j*THREADS);
    }
}
// Host launcher for the gated Adam kernel (HIP build).
// Dispatches on the blocksparse block size; one CUDA block per parameter
// block, with the thread count matched to BSIZE*BSIZE elements.
template <typename TG, typename TR>
bool ApplyAdamGated(
  hipStream_t stream,
  const float* gate,
  const TG* grad,
  float* param,
  TR* mean,
  TR* var,
  float lr,
  float decay_mean,
  float decay_var,
  float epsilon,
  float grad_scale,
  float clip_sigma,
  uint blocks,
  uint bsize)
{
    switch (bsize)
    {
    case 8:
        hipLaunchKernelGGL(( apply_adam_gated<TG,TR, 8, 32>), dim3(blocks), dim3(32),0,stream, param, mean, var, grad, gate, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma);
        break;
    case 16:
        hipLaunchKernelGGL(( apply_adam_gated<TG,TR,16, 64>), dim3(blocks), dim3(64),0,stream, param, mean, var, grad, gate, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma);
        break;
    default: // 32x32 blocks
        hipLaunchKernelGGL(( apply_adam_gated<TG,TR,32,256>), dim3(blocks),dim3(256),0,stream, param, mean, var, grad, gate, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma);
        break;
    }
    return true;
}
template bool ApplyAdamGated<float,float>(hipStream_t stream, const float* gate, const float* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint blocks, uint bsize);
template bool ApplyAdamGated<ehalf,float>(hipStream_t stream, const float* gate, const ehalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint blocks, uint bsize);
template bool ApplyAdamGated<bhalf,float>(hipStream_t stream, const float* gate, const bhalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint blocks, uint bsize);
// Exponential moving average: ema += (1 - decay) * (param - ema)
// (HIP build). One warp per block; each thread owns U strided elements,
// guarded individually so partial tails are handled.
template <typename T, uint U>
__global__ void __launch_bounds__(32) apply_ema(
    T* Ema,
    const T* __restrict__ Param,
    float decay, uint size)
{
    uint base = blockIdx.x * U*32 + threadIdx.x;
    Ema   += base;
    Param += base;
    for (uint j = 0; j < U; j++)
    {
        // Per-element bounds guard for the ragged last block.
        bool in_bounds = base + j*32 < size;
        float e = load((const T*)Ema, j*32, in_bounds);
        float p = load( Param, j*32, in_bounds);
        // Move the EMA a (1 - decay) fraction of the way toward param.
        e -= (1.0f - decay) * (e - p);
        store(Ema, e, j*32, in_bounds);
    }
}
// Host launcher for the EMA kernel (HIP build).
// Prefers 4 elements per thread; when that yields too few blocks to keep
// the device busy (<= 200), falls back to 1 element per thread.
template <typename T>
bool ApplyEma(hipStream_t stream, T* ema, const T* param, float decay, uint size)
{
    // ceil(size / 128): one warp, 4 unrolled elements per thread.
    uint blocks = (size >> 7) + ((size & 127) != 0);
    if (blocks > 200)
    {
        hipLaunchKernelGGL(( apply_ema<T,4>), dim3(blocks),dim3(32),0,stream, ema, param, decay, size);
    }
    else
    {
        // ceil(size / 32): one warp, single element per thread.
        blocks = (size >> 5) + ((size & 31) != 0);
        hipLaunchKernelGGL(( apply_ema<T,1>), dim3(blocks),dim3(32),0,stream, ema, param, decay, size);
    }
    return true;
}
template bool ApplyEma<float>(hipStream_t stream, float* ema, const float* param, float decay, uint size);
// Gated EMA update for blocksparse parameters (HIP build):
// ema += (1 - decay) * (param - ema), skipped entirely when the block's
// gate is zero. One CUDA block per BSIZE x BSIZE parameter block; each
// thread handles U elements staged through registers.
template <typename T, uint BSIZE, uint THREADS>
__global__ void __launch_bounds__(THREADS) apply_ema_gated(
    T* Ema,
    const T* __restrict__ Param,
    const float* __restrict__ Gate,
    float decay)
{
    // Elements per thread.
    const uint U = BSIZE*BSIZE/THREADS;
    uint bid = blockIdx.x;
    uint tid = threadIdx.x;
    if (Gate[bid] != 0.0f)
    {
        uint offset = bid*BSIZE*BSIZE + tid;
        Ema += offset;
        Param += offset;
        float e[U], p[U];
        // Batched loads, elementwise EMA step, batched stores.
        for (uint j = 0; j < U; j++) e[j] = load((const T*)Ema, j*THREADS);
        for (uint j = 0; j < U; j++) p[j] = load( Param, j*THREADS);
        for (uint j = 0; j < U; j++) e[j] -= (1.0f - decay) * (e[j] - p[j]);
        for (uint j = 0; j < U; j++) store(Ema, e[j], j*THREADS);
    }
}
// Host launcher for the gated EMA kernel (HIP build).
// Dispatches on the blocksparse block size; one CUDA block per parameter
// block.
template <typename T>
bool ApplyEmaGated(hipStream_t stream, T* ema, const T* param, const float* gate, float decay, uint blocks, uint bsize)
{
    switch (bsize)
    {
    case 8:
        hipLaunchKernelGGL(( apply_ema_gated<T, 8, 32>), dim3(blocks), dim3(32),0,stream, ema, param, gate, decay);
        break;
    case 16:
        hipLaunchKernelGGL(( apply_ema_gated<T,16, 64>), dim3(blocks), dim3(64),0,stream, ema, param, gate, decay);
        break;
    default: // 32x32 blocks
        hipLaunchKernelGGL(( apply_ema_gated<T,32,256>), dim3(blocks),dim3(256),0,stream, ema, param, gate, decay);
        break;
    }
    return true;
}
template bool ApplyEmaGated<float>(hipStream_t stream, float* ema, const float* param, const float* gate, float decay, uint blocks, uint bsize);
// Per-block L2 weight decay for blocksparse parameters (HIP build).
// Computes the block's L2 norm via a block-wide reduction, then shrinks
// every element by min(rate / norm, 1): decay is proportionally weaker for
// blocks with larger norm. When GATED != 0, zero-gated blocks are skipped.
template <typename T, uint BSIZE, uint THREADS, uint GATED>
__global__ void __launch_bounds__(THREADS) blocksparse_l2_decay(T* Param, const float* __restrict__ Gate, float rate, float epsilon)
{
    // Elements per thread.
    const uint U = BSIZE*BSIZE/THREADS;
    uint bid = blockIdx.x;
    uint tid = threadIdx.x;
    if (GATED == 0 || Gate[bid] != 0.0f)
    {
        uint offset = bid*BSIZE*BSIZE + tid;
        Param += offset;
        float p[U];
        for (uint j = 0; j < U; j++)
            p[j] = load((const T*)Param, j*THREADS);
        // Reduce sum squared within this thread
        float sum_sqared = 0.0f;
        for (uint j = 0; j < U; j++)
            sum_sqared += p[j] * p[j];
        // reduce within warp (xor-shuffle)
        for (int i = 16; i > 0; i >>= 1)
            sum_sqared += shfl_xor(sum_sqared, i);
        // if using more than 1 warp, further reduced with shared memory
        if (THREADS > 32)
        {
            __shared__ float Share[THREADS/32];
            // first thread of each warp store to shared
            if ((tid & 31) == 0)
                Share[tid >> 5] = sum_sqared;
            __syncthreads();
            if (tid < THREADS/32)
            {
                // first warp loads all prior reductions
                sum_sqared = Share[tid];
                // reduce within this first warp
                for (int i = THREADS/64; i > 0; i >>= 1)
                    sum_sqared += shfl_xor(sum_sqared, i);
                // outputs final reduction to shared
                Share[tid] = sum_sqared;
            }
            __syncthreads();
            // broadcast result to all threads
            sum_sqared = Share[0];
        }
        // apply weight decay and store updated param;
        // epsilon guards rsqrtf against a zero-norm block.
        float decay = fminf(rsqrtf(sum_sqared + epsilon) * rate, 1.0f);
        for (uint j = 0; j < U; j++)
            store(Param, p[j] - p[j] * decay, j*THREADS);
    }
}
// Host launcher for the blocksparse L2 decay kernel (HIP build).
// GATED is a compile-time template flag, so the gate != NULL and ungated
// paths need separate instantiations; within each, dispatch on block size.
template <typename T>
bool BlocksparseL2Decay(hipStream_t stream, T* param, const float* gate, float rate, float epsilon, uint blocks, uint bsize)
{
    if (gate != NULL)
    {
        switch (bsize)
        {
        case 8:  hipLaunchKernelGGL(( blocksparse_l2_decay<T, 8, 32,1>), dim3(blocks), dim3(32),0,stream, param, gate, rate, epsilon); break;
        case 16: hipLaunchKernelGGL(( blocksparse_l2_decay<T,16, 64,1>), dim3(blocks), dim3(64),0,stream, param, gate, rate, epsilon); break;
        default: hipLaunchKernelGGL(( blocksparse_l2_decay<T,32,256,1>), dim3(blocks),dim3(256),0,stream, param, gate, rate, epsilon); break;
        }
    }
    else
    {
        switch (bsize)
        {
        case 8:  hipLaunchKernelGGL(( blocksparse_l2_decay<T, 8, 32,0>), dim3(blocks), dim3(32),0,stream, param, gate, rate, epsilon); break;
        case 16: hipLaunchKernelGGL(( blocksparse_l2_decay<T,16, 64,0>), dim3(blocks), dim3(64),0,stream, param, gate, rate, epsilon); break;
        default: hipLaunchKernelGGL(( blocksparse_l2_decay<T,32,256,0>), dim3(blocks),dim3(256),0,stream, param, gate, rate, epsilon); break;
        }
    }
    return true;
}
template bool BlocksparseL2Decay<float>(hipStream_t stream, float* param, const float* gate, float rate, float epsilon, uint blocks, uint bsize);
// Max-norm pruning for blocksparse parameters (HIP build).
// Each CUDA block computes max|param| over one BSIZE x BSIZE parameter
// block and writes its gate: 0 when the max falls below `threshold`
// (block pruned), 1 otherwise.
template <typename T, uint BSIZE, uint THREADS>
__global__ void __launch_bounds__(THREADS) blocksparse_maxnorm_prune(const T* __restrict__ Param, float* Gate, float threshold)
{
    // Elements per thread.
    const uint U = BSIZE*BSIZE/THREADS;
    uint bid = blockIdx.x;
    uint tid = threadIdx.x;
    uint offset = bid*BSIZE*BSIZE + tid;
    Param += offset;
    float p[U];
    for (uint j = 0; j < U; j++)
        p[j] = load(Param, j*THREADS);
    // Reduce max within this thread
    float max_abs = 0.0f;
    for (uint j = 0; j < U; j++)
        max_abs = fmaxf(fabsf(p[j]), max_abs);
    // reduce within warp (xor-shuffle)
    for (int i = 16; i > 0; i >>= 1)
        max_abs = fmaxf(max_abs, shfl_xor(max_abs, i));
    // if using more than 1 warp, further reduced with shared memory
    if (THREADS > 32)
    {
        __shared__ float Share[THREADS/32];
        // first thread of each warp store to shared
        if ((tid & 31) == 0)
            Share[tid >> 5] = max_abs;
        __syncthreads();
        if (tid < THREADS/32)
        {
            // first warp loads all prior reductions
            max_abs = Share[tid];
            // reduce within this first warp
            for (int i = THREADS/64; i > 0; i >>= 1)
                max_abs = fmaxf(max_abs, shfl_xor(max_abs, i));
        }
    }
    // first thread has the final reduced max_abs
    // compare against threshhold and update gate if needed.
    // if (bid < 2 && tid == 0)
    //     printf("%d %d %.5f %.5f\n", bid, gridDim.x, max_abs, threshold);
    if (tid == 0)
        Gate[bid] = max_abs < threshold ? 0.0f : 1.0f;
}
// Host launcher for the max-norm pruning kernel (HIP build).
// Dispatches on the blocksparse block size; one CUDA block per parameter
// block, each writing one gate value.
template <typename T>
bool BlocksparseMaxnormPrune(hipStream_t stream, const T* param, float* gate, float threshold, uint blocks, uint bsize)
{
    switch (bsize)
    {
    case 8:
        hipLaunchKernelGGL(( blocksparse_maxnorm_prune<T, 8, 32>), dim3(blocks), dim3(32),0,stream, param, gate, threshold);
        break;
    case 16:
        hipLaunchKernelGGL(( blocksparse_maxnorm_prune<T,16, 64>), dim3(blocks), dim3(64),0,stream, param, gate, threshold);
        break;
    default: // 32x32 blocks
        hipLaunchKernelGGL(( blocksparse_maxnorm_prune<T,32,256>), dim3(blocks),dim3(256),0,stream, param, gate, threshold);
        break;
    }
    return true;
}
template bool BlocksparseMaxnormPrune<float>(hipStream_t stream, const float* param, float* gate, float threshold, uint blocks, uint bsize);
#endif
| 17e36b7055974a0345f28b6dd959f9b8dd13cf6b.cu |
#if GOOGLE_CUDA
#include "ew_op_gpu.h"
#include <stdio.h>
#include <type_traits>
// Lazy Adam update (CUDA build), intended for embedding-style parameters.
// Grid layout: blockIdx.x = row index c (C rows), blockIdx.y * blockDim.x
// covers the K elements of a row. The whole block first max-reduces |grad|;
// if the maximum is zero the row is assumed untouched this step and the
// Adam update is skipped entirely (moments are not decayed).
template <typename TG, typename TR>
__global__ void apply_lazy_adam(
    float* Param,
    TR* Mean,
    TR* Var,
    const TG* __restrict__ Grad,
    float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint K, uint zero_nans)
{
    uint tid = threadIdx.x;
    uint c = blockIdx.x;
    uint k = blockIdx.y*blockDim.x + tid;
    uint offset = c*K + k;
    // Out-of-range lanes load 0 so they don't affect the max reduction.
    float g = load(add_ptr_u(Grad, offset), 0, k < K);
    // Nans => zero (PTX testp.number tests for "is a number", i.e. not NaN)
    if (zero_nans)
        asm("{                               \n\t"
            ".reg .pred is_number;           \n\t"
            "testp.number.f32 is_number, %0; \n\t"
            "selp.f32 %0, %0, 0.0, is_number;\n\t"
            "}" : "+f"(g) :);
    // Saturate fp16 infinity values (65504 is the largest finite half)
    if (std::is_same<TG, ehalf>::value)
        g = fmaxf(fminf(g, 65504.0f), -65504.0f);
    // max reduce gradient within this block.
    // If the whole block is zero that means that this embedding vector was not selected.
    // If the emb vector is bigger than the block then at least the probability is high of non-selection.
    // Make Adam a no-op in this case.
    float gmax = fabsf(g);
    // Intra-warp xor-shuffle max reduction.
    for (int i = 16; i > 0; i >>= 1)
        gmax = fmaxf(gmax, shfl_xor(gmax, i));
    if (blockDim.x > 32)
    {
        __shared__ float Share[32];
        // first thread of each warp store to shared
        if ((tid & 31) == 0)
            Share[tid/32] = gmax;
        __syncthreads();
        if (tid < 32)
        {
            // first warp loads all prior reductions
            // NOTE(review): lanes >= blockDim.x/32 read Share slots that were
            // never written; the reduction strides below never fold those
            // lanes into lane 0, so the final result is unaffected, but
            // initcheck-style tools will flag the read.
            gmax = Share[tid];
            // reduce within this last warp
            #pragma unroll 1
            for (int i = blockDim.x/64; i > 0; i >>= 1)
                gmax = fmaxf(gmax, shfl_xor(gmax, i));
            // final reduction to shared
            Share[tid] = gmax;
        }
        __syncthreads();
        gmax = Share[0];
    }
    if (k < K && gmax > 0.0f)
    {
        // Standard Adam step with optional sigma-based gradient clipping.
        float v = load(add_ptr_u((const TR*)Var, offset));
        float m = load(add_ptr_u((const TR*)Mean, offset));
        float p = load(add_ptr_u((const float*)Param, offset));
        g *= grad_scale;
        v = decay_var * v + (1.0f - decay_var) * g*g;
        float sigma = sqrtf(v);
        if (clip_sigma != 0.0f)
        {
            // Clip gradient to +/- clip_sigma standard deviations.
            float clip = clip_sigma * sigma;
            g = fmaxf(g, -clip);
            g = fminf(g, clip);
        }
        m = decay_mean * m + (1.0f - decay_mean) * g;
        p -= lr * m / (sigma + epsilon);
        store(add_ptr_u(Mean, offset), m);
        store(add_ptr_u(Var, offset), v);
        store(add_ptr_u(Param, offset), p);
    }
}
// Dense Adam update over a flat parameter buffer (CUDA build).
// Each thread walks the buffer with a grid-stride loop, so any launch
// configuration covers all `size` elements. Moments are kept in TR
// precision; parameters are always float.
template <typename TG, typename TR>
__global__ void apply_adam(
    float* Param,
    TR* Mean,
    TR* Var,
    const TG* __restrict__ Grad,
    float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint zero_nans)
{
    uint stride = gridDim.x * blockDim.x;
    uint start  = blockIdx.x * blockDim.x + threadIdx.x;

    for (uint i = start; i < size; i += stride)
    {
        float grad  = load(add_ptr_u( Grad, i));
        float var   = load(add_ptr_u((const TR*)Var, i));
        float mean  = load(add_ptr_u((const TR*)Mean, i));
        float param = load(add_ptr_u((const float*)Param, i));

        // Replace NaN gradients with zero so one bad value cannot poison
        // the optimizer state (PTX testp.number is false only for NaN).
        if (zero_nans)
            asm("{                               \n\t"
                ".reg .pred is_number;           \n\t"
                "testp.number.f32 is_number, %0; \n\t"
                "selp.f32 %0, %0, 0.0, is_number;\n\t"
                "}" : "+f"(grad) :);

        // fp16 grads can carry +/-inf; saturate to the largest finite half.
        if (std::is_same<TG, ehalf>::value)
            grad = fmaxf(fminf(grad, 65504.0f), -65504.0f);

        grad *= grad_scale;
        var = decay_var * var + (1.0f - decay_var) * grad * grad;
        float sigma = sqrtf(var);
        if (clip_sigma != 0.0f)
        {
            // Clip gradient to +/- clip_sigma standard deviations.
            float clip = clip_sigma * sigma;
            grad = fminf(fmaxf(grad, -clip), clip);
        }
        mean   = decay_mean * mean + (1.0f - decay_mean) * grad;
        param -= lr * mean / (sigma + epsilon);

        store(add_ptr_u(Mean,  i), mean);
        store(add_ptr_u(Var,   i), var);
        store(add_ptr_u(Param, i), param);
    }
}
// Host launcher for the Adam kernels (CUDA build).
// lazy_update != 0 selects the embedding-style lazy kernel (lazy_update is
// the row length K, size the row count C); otherwise the dense grid-stride
// kernel is launched with a thread count scaled to the problem size.
template <typename TG, typename TR>
bool ApplyAdam(
  CUstream stream, uint SMs,
  const TG* grad,
  float* param,
  TR* mean,
  TR* var,
  float lr,
  float decay_mean,
  float decay_var,
  float epsilon,
  float grad_scale,
  float clip_sigma,
  uint size,
  uint lazy_update,
  bool zero_nans)
{
    if (lazy_update)
    {
        // Lazy path: one block column per row; grid.y tiles the K axis.
        uint K = lazy_update;
        uint C = size;
        uint threads, gridK;
        if (K <= 1024)
        {
            // Whole row fits one block: round threads to a power of two.
            threads = THREAD_POW2(K);
            gridK   = 1;
        }
        else
        {
            threads = 256;
            gridK   = CEIL_DIV(K, 256);
        }
        apply_lazy_adam<TG,TR><<<dim3(C,gridK,1),threads,0,stream>>>(param, mean, var, grad, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma, K, zero_nans);
        return true;
    }

    // Dense path: the kernel is grid-stride, so the launch shape only
    // affects occupancy, not correctness.
    uint grid = SMs, threads = 64;
    if      (size > SMs*1024) { threads = 1024; grid *= 2; }
    else if (size > SMs* 512) { threads = 1024; }
    else if (size > SMs* 256) { threads =  512; }
    else if (size > SMs* 128) { threads =  256; }
    else if (size > SMs*  64) { threads =  128; }
    apply_adam<TG,TR><<<grid,threads,0,stream>>>(param, mean, var, grad, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma, size, zero_nans);
    return true;
}
template bool ApplyAdam<float,float>(CUstream stream, uint SMs, const float* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint lazy_update, bool zero_nans);
template bool ApplyAdam<ehalf,float>(CUstream stream, uint SMs, const ehalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint lazy_update, bool zero_nans);
template bool ApplyAdam<bhalf,float>(CUstream stream, uint SMs, const bhalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint size, uint lazy_update, bool zero_nans);
// Gated Adam update for blocksparse parameters (CUDA build).
// One CUDA block per BSIZE x BSIZE parameter block; the entire update is
// skipped when the block's gate is zero. Each thread handles
// U = BSIZE*BSIZE/THREADS elements, staged through registers with
// separate load / compute / store phases.
template <typename TG, typename TR, uint BSIZE, uint THREADS>
__global__ void __launch_bounds__(THREADS) apply_adam_gated(
    float* Param,
    TR* Mean,
    TR* Var,
    const TG* __restrict__ Grad,
    const float* __restrict__ Gate,
    float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma)
{
    // Elements per thread.
    const uint U = BSIZE*BSIZE/THREADS;
    uint bid = blockIdx.x;
    uint tid = threadIdx.x;
    if (Gate[bid] != 0.0f)
    {
        uint offset = bid*BSIZE*BSIZE + tid;
        Grad += offset;
        Mean += offset;
        Var += offset;
        Param += offset;
        float g[U], m[U], v[U], p[U];
        // Batched register loads before any math.
        for (uint j = 0; j < U; j++) g[j] = load((const TG*)Grad, j*THREADS);
        for (uint j = 0; j < U; j++) m[j] = load((const TR*)Mean, j*THREADS);
        for (uint j = 0; j < U; j++) v[j] = load((const TR*)Var, j*THREADS);
        for (uint j = 0; j < U; j++) p[j] = load((const float*)Param, j*THREADS);
        for (uint j = 0; j < U; j++)
        {
            // Standard Adam step with optional sigma-based clipping.
            g[j] *= grad_scale;
            v[j] = decay_var * v[j] + (1.0f - decay_var ) * g[j] * g[j];
            float sig = sqrtf(v[j]);
            if (clip_sigma != 0.0f)
            {
                float clip = clip_sigma * sig;
                g[j] = fmaxf(g[j], -clip);
                g[j] = fminf(g[j], clip);
            }
            m[j] = decay_mean * m[j] + (1.0f - decay_mean) * g[j];
            p[j] -= lr * m[j] / (sqrtf(v[j]) + epsilon);
        }
        // Write back updated moments and parameters.
        for (uint j = 0; j < U; j++) store(Mean, m[j], j*THREADS);
        for (uint j = 0; j < U; j++) store(Var, v[j], j*THREADS);
        for (uint j = 0; j < U; j++) store(Param, p[j], j*THREADS);
    }
}
// Host launcher for the gated Adam kernel (CUDA build).
// Dispatches on the blocksparse block size; one CUDA block per parameter
// block, with the thread count matched to BSIZE*BSIZE elements.
template <typename TG, typename TR>
bool ApplyAdamGated(
  CUstream stream,
  const float* gate,
  const TG* grad,
  float* param,
  TR* mean,
  TR* var,
  float lr,
  float decay_mean,
  float decay_var,
  float epsilon,
  float grad_scale,
  float clip_sigma,
  uint blocks,
  uint bsize)
{
    switch (bsize)
    {
    case 8:
        apply_adam_gated<TG,TR, 8, 32><<<blocks, 32,0,stream>>>(param, mean, var, grad, gate, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma);
        break;
    case 16:
        apply_adam_gated<TG,TR,16, 64><<<blocks, 64,0,stream>>>(param, mean, var, grad, gate, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma);
        break;
    default: // 32x32 blocks
        apply_adam_gated<TG,TR,32,256><<<blocks,256,0,stream>>>(param, mean, var, grad, gate, lr, decay_mean, decay_var, epsilon, grad_scale, clip_sigma);
        break;
    }
    return true;
}
template bool ApplyAdamGated<float,float>(CUstream stream, const float* gate, const float* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint blocks, uint bsize);
template bool ApplyAdamGated<ehalf,float>(CUstream stream, const float* gate, const ehalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint blocks, uint bsize);
template bool ApplyAdamGated<bhalf,float>(CUstream stream, const float* gate, const bhalf* grad, float* param, float* mean, float* var, float lr, float decay_mean, float decay_var, float epsilon, float grad_scale, float clip_sigma, uint blocks, uint bsize);
// Exponential-moving-average update: ema -= (1 - decay) * (ema - param).
// One warp per block; each thread handles U elements spaced 32 apart, and
// the ragged tail is masked with a per-element predicate.
template <typename T, uint U>
__global__ void __launch_bounds__(32) apply_ema(
          T*              Ema,
    const T* __restrict__ Param,
    float decay, uint size)
{
    uint base = blockIdx.x * U*32 + threadIdx.x;
    // Predicate each unrolled slot so out-of-range lanes neither load nor store.
    bool valid[U];
    for (uint u = 0; u < U; u++)
        valid[u] = base + u*32 < size;
    Ema   += base;
    Param += base;
    float ema[U], par[U];
    for (uint u = 0; u < U; u++) ema[u] = load((const T*)Ema, u*32, valid[u]);
    for (uint u = 0; u < U; u++) par[u] = load(Param, u*32, valid[u]);
    for (uint u = 0; u < U; u++) ema[u] -= (1.0f - decay) * (ema[u] - par[u]);
    for (uint u = 0; u < U; u++) store(Ema, ema[u], u*32, valid[u]);
}
// Launch apply_ema with either 4 elements per thread (large tensors) or 1
// element per thread (small tensors, to keep more blocks in flight).
template <typename T>
bool ApplyEma(CUstream stream, T* ema, const T* param, float decay, uint size)
{
    // 1 warp, 4 unrolls => 128 elements per block (ceil-divide by 128).
    uint grid = (size >> 7) + ((size & 127) != 0);
    if (grid <= 200)
    {
        // Too few blocks to fill the device: drop to 1 unroll (32/block).
        grid = (size >> 5) + ((size & 31) != 0);
        apply_ema<T,1><<<grid,32,0,stream>>>(ema, param, decay, size);
    }
    else
    {
        apply_ema<T,4><<<grid,32,0,stream>>>(ema, param, decay, size);
    }
    return true;
}
template bool ApplyEma<float>(CUstream stream, float* ema, const float* param, float decay, uint size);
// Gated EMA update over BSIZE*BSIZE blocksparse blocks.  Blocks whose gate
// entry is zero are skipped entirely.  U = elements handled per thread.
template <typename T, uint BSIZE, uint THREADS>
__global__ void __launch_bounds__(THREADS) apply_ema_gated(
          T*              Ema,
    const T* __restrict__ Param,
    const float* __restrict__ Gate,
    float decay)
{
    const uint U     = BSIZE*BSIZE/THREADS;
    const uint block = blockIdx.x;
    // The gate test is uniform across the CUDA block, so this early return
    // causes no intra-block divergence.
    if (Gate[block] == 0.0f)
        return;
    uint offset = block*BSIZE*BSIZE + threadIdx.x;
    Ema   += offset;
    Param += offset;
    float ema[U], par[U];
    for (uint u = 0; u < U; u++) ema[u] = load((const T*)Ema, u*THREADS);
    for (uint u = 0; u < U; u++) par[u] = load(Param, u*THREADS);
    for (uint u = 0; u < U; u++) ema[u] -= (1.0f - decay) * (ema[u] - par[u]);
    for (uint u = 0; u < U; u++) store(Ema, ema[u], u*THREADS);
}
// Dispatch apply_ema_gated on the blocksparse block edge: thread count is
// BSIZE*BSIZE/U matching the kernel's per-thread unroll.
template <typename T>
bool ApplyEmaGated(CUstream stream, T* ema, const T* param, const float* gate, float decay, uint blocks, uint bsize)
{
    switch (bsize)
    {
        case 8:
            apply_ema_gated<T, 8, 32><<<blocks, 32,0,stream>>>(ema, param, gate, decay);
            break;
        case 16:
            apply_ema_gated<T,16, 64><<<blocks, 64,0,stream>>>(ema, param, gate, decay);
            break;
        default:
            apply_ema_gated<T,32,256><<<blocks,256,0,stream>>>(ema, param, gate, decay);
            break;
    }
    return true;
}
template bool ApplyEmaGated<float>(CUstream stream, float* ema, const float* param, const float* gate, float decay, uint blocks, uint bsize);
// Per-block L2 norm decay: scales each (non-pruned) BSIZE*BSIZE block of
// Param toward zero by min(rate / ||block||, 1), with epsilon guarding the
// rsqrtf against a zero norm.  GATED selects at compile time whether the
// Gate vector is consulted.  `load`/`store`/`shfl_xor` are project helpers
// defined elsewhere in this file.
template <typename T, uint BSIZE, uint THREADS, uint GATED>
__global__ void __launch_bounds__(THREADS) blocksparse_l2_decay(T* Param, const float* __restrict__ Gate, float rate, float epsilon)
{
    const uint U = BSIZE*BSIZE/THREADS;   // elements per thread
    uint bid = blockIdx.x;
    uint tid = threadIdx.x;
    // Gate[bid] is uniform across the CUDA block, so the __syncthreads()
    // below is reached by all threads or by none — no barrier divergence.
    if (GATED == 0 || Gate[bid] != 0.0f)
    {
        uint offset = bid*BSIZE*BSIZE + tid;
        Param += offset;
        float p[U];
        for (uint j = 0; j < U; j++)
            p[j] = load((const T*)Param, j*THREADS);
        // Reduce sum of squares within this thread
        float sum_sqared = 0.0f;
        for (uint j = 0; j < U; j++)
            sum_sqared += p[j] * p[j];
        // reduce within warp (butterfly: every lane ends with the warp sum)
        for (int i = 16; i > 0; i >>= 1)
            sum_sqared += shfl_xor(sum_sqared, i);
        // if using more than 1 warp, further reduce with shared memory
        if (THREADS > 32)
        {
            __shared__ float Share[THREADS/32];
            // first thread of each warp stores its partial to shared
            if ((tid & 31) == 0)
                Share[tid >> 5] = sum_sqared;
            __syncthreads();
            if (tid < THREADS/32)
            {
                // first warp loads all prior reductions
                sum_sqared = Share[tid];
                // reduce within this first warp
                for (int i = THREADS/64; i > 0; i >>= 1)
                    sum_sqared += shfl_xor(sum_sqared, i);
                // output final reduction to shared
                Share[tid] = sum_sqared;
            }
            __syncthreads();
            // broadcast result to all threads
            sum_sqared = Share[0];
        }
        // apply weight decay and store the updated param
        float decay = fminf(rsqrtf(sum_sqared + epsilon) * rate, 1.0f);
        for (uint j = 0; j < U; j++)
            store(Param, p[j] - p[j] * decay, j*THREADS);
    }
}
// Launcher for blocksparse_l2_decay.  The GATED template flag is 1 when a
// gate vector is supplied (blocks with gate == 0 are skipped) and 0 when
// gate is NULL (every block is decayed).
template <typename T>
bool BlocksparseL2Decay(CUstream stream, T* param, const float* gate, float rate, float epsilon, uint blocks, uint bsize)
{
    const bool gated = (gate != NULL);
    if (bsize == 8)
    {
        if (gated) blocksparse_l2_decay<T, 8, 32,1><<<blocks, 32,0,stream>>>(param, gate, rate, epsilon);
        else       blocksparse_l2_decay<T, 8, 32,0><<<blocks, 32,0,stream>>>(param, gate, rate, epsilon);
    }
    else if (bsize == 16)
    {
        if (gated) blocksparse_l2_decay<T,16, 64,1><<<blocks, 64,0,stream>>>(param, gate, rate, epsilon);
        else       blocksparse_l2_decay<T,16, 64,0><<<blocks, 64,0,stream>>>(param, gate, rate, epsilon);
    }
    else
    {
        if (gated) blocksparse_l2_decay<T,32,256,1><<<blocks,256,0,stream>>>(param, gate, rate, epsilon);
        else       blocksparse_l2_decay<T,32,256,0><<<blocks,256,0,stream>>>(param, gate, rate, epsilon);
    }
    return true;
}
template bool BlocksparseL2Decay<float>(CUstream stream, float* param, const float* gate, float rate, float epsilon, uint blocks, uint bsize);
// Prune pass: for each BSIZE*BSIZE block of Param, computes the max
// absolute value and writes Gate[bid] = 0 if it falls below `threshold`,
// else 1.  `load`/`shfl_xor` are project helpers defined elsewhere in
// this file.
template <typename T, uint BSIZE, uint THREADS>
__global__ void __launch_bounds__(THREADS) blocksparse_maxnorm_prune(const T* __restrict__ Param, float* Gate, float threshold)
{
    const uint U = BSIZE*BSIZE/THREADS;   // elements per thread
    uint bid = blockIdx.x;
    uint tid = threadIdx.x;
    uint offset = bid*BSIZE*BSIZE + tid;
    Param += offset;
    float p[U];
    for (uint j = 0; j < U; j++)
        p[j] = load(Param, j*THREADS);
    // Reduce max within this thread
    float max_abs = 0.0f;
    for (uint j = 0; j < U; j++)
        max_abs = fmaxf(fabsf(p[j]), max_abs);
    // reduce within warp (butterfly: every lane ends with the warp max)
    for (int i = 16; i > 0; i >>= 1)
        max_abs = fmaxf(max_abs, shfl_xor(max_abs, i));
    // if using more than 1 warp, further reduce with shared memory
    if (THREADS > 32)
    {
        __shared__ float Share[THREADS/32];
        // first thread of each warp stores its partial to shared
        if ((tid & 31) == 0)
            Share[tid >> 5] = max_abs;
        __syncthreads();
        if (tid < THREADS/32)
        {
            // first warp loads all prior reductions
            max_abs = Share[tid];
            // reduce within this first warp
            for (int i = THREADS/64; i > 0; i >>= 1)
                max_abs = fmaxf(max_abs, shfl_xor(max_abs, i));
        }
    }
    // thread 0 now holds the final reduced max_abs;
    // compare against threshold and update the gate.
    // if (bid < 2 && tid == 0)
    //     printf("%d %d %.5f %.5f\n", bid, gridDim.x, max_abs, threshold);
    if (tid == 0)
        Gate[bid] = max_abs < threshold ? 0.0f : 1.0f;
}
// Launcher for blocksparse_maxnorm_prune: one CUDA block per blocksparse
// block, thread count scaled to the block edge.
template <typename T>
bool BlocksparseMaxnormPrune(CUstream stream, const T* param, float* gate, float threshold, uint blocks, uint bsize)
{
    switch (bsize)
    {
        case 8:
            blocksparse_maxnorm_prune<T, 8, 32><<<blocks, 32,0,stream>>>(param, gate, threshold);
            break;
        case 16:
            blocksparse_maxnorm_prune<T,16, 64><<<blocks, 64,0,stream>>>(param, gate, threshold);
            break;
        default:
            blocksparse_maxnorm_prune<T,32,256><<<blocks,256,0,stream>>>(param, gate, threshold);
            break;
    }
    return true;
}
template bool BlocksparseMaxnormPrune<float>(CUstream stream, const float* param, float* gate, float threshold, uint blocks, uint bsize);
#endif
|
625994a92fb886557d77283bed8fbfeefaeb6369.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "caffe2/core/context_gpu.h"
#include "caffe2/image/transform_gpu.h"
/**
*
* Copyright (c) 2016, NVIDIA CORPORATION, All rights reserved
* Distributed under 2-clause BSD license; see accompanying LICENSE file
*
**/
namespace caffe2 {
namespace {
// input in (int8, NHWC), output in (fp32, NCHW)
// Convert one NHWC image (type In) to NCHW (type Out), normalizing every
// value as (x - mean) / std.  Launch: grid.x == N (one block per image),
// 2-D thread block strides over H (y) and W (x).
template <typename In, typename Out>
__global__
void transform_kernel(const int N, const int C, const int H, const int W,
                      const float mean, const float std, const In* in, Out* out) {
  const int img       = blockIdx.x;
  const int imgStride = C * H * W;
  // per-image base pointers; either side is uncoalesced — read side chosen
  const In* src = in  + img * imgStride;
  Out*      dst = out + img * imgStride;
  for (int c = 0; c < C; ++c) {
    for (int h = threadIdx.y; h < H; h += blockDim.y) {
      for (int w = threadIdx.x; w < W; w += blockDim.x) {
        const int srcIdx = c + C * w + C * W * h;   // HWC layout
        const int dstIdx = (c * H + h) * W + w;     // CHW layout
        dst[dstIdx] = (static_cast<Out>(src[srcIdx]) - mean) / std;
      }
    }
  }
}
}
// Generic fallback: no transform is implemented for arbitrary
// <T_IN, T_OUT, Context> combinations — this stub is a no-op that still
// reports success.  Only the <uint8_t, float, CUDAContext> specialization
// below performs the actual NHWC->NCHW conversion.
template <typename T_IN, typename T_OUT, class Context>
bool TransformOnGPU(Tensor<Context>& X, Tensor<Context> *Y, T_OUT mean, T_OUT std, Context *context) {
  return true;
};
// uint8 NHWC -> float NCHW with per-value normalization (x - mean) / std.
// NOTE(review): this specialization declares its parameters as
// (std, mean) while the primary template above declares (mean, std);
// positional callers would get the two swapped — verify against the
// declaration in transform_gpu.h before relying on either order.
template <>
bool TransformOnGPU<uint8_t, float, CUDAContext>(Tensor<CUDAContext>& X, Tensor<CUDAContext> *Y, float std, float mean, CUDAContext *context)
{
  // data comes in as NHWC
  const int N = X.dim32(0), C = X.dim32(3), H = X.dim32(1), W = X.dim32(2);
  // data goes out as NCHW
  Y->Resize(std::vector<int>{N,C,H,W});
  auto* input_data = X.data<uint8_t>();
  auto* output_data = Y->mutable_data<float>();
  // one block per image, 16x16 threads striding over the spatial dims
  hipLaunchKernelGGL(( transform_kernel<uint8_t,float>), dim3(N), dim3(dim3(16,16)), 0, context->cuda_stream(), N,C,H,W, mean, std, input_data, output_data);
  return true;
}
} // namespace caffe2
| 625994a92fb886557d77283bed8fbfeefaeb6369.cu | #include "caffe2/core/context_gpu.h"
#include "caffe2/image/transform_gpu.h"
/**
*
* Copyright (c) 2016, NVIDIA CORPORATION, All rights reserved
* Distributed under 2-clause BSD license; see accompanying LICENSE file
*
**/
namespace caffe2 {
namespace {
// input in (int8, NHWC), output in (fp32, NCHW)
// Convert one NHWC image (type In) to NCHW (type Out), normalizing each
// value as (x - mean) / std.  Launch: grid.x == N (one block per image),
// 2-D thread block strides over H (threadIdx.y) and W (threadIdx.x).
template <typename In, typename Out>
__global__
void transform_kernel(const int N, const int C, const int H, const int W,
                      const float mean, const float std, const In* in, Out* out) {
  const int n = blockIdx.x;
  const int nStride = C*H*W;
  // pointers to data for this image
  const In* input_ptr = &in[n*nStride];
  Out* output_ptr = &out[n*nStride];
  // either read or write uncoalesced - try reading
  for (int c=0; c < C; ++c) {
    for (int h=threadIdx.y; h < H; h += blockDim.y) {
      for (int w=threadIdx.x; w < W; w += blockDim.x) {
        int in_idx = c + C*w + C*W*h; // HWC
        int out_idx = c*H*W + h*W + w; // CHW
        //out[out_idx] = static_cast<Out>(
        //    static_cast<In>(in[in_idx]-mean)/std);
        output_ptr[out_idx] = (static_cast<Out>(input_ptr[in_idx])-mean) / std;
      }
    }
  }
}
}
// Generic fallback: no transform is implemented for arbitrary
// <T_IN, T_OUT, Context> combinations — this stub is a no-op that still
// reports success.  Only the <uint8_t, float, CUDAContext> specialization
// below performs the actual NHWC->NCHW conversion.
template <typename T_IN, typename T_OUT, class Context>
bool TransformOnGPU(Tensor<Context>& X, Tensor<Context> *Y, T_OUT mean, T_OUT std, Context *context) {
  return true;
};
// uint8 NHWC -> float NCHW with per-value normalization (x - mean) / std.
// NOTE(review): this specialization declares its parameters as
// (std, mean) while the primary template above declares (mean, std);
// positional callers would get the two swapped — verify against the
// declaration in transform_gpu.h before relying on either order.
template <>
bool TransformOnGPU<uint8_t, float, CUDAContext>(Tensor<CUDAContext>& X, Tensor<CUDAContext> *Y, float std, float mean, CUDAContext *context)
{
  // data comes in as NHWC
  const int N = X.dim32(0), C = X.dim32(3), H = X.dim32(1), W = X.dim32(2);
  // data goes out as NCHW
  Y->Resize(std::vector<int>{N,C,H,W});
  auto* input_data = X.data<uint8_t>();
  auto* output_data = Y->mutable_data<float>();
  // one block per image, 16x16 threads striding over the spatial dims
  transform_kernel<uint8_t,float><<<N, dim3(16,16), 0, context->cuda_stream()>>>(N,C,H,W, mean, std, input_data, output_data);
  return true;
}
} // namespace caffe2
|
69e9860775e581e9e6913605329cfdaa4fae8358.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <sys/time.h>
#include <sys/types.h>
#define DEBUG_ENABLE 0
#define ERROR_TRACING 0
#define V 7000
#define INF 1000000
///const int INF = 1000000;
///const int V = 7000;
void input(char *inFileName);
void output(char *outFileName);
void block_APSP(int B);
int iceil(int a, int b);
void cal(int B, int Round, int block_start_x, int block_start_y, int block_width, int block_height);
int n, m; // Number of vertices, edges
int Dist[V][V];
int* devDist;
int gpuID=0;
///int Dist[7000][7000];
FILE *logFp;
char *in1;
char *out1;
static int totalCUDADevice = 0;
static int totalNode = 1;
static int MPIID=0;
static int currentDev=0;
typedef struct Timer {
char name[256];
struct timeval begin;
struct timeval end;
} Timer;
Timer *timer_memcpy,*timer_commu,*timer_compute;
Timer *timer_phase3;
Timer* timer_init(Timer* t,const char* name);
Timer* timer_new(const char* name);
void timer_start(Timer* t);
void timer_end(Timer* t);
void timer_add(Timer* t1, const Timer* t2);
double timer_seconds(const Timer* t);
void timer_print(const Timer* t,FILE* stream);
void timer_delete(Timer* t);
// Zero a Timer and copy in its display name.  A NULL timer is tolerated
// and returned unchanged so callers can chain on a failed allocation.
Timer* timer_init(Timer* t, const char* name) {
    if (t == NULL)
        return NULL;
    // memset clears the whole struct (name, begin, end) in one call.
    memset(t, 0, sizeof(Timer));
    strncpy(t->name, name, 256);
    return t;
}
// Heap-allocate and initialize a Timer; returns NULL if malloc fails
// (timer_init passes NULL through untouched).
Timer* timer_new(const char* name) {
    return timer_init((Timer*)malloc(sizeof(Timer)), name);
}
// Record the wall-clock start time (no-op on NULL).
void timer_start(Timer* t) {
    if (t)
        gettimeofday(&t->begin, 0);
}
// Record the wall-clock end time (no-op on NULL).
void timer_end(Timer* t) {
    if (t)
        gettimeofday(&t->end, 0);
}
// Accumulate t2's elapsed interval (end - begin) onto t1's end time;
// t1->begin is left untouched.  NULL-safe on either argument.
void timer_add(Timer* t1, const Timer* t2) {
    if (t1 == NULL || t2 == NULL)
        return;
    t1->end.tv_sec  += t2->end.tv_sec  - t2->begin.tv_sec;   /// seconds
    t1->end.tv_usec += t2->end.tv_usec - t2->begin.tv_usec;  /// microseconds
}
// Elapsed seconds between begin and end; 0 for a NULL timer.
double timer_seconds(const Timer* t) {
    if (t == NULL)
        return 0;
    double secs = (double)(t->end.tv_sec - t->begin.tv_sec);
    return secs + 1e-6 * (t->end.tv_usec - t->begin.tv_usec);
}
// Release a Timer created by timer_new (NULL-safe).
void timer_delete(Timer* t) {
    if (t)
        free(t);
}
// Print "name : seconds(sec)" to the given stream; no-op on a NULL timer.
void timer_print(const Timer* t, FILE* stream) {
    if (t)
        fprintf(stream, "%s : %f(sec)\n", t->name, timer_seconds(t));
}
// Backing helper for the debugCUDACall() macro below: on failure, logs the
// failing expression with its file/line plus the runtime's error string.
// Errors are reported to stderr only — execution continues.
static void __debugCUDACall(hipError_t err, const char* expr, const char* file, int line) {
    if(err != hipSuccess) {
        fprintf(stderr,"in File %s Line %d:%s\n",file,line,expr);
        fprintf(stderr,"%s \n",hipGetErrorString(err));
    }
}
#define debugCUDACall(X) __debugCUDACall((X),#X,__FILE__,__LINE__)
// Return a newly malloc'd string equal to str1 followed by str2.
// The caller owns the returned buffer and must free() it.
// Fixes: inputs are now const (backward compatible — neither is modified),
// and a failed malloc returns NULL instead of crashing in strcpy.
char *stringConcat(const char *str1, const char *str2) {
    size_t length = strlen(str1) + strlen(str2) + 1;   // +1 for the NUL
    char *result = (char*)malloc(length);
    if (result == NULL)
        return NULL;
    strcpy(result, str1);
    strcat(result, str2);
    return result;
}
// Select device `gpuID`, falling back to device 0 when the requested id is
// out of range.  Also records the device count in the file-global
// totalCUDADevice.
void initCUDADevice(int gpuID)
{
    hipGetDeviceCount(&totalCUDADevice);
    printf("totalCUDADevice=%d, \n",totalCUDADevice);
    if (totalCUDADevice == 0) {
        printf("No CUDA device found.\n\n");
        return;
    }
    if (gpuID >= totalCUDADevice)
        gpuID = 0;   // requested id out of range: use the first device
    printf("set CUDA device=%d, \n",gpuID );
    hipSetDevice(gpuID);
}
// Program driver: parses optional argv[3..5] (block size B, gpu id, device
// count override), loads the graph into the global Dist matrix, runs the
// blocked APSP (on GPU when a device is available, else CPU), writes the
// result, and prints the collected timers.  argv[1]=input, argv[2]=output.
int mainRun(int argc, char* argv[])
{
    if(argc > 4) {
        sscanf(argv[4],"%d",&gpuID);
    }
    initCUDADevice(gpuID);
    if(argc > 5) {
        // argv[5] can force the device count; 0 forces the CPU path.
        sscanf(argv[5],"%d",&totalCUDADevice);
        printf("### set totalCUDADevice=%d by argv[5] \n",totalCUDADevice);
        if (totalCUDADevice ==0){
            printf("### Disable cuda device for running in cpu. \n");
        }
    }
    timer_memcpy = timer_new("Memcpy");
    timer_commu = timer_new("Communication");
    timer_compute = timer_new("Compute");
    timer_phase3 = timer_new("Phase3");
    // NOTE(review): "wr" is not a standard fopen mode string — presumably
    // "w" was intended; verify the redirect of stderr to log.txt works.
    logFp = freopen("./log.txt","wr", stderr);
    // in1= "D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\tiny_test_case";
    // out1="D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\output\\tiny_test_case_out";
    //in1= "./Testcase/in2";
    //out1="./output/tiny_test_case_out";
    ///char *in1= "D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\in3";
    ///char *out1="D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\output\\out1";
    ///input( in1 );
    input( argv[1] );
    if(totalCUDADevice > 0) {
        // device matrix holds n rows of V ints (row stride V, like Dist)
        hipMalloc((void**)&devDist,sizeof(int)*V*n);
    }
    /*
    fprintf(logFp, "\n");
    int i,j=0;
    for ( i = 0; i < n; ++i) {
        for ( j = 0; j < n; ++j) {
            if (Dist[i][j] >= INF) fprintf(logFp, "INF ");
            else fprintf(logFp, "%d ", Dist[i][j]);
        }
        fprintf(logFp, "\n");
    }
    */
    if(totalCUDADevice > 0) {
        timer_start(timer_memcpy);
        debugCUDACall(hipMemcpy(devDist,&Dist[0][0],sizeof(int)*V*n,hipMemcpyHostToDevice));
        timer_end(timer_memcpy);
    }
    int B = 128;
    if(argc > 3) {
        sscanf(argv[3],"%d",&B);
    }
    printf("*** B=%d, source=%s, output=%s,\n",B,argv[1],argv[2]);
    // zero-length intervals so the timers print even if never accumulated
    timer_start(timer_phase3);
    timer_end(timer_phase3);
    timer_start(timer_compute);
    timer_end(timer_compute);
    block_APSP(B);
    if(totalCUDADevice >0) {
        Timer tempMemcpy;
        timer_init(&tempMemcpy,"");
        timer_start(&tempMemcpy);
        debugCUDACall(hipMemcpy(&Dist[0][0],devDist,sizeof(int)*n*V,hipMemcpyDeviceToHost));
        timer_end(&tempMemcpy);
        timer_add(timer_memcpy,&tempMemcpy);
        debugCUDACall(hipFree(devDist));
    }
    output(argv[2]);
    ///output( out1 );
    timer_print(timer_memcpy,stdout);
    timer_print(timer_commu,stdout);
    timer_print(timer_compute,stdout);
    timer_print(timer_phase3,stdout);
    fclose(logFp);
    timer_delete(timer_memcpy);
    timer_delete(timer_commu);
    timer_delete(timer_compute);
    timer_delete(timer_phase3);
    return 0;
}
// Read the graph: first line "n m", then m edges "a b v" (1-indexed
// vertices).  Initializes the global Dist matrix to 0 on the diagonal and
// INF elsewhere before applying the edge weights.
// Fixes: the input stream is now closed (it leaked before) and a missing
// file aborts with a message instead of crashing on a NULL FILE*.
void input(char *inFileName)
{
    FILE *infile = fopen(inFileName, "r");
    if (infile == NULL) {
        fprintf(stderr, "cannot open input file %s\n", inFileName);
        exit(1);
    }
    fscanf(infile, "%d %d", &n, &m);
    printf("n=%d, m=%d \n",n,m);
    int i,j=0;
    for ( i = 0; i < n; ++i) {
        for ( j = 0; j < n; ++j) {
            if (i == j) Dist[i][j] = 0;
            else Dist[i][j] = INF;
        }
    }
    while (--m >= 0) {
        int a, b, v;
        fscanf(infile, "%d %d %d", &a, &b, &v);
        if (m== 49) printf("m=%d, a=%d, b=%d, v=%d \n",m,a,b,v);
        --a, --b;   // convert to 0-indexed
        Dist[a][b] = v;
    }
    fclose(infile);   // was leaked previously
}
// Write the global Dist matrix, one row per line, printing "INF" for
// unreachable pairs.  Fixes: the output stream is now closed/flushed (it
// leaked before) and a failed fopen aborts with a message.
void output(char *outFileName)
{
    FILE *outfile = fopen(outFileName, "w");
    if (outfile == NULL) {
        fprintf(stderr, "cannot open output file %s\n", outFileName);
        exit(1);
    }
    int i,j=0;
    for ( i = 0; i < n; ++i) {
        for ( j = 0; j < n; ++j) {
            if (Dist[i][j] >= INF) fprintf(outfile, "INF ");
            else fprintf(outfile, "%d ", Dist[i][j]);
        }
        fprintf(outfile, "\n");
    }
    fclose(outfile);   // was leaked previously (risking unflushed output)
}
// Ceiling division for positive operands: smallest q with q*b >= a.
int iceil(int a, int b) {
    int shifted = a + b - 1;
    return shifted / b;
}
/// (y,x) ==> (column,row)
/// | |
/// (0,0) | (0,1) | (0,2)
/// ______|_______|______
/// (1,0) | (1,1) | (1,2)
/// ______|_______|______
/// (2,0) | (2,1) | (2,2)
/// | |
// Blocked Floyd-Warshall over the global n x n Dist matrix with block edge
// B.  Each round r uses block (r,r) as the pivot: phase 1 relaxes the
// pivot block, phase 2 the pivot row/column, phase 3 the four remaining
// quadrants.  cal(B, round, x, y, w, h) processes the w x h rectangle of
// blocks whose top-left block is (x, y).
void block_APSP(int B) {
    int round = iceil(n, B);
#if DEBUG_ENABLE
    fprintf(logFp,"round=%d ====================================\n",round);
#endif
    int r=0;
    for ( r = 0; r < round; ++r) {
        ///* Phase 1*/
#if DEBUG_ENABLE
        fprintf(logFp,"[Phase1] r=%d ====================================\n",r);
#endif
        /// B, Round, block_start_x, block_start_y, block_width, block_height
        cal(B, r, r, r, 1, 1);
#if DEBUG_ENABLE
        fprintf(logFp,"[Phase2] r=%d \n",r);
        fprintf(logFp," r=%d 1. \n",r);
#endif
        ///* Phase 2*/ /// (y,x) ==> (column,row) ==> (r,0)
        cal(B, r, r, 0, r, 1); ///front row
#if DEBUG_ENABLE
        fprintf(logFp," r=%d 2. \n",r);
#endif
        cal(B, r, r, r +1, round - r -1, 1); /// back row
#if DEBUG_ENABLE
        fprintf(logFp," r=%d 3. \n",r);
#endif
        cal(B, r, 0, r, 1, r); /// up column
#if DEBUG_ENABLE
        fprintf(logFp," r=%d 4. \n",r);
#endif
        cal(B, r, r +1, r, 1, round - r -1); /// down column
#if DEBUG_ENABLE
        fprintf(logFp,"[Phase3] r=%d \n",r);
#endif
        // phase 3 is timed separately into timer_phase3
        Timer tempTime;
        timer_init(&tempTime,"");
        timer_start(&tempTime);
#if DEBUG_ENABLE
        ///* Phase 3*/
        fprintf(logFp," r=%d 1. \n",r);
#endif
        cal(B, r, 0, 0, r, r); ///2 quadrant
#if DEBUG_ENABLE
        fprintf(logFp," r=%d 2. \n",r);
#endif
        cal(B, r, 0, r +1, round -r -1, r); /// 1 quadrant
#if DEBUG_ENABLE
        fprintf(logFp," r=%d 3. \n",r);
#endif
        cal(B, r, r +1, 0, r, round - r -1); /// 3 quadrant
#if DEBUG_ENABLE
        fprintf(logFp," r=%d 4. \n",r);
#endif
        cal(B, r, r +1, r +1, round -r -1, round - r -1); /// 4 quadrant
        timer_end(&tempTime);
        timer_add(timer_phase3,&tempTime);
        /*
        char *devOut0 ="in3_dev_0.txt";
        char *devOut1 ="in3_dev_1.txt";
        char *devOut2 ="in3_dev_0.txt";
        char *devOut3 ="in3_dev_0.txt";
        char *devOut0 ="in3_dev_0.txt";
        char *devOut0 ="in3_dev_0.txt";
        char *devOut0 ="in3_dev_0.txt";
        char *devOut0 ="in3_dev_0.txt";
        */
#if ERROR_TRACING
        // CPU-path only: dump the whole Dist matrix after every round
        if (totalCUDADevice ==0){
            char *fileName = "in3_dev_";
            char *extFile =".txt";
            char roundString[10];
            sprintf(roundString,"%d",r);
            fileName = stringConcat(fileName, roundString);
            fileName = stringConcat(fileName, extFile);
            FILE *outfile = fopen(fileName, "w");
            fprintf(outfile, "round=%d, \n",r);
            int i,j=0;
            for ( i = 0; i < n; ++i) {
                for ( j = 0; j < n; ++j) {
                    fprintf(outfile, "[%d][%d]=%d, ",i,j,Dist[i][j]);
                }
                fprintf(outfile, "\n");
            }
            fclose(outfile);
        }
#endif
    }
}
// One pivot step k of blocked Floyd-Warshall.  CUDA block (blockIdx.x,
// blockIdx.y) covers matrix block (blockIdx.x + x, blockIdx.y + y);
// threadIdx.x sweeps the columns while `bid` walks the rows of the block
// ("column" access order).  devDist holds n rows with stride V.
// Fix: the bounds guards were `i > n` / `j > n`, which let i == n and
// j == n through — row n is past the n*V ints allocated for devDist
// (out-of-bounds access).  Changed to >=.
static __global__ void column_CalKernelGPU(int B,int Round,int x,int y,int n,int* dDist,int k) {
    int b_i = blockIdx.x+x;
    int b_j = blockIdx.y+y;
    int valIK,valKJ,valIJ;
    for(int bid=0; bid<B; bid+=1) {
        int i = b_i*B + bid;           // global row index
        int j = b_j*B + threadIdx.x;   // global column index
        if (i >= n) continue;
        if (j >= n) continue;
        valIK=dDist[i*V+k];
        valKJ=dDist[k*V+j];
        valIJ=dDist[i*V+j];
        if (valIK + valKJ < valIJ) {
            valIJ = valIK + valKJ;
            dDist[i*V+j]=valIJ;
        }
    }
}
// Row-order variant of column_CalKernelGPU (name "rwo" is a historical
// typo of "row"; kept for compatibility).  threadIdx.x sweeps the rows
// while `bid` walks the columns — measured much slower than the column
// variant, which is why calLauncher uses the other kernel.
// Fix: the bounds guards were `i > n` / `j > n`, admitting i == n / j == n
// — one row/column past the n*V ints allocated for devDist.  Changed to >=.
static __global__ void rwo_CalKernelGPU(int B,int Round,int x,int y,int n,int* dDist,int k) {
    int b_i = blockIdx.x+x;
    int b_j = blockIdx.y+y;
    int valIK,valKJ,valIJ;
    for(int bid=0; bid<B; bid+=1) {
        int i = b_i*B + threadIdx.x;   // global row index
        int j = b_j*B + bid;           // global column index
        if (i >= n) continue;
        if (j >= n) continue;
        valIK=dDist[i*V+k];
        valKJ=dDist[k*V+j];
        valIJ=dDist[i*V+j];
        if (valIK + valKJ < valIJ) {
            valIJ = valIK + valKJ;
            dDist[i*V+j]=valIJ;
        }
    }
}
// CPU fallback for one block (b_i, b_j) of blocked Floyd-Warshall: relaxes
// every pair inside the block through each pivot k of round `Round`,
// clamping block edges at the matrix size n (globals Dist, n).
static void calKernelCPU(int B,int Round,int b_i,int b_j) {
    int row_lo = b_i * B;
    int row_hi = (b_i + 1) * B;
    int col_lo = b_j * B;
    int col_hi = (b_j + 1) * B;
    if (row_hi > n) row_hi = n;
    if (col_hi > n) col_hi = n;
    int k;
    for (k = Round * B; k < (Round + 1) * B && k < n; ++k) {
        int i, j;
        for (i = row_lo; i < row_hi; ++i) {
            for (j = col_lo; j < col_hi; ++j) {
                int through_k = Dist[i][k] + Dist[k][j];
                if (through_k < Dist[i][j])
                    Dist[i][j] = through_k;
            }
        }
    }
}
// Apply calKernelCPU to every block of the w x h rectangle whose top-left
// block coordinate is (x, y).
static void calLauncherCPU(int B,int Round,int x,int y,int w,int h) {
    for (int row = 0; row < h; ++row)
        for (int col = 0; col < w; ++col)
            calKernelCPU(B, Round, row + x, col + y);
}
static struct hipDeviceProp_t prop;
static int devicePropGot=0;
// Lazily query the properties of the current device; the result is cached
// in the file-static `prop` after the first call.
static void getProp() {
    if (devicePropGot)
        return;
    devicePropGot = 1;
    hipGetDeviceProperties(&prop, currentDev);
}
int isFirst = 1;
// Process the w x h rectangle of blocks at (x, y) for round `Round`.
// Falls back to the CPU path when no device is available; otherwise
// launches one kernel per pivot k, synchronizing and timing each launch.
// Grid is (h, w) blocks; block is B threads, clamped to the device limit.
void calLauncher(int B,int Round,int x,int y,int w,int h) {
    dim3 gdim(h,w,1);
    dim3 bdim(B,1,1);
    hipError_t err;
    if(totalCUDADevice == 0) {
        if (isFirst){
            // announce the CPU fallback only once
            isFirst=0;
            printf("run in cpu ,because totalCUDADevice=%d\n",totalCUDADevice );
        }
        calLauncherCPU(B,Round,x,y,w,h);
        return;
    }
    // pivot range of this round, clamped to the matrix size
    int mink=Round*B;
    int maxk=mink+B;
    if(maxk>n) maxk=n;
    getProp();
    if(bdim.x > prop.maxThreadsPerBlock) {
        bdim.x=prop.maxThreadsPerBlock;
    }
    for (int k = mink; k < maxk; ++k) { ///
        Timer tempTime;
        timer_init(&tempTime,"");
        timer_start(&tempTime);
        hipLaunchKernelGGL(( column_CalKernelGPU), dim3(gdim),dim3(bdim), 0, 0, B,Round,x,y,n,devDist,k);
        // synchronize so the per-launch timing is meaningful and so
        // execution errors surface here
        err=hipDeviceSynchronize();
        timer_end(&tempTime);
        timer_add(timer_compute,&tempTime);
        if(err != hipSuccess) {
            fprintf(stderr,"%s(gdim=%d,%d,%d)(bid=%d,%d,%d)\n",
                hipGetErrorString(err),
                gdim.x,gdim.y,gdim.z,bdim.x,bdim.y,bdim.z);
        }
    }
}
// Thin wrapper around calLauncher that, when DEBUG_ENABLE is set, logs the
// block rectangle being processed and dumps the whole Dist matrix after
// the call.
void cal(int B, int Round, int x,int y,int w,int h)
{
#if DEBUG_ENABLE
    int i,j=0;
    int block_end_x = x + h ;
    int block_end_y = y + w;
    fprintf(logFp,"B=%d, Round=%d, block_start_x=%d, block_start_y=%d, block_width=%d, block_height=%d, \n",B,Round,x,y,w,h);
    fprintf(logFp,"block_end_x=%d, block_end_y=%d,\n",block_end_x,block_end_y);
#endif
    calLauncher(B,Round,x,y,w,h);
#if DEBUG_ENABLE
    fprintf(logFp, "\n");
    i,j=0;
    for ( i = 0; i < n; ++i) {
        for ( j = 0; j < n; ++j) {
            if (Dist[i][j] >= INF) fprintf(logFp, "INF ");
            else fprintf(logFp, "%d ", Dist[i][j]);
        }
        fprintf(logFp, "\n");
    }
    fprintf(logFp, "------------------------------------------------\n");
#endif
}
// Entry point: runs mainRun() and reports both CPU time (clock()) and
// wall time (gettimeofday()) for the whole run.
int main(int argc, char* argv[]) {
    struct timeval wallBegin, wallEnd;
    clock_t cpuTicks = clock();
    gettimeofday(&wallBegin, NULL);
    mainRun(argc, argv);
    gettimeofday(&wallEnd, NULL);
    cpuTicks = clock() - cpuTicks;
    unsigned long long usBegin = wallBegin.tv_sec * 1000000 + wallBegin.tv_usec;
    unsigned long long usEnd   = wallEnd.tv_sec * 1000000 + wallEnd.tv_usec;
    unsigned long long usTotal = usEnd - usBegin;
    printf("Clock=%f sec. , Gettimeofday time = %llu.%03llu milisecond; %llu.%03llu sec \n",
           ((float)cpuTicks) / CLOCKS_PER_SEC,
           usTotal / 1000, usTotal % 1000,
           usTotal / 1000000, usTotal % 1000000);
    return 0;
}
| 69e9860775e581e9e6913605329cfdaa4fae8358.cu | #include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <sys/time.h>
#include <sys/types.h>
#define DEBUG_ENABLE 0
#define ERROR_TRACING 0
#define V 7000
#define INF 1000000
///const int INF = 1000000;
///const int V = 7000;
void input(char *inFileName);
void output(char *outFileName);
void block_APSP(int B);
int iceil(int a, int b);
void cal(int B, int Round, int block_start_x, int block_start_y, int block_width, int block_height);
int n, m; // Number of vertices, edges
int Dist[V][V];
int* devDist;
int gpuID=0;
///int Dist[7000][7000];
FILE *logFp;
char *in1;
char *out1;
static int totalCUDADevice = 0;
static int totalNode = 1;
static int MPIID=0;
static int currentDev=0;
typedef struct Timer {
char name[256];
struct timeval begin;
struct timeval end;
} Timer;
Timer *timer_memcpy,*timer_commu,*timer_compute;
Timer *timer_phase3;
Timer* timer_init(Timer* t,const char* name);
Timer* timer_new(const char* name);
void timer_start(Timer* t);
void timer_end(Timer* t);
void timer_add(Timer* t1, const Timer* t2);
double timer_seconds(const Timer* t);
void timer_print(const Timer* t,FILE* stream);
void timer_delete(Timer* t);
// Zero-initialize a Timer and copy in its display name; NULL is tolerated
// and passed through unchanged.
Timer* timer_init(Timer* t,const char* name ) {
    /// memset clears the whole struct (name, begin, end) in one call
    /// instead of assigning each field individually.
    if(t) {
        memset (t,0,sizeof(Timer));
        strncpy(t->name,name,256);
    }
    return t;
}
Timer* timer_new(const char* name) {
Timer* t;
t = (Timer*)malloc(sizeof(Timer));
return timer_init(t,name);
}
void timer_start(Timer* t) {
if(!t) return;
gettimeofday(&t->begin,0);
}
void timer_end(Timer* t) {
if(!t) return;
gettimeofday(&t->end,0);
}
void timer_add(Timer* t1, const Timer* t2) {
if(!t1 || !t2) return;
t1->end.tv_sec+=(t2->end.tv_sec-t2->begin.tv_sec); /// seconds
t1->end.tv_usec+=(t2->end.tv_usec-t2->begin.tv_usec); /// microseconds
}
double timer_seconds(const Timer* t) {
if(!t) return 0;
return (double)(t->end.tv_sec-t->begin.tv_sec)+(1e-6*(t->end.tv_usec-t->begin.tv_usec));
}
void timer_delete(Timer* t) {
if(!t) return;
free(t);
}
void timer_print(const Timer* t, FILE* stream) {
if(!t) return;
fprintf(stream,"%s : %f(sec)\n",t->name,timer_seconds(t));
}
// Backing helper for the debugCUDACall() macro below: on failure, logs the
// failing expression with its file/line plus the runtime's error string.
// Errors are reported to stderr only — execution continues.
static void __debugCUDACall(cudaError_t err, const char* expr, const char* file, int line) {
    if(err != cudaSuccess) {
        fprintf(stderr,"in File %s Line %d:%s\n",file,line,expr);
        fprintf(stderr,"%s \n",cudaGetErrorString(err));
    }
}
#define debugCUDACall(X) __debugCUDACall((X),#X,__FILE__,__LINE__)
// Return a newly malloc'd string equal to str1 followed by str2.
// The caller owns the returned buffer and must free() it.
// Fixes: inputs are now const (backward compatible — neither is modified),
// and a failed malloc returns NULL instead of crashing in strcpy.
char *stringConcat(const char *str1, const char *str2) {
    size_t length = strlen(str1) + strlen(str2) + 1;   // +1 for the NUL
    char *result = (char*)malloc(length);
    if (result == NULL)
        return NULL;
    strcpy(result, str1);    // copy the first string into the new buffer
    strcat(result, str2);    // append the second string
    return result;
}
// Select device `gpuID`, falling back to device 0 when the requested id is
// out of range.  Also records the device count in the file-global
// totalCUDADevice.
void initCUDADevice(int gpuID)
{
    cudaGetDeviceCount(&totalCUDADevice);
    printf("totalCUDADevice=%d, \n",totalCUDADevice);
    if (totalCUDADevice == 0) {
        printf("No CUDA device found.\n\n");
        return;
    }
    if (gpuID >= totalCUDADevice)
        gpuID = 0;   // requested id out of range: use the first device
    printf("set CUDA device=%d, \n",gpuID );
    cudaSetDevice(gpuID);
}
int mainRun(int argc, char* argv[])
{
if(argc > 4) {
sscanf(argv[4],"%d",&gpuID);
}
initCUDADevice(gpuID);
if(argc > 5) {
sscanf(argv[5],"%d",&totalCUDADevice);
printf("### set totalCUDADevice=%d by argv[5] \n",totalCUDADevice);
if (totalCUDADevice ==0){
printf("### Disable cuda device for running in cpu. \n");
}
}
timer_memcpy = timer_new("Memcpy");
timer_commu = timer_new("Communication");
timer_compute = timer_new("Compute");
timer_phase3 = timer_new("Phase3");
logFp = freopen("./log.txt","wr", stderr);
// in1= "D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\tiny_test_case";
// out1="D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\output\\tiny_test_case_out";
//in1= "./Testcase/in2";
//out1="./output/tiny_test_case_out";
///char *in1= "D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\in3";
///char *out1="D:\\c\\codeblock\\c\\parallel_programming\\hw4\\hw4\\Testcase\\output\\out1";
///input( in1 );
input( argv[1] );
if(totalCUDADevice > 0) {
cudaMalloc((void**)&devDist,sizeof(int)*V*n);
}
/*
fprintf(logFp, "\n");
int i,j=0;
for ( i = 0; i < n; ++i) {
for ( j = 0; j < n; ++j) {
if (Dist[i][j] >= INF) fprintf(logFp, "INF ");
else fprintf(logFp, "%d ", Dist[i][j]);
}
fprintf(logFp, "\n");
}
*/
if(totalCUDADevice > 0) {
timer_start(timer_memcpy);
debugCUDACall(cudaMemcpy(devDist,&Dist[0][0],sizeof(int)*V*n,cudaMemcpyHostToDevice));
timer_end(timer_memcpy);
}
int B = 128;
if(argc > 3) {
sscanf(argv[3],"%d",&B);
}
printf("*** B=%d, source=%s, output=%s,\n",B,argv[1],argv[2]);
timer_start(timer_phase3);
timer_end(timer_phase3);
timer_start(timer_compute);
timer_end(timer_compute);
block_APSP(B);
if(totalCUDADevice >0) {
Timer tempMemcpy;
timer_init(&tempMemcpy,"");
timer_start(&tempMemcpy);
debugCUDACall(cudaMemcpy(&Dist[0][0],devDist,sizeof(int)*n*V,cudaMemcpyDeviceToHost));
timer_end(&tempMemcpy);
timer_add(timer_memcpy,&tempMemcpy);
debugCUDACall(cudaFree(devDist));
}
output(argv[2]);
///output( out1 );
timer_print(timer_memcpy,stdout);
timer_print(timer_commu,stdout);
timer_print(timer_compute,stdout);
timer_print(timer_phase3,stdout);
fclose(logFp);
timer_delete(timer_memcpy);
timer_delete(timer_commu);
timer_delete(timer_compute);
timer_delete(timer_phase3);
return 0;
}
// Read the graph: first line "n m", then m edges "a b v" (1-indexed
// vertices).  Initializes the global Dist matrix to 0 on the diagonal and
// INF elsewhere before applying the edge weights.
// Fixes: the input stream is now closed (it leaked before) and a missing
// file aborts with a message instead of crashing on a NULL FILE*.
void input(char *inFileName)
{
    FILE *infile = fopen(inFileName, "r");
    if (infile == NULL) {
        fprintf(stderr, "cannot open input file %s\n", inFileName);
        exit(1);
    }
    fscanf(infile, "%d %d", &n, &m);
    printf("n=%d, m=%d \n",n,m);
    int i,j=0;
    for ( i = 0; i < n; ++i) {
        for ( j = 0; j < n; ++j) {
            if (i == j) Dist[i][j] = 0;
            else Dist[i][j] = INF;
        }
    }
    while (--m >= 0) {
        int a, b, v;
        fscanf(infile, "%d %d %d", &a, &b, &v);
        if (m== 49) printf("m=%d, a=%d, b=%d, v=%d \n",m,a,b,v);
        --a, --b;   // convert to 0-indexed
        Dist[a][b] = v;
    }
    fclose(infile);   // was leaked previously
}
// Write the global Dist matrix, one row per line, printing "INF" for
// unreachable pairs.  Fixes: the output stream is now closed/flushed (it
// leaked before) and a failed fopen aborts with a message.
void output(char *outFileName)
{
    FILE *outfile = fopen(outFileName, "w");
    if (outfile == NULL) {
        fprintf(stderr, "cannot open output file %s\n", outFileName);
        exit(1);
    }
    int i,j=0;
    for ( i = 0; i < n; ++i) {
        for ( j = 0; j < n; ++j) {
            if (Dist[i][j] >= INF) fprintf(outfile, "INF ");
            else fprintf(outfile, "%d ", Dist[i][j]);
        }
        fprintf(outfile, "\n");
    }
    fclose(outfile);   // was leaked previously (risking unflushed output)
}
// Ceiling division for positive operands: smallest q with q*b >= a.
int iceil(int a, int b) {
    int shifted = a + b - 1;
    return shifted / b;
}
/// (y,x) ==> (column,row)
/// | |
/// (0,0) | (0,1) | (0,2)
/// ______|_______|______
/// (1,0) | (1,1) | (1,2)
/// ______|_______|______
/// (2,0) | (2,1) | (2,2)
/// | |
// Blocked Floyd-Warshall all-pairs-shortest-path driver.
// B is the block (tile) edge length; the n x n distance matrix is processed as a
// round x round grid of B x B tiles (see the quadrant diagram above).
// Each round r runs the classic three phases:
//   phase 1: the pivot tile (r,r); phase 2: the pivot row/column tiles;
//   phase 3: the four remaining quadrants.
void block_APSP(int B) {
int round = iceil(n, B);
#if DEBUG_ENABLE
fprintf(logFp,"round=%d ====================================\n",round);
#endif
int r=0;
for ( r = 0; r < round; ++r) {
///* Phase 1*/
#if DEBUG_ENABLE
fprintf(logFp,"[Phase1] r=%d ====================================\n",r);
#endif
/// B, Round, block_start_x, block_start_y, block_width, block_height
cal(B, r, r, r, 1, 1);
#if DEBUG_ENABLE
fprintf(logFp,"[Phase2] r=%d \n",r);
fprintf(logFp," r=%d 1. \n",r);
#endif
///* Phase 2*/ /// (y,x) ==> (column,row) ==> (r,0)
cal(B, r, r, 0, r, 1); ///front row
#if DEBUG_ENABLE
fprintf(logFp," r=%d 2. \n",r);
#endif
cal(B, r, r, r +1, round - r -1, 1); /// back row
#if DEBUG_ENABLE
fprintf(logFp," r=%d 3. \n",r);
#endif
cal(B, r, 0, r, 1, r); /// up column
#if DEBUG_ENABLE
fprintf(logFp," r=%d 4. \n",r);
#endif
cal(B, r, r +1, r, 1, round - r -1); /// down column
#if DEBUG_ENABLE
fprintf(logFp,"[Phase3] r=%d \n",r);
#endif
// Phase 3 is timed separately and accumulated into timer_phase3.
Timer tempTime;
timer_init(&tempTime,"");
timer_start(&tempTime);
#if DEBUG_ENABLE
///* Phase 3*/
fprintf(logFp," r=%d 1. \n",r);
#endif
cal(B, r, 0, 0, r, r); ///2 quadrant
#if DEBUG_ENABLE
fprintf(logFp," r=%d 2. \n",r);
#endif
cal(B, r, 0, r +1, round -r -1, r); /// 1 quadrant
#if DEBUG_ENABLE
fprintf(logFp," r=%d 3. \n",r);
#endif
cal(B, r, r +1, 0, r, round - r -1); /// 3 quadrant
#if DEBUG_ENABLE
fprintf(logFp," r=%d 4. \n",r);
#endif
cal(B, r, r +1, r +1, round -r -1, round - r -1); /// 4 quadrant
timer_end(&tempTime);
timer_add(timer_phase3,&tempTime);
/*
char *devOut0 ="in3_dev_0.txt";
char *devOut1 ="in3_dev_1.txt";
char *devOut2 ="in3_dev_0.txt";
char *devOut3 ="in3_dev_0.txt";
char *devOut0 ="in3_dev_0.txt";
char *devOut0 ="in3_dev_0.txt";
char *devOut0 ="in3_dev_0.txt";
char *devOut0 ="in3_dev_0.txt";
*/
#if ERROR_TRACING
// Dumps the full Dist matrix after every round — CPU-only path (totalCUDADevice == 0).
// NOTE(review): fileName starts as a string literal and is then reassigned from
// stringConcat(); presumably stringConcat allocates a fresh buffer — confirm, and
// note the intermediate buffer is never freed.
if (totalCUDADevice ==0){
char *fileName = "in3_dev_";
char *extFile =".txt";
char roundString[10];
sprintf(roundString,"%d",r);
fileName = stringConcat(fileName, roundString);
fileName = stringConcat(fileName, extFile);
FILE *outfile = fopen(fileName, "w");
fprintf(outfile, "round=%d, \n",r);
int i,j=0;
for ( i = 0; i < n; ++i) {
for ( j = 0; j < n; ++j) {
fprintf(outfile, "[%d][%d]=%d, ",i,j,Dist[i][j]);
}
fprintf(outfile, "\n");
}
fclose(outfile);
}
#endif
}
}
// Relax one Floyd-Warshall pivot k over a tile grid: grid block (blockIdx.x+x, blockIdx.y+y)
// maps to matrix tile (b_i, b_j); thread threadIdx.x owns one column of the tile while the
// bid loop walks the B rows. dDist is the flattened V x V distance matrix.
// (Original author's note, translated: the column variant ran in ~14 s — fast.)
static __global__ void column_CalKernelGPU(int B,int Round,int x,int y,int n,int* dDist,int k) {
int b_i = blockIdx.x+x;
int b_j = blockIdx.y+y;
int valIK,valKJ,valIJ;
for(int bid=0; bid<B; bid+=1) {
int threadIdx_x=bid;
int threadIdx_y=threadIdx.x;
int i=b_i*B+threadIdx_x;
int j=b_j*B+threadIdx_y;
// Valid vertices are 0..n-1; the strict bound matches calKernelCPU and prevents
// reading/writing row or column n (out of the V*V buffer when n == V).
if (i >= n) continue;
if (j >= n) continue;
valIK=dDist[i*V+k];
valKJ=dDist[k*V+j];
valIJ=dDist[i*V+j];
if (valIK + valKJ < valIJ) {
valIJ = valIK + valKJ;
dDist[i*V+j]=valIJ;
}
//__threadfence();
}
}
// Row-major variant of the relaxation kernel: thread threadIdx.x owns one row of the
// tile while the bid loop walks the B columns (uncoalesced column walk — the original
// author's note, translated: this variant ran in ~294 s, much slower).
// Name "rwo" (sic) kept: external callers may reference it.
static __global__ void rwo_CalKernelGPU(int B,int Round,int x,int y,int n,int* dDist,int k) {
int b_i = blockIdx.x+x;
int b_j = blockIdx.y+y;
int valIK,valKJ,valIJ;
for(int bid=0; bid<B; bid+=1) {
int i=b_i*B+threadIdx.x;
int j=b_j*B+bid;
// Strict bound: valid vertices are 0..n-1 (matches calKernelCPU; the old "> n"
// guard let i==n / j==n through, touching memory past the V*V matrix when n == V).
if (i >= n) continue;
if (j >= n) continue;
valIK=dDist[i*V+k];
valKJ=dDist[k*V+j];
valIJ=dDist[i*V+j];
if (valIK + valKJ < valIJ) {
valIJ = valIK + valKJ;
dDist[i*V+j]=valIJ;
}
//__threadfence();
}
}
// CPU reference kernel: relax every pair inside tile (b_i, b_j) against the B pivot
// vertices of this round, reading/writing the global Dist matrix (n valid vertices).
static void calKernelCPU(int B,int Round,int b_i,int b_j) {
const int rowBegin = b_i * B;
const int colBegin = b_j * B;
int rowEnd = rowBegin + B;
int colEnd = colBegin + B;
// Clip the tile to the real matrix size.
if (rowEnd > n) rowEnd = n;
if (colEnd > n) colEnd = n;
const int kLimit = (Round + 1) * B;
for (int k = Round * B; k < kLimit && k < n; ++k) {
// For instance, original index of (0,0) in block (1,2) is (2,5) for V=6,B=2
for (int i = rowBegin; i < rowEnd; ++i) {
for (int j = colBegin; j < colEnd; ++j) {
const int viaPivot = Dist[i][k] + Dist[k][j];
if (viaPivot < Dist[i][j])
Dist[i][j] = viaPivot;
}
}
}
}
// Dispatch the CPU kernel over an h x w grid of tiles whose origin tile is (x, y).
static void calLauncherCPU(int B,int Round,int x,int y,int w,int h) {
for (int row = 0; row < h; ++row) {
for (int col = 0; col < w; ++col) {
calKernelCPU(B, Round, row + x, col + y);
}
}
}
// Cached device properties for the current device (queried lazily, once).
static struct cudaDeviceProp prop;
static int devicePropGot=0;
// Query cudaDeviceProp for currentDev on first use. On failure the cache flag is left
// unset (so a later call can retry) and the error is reported instead of being ignored.
static void getProp() {
if(!devicePropGot) {
cudaError_t err = cudaGetDeviceProperties(&prop,currentDev);
if (err != cudaSuccess) {
fprintf(stderr,"cudaGetDeviceProperties(dev=%d) failed: %s\n",
currentDev, cudaGetErrorString(err));
return;
}
devicePropGot=1;
}
}
// One-shot flag so the CPU-fallback message is printed only once.
int isFirst = 1;
// Launch one relaxation kernel per pivot k of this round over the tile window
// (x, y, w, h). Falls back to the CPU path when no CUDA device is available.
// Each pivot must fully finish before the next (Floyd-Warshall dependency),
// hence the per-k cudaDeviceSynchronize.
void calLauncher(int B,int Round,int x,int y,int w,int h) {
dim3 gdim(h,w,1);
dim3 bdim(B,1,1);
cudaError_t err;
if(totalCUDADevice == 0) {
if (isFirst){
isFirst=0;
printf("run in cpu ,because totalCUDADevice=%d\n",totalCUDADevice );
}
calLauncherCPU(B,Round,x,y,w,h);
return;
}
int mink=Round*B;
int maxk=mink+B;
if(maxk>n) maxk=n;
getProp();
if(bdim.x > prop.maxThreadsPerBlock) {
// NOTE(review): clamping bdim.x below B means columns >= maxThreadsPerBlock of
// each tile are never processed by column_CalKernelGPU — confirm B never exceeds
// the device limit, or make the kernel loop over columns.
bdim.x=prop.maxThreadsPerBlock;
}
for (int k = mink; k < maxk; ++k) { ///
Timer tempTime;
timer_init(&tempTime,"");
timer_start(&tempTime);
column_CalKernelGPU<<<gdim,bdim>>>(B,Round,x,y,n,devDist,k);
// cudaGetLastError catches launch-configuration errors that the
// synchronize alone would not attribute to this launch.
err=cudaGetLastError();
if(err == cudaSuccess) {
err=cudaDeviceSynchronize();
}
timer_end(&tempTime);
timer_add(timer_compute,&tempTime);
if(err != cudaSuccess) {
fprintf(stderr,"%s(gdim=%d,%d,%d)(bid=%d,%d,%d)\n",
cudaGetErrorString(err),
gdim.x,gdim.y,gdim.z,bdim.x,bdim.y,bdim.z);
}
}
}
// Thin wrapper around calLauncher that, when DEBUG_ENABLE is set, logs the tile
// window parameters before the launch and dumps the whole Dist matrix afterwards.
void cal(int B, int Round, int x,int y,int w,int h)
{
#if DEBUG_ENABLE
int i,j=0;
int block_end_x = x + h ;
int block_end_y = y + w;
fprintf(logFp,"B=%d, Round=%d, block_start_x=%d, block_start_y=%d, block_width=%d, block_height=%d, \n",B,Round,x,y,w,h);
fprintf(logFp,"block_end_x=%d, block_end_y=%d,\n",block_end_x,block_end_y);
#endif
calLauncher(B,Round,x,y,w,h);
#if DEBUG_ENABLE
fprintf(logFp, "\n");
// Comma expression below is effectively "j = 0" only; i is (re)initialized by the loop.
i,j=0;
for ( i = 0; i < n; ++i) {
for ( j = 0; j < n; ++j) {
if (Dist[i][j] >= INF) fprintf(logFp, "INF ");
else fprintf(logFp, "%d ", Dist[i][j]);
}
fprintf(logFp, "\n");
}
fprintf(logFp, "------------------------------------------------\n");
#endif
}
// Entry point: run the APSP driver (mainRun) and report both CPU time (clock())
// and wall-clock time (gettimeofday) for the whole run.
int main(int argc, char* argv[]) {
struct timeval tv, tv2;
clock_t endTime;
unsigned long long start_utime, end_utime;
endTime =clock();
gettimeofday(&tv, NULL);
mainRun( argc, argv);
gettimeofday(&tv2, NULL);
endTime =clock() - endTime ;
// Widen tv_sec before multiplying: time_t * 1000000 overflows when time_t is 32-bit.
start_utime = (unsigned long long)tv.tv_sec * 1000000ULL + (unsigned long long)tv.tv_usec;
end_utime = (unsigned long long)tv2.tv_sec * 1000000ULL + (unsigned long long)tv2.tv_usec;
printf("Clock=%f sec. , Gettimeofday time = %llu.%03llu milisecond; %llu.%03llu sec \n",((float)endTime) /CLOCKS_PER_SEC, (end_utime - start_utime)/1000, (end_utime - start_utime)%1000, (end_utime - start_utime)/1000000, (end_utime - start_utime)%1000000 );
return 0;
}
|
c6610f0813a6faeb1533b59964d8b612a0effcbe.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <iostream>
// Single-thread demo kernel: store a+b into device memory *c.
__global__ void add(int a, int b, int *c) {
*c = a+b;
}
// Host driver: allocate one device int, launch add<<<1,1>>>, copy the result back
// and print it. NOTE(review): API return codes are unchecked, and printf relies on
// <iostream> transitively providing <cstdio> — confirm on the target toolchain.
int main (void) {
int c;
int *dev_c;
hipMalloc((void **) &dev_c, sizeof(int));
hipLaunchKernelGGL(( add), dim3(1), dim3(1), 0, 0, 2, 7, dev_c);
// Blocking copy also synchronizes with the kernel launch above.
hipMemcpy(&c, dev_c, sizeof(int), hipMemcpyDeviceToHost);
printf("2+7 = %d\n", c);
hipFree(dev_c);
return 0;
} | c6610f0813a6faeb1533b59964d8b612a0effcbe.cu | #include <iostream>
// Single-thread demo kernel: store a+b into device memory *c.
__global__ void add(int a, int b, int *c) {
*c = a+b;
}
// Host driver: allocate one device int, launch add<<<1,1>>>, copy the result back
// and print it. NOTE(review): API return codes are unchecked, and printf relies on
// <iostream> transitively providing <cstdio> — confirm on the target toolchain.
int main (void) {
int c;
int *dev_c;
cudaMalloc((void **) &dev_c, sizeof(int));
add<<<1, 1>>>(2, 7, dev_c);
// Blocking copy also synchronizes with the kernel launch above.
cudaMemcpy(&c, dev_c, sizeof(int), cudaMemcpyDeviceToHost);
printf("2+7 = %d\n", c);
cudaFree(dev_c);
return 0;
} |
8fcd46de03011776ee21c6e4665d1d7d3c722aa7.hip | // !!! This is a file automatically generated by hipify!!!
#include "common.hu"
// Time a forward FFT and an inverse FFT (with a host-side 1/N scaling pass in
// between) over DATA_SIZE elements. Returns {fft_ms, ifft_ms} measured with the
// caller-provided events. NOTE(review): assumes DATA_TYPE is a complex
// interleaved type compatible with float2 (see the scaling loop) — confirm in
// common.hu.
std::vector<float> benchmark(DATA_TYPE *output,
DATA_TYPE *data,
hipEvent_t start, hipEvent_t stop)
{
DATA_TYPE *dev_output, *dev_middle, *dev_data, *middle;
std::vector<float> time(2);
/*
Setup
*/
// Pinned host buffer for the intermediate result; three device buffers.
cudaCheckReturn(hipHostMalloc(&middle, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(hipMalloc(&dev_data, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(hipMalloc(&dev_middle, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(hipMalloc(&dev_output, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(hipMemcpy(dev_data, data, DATA_SIZE * sizeof(DATA_TYPE),
hipMemcpyHostToDevice));
// One 1-D single-precision complex-to-complex plan, reused for both directions.
hipfftHandle plan;
cufftCheckReturn(hipfftCreate(&plan));
long long len = DATA_SIZE;
size_t ws = 0;
cufftCheckReturn(
cufftXtMakePlanMany(
plan, 1, &len,
NULL, 1, 1, HIP_C_32F,
NULL, 1, 1, HIP_C_32F,
1, &ws, HIP_C_32F));
/*
FFT
*/
// Drain prior work so the event pair brackets only the forward transform.
cudaCheckReturn(hipDeviceSynchronize());
cudaCheckReturn(hipEventRecord(start));
cufftCheckReturn(cufftXtExec(plan, dev_data, dev_middle, HIPFFT_FORWARD));
cudaCheckReturn(hipEventRecord(stop));
cudaCheckReturn(hipEventSynchronize(stop));
cudaCheckKernel();
cudaCheckReturn(hipEventElapsedTime(&time[0], start, stop));
/*
Scaling
*/
// Normalize by DATA_SIZE on the host (round-trip copy); not included in the timings.
cudaCheckReturn(hipMemcpy(middle, dev_middle, DATA_SIZE * sizeof(DATA_TYPE),
hipMemcpyDeviceToHost));
for (size_t i = 0; i < DATA_SIZE; i++) {
float2 m = middle[i];
m.x /= DATA_SIZE;
m.y /= DATA_SIZE;
middle[i] = m;
}
cudaCheckReturn(hipMemcpy(dev_middle, middle, DATA_SIZE * sizeof(DATA_TYPE),
hipMemcpyHostToDevice));
/*
IFFT
*/
cudaCheckReturn(hipDeviceSynchronize());
cudaCheckReturn(hipEventRecord(start));
cufftCheckReturn(cufftXtExec(plan, dev_middle, dev_output, HIPFFT_BACKWARD));
cudaCheckReturn(hipEventRecord(stop));
cudaCheckReturn(hipEventSynchronize(stop));
cudaCheckKernel();
cudaCheckReturn(hipEventElapsedTime(&time[1], start, stop));
/*
Close
*/
cufftCheckReturn(hipfftDestroy(plan));
cudaCheckReturn(hipMemcpy(output, dev_output, DATA_SIZE * sizeof(DATA_TYPE),
hipMemcpyDeviceToHost));
cudaCheckReturn(hipHostFree(middle));
cudaCheckReturn(hipFree(dev_output));
cudaCheckReturn(hipFree(dev_middle));
cudaCheckReturn(hipFree(dev_data));
return time;
}
| 8fcd46de03011776ee21c6e4665d1d7d3c722aa7.cu | #include "common.hu"
// Time a forward FFT and an inverse FFT (with a host-side 1/N scaling pass in
// between) over DATA_SIZE elements. Returns {fft_ms, ifft_ms} measured with the
// caller-provided events. NOTE(review): assumes DATA_TYPE is a complex
// interleaved type compatible with float2 (see the scaling loop) — confirm in
// common.hu.
std::vector<float> benchmark(DATA_TYPE *output,
DATA_TYPE *data,
cudaEvent_t start, cudaEvent_t stop)
{
DATA_TYPE *dev_output, *dev_middle, *dev_data, *middle;
std::vector<float> time(2);
/*
Setup
*/
// Pinned host buffer for the intermediate result; three device buffers.
cudaCheckReturn(cudaMallocHost(&middle, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(cudaMalloc(&dev_data, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(cudaMalloc(&dev_middle, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(cudaMalloc(&dev_output, DATA_SIZE * sizeof(DATA_TYPE)));
cudaCheckReturn(cudaMemcpy(dev_data, data, DATA_SIZE * sizeof(DATA_TYPE),
cudaMemcpyHostToDevice));
// One 1-D single-precision complex-to-complex plan, reused for both directions.
cufftHandle plan;
cufftCheckReturn(cufftCreate(&plan));
long long len = DATA_SIZE;
size_t ws = 0;
cufftCheckReturn(
cufftXtMakePlanMany(
plan, 1, &len,
NULL, 1, 1, CUDA_C_32F,
NULL, 1, 1, CUDA_C_32F,
1, &ws, CUDA_C_32F));
/*
FFT
*/
// Drain prior work so the event pair brackets only the forward transform.
cudaCheckReturn(cudaDeviceSynchronize());
cudaCheckReturn(cudaEventRecord(start));
cufftCheckReturn(cufftXtExec(plan, dev_data, dev_middle, CUFFT_FORWARD));
cudaCheckReturn(cudaEventRecord(stop));
cudaCheckReturn(cudaEventSynchronize(stop));
cudaCheckKernel();
cudaCheckReturn(cudaEventElapsedTime(&time[0], start, stop));
/*
Scaling
*/
// Normalize by DATA_SIZE on the host (round-trip copy); not included in the timings.
cudaCheckReturn(cudaMemcpy(middle, dev_middle, DATA_SIZE * sizeof(DATA_TYPE),
cudaMemcpyDeviceToHost));
for (size_t i = 0; i < DATA_SIZE; i++) {
float2 m = middle[i];
m.x /= DATA_SIZE;
m.y /= DATA_SIZE;
middle[i] = m;
}
cudaCheckReturn(cudaMemcpy(dev_middle, middle, DATA_SIZE * sizeof(DATA_TYPE),
cudaMemcpyHostToDevice));
/*
IFFT
*/
cudaCheckReturn(cudaDeviceSynchronize());
cudaCheckReturn(cudaEventRecord(start));
cufftCheckReturn(cufftXtExec(plan, dev_middle, dev_output, CUFFT_INVERSE));
cudaCheckReturn(cudaEventRecord(stop));
cudaCheckReturn(cudaEventSynchronize(stop));
cudaCheckKernel();
cudaCheckReturn(cudaEventElapsedTime(&time[1], start, stop));
/*
Close
*/
cufftCheckReturn(cufftDestroy(plan));
cudaCheckReturn(cudaMemcpy(output, dev_output, DATA_SIZE * sizeof(DATA_TYPE),
cudaMemcpyDeviceToHost));
cudaCheckReturn(cudaFreeHost(middle));
cudaCheckReturn(cudaFree(dev_output));
cudaCheckReturn(cudaFree(dev_middle));
cudaCheckReturn(cudaFree(dev_data));
return time;
}
|
e53a8de9621eabd33855a4bc9b8cde72f4fba811.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/* Copyright 2017-2020 ABBYY Production LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------------------------------------------------*/
#include <NeoMathEngine/NeoMathEngineDefs.h>
#ifdef NEOML_USE_CUDA
#include <CudaMathEngine.h>
#include <CudaDevice.h>
#include <CudaCommon.h>
#include <CudaMathEngineDnnPoolings.h>
#include <MemoryHandleInternal.h>
#include <MathEngineCommon.h>
#include <Kernels/CudaDnnPoolingKernels.h>
#include <Kernels/CudaDnn3dPoolingKernels.h>
#include <Kernels/CudaDnnGlobalPoolingKernels.h>
#include <Kernels/CudaDnnTimePoolingKernels.h>
#include <Kernels/CudaDnnGlobalTimePoolingKernels.h>
// GPU (HIP-ified CUDA) implementations of the NeoML pooling operations.
// Pattern throughout: Init* builds a descriptor holding blob shapes and
// filter/stride parameters; the forward/backward methods compute a launch grid
// from the result blob shape and dispatch the matching kernel.
namespace NeoML {
// Build the descriptor for 2-D max pooling (shapes + filter/stride sizes).
CMaxPoolingDesc* CCudaMathEngine::InitMaxPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int strideHeight, int strideWidth, const CBlobDesc& result )
{
CCudaMaxPoolingDesc* desc = new CCudaMaxPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
return desc;
}
// Forward 2-D max pooling; optionally records the argmax indices for backward.
void CCudaMathEngine::BlobMaxPooling(const CMaxPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle* maxIndicesData, const CFloatHandle& resultData)
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxPoolingDescInternal& desc = static_cast<const CCudaMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
// Index output is optional: pass a null pointer to the kernel when not requested.
int* maxIndexPtr = 0;
if(maxIndicesData != 0) {
maxIndexPtr = GetRaw( *maxIndicesData );
}
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount,
result.ObjectCount(), result.Height() * result.Width(), result.Depth() * result.Channels());
hipLaunchKernelGGL(( BlobMaxPoolingKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw(sourceData), maxIndexPtr, GetRaw(resultData));
}
// Backward 2-D max pooling: scatter output diffs to the recorded argmax positions.
void CCudaMathEngine::BlobMaxPoolingBackward( const CMaxPoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxPoolingDescInternal& desc = static_cast<const CCudaMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& inputDiff = desc.Source;
const CCudaBlobDesc& outputDiff = desc.Result;
VectorFill(inputDiffData, 0, inputDiff.BlobSize());
// Overlapping windows (filter > stride) mean several outputs may hit the same
// input cell, so the kernel must accumulate atomically.
bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth;
int batchNorm = (outputDiff.ObjectCount() + BlobMaxPoolingBackwardCombine - 1) / BlobMaxPoolingBackwardCombine;
dim3 blockCount;
dim3 threadCount;
int totalChannels = outputDiff.Depth() * outputDiff.Channels();
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, batchNorm, outputDiff.Height() * outputDiff.Width(), totalChannels);
hipLaunchKernelGGL(( BlobMaxPoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, isAtomic,
GetRaw(outputDiffData), GetRaw(maxIndicesData), GetRaw(inputDiffData), batchNorm );
}
// Build the descriptor for 2-D mean pooling.
CMeanPoolingDesc* CCudaMathEngine::InitMeanPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int strideHeight, int strideWidth, const CBlobDesc& result )
{
CCudaMeanPoolingDesc* desc = new CCudaMeanPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
return desc;
}
// Forward 2-D mean pooling.
void CCudaMathEngine::BlobMeanPooling( const CMeanPoolingDesc& poolingDesc,
const CFloatHandle& sourceData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMeanPoolingDescInternal& desc = static_cast<const CCudaMeanPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
int totalChannels = result.Depth() * result.Channels();
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount,
result.ObjectCount(), result.Height() * result.Width(), totalChannels);
hipLaunchKernelGGL(( BlobMeanPoolingKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw(sourceData), GetRaw(resultData) );
}
// Backward 2-D mean pooling: spread each output diff evenly over its window.
void CCudaMathEngine::BlobMeanPoolingBackward( const CMeanPoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMeanPoolingDescInternal& desc = static_cast<const CCudaMeanPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& outputDiff = desc.Result;
// Overlapping windows require atomic accumulation in the kernel.
const bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth;
dim3 blockCount;
dim3 threadCount;
VectorFill( inputDiffData, 0, desc.Source.BlobSize() );
getCudaTaskGrid3D( blockCount, threadCount, outputDiff.ObjectCount(), outputDiff.Height() * outputDiff.Width(),
outputDiff.Depth() * outputDiff.Channels() );
hipLaunchKernelGGL(( BlobMeanPoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw(outputDiffData),
GetRaw(inputDiffData), isAtomic );
}
// Build the descriptor for global max pooling over the time (BatchLength) axis.
CGlobalMaxOverTimePoolingDesc* CCudaMathEngine::InitGlobalMaxOverTimePooling( const CBlobDesc& source, const CBlobDesc& result )
{
CCudaGlobalMaxOverTimePoolingDesc* desc = new CCudaGlobalMaxOverTimePoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
return desc;
}
// Forward global max-over-time pooling; separate kernels with/without index output.
void CCudaMathEngine::BlobGlobalMaxOverTimePooling( const CGlobalMaxOverTimePoolingDesc& poolingDesc,
const CFloatHandle& sourceData, const CIntHandle* maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaGlobalMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& source = desc.Source;
// One thread per element of a single time step; each thread scans the time axis.
int objectCount = source.BatchLength();
int objectSize = source.BlobSize() / objectCount;
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, objectSize);
if( maxIndicesData == 0 ) {
hipLaunchKernelGGL(( BlobGlobalMaxOverTimePoolingKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw(sourceData), GetRaw(resultData) );
} else {
hipLaunchKernelGGL(( BlobGlobalMaxOverTimePoolingWithIndexKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw(sourceData), GetRaw(*maxIndicesData), GetRaw(resultData) );
}
}
// Backward global max-over-time pooling: route diffs back to the argmax time steps.
void CCudaMathEngine::BlobGlobalMaxOverTimePoolingBackward( const CGlobalMaxOverTimePoolingDesc& poolingDesc,
const CFloatHandle& sourceData, const CIntHandle& maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaGlobalMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& source = desc.Source;
const CCudaBlobDesc& result = desc.Result;
VectorFill(resultData, 0, result.BlobSize());
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, source.BlobSize());
hipLaunchKernelGGL(( BlobGlobalMaxOverTimePoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw(sourceData), GetRaw(maxIndicesData), GetRaw(resultData) );
}
// Build the descriptor for global (whole-image) max pooling with top-k style output.
CGlobalMaxPoolingDesc* CCudaMathEngine::InitGlobalMaxPooling( const CBlobDesc& source, const CBlobDesc& maxIndices, const CBlobDesc& result )
{
CCudaGlobalMaxPoolingDesc* desc = new CCudaGlobalMaxPoolingDesc();
desc->Internal.Source = source;
desc->Internal.MaxIndices = maxIndices;
desc->Internal.Result = result;
return desc;
}
// Forward global max pooling; the shared-memory budget caps the thread count.
void CCudaMathEngine::BlobGlobalMaxPooling( const CGlobalMaxPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle& maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxPoolingDescInternal& desc = static_cast<const CCudaGlobalMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& source = desc.Source;
const CCudaBlobDesc& maxIndices = desc.MaxIndices;
const CCudaBlobDesc& result = desc.Result;
ASSERT_EXPR(source.ObjectCount() == result.ObjectCount() && maxIndices.ObjectCount() == result.ObjectCount());
ASSERT_EXPR(maxIndices.ObjectSize() == result.ObjectSize());
int poolSize = source.Depth() * source.Height() * source.Width();
int maxCount = result.Depth() * result.Height() * result.Width();
int poolSizeNorm = (poolSize + BlobGlobalMaxPoolingCombine - 1) / BlobGlobalMaxPoolingCombine;
// As the shared memory size depends on maxCount, we may need to limit the number of threads
int sharedMemoryPerThread = 4 * maxCount * sizeof(float);
int maxThreadCount = device->SharedMemoryLimit / sharedMemoryPerThread;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid2DMinYX(1, device->ThreadMaxCount, blockCount, threadCount,
source.ObjectCount() * source.Channels(), poolSizeNorm, maxThreadCount);
// Single block along X so the block-level reduction covers the whole pool.
blockCount.x = 1;
int sharedSize = threadCount.y * threadCount.x * sharedMemoryPerThread;
hipLaunchKernelGGL(( BlobGlobalMaxPoolingKernel), dim3(blockCount), dim3(threadCount), sharedSize, 0, desc, GetRaw( sourceData ),
GetRaw( maxIndicesData ), GetRaw( resultData ), poolSize, maxCount, poolSizeNorm );
}
// Backward global max pooling: scatter diffs to the recorded argmax positions.
void CCudaMathEngine::BlobGlobalMaxPoolingBackward( const CGlobalMaxPoolingDesc& poolingDesc,
const CFloatHandle& outputDiffData, const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxPoolingDescInternal& desc = static_cast<const CCudaGlobalMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& inputDiff = desc.Source;
const CCudaBlobDesc& outputDiff = desc.Result;
VectorFill( inputDiffData, 0, inputDiff.BlobSize() );
int poolSize = inputDiff.Depth() * inputDiff.Height() * inputDiff.Width();
int maxCount = outputDiff.Depth() * outputDiff.Height() * outputDiff.Width();
int fullSize = outputDiff.ObjectCount() * maxCount * outputDiff.Channels();
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, fullSize, BlobGlobalMaxPoolingBackwardCombine);
hipLaunchKernelGGL(( BlobGlobalMaxPoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ), poolSize, maxCount, fullSize );
}
// Build the descriptor for 3-D max pooling.
C3dMaxPoolingDesc* CCudaMathEngine::Init3dMaxPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int filterDepth,
int strideHeight, int strideWidth, int strideDepth,
const CBlobDesc& result )
{
CCuda3dMaxPoolingDesc* desc = new CCuda3dMaxPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.FilterDepth = filterDepth;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
desc->Internal.StrideDepth = strideDepth;
return desc;
}
// Forward 3-D max pooling; optionally records argmax indices.
void CCudaMathEngine::Blob3dMaxPooling( const C3dMaxPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle* maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->IsNull() || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMaxPoolingDescInternal& desc = static_cast<const CCuda3dMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, result.ObjectCount(),
result.Depth() * result.Height() * result.Width(), result.Channels());
hipLaunchKernelGGL(( Blob3dMaxPoolingKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw( sourceData ),
maxIndicesData == 0 ? 0 : GetRaw( *maxIndicesData ), GetRaw( resultData ) );
}
// Backward 3-D max pooling: scatter diffs to recorded argmax positions.
void CCudaMathEngine::Blob3dMaxPoolingBackward( const C3dMaxPoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMaxPoolingDescInternal& desc = static_cast<const CCuda3dMaxPoolingDesc&>( poolingDesc ).Internal;
VectorFill( inputDiffData, 0, desc.Source.BlobSize() );
// Overlap in any of the three dimensions forces atomic accumulation.
bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth || desc.FilterDepth > desc.StrideDepth;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, desc.Result.ObjectCount(),
desc.Result.Depth() * desc.Result.Height() * desc.Result.Width(), desc.Result.Channels());
hipLaunchKernelGGL(( Blob3dMaxPoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ), isAtomic );
}
// Build the descriptor for 3-D mean pooling.
C3dMeanPoolingDesc* CCudaMathEngine::Init3dMeanPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int filterDepth,
int strideHeight, int strideWidth, int strideDepth,
const CBlobDesc& result )
{
CCuda3dMeanPoolingDesc* desc = new CCuda3dMeanPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.FilterDepth = filterDepth;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
desc->Internal.StrideDepth = strideDepth;
return desc;
}
// Forward 3-D mean pooling.
void CCudaMathEngine::Blob3dMeanPooling( const C3dMeanPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMeanPoolingDescInternal& desc = static_cast<const CCuda3dMeanPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, result.ObjectCount(),
result.Depth() * result.Height() * result.Width(), result.Channels());
hipLaunchKernelGGL(( Blob3dMeanPoolingKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw( sourceData ), GetRaw( resultData ) );
}
// Backward 3-D mean pooling; zero-fill is skipped when windows exactly tile the input.
void CCudaMathEngine::Blob3dMeanPoolingBackward( const C3dMeanPoolingDesc& poolingDesc,
const CFloatHandle& outputDiffData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMeanPoolingDescInternal& desc = static_cast<const CCuda3dMeanPoolingDesc&>( poolingDesc ).Internal;
if( desc.FilterHeight != desc.StrideHeight || desc.FilterWidth != desc.StrideWidth || desc.FilterDepth != desc.StrideDepth ) {
// Either the cube blocks used for pooling have nonzero intersections, and we need to add up several diffs,
// or some of the data is skipped when pooling and we need to set diff = 0 for it
VectorFill( inputDiffData, 0, desc.Source.BlobSize() );
}
// Indicates that the cube blocks used for pooling have nonzero intersections, and the diffs should be added up (atomicAdd)
bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth || desc.FilterDepth > desc.StrideDepth;
const CCudaBlobDesc& outputDiff = desc.Result;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, outputDiff.ObjectCount(),
outputDiff.Depth() * outputDiff.Height() * outputDiff.Width(), outputDiff.Channels());
hipLaunchKernelGGL(( Blob3dMeanPoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, desc, GetRaw( outputDiffData ),
GetRaw( inputDiffData ), isAtomic );
}
// Build the descriptor for windowed max pooling along the time axis.
CMaxOverTimePoolingDesc* CCudaMathEngine::InitMaxOverTimePooling( const CBlobDesc& source,
int filterLen, int strideLen, const CBlobDesc& result )
{
CCudaMaxOverTimePoolingDesc* desc = new CCudaMaxOverTimePoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterLen = filterLen;
desc->Internal.StrideLen = strideLen;
return desc;
}
// Forward max-over-time pooling; shared memory holds per-thread partial maxima
// (value+index pairs when indices are requested).
void CCudaMathEngine::BlobMaxOverTimePooling( const CMaxOverTimePoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle* maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
int xSize = (desc.FilterLen + BlobMaxOverTimePoolingCombine - 1) / BlobMaxOverTimePoolingCombine;
xSize = alignXSizeForWarp(xSize);
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid2DMinYX(1, device->ThreadMaxCount, blockCount, threadCount, result.BlobSize(), xSize);
blockCount.x = 1; // there may be only one block along the X coordinate so that the maximum can be reduced inside a single block
int sharedSize = threadCount.x * threadCount.y * threadCount.z;
if( maxIndicesData != 0 ) {
hipLaunchKernelGGL(( BlobMaxOverTimePoolingKernel), dim3(blockCount), dim3(threadCount), sharedSize * sizeof(CValueWithIndex), 0, desc,
GetRaw( sourceData ), GetRaw( *maxIndicesData ), GetRaw( resultData ) );
} else {
hipLaunchKernelGGL(( BlobMaxOverTimePoolingKernel), dim3(blockCount), dim3(threadCount), sharedSize * sizeof(float), 0, desc,
GetRaw( sourceData ), GetRaw( resultData ) );
}
}
// Backward max-over-time pooling: plain store when windows don't overlap,
// atomicAdd-based store when they do.
void CCudaMathEngine::BlobMaxOverTimePoolingBackward( const CMaxOverTimePoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& inputDiff = desc.Source;
const CCudaBlobDesc& outputDiff = desc.Result;
// Set diff to 0
CCudaMathEngine::VectorFill( inputDiffData, 0, inputDiff.BlobSize() );
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, outputDiff.BlobSize(), BlobMaxOverTimePoolingBackwardCombine);
if( desc.StrideLen >= desc.FilterLen ) {
// The pooling areas do not intersect, no need to add
CStoreSet store;
hipLaunchKernelGGL(( BlobMaxOverTimePoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, store, desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ) );
} else {
CStoreAtomicAdd store;
hipLaunchKernelGGL(( BlobMaxOverTimePoolingBackwardKernel), dim3(blockCount), dim3(threadCount), 0, 0, store, desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ));
}
}
} // namespace NeoML
#endif // NEOML_USE_CUDA
| e53a8de9621eabd33855a4bc9b8cde72f4fba811.cu | /* Copyright © 2017-2020 ABBYY Production LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------------------------------------------------*/
#include <NeoMathEngine/NeoMathEngineDefs.h>
#ifdef NEOML_USE_CUDA
#include <CudaMathEngine.h>
#include <CudaDevice.h>
#include <CudaCommon.h>
#include <CudaMathEngineDnnPoolings.h>
#include <MemoryHandleInternal.h>
#include <MathEngineCommon.h>
#include <Kernels/CudaDnnPoolingKernels.h>
#include <Kernels/CudaDnn3dPoolingKernels.h>
#include <Kernels/CudaDnnGlobalPoolingKernels.h>
#include <Kernels/CudaDnnTimePoolingKernels.h>
#include <Kernels/CudaDnnGlobalTimePoolingKernels.h>
namespace NeoML {
CMaxPoolingDesc* CCudaMathEngine::InitMaxPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int strideHeight, int strideWidth, const CBlobDesc& result )
{
CCudaMaxPoolingDesc* desc = new CCudaMaxPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
return desc;
}
void CCudaMathEngine::BlobMaxPooling(const CMaxPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle* maxIndicesData, const CFloatHandle& resultData)
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxPoolingDescInternal& desc = static_cast<const CCudaMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
int* maxIndexPtr = 0;
if(maxIndicesData != 0) {
maxIndexPtr = GetRaw( *maxIndicesData );
}
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount,
result.ObjectCount(), result.Height() * result.Width(), result.Depth() * result.Channels());
BlobMaxPoolingKernel<<<blockCount, threadCount>>>( desc, GetRaw(sourceData), maxIndexPtr, GetRaw(resultData));
}
void CCudaMathEngine::BlobMaxPoolingBackward( const CMaxPoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxPoolingDescInternal& desc = static_cast<const CCudaMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& inputDiff = desc.Source;
const CCudaBlobDesc& outputDiff = desc.Result;
VectorFill(inputDiffData, 0, inputDiff.BlobSize());
bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth;
int batchNorm = (outputDiff.ObjectCount() + BlobMaxPoolingBackwardCombine - 1) / BlobMaxPoolingBackwardCombine;
dim3 blockCount;
dim3 threadCount;
int totalChannels = outputDiff.Depth() * outputDiff.Channels();
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, batchNorm, outputDiff.Height() * outputDiff.Width(), totalChannels);
BlobMaxPoolingBackwardKernel<<<blockCount, threadCount>>>( desc, isAtomic,
GetRaw(outputDiffData), GetRaw(maxIndicesData), GetRaw(inputDiffData), batchNorm );
}
CMeanPoolingDesc* CCudaMathEngine::InitMeanPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int strideHeight, int strideWidth, const CBlobDesc& result )
{
CCudaMeanPoolingDesc* desc = new CCudaMeanPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
return desc;
}
void CCudaMathEngine::BlobMeanPooling( const CMeanPoolingDesc& poolingDesc,
const CFloatHandle& sourceData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMeanPoolingDescInternal& desc = static_cast<const CCudaMeanPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
int totalChannels = result.Depth() * result.Channels();
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount,
result.ObjectCount(), result.Height() * result.Width(), totalChannels);
BlobMeanPoolingKernel<<<blockCount, threadCount>>>( desc, GetRaw(sourceData), GetRaw(resultData) );
}
void CCudaMathEngine::BlobMeanPoolingBackward( const CMeanPoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMeanPoolingDescInternal& desc = static_cast<const CCudaMeanPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& outputDiff = desc.Result;
const bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth;
dim3 blockCount;
dim3 threadCount;
VectorFill( inputDiffData, 0, desc.Source.BlobSize() );
getCudaTaskGrid3D( blockCount, threadCount, outputDiff.ObjectCount(), outputDiff.Height() * outputDiff.Width(),
outputDiff.Depth() * outputDiff.Channels() );
BlobMeanPoolingBackwardKernel<<<blockCount, threadCount>>>( desc, GetRaw(outputDiffData),
GetRaw(inputDiffData), isAtomic );
}
CGlobalMaxOverTimePoolingDesc* CCudaMathEngine::InitGlobalMaxOverTimePooling( const CBlobDesc& source, const CBlobDesc& result )
{
CCudaGlobalMaxOverTimePoolingDesc* desc = new CCudaGlobalMaxOverTimePoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
return desc;
}
void CCudaMathEngine::BlobGlobalMaxOverTimePooling( const CGlobalMaxOverTimePoolingDesc& poolingDesc,
const CFloatHandle& sourceData, const CIntHandle* maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaGlobalMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& source = desc.Source;
int objectCount = source.BatchLength();
int objectSize = source.BlobSize() / objectCount;
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, objectSize);
if( maxIndicesData == 0 ) {
BlobGlobalMaxOverTimePoolingKernel<<<blockCount, threadCount>>>( desc, GetRaw(sourceData), GetRaw(resultData) );
} else {
BlobGlobalMaxOverTimePoolingWithIndexKernel<<<blockCount, threadCount>>>( desc, GetRaw(sourceData), GetRaw(*maxIndicesData), GetRaw(resultData) );
}
}
void CCudaMathEngine::BlobGlobalMaxOverTimePoolingBackward( const CGlobalMaxOverTimePoolingDesc& poolingDesc,
const CFloatHandle& sourceData, const CIntHandle& maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaGlobalMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& source = desc.Source;
const CCudaBlobDesc& result = desc.Result;
VectorFill(resultData, 0, result.BlobSize());
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, source.BlobSize());
BlobGlobalMaxOverTimePoolingBackwardKernel<<<blockCount, threadCount>>>( desc, GetRaw(sourceData), GetRaw(maxIndicesData), GetRaw(resultData) );
}
CGlobalMaxPoolingDesc* CCudaMathEngine::InitGlobalMaxPooling( const CBlobDesc& source, const CBlobDesc& maxIndices, const CBlobDesc& result )
{
CCudaGlobalMaxPoolingDesc* desc = new CCudaGlobalMaxPoolingDesc();
desc->Internal.Source = source;
desc->Internal.MaxIndices = maxIndices;
desc->Internal.Result = result;
return desc;
}
void CCudaMathEngine::BlobGlobalMaxPooling( const CGlobalMaxPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle& maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxPoolingDescInternal& desc = static_cast<const CCudaGlobalMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& source = desc.Source;
const CCudaBlobDesc& maxIndices = desc.MaxIndices;
const CCudaBlobDesc& result = desc.Result;
ASSERT_EXPR(source.ObjectCount() == result.ObjectCount() && maxIndices.ObjectCount() == result.ObjectCount());
ASSERT_EXPR(maxIndices.ObjectSize() == result.ObjectSize());
int poolSize = source.Depth() * source.Height() * source.Width();
int maxCount = result.Depth() * result.Height() * result.Width();
int poolSizeNorm = (poolSize + BlobGlobalMaxPoolingCombine - 1) / BlobGlobalMaxPoolingCombine;
// As the shared memory size depends on maxCount, we may need to limit the number of threads
int sharedMemoryPerThread = 4 * maxCount * sizeof(float);
int maxThreadCount = device->SharedMemoryLimit / sharedMemoryPerThread;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid2DMinYX(1, device->ThreadMaxCount, blockCount, threadCount,
source.ObjectCount() * source.Channels(), poolSizeNorm, maxThreadCount);
blockCount.x = 1;
int sharedSize = threadCount.y * threadCount.x * sharedMemoryPerThread;
BlobGlobalMaxPoolingKernel<<<blockCount, threadCount, sharedSize>>>( desc, GetRaw( sourceData ),
GetRaw( maxIndicesData ), GetRaw( resultData ), poolSize, maxCount, poolSizeNorm );
}
void CCudaMathEngine::BlobGlobalMaxPoolingBackward( const CGlobalMaxPoolingDesc& poolingDesc,
const CFloatHandle& outputDiffData, const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaGlobalMaxPoolingDescInternal& desc = static_cast<const CCudaGlobalMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& inputDiff = desc.Source;
const CCudaBlobDesc& outputDiff = desc.Result;
VectorFill( inputDiffData, 0, inputDiff.BlobSize() );
int poolSize = inputDiff.Depth() * inputDiff.Height() * inputDiff.Width();
int maxCount = outputDiff.Depth() * outputDiff.Height() * outputDiff.Width();
int fullSize = outputDiff.ObjectCount() * maxCount * outputDiff.Channels();
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, fullSize, BlobGlobalMaxPoolingBackwardCombine);
BlobGlobalMaxPoolingBackwardKernel<<<blockCount, threadCount>>>( desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ), poolSize, maxCount, fullSize );
}
C3dMaxPoolingDesc* CCudaMathEngine::Init3dMaxPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int filterDepth,
int strideHeight, int strideWidth, int strideDepth,
const CBlobDesc& result )
{
CCuda3dMaxPoolingDesc* desc = new CCuda3dMaxPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.FilterDepth = filterDepth;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
desc->Internal.StrideDepth = strideDepth;
return desc;
}
void CCudaMathEngine::Blob3dMaxPooling( const C3dMaxPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle* maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->IsNull() || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMaxPoolingDescInternal& desc = static_cast<const CCuda3dMaxPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, result.ObjectCount(),
result.Depth() * result.Height() * result.Width(), result.Channels());
Blob3dMaxPoolingKernel<<<blockCount, threadCount>>>( desc, GetRaw( sourceData ),
maxIndicesData == 0 ? 0 : GetRaw( *maxIndicesData ), GetRaw( resultData ) );
}
void CCudaMathEngine::Blob3dMaxPoolingBackward( const C3dMaxPoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMaxPoolingDescInternal& desc = static_cast<const CCuda3dMaxPoolingDesc&>( poolingDesc ).Internal;
VectorFill( inputDiffData, 0, desc.Source.BlobSize() );
bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth || desc.FilterDepth > desc.StrideDepth;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, desc.Result.ObjectCount(),
desc.Result.Depth() * desc.Result.Height() * desc.Result.Width(), desc.Result.Channels());
Blob3dMaxPoolingBackwardKernel<<<blockCount, threadCount>>>( desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ), isAtomic );
}
C3dMeanPoolingDesc* CCudaMathEngine::Init3dMeanPooling( const CBlobDesc& source,
int filterHeight, int filterWidth, int filterDepth,
int strideHeight, int strideWidth, int strideDepth,
const CBlobDesc& result )
{
CCuda3dMeanPoolingDesc* desc = new CCuda3dMeanPoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterHeight = filterHeight;
desc->Internal.FilterWidth = filterWidth;
desc->Internal.FilterDepth = filterDepth;
desc->Internal.StrideHeight = strideHeight;
desc->Internal.StrideWidth = strideWidth;
desc->Internal.StrideDepth = strideDepth;
return desc;
}
void CCudaMathEngine::Blob3dMeanPooling( const C3dMeanPoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMeanPoolingDescInternal& desc = static_cast<const CCuda3dMeanPoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, result.ObjectCount(),
result.Depth() * result.Height() * result.Width(), result.Channels());
Blob3dMeanPoolingKernel<<<blockCount, threadCount>>>( desc, GetRaw( sourceData ), GetRaw( resultData ) );
}
void CCudaMathEngine::Blob3dMeanPoolingBackward( const C3dMeanPoolingDesc& poolingDesc,
const CFloatHandle& outputDiffData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCuda3dMeanPoolingDescInternal& desc = static_cast<const CCuda3dMeanPoolingDesc&>( poolingDesc ).Internal;
if( desc.FilterHeight != desc.StrideHeight || desc.FilterWidth != desc.StrideWidth || desc.FilterDepth != desc.StrideDepth ) {
// Either the cube blocks used for pooling have nonzero intersections, and we need to add up several diffs,
// or some of the data is skipped when pooling and we need to set diff = 0 for it
VectorFill( inputDiffData, 0, desc.Source.BlobSize() );
}
// Indicates that the cube blocks used for pooling have nonzero intersections, and the diffs should be added up (atomicAdd)
bool isAtomic = desc.FilterHeight > desc.StrideHeight || desc.FilterWidth > desc.StrideWidth || desc.FilterDepth > desc.StrideDepth;
const CCudaBlobDesc& outputDiff = desc.Result;
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid3DMinZYX(1, 1, 32, blockCount, threadCount, outputDiff.ObjectCount(),
outputDiff.Depth() * outputDiff.Height() * outputDiff.Width(), outputDiff.Channels());
Blob3dMeanPoolingBackwardKernel<<<blockCount, threadCount>>>( desc, GetRaw( outputDiffData ),
GetRaw( inputDiffData ), isAtomic );
}
CMaxOverTimePoolingDesc* CCudaMathEngine::InitMaxOverTimePooling( const CBlobDesc& source,
int filterLen, int strideLen, const CBlobDesc& result )
{
CCudaMaxOverTimePoolingDesc* desc = new CCudaMaxOverTimePoolingDesc();
desc->Internal.Source = source;
desc->Internal.Result = result;
desc->Internal.FilterLen = filterLen;
desc->Internal.StrideLen = strideLen;
return desc;
}
void CCudaMathEngine::BlobMaxOverTimePooling( const CMaxOverTimePoolingDesc& poolingDesc, const CFloatHandle& sourceData,
const CIntHandle* maxIndicesData, const CFloatHandle& resultData )
{
ASSERT_EXPR( sourceData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData == 0 || maxIndicesData->GetMathEngine() == this );
ASSERT_EXPR( resultData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& result = desc.Result;
int xSize = (desc.FilterLen + BlobMaxOverTimePoolingCombine - 1) / BlobMaxOverTimePoolingCombine;
xSize = alignXSizeForWarp(xSize);
dim3 blockCount;
dim3 threadCount;
getCudaTaskGrid2DMinYX(1, device->ThreadMaxCount, blockCount, threadCount, result.BlobSize(), xSize);
blockCount.x = 1; // in any case there may only one block along the X coordinate so that we can calculate the maximum inside one block
int sharedSize = threadCount.x * threadCount.y * threadCount.z;
if( maxIndicesData != 0 ) {
BlobMaxOverTimePoolingKernel<<<blockCount, threadCount, sharedSize * sizeof(CValueWithIndex)>>>( desc,
GetRaw( sourceData ), GetRaw( *maxIndicesData ), GetRaw( resultData ) );
} else {
BlobMaxOverTimePoolingKernel<<<blockCount, threadCount, sharedSize * sizeof(float)>>>( desc,
GetRaw( sourceData ), GetRaw( resultData ) );
}
}
void CCudaMathEngine::BlobMaxOverTimePoolingBackward( const CMaxOverTimePoolingDesc& poolingDesc, const CFloatHandle& outputDiffData,
const CIntHandle& maxIndicesData, const CFloatHandle& inputDiffData )
{
ASSERT_EXPR( outputDiffData.GetMathEngine() == this );
ASSERT_EXPR( maxIndicesData.GetMathEngine() == this );
ASSERT_EXPR( inputDiffData.GetMathEngine() == this );
SetCudaDevice( device->DeviceNumber );
const CCudaMaxOverTimePoolingDescInternal& desc = static_cast<const CCudaMaxOverTimePoolingDesc&>( poolingDesc ).Internal;
const CCudaBlobDesc& inputDiff = desc.Source;
const CCudaBlobDesc& outputDiff = desc.Result;
// Set diff to 0
CCudaMathEngine::VectorFill( inputDiffData, 0, inputDiff.BlobSize() );
int blockCount;
int threadCount;
getCudaTaskGrid(blockCount, threadCount, outputDiff.BlobSize(), BlobMaxOverTimePoolingBackwardCombine);
if( desc.StrideLen >= desc.FilterLen ) {
// The pooling areas do not intersect, no need to add
CStoreSet store;
BlobMaxOverTimePoolingBackwardKernel<<<blockCount, threadCount>>>(store, desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ) );
} else {
CStoreAtomicAdd store;
BlobMaxOverTimePoolingBackwardKernel<<<blockCount, threadCount>>>(store, desc, GetRaw( outputDiffData ),
GetRaw( maxIndicesData ), GetRaw( inputDiffData ));
}
}
} // namespace NeoML
#endif // NEOML_USE_CUDA
|
7580485cf1202d93884a2c66705ec87988029577.hip | // !!! This is a file automatically generated by hipify!!!
#include <hip/hip_runtime.h>
#include <stdio.h>
#include <stdlib.h>
__device__ int mandel(float c_re, float c_im, int count)
{
float z_re = c_re, z_im = c_im;
int i;
for (i = 0; i < count; ++i)
{
if (z_re * z_re + z_im * z_im > 4.f)
break;
float new_re = z_re * z_re - z_im * z_im;
float new_im = 2.f * z_re * z_im;
z_re = c_re + new_re;
z_im = c_im + new_im;
}
return i;
}
__global__ void mandelKernel(int *deviceans,float lowerX,float lowerY,float stepX,float stepY,int resX,int resY,int maxIterations)
{
// To avoid error caused by the floating number, use the following pseudo code
//
// float x = lowerX + thisX * stepX;
// float y = lowerY + thisY * stepY;
int localx,localy;
localx=blockIdx.x*blockDim.x + threadIdx.x;
localy=blockIdx.y*blockDim.y+threadIdx.y;
float tmpx,tmpy;
tmpx = lowerX + localx*stepX;
tmpy = lowerY + localy*stepY;
int ans = mandel(tmpx,tmpy,maxIterations);
deviceans[resX* localy + localx] = ans;
}
// Host front-end function that allocates the memory and launches the GPU kernel
void hostFE (float upperX, float upperY, float lowerX, float lowerY, int* img, int resX, int resY, int maxIterations)
{
float stepX = (upperX - lowerX) / resX;
float stepY = (upperY - lowerY) / resY;
int *deviceans;
size_t pitch;
int *hostans;
dim3 threadperblock(16,16);
dim3 numblocks(resX/16,resY/16);
//hipMalloc(&deviceans,resX*resY*sizeof(int));
hipHostMalloc(&hostans,sizeof(int)*resX*resY,hipHostMallocMapped);
hipMallocPitch(&deviceans,&pitch,resX*sizeof(int),resY);
hipLaunchKernelGGL(( mandelKernel), dim3(numblocks),dim3(threadperblock), 0, 0, deviceans,lowerX,lowerY,stepX,stepY,resX,resY,maxIterations);
hipDeviceSynchronize();
hipMemcpy(hostans,deviceans,resY*resX*sizeof(int),hipMemcpyDeviceToHost);
for(int i=0;i<resY;++i){
for(int j = 0;j<resX;++j){
img[i*resX+j]=hostans[i*resX+j];
}
}
hipFree(deviceans);
hipHostFree(hostans);
}
| 7580485cf1202d93884a2c66705ec87988029577.cu | #include <cuda.h>
#include <stdio.h>
#include <stdlib.h>
__device__ int mandel(float c_re, float c_im, int count)
{
float z_re = c_re, z_im = c_im;
int i;
for (i = 0; i < count; ++i)
{
if (z_re * z_re + z_im * z_im > 4.f)
break;
float new_re = z_re * z_re - z_im * z_im;
float new_im = 2.f * z_re * z_im;
z_re = c_re + new_re;
z_im = c_im + new_im;
}
return i;
}
__global__ void mandelKernel(int *deviceans,float lowerX,float lowerY,float stepX,float stepY,int resX,int resY,int maxIterations)
{
// To avoid error caused by the floating number, use the following pseudo code
//
// float x = lowerX + thisX * stepX;
// float y = lowerY + thisY * stepY;
int localx,localy;
localx=blockIdx.x*blockDim.x + threadIdx.x;
localy=blockIdx.y*blockDim.y+threadIdx.y;
float tmpx,tmpy;
tmpx = lowerX + localx*stepX;
tmpy = lowerY + localy*stepY;
int ans = mandel(tmpx,tmpy,maxIterations);
deviceans[resX* localy + localx] = ans;
}
// Host front-end function that allocates the memory and launches the GPU kernel
void hostFE (float upperX, float upperY, float lowerX, float lowerY, int* img, int resX, int resY, int maxIterations)
{
float stepX = (upperX - lowerX) / resX;
float stepY = (upperY - lowerY) / resY;
int *deviceans;
size_t pitch;
int *hostans;
dim3 threadperblock(16,16);
dim3 numblocks(resX/16,resY/16);
//cudaMalloc(&deviceans,resX*resY*sizeof(int));
cudaHostAlloc(&hostans,sizeof(int)*resX*resY,cudaHostAllocMapped);
cudaMallocPitch(&deviceans,&pitch,resX*sizeof(int),resY);
mandelKernel<<<numblocks,threadperblock>>>(deviceans,lowerX,lowerY,stepX,stepY,resX,resY,maxIterations);
cudaDeviceSynchronize();
cudaMemcpy(hostans,deviceans,resY*resX*sizeof(int),cudaMemcpyDeviceToHost);
for(int i=0;i<resY;++i){
for(int j = 0;j<resX;++j){
img[i*resX+j]=hostans[i*resX+j];
}
}
cudaFree(deviceans);
cudaFreeHost(hostans);
}
|
29a021cd16c28814ce5e7f07d18d4712cccc5487.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
* triplet_loss_layer.cu
*
*/
#include <algorithm>
#include <vector>
#include "caffe/layer.hpp"
#include "caffe/util/io.hpp"
#include "caffe/util/math_functions.hpp"
#include "caffe/custom_layers.hpp"
namespace caffe {
template <typename Dtype>
void TripletLossLayer<Dtype>::Forward_gpu(
const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
const int count = bottom[0]->count();
caffe_gpu_sub(
count,
bottom[0]->gpu_data(), // a
bottom[1]->gpu_data(), // p
diff_ap_.mutable_gpu_data()); // a_i-p_i
caffe_gpu_sub(
count,
bottom[0]->gpu_data(), // a
bottom[2]->gpu_data(), // n
diff_an_.mutable_gpu_data()); // a_i-n_i
caffe_gpu_sub(
count,
bottom[1]->gpu_data(), // p
bottom[2]->gpu_data(), // n
diff_pn_.mutable_gpu_data()); // p_i-n_i
caffe_gpu_powx(
count,
diff_ap_.mutable_gpu_data(), // a_i-p_i
Dtype(2),
diff_sq_ap_.mutable_gpu_data()); // (a_i-p_i)^2
caffe_gpu_gemv(
CblasNoTrans,
bottom[0]->num(),
bottom[0]->channels(),
Dtype(1.0), //alpha
diff_sq_ap_.gpu_data(), // (a_i-p_i)^2 // A
summer_vec_.gpu_data(), // x
Dtype(0.0), //belta
dist_sq_ap_.mutable_gpu_data()); // \Sum (a_i-p_i)^2 //y
caffe_gpu_powx(
count,
diff_an_.mutable_gpu_data(), // a_i-n_i
Dtype(2),
diff_sq_an_.mutable_gpu_data()); // (a_i-n_i)^2
caffe_gpu_gemv(
CblasNoTrans,
bottom[0]->num(),
bottom[0]->channels(),
Dtype(1.0), //alpha
diff_sq_an_.gpu_data(), // (a_i-n_i)^2 // A
summer_vec_.gpu_data(), // x
Dtype(0.0), //belta
dist_sq_an_.mutable_gpu_data()); // \Sum (a_i-n_i)^2 //y
Dtype margin = this->layer_param_.triplet_loss_param().margin();
Dtype loss(0.0);
for (int i = 0; i < bottom[0]->num(); ++i) {
loss += sampleW_*::max(margin + dist_sq_ap_.cpu_data()[i] - dist_sq_an_.cpu_data()[i], Dtype(0.0));
}
loss = loss / static_cast<Dtype>(bottom[0]->num()) / Dtype(2);
top[0]->mutable_cpu_data()[0] = loss;
}
template <typename Dtype>
__global__ void CLLBackward(const int count, const int channels,
const Dtype margin, const Dtype alpha,
const Dtype* diff, const Dtype* dist_sq_ap_, const Dtype* dist_sq_an_,
Dtype *bottom_diff) {
CUDA_KERNEL_LOOP(i, count) {
int n = i / channels; // the num index, to access dist_sq_ap_ and dist_sq_an_
Dtype mdist(0.0);
mdist = margin + dist_sq_ap_[n] - dist_sq_an_[n];
if (mdist > 0.0) {
//bottom_diff[i] = alpha*sampleW_*diff[i];
bottom_diff[i] = alpha*diff[i];
}
else {
bottom_diff[i] = 0;
}
}
}
template <typename Dtype>
void TripletLossLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
Dtype margin = this->layer_param_.triplet_loss_param().margin();
const int count = bottom[0]->count();
const int channels = bottom[0]->channels();
for (int i = 0; i < 3; ++i) {
if (propagate_down[i]) {
const Dtype sign = (i < 2) ? -1 : 1;
const Dtype alpha = sign * top[0]->cpu_diff()[0] /
static_cast<Dtype>(bottom[0]->num());
if (i == 0) {
// NOLINT_NEXT_LINE(whitespace/operators)
CLLBackward<Dtype> << <CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS >> >(
count, channels, margin, alpha,
diff_pn_.gpu_data(), // the cached eltwise difference between p and n
dist_sq_ap_.gpu_data(), // the cached square distance between a and p
dist_sq_an_.gpu_data(), // the cached square distance between a and n
bottom[i]->mutable_gpu_diff());
CUDA_POST_KERNEL_CHECK;
}
else if (i == 1) {
// NOLINT_NEXT_LINE(whitespace/operators)
CLLBackward<Dtype> << <CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS >> >(
count, channels, margin, alpha,
diff_ap_.gpu_data(), // the cached eltwise difference between a and p
dist_sq_ap_.gpu_data(), // the cached square distance between a and p
dist_sq_an_.gpu_data(), // the cached square distance between a and n
bottom[i]->mutable_gpu_diff());
CUDA_POST_KERNEL_CHECK;
}
else if (i == 2) {
// NOLINT_NEXT_LINE(whitespace/operators)
CLLBackward<Dtype> << <CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS >> >(
count, channels, margin, alpha,
diff_an_.gpu_data(), // the cached eltwise difference between a and n
dist_sq_ap_.gpu_data(), // the cached square distance between a and p
dist_sq_an_.gpu_data(), // the cached square distance between a and n
bottom[i]->mutable_gpu_diff());
CUDA_POST_KERNEL_CHECK;
}
}
}
}
INSTANTIATE_LAYER_GPU_FUNCS(TripletLossLayer);
} // namespace caffe
| 29a021cd16c28814ce5e7f07d18d4712cccc5487.cu | /*
* triplet_loss_layer.cu
*
*/
#include <algorithm>
#include <vector>
#include "caffe/layer.hpp"
#include "caffe/util/io.hpp"
#include "caffe/util/math_functions.hpp"
#include "caffe/custom_layers.hpp"
namespace caffe {
template <typename Dtype>
void TripletLossLayer<Dtype>::Forward_gpu(
const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
const int count = bottom[0]->count();
caffe_gpu_sub(
count,
bottom[0]->gpu_data(), // a
bottom[1]->gpu_data(), // p
diff_ap_.mutable_gpu_data()); // a_i-p_i
caffe_gpu_sub(
count,
bottom[0]->gpu_data(), // a
bottom[2]->gpu_data(), // n
diff_an_.mutable_gpu_data()); // a_i-n_i
caffe_gpu_sub(
count,
bottom[1]->gpu_data(), // p
bottom[2]->gpu_data(), // n
diff_pn_.mutable_gpu_data()); // p_i-n_i
caffe_gpu_powx(
count,
diff_ap_.mutable_gpu_data(), // a_i-p_i
Dtype(2),
diff_sq_ap_.mutable_gpu_data()); // (a_i-p_i)^2
caffe_gpu_gemv(
CblasNoTrans,
bottom[0]->num(),
bottom[0]->channels(),
Dtype(1.0), //alpha
diff_sq_ap_.gpu_data(), // (a_i-p_i)^2 // A
summer_vec_.gpu_data(), // x
Dtype(0.0), //belta
dist_sq_ap_.mutable_gpu_data()); // \Sum (a_i-p_i)^2 //y
caffe_gpu_powx(
count,
diff_an_.mutable_gpu_data(), // a_i-n_i
Dtype(2),
diff_sq_an_.mutable_gpu_data()); // (a_i-n_i)^2
caffe_gpu_gemv(
CblasNoTrans,
bottom[0]->num(),
bottom[0]->channels(),
Dtype(1.0), //alpha
diff_sq_an_.gpu_data(), // (a_i-n_i)^2 // A
summer_vec_.gpu_data(), // x
Dtype(0.0), //belta
dist_sq_an_.mutable_gpu_data()); // \Sum (a_i-n_i)^2 //y
Dtype margin = this->layer_param_.triplet_loss_param().margin();
Dtype loss(0.0);
for (int i = 0; i < bottom[0]->num(); ++i) {
loss += sampleW_*std::max(margin + dist_sq_ap_.cpu_data()[i] - dist_sq_an_.cpu_data()[i], Dtype(0.0));
}
loss = loss / static_cast<Dtype>(bottom[0]->num()) / Dtype(2);
top[0]->mutable_cpu_data()[0] = loss;
}
template <typename Dtype>
__global__ void CLLBackward(const int count, const int channels,
const Dtype margin, const Dtype alpha,
const Dtype* diff, const Dtype* dist_sq_ap_, const Dtype* dist_sq_an_,
Dtype *bottom_diff) {
CUDA_KERNEL_LOOP(i, count) {
int n = i / channels; // the num index, to access dist_sq_ap_ and dist_sq_an_
Dtype mdist(0.0);
mdist = margin + dist_sq_ap_[n] - dist_sq_an_[n];
if (mdist > 0.0) {
//bottom_diff[i] = alpha*sampleW_*diff[i];
bottom_diff[i] = alpha*diff[i];
}
else {
bottom_diff[i] = 0;
}
}
}
template <typename Dtype>
void TripletLossLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
Dtype margin = this->layer_param_.triplet_loss_param().margin();
const int count = bottom[0]->count();
const int channels = bottom[0]->channels();
for (int i = 0; i < 3; ++i) {
if (propagate_down[i]) {
const Dtype sign = (i < 2) ? -1 : 1;
const Dtype alpha = sign * top[0]->cpu_diff()[0] /
static_cast<Dtype>(bottom[0]->num());
if (i == 0) {
// NOLINT_NEXT_LINE(whitespace/operators)
CLLBackward<Dtype> << <CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS >> >(
count, channels, margin, alpha,
diff_pn_.gpu_data(), // the cached eltwise difference between p and n
dist_sq_ap_.gpu_data(), // the cached square distance between a and p
dist_sq_an_.gpu_data(), // the cached square distance between a and n
bottom[i]->mutable_gpu_diff());
CUDA_POST_KERNEL_CHECK;
}
else if (i == 1) {
// NOLINT_NEXT_LINE(whitespace/operators)
CLLBackward<Dtype> << <CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS >> >(
count, channels, margin, alpha,
diff_ap_.gpu_data(), // the cached eltwise difference between a and p
dist_sq_ap_.gpu_data(), // the cached square distance between a and p
dist_sq_an_.gpu_data(), // the cached square distance between a and n
bottom[i]->mutable_gpu_diff());
CUDA_POST_KERNEL_CHECK;
}
else if (i == 2) {
// NOLINT_NEXT_LINE(whitespace/operators)
CLLBackward<Dtype> << <CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS >> >(
count, channels, margin, alpha,
diff_an_.gpu_data(), // the cached eltwise difference between a and n
dist_sq_ap_.gpu_data(), // the cached square distance between a and p
dist_sq_an_.gpu_data(), // the cached square distance between a and n
bottom[i]->mutable_gpu_diff());
CUDA_POST_KERNEL_CHECK;
}
}
}
}
INSTANTIATE_LAYER_GPU_FUNCS(TripletLossLayer);
} // namespace caffe
|
104e0da8c271230003a1bd3b3fe6cf7acd3973de.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <iostream>
#include <random>
#include <gemm_core/gemm_core.hpp>
constexpr unsigned N = 16;
// Human-readable scalar-type name, used only for the test banner printf.
template <class T>
std::string get_type_name();
template <> std::string get_type_name<float>(){return "float";}
template <> std::string get_type_name<half>(){return "half";}
// Scalar conversion helpers between float and half so the test body can be
// written generically over T (half<->float uses the CUDA/HIP intrinsics).
template <class T, class S>
__device__ __host__ T convert(const S);
template <> __device__ __host__ float convert<float, float>(const float a) {return a;}
template <> __device__ __host__ float convert<float, half >(const half a) {return __half2float(a);}
template <> __device__ __host__ half convert<half , float>(const float a) {return __float2half(a);}
template <> __device__ __host__ half convert<half , half >(const half a) {return a;}
// Launch wrapper for the 16x16 rank-1 update core from gemm_core.
// Only the lane id (threadIdx.x & 0x1f) is passed down, so the kernel is
// meant to be launched with a single warp; the exact work split per lane is
// defined by mtk::gemm_core::ger_core16x16 (external library — see its docs).
template <class T>
__global__ void test_gemv_16x16_kernel(T* const c, const T* const a, const T* const b){
	mtk::gemm_core::ger_core16x16(c, N, a, b, threadIdx.x & 0x1f);
}
// Rank-1 update test for scalar type T.
// C (NxN, initialized to all ones) is updated on the device by
// ger_core16x16 and compared element-wise against the host reference
// a[i]*b[j] + 1; the max squared error is reported as an RMS-style value.
template <class T>
void test_gemv(){
	T* a;
	T* b;
	T* c;
	std::printf("%s\n", get_type_name<T>().c_str());
	// Pinned host allocations, directly addressable from the kernel below.
	hipHostMalloc(&a, N * sizeof(T));
	hipHostMalloc(&b, N * sizeof(T));
	hipHostMalloc(&c, N * N * sizeof(T));
	std::mt19937 mt(std::random_device{}());
	std::uniform_real_distribution<float> dist(-1.0f, 1.0f);
	for(unsigned i = 0; i < N; i++){
		a[i] = convert<T>(dist(mt));
	}
	for(unsigned i = 0; i < N; i++){
		b[i] = convert<T>(dist(mt));
	}
	for(unsigned i = 0; i < N * N; i++){
		c[i] = convert<T>(1.0f);
	}
	hipDeviceSynchronize();
	hipLaunchKernelGGL(( test_gemv_16x16_kernel<T>), dim3(1), dim3(32), 0, 0, c, a, b);
	hipDeviceSynchronize();
	float error = 0.0f;
	for(unsigned i = 0; i < N; i++){
		for(unsigned j = 0; j < N; j++){
			// Host reference: c starts at 1, receives a[i] * b[j].
			const auto ca = convert<float>(a[i]) * convert<float>(b[j]) + 1.0f;
			error = ::max((convert<float>(c[i + j * N]) - ca) * (convert<float>(c[i + j * N]) - ca), error);
		}
	}
	std::printf("error = %e\n", std::sqrt(error));
	// BUGFIX: memory from hipHostMalloc must be released with hipHostFree;
	// hipFree is only valid for device allocations.
	hipHostFree(a);
	hipHostFree(b);
	hipHostFree(c);
}
// Entry point: exercise the rank-1 update for both supported scalar types.
int main() {
	test_gemv<float>();
	test_gemv<half>();
	return 0;
}
| 104e0da8c271230003a1bd3b3fe6cf7acd3973de.cu | #include <iostream>
#include <random>
#include <gemm_core/gemm_core.hpp>
constexpr unsigned N = 16;
// Human-readable scalar-type name, used only for the test banner printf.
template <class T>
std::string get_type_name();
template <> std::string get_type_name<float>(){return "float";}
template <> std::string get_type_name<half>(){return "half";}
// Scalar conversion helpers between float and half so the test body can be
// written generically over T (half<->float uses the CUDA intrinsics).
template <class T, class S>
__device__ __host__ T convert(const S);
template <> __device__ __host__ float convert<float, float>(const float a) {return a;}
template <> __device__ __host__ float convert<float, half >(const half a) {return __half2float(a);}
template <> __device__ __host__ half convert<half , float>(const float a) {return __float2half(a);}
template <> __device__ __host__ half convert<half , half >(const half a) {return a;}
// Launch wrapper for the 16x16 rank-1 update core from gemm_core.
// Only the lane id (threadIdx.x & 0x1f) is passed down, so the kernel is
// meant to be launched with a single warp; the exact work split per lane is
// defined by mtk::gemm_core::ger_core16x16 (external library — see its docs).
template <class T>
__global__ void test_gemv_16x16_kernel(T* const c, const T* const a, const T* const b){
	mtk::gemm_core::ger_core16x16(c, N, a, b, threadIdx.x & 0x1f);
}
// Rank-1 update test for scalar type T.
// C (NxN, initialized to all ones) is updated on the device by
// ger_core16x16 and compared element-wise against the host reference
// a[i]*b[j] + 1; the max squared error is reported as an RMS-style value.
template <class T>
void test_gemv(){
	T* a;
	T* b;
	T* c;
	std::printf("%s\n", get_type_name<T>().c_str());
	// Pinned host allocations, directly addressable from the kernel below.
	cudaMallocHost(&a, N * sizeof(T));
	cudaMallocHost(&b, N * sizeof(T));
	cudaMallocHost(&c, N * N * sizeof(T));
	std::mt19937 mt(std::random_device{}());
	std::uniform_real_distribution<float> dist(-1.0f, 1.0f);
	for(unsigned i = 0; i < N; i++){
		a[i] = convert<T>(dist(mt));
	}
	for(unsigned i = 0; i < N; i++){
		b[i] = convert<T>(dist(mt));
	}
	for(unsigned i = 0; i < N * N; i++){
		c[i] = convert<T>(1.0f);
	}
	cudaDeviceSynchronize();
	test_gemv_16x16_kernel<T><<<1, 32>>>(c, a, b);
	cudaDeviceSynchronize();
	float error = 0.0f;
	for(unsigned i = 0; i < N; i++){
		for(unsigned j = 0; j < N; j++){
			// Host reference: c starts at 1, receives a[i] * b[j].
			const auto ca = convert<float>(a[i]) * convert<float>(b[j]) + 1.0f;
			error = std::max((convert<float>(c[i + j * N]) - ca) * (convert<float>(c[i + j * N]) - ca), error);
		}
	}
	std::printf("error = %e\n", std::sqrt(error));
	// BUGFIX: memory from cudaMallocHost must be released with cudaFreeHost;
	// cudaFree is only valid for device allocations.
	cudaFreeHost(a);
	cudaFreeHost(b);
	cudaFreeHost(c);
}
// Entry point: exercise the rank-1 update for both supported scalar types.
int main() {
	test_gemv<float>();
	test_gemv<half>();
	return 0;
}
|
1d0e904c308c1dc95c4ec00bb11e046a0a7fe167.hip | // !!! This is a file automatically generated by hipify!!!
// Copyright (c) 2017 Sony Corporation. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <nbla/array.hpp>
#include <nbla/variable.hpp>
#include <nbla/cuda/array/cuda_array.hpp>
#include <nbla/cuda/common.hpp>
#include <nbla/cuda/cudnn/cudnn.hpp>
#include <nbla/cuda/cudnn/function/batch_normalization.hpp>
#include <nbla/cuda/function/batch_normalization.hpp>
#include <nbla/cuda/limits.hpp>
#include <type_traits>
namespace nbla {
#define DRV_BN_T() get_dtype_by_cudnn_data_type(derived_bn_dtype_)
// Configures the cuDNN tensor descriptors for the detected input layout
// (NCHW, NHWC/channel-last, or per-activation for 2-D inputs), derives the
// BN parameter descriptor/dtype, and (cuDNN >= 7.4) queries the workspace
// and reserve-space sizes for the fused "Ex" batch-norm path when usable.
template <typename T>
void BatchNormalizationCudaCudnn<T>::setup_impl(const Variables &inputs,
                                                const Variables &outputs) {
  if (outputs.size() == 3) {
    // [WORKAROUND]
    // To use saved mean and variance and to propagate mean and variance
    // gradient are not supported with cuDNN.
    // Because cuDNN's backward interface is different from NNabla's one.
    // So Fall back to CUDA implementation if outputs.size() == 3
    // TODO: Change saved variance to inverse variance like cuDNN
    this->fall_back_func_ = make_shared<BatchNormalizationCuda<T>>(
        this->ctx_, this->axes_, this->decay_rate_, this->eps_,
        this->batch_stat_);
    this->fall_back_func_->setup(inputs, outputs);
    return;
  }
  BatchNormalizationCuda<T>::setup_impl(inputs, outputs);
  cudnn_handle_ = SingletonManager::get<CudnnHandleManager>()->handle(device_);
  NBLA_CHECK(this->axes_.size() == 1, error_code::value,
             "Axes on a single dimension is only supported.");
  // Collapse the input to a 4-D (N, C, H, W=1) view around the BN axis.
  int N = this->size0_;
  int C = this->size1_;
  int H = this->size2_;
  int W = 1;
  mode_ = CUDNN_BATCHNORM_SPATIAL;
  // Channel last is restricted for spatial input
  bool channel_last = this->axes_[0] == inputs[0]->ndim() - 1;
  if (inputs[0]->ndim() == 2) { // typical 1-d affine output with shape (N, C)
    mode_ = CUDNN_BATCHNORM_PER_ACTIVATION;
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(input_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(output_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
  } else if (channel_last) {
    // To prevent NOT SUPPORTED error in CUDNNN, N and H are recalculated.
    // (Large N is not allowed.)
    N = inputs[0]->shape()[0];
    H = inputs[0]->size() / (N * C);
    if (this->batch_stat_) {
      // cudnnBatchNormalizationForwardInference does not support this mode.
      mode_ = CUDNN_BATCHNORM_SPATIAL_PERSISTENT;
    }
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(input_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(output_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
  } else {
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(input_desc_.desc, CUDNN_TENSOR_NCHW,
                                   cudnn_data_type<T>::type(), N, C, H, W));
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(output_desc_.desc, CUDNN_TENSOR_NCHW,
                                   cudnn_data_type<T>::type(), N, C, H, W));
  }
  // Get BN data type.
  NBLA_CUDNN_CHECK(cudnnDeriveBNTensorDescriptor(
      bn_scale_bias_mean_var_desc_.desc, input_desc_.desc, mode_));
  int n, c, h, w, sn, sc, sh, sw; // garbage
  NBLA_CUDNN_CHECK(cudnnGetTensor4dDescriptor(bn_scale_bias_mean_var_desc_.desc,
                                              &derived_bn_dtype_, &n, &c, &h,
                                              &w, &sn, &sc, &sh, &sw));
#if CUDNN_VERSION >= 7400
  // Check if the confition we can use faster BN.
  // Fast "Ex" path: channel-last half-precision input with C divisible by 4,
  // and mini-batch statistics in use.
  can_use_bn_ex_ =
      channel_last && std::is_same<Tw, nbla::HalfCuda>::value && C % 4 == 0;
  can_use_bn_ex_ &= this->batch_stat_;
  if (can_use_bn_ex_) {
    NBLA_CUDNN_CHECK(cudnnGetBatchNormalizationForwardTrainingExWorkspaceSize(
        this->cudnn_handle_, this->mode_, this->ops_,
        this->input_desc_.desc,  /* x desc */
        nullptr,                 /* z desc */
        this->output_desc_.desc, /* y desc */
        this->bn_scale_bias_mean_var_desc_.desc, nullptr,
        &forward_workspace_size_));
    NBLA_CUDNN_CHECK(cudnnGetBatchNormalizationTrainingExReserveSpaceSize(
        this->cudnn_handle_, this->mode_, this->ops_, this->act_desc_.desc,
        this->input_desc_.desc, &reserve_size_));
    NBLA_CUDNN_CHECK(cudnnGetBatchNormalizationBackwardExWorkspaceSize(
        this->cudnn_handle_, this->mode_, this->ops_,
        this->input_desc_.desc,  /* x desc */
        this->output_desc_.desc, /* y desc */
        this->output_desc_.desc, /* dy desc */
        this->input_desc_.desc,  /*dz desc*/
        this->input_desc_.desc,  /* dx desc */
        this->bn_scale_bias_mean_var_desc_.desc, this->act_desc_.desc,
        &backward_workspace_size_));
  }
#endif
}
// Forward dispatcher: batch_stat_ selects between mini-batch statistics
// (training) and the stored running statistics (inference).
template <class T>
void BatchNormalizationCudaCudnn<T>::forward_impl(const Variables &inputs,
                                                  const Variables &outputs) {
  // Bind execution to the device named in the context before any CUDA work.
  cuda_set_device(std::stoi(this->ctx_.device_id));
  if (!this->batch_stat_) {
    forward_impl_global(inputs, outputs); // inference path
    return;
  }
  forward_impl_batch(inputs, outputs); // training path
}
// Forward pass using mini-batch statistics (training mode).
// Writes y (outputs[0]) and the batch mean/var members, and updates the
// running mean/var stored in inputs[3]/inputs[4] in place.
template <class T>
void BatchNormalizationCudaCudnn<T>::forward_impl_batch(
    const Variables &inputs, const Variables &outputs) {
  // Check whether it outputs batch mean and var.
  Variable *batch_mean = &this->mean_;
  Variable *batch_var = &this->var_;
  // Inputs
  const Tw *x = inputs[0]->get_data_pointer<Tw>(this->ctx_);
  const void *beta =
      inputs[1]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *gamma =
      inputs[2]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  // Output
  Tw *y = outputs[0]->cast_data_and_get_pointer<Tw>(this->ctx_, true);
  void *m = batch_mean->data()
                ->cast(DRV_BN_T(), this->ctx_, true)
                ->pointer(); // batch mean
  void *v = batch_var->data()
                ->cast(DRV_BN_T(), this->ctx_, true)
                ->pointer(); // batch var
  // Inputs/Outputs
  void *rm = inputs[3]
                 ->data()
                 ->cast(DRV_BN_T(), this->ctx_)
                 ->pointer(); // running mean
  void *rv =
      inputs[4]->data()->cast(DRV_BN_T(), this->ctx_)->pointer(); // running var
  // cuDNN blending factors: alpha=1, beta=0 -> overwrite y.
  auto a = get_cudnn_scalar_arg<T>(1);
  auto b = get_cudnn_scalar_arg<T>(0);
  // cuDNN rejects eps below CUDNN_BN_MIN_EPSILON.
  double eps = ::max((double)this->eps_, CUDNN_BN_MIN_EPSILON);
#if CUDNN_VERSION >= 7400
  if (can_use_bn_ex_) {
    // Get buffers. The reserve space is kept alive for the matching backward.
    NdArray workspace(Shape_t{(Size_t)forward_workspace_size_});
    reserve_ = make_shared<NdArray>(Shape_t{(Size_t)reserve_size_});
    void *workspace_ptr =
        workspace.cast(DRV_BN_T(), this->ctx_, true)->pointer();
    void *reserve_ptr = reserve_->cast(DRV_BN_T(), this->ctx_, true)->pointer();
    // Execute forward.
    NBLA_CUDNN_CHECK(cudnnBatchNormalizationForwardTrainingEx(
        this->cudnn_handle_, this->mode_, this->ops_, &a, &b, input_desc_.desc,
        x,                    /* x */
        nullptr, nullptr,     /* z */
        output_desc_.desc, y, /* y */
        this->bn_scale_bias_mean_var_desc_.desc, gamma, beta,
        1 - this->decay_rate_, rm, rv, eps, m, v,
        this->act_desc_.desc,    /* activation descriptor */
        workspace_ptr,           /* workspace pointer */
        forward_workspace_size_, /* workspace size */
        reserve_ptr,             /* reserve space pointer */
        reserve_size_            /* reserve space size */
        ));
    return;
  }
#endif
  NBLA_CUDNN_CHECK(cudnnBatchNormalizationForwardTraining(
      cudnn_handle_, mode_, &a, &b, input_desc_.desc, x, output_desc_.desc, y,
      bn_scale_bias_mean_var_desc_.desc, gamma, beta, 1 - this->decay_rate_, rm,
      rv, eps, m, v));
}
// Forward pass using the stored running mean/variance (inference mode).
// Reads gamma/beta and running statistics; only outputs[0] is written.
template <class T>
void BatchNormalizationCudaCudnn<T>::forward_impl_global(
    const Variables &inputs, const Variables &outputs) {
  // Inputs
  const Tw *x = inputs[0]->get_data_pointer<Tw>(this->ctx_);
  const void *beta =
      inputs[1]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *gamma =
      inputs[2]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *rm = inputs[3]
                       ->data()
                       ->get(DRV_BN_T(), this->ctx_)
                       ->const_pointer(); // running mean
  const void *rv = inputs[4]
                       ->data()
                       ->get(DRV_BN_T(), this->ctx_)
                       ->const_pointer(); // running var
  // Output
  Tw *y = outputs[0]->cast_data_and_get_pointer<Tw>(this->ctx_, true);
  // cuDNN blending factors: alpha=1, beta=0 -> overwrite y.
  auto a = get_cudnn_scalar_arg<T>(1);
  auto b = get_cudnn_scalar_arg<T>(0);
  // cuDNN rejects eps below CUDNN_BN_MIN_EPSILON.
  double eps = ::max((double)this->eps_, CUDNN_BN_MIN_EPSILON);
  NBLA_CUDNN_CHECK(cudnnBatchNormalizationForwardInference(
      cudnn_handle_, mode_, &a, &b, input_desc_.desc, x, output_desc_.desc, y,
      bn_scale_bias_mean_var_desc_.desc, gamma, beta, rm, rv, eps));
}
// Backward dispatcher. Only the mini-batch-statistics (training) path is
// implemented; backward through running statistics raises not_implemented.
template <class T>
void BatchNormalizationCudaCudnn<T>::backward_impl(
    const Variables &inputs, const Variables &outputs,
    const vector<bool> &propagate_down, const vector<bool> &accum) {
  cuda_set_device(std::stoi(this->ctx_.device_id));
  if (!this->batch_stat_) { // Testing mode.
    NBLA_ERROR(error_code::not_implemented, "");
  }
  backward_impl_batch(inputs, outputs, propagate_down, accum);
}
// Backward pass for mini-batch-statistics mode.
// Computes dx (inputs[0]), dbeta (inputs[1]) and dgamma (inputs[2]) as
// requested by propagate_down, accumulating into existing gradients per
// `accum` via the cuDNN alpha/beta blending factors.
template <class T>
void BatchNormalizationCudaCudnn<T>::backward_impl_batch(
    const Variables &inputs, const Variables &outputs,
    const vector<bool> &propagate_down, const vector<bool> &accum) {
  if (!(propagate_down[0] || propagate_down[1] || propagate_down[2])) {
    return;
  }
  // Check whether it outputs batch mean/var.
  Variable *batch_mean = &this->mean_;
  Variable *batch_var = &this->var_;
  // Common inputs wrt. gradient.
  const Tw *dy = outputs[0]->get_grad_pointer<Tw>(this->ctx_);
  const void *m =
      batch_mean->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *v =
      batch_var->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const Tw *x = inputs[0]->get_data_pointer<Tw>(this->ctx_);
  // beta factor == 1 accumulates into the existing gradient, 0 overwrites.
  auto a_data = get_cudnn_scalar_arg<T>(propagate_down[0] ? 1 : 0);
  auto b_data = get_cudnn_scalar_arg<T>(accum[0] && propagate_down[0] ? 1 : 0);
  auto a_param =
      get_cudnn_scalar_arg<T>(propagate_down[1] || propagate_down[2] ? 1 : 0);
  auto b_param = a_param;
  if (!(accum[1] || accum[2])) {
    b_param = 0;
  }
  // cuDNN always writes all three gradients, so gradients that are not
  // requested are redirected into a scratch buffer sized for the largest one.
  size_t prop_down_workspace_size = 0;
  if (!propagate_down[0]) {
    prop_down_workspace_size = ::max(
        prop_down_workspace_size, inputs[0]->size() * sizeof_dtype(DRV_BN_T()));
  }
  if (!propagate_down[1] || !propagate_down[2]) {
    prop_down_workspace_size = ::max(
        prop_down_workspace_size, inputs[1]->size() * sizeof_dtype(DRV_BN_T()));
  }
  void *prop_down_buf = nullptr;
  shared_ptr<CudaCachedArray> prop_down_workspace(
      prop_down_workspace_size ? new CudaCachedArray(prop_down_workspace_size,
                                                     dtypes::BYTE, this->ctx_)
                               : nullptr);
  if (prop_down_workspace_size) {
    prop_down_buf = prop_down_workspace->pointer();
  }
  Tw *dx = propagate_down[0]
               ? inputs[0]->cast_grad_and_get_pointer<Tw>(this->ctx_, !accum[0])
               : (Tw *)prop_down_buf;
  const void *beta =
      inputs[1]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *gamma =
      inputs[2]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  // Specify write only flag to prevent unnecessary memset.
  const bool param_diff_write = b_param == 0;
  void *db = propagate_down[1]
                 ? inputs[1]
                       ->grad()
                       ->cast(DRV_BN_T(), this->ctx_, param_diff_write)
                       ->pointer()
                 : prop_down_buf;
  void *dg = propagate_down[2]
                 ? inputs[2]
                       ->grad()
                       ->cast(DRV_BN_T(), this->ctx_, param_diff_write)
                       ->pointer()
                 : prop_down_buf;
  // cuDNN rejects eps below CUDNN_BN_MIN_EPSILON.
  double eps = ::max((double)this->eps_, CUDNN_BN_MIN_EPSILON);
#if CUDNN_VERSION >= 7400
  if (can_use_bn_ex_) {
    // Get buffers. The reserve space must come from the preceding forward.
    NdArray workspace(Shape_t{(Size_t)backward_workspace_size_});
    NBLA_CHECK(reserve_, error_code::value, "Forward is not called.");
    void *workspace_ptr =
        workspace.cast(DRV_BN_T(), this->ctx_, true)->pointer();
    void *reserve_ptr =
        reserve_->cast(DRV_BN_T(), this->ctx_, false /* rw access */)
            ->pointer();
    // Execute backward.
    NBLA_CUDNN_CHECK(cudnnBatchNormalizationBackwardEx(
        this->cudnn_handle_, this->mode_, this->ops_, &a_data, &b_data,
        &a_param, &b_param, input_desc_.desc, x, /* x */
        nullptr, nullptr,                        /* y */
        output_desc_.desc, dy,                   /* dy */
        nullptr, nullptr,                        /* dz == null */
        input_desc_.desc, dx,                    /* dx */
        this->bn_scale_bias_mean_var_desc_.desc, gamma, beta, dg, db, eps, m, v,
        this->act_desc_.desc,     /* activation descriptor */
        workspace_ptr,            /* workspace pointer */
        backward_workspace_size_, /* workspace size */
        reserve_ptr,              /* reserve space pointer */
        reserve_size_             /* reserve space size */
        ));
    // Clear reserved buffer for backward
    reserve_ = nullptr;
    return;
  }
#endif
  NBLA_CUDNN_CHECK(cudnnBatchNormalizationBackward(
      cudnn_handle_, mode_, &a_data, &b_data, &a_param, &b_param,
      input_desc_.desc, x, output_desc_.desc, dy, input_desc_.desc, dx,
      bn_scale_bias_mean_var_desc_.desc, gamma, dg, db, eps, m, v));
}
} // namespace nbla
| 1d0e904c308c1dc95c4ec00bb11e046a0a7fe167.cu | // Copyright (c) 2017 Sony Corporation. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <nbla/array.hpp>
#include <nbla/variable.hpp>
#include <nbla/cuda/array/cuda_array.hpp>
#include <nbla/cuda/common.hpp>
#include <nbla/cuda/cudnn/cudnn.hpp>
#include <nbla/cuda/cudnn/function/batch_normalization.hpp>
#include <nbla/cuda/function/batch_normalization.hpp>
#include <nbla/cuda/limits.hpp>
#include <type_traits>
namespace nbla {
#define DRV_BN_T() get_dtype_by_cudnn_data_type(derived_bn_dtype_)
// Configures the cuDNN tensor descriptors for the detected input layout
// (NCHW, NHWC/channel-last, or per-activation for 2-D inputs), derives the
// BN parameter descriptor/dtype, and (cuDNN >= 7.4) queries the workspace
// and reserve-space sizes for the fused "Ex" batch-norm path when usable.
template <typename T>
void BatchNormalizationCudaCudnn<T>::setup_impl(const Variables &inputs,
                                                const Variables &outputs) {
  if (outputs.size() == 3) {
    // [WORKAROUND]
    // To use saved mean and variance and to propagate mean and variance
    // gradient are not supported with cuDNN.
    // Because cuDNN's backward interface is different from NNabla's one.
    // So Fall back to CUDA implementation if outputs.size() == 3
    // TODO: Change saved variance to inverse variance like cuDNN
    this->fall_back_func_ = make_shared<BatchNormalizationCuda<T>>(
        this->ctx_, this->axes_, this->decay_rate_, this->eps_,
        this->batch_stat_);
    this->fall_back_func_->setup(inputs, outputs);
    return;
  }
  BatchNormalizationCuda<T>::setup_impl(inputs, outputs);
  cudnn_handle_ = SingletonManager::get<CudnnHandleManager>()->handle(device_);
  NBLA_CHECK(this->axes_.size() == 1, error_code::value,
             "Axes on a single dimension is only supported.");
  // Collapse the input to a 4-D (N, C, H, W=1) view around the BN axis.
  int N = this->size0_;
  int C = this->size1_;
  int H = this->size2_;
  int W = 1;
  mode_ = CUDNN_BATCHNORM_SPATIAL;
  // Channel last is restricted for spatial input
  bool channel_last = this->axes_[0] == inputs[0]->ndim() - 1;
  if (inputs[0]->ndim() == 2) { // typical 1-d affine output with shape (N, C)
    mode_ = CUDNN_BATCHNORM_PER_ACTIVATION;
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(input_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(output_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
  } else if (channel_last) {
    // To prevent NOT SUPPORTED error in CUDNNN, N and H are recalculated.
    // (Large N is not allowed.)
    N = inputs[0]->shape()[0];
    H = inputs[0]->size() / (N * C);
    if (this->batch_stat_) {
      // cudnnBatchNormalizationForwardInference does not support this mode.
      mode_ = CUDNN_BATCHNORM_SPATIAL_PERSISTENT;
    }
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(input_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(output_desc_.desc, CUDNN_TENSOR_NHWC,
                                   cudnn_data_type<T>::type(), N, C, H, W));
  } else {
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(input_desc_.desc, CUDNN_TENSOR_NCHW,
                                   cudnn_data_type<T>::type(), N, C, H, W));
    NBLA_CUDNN_CHECK(
        cudnnSetTensor4dDescriptor(output_desc_.desc, CUDNN_TENSOR_NCHW,
                                   cudnn_data_type<T>::type(), N, C, H, W));
  }
  // Get BN data type.
  NBLA_CUDNN_CHECK(cudnnDeriveBNTensorDescriptor(
      bn_scale_bias_mean_var_desc_.desc, input_desc_.desc, mode_));
  int n, c, h, w, sn, sc, sh, sw; // garbage
  NBLA_CUDNN_CHECK(cudnnGetTensor4dDescriptor(bn_scale_bias_mean_var_desc_.desc,
                                              &derived_bn_dtype_, &n, &c, &h,
                                              &w, &sn, &sc, &sh, &sw));
#if CUDNN_VERSION >= 7400
  // Check if the confition we can use faster BN.
  // Fast "Ex" path: channel-last half-precision input with C divisible by 4,
  // and mini-batch statistics in use.
  can_use_bn_ex_ =
      channel_last && std::is_same<Tw, nbla::HalfCuda>::value && C % 4 == 0;
  can_use_bn_ex_ &= this->batch_stat_;
  if (can_use_bn_ex_) {
    NBLA_CUDNN_CHECK(cudnnGetBatchNormalizationForwardTrainingExWorkspaceSize(
        this->cudnn_handle_, this->mode_, this->ops_,
        this->input_desc_.desc,  /* x desc */
        nullptr,                 /* z desc */
        this->output_desc_.desc, /* y desc */
        this->bn_scale_bias_mean_var_desc_.desc, nullptr,
        &forward_workspace_size_));
    NBLA_CUDNN_CHECK(cudnnGetBatchNormalizationTrainingExReserveSpaceSize(
        this->cudnn_handle_, this->mode_, this->ops_, this->act_desc_.desc,
        this->input_desc_.desc, &reserve_size_));
    NBLA_CUDNN_CHECK(cudnnGetBatchNormalizationBackwardExWorkspaceSize(
        this->cudnn_handle_, this->mode_, this->ops_,
        this->input_desc_.desc,  /* x desc */
        this->output_desc_.desc, /* y desc */
        this->output_desc_.desc, /* dy desc */
        this->input_desc_.desc,  /*dz desc*/
        this->input_desc_.desc,  /* dx desc */
        this->bn_scale_bias_mean_var_desc_.desc, this->act_desc_.desc,
        &backward_workspace_size_));
  }
#endif
}
// Forward dispatcher: batch_stat_ selects between mini-batch statistics
// (training) and the stored running statistics (inference).
template <class T>
void BatchNormalizationCudaCudnn<T>::forward_impl(const Variables &inputs,
                                                  const Variables &outputs) {
  // Bind execution to the device named in the context before any CUDA work.
  cuda_set_device(std::stoi(this->ctx_.device_id));
  if (!this->batch_stat_) {
    forward_impl_global(inputs, outputs); // inference path
    return;
  }
  forward_impl_batch(inputs, outputs); // training path
}
// Forward pass using mini-batch statistics (training mode).
// Writes y (outputs[0]) and the batch mean/var members, and updates the
// running mean/var stored in inputs[3]/inputs[4] in place.
template <class T>
void BatchNormalizationCudaCudnn<T>::forward_impl_batch(
    const Variables &inputs, const Variables &outputs) {
  // Check whether it outputs batch mean and var.
  Variable *batch_mean = &this->mean_;
  Variable *batch_var = &this->var_;
  // Inputs
  const Tw *x = inputs[0]->get_data_pointer<Tw>(this->ctx_);
  const void *beta =
      inputs[1]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *gamma =
      inputs[2]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  // Output
  Tw *y = outputs[0]->cast_data_and_get_pointer<Tw>(this->ctx_, true);
  void *m = batch_mean->data()
                ->cast(DRV_BN_T(), this->ctx_, true)
                ->pointer(); // batch mean
  void *v = batch_var->data()
                ->cast(DRV_BN_T(), this->ctx_, true)
                ->pointer(); // batch var
  // Inputs/Outputs
  void *rm = inputs[3]
                 ->data()
                 ->cast(DRV_BN_T(), this->ctx_)
                 ->pointer(); // running mean
  void *rv =
      inputs[4]->data()->cast(DRV_BN_T(), this->ctx_)->pointer(); // running var
  // cuDNN blending factors: alpha=1, beta=0 -> overwrite y.
  auto a = get_cudnn_scalar_arg<T>(1);
  auto b = get_cudnn_scalar_arg<T>(0);
  // cuDNN rejects eps below CUDNN_BN_MIN_EPSILON.
  double eps = std::max((double)this->eps_, CUDNN_BN_MIN_EPSILON);
#if CUDNN_VERSION >= 7400
  if (can_use_bn_ex_) {
    // Get buffers. The reserve space is kept alive for the matching backward.
    NdArray workspace(Shape_t{(Size_t)forward_workspace_size_});
    reserve_ = make_shared<NdArray>(Shape_t{(Size_t)reserve_size_});
    void *workspace_ptr =
        workspace.cast(DRV_BN_T(), this->ctx_, true)->pointer();
    void *reserve_ptr = reserve_->cast(DRV_BN_T(), this->ctx_, true)->pointer();
    // Execute forward.
    NBLA_CUDNN_CHECK(cudnnBatchNormalizationForwardTrainingEx(
        this->cudnn_handle_, this->mode_, this->ops_, &a, &b, input_desc_.desc,
        x,                    /* x */
        nullptr, nullptr,     /* z */
        output_desc_.desc, y, /* y */
        this->bn_scale_bias_mean_var_desc_.desc, gamma, beta,
        1 - this->decay_rate_, rm, rv, eps, m, v,
        this->act_desc_.desc,    /* activation descriptor */
        workspace_ptr,           /* workspace pointer */
        forward_workspace_size_, /* workspace size */
        reserve_ptr,             /* reserve space pointer */
        reserve_size_            /* reserve space size */
        ));
    return;
  }
#endif
  NBLA_CUDNN_CHECK(cudnnBatchNormalizationForwardTraining(
      cudnn_handle_, mode_, &a, &b, input_desc_.desc, x, output_desc_.desc, y,
      bn_scale_bias_mean_var_desc_.desc, gamma, beta, 1 - this->decay_rate_, rm,
      rv, eps, m, v));
}
// Forward pass using the stored running mean/variance (inference mode).
// Reads gamma/beta and running statistics; only outputs[0] is written.
template <class T>
void BatchNormalizationCudaCudnn<T>::forward_impl_global(
    const Variables &inputs, const Variables &outputs) {
  // Inputs
  const Tw *x = inputs[0]->get_data_pointer<Tw>(this->ctx_);
  const void *beta =
      inputs[1]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *gamma =
      inputs[2]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *rm = inputs[3]
                       ->data()
                       ->get(DRV_BN_T(), this->ctx_)
                       ->const_pointer(); // running mean
  const void *rv = inputs[4]
                       ->data()
                       ->get(DRV_BN_T(), this->ctx_)
                       ->const_pointer(); // running var
  // Output
  Tw *y = outputs[0]->cast_data_and_get_pointer<Tw>(this->ctx_, true);
  // cuDNN blending factors: alpha=1, beta=0 -> overwrite y.
  auto a = get_cudnn_scalar_arg<T>(1);
  auto b = get_cudnn_scalar_arg<T>(0);
  // cuDNN rejects eps below CUDNN_BN_MIN_EPSILON.
  double eps = std::max((double)this->eps_, CUDNN_BN_MIN_EPSILON);
  NBLA_CUDNN_CHECK(cudnnBatchNormalizationForwardInference(
      cudnn_handle_, mode_, &a, &b, input_desc_.desc, x, output_desc_.desc, y,
      bn_scale_bias_mean_var_desc_.desc, gamma, beta, rm, rv, eps));
}
// Backward dispatcher. Only the mini-batch-statistics (training) path is
// implemented; backward through running statistics raises not_implemented.
template <class T>
void BatchNormalizationCudaCudnn<T>::backward_impl(
    const Variables &inputs, const Variables &outputs,
    const vector<bool> &propagate_down, const vector<bool> &accum) {
  cuda_set_device(std::stoi(this->ctx_.device_id));
  if (!this->batch_stat_) { // Testing mode.
    NBLA_ERROR(error_code::not_implemented, "");
  }
  backward_impl_batch(inputs, outputs, propagate_down, accum);
}
// Backward pass for mini-batch-statistics mode.
// Computes dx (inputs[0]), dbeta (inputs[1]) and dgamma (inputs[2]) as
// requested by propagate_down, accumulating into existing gradients per
// `accum` via the cuDNN alpha/beta blending factors.
template <class T>
void BatchNormalizationCudaCudnn<T>::backward_impl_batch(
    const Variables &inputs, const Variables &outputs,
    const vector<bool> &propagate_down, const vector<bool> &accum) {
  if (!(propagate_down[0] || propagate_down[1] || propagate_down[2])) {
    return;
  }
  // Check whether it outputs batch mean/var.
  Variable *batch_mean = &this->mean_;
  Variable *batch_var = &this->var_;
  // Common inputs wrt. gradient.
  const Tw *dy = outputs[0]->get_grad_pointer<Tw>(this->ctx_);
  const void *m =
      batch_mean->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *v =
      batch_var->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const Tw *x = inputs[0]->get_data_pointer<Tw>(this->ctx_);
  // beta factor == 1 accumulates into the existing gradient, 0 overwrites.
  auto a_data = get_cudnn_scalar_arg<T>(propagate_down[0] ? 1 : 0);
  auto b_data = get_cudnn_scalar_arg<T>(accum[0] && propagate_down[0] ? 1 : 0);
  auto a_param =
      get_cudnn_scalar_arg<T>(propagate_down[1] || propagate_down[2] ? 1 : 0);
  auto b_param = a_param;
  if (!(accum[1] || accum[2])) {
    b_param = 0;
  }
  // cuDNN always writes all three gradients, so gradients that are not
  // requested are redirected into a scratch buffer sized for the largest one.
  size_t prop_down_workspace_size = 0;
  if (!propagate_down[0]) {
    prop_down_workspace_size = std::max(
        prop_down_workspace_size, inputs[0]->size() * sizeof_dtype(DRV_BN_T()));
  }
  if (!propagate_down[1] || !propagate_down[2]) {
    prop_down_workspace_size = std::max(
        prop_down_workspace_size, inputs[1]->size() * sizeof_dtype(DRV_BN_T()));
  }
  void *prop_down_buf = nullptr;
  shared_ptr<CudaCachedArray> prop_down_workspace(
      prop_down_workspace_size ? new CudaCachedArray(prop_down_workspace_size,
                                                     dtypes::BYTE, this->ctx_)
                               : nullptr);
  if (prop_down_workspace_size) {
    prop_down_buf = prop_down_workspace->pointer();
  }
  Tw *dx = propagate_down[0]
               ? inputs[0]->cast_grad_and_get_pointer<Tw>(this->ctx_, !accum[0])
               : (Tw *)prop_down_buf;
  const void *beta =
      inputs[1]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  const void *gamma =
      inputs[2]->data()->get(DRV_BN_T(), this->ctx_)->const_pointer();
  // Specify write only flag to prevent unnecessary memset.
  const bool param_diff_write = b_param == 0;
  void *db = propagate_down[1]
                 ? inputs[1]
                       ->grad()
                       ->cast(DRV_BN_T(), this->ctx_, param_diff_write)
                       ->pointer()
                 : prop_down_buf;
  void *dg = propagate_down[2]
                 ? inputs[2]
                       ->grad()
                       ->cast(DRV_BN_T(), this->ctx_, param_diff_write)
                       ->pointer()
                 : prop_down_buf;
  // cuDNN rejects eps below CUDNN_BN_MIN_EPSILON.
  double eps = std::max((double)this->eps_, CUDNN_BN_MIN_EPSILON);
#if CUDNN_VERSION >= 7400
  if (can_use_bn_ex_) {
    // Get buffers. The reserve space must come from the preceding forward.
    NdArray workspace(Shape_t{(Size_t)backward_workspace_size_});
    NBLA_CHECK(reserve_, error_code::value, "Forward is not called.");
    void *workspace_ptr =
        workspace.cast(DRV_BN_T(), this->ctx_, true)->pointer();
    void *reserve_ptr =
        reserve_->cast(DRV_BN_T(), this->ctx_, false /* rw access */)
            ->pointer();
    // Execute backward.
    NBLA_CUDNN_CHECK(cudnnBatchNormalizationBackwardEx(
        this->cudnn_handle_, this->mode_, this->ops_, &a_data, &b_data,
        &a_param, &b_param, input_desc_.desc, x, /* x */
        nullptr, nullptr,                        /* y */
        output_desc_.desc, dy,                   /* dy */
        nullptr, nullptr,                        /* dz == null */
        input_desc_.desc, dx,                    /* dx */
        this->bn_scale_bias_mean_var_desc_.desc, gamma, beta, dg, db, eps, m, v,
        this->act_desc_.desc,     /* activation descriptor */
        workspace_ptr,            /* workspace pointer */
        backward_workspace_size_, /* workspace size */
        reserve_ptr,              /* reserve space pointer */
        reserve_size_             /* reserve space size */
        ));
    // Clear reserved buffer for backward
    reserve_ = nullptr;
    return;
  }
#endif
  NBLA_CUDNN_CHECK(cudnnBatchNormalizationBackward(
      cudnn_handle_, mode_, &a_data, &b_data, &a_param, &b_param,
      input_desc_.desc, x, output_desc_.desc, dy, input_desc_.desc, dx,
      bn_scale_bias_mean_var_desc_.desc, gamma, dg, db, eps, m, v));
}
} // namespace nbla
|
1d00306c8b81cc3cd8b731c8b160c733965df6e2.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
* Copyright (c) 2016 Jean-Noel Braun.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifdef BCNN_USE_CUDA
#include "bcnn/bcnn.h"
/* Row-major SGEMM wrapper: C = ALPHA * op(A) * op(B) + BETA * C, mapped onto
 * column-major cuBLAS by swapping A/B and the transpose flags (the usual
 * row-major trick).
 * NOTE(review): the `lda`, `ldb` and `ldc` parameters are ignored — leading
 * dimensions are recomputed from M/N/K and `N` is passed as ldc. This is only
 * correct for densely packed matrices; confirm that no caller relies on
 * custom strides before changing or reusing this. */
void bcnn_cuda_gemm(int TA, int TB, int M, int N, int K, float ALPHA,
float *A_gpu, int lda,
float *B_gpu, int ldb,
float BETA,
float *C_gpu, int ldc)
{
	hipblasHandle_t handle = bcnn_cublas_handle();
	int ldaa = (TA == 0) ? K : M;
	int ldbb = (TB == 0) ? N : K;
	hipblasStatus_t status = hipblasSgemm(handle, (TB ? HIPBLAS_OP_T : HIPBLAS_OP_N),
		(TA ? HIPBLAS_OP_T : HIPBLAS_OP_N), N, M, K, &ALPHA, B_gpu, ldbb, A_gpu, ldaa, &BETA, C_gpu, N);
	bcnn_cublas_check(status);
}
/* y = alpha * op(A) * x + beta * y for a row-major M x N matrix A.
 * Row-major storage is mapped onto column-major cuBLAS by passing the
 * dimensions swapped together with the transpose flag. */
void bcnn_cuda_gemv(int TA, const int M,
const int N, const float alpha, const float* A, const float* x,
const float beta, float* y)
{
	hipblasHandle_t handle = bcnn_cublas_handle();
	const hipblasOperation_t op = TA ? HIPBLAS_OP_T : HIPBLAS_OP_N;
	bcnn_cublas_check(
		hipblasSgemv(handle, op, N, M, &alpha, A, N, x, 1, &beta, y, 1));
}
/* Device kernel: set every INCX-strided element of X (N elements) to ALPHA. */
__global__ void _bcnn_cuda_fill_f32_kernel(int N, float ALPHA, float *X, int INCX)
{
	const int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
	if (i >= N)
		return; /* tail guard: grid rarely divides N evenly */
	X[i * INCX] = ALPHA;
}

/* Fill the strided device vector x with the constant alpha. */
void bcnn_cuda_fill_f32(int n, float alpha, float *x, int incx)
{
	hipLaunchKernelGGL(( _bcnn_cuda_fill_f32_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, alpha, x, incx);
	bcnn_cuda_check(hipPeekAtLastError());
}
/* Strided device copy: y = x (n floats) via cuBLAS scopy. */
void bcnn_cuda_copy_f32(int n, float *x, int incx, float *y, int incy)
{
	bcnn_cublas_check(
		hipblasScopy(bcnn_cublas_handle(), n, x, incx, y, incy));
}
/* Strided axpy on the device: y += alpha * x (n floats). */
void bcnn_cuda_axpy(int n, float alpha, float *x, int incx, float *y, int incy)
{
	bcnn_cublas_check(
		hipblasSaxpy(bcnn_cublas_handle(), n, &alpha, x, incx, y, incy));
}
/* Strided in-place scale on the device: x *= alpha (n floats). */
void bcnn_cuda_scal(int n, float alpha, float *x, int incx)
{
	bcnn_cublas_check(
		hipblasSscal(bcnn_cublas_handle(), n, &alpha, x, incx));
}
/* Device kernel: elementwise y[i] = a[i] + b[i]. */
__global__ void _bcnn_vadd_kernel(int n, float *a, float *b, float *y)
{
	int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
	if (i < n)
		y[i] = a[i] + b[i];
}

/* y = a + b, elementwise over n device floats.
 * Now checks the launch status, consistent with bcnn_cuda_fill_f32; the
 * original silently ignored kernel-launch errors. */
void bcnn_cuda_vadd(int n, float *a, float *b, float *y)
{
	hipLaunchKernelGGL(( _bcnn_vadd_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, a, b, y);
	bcnn_cuda_check(hipPeekAtLastError());
}
/* Device kernel: elementwise y[i] = a[i] - b[i]. */
__global__ void _bcnn_vsub_kernel(int n, float *a, float *b, float *y)
{
	int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
	if (i < n)
		y[i] = a[i] - b[i];
}

/* y = a - b, elementwise over n device floats.
 * Now checks the launch status, consistent with bcnn_cuda_fill_f32; the
 * original silently ignored kernel-launch errors. */
void bcnn_cuda_vsub(int n, float *a, float *b, float *y)
{
	hipLaunchKernelGGL(( _bcnn_vsub_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, a, b, y);
	bcnn_cuda_check(hipPeekAtLastError());
}
__global__ void _bcnn_vmul_kernel(int n, float *a, float *b, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = a[i] * b[i];
}
void bcnn_cuda_vmul(int n, float *a, float *b, float *y)
{
hipLaunchKernelGGL(( _bcnn_vmul_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, a, b, y);
}
__global__ void _bcnn_vdiv_kernel(int n, float *a, float *b, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = a[i] / b[i];
}
void bcnn_cuda_vdiv(int n, float *a, float *b, float *y)
{
hipLaunchKernelGGL(( _bcnn_vdiv_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, a, b, y);
}
__global__ void _bcnn_pow_kernel(int n, float *x, float a, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = pow(x[i], a);
}
void bcnn_cuda_pow(int n, float *x, float a, float *y)
{
hipLaunchKernelGGL(( _bcnn_pow_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, x, a, y);
}
void bcnn_cuda_axpby(int n, float a, float *x, float b, float *y)
{
bcnn_cuda_scal(n, b, y, 1);
bcnn_cuda_axpy(n, a, x, 1, y, 1);
}
__global__ void _bcnn_add_scalar_kernel(int n, float a, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] += a;
}
void bcnn_cuda_add_scalar(int n, float a, float* y)
{
hipLaunchKernelGGL(( _bcnn_add_scalar_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, a, y);
}
__global__ void _bcnn_vsum_kernel(int n, float *x, float *sum)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
*sum += x[i];
}
void bcnn_cuda_vsum(int n, float *x, float *sum)
{
hipLaunchKernelGGL(( _bcnn_vsum_kernel), dim3(bcnn_cuda_gridsize(n)), dim3(BCNN_CUDA_THREADS), 0, 0, n, x, sum);
}
__global__ void _mean_variance_forward_kernel(float *x, int b, int c, int wxh, float *mean, float *var)
{
float scale = 1.0f / (b * wxh);
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x, j, k, ind;
if (i >= c)
return;
mean[i] = 0;
for (j = 0; j < b; ++j){
for (k = 0; k < wxh; ++k){
ind = j *c * wxh + i * wxh + k;
mean[i] += x[ind];
var[i] += x[ind] * x[ind];
}
}
mean[i] *= scale;
var[i] = var[i] * scale - mean[i] * mean[i];
}
void bcnn_cuda_mean_variance_forward(float *x, int b, int c, int wxh, float *mean, float *var)
{
hipLaunchKernelGGL(( _mean_variance_forward_kernel), dim3(bcnn_cuda_gridsize(c)), dim3(BCNN_CUDA_THREADS), 0, 0, x, b, c, wxh, mean, var);
}
__global__ void _norm_forward_kernel(float *x, float *mean, float *variance, int b, int c, int wxh)
{
int ind = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
int j = (ind / wxh) % c;
if (ind >= b * c * wxh)
return;
x[ind] = (x[ind] - mean[j]) / (sqrt(variance[j] + 0.000001f));
}
void bcnn_cuda_norm_forward(float *x, float *mean, float *variance, int b, int c, int wxh)
{
hipLaunchKernelGGL(( _norm_forward_kernel), dim3(bcnn_cuda_gridsize(b * c * wxh)), dim3(BCNN_CUDA_THREADS), 0, 0, x, mean, variance, b, c, wxh);
}
__global__ void _norm_backward_kernel(float *x, float *mean, float *var, float *mean_diff, float *var_diff, int b, int c, int wxh, float *grad)
{
int ind = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
int j = (ind / wxh) % c;
if (ind >= b * c * wxh)
return;
grad[ind] = grad[ind] * 1.0f / (sqrtf(var[j] + 0.00001f)) + var_diff[j] * 2.0f * (x[ind] - mean[j]) / (wxh * b) + mean_diff[j] / (wxh * b);
}
void bcnn_cuda_norm_backward(float *x, float *mean, float *var, float *mean_diff, float *var_diff, int b, int c, int wxh, float *grad)
{
hipLaunchKernelGGL(( _norm_backward_kernel), dim3(bcnn_cuda_gridsize(b * c * wxh)), dim3(BCNN_CUDA_THREADS), 0, 0, x, mean, var, mean_diff, var_diff, b, c, wxh, grad);
}
__global__ void _mean_variance_backward_kernel(float *x, float *grad, float *mean, float *var, int b, int c, int wxh, float *mean_diff, float *var_diff)
{
int i = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x, j, k, ind;
if (i >= c)
return;
mean_diff[i] = 0;
var_diff[i] = 0;
for (j = 0; j < b; ++j) {
for (k = 0; k < wxh; ++k) {
ind = j * c * wxh + i * wxh + k;
mean_diff[i] += grad[ind];
var_diff[i] += grad[ind] * (x[ind] - mean[i]);
}
}
mean_diff[i] *= (-1.0f / sqrt (var[i] + 0.00001f));
var_diff[i] *= -0.5f / (var[i] * sqrtf(var[i]) + 0.00001f);
}
void bcnn_cuda_mean_variance_backward(float *x, float *grad, float *mean, float *var, int b, int c, int wxh, float *mean_diff, float *var_diff)
{
hipLaunchKernelGGL(( _mean_variance_backward_kernel), dim3(bcnn_cuda_gridsize(c)), dim3(BCNN_CUDA_THREADS), 0, 0, x, grad, mean, var, b, c, wxh, mean_diff, var_diff);
}
#endif | 1d00306c8b81cc3cd8b731c8b160c733965df6e2.cu | /*
* Copyright (c) 2016 Jean-Noel Braun.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifdef BCNN_USE_CUDA
#include "bcnn/bcnn.h"
void bcnn_cuda_gemm(int TA, int TB, int M, int N, int K, float ALPHA,
float *A_gpu, int lda,
float *B_gpu, int ldb,
float BETA,
float *C_gpu, int ldc)
{
cublasHandle_t handle = bcnn_cublas_handle();
int ldaa = (TA == 0) ? K : M;
int ldbb = (TB == 0) ? N : K;
cublasStatus_t status = cublasSgemm(handle, (TB ? CUBLAS_OP_T : CUBLAS_OP_N),
(TA ? CUBLAS_OP_T : CUBLAS_OP_N), N, M, K, &ALPHA, B_gpu, ldbb, A_gpu, ldaa, &BETA, C_gpu, N);
bcnn_cublas_check(status);
}
void bcnn_cuda_gemv(int TA, const int M,
const int N, const float alpha, const float* A, const float* x,
const float beta, float* y)
{
cublasHandle_t handle = bcnn_cublas_handle();
cublasOperation_t cuTA = (TA ? CUBLAS_OP_T : CUBLAS_OP_N);
cublasStatus_t status = cublasSgemv(handle, cuTA, N, M, &alpha,
A, N, x, 1, &beta, y, 1);
bcnn_cublas_check(status);
}
__global__ void _bcnn_cuda_fill_f32_kernel(int N, float ALPHA, float *X, int INCX)
{
int i = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
if (i < N)
X[i*INCX] = ALPHA;
}
void bcnn_cuda_fill_f32(int n, float alpha, float *x, int incx)
{
_bcnn_cuda_fill_f32_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, alpha, x, incx);
bcnn_cuda_check(cudaPeekAtLastError());
}
void bcnn_cuda_copy_f32(int n, float *x, int incx, float *y, int incy)
{
cublasHandle_t handle = bcnn_cublas_handle();
cublasStatus_t status = cublasScopy(handle, n, x, incx, y, incy);
bcnn_cublas_check(status);
}
void bcnn_cuda_axpy(int n, float alpha, float *x, int incx, float *y, int incy)
{
cublasHandle_t handle = bcnn_cublas_handle();
cublasStatus_t status = cublasSaxpy(handle, n, &alpha, x, incx, y, incy);
bcnn_cublas_check(status);
}
void bcnn_cuda_scal(int n, float alpha, float *x, int incx)
{
cublasHandle_t handle = bcnn_cublas_handle();
cublasStatus_t status = cublasSscal(handle, n, &alpha, x, incx);
bcnn_cublas_check(status);
}
__global__ void _bcnn_vadd_kernel(int n, float *a, float *b, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = a[i] + b[i];
}
void bcnn_cuda_vadd(int n, float *a, float *b, float *y)
{
_bcnn_vadd_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, a, b, y);
}
__global__ void _bcnn_vsub_kernel(int n, float *a, float *b, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = a[i] - b[i];
}
void bcnn_cuda_vsub(int n, float *a, float *b, float *y)
{
_bcnn_vsub_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, a, b, y);
}
__global__ void _bcnn_vmul_kernel(int n, float *a, float *b, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = a[i] * b[i];
}
void bcnn_cuda_vmul(int n, float *a, float *b, float *y)
{
_bcnn_vmul_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, a, b, y);
}
__global__ void _bcnn_vdiv_kernel(int n, float *a, float *b, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = a[i] / b[i];
}
void bcnn_cuda_vdiv(int n, float *a, float *b, float *y)
{
_bcnn_vdiv_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, a, b, y);
}
__global__ void _bcnn_pow_kernel(int n, float *x, float a, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] = pow(x[i], a);
}
void bcnn_cuda_pow(int n, float *x, float a, float *y)
{
_bcnn_pow_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, x, a, y);
}
void bcnn_cuda_axpby(int n, float a, float *x, float b, float *y)
{
bcnn_cuda_scal(n, b, y, 1);
bcnn_cuda_axpy(n, a, x, 1, y, 1);
}
__global__ void _bcnn_add_scalar_kernel(int n, float a, float *y)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
y[i] += a;
}
void bcnn_cuda_add_scalar(int n, float a, float* y)
{
_bcnn_add_scalar_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, a, y);
}
__global__ void _bcnn_vsum_kernel(int n, float *x, float *sum)
{
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
if (i < n)
*sum += x[i];
}
void bcnn_cuda_vsum(int n, float *x, float *sum)
{
_bcnn_vsum_kernel<<<bcnn_cuda_gridsize(n), BCNN_CUDA_THREADS>>>(n, x, sum);
}
__global__ void _mean_variance_forward_kernel(float *x, int b, int c, int wxh, float *mean, float *var)
{
float scale = 1.0f / (b * wxh);
int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x, j, k, ind;
if (i >= c)
return;
mean[i] = 0;
for (j = 0; j < b; ++j){
for (k = 0; k < wxh; ++k){
ind = j *c * wxh + i * wxh + k;
mean[i] += x[ind];
var[i] += x[ind] * x[ind];
}
}
mean[i] *= scale;
var[i] = var[i] * scale - mean[i] * mean[i];
}
void bcnn_cuda_mean_variance_forward(float *x, int b, int c, int wxh, float *mean, float *var)
{
_mean_variance_forward_kernel<<<bcnn_cuda_gridsize(c), BCNN_CUDA_THREADS>>>(x, b, c, wxh, mean, var);
}
__global__ void _norm_forward_kernel(float *x, float *mean, float *variance, int b, int c, int wxh)
{
int ind = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
int j = (ind / wxh) % c;
if (ind >= b * c * wxh)
return;
x[ind] = (x[ind] - mean[j]) / (sqrt(variance[j] + 0.000001f));
}
void bcnn_cuda_norm_forward(float *x, float *mean, float *variance, int b, int c, int wxh)
{
_norm_forward_kernel<<<bcnn_cuda_gridsize(b * c * wxh), BCNN_CUDA_THREADS>>>(x, mean, variance, b, c, wxh);
}
__global__ void _norm_backward_kernel(float *x, float *mean, float *var, float *mean_diff, float *var_diff, int b, int c, int wxh, float *grad)
{
int ind = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
int j = (ind / wxh) % c;
if (ind >= b * c * wxh)
return;
grad[ind] = grad[ind] * 1.0f / (sqrtf(var[j] + 0.00001f)) + var_diff[j] * 2.0f * (x[ind] - mean[j]) / (wxh * b) + mean_diff[j] / (wxh * b);
}
void bcnn_cuda_norm_backward(float *x, float *mean, float *var, float *mean_diff, float *var_diff, int b, int c, int wxh, float *grad)
{
_norm_backward_kernel<<<bcnn_cuda_gridsize(b * c * wxh), BCNN_CUDA_THREADS>>>(x, mean, var, mean_diff, var_diff, b, c, wxh, grad);
}
__global__ void _mean_variance_backward_kernel(float *x, float *grad, float *mean, float *var, int b, int c, int wxh, float *mean_diff, float *var_diff)
{
int i = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x, j, k, ind;
if (i >= c)
return;
mean_diff[i] = 0;
var_diff[i] = 0;
for (j = 0; j < b; ++j) {
for (k = 0; k < wxh; ++k) {
ind = j * c * wxh + i * wxh + k;
mean_diff[i] += grad[ind];
var_diff[i] += grad[ind] * (x[ind] - mean[i]);
}
}
mean_diff[i] *= (-1.0f / sqrt (var[i] + 0.00001f));
var_diff[i] *= -0.5f / (var[i] * sqrtf(var[i]) + 0.00001f);
}
void bcnn_cuda_mean_variance_backward(float *x, float *grad, float *mean, float *var, int b, int c, int wxh, float *mean_diff, float *var_diff)
{
_mean_variance_backward_kernel<<<bcnn_cuda_gridsize(c), BCNN_CUDA_THREADS>>>(x, grad, mean, var, b, c, wxh, mean_diff, var_diff);
}
#endif |
e116bf20c65226178565bec02fa40cb6ccc41ba2.hip | // !!! This is a file automatically generated by hipify!!!
#include<stdio.h>
#include<hipfft.h>
#include <hip/hip_complex.h>
#define BATCH 1
int main(){
// Transform each column of a 2d array with 10 rows and 3 columns:
//int rank = 1; /* not 2: we are computing 1d transforms */
//int n[] = {10}; /* 1d transforms of length 10 */
//int howmany = 3;
// int idist = odist = 1;
// int istride = ostride = 3; /* distance between two elements in
//
//
//
// the same column */
int n[]={10};
int *iembed = n, *oembed = n;
int rank=1;
int howmany = 3;
int idist=1,odist= 1;
int istride=3,ostride=3;
hipfftComplex** datacpu[10][3], data[10][3];
datacpu= (hipfftComplex **)malloc(sizeof(hipfftComplex)*10*3);
datacpu[0][1]=6;
datacpu[1][3]=7;
hipfftHandle plan;
hipMalloc((void**)&data, sizeof(hipfftComplex)*10*3);
hipMemcpy(datacpu,data, 10*3*sizeof(hipfftComplex), hipMemcpyHostToDevice);
hipfftPlanMany(&plan, rank, n, &iembed, istride, idist, &oembed, ostride, odist, HIPFFT_C2C, BATCH);
hipfftExecC2C(plan, data, data, HIPFFT_FORWARD);
hipDeviceSynchronize();
//free memory
hipfftDestroy(plan);
hipFree(data);
printf("fin");
return 0;
}
| e116bf20c65226178565bec02fa40cb6ccc41ba2.cu | #include<stdio.h>
#include<cufft.h>
#include <cuComplex.h>
#define BATCH 1
int main(){
// Transform each column of a 2d array with 10 rows and 3 columns:
//int rank = 1; /* not 2: we are computing 1d transforms */
//int n[] = {10}; /* 1d transforms of length 10 */
//int howmany = 3;
// int idist = odist = 1;
// int istride = ostride = 3; /* distance between two elements in
//
//
//
// the same column */
int n[]={10};
int *iembed = n, *oembed = n;
int rank=1;
int howmany = 3;
int idist=1,odist= 1;
int istride=3,ostride=3;
cufftComplex** datacpu[10][3], data[10][3];
datacpu= (cufftComplex **)malloc(sizeof(cufftComplex)*10*3);
datacpu[0][1]=6;
datacpu[1][3]=7;
cufftHandle plan;
cudaMalloc((void**)&data, sizeof(cufftComplex)*10*3);
cudaMemcpy(datacpu,data, 10*3*sizeof(cufftComplex), cudaMemcpyHostToDevice);
cufftPlanMany(&plan, rank, n, &iembed, istride, idist, &oembed, ostride, odist, CUFFT_C2C, BATCH);
cufftExecC2C(plan, data, data, CUFFT_FORWARD);
cudaDeviceSynchronize();
//free memory
cufftDestroy(plan);
cudaFree(data);
printf("fin");
return 0;
}
|
ef82330a92db983eabc91734206b2fde8143a1fa.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <iostream>
#include <hiprand/hiprand.h>
struct random_d_array
{
float *d_a;
int n;
random_d_array(int n) :n{n}
{
hipMalloc((void**)&d_a, n*sizeof(float));
hiprandGenerator_t gen;
hiprandCreateGenerator(&gen, HIPRAND_RNG_PSEUDO_DEFAULT);
hiprandGenerateUniform(gen, d_a, n);
}
~random_d_array()
{
hipFree(&d_a);
}
};
using namespace std;
__global__ void MyKernel(float *array, int arrayCount)
{
int idx = threadIdx.x + blockIdx.x * blockDim.x;
if(idx < arrayCount)
array[idx] *= array[idx];
}
int launchMyKernel(float *array, int arrayCount)
{
int blockSize;
int minGridSize;
int gridSize;
hipEvent_t start, stop;
float milliseconds = 0;
blockSize = 32;
gridSize = (arrayCount + blockSize - 1)/blockSize;
cout << "Trying non-optiomal blockSize = " << blockSize << ", gridSize = " << gridSize << endl;
float average = 0.0;
for(int i = 0; i < 10; ++i)
{
hipEventCreate(&start);
hipEventCreate(&stop);
hipEventRecord(start);
hipLaunchKernelGGL(( MyKernel), dim3(gridSize), dim3(blockSize), 0, 0, array, arrayCount);
hipEventRecord(stop);
hipDeviceSynchronize();
hipEventElapsedTime(&milliseconds, start, stop);
hipEventDestroy(start);
hipEventDestroy(stop);
cout <<"i = "<< i << ": " << milliseconds << " ms" << endl;
if(i > 0) average += milliseconds;
}
average /= 10 - 1;
cout << "Average = " << average << endl;
cout << "============" << endl;
hipOccupancyMaxPotentialBlockSize(&minGridSize, &blockSize,
(void*)MyKernel, 0, arrayCount);
gridSize = (arrayCount + blockSize - 1)/blockSize;
cout << "Suggested blockSize = " << blockSize << ", gridSize = " << gridSize << ", minGridSize = " << minGridSize << endl;
average = 0.0;
for(int i = 0; i < 10; ++i)
{
hipEventCreate(&start);
hipEventCreate(&stop);
hipEventRecord(start);
hipLaunchKernelGGL(( MyKernel), dim3(gridSize), dim3(blockSize), 0, 0, array, arrayCount);
hipEventRecord(stop);
hipDeviceSynchronize();
hipEventElapsedTime(&milliseconds, start, stop);
hipEventDestroy(start);
hipEventDestroy(stop);
cout << "i = " << i << ": " << milliseconds << " ms" << endl;
if(i > 0) average += milliseconds;
}
average /= 10 - 1;
cout << "Average = " << average << endl;
return 0;
}
int main()
{
int n = 100000;
random_d_array A(n);
launchMyKernel(A.d_a, n);
}
| ef82330a92db983eabc91734206b2fde8143a1fa.cu | #include <iostream>
#include <curand.h>
struct random_d_array
{
float *d_a;
int n;
random_d_array(int n) :n{n}
{
cudaMalloc((void**)&d_a, n*sizeof(float));
curandGenerator_t gen;
curandCreateGenerator(&gen, CURAND_RNG_PSEUDO_DEFAULT);
curandGenerateUniform(gen, d_a, n);
}
~random_d_array()
{
cudaFree(&d_a);
}
};
using namespace std;
__global__ void MyKernel(float *array, int arrayCount)
{
int idx = threadIdx.x + blockIdx.x * blockDim.x;
if(idx < arrayCount)
array[idx] *= array[idx];
}
int launchMyKernel(float *array, int arrayCount)
{
int blockSize;
int minGridSize;
int gridSize;
cudaEvent_t start, stop;
float milliseconds = 0;
blockSize = 32;
gridSize = (arrayCount + blockSize - 1)/blockSize;
cout << "Trying non-optiomal blockSize = " << blockSize << ", gridSize = " << gridSize << endl;
float average = 0.0;
for(int i = 0; i < 10; ++i)
{
cudaEventCreate(&start);
cudaEventCreate(&stop);
cudaEventRecord(start);
MyKernel<<<gridSize, blockSize>>>(array, arrayCount);
cudaEventRecord(stop);
cudaDeviceSynchronize();
cudaEventElapsedTime(&milliseconds, start, stop);
cudaEventDestroy(start);
cudaEventDestroy(stop);
cout <<"i = "<< i << ": " << milliseconds << " ms" << endl;
if(i > 0) average += milliseconds;
}
average /= 10 - 1;
cout << "Average = " << average << endl;
cout << "============" << endl;
cudaOccupancyMaxPotentialBlockSize(&minGridSize, &blockSize,
(void*)MyKernel, 0, arrayCount);
gridSize = (arrayCount + blockSize - 1)/blockSize;
cout << "Suggested blockSize = " << blockSize << ", gridSize = " << gridSize << ", minGridSize = " << minGridSize << endl;
average = 0.0;
for(int i = 0; i < 10; ++i)
{
cudaEventCreate(&start);
cudaEventCreate(&stop);
cudaEventRecord(start);
MyKernel<<<gridSize, blockSize>>>(array, arrayCount);
cudaEventRecord(stop);
cudaDeviceSynchronize();
cudaEventElapsedTime(&milliseconds, start, stop);
cudaEventDestroy(start);
cudaEventDestroy(stop);
cout << "i = " << i << ": " << milliseconds << " ms" << endl;
if(i > 0) average += milliseconds;
}
average /= 10 - 1;
cout << "Average = " << average << endl;
return 0;
}
int main()
{
int n = 100000;
random_d_array A(n);
launchMyKernel(A.d_a, n);
}
|
9baf79de09ffc959b39df829fafc27e0e8fe6944.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
//
// @author raver119@gmail.com
//
#include <Environment.h>
#include <loops/transform_bool.h>
#include <types/types.h>
#include <op_boilerplate.h>
#include <loops/legacy_ops.h>
#include <helpers/DebugHelper.h>
using namespace simdOps;
template <typename X, typename Z, typename OpType>
__global__ void transformBoolSimple(void *dy, Nd4jLong *xShapeInfo, int xRank,
void *params,
void *result, Nd4jLong *zShapeInfo, int zRank,
int *allocationPointer,
void *reductionPointer,
Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
functions::transform::TransformBool<X,Z>::template transformCuda<OpType>(dy,xShapeInfo,params,result,zShapeInfo,allocationPointer,reductionPointer,tadShapeInfo, tadOffsets);
}
namespace functions {
namespace transform {
template<typename X, typename Y>
_CUDA_H void TransformBool<X,Y>::executeTransformShaped(dim3 launchDims, hipStream_t *stream, int opNum, void *x, Nd4jLong *xShape, int xRank, void *extraParams, void *z, Nd4jLong *zShape, int zRank, int *allocationPointer, void *reductionPointer, Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
DISPATCH_BY_OPNUM_TT(intermediateShaped, PARAMS(launchDims, stream, x, xShape, xRank, extraParams, z, zShape, zRank, allocationPointer, reductionPointer, tadShapeInfo, tadOffsets), TRANSFORM_BOOL_OPS);
DEBUG_KERNEL(stream, opNum);
}
template<typename X, typename Z>
template <typename OpType>
__device__ void TransformBool<X,Z>::transformCuda(
void *vdy,
Nd4jLong *shapeInfo,
void *vparams,
void *vresult,
Nd4jLong *zShapeInfo,
int *allocationPointer, void *vreductionPointer,
Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
auto dy = static_cast<X*>(vdy);
auto result = static_cast<Z*>(vresult);
auto params = static_cast<X*>(vparams);
auto reductionPointer = static_cast<Z*>(vreductionPointer);
if(OpType::requiresSpecial) {
OpType::execSpecialCuda(dy,shapeInfo,result,zShapeInfo,params, allocationPointer, reductionPointer, tadShapeInfo, tadOffsets);
return;
} else {
auto xOrder = shape::order(shapeInfo);
auto zOrder = shape::order(zShapeInfo);
auto xEws = shape::elementWiseStride(shapeInfo);
auto zEws = shape::elementWiseStride(zShapeInfo);
auto tid = blockIdx.x * blockDim.x + threadIdx.x;
__shared__ Nd4jLong length;
if(threadIdx.x == 0)
length = shape::length(shapeInfo);
__syncthreads();
int totalThreads = gridDim.x * blockDim.x;
if(xEws >= 1 && zEws >= 1 && xOrder == zOrder) {
if(xEws == 1 && zEws == 1) {
/* equal, positive, non-unit increments. */
for (Nd4jLong i = tid; i < length; i += totalThreads) {
result[i] = OpType::op(dy[i], params);
}
}
else {
for (Nd4jLong i = tid; i < length; i += totalThreads) {
result[i * zEws] = OpType::op(dy[i * xEws], params);
}
}
}
else {
for (Nd4jLong i = tid; i < length; i+= totalThreads) {
auto xOffset2 = shape::getIndexOffset(i, shapeInfo, length);
auto zOffset2 = shape::getIndexOffset(i, zShapeInfo, length);
result[zOffset2] = OpType::op(dy[xOffset2], params);
}
}
}
};
template<typename X, typename Z>
template <typename OpType>
_CUDA_H void TransformBool<X,Z>::intermediateShaped(dim3 launchDims, hipStream_t *stream, void *x, Nd4jLong *xShape, int xRank, void *extraParams, void *z, Nd4jLong *zShape, int zRank, int *allocationPointer, void *reductionPointer, Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
hipLaunchKernelGGL(( transformBoolSimple<X, Z, OpType>), dim3(launchDims.x), dim3(launchDims.y), launchDims.z, *stream, x, xShape, xRank, extraParams, z, zShape, zRank, allocationPointer, reductionPointer, tadShapeInfo, tadOffsets);
nd4j::DebugHelper::checkErrorCode(stream, "transformBool(...) failed");
}
BUILD_DOUBLE_TEMPLATE(template class ND4J_EXPORT TransformBool, , LIBND4J_TYPES, BOOL_TYPES);
}
}
| 9baf79de09ffc959b39df829fafc27e0e8fe6944.cu | /*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
//
// @author raver119@gmail.com
//
#include <Environment.h>
#include <loops/transform_bool.h>
#include <types/types.h>
#include <op_boilerplate.h>
#include <loops/legacy_ops.h>
#include <helpers/DebugHelper.h>
using namespace simdOps;
template <typename X, typename Z, typename OpType>
__global__ void transformBoolSimple(void *dy, Nd4jLong *xShapeInfo, int xRank,
void *params,
void *result, Nd4jLong *zShapeInfo, int zRank,
int *allocationPointer,
void *reductionPointer,
Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
functions::transform::TransformBool<X,Z>::template transformCuda<OpType>(dy,xShapeInfo,params,result,zShapeInfo,allocationPointer,reductionPointer,tadShapeInfo, tadOffsets);
}
namespace functions {
namespace transform {
template<typename X, typename Y>
_CUDA_H void TransformBool<X,Y>::executeTransformShaped(dim3 launchDims, cudaStream_t *stream, int opNum, void *x, Nd4jLong *xShape, int xRank, void *extraParams, void *z, Nd4jLong *zShape, int zRank, int *allocationPointer, void *reductionPointer, Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
DISPATCH_BY_OPNUM_TT(intermediateShaped, PARAMS(launchDims, stream, x, xShape, xRank, extraParams, z, zShape, zRank, allocationPointer, reductionPointer, tadShapeInfo, tadOffsets), TRANSFORM_BOOL_OPS);
DEBUG_KERNEL(stream, opNum);
}
template<typename X, typename Z>
template <typename OpType>
__device__ void TransformBool<X,Z>::transformCuda(
void *vdy,
Nd4jLong *shapeInfo,
void *vparams,
void *vresult,
Nd4jLong *zShapeInfo,
int *allocationPointer, void *vreductionPointer,
Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
auto dy = static_cast<X*>(vdy);
auto result = static_cast<Z*>(vresult);
auto params = static_cast<X*>(vparams);
auto reductionPointer = static_cast<Z*>(vreductionPointer);
if(OpType::requiresSpecial) {
OpType::execSpecialCuda(dy,shapeInfo,result,zShapeInfo,params, allocationPointer, reductionPointer, tadShapeInfo, tadOffsets);
return;
} else {
auto xOrder = shape::order(shapeInfo);
auto zOrder = shape::order(zShapeInfo);
auto xEws = shape::elementWiseStride(shapeInfo);
auto zEws = shape::elementWiseStride(zShapeInfo);
auto tid = blockIdx.x * blockDim.x + threadIdx.x;
__shared__ Nd4jLong length;
if(threadIdx.x == 0)
length = shape::length(shapeInfo);
__syncthreads();
int totalThreads = gridDim.x * blockDim.x;
if(xEws >= 1 && zEws >= 1 && xOrder == zOrder) {
if(xEws == 1 && zEws == 1) {
/* equal, positive, non-unit increments. */
for (Nd4jLong i = tid; i < length; i += totalThreads) {
result[i] = OpType::op(dy[i], params);
}
}
else {
for (Nd4jLong i = tid; i < length; i += totalThreads) {
result[i * zEws] = OpType::op(dy[i * xEws], params);
}
}
}
else {
for (Nd4jLong i = tid; i < length; i+= totalThreads) {
auto xOffset2 = shape::getIndexOffset(i, shapeInfo, length);
auto zOffset2 = shape::getIndexOffset(i, zShapeInfo, length);
result[zOffset2] = OpType::op(dy[xOffset2], params);
}
}
}
};
template<typename X, typename Z>
template <typename OpType>
_CUDA_H void TransformBool<X,Z>::intermediateShaped(dim3 launchDims, cudaStream_t *stream, void *x, Nd4jLong *xShape, int xRank, void *extraParams, void *z, Nd4jLong *zShape, int zRank, int *allocationPointer, void *reductionPointer, Nd4jLong *tadShapeInfo, Nd4jLong *tadOffsets) {
transformBoolSimple<X, Z, OpType><<<launchDims.x, launchDims.y, launchDims.z, *stream>>>(x, xShape, xRank, extraParams, z, zShape, zRank, allocationPointer, reductionPointer, tadShapeInfo, tadOffsets);
nd4j::DebugHelper::checkErrorCode(stream, "transformBool(...) failed");
}
BUILD_DOUBLE_TEMPLATE(template class ND4J_EXPORT TransformBool, , LIBND4J_TYPES, BOOL_TYPES);
}
}
|
5bba5fc63832abeedd1edfbdf7ac90802981074e.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <vector>
const int BLOCK_X = 256;
std::vector<float> calc_prefix_cpu(const std::vector<float> &data)
{
std::vector<float> res(data.size());
res[0] = data[0];
for(size_t i = 1; i < data.size(); ++i)
{
res[i] = res[i-1] + data[i];
}
return res;
}
__global__ void cuda_prefixsum(float *input, float *output, int sz)
{
__shared__ float s[BLOCK_X*2];
unsigned int tidx = threadIdx.x;
//load data into the shared memory
int left_idx = 2*blockIdx.x*blockDim.x + tidx;
int right_idx = left_idx + blockDim.x ;
if(left_idx < sz)
{
s[tidx] = input[left_idx];
}
else
{
s[tidx] = 0.;
}
if(right_idx < sz)
{
s[tidx + blockDim.x] = input[right_idx];
}
else
{
s[tidx + blockDim.x] = 0.;
}
__syncthreads();
// forward pass
for (int stride = 1; stride <= blockDim.x; stride <<= 1)
{
int idx = (threadIdx.x + 1)*stride*2 - 1;
if (idx < 2*blockDim.x)
{
s[idx] += s[idx - stride];
}
__syncthreads();
}
// backward pass
for (int stride = blockDim.x/2; stride > 0; stride >>= 1)
{
int idx = (threadIdx.x + 1)*stride*2 - 1;
if (idx + stride < 2*blockDim.x)
{
s[idx + stride] += s[idx];
}
__syncthreads();
}
if (left_idx < sz)
{
output[left_idx] = s[tidx];
}
if (right_idx < sz)
{
output[right_idx] = s[tidx + blockDim.x];
}
}
__global__ void aggregate(float *input, float *output, int sz)
{
int tidx = threadIdx.x;
int dest_idx = (tidx + 1)*BLOCK_X*2 - 1;
if (dest_idx < sz)
{
output[tidx] = input[dest_idx];
}
}
__global__ void collect_sums(float *input, float *output, int sz)
{
int dest_idx = threadIdx.x + blockDim.x*(blockIdx.x + 1);
if (dest_idx < sz)
{
output[dest_idx] += input[blockIdx.x];
}
}
std::vector<float> calc_prefix_cuda(const std::vector<float> &data)
{
const size_t num_elems = data.size();
const size_t grid_x = (num_elems - 1)/(BLOCK_X*2) + 1;
std::vector<float> res(num_elems);
float *dev_data;
float *dev_buffer;
float *dev_aggregate;
float *dev_res;
hipMalloc((void **)&dev_data, num_elems*sizeof(float));
hipMalloc((void **)&dev_buffer, num_elems*sizeof(float));
hipMalloc((void **)&dev_aggregate, grid_x*sizeof(float));
hipMalloc((void **)&dev_res, grid_x*sizeof(float));
hipMemset(dev_buffer, 0., num_elems*sizeof(float));
hipMemcpy(dev_data, data.data(), num_elems*sizeof(float), hipMemcpyHostToDevice);
hipLaunchKernelGGL(( cuda_prefixsum), dim3(grid_x), dim3(BLOCK_X), 0, 0, dev_data, dev_buffer, num_elems);
hipLaunchKernelGGL(( aggregate), dim3(1), dim3(grid_x), 0, 0, dev_buffer,dev_aggregate, num_elems);
hipLaunchKernelGGL(( cuda_prefixsum), dim3(1), dim3(grid_x), 0, 0, dev_aggregate, dev_res, grid_x);
hipLaunchKernelGGL(( collect_sums), dim3(grid_x), dim3(2*BLOCK_X), 0, 0, dev_res, dev_buffer, num_elems);
hipMemcpy(res.data(), dev_buffer, num_elems*sizeof(float), hipMemcpyDeviceToHost);
hipFree(dev_data);
hipFree(dev_buffer);
hipFree(dev_aggregate);
hipFree(dev_res);
return res;
} | 5bba5fc63832abeedd1edfbdf7ac90802981074e.cu | #include <vector>
const int BLOCK_X = 256;
std::vector<float> calc_prefix_cpu(const std::vector<float> &data)
{
std::vector<float> res(data.size());
res[0] = data[0];
for(size_t i = 1; i < data.size(); ++i)
{
res[i] = res[i-1] + data[i];
}
return res;
}
// Per-block inclusive prefix sum (work-efficient two-phase scan).
// Each block of blockDim.x threads scans a 2*blockDim.x-wide segment of
// `input` in shared memory and writes the segment's inclusive prefix
// sums to `output`; elements past `sz` are zero-padded on load and
// skipped on store.
// NOTE(review): the shared buffer is statically BLOCK_X*2 floats, so the
// kernel is only correct when launched with blockDim.x <= BLOCK_X (the
// second-level launch in calc_prefix_cuda relies on this).
__global__ void cuda_prefixsum(float *input, float *output, int sz)
{
	__shared__ float s[BLOCK_X*2];
	unsigned int tidx = threadIdx.x;
	// Load two elements per thread into shared memory, zero-padding
	// past the end of the data.
	int left_idx = 2*blockIdx.x*blockDim.x + tidx;
	int right_idx = left_idx + blockDim.x ;
	if(left_idx < sz)
	{
		s[tidx] = input[left_idx];
	}
	else
	{
		s[tidx] = 0.;
	}
	if(right_idx < sz)
	{
		s[tidx + blockDim.x] = input[right_idx];
	}
	else
	{
		s[tidx + blockDim.x] = 0.;
	}
	__syncthreads();
	// Up-sweep (forward pass): build sums of power-of-two-sized spans,
	// stored at each span's last slot. The barrier sits outside the
	// divergent if, so every thread reaches it.
	for (int stride = 1; stride <= blockDim.x; stride <<= 1)
	{
		int idx = (threadIdx.x + 1)*stride*2 - 1;
		if (idx < 2*blockDim.x)
		{
			s[idx] += s[idx - stride];
		}
		__syncthreads();
	}
	// Down-sweep (backward pass): add each span total into the midpoint
	// of the following span, completing the inclusive scan in place.
	for (int stride = blockDim.x/2; stride > 0; stride >>= 1)
	{
		int idx = (threadIdx.x + 1)*stride*2 - 1;
		if (idx + stride < 2*blockDim.x)
		{
			s[idx + stride] += s[idx];
		}
		__syncthreads();
	}
	// Store the scanned segment, guarding the partial tail.
	if (left_idx < sz)
	{
		output[left_idx] = s[tidx];
	}
	if (right_idx < sz)
	{
		output[right_idx] = s[tidx + blockDim.x];
	}
}
// Gather each 2*BLOCK_X-wide scan segment's total: segment t's inclusive
// scan result lives at its last slot, (t+1)*BLOCK_X*2 - 1.
// Intended to run in a single block (see calc_prefix_cuda), with one
// thread per segment -- so the segment count must fit in one block.
// A partial tail segment (dest_idx >= sz) produces no aggregate, which
// is fine because no later segment consumes it.
__global__ void aggregate(float *input, float *output, int sz)
{
	int tidx = threadIdx.x;
	int dest_idx = (tidx + 1)*BLOCK_X*2 - 1;
	if (dest_idx < sz)
	{
		output[tidx] = input[dest_idx];
	}
}
// Second-pass fixup: block b adds input[b] -- expected to hold the
// running total of segments 0..b (the scanned aggregates) -- to every
// element of segment b+1 of `output`. Launched with blockDim.x equal to
// the segment width (2*BLOCK_X in calc_prefix_cuda); segment 0 needs no
// fixup, hence the (blockIdx.x + 1) offset.
__global__ void collect_sums(float *input, float *output, int sz)
{
	int dest_idx = threadIdx.x + blockDim.x*(blockIdx.x + 1);
	if (dest_idx < sz)
	{
		output[dest_idx] += input[blockIdx.x];
	}
}
// Inclusive prefix sum of `data` on the GPU (two-level scan-then-fixup):
//   1. cuda_prefixsum scans each 2*BLOCK_X-wide segment independently;
//   2. aggregate gathers each segment's total, cuda_prefixsum scans the
//      totals in one block, and collect_sums adds the scanned totals
//      back into the following segments.
// Returns a vector of the same length as `data`.
//
// NOTE(review): the second level runs in a single block of grid_x
// threads, so this is only correct while grid_x fits in one block and
// within cuda_prefixsum's fixed BLOCK_X*2 shared buffer; larger inputs
// would need a third level. No CUDA error checking is performed.
std::vector<float> calc_prefix_cuda(const std::vector<float> &data)
{
	const size_t num_elems = data.size();
	std::vector<float> res(num_elems);
	// Guard: the original computed (num_elems - 1) in size_t, which
	// underflowed for empty input and requested an astronomically large
	// allocation.
	if (num_elems == 0)
	{
		return res;
	}
	const size_t grid_x = (num_elems - 1)/(BLOCK_X*2) + 1;	// ceil-div: number of segments
	float *dev_data;	// input values
	float *dev_buffer;	// per-segment scans, then the final result
	float *dev_aggregate;	// one total per segment
	float *dev_res;	// inclusive scan of the segment totals
	cudaMalloc((void **)&dev_data, num_elems*sizeof(float));
	cudaMalloc((void **)&dev_buffer, num_elems*sizeof(float));
	cudaMalloc((void **)&dev_aggregate, grid_x*sizeof(float));
	cudaMalloc((void **)&dev_res, grid_x*sizeof(float));
	// cudaMemset takes an int byte value; the original passed the double
	// literal 0., which only worked via implicit conversion.
	cudaMemset(dev_buffer, 0, num_elems*sizeof(float));
	cudaMemcpy(dev_data, data.data(), num_elems*sizeof(float), cudaMemcpyHostToDevice);
	cuda_prefixsum<<<grid_x, BLOCK_X>>>(dev_data, dev_buffer, num_elems);
	aggregate<<<1, grid_x>>>(dev_buffer, dev_aggregate, num_elems);
	cuda_prefixsum<<<1, grid_x>>>(dev_aggregate, dev_res, grid_x);
	collect_sums<<<grid_x, 2*BLOCK_X>>>(dev_res, dev_buffer, num_elems);
	cudaMemcpy(res.data(), dev_buffer, num_elems*sizeof(float), cudaMemcpyDeviceToHost);
	cudaFree(dev_data);
	cudaFree(dev_buffer);
	cudaFree(dev_aggregate);
	cudaFree(dev_res);
	return res;
}
bb72212d143adca23bc22b45e9e1a4e0646f8810.hip | // !!! This is a file automatically generated by hipify!!!
#include <iostream>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <time.h>
#include <hip/hip_runtime.h>
#include <stdint.h>
#include <math.h>
#include <unistd.h>
#include <omp.h>
#include<limits>
// Elapsed time from `start` to `end`, in seconds, as a double.
// When the nanosecond difference is negative, one second is borrowed
// from the whole-second difference before converting.
double diff(timespec start, timespec end)
{
	long nsec_delta = end.tv_nsec - start.tv_nsec;
	double elapsed;
	if (nsec_delta < 0)
	{
		elapsed = end.tv_sec - start.tv_sec - 1;
		elapsed += (1000000000 + nsec_delta)/1000000000.0;
	}
	else
	{
		elapsed = end.tv_sec - start.tv_sec + nsec_delta/1000000000.0;
	}
	return elapsed;
}
// Per-alignment job descriptor uploaded alongside the base-string blob.
struct NUM_ADD
{
	short2 read_reference_number;	// .x = read length, .y = reference length (in bases)
	int address_array;	// byte offset of this job's packed bases within the data blob
};
__global__ void calculate_cigar( int size, char * data, NUM_ADD *num_add,int4 * result, int * direction) //, char * result
{
int offset=blockIdx.x;
__shared__ short2 read_reference_number;
__shared__ char * read_base_array;
__shared__ char4 * reference_base_array;
__shared__ int mismatch;
__shared__ int match;
__shared__ int open;
__shared__ int extend;
__shared__ short2 * direction_index;
while(offset<size)
{
if( threadIdx.x==0)
{
read_reference_number=num_add[offset].read_reference_number;
read_base_array=(char *) (data+num_add[offset].address_array);
reference_base_array=(char4 *) (read_base_array+(read_reference_number.x+127)/128*128);
direction_index=(short2 *) (direction+offset*640*1100);
}
__syncthreads();
__shared__ char reference_base_in_char[600];
int hh=(read_reference_number.y+4-1)/4;
int tt=(hh+blockDim.x-1)/blockDim.x;
for(int ii=0;ii<tt;ii++)
{
int aa=threadIdx.x+ii*blockDim.x;
if(aa< hh)
{
char4 reference_base_in_thread;
reference_base_in_thread=reference_base_array[aa]; //Is it right to get data from global memory
reference_base_in_char[aa*4]=reference_base_in_thread.x;
reference_base_in_char[aa*4+1]=reference_base_in_thread.y;
reference_base_in_char[aa*4+2]=reference_base_in_thread.z;
reference_base_in_char[aa*4+3]=reference_base_in_thread.w;
}
}
__shared__ int MM[578];
__shared__ int gap_h[578]; //insertion
__shared__ short2 gap_size_h[578]; //insertion
__shared__ int result_col;
__shared__ int result_row;
__shared__ int result_col_index;
__shared__ int result_row_index;
//__shared__ char cigar_m[128];
//__shared__ int cigar_int_m[128];
//int final_result;
//int final_i;
//int final_j;
if(threadIdx.x==0)
{
MM[0]=0;
gap_h[0]=-1000000000;//std::numeric_limits<int>::min()/2;
gap_size_h[0].x=0;
gap_size_h[0].y=0;
match=200;
mismatch=-150;
open=-260;
extend=-11;
result_col=-1000000000;//std::numeric_limits<int>::min()/2;
result_row=-1000000000;//std::numeric_limits<int>::min()/2;
// for(int i=0;i<read_reference_number.y;i++)
// printf("%c",reference_base_in_char[i]);
// printf("\n");
// for(int i=0;i<read_reference_number.x;i++)
// printf("%c",read_base_array[i]);
//printf("%d\n",offset);
}
__syncthreads();
// int read_number=read_reference_number.x;
{
char read_base;
read_base=read_base_array[threadIdx.x];
int gap_v=-1000000000;//std::numeric_limits<int>::min()/2;;
int gap_size_v=0; //Deletion
int M=0; //now
int step_right; //now
int ki=0;//insertion h negetive
//deletion v
int MMM=0;
short mt=0;
short2 curmt;
curmt.x=0;
curmt.y=0;
int current_reference_id=0;
for(int j=0;j<read_reference_number.x+read_reference_number.y-1;j++)
{
int aa=j-threadIdx.x;
if( aa>=0 && (current_reference_id<read_reference_number.y))
{
int prev_gap=M+open; //M which is cacluated by last step in the same thread
gap_v+=extend;
if(prev_gap>gap_v)
{
gap_v=prev_gap;
gap_size_v=1;
}
else
gap_size_v++;
char reference_base_each=reference_base_in_char[current_reference_id];
M=MMM+(read_base==reference_base_each? match:mismatch);
prev_gap=MM[threadIdx.x]+open;
step_right=gap_h[threadIdx.x]+extend;
if(prev_gap>step_right)
{
step_right=prev_gap;
ki=1;
}
else
ki=gap_size_h[threadIdx.x].x+1;
bool diag=(M>=gap_v)&&(M>=step_right);
curmt.y=0;
if(diag)
{
curmt.x=0;
//if(threadIdx.x==0||current_reference_id==0)
// curmt.y=0;
// else
curmt.y=mt+1;
// curBtrack=0;
}
else
if(step_right>=gap_v)
{
M=step_right;
curmt.x=0-ki;
// curBtrack=0-ki;
}
else
{
M=gap_v;
curmt.x=gap_size_v;
//curBtrack=gap_size_v;
}
MMM=MM[threadIdx.x];
mt=gap_size_h[threadIdx.x].y;
direction_index[640*j+threadIdx.x]=curmt;
//if(threadIdx.x==read_reference_number.x-3)
//printf("%p %d ", &direction_index[800*j+threadIdx.x],curBtrack);
if(current_reference_id==read_reference_number.y-1)
{
if(M>=result_row)
{
result_row=M;
result_row_index=threadIdx.x; //
}
//printf("%d %d %d %d %d \n",read_reference_number.y,M,result_row,result_row_index,threadIdx.x);
}
if(threadIdx.x==read_reference_number.x-1)
{
if(M>=result_col)
{
result_col=M;
result_col_index=current_reference_id; // +1
}
}
current_reference_id++;
}
__syncthreads(); //to make sure that the former value of MM[threadIdx.x+1] are used by other threads.
MM[threadIdx.x+1]=M;
gap_h[threadIdx.x+1]=step_right;
gap_size_h[threadIdx.x+1].x=ki;
gap_size_h[threadIdx.x+1].y=curmt.y;
__syncthreads(); // there should be two synthreads(); // to make sure that all of MM[threadIdx.x+1] have get a new value before M,D and I changed.
}
}
// char state;//0 match; 1 mistmatch; 2 inseriton; 3 deletion
// __shared__ int cigar_index;
// int segment_length;
// short2 btr;
// char new_state;
// int step_length;
//if(threadIdx.x==0)printf("Offset=%d\n",offset);
int4 result4;
// if(threadIdx.x==read_reference_number.x-1)
if(threadIdx.x==0)
{
//printf("%d %d %d %d\n", result_row,result_col, result_row_index,result_col_index);
if(result_row>result_col||result_row==result_col&&(read_reference_number.x-result_row_index-1)>(read_reference_number.y-result_col_index-1))
{
// final_result=result_row;
result4.x=read_reference_number.y-1;
result4.y=result_row_index;
result4.z=read_reference_number.x-1-result_row_index;
}
else
{
// final_result=result_col;
result4.x=result_col_index;
result4.y=read_reference_number.x-1;
result4.z=0;
}
//result[offset*3]=final_result;
// printf("%d %d %d %d\n",size,offset,result4.x,result4.y);
result[offset]=result4;
}
__syncthreads();
offset+=gridDim.x;
//if(threadIdx.x==0) printf("%d %d\n",offset,size);
}
}
__global__ void calculate_cigar_2( int size, int4 * result, char * cigar,int * cigar_int,int * direction) //, char * result
{
int offset=blockIdx.x;
int4 result4;;
short2 * direction_index;
__shared__ char * cigar_store;
__shared__ int *cigar_int_store;
__shared__ char cigar_m[128];
__shared__ int cigar_int_m[128];
while(offset<size)
{
char state;//0 match; 1 mistmatch; 2 inseriton; 3 deletion
__shared__ int cigar_index;
int segment_length;
short2 btr;
char new_state;
int step_length;
if( threadIdx.x==0)
{
result4=result[offset];
direction_index=(short2 *) (direction+offset*640*1100);
cigar_store=(char *) (cigar+offset*sizeof(char)*128);
cigar_int_store=(int *) (cigar_int+offset*128);
//printf("\n %d %d\n", final_i,final_j);
cigar_index=0;
if(result4.z>0)
{
cigar_m[cigar_index]='S';
cigar_int_m[cigar_index]=result4.z;
cigar_index++;
}
segment_length=0;
state='N';
do
{
btr=direction_index[(result4.x+result4.y)*640+result4.y];
if(btr.x>0)
{
new_state='D';
step_length=btr.x;
result4.x-=step_length;
}
else
if(btr.x<0)
{
new_state='I';
step_length=0-btr.x;
result4.y-=step_length;
}
else
{
new_state='M';
step_length=btr.y;
result4.x-=step_length;
result4.y-=step_length;
}
if(state=='N') state=new_state;
if(state==new_state)
{
segment_length+=step_length;
}
else
{
cigar_m[cigar_index]=state;
cigar_int_m[cigar_index]=segment_length;
segment_length=step_length;
cigar_index++;
state=new_state;
}
}while(result4.x>=0&&result4.y>=0);
cigar_m[cigar_index]=state;
cigar_int_m[cigar_index]=segment_length;
cigar_index++;
if(result4.y>=0)
{
cigar_m[cigar_index]='S';
cigar_int_m[cigar_index]=result4.y+1;
cigar_index++;
}
result4.z=result4.x+1;
result4.w=cigar_index;
result[offset]=result4;
/* for(int i=cigar_index-1;i>=0;i--)
{
printf("%d%c",cigar_int_m[i],cigar_m[i]);
}
*/
}
__syncthreads();
if(threadIdx.x<cigar_index && cigar_index<=blockDim.x)
{
// if(threadIdx.x==0)
// printf("%c %d\n",cigar_m[cigar_index-1-threadIdx.x], cigar_int_m[cigar_index-1-threadIdx.x]);
cigar_store[threadIdx.x]=cigar_m[cigar_index-1-threadIdx.x];
cigar_int_store[threadIdx.x]=cigar_int_m[cigar_index-1-threadIdx.x];
// if(threadIdx.x==0)
// printf("%c %d\n", cigar_store[threadIdx.x],cigar_int_store[threadIdx.x]);
}
offset+=gridDim.x;
}
}
// One read/reference pair as fixed-size C strings (up to 599 bases + NUL).
struct InputData
{
	char read_base[600];
	char reference_base[600];
};
// Driver: reads 101 whitespace-separated base strings from args[1],
// builds args[2] x args[3] read/reference pairings, runs the
// calculate_cigar alignment kernel over all of them, and prints the
// timed throughput (GCUPs).
//
// Fixes over the original: result_h was malloc'd but never freed and
// the input FILE* was never closed (both released now); a missing
// argument or unreadable file now fails with a message instead of
// dereferencing NULL.
int main(int artc, char* args[])
{
	int total_size=0;
	if(artc<4)
	{
		fprintf(stderr,"usage: %s <base file> <rows> <cols>\n",args[0]);
		return 1;
	}
	FILE * file;
	file=fopen(args[1],"r");
	if(file==NULL)
	{
		fprintf(stderr,"cannot open %s\n",args[1]);
		return 1;
	}
	int size;
	double computation_time=0;
	timespec start,finish;
	char data[200][1000];
	for(int i=0;i<101;i++)
	{
		fscanf(file,"%s ", data[i]);
	}
	int row=atoi(args[2]);
	int col=atoi(args[3]);
	size=row*col;
	for(int ww=0;ww<1;ww++)
	{
		// Pair every row string with every column string.
		int index=0;
		InputData * inputdata=(InputData* )malloc(size*(sizeof(InputData)));
		for(int i=0;i<row;i++)
			for(int j=0;j<col;j++)
			{
				strcpy(inputdata[index].reference_base,data[i]);
				strcpy(inputdata[index].read_base,data[j]);
				index++;
			}
		// Pack all jobs into one contiguous upload blob: the NUM_ADD
		// headers first (128-byte aligned), then per-job read bases and
		// char4-packed reference bases, each padded to 128 bytes.
		char * data_h_total=(char*)malloc(size * 640* sizeof (char)*2+(size*sizeof(NUM_ADD)+127)/128*128);
		NUM_ADD * data_num_add=(NUM_ADD *) (data_h_total);
		char * data_h=data_h_total+(size*sizeof(NUM_ADD)+127)/128*128;	// write cursor past the headers
		int data_size=0;
		char * data_d_total;
		hipMalloc( (char **) &data_d_total, (size*sizeof(NUM_ADD)+127)/128*128+size *( 640 )* sizeof (char)*2+sizeof(int)*size*4);
		int * result_h=(int *) malloc(sizeof(int)*size*4);
		char * cigar_h=(char *) malloc(sizeof(char)*size*128);
		int * cigar_int_h=(int *) malloc(sizeof(int)*size*128);
		for(int i=0;i<size;i++)
		{
			char4 reference_tep[150];
			int read_len=strlen(inputdata[i].read_base);
			int ref_len=strlen(inputdata[i].reference_base);
			int new_len=(ref_len+4-1)/4;
			total_size+=ref_len*read_len;	// total DP cells, for the GCUPs figure
			for(int j=0;j<new_len;j++)
			{
				reference_tep[j].x=inputdata[i].reference_base[j*4];
				if(j*4+1<ref_len)
					reference_tep[j].y=inputdata[i].reference_base[j*4+1];
				if(j*4+2<ref_len)
					reference_tep[j].z=inputdata[i].reference_base[j*4+2];
				if(j*4+3<ref_len)
					reference_tep[j].w=inputdata[i].reference_base[j*4+3];
			}
			data_num_add[i].read_reference_number.x=read_len;
			data_num_add[i].read_reference_number.y=ref_len;
			data_num_add[i].address_array=data_size;
			memcpy(data_h,inputdata[i].read_base,read_len);
			data_h+=(read_len+128-1)/128*128;
			data_size+=(read_len+128-1)/128*128;
			memcpy(data_h,reference_tep,sizeof(char4)* new_len);
			data_h+=(new_len*sizeof(char4)+127)/128*128;
			data_size+=(new_len*sizeof(char4)+127)/128*128;
		}
		int data_size_to_copy=data_size+(size*sizeof(NUM_ADD)+127)/128*128;
		hipMemcpy(data_d_total,data_h_total,data_size_to_copy,hipMemcpyHostToDevice);
		NUM_ADD * num_add_d=(NUM_ADD *) (data_d_total);
		char * data_d=data_d_total+(size*sizeof(NUM_ADD)+127)/128*128;
		int4 * result_d=(int4 *) (data_d_total+data_size_to_copy);
		char * cigar;
		hipMalloc( (char **) &cigar, size * (128* sizeof (char)+128*sizeof(int)));
		int * cigar_int=(int *) (cigar+size*128*sizeof(char));
		int * direction;	// per-job backtrack matrix (640 x 1100 short2 pairs)
		hipMalloc( (int **) & direction, (size+1) * (640*1100* sizeof (int)));
		dim3 block(576);
		dim3 grid(size);
		clock_gettime(CLOCK_MONOTONIC_RAW,&start);
		hipLaunchKernelGGL(( calculate_cigar), dim3(grid),dim3(block), 0, 0, size,data_d,num_add_d,result_d,direction);
		// calculate_cigar_2 (the traceback pass) is currently disabled, so
		// cigar_h / cigar_int_h just receive whatever is in those buffers.
		hipMemcpy(result_h,result_d,size*sizeof(int)*4,hipMemcpyDeviceToHost);
		hipMemcpy(cigar_h,cigar,128*sizeof(char)*size, hipMemcpyDeviceToHost);
		hipMemcpy(cigar_int_h,cigar_int,128*sizeof(int)*size,hipMemcpyDeviceToHost);
		clock_gettime(CLOCK_MONOTONIC_RAW,&finish);
		computation_time+=diff(start,finish);
		hipFree(direction);
		free(data_h_total);
		hipFree(data_d_total);
		free(inputdata);
		hipFree(cigar);
		free(cigar_int_h);
		free(cigar_h);
		free(result_h);	// was leaked in the original
	}
	fclose(file);	// was leaked in the original
	printf(" computation_time= %e %d GCUPs=%lf\n",computation_time,total_size,( (double)total_size)/computation_time/1000000000);
	return 0;
}
| bb72212d143adca23bc22b45e9e1a4e0646f8810.cu | #include <iostream>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <time.h>
#include <cuda.h>
#include <stdint.h>
#include <math.h>
#include <unistd.h>
#include <omp.h>
#include<limits>
// Elapsed time from `start` to `end`, in seconds, as a double.
// When the nanosecond difference is negative, one second is borrowed
// from the whole-second difference before converting.
double diff(timespec start, timespec end)
{
	long nsec_delta = end.tv_nsec - start.tv_nsec;
	double elapsed;
	if (nsec_delta < 0)
	{
		elapsed = end.tv_sec - start.tv_sec - 1;
		elapsed += (1000000000 + nsec_delta)/1000000000.0;
	}
	else
	{
		elapsed = end.tv_sec - start.tv_sec + nsec_delta/1000000000.0;
	}
	return elapsed;
}
// Per-alignment job descriptor uploaded alongside the base-string blob.
struct NUM_ADD
{
	short2 read_reference_number;	// .x = read length, .y = reference length (in bases)
	int address_array;	// byte offset of this job's packed bases within the data blob
};
__global__ void calculate_cigar( int size, char * data, NUM_ADD *num_add,int4 * result, int * direction) //, char * result
{
int offset=blockIdx.x;
__shared__ short2 read_reference_number;
__shared__ char * read_base_array;
__shared__ char4 * reference_base_array;
__shared__ int mismatch;
__shared__ int match;
__shared__ int open;
__shared__ int extend;
__shared__ short2 * direction_index;
while(offset<size)
{
if( threadIdx.x==0)
{
read_reference_number=num_add[offset].read_reference_number;
read_base_array=(char *) (data+num_add[offset].address_array);
reference_base_array=(char4 *) (read_base_array+(read_reference_number.x+127)/128*128);
direction_index=(short2 *) (direction+offset*640*1100);
}
__syncthreads();
__shared__ char reference_base_in_char[600];
int hh=(read_reference_number.y+4-1)/4;
int tt=(hh+blockDim.x-1)/blockDim.x;
for(int ii=0;ii<tt;ii++)
{
int aa=threadIdx.x+ii*blockDim.x;
if(aa< hh)
{
char4 reference_base_in_thread;
reference_base_in_thread=reference_base_array[aa]; //Is it right to get data from global memory
reference_base_in_char[aa*4]=reference_base_in_thread.x;
reference_base_in_char[aa*4+1]=reference_base_in_thread.y;
reference_base_in_char[aa*4+2]=reference_base_in_thread.z;
reference_base_in_char[aa*4+3]=reference_base_in_thread.w;
}
}
__shared__ int MM[578];
__shared__ int gap_h[578]; //insertion
__shared__ short2 gap_size_h[578]; //insertion
__shared__ int result_col;
__shared__ int result_row;
__shared__ int result_col_index;
__shared__ int result_row_index;
//__shared__ char cigar_m[128];
//__shared__ int cigar_int_m[128];
//int final_result;
//int final_i;
//int final_j;
if(threadIdx.x==0)
{
MM[0]=0;
gap_h[0]=-1000000000;//std::numeric_limits<int>::min()/2;
gap_size_h[0].x=0;
gap_size_h[0].y=0;
match=200;
mismatch=-150;
open=-260;
extend=-11;
result_col=-1000000000;//std::numeric_limits<int>::min()/2;
result_row=-1000000000;//std::numeric_limits<int>::min()/2;
// for(int i=0;i<read_reference_number.y;i++)
// printf("%c",reference_base_in_char[i]);
// printf("\n");
// for(int i=0;i<read_reference_number.x;i++)
// printf("%c",read_base_array[i]);
//printf("%d\n",offset);
}
__syncthreads();
// int read_number=read_reference_number.x;
{
char read_base;
read_base=read_base_array[threadIdx.x];
int gap_v=-1000000000;//std::numeric_limits<int>::min()/2;;
int gap_size_v=0; //Deletion
int M=0; //now
int step_right; //now
int ki=0;//insertion h negetive
//deletion v
int MMM=0;
short mt=0;
short2 curmt;
curmt.x=0;
curmt.y=0;
int current_reference_id=0;
for(int j=0;j<read_reference_number.x+read_reference_number.y-1;j++)
{
int aa=j-threadIdx.x;
if( aa>=0 && (current_reference_id<read_reference_number.y))
{
int prev_gap=M+open; //M which is cacluated by last step in the same thread
gap_v+=extend;
if(prev_gap>gap_v)
{
gap_v=prev_gap;
gap_size_v=1;
}
else
gap_size_v++;
char reference_base_each=reference_base_in_char[current_reference_id];
M=MMM+(read_base==reference_base_each? match:mismatch);
prev_gap=MM[threadIdx.x]+open;
step_right=gap_h[threadIdx.x]+extend;
if(prev_gap>step_right)
{
step_right=prev_gap;
ki=1;
}
else
ki=gap_size_h[threadIdx.x].x+1;
bool diag=(M>=gap_v)&&(M>=step_right);
curmt.y=0;
if(diag)
{
curmt.x=0;
//if(threadIdx.x==0||current_reference_id==0)
// curmt.y=0;
// else
curmt.y=mt+1;
// curBtrack=0;
}
else
if(step_right>=gap_v)
{
M=step_right;
curmt.x=0-ki;
// curBtrack=0-ki;
}
else
{
M=gap_v;
curmt.x=gap_size_v;
//curBtrack=gap_size_v;
}
MMM=MM[threadIdx.x];
mt=gap_size_h[threadIdx.x].y;
direction_index[640*j+threadIdx.x]=curmt;
//if(threadIdx.x==read_reference_number.x-3)
//printf("%p %d ", &direction_index[800*j+threadIdx.x],curBtrack);
if(current_reference_id==read_reference_number.y-1)
{
if(M>=result_row)
{
result_row=M;
result_row_index=threadIdx.x; //
}
//printf("%d %d %d %d %d \n",read_reference_number.y,M,result_row,result_row_index,threadIdx.x);
}
if(threadIdx.x==read_reference_number.x-1)
{
if(M>=result_col)
{
result_col=M;
result_col_index=current_reference_id; // +1
}
}
current_reference_id++;
}
__syncthreads(); //to make sure that the former value of MM[threadIdx.x+1] are used by other threads.
MM[threadIdx.x+1]=M;
gap_h[threadIdx.x+1]=step_right;
gap_size_h[threadIdx.x+1].x=ki;
gap_size_h[threadIdx.x+1].y=curmt.y;
__syncthreads(); // there should be two synthreads(); // to make sure that all of MM[threadIdx.x+1] have get a new value before M,D and I changed.
}
}
// char state;//0 match; 1 mistmatch; 2 inseriton; 3 deletion
// __shared__ int cigar_index;
// int segment_length;
// short2 btr;
// char new_state;
// int step_length;
//if(threadIdx.x==0)printf("Offset=%d\n",offset);
int4 result4;
// if(threadIdx.x==read_reference_number.x-1)
if(threadIdx.x==0)
{
//printf("%d %d %d %d\n", result_row,result_col, result_row_index,result_col_index);
if(result_row>result_col||result_row==result_col&&(read_reference_number.x-result_row_index-1)>(read_reference_number.y-result_col_index-1))
{
// final_result=result_row;
result4.x=read_reference_number.y-1;
result4.y=result_row_index;
result4.z=read_reference_number.x-1-result_row_index;
}
else
{
// final_result=result_col;
result4.x=result_col_index;
result4.y=read_reference_number.x-1;
result4.z=0;
}
//result[offset*3]=final_result;
// printf("%d %d %d %d\n",size,offset,result4.x,result4.y);
result[offset]=result4;
}
__syncthreads();
offset+=gridDim.x;
//if(threadIdx.x==0) printf("%d %d\n",offset,size);
}
}
__global__ void calculate_cigar_2( int size, int4 * result, char * cigar,int * cigar_int,int * direction) //, char * result
{
int offset=blockIdx.x;
int4 result4;;
short2 * direction_index;
__shared__ char * cigar_store;
__shared__ int *cigar_int_store;
__shared__ char cigar_m[128];
__shared__ int cigar_int_m[128];
while(offset<size)
{
char state;//0 match; 1 mistmatch; 2 inseriton; 3 deletion
__shared__ int cigar_index;
int segment_length;
short2 btr;
char new_state;
int step_length;
if( threadIdx.x==0)
{
result4=result[offset];
direction_index=(short2 *) (direction+offset*640*1100);
cigar_store=(char *) (cigar+offset*sizeof(char)*128);
cigar_int_store=(int *) (cigar_int+offset*128);
//printf("\n %d %d\n", final_i,final_j);
cigar_index=0;
if(result4.z>0)
{
cigar_m[cigar_index]='S';
cigar_int_m[cigar_index]=result4.z;
cigar_index++;
}
segment_length=0;
state='N';
do
{
btr=direction_index[(result4.x+result4.y)*640+result4.y];
if(btr.x>0)
{
new_state='D';
step_length=btr.x;
result4.x-=step_length;
}
else
if(btr.x<0)
{
new_state='I';
step_length=0-btr.x;
result4.y-=step_length;
}
else
{
new_state='M';
step_length=btr.y;
result4.x-=step_length;
result4.y-=step_length;
}
if(state=='N') state=new_state;
if(state==new_state)
{
segment_length+=step_length;
}
else
{
cigar_m[cigar_index]=state;
cigar_int_m[cigar_index]=segment_length;
segment_length=step_length;
cigar_index++;
state=new_state;
}
}while(result4.x>=0&&result4.y>=0);
cigar_m[cigar_index]=state;
cigar_int_m[cigar_index]=segment_length;
cigar_index++;
if(result4.y>=0)
{
cigar_m[cigar_index]='S';
cigar_int_m[cigar_index]=result4.y+1;
cigar_index++;
}
result4.z=result4.x+1;
result4.w=cigar_index;
result[offset]=result4;
/* for(int i=cigar_index-1;i>=0;i--)
{
printf("%d%c",cigar_int_m[i],cigar_m[i]);
}
*/
}
__syncthreads();
if(threadIdx.x<cigar_index && cigar_index<=blockDim.x)
{
// if(threadIdx.x==0)
// printf("%c %d\n",cigar_m[cigar_index-1-threadIdx.x], cigar_int_m[cigar_index-1-threadIdx.x]);
cigar_store[threadIdx.x]=cigar_m[cigar_index-1-threadIdx.x];
cigar_int_store[threadIdx.x]=cigar_int_m[cigar_index-1-threadIdx.x];
// if(threadIdx.x==0)
// printf("%c %d\n", cigar_store[threadIdx.x],cigar_int_store[threadIdx.x]);
}
offset+=gridDim.x;
}
}
struct InputData
{
char read_base[600];
char reference_base[600];
};
// Driver: reads 101 whitespace-separated base strings from args[1],
// builds args[2] x args[3] read/reference pairings, runs the
// calculate_cigar alignment kernel over all of them, and prints the
// timed throughput (GCUPs).
//
// Fixes over the original: result_h was malloc'd but never freed and
// the input FILE* was never closed (both released now); a missing
// argument or unreadable file now fails with a message instead of
// dereferencing NULL.
int main(int artc, char* args[])
{
	int total_size=0;
	if(artc<4)
	{
		fprintf(stderr,"usage: %s <base file> <rows> <cols>\n",args[0]);
		return 1;
	}
	FILE * file;
	file=fopen(args[1],"r");
	if(file==NULL)
	{
		fprintf(stderr,"cannot open %s\n",args[1]);
		return 1;
	}
	int size;
	double computation_time=0;
	timespec start,finish;
	char data[200][1000];
	for(int i=0;i<101;i++)
	{
		fscanf(file,"%s ", data[i]);
	}
	int row=atoi(args[2]);
	int col=atoi(args[3]);
	size=row*col;
	for(int ww=0;ww<1;ww++)
	{
		// Pair every row string with every column string.
		int index=0;
		InputData * inputdata=(InputData* )malloc(size*(sizeof(InputData)));
		for(int i=0;i<row;i++)
			for(int j=0;j<col;j++)
			{
				strcpy(inputdata[index].reference_base,data[i]);
				strcpy(inputdata[index].read_base,data[j]);
				index++;
			}
		// Pack all jobs into one contiguous upload blob: the NUM_ADD
		// headers first (128-byte aligned), then per-job read bases and
		// char4-packed reference bases, each padded to 128 bytes.
		char * data_h_total=(char*)malloc(size * 640* sizeof (char)*2+(size*sizeof(NUM_ADD)+127)/128*128);
		NUM_ADD * data_num_add=(NUM_ADD *) (data_h_total);
		char * data_h=data_h_total+(size*sizeof(NUM_ADD)+127)/128*128;	// write cursor past the headers
		int data_size=0;
		char * data_d_total;
		cudaMalloc( (char **) &data_d_total, (size*sizeof(NUM_ADD)+127)/128*128+size *( 640 )* sizeof (char)*2+sizeof(int)*size*4);
		int * result_h=(int *) malloc(sizeof(int)*size*4);
		char * cigar_h=(char *) malloc(sizeof(char)*size*128);
		int * cigar_int_h=(int *) malloc(sizeof(int)*size*128);
		for(int i=0;i<size;i++)
		{
			char4 reference_tep[150];
			int read_len=strlen(inputdata[i].read_base);
			int ref_len=strlen(inputdata[i].reference_base);
			int new_len=(ref_len+4-1)/4;
			total_size+=ref_len*read_len;	// total DP cells, for the GCUPs figure
			for(int j=0;j<new_len;j++)
			{
				reference_tep[j].x=inputdata[i].reference_base[j*4];
				if(j*4+1<ref_len)
					reference_tep[j].y=inputdata[i].reference_base[j*4+1];
				if(j*4+2<ref_len)
					reference_tep[j].z=inputdata[i].reference_base[j*4+2];
				if(j*4+3<ref_len)
					reference_tep[j].w=inputdata[i].reference_base[j*4+3];
			}
			data_num_add[i].read_reference_number.x=read_len;
			data_num_add[i].read_reference_number.y=ref_len;
			data_num_add[i].address_array=data_size;
			memcpy(data_h,inputdata[i].read_base,read_len);
			data_h+=(read_len+128-1)/128*128;
			data_size+=(read_len+128-1)/128*128;
			memcpy(data_h,reference_tep,sizeof(char4)* new_len);
			data_h+=(new_len*sizeof(char4)+127)/128*128;
			data_size+=(new_len*sizeof(char4)+127)/128*128;
		}
		int data_size_to_copy=data_size+(size*sizeof(NUM_ADD)+127)/128*128;
		cudaMemcpy(data_d_total,data_h_total,data_size_to_copy,cudaMemcpyHostToDevice);
		NUM_ADD * num_add_d=(NUM_ADD *) (data_d_total);
		char * data_d=data_d_total+(size*sizeof(NUM_ADD)+127)/128*128;
		int4 * result_d=(int4 *) (data_d_total+data_size_to_copy);
		char * cigar;
		cudaMalloc( (char **) &cigar, size * (128* sizeof (char)+128*sizeof(int)));
		int * cigar_int=(int *) (cigar+size*128*sizeof(char));
		int * direction;	// per-job backtrack matrix (640 x 1100 short2 pairs)
		cudaMalloc( (int **) & direction, (size+1) * (640*1100* sizeof (int)));
		dim3 block(576);
		dim3 grid(size);
		clock_gettime(CLOCK_MONOTONIC_RAW,&start);
		calculate_cigar<<<grid,block>>> (size,data_d,num_add_d,result_d,direction);
		// calculate_cigar_2 (the traceback pass) is currently disabled, so
		// cigar_h / cigar_int_h just receive whatever is in those buffers.
		cudaMemcpy(result_h,result_d,size*sizeof(int)*4,cudaMemcpyDeviceToHost);
		cudaMemcpy(cigar_h,cigar,128*sizeof(char)*size, cudaMemcpyDeviceToHost);
		cudaMemcpy(cigar_int_h,cigar_int,128*sizeof(int)*size,cudaMemcpyDeviceToHost);
		clock_gettime(CLOCK_MONOTONIC_RAW,&finish);
		computation_time+=diff(start,finish);
		cudaFree(direction);
		free(data_h_total);
		cudaFree(data_d_total);
		free(inputdata);
		cudaFree(cigar);
		free(cigar_int_h);
		free(cigar_h);
		free(result_h);	// was leaked in the original
	}
	fclose(file);	// was leaked in the original
	printf(" computation_time= %e %d GCUPs=%lf\n",computation_time,total_size,( (double)total_size)/computation_time/1000000000);
	return 0;
}
|
b51ef1cceee0dae173d56af1f55c1514ee4148d3.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <cfloat>
#include "Watertight.h"
#include "DefineFuncs.h"
// Return the IEEE-754 sign bit of f as an int mask: 0x80000000 when f
// is negative (including -0.0f), 0 otherwise.
// The original read the bits through reinterpret_cast<int*>
// dereferences, which is undefined behaviour under C++ strict aliasing;
// __float_as_int is the supported device bit-cast and compiles to a
// plain register reinterpretation.
__device__
int sign_mask(float f)
{
	// XOR of the bit patterns of +1.0f and -1.0f isolates the sign bit,
	// exactly as the original computed its mask.
	int mask = __float_as_int(1.0f) ^ __float_as_int(-1.0f);
	return __float_as_int(f) & mask;
}
// XOR f's bit pattern with `mask` and reinterpret the result as float.
// With mask == sign_mask(x) this conditionally flips f's sign without a
// branch. Uses the supported __float_as_int / __int_as_float device
// bit-casts instead of the original's aliasing-violating pointer casts.
__device__
float xorf(float f, int mask)
{
	return __int_as_float(__float_as_int(f) ^ mask);
}
//__device__
//vec3 abs(vec3 a)
//{
// vec3 res;
// res.x = (a.x > 0.0f) ? a.x : -a.x;
// res.y = (a.y > 0.0f) ? a.y : -a.y;
// res.z = (a.z > 0.0f) ? a.z : -a.z;
// return res;
//}
//
//__device__
//int max_dim(vec3 a)
//{
// int max_dim = 0;
//
// if (a[max_dim] < a[1])
// max_dim = 1;
//
// if (a[max_dim] < a[2])
// max_dim = 2;
//
// return max_dim;
//}
// Launch the watertight intersection kernel over every triangle in
// `data`, writing one hit record per triangle into the member buffer
// `result`.
// NOTE(review): the launch is asynchronous and unchecked -- callers
// must synchronize before reading `result`.
void Watertight::test(TestData* data)
{
	hipLaunchKernelGGL(( watertightTest), dim3(NUM_BLOCKS), dim3(NUM_THREADSPERBLOCK), 0, 0, data->triangles, data->ray, data->triangleCount, result);
}
//#define BACKFACE_CULLING
// Grid-stride loop over all triangles: each thread tests the single ray
// against triangle `index` and records the hit flag (and, on a hit, the
// hit distance) in the matching result slot.
// `triangles`, `ray` and `resultArray` must be device pointers sized
// for `triangleCount` entries; any launch configuration is valid.
__global__ void watertightTest(Triangle* triangles, Ray* ray, size_t triangleCount, IntersectionResult* resultArray)
{
	unsigned int index = threadIdx.x + blockIdx.x * blockDim.x;
	unsigned int stride = blockDim.x * gridDim.x;
	while (index < triangleCount)
	{
		float t;
		if (watertightIntersectTriangle(*ray, triangles[index], &t))
		{
			resultArray[index].hit = true;
			resultArray[index].distance = t;
		}
		else
		{
			// distance is left untouched on a miss
			resultArray[index].hit = false;
		}
		index += stride;
	}
	return;
}
// Watertight ray/triangle intersection (shear-and-scale formulation).
// The triangle is translated to the ray origin and sheared into a frame
// where the ray points along +Z of a permuted axis set; the shear
// constants (Sx/Sy/Sz) and axis permutation (kx/ky/kz) are precomputed
// on the Ray. The hit test then reduces to the signs of the scaled 2D
// barycentrics U/V/W. Edge-on cases (a barycentric of exactly 0) are
// recomputed in double precision so shared edges never leak rays.
// On a hit, *t receives the hit distance along the ray and 1 is
// returned; otherwise returns 0 and *t is untouched. Backface culling
// is compiled in only when BACKFACE_CULLING is defined.
__device__ int watertightIntersectTriangle(Ray ray, Triangle tri, float* t)
{
	//vec3 dir = ray.direction;
	vec3 org = ray.origin;
	// vvv These calculations were moved to the ray creation vvv
	// Calculate dimension where the ray direction is maximal
	//int kz = max_dim(abs(dir));
	//int kx = kz + 1; if (kx == 3) kx = 0;
	//int ky = kx + 1; if (ky == 3) ky = 0;
	//// Swap kx and ky dimension to preserve winding direction of triangles
	//if (dir[kz] < 0.0f)
	//{
	//	int temp = kx;
	//	kx = ky;
	//	ky = temp;
	//}
	//// Calculate shear constants
	//float Sx = dir[kx] / dir[kz];
	//float Sy = dir[ky] / dir[kz];
	//float Sz = 1.0f / dir[kz];
	// Calculate vertices relative to ray origin
	vec3 A; SUB(A, tri[0], org);
	vec3 B; SUB(B, tri[1], org);
	vec3 C; SUB(C, tri[2], org);
	// Perform shear and scale of vertices (project onto the plane
	// perpendicular to the ray in the permuted frame)
	const float Ax = A[ray.kx] - ray.Sx * A[ray.kz];
	const float Ay = A[ray.ky] - ray.Sy * A[ray.kz];
	const float Bx = B[ray.kx] - ray.Sx * B[ray.kz];
	const float By = B[ray.ky] - ray.Sy * B[ray.kz];
	const float Cx = C[ray.kx] - ray.Sx * C[ray.kz];
	const float Cy = C[ray.ky] - ray.Sy * C[ray.kz];
	// Calculate scaled barycentric coordinates (2D edge functions)
	float U = Cx * By - Cy * Bx;
	float V = Ax * Cy - Ay * Cx;
	float W = Bx * Ay - By * Ax;
	// Reject when the signs of U, V, W disagree (origin outside the
	// sheared triangle).
#ifdef BACKFACE_CULLING
	if (U < 0.0f || V < 0.0f || W < 0.0f) return 0;
#else
	if ((U < 0.0f || V < 0.0f || W < 0.0f) && (U > 0.0f || V > 0.0f || W > 0.0f)) return 0;
#endif
	// Fallback to test against edges using double precision -- keeps the
	// test watertight when the ray grazes an edge exactly.
	if (U == 0.0f || V == 0.0f || W == 0.0f)
	{
		double CxBy = (double)Cx * (double)By;
		double CyBx = (double)Cy * (double)Bx;
		U = (float)(CxBy - CyBx);
		double AxCy = (double)Ax * (double)Cy;
		double AyCx = (double)Ay * (double)Cx;
		V = (float)(AxCy - AyCx);
		double BxAy = (double)Bx * (double)Ay;
		double ByAx = (double)By * (double)Ax;
		W = (float)(BxAy - ByAx);
#ifdef BACKFACE_CULLING
		if (U < 0.0f || V < 0.0f || W < 0.0f) return 0;
#else
		if ((U < 0.0f || V < 0.0f || W < 0.0f) && (U > 0.0f || V > 0.0f || W > 0.0f)) return 0;
#endif
	}
	// Calculate determinant (degenerate triangle or parallel ray -> 0)
	float det = U + V + W;
	if (det == 0.0f) return 0;
	// Calculate scaled z-coordinates of vertices and use them to calculate the hit distance
	const float Az = ray.Sz * A[ray.kz];
	const float Bz = ray.Sz * B[ray.kz];
	const float Cz = ray.Sz * C[ray.kz];
	const float T = U * Az + V * Bz + W * Cz;
#ifdef BACKFACE_CULLING
	if (T < 0.0f /*|| T > res.distance * det*/) return 0;
#else
	// Without culling, det may be negative; compare T against zero with
	// det's sign folded in (branch-free via the sign-bit XOR).
	int det_sign = sign_mask(det);
	if (xorf(T, det_sign) < 0.0f /*|| xorf(T, det_sign) > res.distance * xorf(det, det_sign)*/) return 0;
#endif
	// normalize U, V, W and T
	const float rcpDet = 1.0f / det;
	//hit.u = U * rcpDet;
	//hit.v = V * rcpDet;
	//hit.w = W * rcpDet;
	*t = T * rcpDet;
	return 1;
}
| b51ef1cceee0dae173d56af1f55c1514ee4148d3.cu | #include <cfloat>
#include "Watertight.h"
#include "DefineFuncs.h"
__device__
int sign_mask(float f)
{
    // Return an int whose sign bit equals the sign bit of f and whose other
    // bits are zero (0x80000000 for negative f / -0.0f, 0 otherwise).
    // The original derived the mask by XOR-ing the bit patterns of 1.0f and
    // -1.0f through reinterpret_cast'ed pointers, which is a strict-aliasing
    // violation in C++. __float_as_int is the idiomatic CUDA intrinsic for
    // reinterpreting float bits and avoids that UB; the mask 1.0f ^ -1.0f is
    // exactly the sign bit, so it can be written as a constant.
    return __float_as_int(f) & 0x80000000;
}
__device__
float xorf(float f, int mask)
{
    // XOR the bit pattern of f with mask and reinterpret the result as float.
    // With mask produced by sign_mask() (0 or 0x80000000) this conditionally
    // flips the sign of f. Uses the CUDA bit-reinterpretation intrinsics
    // instead of the original pointer-based type punning, which violated
    // strict aliasing.
    return __int_as_float(__float_as_int(f) ^ mask);
}
//__device__
//vec3 abs(vec3 a)
//{
// vec3 res;
// res.x = (a.x > 0.0f) ? a.x : -a.x;
// res.y = (a.y > 0.0f) ? a.y : -a.y;
// res.z = (a.z > 0.0f) ? a.z : -a.z;
// return res;
//}
//
//__device__
//int max_dim(vec3 a)
//{
// int max_dim = 0;
//
// if (a[max_dim] < a[1])
// max_dim = 1;
//
// if (a[max_dim] < a[2])
// max_dim = 2;
//
// return max_dim;
//}
// Launch the watertight ray/triangle intersection benchmark: one kernel call
// that tests data->ray against all data->triangleCount triangles, writing
// per-triangle results into the `result` member buffer.
// NUM_BLOCKS / NUM_THREADSPERBLOCK are project-wide launch constants
// (presumably defined in DefineFuncs.h — confirm); the kernel's grid-stride
// loop makes correctness independent of their values.
void Watertight::test(TestData* data)
{
    watertightTest<<<NUM_BLOCKS, NUM_THREADSPERBLOCK>>>(data->triangles, data->ray, data->triangleCount, result);
}
//#define BACKFACE_CULLING
// Kernel: each thread walks the triangle array in a grid-stride loop,
// intersects the single shared ray against triangles[index], and records
// hit flag and hit distance into resultArray[index].
__global__ void watertightTest(Triangle* triangles, Ray* ray, size_t triangleCount, IntersectionResult* resultArray)
{
    unsigned int index = threadIdx.x + blockIdx.x * blockDim.x;
    unsigned int stride = blockDim.x * gridDim.x;
    // Grid-stride loop: works for any launch configuration, including grids
    // smaller than triangleCount.
    while (index < triangleCount)
    {
        float t;
        if (watertightIntersectTriangle(*ray, triangles[index], &t))
        {
            resultArray[index].hit = true;
            resultArray[index].distance = t;
        }
        else
        {
            resultArray[index].hit = false;
        }
        index += stride;
    }
    return;
}
// Watertight ray/triangle intersection (follows Woop, Benthin & Wald,
// "Watertight Ray/Triangle Intersection", JCGT 2013): triangle vertices are
// translated into ray space, sheared/scaled so the ray direction maps to +z,
// and scaled barycentric coordinates U/V/W decide the hit, with a
// double-precision fallback on exact edge hits for watertightness.
// The ray-space constants (kx/ky/kz axis permutation and shear Sx/Sy/Sz) are
// precomputed at ray creation — see the commented-out block below.
// On a hit, writes the hit distance to *t and returns 1; returns 0 on a miss.
// Define BACKFACE_CULLING to also reject back-facing triangles.
__device__ int watertightIntersectTriangle(Ray ray, Triangle tri, float* t)
{
    //vec3 dir = ray.direction;
    vec3 org = ray.origin;
    // vvv These calculations were moved to the ray creation vvv
    // Calculate dimension where the ray direction is maximal
    //int kz = max_dim(abs(dir));
    //int kx = kz + 1; if (kx == 3) kx = 0;
    //int ky = kx + 1; if (ky == 3) ky = 0;
    //// Swap kx and ky dimension to preserve winding direction of triangles
    //if (dir[kz] < 0.0f)
    //{
    //    int temp = kx;
    //    kx = ky;
    //    ky = temp;
    //}
    //// Calculate shear constants
    //float Sx = dir[kx] / dir[kz];
    //float Sy = dir[ky] / dir[kz];
    //float Sz = 1.0f / dir[kz];
    // Calculate vertices relative to ray origin
    vec3 A; SUB(A, tri[0], org);
    vec3 B; SUB(B, tri[1], org);
    vec3 C; SUB(C, tri[2], org);
    // Perform shear and scale of vertices
    const float Ax = A[ray.kx] - ray.Sx * A[ray.kz];
    const float Ay = A[ray.ky] - ray.Sy * A[ray.kz];
    const float Bx = B[ray.kx] - ray.Sx * B[ray.kz];
    const float By = B[ray.ky] - ray.Sy * B[ray.kz];
    const float Cx = C[ray.kx] - ray.Sx * C[ray.kz];
    const float Cy = C[ray.ky] - ray.Sy * C[ray.kz];
    // Calculate scaled barycentric coordinates
    float U = Cx * By - Cy * Bx;
    float V = Ax * Cy - Ay * Cx;
    float W = Bx * Ay - By * Ax;
#ifdef BACKFACE_CULLING
    if (U < 0.0f || V < 0.0f || W < 0.0f) return 0;
#else
    // Miss only when U, V, W have mixed signs (all-equal-sign means inside).
    if ((U < 0.0f || V < 0.0f || W < 0.0f) && (U > 0.0f || V > 0.0f || W > 0.0f)) return 0;
#endif
    // Fallback to test against edges using double precision
    if (U == 0.0f || V == 0.0f || W == 0.0f)
    {
        double CxBy = (double)Cx * (double)By;
        double CyBx = (double)Cy * (double)Bx;
        U = (float)(CxBy - CyBx);
        double AxCy = (double)Ax * (double)Cy;
        double AyCx = (double)Ay * (double)Cx;
        V = (float)(AxCy - AyCx);
        double BxAy = (double)Bx * (double)Ay;
        double ByAx = (double)By * (double)Ax;
        W = (float)(BxAy - ByAx);
#ifdef BACKFACE_CULLING
        if (U < 0.0f || V < 0.0f || W < 0.0f) return 0;
#else
        if ((U < 0.0f || V < 0.0f || W < 0.0f) && (U > 0.0f || V > 0.0f || W > 0.0f)) return 0;
#endif
    }
    // Calculate determinant
    float det = U + V + W;
    if (det == 0.0f) return 0;
    // Calculate scaled z-coordinates of vertices and use them to calculate the hit distance
    const float Az = ray.Sz * A[ray.kz];
    const float Bz = ray.Sz * B[ray.kz];
    const float Cz = ray.Sz * C[ray.kz];
    const float T = U * Az + V * Bz + W * Cz;
#ifdef BACKFACE_CULLING
    if (T < 0.0f /*|| T > res.distance * det*/) return 0;
#else
    // Sign-flip T by det's sign so the comparison works for both windings.
    int det_sign = sign_mask(det);
    if (xorf(T, det_sign) < 0.0f /*|| xorf(T, det_sign) > res.distance * xorf(det, det_sign)*/) return 0;
#endif
    // normalize U, V, W and T
    const float rcpDet = 1.0f / det;
    //hit.u = U * rcpDet;
    //hit.v = V * rcpDet;
    //hit.w = W * rcpDet;
    *t = T * rcpDet;
    return 1;
}
|
3a3593557119d3b38ffab72e97afb746f6f0b9cd.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
// compile with -std=c++11 -O3 -lcurand
#include <iostream>
#include <cstdio>
#include <hiprand/hiprand.h>
using std::cout;
using std::endl;
#define gpuErrchk(ans) { gpuAssert((ans), __FILE__, __LINE__); }
// Report a HIP error together with the call site and, unless `abort` is
// false, terminate the process with the error code as exit status.
// Invoked through the gpuErrchk macro, which supplies __FILE__/__LINE__.
inline void gpuAssert(hipError_t code, const char *file, int line, bool abort=true)
{
    if (code == hipSuccess)
        return;
    fprintf(stderr, "GPUassert: %s %s %d\n", hipGetErrorString(code), file, line);
    if (abort)
        exit(code);
}
// Utility class used to avoid linker errors with extern
// unsized shared memory arrays with templated type
// Typed accessor for the dynamically-sized extern __shared__ buffer.
// CUDA/HIP forbid multiple extern shared arrays of different types in one
// translation unit, so all templated kernels funnel through this one int
// array and cast it to T.
template <class T>
struct SharedMemory {
    __device__ inline operator T*() {
        extern __shared__ int __smem[];
        return (T*) __smem;
    }
    __device__ inline operator const T*() const {
        extern __shared__ int __smem[];
        return (T*) __smem;
    }
};
// specialize for double to avoid unaligned memory
// access compile errors (the generic int-based buffer is only 4-byte aligned)
template <>
struct SharedMemory<double> {
    __device__ inline operator double*() {
        extern __shared__ double __smem_d[];
        return (double*) __smem_d;
    }
    __device__ inline operator const double*() const {
        extern __shared__ double __smem_d[];
        return (double*) __smem_d;
    }
};
// Block-wide sum reduction: each thread copies one element of g_idata into
// the shared buffer sdata, then the block folds pairs until the total of the
// blockDim.x elements sits in sdata[0].
// NOTE(review): the halving loop assumes blockDim.x is a power of two and
// that g_idata holds at least blockDim.x readable elements — both hold for
// the launch configurations used in this file, but confirm for new callers.
template <class T>
__device__ void reduce(T* g_idata, T* sdata) {
    // load shared mem
    unsigned int tid = threadIdx.x;
    sdata[tid] = g_idata[tid];
    __syncthreads();
    // do reduction in shared mem
    for (unsigned int s = blockDim.x / 2; s > 0; s >>= 1) {
        if (tid < s) {
            sdata[tid] += sdata[tid + s];
        }
        // barrier is outside the divergent branch — all threads reach it
        __syncthreads();
    }
    // result is now in sdata[0]
}
// This kernel requires blockDim * sizeof(T) Bytes of shared memory.
// Each block processes `c * blockDim` elements.
// The buffers, which should be affected by the call to `__threadfence_system`
// must be volatile, as it is described in the CUDA C programming guide.
// Producer: each block reduces `c` consecutive chunks of blockDim.x elements
// from `data`, storing the mean of each chunk in partial_data[block*c + i].
// When COMMUNICATION_ON, thread 0 publishes progress by incrementing
// counter[block] after a system-wide fence, so a consumer kernel on another
// device can observe completed chunks.
// This kernel requires blockDim * sizeof(T) Bytes of shared memory.
// Each block processes `c * blockDim` elements.
// The buffers, which should be affected by the call to `__threadfence_system`
// must be volatile, as it is described in the CUDA C programming guide.
template <class T, bool COMMUNICATION_ON>
__global__ void producer_kernel(T* data, volatile T* partial_data, volatile unsigned* counter,
                                const unsigned c) {
    const unsigned global_start = blockIdx.x * blockDim.x * c;
    T* sdata = SharedMemory<T>();
    for (unsigned i = 0; i < c; ++i) {
        const unsigned offset = i * blockDim.x;
        const auto curr_start = data + global_start + offset;
        reduce(curr_start, sdata);
        // now we have the sum of blockDim elements in sdata[0]
        if (threadIdx.x == 0) {
            // save the mean of recently processed elements
            partial_data[blockIdx.x * c + i] = sdata[0] / (T) blockDim.x;
            if (COMMUNICATION_ON) {
                // fence makes the partial_data write visible system-wide
                // before the counter update that signals it
                __threadfence_system();
                ++counter[blockIdx.x]; // mark this block as processed
            }
        }
    }
}
// Consumer: each block reads the per-chunk means the producer published in
// partial_data and broadcasts each mean over the corresponding chunk of
// `data`. When COMMUNICATION_ON is false, a dummy value is written instead
// (baseline without cross-device communication).
// NOTE(review): the spin condition `counter[...] < c` waits for the producer
// block to finish ALL c chunks before consuming the first one; a pipelined
// version would wait for `counter[...] < i + 1`. This may be intentional for
// the benchmark — confirm before changing.
template<class T, bool COMMUNICATION_ON>
__global__ void consumer_kernel(T* data, const volatile T* partial_data,
                                const volatile unsigned* counter,
                                const unsigned c) {
    __shared__ T mean;
    const unsigned global_start = blockIdx.x * blockDim.x * c;
    for (unsigned i = 0; i < c; ++i) {
        const unsigned offset = i * blockDim.x;
        if (COMMUNICATION_ON) {
            if (threadIdx.x == 0) {
                while (counter[blockIdx.x] < c) {}
                mean = partial_data[blockIdx.x * c + i];
            }
            // all threads wait for the shared-memory broadcast of `mean`
            __syncthreads();
            data[offset + global_start + threadIdx.x] = mean;
        }
        else { // no communication
            data[offset + global_start + threadIdx.x] = threadIdx.x;
        }
    }
}
// Run one producer/consumer measurement:
//   device 0 reduces `c` chunks of `num_threads` random floats per block and
//   publishes per-chunk means plus a progress counter; device 1 consumes the
//   means and writes them back out.
// Prints the elapsed time in ms (no trailing separator — main() builds a
// Python tuple expression around the values) and returns 0.
// Requires two visible HIP devices.
int runMeasurement(
    const unsigned num_threads,
    const unsigned num_blocks,
    const unsigned c)
{
    hiprandGenerator_t gen;
    hiprandCreateGenerator(&gen, HIPRAND_RNG_PSEUDO_DEFAULT);
    using DataT = float;
    const size_t sh_mem_size = num_threads * sizeof(DataT);            // per-block scratch for reduce()
    const size_t size = c * num_threads * num_blocks * sizeof(DataT);  // total payload in Bytes
    DataT* in_data;
    DataT* out_data;
    DataT* partial_data;
    unsigned* counter;
    hipSetDevice(0);
    hipMalloc(&in_data, size);
    hipMallocManaged(&partial_data, num_blocks * c * sizeof(DataT));
    hiprandGenerateUniform(gen, (float*) in_data, size / sizeof(float)); // fill with random values
    hipDeviceSynchronize();
    hipSetDevice(1);
    hipMallocManaged(&counter, num_blocks * sizeof(unsigned));
    // BUGFIX: the progress counters must start at 0 — hipMallocManaged does
    // not guarantee zeroed memory, and the consumer spin-waits on them.
    hipMemset(counter, 0, num_blocks * sizeof(unsigned));
    hipMemAdvise(counter, num_blocks * sizeof(unsigned),
                 hipMemAdviseSetPreferredLocation, hipCpuDeviceId);
    hipMalloc(&out_data, size);
    hipMemset(out_data, 0, size);
    hipSetDevice(0);
    hipEvent_t start, stop;
    hipEventCreate(&start);
    hipEventCreate(&stop);
    hipEventRecord(start, 0);
    hipLaunchKernelGGL(( producer_kernel<DataT, true>), dim3(num_blocks), dim3(num_threads), sh_mem_size, 0, in_data, partial_data, counter, c);
    hipSetDevice(1);
    hipLaunchKernelGGL(( consumer_kernel<DataT, true>), dim3(num_blocks), dim3(num_threads), 0, 0, out_data, partial_data, counter, c);
    hipSetDevice(0);
    gpuErrchk(hipEventRecord(stop, 0));
    gpuErrchk(hipEventSynchronize(stop));
    float time_in_ms;
    gpuErrchk(hipEventElapsedTime(&time_in_ms, start, stop));
    gpuErrchk(hipSetDevice(0));
    gpuErrchk(hipDeviceSynchronize());
    hipEventDestroy(start);
    hipEventDestroy(stop);
    hipFree(in_data);
    hipFree(partial_data);
    hipFree(counter);
    // BUGFIX: out_data (allocated on device 1) was leaked; runMeasurement is
    // called once per sweep point from main(), so the leak grew every call.
    hipSetDevice(1);
    hipFree(out_data);
    hipSetDevice(0);
    hiprandDestroyGenerator(gen);
    cout << time_in_ms;
    return 0;
}
// Sweep thread counts x block counts x chunk counts, running one measurement
// per combination, and print all timings as a single Python expression
// "np.array(((...),...))" so the output can be pasted straight into analysis
// code. Nesting order: threads (outermost) -> blocks -> chunk counts.
int main()
{   cout << "np.array(";
    int tArray[] = {512,1024};
    int tLength = 2;
    int bArray[] = {1,2,4,8};
    int bLength = 4;
    int cArray[] = {256, 1024, 4096, 16384, 65536};
    int cLength = 5;
    cout << "(";
    for (int t = 0; t < tLength; t++)
    {
        cout << "(";
        for (int b = 0; b < bLength; b++)
        {
            cout << "(";
            for (int c = 0; c < cLength; c++)
            {
                //run kernel
                runMeasurement(tArray[t], bArray[b], cArray[c]);
                // comma-separate tuple entries (no trailing comma)
                if(c < cLength - 1)
                {
                    cout << ","<<endl;
                }
            }
            cout << ")";
            if(b < bLength - 1)
            {
                cout << ",";
            }
        }
        cout << ")" ;
        if(t < tLength - 1)
        {
            cout << ",";
        }
    }
    cout << ")";
    // runMeasurement(512,4,4096); //b, t, c
    cout << ")" << std::endl;
}
| 3a3593557119d3b38ffab72e97afb746f6f0b9cd.cu | // compile with -std=c++11 -O3 -lcurand
#include <iostream>
#include <cstdio>
#include <curand.h>
using std::cout;
using std::endl;
#define gpuErrchk(ans) { gpuAssert((ans), __FILE__, __LINE__); }
// Report a CUDA error together with the call site and, unless `abort` is
// false, terminate the process with the error code as exit status.
// Invoked through the gpuErrchk macro, which supplies __FILE__/__LINE__.
inline void gpuAssert(cudaError_t code, const char *file, int line, bool abort=true)
{
    if (code != cudaSuccess)
    {
        fprintf(stderr,"GPUassert: %s %s %d\n", cudaGetErrorString(code), file, line);
        if (abort) exit(code);
    }
}
// Utility class used to avoid linker errors with extern
// unsized shared memory arrays with templated type
// Typed accessor for the dynamically-sized extern __shared__ buffer.
// CUDA forbids multiple extern shared arrays of different types in one
// translation unit, so all templated kernels funnel through this one int
// array and cast it to T.
template <class T>
struct SharedMemory {
    __device__ inline operator T*() {
        extern __shared__ int __smem[];
        return (T*) __smem;
    }
    __device__ inline operator const T*() const {
        extern __shared__ int __smem[];
        return (T*) __smem;
    }
};
// specialize for double to avoid unaligned memory
// access compile errors (the generic int-based buffer is only 4-byte aligned)
template <>
struct SharedMemory<double> {
    __device__ inline operator double*() {
        extern __shared__ double __smem_d[];
        return (double*) __smem_d;
    }
    __device__ inline operator const double*() const {
        extern __shared__ double __smem_d[];
        return (double*) __smem_d;
    }
};
// Block-wide sum reduction: each thread copies one element of g_idata into
// the shared buffer sdata, then the block folds pairs until the total of the
// blockDim.x elements sits in sdata[0].
// NOTE(review): the halving loop assumes blockDim.x is a power of two and
// that g_idata holds at least blockDim.x readable elements — both hold for
// the launch configurations used in this file, but confirm for new callers.
template <class T>
__device__ void reduce(T* g_idata, T* sdata) {
    // load shared mem
    unsigned int tid = threadIdx.x;
    sdata[tid] = g_idata[tid];
    __syncthreads();
    // do reduction in shared mem
    for (unsigned int s = blockDim.x / 2; s > 0; s >>= 1) {
        if (tid < s) {
            sdata[tid] += sdata[tid + s];
        }
        // barrier is outside the divergent branch — all threads reach it
        __syncthreads();
    }
    // result is now in sdata[0]
}
// This kernel requires blockDim * sizeof(T) Bytes of shared memory.
// Each block processes `c * blockDim` elements.
// The buffers, which should be affected by the call to `__threadfence_system`
// must be volatile, as it is described in the CUDA C programming guide.
// Producer: each block reduces `c` consecutive chunks of blockDim.x elements
// from `data`, storing the mean of each chunk in partial_data[block*c + i].
// When COMMUNICATION_ON, thread 0 publishes progress by incrementing
// counter[block] after a system-wide fence, so a consumer kernel on another
// device can observe completed chunks.
// This kernel requires blockDim * sizeof(T) Bytes of shared memory.
// Each block processes `c * blockDim` elements.
// The buffers, which should be affected by the call to `__threadfence_system`
// must be volatile, as it is described in the CUDA C programming guide.
template <class T, bool COMMUNICATION_ON>
__global__ void producer_kernel(T* data, volatile T* partial_data, volatile unsigned* counter,
                                const unsigned c) {
    const unsigned global_start = blockIdx.x * blockDim.x * c;
    T* sdata = SharedMemory<T>();
    for (unsigned i = 0; i < c; ++i) {
        const unsigned offset = i * blockDim.x;
        const auto curr_start = data + global_start + offset;
        reduce(curr_start, sdata);
        // now we have the sum of blockDim elements in sdata[0]
        if (threadIdx.x == 0) {
            // save the mean of recently processed elements
            partial_data[blockIdx.x * c + i] = sdata[0] / (T) blockDim.x;
            if (COMMUNICATION_ON) {
                // fence makes the partial_data write visible system-wide
                // before the counter update that signals it
                __threadfence_system();
                ++counter[blockIdx.x]; // mark this block as processed
            }
        }
    }
}
// Consumer: each block reads the per-chunk means the producer published in
// partial_data and broadcasts each mean over the corresponding chunk of
// `data`. When COMMUNICATION_ON is false, a dummy value is written instead
// (baseline without cross-device communication).
// NOTE(review): the spin condition `counter[...] < c` waits for the producer
// block to finish ALL c chunks before consuming the first one; a pipelined
// version would wait for `counter[...] < i + 1`. This may be intentional for
// the benchmark — confirm before changing.
template<class T, bool COMMUNICATION_ON>
__global__ void consumer_kernel(T* data, const volatile T* partial_data,
                                const volatile unsigned* counter,
                                const unsigned c) {
    __shared__ T mean;
    const unsigned global_start = blockIdx.x * blockDim.x * c;
    for (unsigned i = 0; i < c; ++i) {
        const unsigned offset = i * blockDim.x;
        if (COMMUNICATION_ON) {
            if (threadIdx.x == 0) {
                while (counter[blockIdx.x] < c) {}
                mean = partial_data[blockIdx.x * c + i];
            }
            // all threads wait for the shared-memory broadcast of `mean`
            __syncthreads();
            data[offset + global_start + threadIdx.x] = mean;
        }
        else { // no communication
            data[offset + global_start + threadIdx.x] = threadIdx.x;
        }
    }
}
// Run one producer/consumer measurement:
//   device 0 reduces `c` chunks of `num_threads` random floats per block and
//   publishes per-chunk means plus a progress counter; device 1 consumes the
//   means and writes them back out.
// Prints the elapsed time in ms (no trailing separator — main() builds a
// Python tuple expression around the values) and returns 0.
// Requires two visible CUDA devices.
int runMeasurement(
    const unsigned num_threads,
    const unsigned num_blocks,
    const unsigned c)
{
    curandGenerator_t gen;
    curandCreateGenerator(&gen, CURAND_RNG_PSEUDO_DEFAULT);
    using DataT = float;
    const size_t sh_mem_size = num_threads * sizeof(DataT);            // per-block scratch for reduce()
    const size_t size = c * num_threads * num_blocks * sizeof(DataT);  // total payload in Bytes
    DataT* in_data;
    DataT* out_data;
    DataT* partial_data;
    unsigned* counter;
    cudaSetDevice(0);
    cudaMalloc(&in_data, size);
    cudaMallocManaged(&partial_data, num_blocks * c * sizeof(DataT));
    curandGenerateUniform(gen, (float*) in_data, size / sizeof(float)); // fill with random values
    cudaDeviceSynchronize();
    cudaSetDevice(1);
    cudaMallocManaged(&counter, num_blocks * sizeof(unsigned));
    // BUGFIX: the progress counters must start at 0 — cudaMallocManaged does
    // not guarantee zeroed memory, and the consumer spin-waits on them.
    cudaMemset(counter, 0, num_blocks * sizeof(unsigned));
    cudaMemAdvise(counter, num_blocks * sizeof(unsigned),
                  cudaMemAdviseSetPreferredLocation, cudaCpuDeviceId);
    cudaMalloc(&out_data, size);
    cudaMemset(out_data, 0, size);
    cudaSetDevice(0);
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start, 0);
    producer_kernel<DataT, true><<<num_blocks, num_threads, sh_mem_size>>>(in_data, partial_data, counter, c);
    cudaSetDevice(1);
    consumer_kernel<DataT, true><<<num_blocks, num_threads>>>(out_data, partial_data, counter, c);
    cudaSetDevice(0);
    gpuErrchk(cudaEventRecord(stop, 0));
    gpuErrchk(cudaEventSynchronize(stop));
    float time_in_ms;
    gpuErrchk(cudaEventElapsedTime(&time_in_ms, start, stop));
    gpuErrchk(cudaSetDevice(0));
    gpuErrchk(cudaDeviceSynchronize());
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    cudaFree(in_data);
    cudaFree(partial_data);
    cudaFree(counter);
    // BUGFIX: out_data (allocated on device 1) was leaked; runMeasurement is
    // called once per sweep point from main(), so the leak grew every call.
    cudaSetDevice(1);
    cudaFree(out_data);
    cudaSetDevice(0);
    curandDestroyGenerator(gen);
    cout << time_in_ms;
    return 0;
}
// Sweep thread counts x block counts x chunk counts, running one measurement
// per combination, and print all timings as a single Python expression
// "np.array(((...),...))" so the output can be pasted straight into analysis
// code. Nesting order: threads (outermost) -> blocks -> chunk counts.
int main()
{   cout << "np.array(";
    int tArray[] = {512,1024};
    int tLength = 2;
    int bArray[] = {1,2,4,8};
    int bLength = 4;
    int cArray[] = {256, 1024, 4096, 16384, 65536};
    int cLength = 5;
    cout << "(";
    for (int t = 0; t < tLength; t++)
    {
        cout << "(";
        for (int b = 0; b < bLength; b++)
        {
            cout << "(";
            for (int c = 0; c < cLength; c++)
            {
                //run kernel
                runMeasurement(tArray[t], bArray[b], cArray[c]);
                // comma-separate tuple entries (no trailing comma)
                if(c < cLength - 1)
                {
                    cout << ","<<endl;
                }
            }
            cout << ")";
            if(b < bLength - 1)
            {
                cout << ",";
            }
        }
        cout << ")" ;
        if(t < tLength - 1)
        {
            cout << ",";
        }
    }
    cout << ")";
    // runMeasurement(512,4,4096); //b, t, c
    cout << ")" << std::endl;
}
|
8414c34c9b078a9808f5aab024dfce59fcc5807b.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#define BOOST_TEST_DYN_LINK
#include <boost/test/unit_test.hpp>
#include "VCluster/VCluster.hpp"
#include <Vector/vector_dist.hpp>
#include "Vector/tests/vector_dist_util_unit_tests.hpp"
#define SUB_UNIT_FACTOR 1024
// Kernel: shift every particle position by +0.05 in each of the `dim`
// coordinates. GET_PARTICLE is an OpenFPM macro that derives the particle
// index from the thread id (presumably with a bounds guard — defined in the
// OpenFPM headers, confirm there).
template<unsigned int dim , typename vector_dist_type>
__global__ void move_parts_gpu_test(vector_dist_type vd)
{
    auto p = GET_PARTICLE(vd);
#pragma unroll
    for (int i = 0 ; i < dim ; i++)
    {
        vd.getPos(p)[i] += 0.05;
    }
}
BOOST_AUTO_TEST_SUITE( vector_dist_gpu_test )
// Print "<test> <sz>" from the master rank (process 0) only, so multi-process
// runs do not duplicate the message.
void print_test(std::string test, size_t sz)
{
    if (create_vcluster().getProcessUnitID() == 0)
        std::cout << test << " " << sz << "\n";
}
// Kernel: derive each particle's scalar property 0 and vector property 1
// from its position (sum of coordinates, and pairwise coordinate sums).
// The host-side checks later recompute the same expressions to validate
// device execution.
__global__ void initialize_props(vector_dist_ker<3, float, aggregate<float, float [3], float[3]>> vd)
{
    auto p = GET_PARTICLE(vd);
    vd.template getProp<0>(p) = vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
    vd.template getProp<1>(p)[0] = vd.getPos(p)[0] + vd.getPos(p)[1];
    vd.template getProp<1>(p)[1] = vd.getPos(p)[0] + vd.getPos(p)[2];
    vd.template getProp<1>(p)[2] = vd.getPos(p)[1] + vd.getPos(p)[2];
}
// Kernel: for each particle p, accumulate a pseudo-force over its cell-list
// neighbours twice — once reading neighbours through the SORTED particle
// view (get_sort / vd_sort) into prop 1, and once through the unsorted view
// (get / vd) into prop 2. The host later checks both agree with a CPU
// reference, validating that the two neighbour-access paths are consistent.
// Each contribution is prop0(q) times the unit vector from p to q.
template<typename T,typename CellList_type>
__global__ void calculate_force(vector_dist_ker<3, T, aggregate<T, T[3], T [3]>> vd,
                                vector_dist_ker<3, T, aggregate<T, T[3], T [3]>> vd_sort,
                                CellList_type cl,
                                int rank)
{
    auto p = GET_PARTICLE(vd);
    Point<3,T> xp = vd.getPos(p);
    auto it = cl.getNNIterator(cl.getCell(xp));
    Point<3,T> force1({0.0,0.0,0.0});
    Point<3,T> force2({0.0,0.0,0.0});
    while (it.isNext())
    {
        auto q1 = it.get_sort();
        auto q2 = it.get();
        // skip self-interaction (q2 is the unsorted id, comparable with p)
        if (q2 == p) {++it; continue;}
        Point<3,T> xq_1 = vd_sort.getPos(q1);
        Point<3,T> xq_2 = vd.getPos(q2);
        Point<3,T> r1 = xq_1 - xp;
        Point<3,T> r2 = xq_2 - xp;
        // Normalize
        r1 /= r1.norm();
        r2 /= r2.norm();
        force1 += vd_sort.template getProp<0>(q1)*r1;
        force2 += vd.template getProp<0>(q2)*r2;
        ++it;
    }
    vd.template getProp<1>(p)[0] = force1.get(0);
    vd.template getProp<1>(p)[1] = force1.get(1);
    vd.template getProp<1>(p)[2] = force1.get(2);
    vd.template getProp<2>(p)[0] = force2.get(0);
    vd.template getProp<2>(p)[1] = force2.get(1);
    vd.template getProp<2>(p)[2] = force2.get(2);
}
// Kernel: same pseudo-force accumulation as calculate_force, but operating
// entirely on the SORTED particle ordering (vd is the sorted view and p is a
// sorted index obtained via GET_PARTICLE_SORT). Results go into prop 1 of
// the sorted view and are merged back to the original order on the host via
// merge_sort<1>().
template<typename T, typename CellList_type>
__global__ void calculate_force_full_sort(vector_dist_ker<3, T, aggregate<T, T[3], T [3]>> vd,
                                          CellList_type cl, int rank)
{
    unsigned int p;
    GET_PARTICLE_SORT(p,cl);
    Point<3,T> xp = vd.getPos(p);
    auto it = cl.getNNIterator(cl.getCell(xp));
    Point<3,T> force1({0.0,0.0,0.0});
    while (it.isNext())
    {
        auto q1 = it.get_sort();
        // skip self-interaction (both p and q1 are sorted indices here)
        if (q1 == p) {++it; continue;}
        Point<3,T> xq_1 = vd.getPos(q1);
        Point<3,T> r1 = xq_1 - xp;
        // Normalize
        r1 /= r1.norm();
        force1 += vd.template getProp<0>(q1)*r1;
        ++it;
    }
    vd.template getProp<1>(p)[0] = force1.get(0);
    vd.template getProp<1>(p)[1] = force1.get(1);
    vd.template getProp<1>(p)[2] = force1.get(2);
}
// Host-side validation: recompute the pseudo-force for every local particle
// with a CPU cell list and compare it — within an absolute tolerance of
// 3e-4 — against the two GPU results stored in props 1 and 2. Also checks
// props 1 and 2 agree with each other. Returns true when everything matches;
// logs the first mismatch and stops early otherwise.
template<typename CellList_type, typename vector_type>
bool check_force(CellList_type & NN_cpu, vector_type & vd)
{
    typedef typename vector_type::stype St;
    auto it6 = vd.getDomainIterator();
    bool match = true;
    while (it6.isNext())
    {
        auto p = it6.get();
        Point<3,St> xp = vd.getPos(p);
        // Calculate on CPU
        Point<3,St> force({0.0,0.0,0.0});
        auto NNc = NN_cpu.getNNIterator(NN_cpu.getCell(xp));
        while (NNc.isNext())
        {
            auto q = NNc.get();
            if (q == p.getKey()) {++NNc; continue;}
            Point<3,St> xq_2 = vd.getPos(q);
            Point<3,St> r2 = xq_2 - xp;
            // Normalize
            r2 /= r2.norm();
            force += vd.template getProp<0>(q)*r2;
            ++NNc;
        }
        // GPU sorted-path result vs GPU unsorted-path result ...
        match &= fabs(vd.template getProp<1>(p)[0] - vd.template getProp<2>(p)[0]) < 0.0003;
        match &= fabs(vd.template getProp<1>(p)[1] - vd.template getProp<2>(p)[1]) < 0.0003;
        match &= fabs(vd.template getProp<1>(p)[2] - vd.template getProp<2>(p)[2]) < 0.0003;
        // ... and GPU result vs CPU reference
        match &= fabs(vd.template getProp<1>(p)[0] - force.get(0)) < 0.0003;
        match &= fabs(vd.template getProp<1>(p)[1] - force.get(1)) < 0.0003;
        match &= fabs(vd.template getProp<1>(p)[2] - force.get(2)) < 0.0003;
        if (match == false)
        {
            std::cout << "ERROR: " << vd.template getProp<1>(p)[0] << " " << vd.template getProp<2>(p)[0] << std::endl;
            std::cout << "ERROR: " << vd.template getProp<1>(p)[1] << " " << vd.template getProp<2>(p)[1] << std::endl;
            std::cout << "ERROR: " << vd.template getProp<1>(p)[2] << " " << vd.template getProp<2>(p)[2] << std::endl;
            std::cout << "ERROR2: " << vd.template getProp<1>(p)[0] << " " << force.get(0) << std::endl;
            std::cout << "ERROR2: " << vd.template getProp<1>(p)[1] << " " << force.get(1) << std::endl;
            std::cout << "ERROR2: " << vd.template getProp<1>(p)[2] << " " << force.get(2) << std::endl;
            break;
        }
        ++it6;
    }
    return match;
}
// Test: CPU-side map() + ghost_get() on a GPU particle vector must transport
// properties unchanged — every particle (domain AND ghost) must still satisfy
// the position-derived property relations it was initialized with, and the
// global particle count including ghosts must exceed the 1000 domain
// particles (i.e. ghosts were actually created).
BOOST_AUTO_TEST_CASE( vector_dist_gpu_ghost_get )
{
    auto & v_cl = create_vcluster();
    if (v_cl.size() > 16)
    {return;}
    Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
    // set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
    Ghost<3,float> g(0.1);
    // Boundary conditions
    size_t bc[3]={PERIODIC,PERIODIC,PERIODIC};
    vector_dist_gpu<3,float,aggregate<float,float[3],float[3]>> vd(1000,domain,bc,g);
    auto it = vd.getDomainIterator();
    while (it.isNext())
    {
        auto p = it.get();
        vd.getPos(p)[0] = (float)rand() / RAND_MAX;
        vd.getPos(p)[1] = (float)rand() / RAND_MAX;
        vd.getPos(p)[2] = (float)rand() / RAND_MAX;
        vd.template getProp<0>(p) = vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
        vd.template getProp<1>(p)[0] = vd.getPos(p)[0] + vd.getPos(p)[1];
        vd.template getProp<1>(p)[1] = vd.getPos(p)[0] + vd.getPos(p)[2];
        vd.template getProp<1>(p)[2] = vd.getPos(p)[1] + vd.getPos(p)[2];
        vd.template getProp<2>(p)[0] = vd.getPos(p)[0] + 3.0*vd.getPos(p)[1];
        vd.template getProp<2>(p)[1] = vd.getPos(p)[0] + 3.0*vd.getPos(p)[2];
        vd.template getProp<2>(p)[2] = vd.getPos(p)[1] + 3.0*vd.getPos(p)[2];
        ++it;
    }
    // Ok we redistribute the particles (CPU based)
    vd.map();
    vd.template ghost_get<0,1,2>();
    // Now we check the the ghost contain the correct information
    bool check = true;
    auto itg = vd.getDomainAndGhostIterator();
    while (itg.isNext())
    {
        auto p = itg.get();
        check &= (vd.template getProp<0>(p) == vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2]);
        check &= (vd.template getProp<1>(p)[0] == vd.getPos(p)[0] + vd.getPos(p)[1]);
        check &= (vd.template getProp<1>(p)[1] == vd.getPos(p)[0] + vd.getPos(p)[2]);
        check &= (vd.template getProp<1>(p)[2] == vd.getPos(p)[1] + vd.getPos(p)[2]);
        check &= (vd.template getProp<2>(p)[0] == vd.getPos(p)[0] + 3.0*vd.getPos(p)[1]);
        check &= (vd.template getProp<2>(p)[1] == vd.getPos(p)[0] + 3.0*vd.getPos(p)[2]);
        check &= (vd.template getProp<2>(p)[2] == vd.getPos(p)[1] + 3.0*vd.getPos(p)[2]);
        ++itg;
    }
    size_t tot_s = vd.size_local_with_ghost();
    v_cl.sum(tot_s);
    v_cl.execute();
    // We check that we check something
    BOOST_REQUIRE(tot_s > 1000);
}
// Validate a GPU cell list against a CPU reference in two ways:
//  1) run calculate_force (mixed sorted/unsorted neighbour access) and check
//     both results against the CPU force via check_force();
//  2) reset prop 1, rerun with the fully-sorted kernel, merge the sorted
//     results back and check again.
template<typename vector_type, typename CellList_type, typename CellList_type_cpu>
void check_cell_list_cpu_and_gpu(vector_type & vd, CellList_type & NN, CellList_type_cpu & NN_cpu)
{
    auto it5 = vd.getDomainIteratorGPU(32);
    hipLaunchKernelGGL(( calculate_force<typename vector_type::stype,decltype(NN.toKernel())>), dim3(it5.wthr),dim3(it5.thr), 0, 0, vd.toKernel(),vd.toKernel_sorted(),NN.toKernel(),create_vcluster().rank());
    vd.template deviceToHostProp<1,2>();
    bool test = check_force(NN_cpu,vd);
    BOOST_REQUIRE_EQUAL(test,true);
    // We reset the property 1 on device
    auto rst = vd.getDomainIterator();
    while (rst.isNext())
    {
        auto p = rst.get();
        vd.template getProp<1>(p)[0] = 0.0;
        vd.template getProp<1>(p)[1] = 0.0;
        vd.template getProp<1>(p)[2] = 0.0;
        ++rst;
    }
    vd.template hostToDeviceProp<1>();
    // We do exactly the same test as before, but now we completely use the sorted version
    hipLaunchKernelGGL(( calculate_force_full_sort<typename vector_type::stype,decltype(NN.toKernel())>), dim3(it5.wthr),dim3(it5.thr), 0, 0, vd.toKernel_sorted(),NN.toKernel(),create_vcluster().rank());
    vd.template merge_sort<1>(NN);
    vd.template deviceToHostProp<1>();
    test = check_force(NN_cpu,vd);
    BOOST_REQUIRE_EQUAL(test,true);
}
// End-to-end GPU vector test: random particles are mapped (CPU path), then
// positions are pushed to the device, properties initialized on-device and
// verified on the host, ghost_get is exercised twice (crash check), and
// finally the GPU cell list is validated against a CPU cell list — both
// freshly built and after a clear()+updateCellList() cycle.
BOOST_AUTO_TEST_CASE( vector_dist_gpu_test)
{
    auto & v_cl = create_vcluster();
    if (v_cl.size() > 16)
    {return;}
    Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
    // set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
    Ghost<3,float> g(0.1);
    // Boundary conditions
    size_t bc[3]={NON_PERIODIC,NON_PERIODIC,NON_PERIODIC};
    vector_dist_gpu<3,float,aggregate<float,float[3],float[3]>> vd(10000,domain,bc,g);
    // per-rank seed so processes generate different particles
    srand(55067*create_vcluster().rank());
    auto it = vd.getDomainIterator();
    while (it.isNext())
    {
        auto p = it.get();
        int x = rand();
        int y = rand();
        int z = rand();
        vd.getPos(p)[0] = (float)x / RAND_MAX;
        vd.getPos(p)[1] = (float)y / RAND_MAX;
        vd.getPos(p)[2] = (float)z / RAND_MAX;
        Point<3,float> xp = vd.getPos(p);
        ++it;
    }
    // Ok we redistribute the particles (CPU based)
    vd.map();
    size_t size_l = vd.size_local();
    v_cl.sum(size_l);
    v_cl.execute();
    BOOST_REQUIRE_EQUAL(size_l,10000);
    auto & ct = vd.getDecomposition();
    bool noOut = true;
    size_t cnt = 0;
    auto it2 = vd.getDomainIterator();
    while (it2.isNext())
    {
        auto p = it2.get();
        noOut &= ct.isLocal(vd.getPos(p));
        cnt++;
        ++it2;
    }
    BOOST_REQUIRE_EQUAL(noOut,true);
    BOOST_REQUIRE_EQUAL(cnt,vd.size_local());
    // now we offload all the properties
    auto it3 = vd.getDomainIteratorGPU();
    // offload to device
    vd.hostToDevicePos();
    hipLaunchKernelGGL(( initialize_props), dim3(it3.wthr),dim3(it3.thr), 0, 0, vd.toKernel());
    // now we check what we initialized
    vd.deviceToHostProp<0,1>();
    auto it4 = vd.getDomainIterator();
    while (it4.isNext())
    {
        auto p = it4.get();
        BOOST_REQUIRE_CLOSE(vd.template getProp<0>(p),vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2],0.01);
        BOOST_REQUIRE_CLOSE(vd.template getProp<1>(p)[0],vd.getPos(p)[0] + vd.getPos(p)[1],0.01);
        BOOST_REQUIRE_CLOSE(vd.template getProp<1>(p)[1],vd.getPos(p)[0] + vd.getPos(p)[2],0.01);
        BOOST_REQUIRE_CLOSE(vd.template getProp<1>(p)[2],vd.getPos(p)[1] + vd.getPos(p)[2],0.01);
        ++it4;
    }
    // here we do a ghost_get
    vd.ghost_get<0>();
    // Double ghost get to check crashes
    vd.ghost_get<0>();
    // we re-offload what we received
    vd.hostToDevicePos();
    vd.template hostToDeviceProp<0>();
    auto NN = vd.getCellListGPU(0.1);
    auto NN_cpu = vd.getCellList(0.1);
    check_cell_list_cpu_and_gpu(vd,NN,NN_cpu);
    auto NN_up = vd.getCellListGPU(0.1);
    NN_up.clear();
    vd.updateCellList(NN_up);
    check_cell_list_cpu_and_gpu(vd,NN_up,NN_cpu);
}
template<typename St>
void vdist_calc_gpu_test()
{
auto & v_cl = create_vcluster();
if (v_cl.size() > 16)
{return;}
Box<3,St> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
// set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
Ghost<3,St> g(0.1);
// Boundary conditions
size_t bc[3]={PERIODIC,PERIODIC,PERIODIC};
//! [Create a gpu vector]
vector_dist_gpu<3,St,aggregate<St,St[3],St[3]>> vd(1000,domain,bc,g);
//! [Create a gpu vector]
//! [Fill gpu vector and move to GPU]
srand(v_cl.rank()*10000);
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.getPos(p)[0] = (St)rand() / RAND_MAX;
vd.getPos(p)[1] = (St)rand() / RAND_MAX;
vd.getPos(p)[2] = (St)rand() / RAND_MAX;
vd.template getProp<0>(p) = vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
vd.template getProp<1>(p)[0] = vd.getPos(p)[0];
vd.template getProp<1>(p)[1] = vd.getPos(p)[1];
vd.template getProp<1>(p)[2] = vd.getPos(p)[2];
vd.template getProp<2>(p)[0] = vd.getPos(p)[0] + vd.getPos(p)[1];
vd.template getProp<2>(p)[1] = vd.getPos(p)[0] + vd.getPos(p)[2];
vd.template getProp<2>(p)[2] = vd.getPos(p)[1] + vd.getPos(p)[2];
++it;
}
// move on device
vd.hostToDevicePos();
vd.template hostToDeviceProp<0,1,2>();
// Ok we redistribute the particles (GPU based)
vd.map(RUN_ON_DEVICE);
//! [Fill gpu vector and move to GPU]
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// Reset the host part
auto it3 = vd.getDomainIterator();
while (it3.isNext())
{
auto p = it3.get();
vd.getPos(p)[0] = 1.0;
vd.getPos(p)[1] = 1.0;
vd.getPos(p)[2] = 1.0;
vd.template getProp<0>(p) = 0.0;
vd.template getProp<0>(p) = 0.0;
vd.template getProp<0>(p) = 0.0;
vd.template getProp<0>(p) = 0.0;
vd.template getProp<0>(p) = 0.0;
vd.template getProp<0>(p) = 0.0;
vd.template getProp<0>(p) = 0.0;
++it3;
}
// we move from Device to CPU
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// Check
auto it2 = vd.getDomainIterator();
bool match = true;
while (it2.isNext())
{
auto p = it2.get();
match &= vd.template getProp<0>(p) == vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
match &= vd.template getProp<1>(p)[0] == vd.getPos(p)[0];
match &= vd.template getProp<1>(p)[1] == vd.getPos(p)[1];
match &= vd.template getProp<1>(p)[2] == vd.getPos(p)[2];
match &= vd.template getProp<2>(p)[0] == vd.getPos(p)[0] + vd.getPos(p)[1];
match &= vd.template getProp<2>(p)[1] == vd.getPos(p)[0] + vd.getPos(p)[2];
match &= vd.template getProp<2>(p)[2] == vd.getPos(p)[1] + vd.getPos(p)[2];
++it2;
}
BOOST_REQUIRE_EQUAL(match,true);
// count local particles
size_t l_cnt = 0;
size_t nl_cnt = 0;
size_t n_out = 0;
// Domain + ghost box
Box<3,St> dom_ext = domain;
dom_ext.enlarge(g);
auto it5 = vd.getDomainIterator();
count_local_n_local<3>(vd,it5,bc,domain,dom_ext,l_cnt,nl_cnt,n_out);
BOOST_REQUIRE_EQUAL(n_out,0);
BOOST_REQUIRE_EQUAL(l_cnt,vd.size_local());
// we do 10 gpu steps (using a cpu vector to check that map and ghost get work as expented)
for (size_t i = 0 ; i < 10 ; i++)
{
vd.map(RUN_ON_DEVICE);
CUDA_SAFE(hipGetLastError());
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// To test we copy on a cpu distributed vector and we do a map
vector_dist<3,St,aggregate<St,St[3],St[3]>> vd_cpu(vd.getDecomposition().template duplicate_convert<HeapMemory,memory_traits_lin>(),0);
auto itc = vd.getDomainIterator();
while (itc.isNext())
{
auto p = itc.get();
vd_cpu.add();
vd_cpu.getLastPos()[0] = vd.getPos(p)[0];
vd_cpu.getLastPos()[1] = vd.getPos(p)[1];
vd_cpu.getLastPos()[2] = vd.getPos(p)[2];
vd_cpu.template getLastProp<0>() = vd.template getProp<0>(p);
vd_cpu.template getLastProp<1>()[0] = vd.template getProp<1>(p)[0];
vd_cpu.template getLastProp<1>()[1] = vd.template getProp<1>(p)[1];
vd_cpu.template getLastProp<1>()[2] = vd.template getProp<1>(p)[2];
vd_cpu.template getLastProp<2>()[0] = vd.template getProp<2>(p)[0];
vd_cpu.template getLastProp<2>()[1] = vd.template getProp<2>(p)[1];
vd_cpu.template getLastProp<2>()[2] = vd.template getProp<2>(p)[2];
++itc;
}
vd_cpu.template ghost_get<0,1,2>();
//! [Fill the ghost on GPU]
vd.template ghost_get<0,1,2>(RUN_ON_DEVICE);
//! [Fill the ghost on GPU]
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
match = true;
// Particle on the gpu ghost and cpu ghost are not ordered in the same way so we have to reorder
struct part
{
Point<3,St> xp;
St prp0;
St prp1[3];
St prp2[3];
bool operator<(const part & tmp) const
{
if (xp.get(0) < tmp.xp.get(0))
{return true;}
else if (xp.get(0) > tmp.xp.get(0))
{return false;}
if (xp.get(1) < tmp.xp.get(1))
{return true;}
else if (xp.get(1) > tmp.xp.get(1))
{return false;}
if (xp.get(2) < tmp.xp.get(2))
{return true;}
else if (xp.get(2) > tmp.xp.get(2))
{return false;}
return false;
}
};
openfpm::vector<part> cpu_sort;
openfpm::vector<part> gpu_sort;
cpu_sort.resize(vd_cpu.size_local_with_ghost() - vd_cpu.size_local());
gpu_sort.resize(vd.size_local_with_ghost() - vd.size_local());
BOOST_REQUIRE_EQUAL(cpu_sort.size(),gpu_sort.size());
size_t cnt = 0;
auto itc2 = vd.getGhostIterator();
while (itc2.isNext())
{
auto p = itc2.get();
cpu_sort.get(cnt).xp.get(0) = vd_cpu.getPos(p)[0];
gpu_sort.get(cnt).xp.get(0) = vd.getPos(p)[0];
cpu_sort.get(cnt).xp.get(1) = vd_cpu.getPos(p)[1];
gpu_sort.get(cnt).xp.get(1) = vd.getPos(p)[1];
cpu_sort.get(cnt).xp.get(2) = vd_cpu.getPos(p)[2];
gpu_sort.get(cnt).xp.get(2) = vd.getPos(p)[2];
cpu_sort.get(cnt).prp0 = vd_cpu.template getProp<0>(p);
gpu_sort.get(cnt).prp0 = vd.template getProp<0>(p);
cpu_sort.get(cnt).prp1[0] = vd_cpu.template getProp<1>(p)[0];
gpu_sort.get(cnt).prp1[0] = vd.template getProp<1>(p)[0];
cpu_sort.get(cnt).prp1[1] = vd_cpu.template getProp<1>(p)[1];
gpu_sort.get(cnt).prp1[1] = vd.template getProp<1>(p)[1];
cpu_sort.get(cnt).prp1[2] = vd_cpu.template getProp<1>(p)[2];
gpu_sort.get(cnt).prp1[2] = vd.template getProp<1>(p)[2];
cpu_sort.get(cnt).prp2[0] = vd_cpu.template getProp<2>(p)[0];
gpu_sort.get(cnt).prp2[0] = vd.template getProp<2>(p)[0];
cpu_sort.get(cnt).prp2[1] = vd_cpu.template getProp<2>(p)[1];
gpu_sort.get(cnt).prp2[1] = vd.template getProp<2>(p)[1];
cpu_sort.get(cnt).prp2[2] = vd_cpu.template getProp<2>(p)[2];
gpu_sort.get(cnt).prp2[2] = vd.template getProp<2>(p)[2];
++cnt;
++itc2;
}
cpu_sort.sort();
gpu_sort.sort();
for (size_t i = 0 ; i < cpu_sort.size() ; i++)
{
match &= cpu_sort.get(i).xp.get(0) == gpu_sort.get(i).xp.get(0);
match &= cpu_sort.get(i).xp.get(1) == gpu_sort.get(i).xp.get(1);
match &= cpu_sort.get(i).xp.get(2) == gpu_sort.get(i).xp.get(2);
match &= cpu_sort.get(i).prp0 == gpu_sort.get(i).prp0;
match &= cpu_sort.get(i).prp1[0] == gpu_sort.get(i).prp1[0];
match &= cpu_sort.get(i).prp1[1] == gpu_sort.get(i).prp1[1];
match &= cpu_sort.get(i).prp1[2] == gpu_sort.get(i).prp1[2];
match &= cpu_sort.get(i).prp2[0] == gpu_sort.get(i).prp2[0];
match &= cpu_sort.get(i).prp2[1] == gpu_sort.get(i).prp2[1];
match &= cpu_sort.get(i).prp2[2] == gpu_sort.get(i).prp2[2];
}
BOOST_REQUIRE_EQUAL(match,true);
// move particles on gpu
auto ite = vd.getDomainIteratorGPU();
hipLaunchKernelGGL(( move_parts_gpu_test<3,decltype(vd.toKernel())>), dim3(ite.wthr),dim3(ite.thr), 0, 0, vd.toKernel());
}
}
// Run the GPU map()/ghost_get() round-trip test (vdist_calc_gpu_test) for
// both single- and double-precision particle coordinate types.
BOOST_AUTO_TEST_CASE( vector_dist_map_on_gpu_test)
{
vdist_calc_gpu_test<float>();
vdist_calc_gpu_test<double>();
}
// Fill one scalar property of each type (float, double, int, size_t) with the
// sequence 1..N on the host, offload to the device, and verify the device-side
// reduce_local<_add_> against the closed form N*(N+1)/2 and reduce_local<_max_>
// against N.
BOOST_AUTO_TEST_CASE(vector_dist_reduce)
{
auto & v_cl = create_vcluster();
// test is only meaningful up to 16 ranks
if (v_cl.size() > 16)
{return;}
Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
// set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
Ghost<3,float> g(0.1);
// Boundary conditions
size_t bc[3]={PERIODIC,PERIODIC,PERIODIC};
vector_dist_gpu<3,float,aggregate<float,double,int,size_t>> vd(5000*v_cl.size(),domain,bc,g);
auto it = vd.getDomainIterator();
// running counters: each particle gets the next value of the sequence 1,2,3,...
float fc = 1.0;
double dc = 1.0;
int ic = 1.0;
size_t sc = 1.0;
while(it.isNext())
{
auto p = it.get();
vd.template getProp<0>(p) = fc;
vd.template getProp<1>(p) = dc;
vd.template getProp<2>(p) = ic;
vd.template getProp<3>(p) = sc;
fc += 1.0;
dc += 1.0;
ic += 1;
sc += 1;
++it;
}
// push the filled properties to the device; reductions run on device data
vd.template hostToDeviceProp<0,1,2,3>();
float redf = reduce_local<0,_add_>(vd);
double redd = reduce_local<1,_add_>(vd);
int redi = reduce_local<2,_add_>(vd);
size_t reds = reduce_local<3,_add_>(vd);
// sum of 1..N is N*(N+1)/2
BOOST_REQUIRE_EQUAL(redf,(vd.size_local()+1.0)*(vd.size_local())/2.0);
BOOST_REQUIRE_EQUAL(redd,(vd.size_local()+1.0)*(vd.size_local())/2.0);
BOOST_REQUIRE_EQUAL(redi,(vd.size_local()+1)*(vd.size_local())/2);
BOOST_REQUIRE_EQUAL(reds,(vd.size_local()+1)*(vd.size_local())/2);
float redf2 = reduce_local<0,_max_>(vd);
double redd2 = reduce_local<1,_max_>(vd);
int redi2 = reduce_local<2,_max_>(vd);
size_t reds2 = reduce_local<3,_max_>(vd);
// max of 1..N is N
BOOST_REQUIRE_EQUAL(redf2,vd.size_local());
BOOST_REQUIRE_EQUAL(redd2,vd.size_local());
BOOST_REQUIRE_EQUAL(redi2,vd.size_local());
BOOST_REQUIRE_EQUAL(reds2,vd.size_local());
}
// Core of the dynamic-load-balancing-on-GPU tests.
//
// k particles are created on rank 0 only (clustered in a corner of the
// domain), mapped and ghost-synchronized on the device, and the decomposition
// is rebalanced with a ModelSquare cost model. The particles are then advected
// diagonally for 25 steps, checking at each step that:
//  * GPU and CPU cell lists produce the same forces (check_cell_list_cpu_and_gpu)
//  * the Verlet neighborhood count stored in prop<0> is unchanged
//  * after redecompose() the per-processor load stays balanced (within 10%)
//
// \param k     number of particles inserted on rank 0
// \param r_cut interaction cut-off radius
void vector_dist_dlb_on_cuda_impl(size_t k,double r_cut)
{
typedef vector_dist_gpu<3,double,aggregate<double,double[3],double[3]>> vector_type;
Vcluster<> & v_cl = create_vcluster();
if (v_cl.getProcessingUnits() > 8)
return;
Box<3,double> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
Ghost<3,double> g(0.1);
size_t bc[3] = {PERIODIC,PERIODIC,PERIODIC};
vector_type vd(0,domain,bc,g,DEC_GRAN(2048));
// Only processor 0 initially adds particles, on a corner of the domain,
// to create a maximally unbalanced starting configuration
if (v_cl.getProcessUnitID() == 0)
{
for(size_t i = 0 ; i < k ; i++)
{
vd.add();
vd.getLastPos()[0] = ((double)rand())/RAND_MAX * 0.3;
vd.getLastPos()[1] = ((double)rand())/RAND_MAX * 0.3;
vd.getLastPos()[2] = ((double)rand())/RAND_MAX * 0.3;
}
}
// Move to GPU
vd.hostToDevicePos();
vd.template hostToDeviceProp<0>();
vd.map(RUN_ON_DEVICE);
vd.template ghost_get<>(RUN_ON_DEVICE);
// now move to CPU
vd.deviceToHostPos();
vd.template deviceToHostProp<0>();
// Get the neighborhood of each particle
auto VV = vd.getVerlet(r_cut);
// store the number of neighbors of each particle in prop<0>
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.template getProp<0>(p) = VV.getNNPart(p.getKey());
++it;
}
// Move to GPU
vd.template hostToDeviceProp<0>();
ModelSquare md;
md.factor = 10;
vd.addComputationCosts(md);
vd.getDecomposition().decompose();
vd.map(RUN_ON_DEVICE);
vd.deviceToHostPos();
// Move info to CPU for addComputationCosts
vd.addComputationCosts(md);
openfpm::vector<size_t> loads;
size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
v_cl.allGather(load,loads);
v_cl.execute();
// after decompose() every processor's load must agree within 7%
for (size_t i = 0 ; i < loads.size() ; i++)
{
double load_f = load;
double load_fc = loads.get(i);
BOOST_REQUIRE_CLOSE(load_f,load_fc,7.0);
}
BOOST_REQUIRE(vd.size_local() != 0);
Point<3,double> v({1.0,1.0,1.0});
for (size_t i = 0 ; i < 25 ; i++)
{
// move particles to CPU and shift them by 0.09 along each axis
vd.deviceToHostPos();
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.getPos(p)[0] += v.get(0) * 0.09;
vd.getPos(p)[1] += v.get(1) * 0.09;
vd.getPos(p)[2] += v.get(2) * 0.09;
++it;
}
//Back to GPU
vd.hostToDevicePos();
vd.map(RUN_ON_DEVICE);
vd.template ghost_get<0>(RUN_ON_DEVICE);
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// Check calc forces: GPU and CPU cell lists must agree
auto NN_gpu = vd.getCellListGPU(r_cut);
auto NN_cpu = vd.getCellList(r_cut);
check_cell_list_cpu_and_gpu(vd,NN_gpu,NN_cpu);
// the neighbor count must be invariant under the rigid translation
auto VV2 = vd.getVerlet(r_cut);
auto it2 = vd.getDomainIterator();
bool match = true;
while (it2.isNext())
{
auto p = it2.get();
match &= vd.template getProp<0>(p) == VV2.getNNPart(p.getKey());
++it2;
}
BOOST_REQUIRE_EQUAL(match,true);
ModelSquare md;
vd.addComputationCosts(md);
vd.getDecomposition().redecompose(200);
vd.map(RUN_ON_DEVICE);
BOOST_REQUIRE(vd.size_local() != 0);
vd.template ghost_get<0>(RUN_ON_DEVICE);
vd.deviceToHostPos();
vd.template deviceToHostProp<0>();
vd.addComputationCosts(md);
openfpm::vector<size_t> loads;
size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
v_cl.allGather(load,loads);
v_cl.execute();
// after redecompose() the loads must agree within 10%
for (size_t i = 0 ; i < loads.size() ; i++)
{
double load_f = load;
double load_fc = loads.get(i);
BOOST_REQUIRE_CLOSE(load_f,load_fc,10.0);
}
}
}
// DLB-on-GPU smoke test: 50k particles, cut-off 0.01
BOOST_AUTO_TEST_CASE(vector_dist_dlb_on_cuda)
{
vector_dist_dlb_on_cuda_impl(50000,0.01);
}
// Larger DLB-on-GPU test: 1M particles, cut-off 0.01.
// Only meaningful with more than 3 ranks; skipped otherwise.
BOOST_AUTO_TEST_CASE(vector_dist_dlb_on_cuda2)
{
if (create_vcluster().size() <= 3)
{return;}
vector_dist_dlb_on_cuda_impl(1000000,0.01);
}
// Stress DLB-on-GPU test: 15M particles, cut-off 0.005.
// Only runs with at least 8 ranks.
BOOST_AUTO_TEST_CASE(vector_dist_dlb_on_cuda3)
{
if (create_vcluster().size() < 8)
{return;}
vector_dist_dlb_on_cuda_impl(15000000,0.005);
}
// Verify ghost_get with KEEP_PROPERTIES on the device.
//
// Properties are initialized to known base values (1000..14000), then the test
// alternates between (even iterations) moving/redecomposing the particles and
// (odd iterations) incrementing every property by 1 and checking that a
// KEEP_PROPERTIES ghost_get delivers ghost particles whose properties equal
// base + initial value.
BOOST_AUTO_TEST_CASE(vector_dist_keep_prop_on_cuda)
{
typedef vector_dist_gpu<3,double,aggregate<double,double[3],double[3][3]>> vector_type;
Vcluster<> & v_cl = create_vcluster();
if (v_cl.getProcessingUnits() > 8)
return;
Box<3,double> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
Ghost<3,double> g(0.1);
size_t bc[3] = {PERIODIC,PERIODIC,PERIODIC};
vector_type vd(0,domain,bc,g,DEC_GRAN(2048));
// Only processor 0 initially adds particles, on a corner of the domain
if (v_cl.getProcessUnitID() == 0)
{
for(size_t i = 0 ; i < 50000 ; i++)
{
vd.add();
vd.getLastPos()[0] = ((double)rand())/RAND_MAX * 0.3;
vd.getLastPos()[1] = ((double)rand())/RAND_MAX * 0.3;
vd.getLastPos()[2] = ((double)rand())/RAND_MAX * 0.3;
}
}
// Move to GPU
vd.hostToDevicePos();
vd.template hostToDeviceProp<0>();
vd.map(RUN_ON_DEVICE);
vd.template ghost_get<>(RUN_ON_DEVICE);
// now move to CPU
vd.deviceToHostPos();
vd.template deviceToHostProp<0>();
// initialize every property to a distinct, known base value
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.template getProp<0>(p) = 0.0;
vd.template getProp<1>(p)[0] = 1000.0;
vd.template getProp<1>(p)[1] = 2000.0;
vd.template getProp<1>(p)[2] = 3000.0;
// fixed: was "= 6000,0;" — comma-operator typo for 6000.0
vd.template getProp<2>(p)[0][0] = 6000.0;
vd.template getProp<2>(p)[0][1] = 7000.0;
vd.template getProp<2>(p)[0][2] = 8000.0;
vd.template getProp<2>(p)[1][0] = 9000.0;
vd.template getProp<2>(p)[1][1] = 10000.0;
vd.template getProp<2>(p)[1][2] = 11000.0;
vd.template getProp<2>(p)[2][0] = 12000.0;
vd.template getProp<2>(p)[2][1] = 13000.0;
vd.template getProp<2>(p)[2][2] = 14000.0;
++it;
}
// Move to GPU
vd.template hostToDeviceProp<0,1,2>();
ModelSquare md;
md.factor = 10;
vd.addComputationCosts(md);
vd.getDecomposition().decompose();
vd.map(RUN_ON_DEVICE);
vd.deviceToHostPos();
// Move info to CPU for addComputationCosts
vd.addComputationCosts(md);
openfpm::vector<size_t> loads;
size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
v_cl.allGather(load,loads);
v_cl.execute();
// after decompose() every processor's load must agree within 7%
for (size_t i = 0 ; i < loads.size() ; i++)
{
double load_f = load;
double load_fc = loads.get(i);
BOOST_REQUIRE_CLOSE(load_f,load_fc,7.0);
}
BOOST_REQUIRE(vd.size_local() != 0);
Point<3,double> v({1.0,1.0,1.0});
// number of +1 increments applied so far (odd iterations only)
int base = 0;
for (size_t i = 0 ; i < 25 ; i++)
{
if (i % 2 == 0)
{
// even iteration: shift particles by 0.09 per axis and redecompose
vd.deviceToHostPos();
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.getPos(p)[0] += v.get(0) * 0.09;
vd.getPos(p)[1] += v.get(1) * 0.09;
vd.getPos(p)[2] += v.get(2) * 0.09;
++it;
}
//Back to GPU
vd.hostToDevicePos();
vd.map(RUN_ON_DEVICE);
vd.template ghost_get<>(RUN_ON_DEVICE);
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
ModelSquare md;
vd.addComputationCosts(md);
vd.getDecomposition().redecompose(200);
vd.map(RUN_ON_DEVICE);
BOOST_REQUIRE(vd.size_local() != 0);
vd.template ghost_get<0>(RUN_ON_DEVICE);
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
vd.addComputationCosts(md);
openfpm::vector<size_t> loads;
size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
v_cl.allGather(load,loads);
v_cl.execute();
// after redecompose() the loads must agree within 10%
for (size_t i = 0 ; i < loads.size() ; i++)
{
double load_f = load;
double load_fc = loads.get(i);
BOOST_REQUIRE_CLOSE(load_f,load_fc,10.0);
}
}
else
{
// odd iteration: bump every property by 1 on the host ...
vd.template deviceToHostProp<0,1,2>();
auto it2 = vd.getDomainIterator();
bool match = true;
while (it2.isNext())
{
auto p = it2.get();
vd.template getProp<0>(p) += 1;
vd.template getProp<1>(p)[0] += 1.0;
vd.template getProp<1>(p)[1] += 1.0;
vd.template getProp<1>(p)[2] += 1.0;
vd.template getProp<2>(p)[0][0] += 1.0;
vd.template getProp<2>(p)[0][1] += 1.0;
vd.template getProp<2>(p)[0][2] += 1.0;
vd.template getProp<2>(p)[1][0] += 1.0;
vd.template getProp<2>(p)[1][1] += 1.0;
vd.template getProp<2>(p)[1][2] += 1.0;
vd.template getProp<2>(p)[2][0] += 1.0;
vd.template getProp<2>(p)[2][1] += 1.0;
vd.template getProp<2>(p)[2][2] += 1.0;
++it2;
}
vd.template hostToDeviceProp<0,1,2>();
++base;
// ... and refresh the ghost keeping properties in sync on the device
vd.template ghost_get<0,1,2>(RUN_ON_DEVICE | KEEP_PROPERTIES);
vd.template deviceToHostProp<0,1,2>();
// Check that the ghost contains the correct information
auto itg = vd.getGhostIterator();
while (itg.isNext())
{
auto p = itg.get();
match &= vd.template getProp<0>(p) == base;
match &= vd.template getProp<1>(p)[0] == base + 1000.0;
match &= vd.template getProp<1>(p)[1] == base + 2000.0;
match &= vd.template getProp<1>(p)[2] == base + 3000.0;
match &= vd.template getProp<2>(p)[0][0] == base + 6000.0;
match &= vd.template getProp<2>(p)[0][1] == base + 7000.0;
match &= vd.template getProp<2>(p)[0][2] == base + 8000.0;
match &= vd.template getProp<2>(p)[1][0] == base + 9000.0;
match &= vd.template getProp<2>(p)[1][1] == base + 10000.0;
match &= vd.template getProp<2>(p)[1][2] == base + 11000.0;
match &= vd.template getProp<2>(p)[2][0] == base + 12000.0;
match &= vd.template getProp<2>(p)[2][1] == base + 13000.0;
match &= vd.template getProp<2>(p)[2][2] == base + 14000.0;
++itg;
}
BOOST_REQUIRE_EQUAL(match,true);
}
}
}
// Verify compareHostAndDevicePos/compareHostAndDeviceProp.
//
// Pattern for each property: (1) compare untouched copies -> true;
// (2) perturb one host value -> false; (3) push host to device, perturb the
// same slot with the same value (host == device again) -> true.
BOOST_AUTO_TEST_CASE(vector_dist_compare_host_device)
{
Box<3,double> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
Ghost<3,double> g(0.1);
size_t bc[3] = {PERIODIC,PERIODIC,PERIODIC};
if (create_vcluster().size() >= 16)
{return;}
vector_dist_gpu<3,double,aggregate<double,double[3],double[3][3]>> vdg(10000,domain,bc,g,DEC_GRAN(128));
// randomize positions and all properties on the host
auto it = vdg.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vdg.getPos(p)[0] = (double)rand() / RAND_MAX;
vdg.getPos(p)[1] = (double)rand() / RAND_MAX;
vdg.getPos(p)[2] = (double)rand() / RAND_MAX;
vdg.template getProp<0>(p) = (double)rand() / RAND_MAX;
vdg.template getProp<1>(p)[0] = (double)rand() / RAND_MAX;
vdg.template getProp<1>(p)[1] = (double)rand() / RAND_MAX;
vdg.template getProp<1>(p)[2] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[0][0] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[0][1] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[0][2] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[1][0] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[1][1] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[1][2] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[2][0] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[2][1] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[2][2] = (double)rand() / RAND_MAX;
++it;
}
vdg.map();
// host and device copies are now identical
vdg.hostToDeviceProp<0,1,2>();
vdg.hostToDevicePos();
////////////////////////////////////////////////// POSITION
bool test = vdg.compareHostAndDevicePos(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
// perturb one host position: comparison must now fail
vdg.getPos(100)[0] = 0.99999999;
test = vdg.compareHostAndDevicePos(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
// sync the perturbed host to the device, re-apply the same value on the host
vdg.hostToDevicePos();
vdg.getPos(100)[0] = 0.99999999;
test = vdg.compareHostAndDevicePos(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
////////////////////////////////////////////////// PROP VECTOR
test = vdg.compareHostAndDeviceProp<1>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
vdg.getProp<1>(103)[0] = 0.99999999;
test = vdg.compareHostAndDeviceProp<1>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
vdg.hostToDeviceProp<1>();
vdg.getProp<1>(103)[0] = 0.99999999;
test = vdg.compareHostAndDeviceProp<1>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
////////////////////////////////////////////////// PROP scalar
test = vdg.compareHostAndDeviceProp<0>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
vdg.getProp<0>(105) = 0.99999999;
test = vdg.compareHostAndDeviceProp<0>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
vdg.hostToDeviceProp<0>();
vdg.getProp<0>(105) = 0.99999999;
test = vdg.compareHostAndDeviceProp<0>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
////////////////////////////////////////////////// PROP tensor (3x3)
test = vdg.compareHostAndDeviceProp<2>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
vdg.getProp<2>(108)[1][2] = 0.99999999;
test = vdg.compareHostAndDeviceProp<2>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
vdg.hostToDeviceProp<2>();
vdg.getProp<2>(108)[1][2] = 0.99999999;
test = vdg.compareHostAndDeviceProp<2>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
}
BOOST_AUTO_TEST_SUITE_END()
| 8414c34c9b078a9808f5aab024dfce59fcc5807b.cu |
#define BOOST_TEST_DYN_LINK
#include <boost/test/unit_test.hpp>
#include "VCluster/VCluster.hpp"
#include <Vector/vector_dist.hpp>
#include "Vector/tests/vector_dist_util_unit_tests.hpp"
#define SUB_UNIT_FACTOR 1024
/*! \brief Test kernel: shift every particle position by 0.05 along each axis
 *
 * One thread per particle (GET_PARTICLE handles the bound check).
 *
 * \param vd kernel view of the particle vector
 */
template<unsigned int dim , typename vector_dist_type>
__global__ void move_parts_gpu_test(vector_dist_type vd)
{
auto p = GET_PARTICLE(vd);

#pragma unroll
for (int d = 0 ; d < dim ; d++)
{vd.getPos(p)[d] += 0.05;}
}
BOOST_AUTO_TEST_SUITE( vector_dist_gpu_test )
//! \brief Print a test banner with its size parameter, on rank 0 only
void print_test(std::string test, size_t sz)
{
auto & v_cl = create_vcluster();

if (v_cl.getProcessUnitID() == 0)
{std::cout << test << " " << sz << "\n";}
}
// Test kernel: derive each particle's properties from its position.
// prop<0> = x+y+z ; prop<1> = (x+y, x+z, y+z). One thread per particle.
__global__ void initialize_props(vector_dist_ker<3, float, aggregate<float, float [3], float[3]>> vd)
{
auto p = GET_PARTICLE(vd);
vd.template getProp<0>(p) = vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
vd.template getProp<1>(p)[0] = vd.getPos(p)[0] + vd.getPos(p)[1];
vd.template getProp<1>(p)[1] = vd.getPos(p)[0] + vd.getPos(p)[2];
vd.template getProp<1>(p)[2] = vd.getPos(p)[1] + vd.getPos(p)[2];
}
// Test kernel: compute for each particle the same pairwise "force" twice,
// once through the sorted particle view (it.get_sort() into vd_sort -> prop<1>)
// and once through the unsorted view (it.get() into vd -> prop<2>).
// The host later checks prop<1> == prop<2> to validate sorted cell-list access.
//
// \param vd      unsorted kernel view (read pos/prop<0>, write prop<1>, prop<2>)
// \param vd_sort sorted kernel view (read only)
// \param cl      GPU cell list built over vd
// \param rank    MPI rank (unused here; kept for debugging printouts)
template<typename T,typename CellList_type>
__global__ void calculate_force(vector_dist_ker<3, T, aggregate<T, T[3], T [3]>> vd,
vector_dist_ker<3, T, aggregate<T, T[3], T [3]>> vd_sort,
CellList_type cl,
int rank)
{
auto p = GET_PARTICLE(vd);
Point<3,T> xp = vd.getPos(p);
auto it = cl.getNNIterator(cl.getCell(xp));
Point<3,T> force1({0.0,0.0,0.0});
Point<3,T> force2({0.0,0.0,0.0});
while (it.isNext())
{
auto q1 = it.get_sort();
auto q2 = it.get();
// skip self-interaction
if (q2 == p) {++it; continue;}
Point<3,T> xq_1 = vd_sort.getPos(q1);
Point<3,T> xq_2 = vd.getPos(q2);
Point<3,T> r1 = xq_1 - xp;
Point<3,T> r2 = xq_2 - xp;
// Normalize
r1 /= r1.norm();
r2 /= r2.norm();
force1 += vd_sort.template getProp<0>(q1)*r1;
force2 += vd.template getProp<0>(q2)*r2;
++it;
}
vd.template getProp<1>(p)[0] = force1.get(0);
vd.template getProp<1>(p)[1] = force1.get(1);
vd.template getProp<1>(p)[2] = force1.get(2);
vd.template getProp<2>(p)[0] = force2.get(0);
vd.template getProp<2>(p)[1] = force2.get(1);
vd.template getProp<2>(p)[2] = force2.get(2);
}
// Test kernel: like calculate_force but operating entirely on the sorted
// particle view. The particle index itself comes from GET_PARTICLE_SORT, so the
// result written to prop<1> must be merged back with merge_sort on the host.
//
// \param vd   sorted kernel view (read pos/prop<0>, write prop<1>)
// \param cl   GPU cell list built over vd
// \param rank MPI rank (unused here; kept for debugging printouts)
template<typename T, typename CellList_type>
__global__ void calculate_force_full_sort(vector_dist_ker<3, T, aggregate<T, T[3], T [3]>> vd,
CellList_type cl, int rank)
{
unsigned int p;
GET_PARTICLE_SORT(p,cl);
Point<3,T> xp = vd.getPos(p);
auto it = cl.getNNIterator(cl.getCell(xp));
Point<3,T> force1({0.0,0.0,0.0});
while (it.isNext())
{
auto q1 = it.get_sort();
// skip self-interaction (indices are in sorted space on both sides)
if (q1 == p) {++it; continue;}
Point<3,T> xq_1 = vd.getPos(q1);
Point<3,T> r1 = xq_1 - xp;
// Normalize
r1 /= r1.norm();
force1 += vd.template getProp<0>(q1)*r1;
++it;
}
vd.template getProp<1>(p)[0] = force1.get(0);
vd.template getProp<1>(p)[1] = force1.get(1);
vd.template getProp<1>(p)[2] = force1.get(2);
}
// Host-side verification of the GPU force kernels.
//
// For every domain particle it recomputes the force with a CPU cell list and
// checks (within an absolute tolerance of 3e-4, float accumulation order
// differs between CPU and GPU) that:
//  * GPU sorted result (prop<1>) == GPU unsorted result (prop<2>)
//  * GPU sorted result (prop<1>) == CPU reference force
//
// \return true when all particles match
template<typename CellList_type, typename vector_type>
bool check_force(CellList_type & NN_cpu, vector_type & vd)
{
typedef typename vector_type::stype St;
auto it6 = vd.getDomainIterator();
bool match = true;
while (it6.isNext())
{
auto p = it6.get();
Point<3,St> xp = vd.getPos(p);
// Calculate on CPU
Point<3,St> force({0.0,0.0,0.0});
auto NNc = NN_cpu.getNNIterator(NN_cpu.getCell(xp));
while (NNc.isNext())
{
auto q = NNc.get();
if (q == p.getKey()) {++NNc; continue;}
Point<3,St> xq_2 = vd.getPos(q);
Point<3,St> r2 = xq_2 - xp;
// Normalize
r2 /= r2.norm();
force += vd.template getProp<0>(q)*r2;
++NNc;
}
match &= fabs(vd.template getProp<1>(p)[0] - vd.template getProp<2>(p)[0]) < 0.0003;
match &= fabs(vd.template getProp<1>(p)[1] - vd.template getProp<2>(p)[1]) < 0.0003;
match &= fabs(vd.template getProp<1>(p)[2] - vd.template getProp<2>(p)[2]) < 0.0003;
match &= fabs(vd.template getProp<1>(p)[0] - force.get(0)) < 0.0003;
match &= fabs(vd.template getProp<1>(p)[1] - force.get(1)) < 0.0003;
match &= fabs(vd.template getProp<1>(p)[2] - force.get(2)) < 0.0003;
// print the first mismatch and bail out early
if (match == false)
{
std::cout << "ERROR: " << vd.template getProp<1>(p)[0] << " " << vd.template getProp<2>(p)[0] << std::endl;
std::cout << "ERROR: " << vd.template getProp<1>(p)[1] << " " << vd.template getProp<2>(p)[1] << std::endl;
std::cout << "ERROR: " << vd.template getProp<1>(p)[2] << " " << vd.template getProp<2>(p)[2] << std::endl;
std::cout << "ERROR2: " << vd.template getProp<1>(p)[0] << " " << force.get(0) << std::endl;
std::cout << "ERROR2: " << vd.template getProp<1>(p)[1] << " " << force.get(1) << std::endl;
std::cout << "ERROR2: " << vd.template getProp<1>(p)[2] << " " << force.get(2) << std::endl;
break;
}
++it6;
}
return match;
}
// CPU-path sanity check for vector_dist_gpu: after map() + ghost_get the
// properties of every particle (domain and ghost) must still be consistent
// with its position.
//
// Fix: the consistency flag `check` was computed but never asserted; the
// BOOST_REQUIRE_EQUAL on it was missing, leaving the main verification dead.
BOOST_AUTO_TEST_CASE( vector_dist_gpu_ghost_get )
{
auto & v_cl = create_vcluster();
if (v_cl.size() > 16)
{return;}
Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
// set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
Ghost<3,float> g(0.1);
// Boundary conditions
size_t bc[3]={PERIODIC,PERIODIC,PERIODIC};
vector_dist_gpu<3,float,aggregate<float,float[3],float[3]>> vd(1000,domain,bc,g);
// randomize positions and derive each property from the position
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.getPos(p)[0] = (float)rand() / RAND_MAX;
vd.getPos(p)[1] = (float)rand() / RAND_MAX;
vd.getPos(p)[2] = (float)rand() / RAND_MAX;
vd.template getProp<0>(p) = vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
vd.template getProp<1>(p)[0] = vd.getPos(p)[0] + vd.getPos(p)[1];
vd.template getProp<1>(p)[1] = vd.getPos(p)[0] + vd.getPos(p)[2];
vd.template getProp<1>(p)[2] = vd.getPos(p)[1] + vd.getPos(p)[2];
vd.template getProp<2>(p)[0] = vd.getPos(p)[0] + 3.0*vd.getPos(p)[1];
vd.template getProp<2>(p)[1] = vd.getPos(p)[0] + 3.0*vd.getPos(p)[2];
vd.template getProp<2>(p)[2] = vd.getPos(p)[1] + 3.0*vd.getPos(p)[2];
++it;
}
// Ok we redistribute the particles (CPU based)
vd.map();
vd.template ghost_get<0,1,2>();
// Now we check that the ghost contains the correct information
bool check = true;
auto itg = vd.getDomainAndGhostIterator();
while (itg.isNext())
{
auto p = itg.get();
check &= (vd.template getProp<0>(p) == vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2]);
check &= (vd.template getProp<1>(p)[0] == vd.getPos(p)[0] + vd.getPos(p)[1]);
check &= (vd.template getProp<1>(p)[1] == vd.getPos(p)[0] + vd.getPos(p)[2]);
check &= (vd.template getProp<1>(p)[2] == vd.getPos(p)[1] + vd.getPos(p)[2]);
check &= (vd.template getProp<2>(p)[0] == vd.getPos(p)[0] + 3.0*vd.getPos(p)[1]);
check &= (vd.template getProp<2>(p)[1] == vd.getPos(p)[0] + 3.0*vd.getPos(p)[2]);
check &= (vd.template getProp<2>(p)[2] == vd.getPos(p)[1] + 3.0*vd.getPos(p)[2]);
++itg;
}
// previously computed but never asserted
BOOST_REQUIRE_EQUAL(check,true);
size_t tot_s = vd.size_local_with_ghost();
v_cl.sum(tot_s);
v_cl.execute();
// We check that we checked something (ghosts were actually produced)
BOOST_REQUIRE(tot_s > 1000);
}
// Run both GPU force kernels (two-view and fully-sorted variants) against the
// GPU cell list NN and verify each result against a CPU cell list NN_cpu via
// check_force.
//
// \param vd     particle vector (positions/prop<0> already on device)
// \param NN     GPU cell list built over vd
// \param NN_cpu CPU cell list used as reference
template<typename vector_type, typename CellList_type, typename CellList_type_cpu>
void check_cell_list_cpu_and_gpu(vector_type & vd, CellList_type & NN, CellList_type_cpu & NN_cpu)
{
auto it5 = vd.getDomainIteratorGPU(32);
calculate_force<typename vector_type::stype,decltype(NN.toKernel())><<<it5.wthr,it5.thr>>>(vd.toKernel(),vd.toKernel_sorted(),NN.toKernel(),create_vcluster().rank());
vd.template deviceToHostProp<1,2>();
bool test = check_force(NN_cpu,vd);
BOOST_REQUIRE_EQUAL(test,true);
// We reset the property 1 on device
auto rst = vd.getDomainIterator();
while (rst.isNext())
{
auto p = rst.get();
vd.template getProp<1>(p)[0] = 0.0;
vd.template getProp<1>(p)[1] = 0.0;
vd.template getProp<1>(p)[2] = 0.0;
++rst;
}
vd.template hostToDeviceProp<1>();
// We do exactly the same test as before, but now we completely use the sorted version
calculate_force_full_sort<typename vector_type::stype,decltype(NN.toKernel())><<<it5.wthr,it5.thr>>>(vd.toKernel_sorted(),NN.toKernel(),create_vcluster().rank());
// bring the sorted result back into the natural particle order
vd.template merge_sort<1>(NN);
vd.template deviceToHostProp<1>();
test = check_force(NN_cpu,vd);
BOOST_REQUIRE_EQUAL(test,true);
}
// End-to-end vector_dist_gpu test: CPU map(), GPU property initialization
// kernel, ghost_get, then GPU vs CPU cell-list force comparison.
//
// Fix: removed the unused local `Point<3,float> xp` in the fill loop
// (dead copy of the position, never read).
BOOST_AUTO_TEST_CASE( vector_dist_gpu_test)
{
auto & v_cl = create_vcluster();
if (v_cl.size() > 16)
{return;}
Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
// set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
Ghost<3,float> g(0.1);
// Boundary conditions
size_t bc[3]={NON_PERIODIC,NON_PERIODIC,NON_PERIODIC};
vector_dist_gpu<3,float,aggregate<float,float[3],float[3]>> vd(10000,domain,bc,g);
// per-rank seed for reproducible but rank-distinct positions
srand(55067*create_vcluster().rank());
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
int x = rand();
int y = rand();
int z = rand();
vd.getPos(p)[0] = (float)x / RAND_MAX;
vd.getPos(p)[1] = (float)y / RAND_MAX;
vd.getPos(p)[2] = (float)z / RAND_MAX;
++it;
}
// Ok we redistribute the particles (CPU based)
vd.map();
// no particle must be lost by map()
size_t size_l = vd.size_local();
v_cl.sum(size_l);
v_cl.execute();
BOOST_REQUIRE_EQUAL(size_l,10000);
// every local particle must be inside the local sub-domain
auto & ct = vd.getDecomposition();
bool noOut = true;
size_t cnt = 0;
auto it2 = vd.getDomainIterator();
while (it2.isNext())
{
auto p = it2.get();
noOut &= ct.isLocal(vd.getPos(p));
cnt++;
++it2;
}
BOOST_REQUIRE_EQUAL(noOut,true);
BOOST_REQUIRE_EQUAL(cnt,vd.size_local());
// now we initialize the properties on the device
auto it3 = vd.getDomainIteratorGPU();
// offload to device
vd.hostToDevicePos();
initialize_props<<<it3.wthr,it3.thr>>>(vd.toKernel());
CUDA_SAFE(cudaGetLastError());
// now we check what we initialized
vd.deviceToHostProp<0,1>();
auto it4 = vd.getDomainIterator();
while (it4.isNext())
{
auto p = it4.get();
BOOST_REQUIRE_CLOSE(vd.template getProp<0>(p),vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2],0.01);
BOOST_REQUIRE_CLOSE(vd.template getProp<1>(p)[0],vd.getPos(p)[0] + vd.getPos(p)[1],0.01);
BOOST_REQUIRE_CLOSE(vd.template getProp<1>(p)[1],vd.getPos(p)[0] + vd.getPos(p)[2],0.01);
BOOST_REQUIRE_CLOSE(vd.template getProp<1>(p)[2],vd.getPos(p)[1] + vd.getPos(p)[2],0.01);
++it4;
}
// here we do a ghost_get
vd.ghost_get<0>();
// Double ghost get to check crashes
vd.ghost_get<0>();
// we re-offload what we received
vd.hostToDevicePos();
vd.template hostToDeviceProp<0>();
auto NN = vd.getCellListGPU(0.1);
auto NN_cpu = vd.getCellList(0.1);
check_cell_list_cpu_and_gpu(vd,NN,NN_cpu);
// repeat after clearing and rebuilding the GPU cell list
auto NN_up = vd.getCellListGPU(0.1);
NN_up.clear();
vd.updateCellList(NN_up);
check_cell_list_cpu_and_gpu(vd,NN_up,NN_cpu);
}
// Round-trip test for GPU map() and ghost_get().
//
// Particles are created on the host with properties derived from their
// position, pushed to the device, mapped on the device, and copied back; the
// host copy is clobbered first to prove the data really came from the device.
// Then 10 device map()/ghost_get() steps are compared against a plain CPU
// vector_dist built on a duplicated decomposition (ghosts are sorted on both
// sides before comparing since the two paths order them differently).
//
// Fix: the host "reset" loop assigned getProp<0> seven times (copy-paste);
// it now clobbers prop<0>, prop<1>[0..2] and prop<2>[0..2] as intended, so
// the subsequent check actually exercises the transfer of all properties
// (the device copy restores every value right after, so behavior of a
// passing run is unchanged).
//
// \tparam St coordinate/property scalar type (float or double)
template<typename St>
void vdist_calc_gpu_test()
{
auto & v_cl = create_vcluster();
if (v_cl.size() > 16)
{return;}
Box<3,St> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
// set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
Ghost<3,St> g(0.1);
// Boundary conditions
size_t bc[3]={PERIODIC,PERIODIC,PERIODIC};
//! [Create a gpu vector]
vector_dist_gpu<3,St,aggregate<St,St[3],St[3]>> vd(1000,domain,bc,g);
//! [Create a gpu vector]
//! [Fill gpu vector and move to GPU]
srand(v_cl.rank()*10000);
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.getPos(p)[0] = (St)rand() / RAND_MAX;
vd.getPos(p)[1] = (St)rand() / RAND_MAX;
vd.getPos(p)[2] = (St)rand() / RAND_MAX;
vd.template getProp<0>(p) = vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
vd.template getProp<1>(p)[0] = vd.getPos(p)[0];
vd.template getProp<1>(p)[1] = vd.getPos(p)[1];
vd.template getProp<1>(p)[2] = vd.getPos(p)[2];
vd.template getProp<2>(p)[0] = vd.getPos(p)[0] + vd.getPos(p)[1];
vd.template getProp<2>(p)[1] = vd.getPos(p)[0] + vd.getPos(p)[2];
vd.template getProp<2>(p)[2] = vd.getPos(p)[1] + vd.getPos(p)[2];
++it;
}
// move on device
vd.hostToDevicePos();
vd.template hostToDeviceProp<0,1,2>();
// Ok we redistribute the particles (GPU based)
vd.map(RUN_ON_DEVICE);
//! [Fill gpu vector and move to GPU]
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// Clobber the host copy, so the check below can only pass if the data
// really comes back from the device
auto it3 = vd.getDomainIterator();
while (it3.isNext())
{
auto p = it3.get();
vd.getPos(p)[0] = 1.0;
vd.getPos(p)[1] = 1.0;
vd.getPos(p)[2] = 1.0;
vd.template getProp<0>(p) = 0.0;
vd.template getProp<1>(p)[0] = 0.0;
vd.template getProp<1>(p)[1] = 0.0;
vd.template getProp<1>(p)[2] = 0.0;
vd.template getProp<2>(p)[0] = 0.0;
vd.template getProp<2>(p)[1] = 0.0;
vd.template getProp<2>(p)[2] = 0.0;
++it3;
}
// we move from Device to CPU
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// Check the position/property relations survived the device round trip
auto it2 = vd.getDomainIterator();
bool match = true;
while (it2.isNext())
{
auto p = it2.get();
match &= vd.template getProp<0>(p) == vd.getPos(p)[0] + vd.getPos(p)[1] + vd.getPos(p)[2];
match &= vd.template getProp<1>(p)[0] == vd.getPos(p)[0];
match &= vd.template getProp<1>(p)[1] == vd.getPos(p)[1];
match &= vd.template getProp<1>(p)[2] == vd.getPos(p)[2];
match &= vd.template getProp<2>(p)[0] == vd.getPos(p)[0] + vd.getPos(p)[1];
match &= vd.template getProp<2>(p)[1] == vd.getPos(p)[0] + vd.getPos(p)[2];
match &= vd.template getProp<2>(p)[2] == vd.getPos(p)[1] + vd.getPos(p)[2];
++it2;
}
BOOST_REQUIRE_EQUAL(match,true);
// count local particles
size_t l_cnt = 0;
size_t nl_cnt = 0;
size_t n_out = 0;
// Domain + ghost box
Box<3,St> dom_ext = domain;
dom_ext.enlarge(g);
auto it5 = vd.getDomainIterator();
count_local_n_local<3>(vd,it5,bc,domain,dom_ext,l_cnt,nl_cnt,n_out);
BOOST_REQUIRE_EQUAL(n_out,0);
BOOST_REQUIRE_EQUAL(l_cnt,vd.size_local());
// we do 10 gpu steps (using a cpu distributed vector to check that map and ghost_get work as expected)
for (size_t i = 0 ; i < 10 ; i++)
{
vd.map(RUN_ON_DEVICE);
CUDA_SAFE(cudaGetLastError());
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// To test we copy on a cpu distributed vector and we do a map
vector_dist<3,St,aggregate<St,St[3],St[3]>> vd_cpu(vd.getDecomposition().template duplicate_convert<HeapMemory,memory_traits_lin>(),0);
auto itc = vd.getDomainIterator();
while (itc.isNext())
{
auto p = itc.get();
vd_cpu.add();
vd_cpu.getLastPos()[0] = vd.getPos(p)[0];
vd_cpu.getLastPos()[1] = vd.getPos(p)[1];
vd_cpu.getLastPos()[2] = vd.getPos(p)[2];
vd_cpu.template getLastProp<0>() = vd.template getProp<0>(p);
vd_cpu.template getLastProp<1>()[0] = vd.template getProp<1>(p)[0];
vd_cpu.template getLastProp<1>()[1] = vd.template getProp<1>(p)[1];
vd_cpu.template getLastProp<1>()[2] = vd.template getProp<1>(p)[2];
vd_cpu.template getLastProp<2>()[0] = vd.template getProp<2>(p)[0];
vd_cpu.template getLastProp<2>()[1] = vd.template getProp<2>(p)[1];
vd_cpu.template getLastProp<2>()[2] = vd.template getProp<2>(p)[2];
++itc;
}
vd_cpu.template ghost_get<0,1,2>();
//! [Fill the ghost on GPU]
vd.template ghost_get<0,1,2>(RUN_ON_DEVICE);
//! [Fill the ghost on GPU]
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
match = true;
// Particles in the gpu ghost and cpu ghost are not ordered in the same way so we have to reorder
struct part
{
Point<3,St> xp;
St prp0;
St prp1[3];
St prp2[3];
// lexicographic order on the position
bool operator<(const part & tmp) const
{
if (xp.get(0) < tmp.xp.get(0))
{return true;}
else if (xp.get(0) > tmp.xp.get(0))
{return false;}
if (xp.get(1) < tmp.xp.get(1))
{return true;}
else if (xp.get(1) > tmp.xp.get(1))
{return false;}
if (xp.get(2) < tmp.xp.get(2))
{return true;}
else if (xp.get(2) > tmp.xp.get(2))
{return false;}
return false;
}
};
openfpm::vector<part> cpu_sort;
openfpm::vector<part> gpu_sort;
cpu_sort.resize(vd_cpu.size_local_with_ghost() - vd_cpu.size_local());
gpu_sort.resize(vd.size_local_with_ghost() - vd.size_local());
BOOST_REQUIRE_EQUAL(cpu_sort.size(),gpu_sort.size());
size_t cnt = 0;
auto itc2 = vd.getGhostIterator();
while (itc2.isNext())
{
auto p = itc2.get();
cpu_sort.get(cnt).xp.get(0) = vd_cpu.getPos(p)[0];
gpu_sort.get(cnt).xp.get(0) = vd.getPos(p)[0];
cpu_sort.get(cnt).xp.get(1) = vd_cpu.getPos(p)[1];
gpu_sort.get(cnt).xp.get(1) = vd.getPos(p)[1];
cpu_sort.get(cnt).xp.get(2) = vd_cpu.getPos(p)[2];
gpu_sort.get(cnt).xp.get(2) = vd.getPos(p)[2];
cpu_sort.get(cnt).prp0 = vd_cpu.template getProp<0>(p);
gpu_sort.get(cnt).prp0 = vd.template getProp<0>(p);
cpu_sort.get(cnt).prp1[0] = vd_cpu.template getProp<1>(p)[0];
gpu_sort.get(cnt).prp1[0] = vd.template getProp<1>(p)[0];
cpu_sort.get(cnt).prp1[1] = vd_cpu.template getProp<1>(p)[1];
gpu_sort.get(cnt).prp1[1] = vd.template getProp<1>(p)[1];
cpu_sort.get(cnt).prp1[2] = vd_cpu.template getProp<1>(p)[2];
gpu_sort.get(cnt).prp1[2] = vd.template getProp<1>(p)[2];
cpu_sort.get(cnt).prp2[0] = vd_cpu.template getProp<2>(p)[0];
gpu_sort.get(cnt).prp2[0] = vd.template getProp<2>(p)[0];
cpu_sort.get(cnt).prp2[1] = vd_cpu.template getProp<2>(p)[1];
gpu_sort.get(cnt).prp2[1] = vd.template getProp<2>(p)[1];
cpu_sort.get(cnt).prp2[2] = vd_cpu.template getProp<2>(p)[2];
gpu_sort.get(cnt).prp2[2] = vd.template getProp<2>(p)[2];
++cnt;
++itc2;
}
cpu_sort.sort();
gpu_sort.sort();
// after sorting both ghosts must be element-wise identical
for (size_t i = 0 ; i < cpu_sort.size() ; i++)
{
match &= cpu_sort.get(i).xp.get(0) == gpu_sort.get(i).xp.get(0);
match &= cpu_sort.get(i).xp.get(1) == gpu_sort.get(i).xp.get(1);
match &= cpu_sort.get(i).xp.get(2) == gpu_sort.get(i).xp.get(2);
match &= cpu_sort.get(i).prp0 == gpu_sort.get(i).prp0;
match &= cpu_sort.get(i).prp1[0] == gpu_sort.get(i).prp1[0];
match &= cpu_sort.get(i).prp1[1] == gpu_sort.get(i).prp1[1];
match &= cpu_sort.get(i).prp1[2] == gpu_sort.get(i).prp1[2];
match &= cpu_sort.get(i).prp2[0] == gpu_sort.get(i).prp2[0];
match &= cpu_sort.get(i).prp2[1] == gpu_sort.get(i).prp2[1];
match &= cpu_sort.get(i).prp2[2] == gpu_sort.get(i).prp2[2];
}
BOOST_REQUIRE_EQUAL(match,true);
// move particles on gpu
auto ite = vd.getDomainIteratorGPU();
move_parts_gpu_test<3,decltype(vd.toKernel())><<<ite.wthr,ite.thr>>>(vd.toKernel());
CUDA_SAFE(cudaGetLastError());
}
}
// Driver test: runs the GPU map()/ghost_get() consistency check
// (vdist_calc_gpu_test<T>) for both supported coordinate precisions.
BOOST_AUTO_TEST_CASE( vector_dist_map_on_gpu_test)
{
vdist_calc_gpu_test<float>();
vdist_calc_gpu_test<double>();
}
// Check device-side reduce_local on all four property types
// (float, double, int, size_t): fill property p of every local particle
// with 1,2,3,...,N, then verify that the _add_ reduction equals N*(N+1)/2
// and the _max_ reduction equals N.
BOOST_AUTO_TEST_CASE(vector_dist_reduce)
{
	auto & v_cl = create_vcluster();

	// limited to small communicators
	if (v_cl.size() > 16)
	{return;}

	Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});

	// set the ghost based on the radius cut off (make just a little bit smaller than the spacing)
	Ghost<3,float> g(0.1);

	// Boundary conditions
	size_t bc[3]={PERIODIC,PERIODIC,PERIODIC};

	vector_dist_gpu<3,float,aggregate<float,double,int,size_t>> vd(5000*v_cl.size(),domain,bc,g);

	auto it = vd.getDomainIterator();

	float fc = 1.0;
	double dc = 1.0;
	int ic = 1;     // fixed: was "int ic = 1.0;" (implicit double -> int narrowing)
	size_t sc = 1;  // fixed: was "size_t sc = 1.0;" (implicit double -> size_t narrowing)

	while(it.isNext())
	{
		auto p = it.get();

		vd.template getProp<0>(p) = fc;
		vd.template getProp<1>(p) = dc;
		vd.template getProp<2>(p) = ic;
		vd.template getProp<3>(p) = sc;

		fc += 1.0;
		dc += 1.0;
		ic += 1;
		sc += 1;

		++it;
	}

	// reductions run on the device, so upload the freshly written properties
	vd.template hostToDeviceProp<0,1,2,3>();

	float redf = reduce_local<0,_add_>(vd);
	double redd = reduce_local<1,_add_>(vd);
	int redi = reduce_local<2,_add_>(vd);
	size_t reds = reduce_local<3,_add_>(vd);

	// sum of 1..N is N*(N+1)/2
	// NOTE(review): for large N the float sum approaches the 2^24 exact-integer
	// limit of single precision -- TODO confirm the float comparison stays exact
	// at the maximum communicator size used here
	BOOST_REQUIRE_EQUAL(redf,(vd.size_local()+1.0)*(vd.size_local())/2.0);
	BOOST_REQUIRE_EQUAL(redd,(vd.size_local()+1.0)*(vd.size_local())/2.0);
	BOOST_REQUIRE_EQUAL(redi,(vd.size_local()+1)*(vd.size_local())/2);
	BOOST_REQUIRE_EQUAL(reds,(vd.size_local()+1)*(vd.size_local())/2);

	float redf2 = reduce_local<0,_max_>(vd);
	double redd2 = reduce_local<1,_max_>(vd);
	int redi2 = reduce_local<2,_max_>(vd);
	size_t reds2 = reduce_local<3,_max_>(vd);

	// maximum of 1..N is N
	BOOST_REQUIRE_EQUAL(redf2,vd.size_local());
	BOOST_REQUIRE_EQUAL(redd2,vd.size_local());
	BOOST_REQUIRE_EQUAL(redi2,vd.size_local());
	BOOST_REQUIRE_EQUAL(reds2,vd.size_local());
}
// Dynamic-load-balancing test (GPU path).
// Seeds k particles on rank 0 only (worst-case imbalance), decomposes with a
// ModelSquare cost model, then drifts all particles diagonally for 25 steps,
// re-checking cell-list/Verlet consistency and that the per-processor load
// stays within tolerance after each redecomposition.
// k: number of particles created on rank 0; r_cut: interaction cut-off radius.
void vector_dist_dlb_on_cuda_impl(size_t k,double r_cut)
{
typedef vector_dist_gpu<3,double,aggregate<double,double[3],double[3]>> vector_type;
Vcluster<> & v_cl = create_vcluster();
// limited to small communicators
if (v_cl.getProcessingUnits() > 8)
return;
Box<3,double> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
Ghost<3,double> g(0.1);
size_t bc[3] = {PERIODIC,PERIODIC,PERIODIC};
vector_type vd(0,domain,bc,g,DEC_GRAN(2048));
// Only processor 0 initially adds particles, clustered in a corner of the domain
if (v_cl.getProcessUnitID() == 0)
{
for(size_t i = 0 ; i < k ; i++)
{
vd.add();
vd.getLastPos()[0] = ((double)rand())/RAND_MAX * 0.3;
vd.getLastPos()[1] = ((double)rand())/RAND_MAX * 0.3;
vd.getLastPos()[2] = ((double)rand())/RAND_MAX * 0.3;
}
}
// Move to GPU
vd.hostToDevicePos();
vd.template hostToDeviceProp<0>();
vd.map(RUN_ON_DEVICE);
vd.template ghost_get<>(RUN_ON_DEVICE);
// now move to CPU
vd.deviceToHostPos();
vd.template deviceToHostProp<0>();
// Get the neighborhood of each particle
auto VV = vd.getVerlet(r_cut);
// store the number of neighbors of each particle in property 0
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.template getProp<0>(p) = VV.getNNPart(p.getKey());
++it;
}
// Move to GPU
vd.template hostToDeviceProp<0>();
ModelSquare md;
md.factor = 10;
vd.addComputationCosts(md);
vd.getDecomposition().decompose();
vd.map(RUN_ON_DEVICE);
vd.deviceToHostPos();
// Move info to CPU for addComputationcosts
vd.addComputationCosts(md);
openfpm::vector<size_t> loads;
size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
v_cl.allGather(load,loads);
v_cl.execute();
// after the first decomposition every processor load should agree within 7%
for (size_t i = 0 ; i < loads.size() ; i++)
{
double load_f = load;
double load_fc = loads.get(i);
BOOST_REQUIRE_CLOSE(load_f,load_fc,7.0);
}
BOOST_REQUIRE(vd.size_local() != 0);
Point<3,double> v({1.0,1.0,1.0});
for (size_t i = 0 ; i < 25 ; i++)
{
// move particles to CPU and shift every particle by 0.09 along the diagonal
vd.deviceToHostPos();
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vd.getPos(p)[0] += v.get(0) * 0.09;
vd.getPos(p)[1] += v.get(1) * 0.09;
vd.getPos(p)[2] += v.get(2) * 0.09;
++it;
}
//Back to GPU
vd.hostToDevicePos();
vd.map(RUN_ON_DEVICE);
vd.template ghost_get<0>(RUN_ON_DEVICE);
vd.deviceToHostPos();
vd.template deviceToHostProp<0,1,2>();
// Check calc forces: GPU and CPU cell lists must agree
auto NN_gpu = vd.getCellListGPU(r_cut);
auto NN_cpu = vd.getCellList(r_cut);
check_cell_list_cpu_and_gpu(vd,NN_gpu,NN_cpu);
// the stored neighbor count must still match a freshly built Verlet list
// (all particles moved by the same offset with periodic BC, so relative
// distances -- and therefore neighbor counts -- are unchanged)
auto VV2 = vd.getVerlet(r_cut);
auto it2 = vd.getDomainIterator();
bool match = true;
while (it2.isNext())
{
auto p = it2.get();
match &= vd.template getProp<0>(p) == VV2.getNNPart(p.getKey());
++it2;
}
BOOST_REQUIRE_EQUAL(match,true);
// NOTE: md and loads below intentionally shadow the outer declarations
ModelSquare md;
vd.addComputationCosts(md);
vd.getDecomposition().redecompose(200);
vd.map(RUN_ON_DEVICE);
BOOST_REQUIRE(vd.size_local() != 0);
vd.template ghost_get<0>(RUN_ON_DEVICE);
vd.deviceToHostPos();
vd.template deviceToHostProp<0>();
vd.addComputationCosts(md);
openfpm::vector<size_t> loads;
size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
v_cl.allGather(load,loads);
v_cl.execute();
// after each redecomposition the load balance must stay within 10%
for (size_t i = 0 ; i < loads.size() ; i++)
{
double load_f = load;
double load_fc = loads.get(i);
BOOST_REQUIRE_CLOSE(load_f,load_fc,10.0);
}
}
}
// Baseline DLB test: 50k particles, cut-off 0.01.
BOOST_AUTO_TEST_CASE(vector_dist_dlb_on_cuda)
{
vector_dist_dlb_on_cuda_impl(50000,0.01);
}
// Larger DLB test: 1M particles; only runs on communicators with at
// least 4 processors, otherwise it is skipped.
BOOST_AUTO_TEST_CASE(vector_dist_dlb_on_cuda2)
{
	if (create_vcluster().size() <= 3)
	{return;} // fixed: removed stray empty statement ("};" -> "}")
	vector_dist_dlb_on_cuda_impl(1000000,0.01);
}
// Heavy scaling DLB test: 15M particles, tighter cut-off; requires at
// least 8 processors, otherwise it is skipped.
BOOST_AUTO_TEST_CASE(vector_dist_dlb_on_cuda3)
{
if (create_vcluster().size() < 8)
{return;}
vector_dist_dlb_on_cuda_impl(15000000,0.005);
}
// Check that ghost_get with KEEP_PROPERTIES preserves/updates ghost property
// values correctly across repeated map()/redecompose cycles on the GPU.
// Even iterations move particles and redecompose; odd iterations increment
// every property by 1 on the host and verify each ghost particle carries
// base + its original offset after a KEEP_PROPERTIES ghost_get.
BOOST_AUTO_TEST_CASE(vector_dist_keep_prop_on_cuda)
{
	typedef vector_dist_gpu<3,double,aggregate<double,double[3],double[3][3]>> vector_type;

	Vcluster<> & v_cl = create_vcluster();

	// limited to small communicators
	if (v_cl.getProcessingUnits() > 8)
	return;

	Box<3,double> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
	Ghost<3,double> g(0.1);
	size_t bc[3] = {PERIODIC,PERIODIC,PERIODIC};

	vector_type vd(0,domain,bc,g,DEC_GRAN(2048));

	// Only processor 0 initially adds particles, clustered in a corner of the domain
	if (v_cl.getProcessUnitID() == 0)
	{
		for(size_t i = 0 ; i < 50000 ; i++)
		{
			vd.add();
			vd.getLastPos()[0] = ((double)rand())/RAND_MAX * 0.3;
			vd.getLastPos()[1] = ((double)rand())/RAND_MAX * 0.3;
			vd.getLastPos()[2] = ((double)rand())/RAND_MAX * 0.3;
		}
	}

	// Move to GPU
	vd.hostToDevicePos();
	vd.template hostToDeviceProp<0>();

	vd.map(RUN_ON_DEVICE);
	vd.template ghost_get<>(RUN_ON_DEVICE);

	// now move to CPU
	vd.deviceToHostPos();
	vd.template deviceToHostProp<0>();

	// initialize every property with a distinct, recognizable base offset
	auto it = vd.getDomainIterator();
	while (it.isNext())
	{
		auto p = it.get();
		vd.template getProp<0>(p) = 0.0;
		vd.template getProp<1>(p)[0] = 1000.0;
		vd.template getProp<1>(p)[1] = 2000.0;
		vd.template getProp<1>(p)[2] = 3000.0;
		vd.template getProp<2>(p)[0][0] = 6000.0; // fixed: was "6000,0" (decimal-comma typo; parsed as comma operator)
		vd.template getProp<2>(p)[0][1] = 7000.0;
		vd.template getProp<2>(p)[0][2] = 8000.0;
		vd.template getProp<2>(p)[1][0] = 9000.0;
		vd.template getProp<2>(p)[1][1] = 10000.0;
		vd.template getProp<2>(p)[1][2] = 11000.0;
		vd.template getProp<2>(p)[2][0] = 12000.0;
		vd.template getProp<2>(p)[2][1] = 13000.0;
		vd.template getProp<2>(p)[2][2] = 14000.0;
		++it;
	}

	// Move to GPU
	vd.template hostToDeviceProp<0,1,2>();

	ModelSquare md;
	md.factor = 10;
	vd.addComputationCosts(md);
	vd.getDecomposition().decompose();
	vd.map(RUN_ON_DEVICE);
	vd.deviceToHostPos();

	// Move info to CPU for addComputationcosts
	vd.addComputationCosts(md);

	openfpm::vector<size_t> loads;
	size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
	v_cl.allGather(load,loads);
	v_cl.execute();

	// after the first decomposition every processor load should agree within 7%
	for (size_t i = 0 ; i < loads.size() ; i++)
	{
		double load_f = load;
		double load_fc = loads.get(i);
		BOOST_REQUIRE_CLOSE(load_f,load_fc,7.0);
	}

	BOOST_REQUIRE(vd.size_local() != 0);

	Point<3,double> v({1.0,1.0,1.0});

	// counts how many "+1" passes (odd iterations) have been applied
	int base = 0;

	for (size_t i = 0 ; i < 25 ; i++)
	{
		if (i % 2 == 0)
		{
			// move particles to CPU and shift every particle by 0.09 along the diagonal
			vd.deviceToHostPos();
			auto it = vd.getDomainIterator();
			while (it.isNext())
			{
				auto p = it.get();
				vd.getPos(p)[0] += v.get(0) * 0.09;
				vd.getPos(p)[1] += v.get(1) * 0.09;
				vd.getPos(p)[2] += v.get(2) * 0.09;
				++it;
			}

			//Back to GPU
			vd.hostToDevicePos();
			vd.map(RUN_ON_DEVICE);
			vd.template ghost_get<>(RUN_ON_DEVICE);
			vd.deviceToHostPos();
			vd.template deviceToHostProp<0,1,2>();

			// redecompose and check the load balance again (md/loads shadow the outer names)
			ModelSquare md;
			vd.addComputationCosts(md);
			vd.getDecomposition().redecompose(200);
			vd.map(RUN_ON_DEVICE);

			BOOST_REQUIRE(vd.size_local() != 0);

			vd.template ghost_get<0>(RUN_ON_DEVICE);
			vd.deviceToHostPos();
			vd.template deviceToHostProp<0,1,2>();

			vd.addComputationCosts(md);

			openfpm::vector<size_t> loads;
			size_t load = vd.getDecomposition().getDistribution().getProcessorLoad();
			v_cl.allGather(load,loads);
			v_cl.execute();

			// after each redecomposition the load balance must stay within 10%
			for (size_t i = 0 ; i < loads.size() ; i++)
			{
				double load_f = load;
				double load_fc = loads.get(i);
				BOOST_REQUIRE_CLOSE(load_f,load_fc,10.0);
			}
		}
		else
		{
			// increment every property by 1 on the host ...
			vd.template deviceToHostProp<0,1,2>();
			auto it2 = vd.getDomainIterator();
			bool match = true;
			while (it2.isNext())
			{
				auto p = it2.get();
				vd.template getProp<0>(p) += 1;
				vd.template getProp<1>(p)[0] += 1.0;
				vd.template getProp<1>(p)[1] += 1.0;
				vd.template getProp<1>(p)[2] += 1.0;
				vd.template getProp<2>(p)[0][0] += 1.0;
				vd.template getProp<2>(p)[0][1] += 1.0;
				vd.template getProp<2>(p)[0][2] += 1.0;
				vd.template getProp<2>(p)[1][0] += 1.0;
				vd.template getProp<2>(p)[1][1] += 1.0;
				vd.template getProp<2>(p)[1][2] += 1.0;
				vd.template getProp<2>(p)[2][0] += 1.0;
				vd.template getProp<2>(p)[2][1] += 1.0;
				vd.template getProp<2>(p)[2][2] += 1.0;
				++it2;
			}
			// ... upload, then refresh the ghosts keeping the transferred properties
			vd.template hostToDeviceProp<0,1,2>();
			++base;
			vd.template ghost_get<0,1,2>(RUN_ON_DEVICE | KEEP_PROPERTIES);
			vd.template deviceToHostProp<0,1,2>();

			// Check that the ghost contains the correct information:
			// each property must be its original offset plus `base`
			auto itg = vd.getGhostIterator();
			while (itg.isNext())
			{
				auto p = itg.get();
				match &= vd.template getProp<0>(p) == base;
				match &= vd.template getProp<1>(p)[0] == base + 1000.0;
				match &= vd.template getProp<1>(p)[1] == base + 2000.0;
				match &= vd.template getProp<1>(p)[2] == base + 3000.0;
				match &= vd.template getProp<2>(p)[0][0] == base + 6000.0;
				match &= vd.template getProp<2>(p)[0][1] == base + 7000.0;
				match &= vd.template getProp<2>(p)[0][2] == base + 8000.0;
				match &= vd.template getProp<2>(p)[1][0] == base + 9000.0;
				match &= vd.template getProp<2>(p)[1][1] == base + 10000.0;
				match &= vd.template getProp<2>(p)[1][2] == base + 11000.0;
				match &= vd.template getProp<2>(p)[2][0] == base + 12000.0;
				match &= vd.template getProp<2>(p)[2][1] == base + 13000.0;
				match &= vd.template getProp<2>(p)[2][2] == base + 14000.0;
				++itg;
			}
			BOOST_REQUIRE_EQUAL(match,true);
		}
	}
}
// Check compareHostAndDevicePos / compareHostAndDeviceProp<i>:
// after an upload the comparison must succeed; modifying only the host copy
// must make it fail; re-uploading and then writing the same value back must
// succeed again (host and device hold identical data).
BOOST_AUTO_TEST_CASE(vector_dist_compare_host_device)
{
Box<3,double> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
Ghost<3,double> g(0.1);
size_t bc[3] = {PERIODIC,PERIODIC,PERIODIC};
// limited to small communicators
if (create_vcluster().size() >= 16)
{return;}
vector_dist_gpu<3,double,aggregate<double,double[3],double[3][3]>> vdg(10000,domain,bc,g,DEC_GRAN(128));
// randomize positions and all properties on the host
auto it = vdg.getDomainIterator();
while (it.isNext())
{
auto p = it.get();
vdg.getPos(p)[0] = (double)rand() / RAND_MAX;
vdg.getPos(p)[1] = (double)rand() / RAND_MAX;
vdg.getPos(p)[2] = (double)rand() / RAND_MAX;
vdg.template getProp<0>(p) = (double)rand() / RAND_MAX;
vdg.template getProp<1>(p)[0] = (double)rand() / RAND_MAX;
vdg.template getProp<1>(p)[1] = (double)rand() / RAND_MAX;
vdg.template getProp<1>(p)[2] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[0][0] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[0][1] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[0][2] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[1][0] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[1][1] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[1][2] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[2][0] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[2][1] = (double)rand() / RAND_MAX;
vdg.template getProp<2>(p)[2][2] = (double)rand() / RAND_MAX;
++it;
}
vdg.map();
// upload everything so host and device agree
vdg.hostToDeviceProp<0,1,2>();
vdg.hostToDevicePos();
bool test = vdg.compareHostAndDevicePos(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
// perturb only the host copy -> comparison must fail
vdg.getPos(100)[0] = 0.99999999;
test = vdg.compareHostAndDevicePos(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
// upload the perturbed value, then write the same value again -> must match
vdg.hostToDevicePos();
vdg.getPos(100)[0] = 0.99999999;
test = vdg.compareHostAndDevicePos(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
////////////////////////////////////////////////// PROP VECTOR (double[3])
test = vdg.compareHostAndDeviceProp<1>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
vdg.getProp<1>(103)[0] = 0.99999999;
test = vdg.compareHostAndDeviceProp<1>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
vdg.hostToDeviceProp<1>();
vdg.getProp<1>(103)[0] = 0.99999999;
test = vdg.compareHostAndDeviceProp<1>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
////////////////////////////////////////////////// PROP scalar (double)
test = vdg.compareHostAndDeviceProp<0>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
vdg.getProp<0>(105) = 0.99999999;
test = vdg.compareHostAndDeviceProp<0>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
vdg.hostToDeviceProp<0>();
vdg.getProp<0>(105) = 0.99999999;
test = vdg.compareHostAndDeviceProp<0>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
////////////////////////////////////////////////// PROP tensor (double[3][3])
test = vdg.compareHostAndDeviceProp<2>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
vdg.getProp<2>(108)[1][2] = 0.99999999;
test = vdg.compareHostAndDeviceProp<2>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,false);
vdg.hostToDeviceProp<2>();
vdg.getProp<2>(108)[1][2] = 0.99999999;
test = vdg.compareHostAndDeviceProp<2>(0.00001,0.00000001);
BOOST_REQUIRE_EQUAL(test,true);
}
BOOST_AUTO_TEST_SUITE_END()
|
cd512da38da49e1e420bc3c4695ae0ff693f5ea0.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/* Copyright 2019 Stanford
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "gnn.h"
#include "cuda_helper.h"
#include "realm/runtime_impl.h"
#include "realm/cuda/cuda_module.h"
#include <fstream>
#include <sstream>
LegionRuntime::Logger::Category log_load("gnn");
void load_features_impl(const Task *task,
const std::vector<PhysicalRegion> ®ions,
Context ctx, Runtime* runtime)
{
assert(regions.size() == 1);
assert(task->regions.size() == 1);
std::string prefix((char*)task->args);
const AccessorWO<DATATYPE, 2> accInput(regions[0], FID_DATA);
Rect<2> rectInput = runtime->get_index_space_domain(
ctx, task->regions[0].region.get_index_space());
V_ID rowLeft = rectInput.lo[1], rowRight = rectInput.hi[1];
int inDim = rectInput.hi[0] - rectInput.lo[0] + 1;
assert(accInput.accessor.is_dense_arbitrary(rectInput));
DATATYPE* input = accInput.ptr(rectInput.lo);
std::string csvFile = prefix + ".feats.csv";
std::string binFile = prefix + ".feats.bin";
FILE* binFin = fopen(binFile.c_str(), "rb");
if (binFin == NULL) {
log_load.print("Load features from CSV: file(%s) rowLeft(%u) rowRight(%u)",
csvFile.c_str(), rowLeft, rowRight);
std::fstream csvFin;
csvFin.open(csvFile, std::ios::in);
std::string line, word;
// Skip the first rowLeft lines
for (V_ID v = 0; v < rowLeft; v++)
std::getline(csvFin, line);
for (V_ID v = rowLeft; v <= rowRight; v++) {
std::getline(csvFin, line);
std::stringstream ss(line);
int feat_cnt = 0;
while (std::getline(ss, word, ',')) {
float num = std::stof(word);
input[(v - rowLeft) * inDim + feat_cnt] = num;
feat_cnt ++;
}
assert(feat_cnt == inDim);
if (v % 10000 == 0) log_load.print("Loaded %u/%u nodes", v, rowRight);
}
FILE* binFout = fopen((prefix + ".feats.bin").c_str(), "wb");
fwrite(input, sizeof(DATATYPE), rectInput.volume(), binFout);
fclose(binFout);
csvFin.close();
} else {
log_load.print("Load features from Binary: file(%s) rowLeft(%u) rowRight(%u)\n",
binFile.c_str(), rowLeft, rowRight);
size_t ret = fread(input, sizeof(DATATYPE), rectInput.volume(), binFin);
assert(ret == rectInput.volume());
fclose(binFin);
}
}
// Builds the launcher for LOAD_FEATS_TASK_ID: fills `input` with node
// features read from "<filename>.feats.{bin,csv}". The file prefix is
// copied into a fixed MAX_FILE_LEN-byte task argument buffer.
// The `model` parameter is currently unused here.
LoadFeaturesTask::LoadFeaturesTask(const Model& model,
const Tensor& input,
const std::string& filename)
: TaskLauncher(LOAD_FEATS_TASK_ID,
TaskArgument(filename.c_str(), MAX_FILE_LEN))
{
// regions[0]: input (write-only, mapped to zero-copy memory)
{
RegionRequirement rr(input.region, WRITE_ONLY, EXCLUSIVE, input.region,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
}
void load_labels_impl(const Task *task,
const std::vector<PhysicalRegion> ®ions,
Context ctx, Runtime* runtime)
{
assert(regions.size() == 1);
assert(task->regions.size() == 1);
std::string prefix((char*)task->args);
const AccessorWO<DATATYPE, 2> accLabel(regions[0], FID_DATA);
Rect<2> rectLabel = runtime->get_index_space_domain(
ctx, task->regions[0].region.get_index_space());
V_ID rowLeft = rectLabel.lo[1], rowRight = rectLabel.hi[1];
int inDim = rectLabel.hi[0] - rectLabel.lo[0] + 1;
assert(accLabel.accessor.is_dense_arbitrary(rectLabel));
DATATYPE* label = accLabel.ptr(rectLabel.lo);
// TODO: remove me
//for (V_ID v = rowLeft; v<= rowRight; v++)
// for (int i = 0; i < inDim; i++)
// label[(v-rowLeft)*inDim+i] = i == 0 ? 1.0f : 0.0f;
//return;
std::string filename = prefix + ".label";
log_load.print("Load input labels from %s", filename.c_str());
FILE* file = fopen(filename.c_str(), "r");
assert(file != NULL);
// Skip the first rowLeft lines
int idx;
for (V_ID v = 0; v < rowLeft; v++)
fscanf(file, "%d", &idx);
for (V_ID v = rowLeft; v <= rowRight; v++) {
fscanf(file, "%d", &idx);
assert(idx >= 0 && idx < inDim);
for (int i = 0; i < inDim; i++)
label[(v - rowLeft) * inDim + i] = (i == idx) ? 1.0 : 0.0;
}
fclose(file);
}
// Builds the launcher for LOAD_LABEL_TASK_ID: fills `input` with one-hot
// labels read from "<filename>.label". The file prefix is copied into a
// fixed MAX_FILE_LEN-byte task argument buffer.
// The `model` parameter is currently unused here.
LoadLabelsTask::LoadLabelsTask(const Model& model,
const Tensor& input,
const std::string& filename)
: TaskLauncher(LOAD_LABEL_TASK_ID,
TaskArgument(filename.c_str(), MAX_FILE_LEN))
{
// regions[0]: input (write-only, mapped to zero-copy memory)
{
RegionRequirement rr(input.region, WRITE_ONLY, EXCLUSIVE, input.region,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
}
void load_mask_impl(const Task *task,
const std::vector<PhysicalRegion> ®ions,
Context ctx, Runtime* runtime)
{
assert(regions.size() == 1);
assert(task->regions.size() == 1);
std::string prefix((char*)task->args);
const AccessorWO<int, 2> accMask(regions[0], FID_DATA);
Rect<2> rectMask = runtime->get_index_space_domain(
ctx, task->regions[0].region.get_index_space());
V_ID rowLeft = rectMask.lo[1], rowRight = rectMask.hi[1];
assert(rectMask.hi[0] == rectMask.lo[0]);
assert(accMask.accessor.is_dense_arbitrary(rectMask));
int* mask = accMask.ptr(rectMask.lo);
// TODO: remove me
//for (V_ID i = rowLeft; i <= rowRight; i++)
// mask[i - rowLeft] = MASK_TRAIN;
//return;
std::string filename = prefix + ".mask";
log_load.print("Load train mask: filename(%s) rowLeft(%u) rowRight(%u)",
filename.c_str(), rowLeft, rowRight);
std::fstream fin;
fin.open(filename, std::ios::in);
std::string line, word;
// Skip the first rowLeft lines
for (V_ID v = 0; v < rowLeft; v++)
std::getline(fin, line);
for (V_ID v = rowLeft; v <= rowRight; v++) {
std::getline(fin, line);
if (line == "Train") {
mask[v - rowLeft] = MASK_TRAIN;
} else if (line == "Val") {
mask[v - rowLeft] = MASK_VAL;
} else if (line == "Test") {
mask[v - rowLeft] = MASK_TEST;
} else if (line == "None") {
mask[v - rowLeft] = MASK_NONE;
} else {
printf("Unrecognized mask: %s\n", line.c_str());
assert(false);
}
}
}
// Builds the launcher for LOAD_MASK_TASK_ID: fills `input` with the
// train/val/test mask read from "<filename>.mask". The file prefix is
// copied into a fixed MAX_FILE_LEN-byte task argument buffer.
// The `model` parameter is currently unused here.
LoadMaskTask::LoadMaskTask(const Model& model,
const Tensor& input,
const std::string& filename)
: TaskLauncher(LOAD_MASK_TASK_ID,
TaskArgument(filename.c_str(), MAX_FILE_LEN))
{
// regions[0]: input (write-only, mapped to zero-copy memory)
{
RegionRequirement rr(input.region, WRITE_ONLY, EXCLUSIVE, input.region,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
}
void load_graph_impl(const Task *task,
const std::vector<PhysicalRegion> ®ions,
Context ctx, Runtime* runtime)
{
assert(regions.size() == 2);
assert(task->regions.size() == 2);
std::string prefix((char*) task->args);
const AccessorWO<E_ID, 1> acc_raw_rows(regions[0], FID_DATA);
const AccessorWO<V_ID, 1> acc_raw_cols(regions[1], FID_DATA);
Rect<1> rect_raw_rows = runtime->get_index_space_domain(
ctx, task->regions[0].region.get_index_space());
Rect<1> rect_raw_cols = runtime->get_index_space_domain(
ctx, task->regions[1].region.get_index_space());
V_ID rowLeft = rect_raw_rows.lo[0], rowRight = rect_raw_rows.hi[0];
E_ID colLeft = rect_raw_cols.lo[0], colRight = rect_raw_cols.hi[0];
assert(acc_raw_rows.accessor.is_dense_arbitrary(rect_raw_rows));
assert(acc_raw_cols.accessor.is_dense_arbitrary(rect_raw_cols));
E_ID* raw_rows = acc_raw_rows.ptr(rect_raw_rows.lo);
V_ID* raw_cols = acc_raw_cols.ptr(rect_raw_cols.lo);
std::string filename = prefix + ".add_self_edge.lux";
log_load.print("Load task: file(%s) rowLeft(%u) rowRight(%u) colLeft(%zu) colRight(%zu)",
filename.c_str(), rowLeft, rowRight, colLeft, colRight);
FILE* fd = fopen(filename.c_str(), "rb");
assert(fd != NULL);
int fseek_ret;
size_t fread_ret;
V_ID nv;
E_ID ne;
assert(fread(&nv, sizeof(V_ID), 1, fd) == 1);
assert(fread(&ne, sizeof(E_ID), 1, fd) == 1);
fseek_ret =
fseeko(fd, FILE_HEADER_SIZE + sizeof(E_ID) * (size_t)rowLeft, SEEK_SET);
assert(fseek_ret == 0);
fread_ret =
fread(raw_rows, sizeof(E_ID), (size_t)(rowRight - rowLeft + 1), fd);
assert(fread_ret == rowRight - rowLeft + 1);
fseek_ret =
fseeko(fd, FILE_HEADER_SIZE + sizeof(E_ID) * (size_t)nv
+ sizeof(V_ID) * (size_t)colLeft, SEEK_SET);
assert(fseek_ret == 0);
fread_ret =
fread(raw_cols, sizeof(V_ID), (size_t)(colRight - colLeft + 1), fd);
assert(fread_ret == colRight - colLeft + 1);
fclose(fd);
}
// Builds the index launcher for LOAD_GRAPH_TASK_ID: each point task fills
// its partition of the raw row-offset and column-index arrays from the
// Lux binary file named by `filename` (the prefix, copied into a fixed
// MAX_FILE_LEN-byte argument buffer).
LoadGraphTask::LoadGraphTask(const Model& model,
const std::string& filename)
: IndexLauncher(LOAD_GRAPH_TASK_ID, model.taskIS,
TaskArgument(filename.c_str(), MAX_FILE_LEN),
model.taskArgs)
{
// regions[0]: raw_rows (write-only, zero-copy memory)
{
RegionRequirement rr(model.myGraph.rawRowLP, 0/*projection id*/,
WRITE_ONLY, EXCLUSIVE, model.myGraph.rawRowLR,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
// regions[1]: raw_cols (write-only, zero-copy memory)
{
RegionRequirement rr(model.myGraph.rawColLP, 0/*projection id*/,
WRITE_ONLY, EXCLUSIVE, model.myGraph.rawColLR,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
}
// Builds the device CSR structures from the raw arrays loaded from disk.
// Grid-stride loop over the local rows [rowLeft, rowRight]; any 1D launch
// configuration is valid. rawRows/rawCols are indexed relative to this
// partition (rawRows[i] is the exclusive end offset of row i + rowLeft).
__global__
void init_graph_kernel(V_ID rowLeft,
                       V_ID rowRight,
                       E_ID colLeft,
                       NodeStruct* rowPtrs,
                       EdgeStruct* colIdxs,
                       const E_ID* rawRows,
                       const V_ID* rawCols)
{
  const V_ID stride = blockDim.x * gridDim.x;
  for (V_ID i = blockIdx.x * blockDim.x + threadIdx.x;
       i + rowLeft <= rowRight; i += stride)
  {
    // start offset is the previous row's end, or colLeft for the first local row
    const E_ID rowEnd = rawRows[i];
    const E_ID rowStart = (i == 0) ? colLeft : rawRows[i - 1];
    rowPtrs[i].index = rowEnd;
    for (E_ID e = rowStart; e < rowEnd; e++) {
      colIdxs[e - colLeft].src = rawCols[e - colLeft];
      colIdxs[e - colLeft].dst = i + rowLeft;
    }
  }
}
/* Per-GPU initialization task: builds the device CSR graph from the raw
 * row/column arrays, then creates the per-processor ResourceManager
 * (BLAS/DNN/RAND handles, dropout state, and framebuffer-resident caches).
 *
 * regions[0]: rowPtrs  (write-only, framebuffer) — also used to locate the
 *             framebuffer memory the fbCache buffers are allocated from.
 * regions[1]: colIdxs  (write-only, framebuffer)
 * regions[2]: rawRows  (read-only, zero-copy)
 * regions[3]: rawCols  (read-only, zero-copy)
 * Returns a heap-allocated ResourceManager owned by the caller.
 */
ResourceManager* init_task_impl(const Task *task,
                                const std::vector<PhysicalRegion> &regions,
                                Context ctx, Runtime* runtime)
{
  assert(regions.size() == 4);
  assert(task->regions.size() == 4);
  const Graph* graph = (Graph*) task->args;
  const AccessorWO<NodeStruct, 1> accRowPtr(regions[0], FID_DATA);
  const AccessorWO<EdgeStruct, 1> accColIdx(regions[1], FID_DATA);
  const AccessorRO<E_ID, 1> accRawRow(regions[2], FID_DATA);
  const AccessorRO<V_ID, 1> accRawCol(regions[3], FID_DATA);
  Rect<1> rectRowPtr = runtime->get_index_space_domain(
      ctx, task->regions[0].region.get_index_space());
  Rect<1> rectColIdx = runtime->get_index_space_domain(
      ctx, task->regions[1].region.get_index_space());
  Rect<1> rectRawRow = runtime->get_index_space_domain(
      ctx, task->regions[2].region.get_index_space());
  Rect<1> rectRawCol = runtime->get_index_space_domain(
      ctx, task->regions[3].region.get_index_space());
  assert(accRowPtr.accessor.is_dense_arbitrary(rectRowPtr));
  assert(accColIdx.accessor.is_dense_arbitrary(rectColIdx));
  assert(accRawRow.accessor.is_dense_arbitrary(rectRawRow));
  assert(accRawCol.accessor.is_dense_arbitrary(rectRawCol));
  NodeStruct* rowPtrs = accRowPtr.ptr(rectRowPtr);
  EdgeStruct* colIdxs = accColIdx.ptr(rectColIdx);
  const E_ID* rawRows = accRawRow.ptr(rectRawRow);
  const V_ID* rawCols = accRawCol.ptr(rectRawCol);
  V_ID rowLeft = rectRowPtr.lo[0], rowRight = rectRowPtr.hi[0];
  E_ID colLeft = rectColIdx.lo[0], colRight = rectColIdx.hi[0];
  log_load.print("Init task: rowLeft(%u) rowRight(%u) colLeft(%zu) colRight(%zu)",
                 rowLeft, rowRight, colLeft, colRight);
  // init graph
  hipLaunchKernelGGL(( init_graph_kernel), dim3(GET_BLOCKS(rowRight - rowLeft + 1)), dim3(CUDA_NUM_THREADS), 0, 0, 
      rowLeft, rowRight, colLeft, rowPtrs, colIdxs, rawRows, rawCols);
  // added: catch launch-configuration errors explicitly instead of relying
  // on them surfacing through the synchronize below
  checkCUDA(hipGetLastError());
  checkCUDA(hipDeviceSynchronize());
  ResourceManager* manager = new ResourceManager();
  manager->proc_id = task->current_proc.id;
  // init nccl
  //int numRanks = graph->numParts / graph->numMachines;
  //int myRank = task->current_proc.id % numRanks;
  //int node = task->current_proc.address_space();
  //piece.nccl = graph->nccl[node*numRanks+myRank];
  //printf("Before ncclCommInitRank: numRanks(%d) id(%s) myrank(%d) processorId(%d)\n", numRanks, graph->ncclID[node].internal, 0, task->current_proc.id);
  //NCCLCheck(ncclCommInitRank(&piece.nccl, numRanks, graph->ncclID[node], task->current_proc.id % numRanks));
  //printf("After ncclCommInitRank\n");
  // init cublas
  checkCUDA(hipblasCreate(&(manager->blas)));
  checkCUDNN(cudnnCreate(&(manager->dnn)));
  checkCUDA(hiprandCreateGenerator(&(manager->rand), HIPRAND_RNG_PSEUDO_DEFAULT));
  // init hiprand
  // TODO: change to random seed before releasing
  checkCUDA(hiprandSetPseudoRandomGeneratorSeed(manager->rand, 0));
  // init dropout states
  checkCUDNN(cudnnDropoutGetStatesSize(manager->dnn, &(manager->dropoutSize)));
  checkCUDA(hipMalloc(&(manager->dropoutStates), manager->dropoutSize));
  //manager->numNodes = graph->numNodes;
  //manager->numEdges = graph->numEdges;
  //manager->numParts = graph->numParts;
  // Allocate fbInput/fbOutput on the same memory as rowPtr
  std::set<Memory> memFB;
  regions[0].get_memories(memFB);
  assert(memFB.size() == 1);
  assert(memFB.begin()->kind() == Memory::GPU_FB_MEM);
  Realm::MemoryImpl* memImpl =
    Realm::get_runtime()->get_memory_impl(*memFB.begin());
  Realm::Cuda::GPUFBMemory* memFBImpl = (Realm::Cuda::GPUFBMemory*) memImpl;
  manager->allocator = memFBImpl;
  for (int i = 0; i < MAX_NUM_CACHES; i++) {
    // cache 0 is sized for the whole graph (+ slack); the others only for
    // this partition's rows
    if (i == 0)
      manager->fbCache[i].volume = graph->maxHidden * (graph->numNodes + 128);
    else
      manager->fbCache[i].volume = graph->maxHidden * (rowRight - rowLeft + 1);
    manager->fbCache[i].region = LogicalRegion::NO_REGION;
    off_t offset = memFBImpl->alloc_bytes_local(manager->fbCache[i].volume * sizeof(DATATYPE));
    assert(offset >= 0);
    manager->fbCache[i].ptr = (DATATYPE*) memFBImpl->get_direct_ptr(offset, 0);
  }
  return manager;
}
// Builds the index launcher for INIT_TASK_ID: each point task converts its
// raw row/column slice into the device CSR structures and creates a
// per-GPU ResourceManager. The Graph descriptor is passed by value.
InitTask::InitTask(const Model& model)
: IndexLauncher(INIT_TASK_ID, model.taskIS,
TaskArgument(&(model.myGraph), sizeof(Graph)), model.taskArgs)
{
// regions[0]: row_ptrs (write-only, GPU framebuffer)
{
RegionRequirement rr(model.myGraph.rowPtrLP, 0/*identity*/,
WRITE_ONLY, EXCLUSIVE, model.myGraph.rowPtrLR,
MAP_TO_FB_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
// regions[1]: col_idxs (write-only, GPU framebuffer)
{
RegionRequirement rr(model.myGraph.colIdxLP, 0/*identity*/,
WRITE_ONLY, EXCLUSIVE, model.myGraph.colIdxLR,
MAP_TO_FB_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
// regions[2]: raw_rows (read-only, zero-copy memory)
{
RegionRequirement rr(model.myGraph.rawRowLP, 0/*identity*/,
READ_ONLY, EXCLUSIVE, model.myGraph.rawRowLR,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
// regions[3]: raw_cols (read-only, zero-copy memory)
{
RegionRequirement rr(model.myGraph.rawColLP, 0/*identity*/,
READ_ONLY, EXCLUSIVE, model.myGraph.rawColLR,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
}
| cd512da38da49e1e420bc3c4695ae0ff693f5ea0.cu | /* Copyright 2019 Stanford
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "gnn.h"
#include "cuda_helper.h"
#include "realm/runtime_impl.h"
#include "realm/cuda/cuda_module.h"
#include <fstream>
#include <sstream>
LegionRuntime::Logger::Category log_load("gnn");
void load_features_impl(const Task *task,
const std::vector<PhysicalRegion> ®ions,
Context ctx, Runtime* runtime)
{
assert(regions.size() == 1);
assert(task->regions.size() == 1);
std::string prefix((char*)task->args);
const AccessorWO<DATATYPE, 2> accInput(regions[0], FID_DATA);
Rect<2> rectInput = runtime->get_index_space_domain(
ctx, task->regions[0].region.get_index_space());
V_ID rowLeft = rectInput.lo[1], rowRight = rectInput.hi[1];
int inDim = rectInput.hi[0] - rectInput.lo[0] + 1;
assert(accInput.accessor.is_dense_arbitrary(rectInput));
DATATYPE* input = accInput.ptr(rectInput.lo);
std::string csvFile = prefix + ".feats.csv";
std::string binFile = prefix + ".feats.bin";
FILE* binFin = fopen(binFile.c_str(), "rb");
if (binFin == NULL) {
log_load.print("Load features from CSV: file(%s) rowLeft(%u) rowRight(%u)",
csvFile.c_str(), rowLeft, rowRight);
std::fstream csvFin;
csvFin.open(csvFile, std::ios::in);
std::string line, word;
// Skip the first rowLeft lines
for (V_ID v = 0; v < rowLeft; v++)
std::getline(csvFin, line);
for (V_ID v = rowLeft; v <= rowRight; v++) {
std::getline(csvFin, line);
std::stringstream ss(line);
int feat_cnt = 0;
while (std::getline(ss, word, ',')) {
float num = std::stof(word);
input[(v - rowLeft) * inDim + feat_cnt] = num;
feat_cnt ++;
}
assert(feat_cnt == inDim);
if (v % 10000 == 0) log_load.print("Loaded %u/%u nodes", v, rowRight);
}
FILE* binFout = fopen((prefix + ".feats.bin").c_str(), "wb");
fwrite(input, sizeof(DATATYPE), rectInput.volume(), binFout);
fclose(binFout);
csvFin.close();
} else {
log_load.print("Load features from Binary: file(%s) rowLeft(%u) rowRight(%u)\n",
binFile.c_str(), rowLeft, rowRight);
size_t ret = fread(input, sizeof(DATATYPE), rectInput.volume(), binFin);
assert(ret == rectInput.volume());
fclose(binFin);
}
}
// Builds the launcher for LOAD_FEATS_TASK_ID: fills `input` with node
// features read from "<filename>.feats.{bin,csv}". The file prefix is
// copied into a fixed MAX_FILE_LEN-byte task argument buffer.
// The `model` parameter is currently unused here.
LoadFeaturesTask::LoadFeaturesTask(const Model& model,
const Tensor& input,
const std::string& filename)
: TaskLauncher(LOAD_FEATS_TASK_ID,
TaskArgument(filename.c_str(), MAX_FILE_LEN))
{
// regions[0]: input (write-only, mapped to zero-copy memory)
{
RegionRequirement rr(input.region, WRITE_ONLY, EXCLUSIVE, input.region,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
}
void load_labels_impl(const Task *task,
const std::vector<PhysicalRegion> ®ions,
Context ctx, Runtime* runtime)
{
assert(regions.size() == 1);
assert(task->regions.size() == 1);
std::string prefix((char*)task->args);
const AccessorWO<DATATYPE, 2> accLabel(regions[0], FID_DATA);
Rect<2> rectLabel = runtime->get_index_space_domain(
ctx, task->regions[0].region.get_index_space());
V_ID rowLeft = rectLabel.lo[1], rowRight = rectLabel.hi[1];
int inDim = rectLabel.hi[0] - rectLabel.lo[0] + 1;
assert(accLabel.accessor.is_dense_arbitrary(rectLabel));
DATATYPE* label = accLabel.ptr(rectLabel.lo);
// TODO: remove me
//for (V_ID v = rowLeft; v<= rowRight; v++)
// for (int i = 0; i < inDim; i++)
// label[(v-rowLeft)*inDim+i] = i == 0 ? 1.0f : 0.0f;
//return;
std::string filename = prefix + ".label";
log_load.print("Load input labels from %s", filename.c_str());
FILE* file = fopen(filename.c_str(), "r");
assert(file != NULL);
// Skip the first rowLeft lines
int idx;
for (V_ID v = 0; v < rowLeft; v++)
fscanf(file, "%d", &idx);
for (V_ID v = rowLeft; v <= rowRight; v++) {
fscanf(file, "%d", &idx);
assert(idx >= 0 && idx < inDim);
for (int i = 0; i < inDim; i++)
label[(v - rowLeft) * inDim + i] = (i == idx) ? 1.0 : 0.0;
}
fclose(file);
}
// Builds the launcher for LOAD_LABEL_TASK_ID: fills `input` with one-hot
// labels read from "<filename>.label". The file prefix is copied into a
// fixed MAX_FILE_LEN-byte task argument buffer.
// The `model` parameter is currently unused here.
LoadLabelsTask::LoadLabelsTask(const Model& model,
const Tensor& input,
const std::string& filename)
: TaskLauncher(LOAD_LABEL_TASK_ID,
TaskArgument(filename.c_str(), MAX_FILE_LEN))
{
// regions[0]: input (write-only, mapped to zero-copy memory)
{
RegionRequirement rr(input.region, WRITE_ONLY, EXCLUSIVE, input.region,
MAP_TO_ZC_MEMORY);
rr.add_field(FID_DATA);
add_region_requirement(rr);
}
}
void load_mask_impl(const Task *task,
                    const std::vector<PhysicalRegion> &regions,
                    Context ctx, Runtime* runtime)
{
  // Reads one of "Train"/"Val"/"Test"/"None" per vertex from "<prefix>.mask"
  // and stores the corresponding MASK_* constant for the vertex range
  // [rowLeft, rowRight] owned by this task.
  assert(regions.size() == 1);
  assert(task->regions.size() == 1);
  std::string prefix((char*)task->args);
  const AccessorWO<int, 2> accMask(regions[0], FID_DATA);
  Rect<2> rectMask = runtime->get_index_space_domain(
      ctx, task->regions[0].region.get_index_space());
  V_ID rowLeft = rectMask.lo[1], rowRight = rectMask.hi[1];
  // The mask region is exactly one entry wide per vertex.
  assert(rectMask.hi[0] == rectMask.lo[0]);
  assert(accMask.accessor.is_dense_arbitrary(rectMask));
  int* mask = accMask.ptr(rectMask.lo);
  std::string filename = prefix + ".mask";
  log_load.print("Load train mask: filename(%s) rowLeft(%u) rowRight(%u)",
                 filename.c_str(), rowLeft, rowRight);
  std::fstream fin;
  fin.open(filename, std::ios::in);
  // Fail loudly if the mask file is missing instead of silently reading
  // nothing and leaving the mask region uninitialized.
  assert(fin.is_open());
  std::string line;
  // Skip the first rowLeft lines (vertices owned by other tasks).
  for (V_ID v = 0; v < rowLeft; v++)
    std::getline(fin, line);
  for (V_ID v = rowLeft; v <= rowRight; v++) {
    if (!std::getline(fin, line)) {
      printf("Mask file ended early: %s\n", filename.c_str());
      assert(false);
    }
    if (line == "Train") {
      mask[v - rowLeft] = MASK_TRAIN;
    } else if (line == "Val") {
      mask[v - rowLeft] = MASK_VAL;
    } else if (line == "Test") {
      mask[v - rowLeft] = MASK_TEST;
    } else if (line == "None") {
      mask[v - rowLeft] = MASK_NONE;
    } else {
      printf("Unrecognized mask: %s\n", line.c_str());
      assert(false);
    }
  }
}
LoadMaskTask::LoadMaskTask(const Model& model,
                           const Tensor& input,
                           const std::string& filename)
  : TaskLauncher(LOAD_MASK_TASK_ID,
                 TaskArgument(filename.c_str(), MAX_FILE_LEN))
{
  // regions[0]: the mask tensor, written exclusively into zero-copy memory.
  RegionRequirement req(input.region, WRITE_ONLY, EXCLUSIVE, input.region,
                        MAP_TO_ZC_MEMORY);
  req.add_field(FID_DATA);
  add_region_requirement(req);
}
void load_graph_impl(const Task *task,
                     const std::vector<PhysicalRegion> &regions,
                     Context ctx, Runtime* runtime)
{
  // Loads this task's slice of the graph from the binary
  // "<prefix>.add_self_edge.lux" file. The file layout (established by the
  // reads/seeks below) is: header of FILE_HEADER_SIZE bytes containing
  // nv (V_ID) and ne (E_ID), followed by nv row offsets (E_ID each),
  // followed by the column indices (V_ID each).
  // regions[0] receives the raw row offsets, regions[1] the raw columns.
  assert(regions.size() == 2);
  assert(task->regions.size() == 2);
  std::string prefix((char*) task->args);
  const AccessorWO<E_ID, 1> acc_raw_rows(regions[0], FID_DATA);
  const AccessorWO<V_ID, 1> acc_raw_cols(regions[1], FID_DATA);
  Rect<1> rect_raw_rows = runtime->get_index_space_domain(
      ctx, task->regions[0].region.get_index_space());
  Rect<1> rect_raw_cols = runtime->get_index_space_domain(
      ctx, task->regions[1].region.get_index_space());
  // [rowLeft, rowRight] / [colLeft, colRight] are the inclusive global
  // vertex/edge ranges owned by this task.
  V_ID rowLeft = rect_raw_rows.lo[0], rowRight = rect_raw_rows.hi[0];
  E_ID colLeft = rect_raw_cols.lo[0], colRight = rect_raw_cols.hi[0];
  assert(acc_raw_rows.accessor.is_dense_arbitrary(rect_raw_rows));
  assert(acc_raw_cols.accessor.is_dense_arbitrary(rect_raw_cols));
  E_ID* raw_rows = acc_raw_rows.ptr(rect_raw_rows.lo);
  V_ID* raw_cols = acc_raw_cols.ptr(rect_raw_cols.lo);
  std::string filename = prefix + ".add_self_edge.lux";
  log_load.print("Load task: file(%s) rowLeft(%u) rowRight(%u) colLeft(%zu) colRight(%zu)",
                 filename.c_str(), rowLeft, rowRight, colLeft, colRight);
  FILE* fd = fopen(filename.c_str(), "rb");
  assert(fd != NULL);
  int fseek_ret;
  size_t fread_ret;
  V_ID nv;
  E_ID ne;
  // Read the graph-wide vertex/edge counts from the header.
  assert(fread(&nv, sizeof(V_ID), 1, fd) == 1);
  assert(fread(&ne, sizeof(E_ID), 1, fd) == 1);
  // Seek to this task's slice of the row-offset array and read it.
  // fseeko (off_t offset) is used so files > 2 GB are handled correctly.
  fseek_ret =
      fseeko(fd, FILE_HEADER_SIZE + sizeof(E_ID) * (size_t)rowLeft, SEEK_SET);
  assert(fseek_ret == 0);
  fread_ret =
      fread(raw_rows, sizeof(E_ID), (size_t)(rowRight - rowLeft + 1), fd);
  assert(fread_ret == rowRight - rowLeft + 1);
  // The column array starts after all nv row offsets; seek to this task's
  // slice of it and read the edge destinations.
  fseek_ret =
      fseeko(fd, FILE_HEADER_SIZE + sizeof(E_ID) * (size_t)nv
                 + sizeof(V_ID) * (size_t)colLeft, SEEK_SET);
  assert(fseek_ret == 0);
  fread_ret =
      fread(raw_cols, sizeof(V_ID), (size_t)(colRight - colLeft + 1), fd);
  assert(fread_ret == colRight - colLeft + 1);
  fclose(fd);
}
LoadGraphTask::LoadGraphTask(const Model& model,
                             const std::string& filename)
  : IndexLauncher(LOAD_GRAPH_TASK_ID, model.taskIS,
                  TaskArgument(filename.c_str(), MAX_FILE_LEN),
                  model.taskArgs)
{
  // regions[0]: raw row offsets, written into zero-copy memory.
  RegionRequirement rowReq(model.myGraph.rawRowLP, 0/*projection id*/,
                           WRITE_ONLY, EXCLUSIVE, model.myGraph.rawRowLR,
                           MAP_TO_ZC_MEMORY);
  rowReq.add_field(FID_DATA);
  add_region_requirement(rowReq);
  // regions[1]: raw column indices, written into zero-copy memory.
  RegionRequirement colReq(model.myGraph.rawColLP, 0/*projection id*/,
                           WRITE_ONLY, EXCLUSIVE, model.myGraph.rawColLR,
                           MAP_TO_ZC_MEMORY);
  colReq.add_field(FID_DATA);
  add_region_requirement(colReq);
}
// Builds the device-side CSR structure for the local rows [rowLeft, rowRight]:
// rowPtrs[n] receives the (exclusive) end offset of local row n, and every
// edge of the row is expanded into an EdgeStruct carrying (src, dst).
// rawRows holds cumulative edge counts; rawCols/colIdxs are indexed relative
// to this partition's first edge (colLeft).
__global__
void init_graph_kernel(V_ID rowLeft,
                       V_ID rowRight,
                       E_ID colLeft,
                       NodeStruct* rowPtrs,
                       EdgeStruct* colIdxs,
                       const E_ID* rawRows,
                       const V_ID* rawCols)
{
  // Grid-stride loop over the local (0-based) row index n.
  for (V_ID n = blockIdx.x * blockDim.x + threadIdx.x;
       n + rowLeft <= rowRight; n += blockDim.x * gridDim.x)
  {
    // Row n's edges span [startColIdx, endColIdx) in global edge numbering;
    // the first local row starts at the partition boundary colLeft.
    E_ID startColIdx, endColIdx = rawRows[n];
    if (n == 0)
      startColIdx = colLeft;
    else
      startColIdx = rawRows[n-1];
    rowPtrs[n].index = endColIdx;
    for (E_ID e = startColIdx; e < endColIdx; e++) {
      // Subtract colLeft to convert a global edge id into a local offset.
      colIdxs[e - colLeft].src = rawCols[e - colLeft];
      colIdxs[e - colLeft].dst = n + rowLeft;
    }
  }
}
// Per-GPU initialization task: builds the device CSR graph from the raw
// row/column arrays, then creates the per-processor ResourceManager
// (BLAS/DNN/RNG handles, dropout state, and framebuffer-resident caches).
// Returns a heap-allocated ResourceManager owned by the runtime/caller.
ResourceManager* init_task_impl(const Task *task,
                                const std::vector<PhysicalRegion> &regions,
                                Context ctx, Runtime* runtime)
{
  assert(regions.size() == 4);
  assert(task->regions.size() == 4);
  const Graph* graph = (Graph*) task->args;
  // regions[0]: CSR row pointers (WO, framebuffer)
  // regions[1]: CSR edge structs (WO, framebuffer)
  // regions[2]: raw row offsets   (RO, zero-copy)
  // regions[3]: raw column indices (RO, zero-copy)
  const AccessorWO<NodeStruct, 1> accRowPtr(regions[0], FID_DATA);
  const AccessorWO<EdgeStruct, 1> accColIdx(regions[1], FID_DATA);
  const AccessorRO<E_ID, 1> accRawRow(regions[2], FID_DATA);
  const AccessorRO<V_ID, 1> accRawCol(regions[3], FID_DATA);
  Rect<1> rectRowPtr = runtime->get_index_space_domain(
      ctx, task->regions[0].region.get_index_space());
  Rect<1> rectColIdx = runtime->get_index_space_domain(
      ctx, task->regions[1].region.get_index_space());
  Rect<1> rectRawRow = runtime->get_index_space_domain(
      ctx, task->regions[2].region.get_index_space());
  Rect<1> rectRawCol = runtime->get_index_space_domain(
      ctx, task->regions[3].region.get_index_space());
  assert(accRowPtr.accessor.is_dense_arbitrary(rectRowPtr));
  assert(accColIdx.accessor.is_dense_arbitrary(rectColIdx));
  assert(accRawRow.accessor.is_dense_arbitrary(rectRawRow));
  assert(accRawCol.accessor.is_dense_arbitrary(rectRawCol));
  NodeStruct* rowPtrs = accRowPtr.ptr(rectRowPtr);
  EdgeStruct* colIdxs = accColIdx.ptr(rectColIdx);
  const E_ID* rawRows = accRawRow.ptr(rectRawRow);
  const V_ID* rawCols = accRawCol.ptr(rectRawCol);
  V_ID rowLeft = rectRowPtr.lo[0], rowRight = rectRowPtr.hi[0];
  E_ID colLeft = rectColIdx.lo[0], colRight = rectColIdx.hi[0];
  log_load.print("Init task: rowLeft(%u) rowRight(%u) colLeft(%zu) colRight(%zu)",
                 rowLeft, rowRight, colLeft, colRight);
  // init graph
  init_graph_kernel<<<GET_BLOCKS(rowRight - rowLeft + 1), CUDA_NUM_THREADS>>>(
      rowLeft, rowRight, colLeft, rowPtrs, colIdxs, rawRows, rawCols);
  // NOTE(review): this file was auto-hipified (includes hip/hip_runtime) but
  // the cuda* / cublas* / cudnn* / curand* calls below were left unconverted
  // (e.g. cudaDeviceSynchronize vs hipDeviceSynchronize). Confirm the build
  // provides CUDA-compatibility aliases, otherwise these will not compile
  // against the HIP runtime.
  checkCUDA(cudaDeviceSynchronize());
  ResourceManager* manager = new ResourceManager();
  manager->proc_id = task->current_proc.id;
  // init nccl
  //int numRanks = graph->numParts / graph->numMachines;
  //int myRank = task->current_proc.id % numRanks;
  //int node = task->current_proc.address_space();
  //piece.nccl = graph->nccl[node*numRanks+myRank];
  //printf("Before ncclCommInitRank: numRanks(%d) id(%s) myrank(%d) processorId(%d)\n", numRanks, graph->ncclID[node].internal, 0, task->current_proc.id);
  //NCCLCheck(ncclCommInitRank(&piece.nccl, numRanks, graph->ncclID[node], task->current_proc.id % numRanks));
  //printf("After ncclCommInitRank\n");
  // init cublas
  checkCUDA(cublasCreate(&(manager->blas)));
  checkCUDNN(cudnnCreate(&(manager->dnn)));
  checkCUDA(curandCreateGenerator(&(manager->rand), CURAND_RNG_PSEUDO_DEFAULT));
  // init curand
  // TODO: change to random seed before releasing
  checkCUDA(curandSetPseudoRandomGeneratorSeed(manager->rand, 0));
  // init dropout states
  checkCUDNN(cudnnDropoutGetStatesSize(manager->dnn, &(manager->dropoutSize)));
  checkCUDA(cudaMalloc(&(manager->dropoutStates), manager->dropoutSize));
  //manager->numNodes = graph->numNodes;
  //manager->numEdges = graph->numEdges;
  //manager->numParts = graph->numParts;
  // Allocate fbInput/fbOutput on the same memory as rowPtr
  std::set<Memory> memFB;
  regions[0].get_memories(memFB);
  assert(memFB.size() == 1);
  assert(memFB.begin()->kind() == Memory::GPU_FB_MEM);
  Realm::MemoryImpl* memImpl =
      Realm::get_runtime()->get_memory_impl(*memFB.begin());
  Realm::Cuda::GPUFBMemory* memFBImpl = (Realm::Cuda::GPUFBMemory*) memImpl;
  manager->allocator = memFBImpl;
  // Pre-allocate DATATYPE caches directly from the framebuffer memory:
  // cache 0 is sized for the whole graph (plus 128 rows of slack),
  // the rest for this partition's rows only.
  for (int i = 0; i < MAX_NUM_CACHES; i++) {
    if (i == 0)
      manager->fbCache[i].volume = graph->maxHidden * (graph->numNodes + 128);
    else
      manager->fbCache[i].volume = graph->maxHidden * (rowRight - rowLeft + 1);
    manager->fbCache[i].region = LogicalRegion::NO_REGION;
    off_t offset = memFBImpl->alloc_bytes_local(manager->fbCache[i].volume * sizeof(DATATYPE));
    assert(offset >= 0);
    manager->fbCache[i].ptr = (DATATYPE*) memFBImpl->get_direct_ptr(offset, 0);
  }
  return manager;
}
InitTask::InitTask(const Model& model)
  : IndexLauncher(INIT_TASK_ID, model.taskIS,
                  TaskArgument(&(model.myGraph), sizeof(Graph)), model.taskArgs)
{
  // regions[0]: CSR row pointers, written into framebuffer memory.
  RegionRequirement rowPtrReq(model.myGraph.rowPtrLP, 0/*identity*/,
                              WRITE_ONLY, EXCLUSIVE, model.myGraph.rowPtrLR,
                              MAP_TO_FB_MEMORY);
  rowPtrReq.add_field(FID_DATA);
  add_region_requirement(rowPtrReq);
  // regions[1]: CSR column/edge structs, written into framebuffer memory.
  RegionRequirement colIdxReq(model.myGraph.colIdxLP, 0/*identity*/,
                              WRITE_ONLY, EXCLUSIVE, model.myGraph.colIdxLR,
                              MAP_TO_FB_MEMORY);
  colIdxReq.add_field(FID_DATA);
  add_region_requirement(colIdxReq);
  // regions[2]: raw row offsets, read from zero-copy memory.
  RegionRequirement rawRowReq(model.myGraph.rawRowLP, 0/*identity*/,
                              READ_ONLY, EXCLUSIVE, model.myGraph.rawRowLR,
                              MAP_TO_ZC_MEMORY);
  rawRowReq.add_field(FID_DATA);
  add_region_requirement(rawRowReq);
  // regions[3]: raw column indices, read from zero-copy memory.
  RegionRequirement rawColReq(model.myGraph.rawColLP, 0/*identity*/,
                              READ_ONLY, EXCLUSIVE, model.myGraph.rawColLR,
                              MAP_TO_ZC_MEMORY);
  rawColReq.add_field(FID_DATA);
  add_region_requirement(rawColReq);
}
|
e18c7082ccd9a67910abf44b91f13f4ffb178bfb.hip | // !!! This is a file automatically generated by hipify!!!
#include <data_types/timeseries.hpp>
#include <data_types/fourierseries.hpp>
#include <data_types/candidates.hpp>
#include <data_types/filterbank.hpp>
#include <transforms/dedisperser.hpp>
#include <transforms/resampler.hpp>
#include <transforms/folder.hpp>
#include <transforms/ffter.hpp>
#include <transforms/dereddener.hpp>
#include <transforms/spectrumformer.hpp>
#include <transforms/birdiezapper.hpp>
#include <transforms/peakfinder.hpp>
#include <transforms/distiller.hpp>
#include <transforms/harmonicfolder.hpp>
#include <transforms/scorer.hpp>
#include <utils/exceptions.hpp>
#include <utils/utils.hpp>
#include <utils/stats.hpp>
#include <utils/stopwatch.hpp>
#include <utils/progress_bar.hpp>
#include <utils/cmdline.hpp>
#include <utils/output_stats.hpp>
#include <string>
#include <iostream>
#include <stdio.h>
#include <unistd.h>
#include "hip/hip_runtime.h"
#include "hipfft.h"
#include "pthread.h"
#include <cmath>
#include <map>
// Thread-safe dispenser of dispersion-measure (DM) trial indices.
// Worker threads repeatedly call get_dm_trial_idx() until it returns -1.
class DMDispenser {
private:
  DispersionTrials<unsigned char>& trials;
  pthread_mutex_t mutex;
  int dm_idx;            // next trial index to hand out
  int count;             // total number of DM trials
  ProgressBar* progress; // owned; non-NULL only after enable_progress_bar()
  bool use_progress_bar;

public:
  DMDispenser(DispersionTrials<unsigned char>& trials)
    // progress is initialized to NULL so the destructor is always safe,
    // even when enable_progress_bar() is never called.
    :trials(trials),dm_idx(0),progress(NULL),use_progress_bar(false){
    count = trials.get_count();
    pthread_mutex_init(&mutex, NULL);
  }

  // Turn on progress reporting; call before the workers start.
  void enable_progress_bar(){
    progress = new ProgressBar();
    use_progress_bar = true;
  }

  // Returns the next DM trial index, or -1 once all trials are dispensed.
  // Safe to call concurrently from multiple threads.
  int get_dm_trial_idx(void){
    pthread_mutex_lock(&mutex);
    int retval;
    if (dm_idx==0){
      // First hand-out: announce and start the progress bar.
      if (use_progress_bar){
        printf("Releasing DMs to workers...\n");
        progress->start();
      }
    }
    if (dm_idx >= trials.get_count()){
      retval = -1;
      if (use_progress_bar)
        progress->stop();
    } else {
      if (use_progress_bar)
        progress->set_progress((float)dm_idx/count);
      retval = dm_idx;
      dm_idx++;
    }
    pthread_mutex_unlock(&mutex);
    return retval;
  }

  ~DMDispenser(){
    delete progress; // deleting NULL is a no-op
    pthread_mutex_destroy(&mutex);
  }
};
// One Worker per GPU: pulls DM trials from the dispenser and runs the full
// acceleration-search pipeline (FFT, dereddening, resampling, harmonic
// summing, peak finding, distilling) on its assigned device.
// Results accumulate in dm_trial_cands.
class Worker {
private:
  DispersionTrials<unsigned char>& trials;
  DMDispenser& manager;
  CmdLineOptions& args;
  AccelerationPlan& acc_plan;
  unsigned int size;   // FFT transform length (samples)
  int device;          // GPU device index this worker binds to
  std::map<std::string,Stopwatch> timers; // reserved for benchmarking (unused)

public:
  CandidateCollection dm_trial_cands;

  Worker(DispersionTrials<unsigned char>& trials, DMDispenser& manager,
         AccelerationPlan& acc_plan, CmdLineOptions& args, unsigned int size, int device)
    :trials(trials),manager(manager),acc_plan(acc_plan),args(args),size(size),device(device){}

  // Main processing loop; runs until the dispenser is exhausted.
  void start(void)
  {
    hipSetDevice(device);
    Stopwatch pass_timer;
    pass_timer.start();
    // When the transform length exceeds the data length, the tail of the
    // device time series is padded with the data mean per trial.
    bool padding = false;
    if (size > trials.get_nsamps())
      padding = true;
    CuFFTerR2C r2cfft(size);
    CuFFTerC2R c2rfft(size);
    float tobs = size*trials.get_tsamp();
    float bin_width = 1.0/tobs;
    DeviceFourierSeries<hipfftComplex> d_fseries(size/2+1,bin_width);
    DedispersedTimeSeries<unsigned char> tim;
    ReusableDeviceTimeSeries<float,unsigned char> d_tim(size);
    DeviceTimeSeries<float> d_tim_r(size);
    TimeDomainResampler resampler;
    DevicePowerSpectrum<float> pspec(d_fseries);
    // Initialize to NULL so the pointer is never used/freed uninitialized
    // (it is only allocated when a zap file is supplied).
    Zapper* bzap = NULL;
    if (args.zapfilename!=""){
      if (args.verbose)
        std::cout << "Using zapfile: " << args.zapfilename << std::endl;
      bzap = new Zapper(args.zapfilename);
    }
    Dereddener rednoise(size/2+1);
    SpectrumFormer former;
    PeakFinder cand_finder(args.min_snr,args.min_freq,args.max_freq,size);
    HarmonicSums<float> sums(pspec,args.nharmonics);
    HarmonicFolder harm_folder(sums);
    std::vector<float> acc_list;
    HarmonicDistiller harm_finder(args.freq_tol,args.max_harm,false);
    AccelerationDistiller acc_still(tobs,args.freq_tol,true);
    float mean,std,rms;
    float padding_mean;
    int ii;
    PUSH_NVTX_RANGE("DM-Loop",0)
    while (true){
      ii = manager.get_dm_trial_idx();
      if (ii==-1)
        break; // all DM trials dispensed
      trials.get_idx(ii,tim);
      if (args.verbose)
        std::cout << "Copying DM trial to device (DM: " << tim.get_dm() << ")"<< std::endl;
      d_tim.copy_from_host(tim);
      if (padding){
        padding_mean = stats::mean<float>(d_tim.get_data(),trials.get_nsamps());
        d_tim.fill(trials.get_nsamps(),d_tim.get_nsamps(),padding_mean);
      }
      if (args.verbose)
        std::cout << "Generating accelration list" << std::endl;
      acc_plan.generate_accel_list(tim.get_dm(),acc_list);
      if (args.verbose)
        std::cout << "Searching "<< acc_list.size()<< " acceleration trials for DM "<< tim.get_dm() << std::endl;
      if (args.verbose)
        std::cout << "Executing forward FFT" << std::endl;
      r2cfft.execute(d_tim.get_data(),d_fseries.get_data());
      if (args.verbose)
        std::cout << "Forming power spectrum" << std::endl;
      former.form(d_fseries,pspec);
      if (args.verbose)
        std::cout << "Finding running median" << std::endl;
      rednoise.calculate_median(pspec);
      if (args.verbose)
        std::cout << "Dereddening Fourier series" << std::endl;
      rednoise.deredden(d_fseries);
      if (args.zapfilename!=""){
        if (args.verbose)
          std::cout << "Zapping birdies" << std::endl;
        bzap->zap(d_fseries);
      }
      if (args.verbose)
        std::cout << "Forming interpolated power spectrum" << std::endl;
      former.form_interpolated(d_fseries,pspec);
      if (args.verbose)
        std::cout << "Finding statistics" << std::endl;
      stats::stats<float>(pspec.get_data(),size/2+1,&mean,&rms,&std);
      if (args.verbose)
        std::cout << "Executing inverse FFT" << std::endl;
      c2rfft.execute(d_fseries.get_data(),d_tim.get_data());
      CandidateCollection accel_trial_cands;
      PUSH_NVTX_RANGE("Acceleration-Loop",1)
      // Inner loop: resample the dereddened time series for each trial
      // acceleration and search its spectrum for candidates.
      for (int jj=0;jj<acc_list.size();jj++){
        if (args.verbose)
          std::cout << "Resampling to "<< acc_list[jj] << " m/s/s" << std::endl;
        resampler.resample(d_tim,d_tim_r,size,acc_list[jj]);
        if (args.verbose)
          std::cout << "Execute forward FFT" << std::endl;
        r2cfft.execute(d_tim_r.get_data(),d_fseries.get_data());
        if (args.verbose)
          std::cout << "Form interpolated power spectrum" << std::endl;
        former.form_interpolated(d_fseries,pspec);
        if (args.verbose)
          std::cout << "Normalise power spectrum" << std::endl;
        stats::normalise(pspec.get_data(),mean*size,std*size,size/2+1);
        if (args.verbose)
          std::cout << "Harmonic summing" << std::endl;
        harm_folder.fold(pspec);
        if (args.verbose)
          std::cout << "Finding peaks" << std::endl;
        SpectrumCandidates trial_cands(tim.get_dm(),ii,acc_list[jj]);
        cand_finder.find_candidates(pspec,trial_cands);
        cand_finder.find_candidates(sums,trial_cands);
        if (args.verbose)
          std::cout << "Distilling harmonics" << std::endl;
        accel_trial_cands.append(harm_finder.distill(trial_cands.cands));
      }
      POP_NVTX_RANGE
      if (args.verbose)
        std::cout << "Distilling accelerations" << std::endl;
      dm_trial_cands.append(acc_still.distill(accel_trial_cands.cands));
    }
    POP_NVTX_RANGE
    delete bzap; // deleting NULL is a no-op
    if (args.verbose)
      std::cout << "DM processing took " << pass_timer.getTime() << " seconds"<< std::endl;
  }
};
// pthread entry point: unpack the Worker instance and run its search loop.
void* launch_worker_thread(void* ptr){
  Worker* worker = static_cast<Worker*>(ptr);
  worker->start();
  return NULL;
}
// Peasoup acceleration-search driver: read filterbank data, dedisperse on
// the CPU pool, fan the DM trials out to one Worker thread per GPU, then
// distill, score, fold and write out the surviving candidates.
int main(int argc, char **argv)
{
  std::map<std::string,Stopwatch> timers;
  timers["reading"]      = Stopwatch();
  timers["dedispersion"] = Stopwatch();
  timers["searching"]    = Stopwatch();
  timers["folding"]      = Stopwatch();
  timers["total"]        = Stopwatch();
  timers["total"].start();
  CmdLineOptions args;
  if (!read_cmdline_options(args,argc,argv))
    ErrorChecker::throw_error("Failed to parse command line arguments.");
  // One worker thread per GPU, capped by the user limit, minimum one.
  int nthreads = ::min(Utils::gpu_count(),args.max_num_threads);
  nthreads = ::max(1,nthreads);
  if (args.verbose)
    std::cout << "Using file: " << args.infilename << std::endl;
  std::string filename(args.infilename);
  if (args.progress_bar)
    printf("Reading data from %s\n",args.infilename.c_str());
  timers["reading"].start();
  SigprocFilterbank filobj(filename);
  timers["reading"].stop();
  if (args.progress_bar){
    printf("Complete (execution time %.2f s)\n",timers["reading"].getTime());
  }
  Dedisperser dedisperser(filobj,nthreads);
  if (args.killfilename!=""){
    if (args.verbose)
      std::cout << "Using killfile: " << args.killfilename << std::endl;
    dedisperser.set_killmask(args.killfilename);
  }
  if (args.verbose)
    std::cout << "Generating DM list" << std::endl;
  dedisperser.generate_dm_list(args.dm_start,args.dm_end,args.dm_pulse_width,args.dm_tol);
  std::vector<float> dm_list = dedisperser.get_dm_list();
  if (args.verbose){
    std::cout << dm_list.size() << " DM trials" << std::endl;
    for (int ii=0;ii<dm_list.size();ii++)
      std::cout << dm_list[ii] << std::endl;
    std::cout << "Executing dedispersion" << std::endl;
  }
  if (args.progress_bar)
    printf("Starting dedispersion...\n");
  timers["dedispersion"].start();
  PUSH_NVTX_RANGE("Dedisperse",3)
  DispersionTrials<unsigned char> trials = dedisperser.dedisperse();
  POP_NVTX_RANGE
  timers["dedispersion"].stop();
  if (args.progress_bar)
    printf("Complete (execution time %.2f s)\n",timers["dedispersion"].getTime());
  // Transform length: largest power of two that fits, unless overridden.
  unsigned int size;
  if (args.size==0)
    size = Utils::prev_power_of_two(filobj.get_nsamps());
  else
    size = args.size;
  if (args.verbose)
    std::cout << "Setting transform length to " << size << " points" << std::endl;
  AccelerationPlan acc_plan(args.acc_start, args.acc_end, args.acc_tol,
                            args.acc_pulse_width, size, filobj.get_tsamp(),
                            filobj.get_cfreq(), filobj.get_foff());
  // Launch one Worker per GPU; the dispenser hands out DM trials.
  timers["searching"].start();
  std::vector<Worker*> workers(nthreads);
  std::vector<pthread_t> threads(nthreads);
  DMDispenser dispenser(trials);
  if (args.progress_bar)
    dispenser.enable_progress_bar();
  for (int ii=0;ii<nthreads;ii++){
    workers[ii] = (new Worker(trials,dispenser,acc_plan,args,size,ii));
    pthread_create(&threads[ii], NULL, launch_worker_thread, (void*) workers[ii]);
  }
  DMDistiller dm_still(args.freq_tol,true);
  HarmonicDistiller harm_still(args.freq_tol,args.max_harm,true,false);
  CandidateCollection dm_cands;
  for (int ii=0; ii<nthreads; ii++){
    pthread_join(threads[ii],NULL);
    dm_cands.append(workers[ii]->dm_trial_cands.cands);
    delete workers[ii]; // fix leak: workers were never freed after join
  }
  timers["searching"].stop();
  if (args.verbose)
    std::cout << "Distilling DMs" << std::endl;
  dm_cands.cands = dm_still.distill(dm_cands.cands);
  dm_cands.cands = harm_still.distill(dm_cands.cands);
  CandidateScorer cand_scorer(filobj.get_tsamp(),filobj.get_cfreq(), filobj.get_foff(),
                              fabs(filobj.get_foff())*filobj.get_nchans());
  cand_scorer.score_all(dm_cands.cands);
  if (args.verbose)
    std::cout << "Setting up time series folder" << std::endl;
  MultiFolder folder(dm_cands.cands,trials);
  timers["folding"].start();
  if (args.progress_bar)
    folder.enable_progress_bar();
  if (args.npdmp > 0){
    if (args.verbose)
      std::cout << "Folding top "<< args.npdmp <<" cands" << std::endl;
    folder.fold_n(args.npdmp);
  }
  timers["folding"].stop();
  if (args.verbose)
    std::cout << "Writing output files" << std::endl;
  // Keep only the top args.limit candidates for output.
  int new_size = ::min(args.limit,(int) dm_cands.cands.size());
  dm_cands.cands.resize(new_size);
  CandidateFileWriter cand_files(args.outdir);
  cand_files.write_binary(dm_cands.cands,"candidates.peasoup");
  OutputFileWriter stats;
  stats.add_misc_info();
  stats.add_header(filename);
  stats.add_search_parameters(args);
  stats.add_dm_list(dm_list);
  std::vector<float> acc_list;
  acc_plan.generate_accel_list(0.0,acc_list);
  stats.add_acc_list(acc_list);
  std::vector<int> device_idxs;
  for (int device_idx=0;device_idx<nthreads;device_idx++)
    device_idxs.push_back(device_idx);
  stats.add_gpu_info(device_idxs);
  stats.add_candidates(dm_cands.cands,cand_files.byte_mapping);
  timers["total"].stop();
  stats.add_timing_info(timers);
  std::stringstream xml_filepath;
  xml_filepath << args.outdir << "/" << "overview.xml";
  stats.to_file(xml_filepath.str());
  return 0;
}
| e18c7082ccd9a67910abf44b91f13f4ffb178bfb.cu | #include <data_types/timeseries.hpp>
#include <data_types/fourierseries.hpp>
#include <data_types/candidates.hpp>
#include <data_types/filterbank.hpp>
#include <transforms/dedisperser.hpp>
#include <transforms/resampler.hpp>
#include <transforms/folder.hpp>
#include <transforms/ffter.hpp>
#include <transforms/dereddener.hpp>
#include <transforms/spectrumformer.hpp>
#include <transforms/birdiezapper.hpp>
#include <transforms/peakfinder.hpp>
#include <transforms/distiller.hpp>
#include <transforms/harmonicfolder.hpp>
#include <transforms/scorer.hpp>
#include <utils/exceptions.hpp>
#include <utils/utils.hpp>
#include <utils/stats.hpp>
#include <utils/stopwatch.hpp>
#include <utils/progress_bar.hpp>
#include <utils/cmdline.hpp>
#include <utils/output_stats.hpp>
#include <string>
#include <iostream>
#include <stdio.h>
#include <unistd.h>
#include "cuda.h"
#include "cufft.h"
#include "pthread.h"
#include <cmath>
#include <map>
// Thread-safe dispenser of dispersion-measure (DM) trial indices.
// Worker threads repeatedly call get_dm_trial_idx() until it returns -1.
class DMDispenser {
private:
  DispersionTrials<unsigned char>& trials;
  pthread_mutex_t mutex;
  int dm_idx;            // next trial index to hand out
  int count;             // total number of DM trials
  ProgressBar* progress; // owned; non-NULL only after enable_progress_bar()
  bool use_progress_bar;

public:
  DMDispenser(DispersionTrials<unsigned char>& trials)
    // progress is initialized to NULL so the destructor is always safe,
    // even when enable_progress_bar() is never called.
    :trials(trials),dm_idx(0),progress(NULL),use_progress_bar(false){
    count = trials.get_count();
    pthread_mutex_init(&mutex, NULL);
  }

  // Turn on progress reporting; call before the workers start.
  void enable_progress_bar(){
    progress = new ProgressBar();
    use_progress_bar = true;
  }

  // Returns the next DM trial index, or -1 once all trials are dispensed.
  // Safe to call concurrently from multiple threads.
  int get_dm_trial_idx(void){
    pthread_mutex_lock(&mutex);
    int retval;
    if (dm_idx==0){
      // First hand-out: announce and start the progress bar.
      if (use_progress_bar){
        printf("Releasing DMs to workers...\n");
        progress->start();
      }
    }
    if (dm_idx >= trials.get_count()){
      retval = -1;
      if (use_progress_bar)
        progress->stop();
    } else {
      if (use_progress_bar)
        progress->set_progress((float)dm_idx/count);
      retval = dm_idx;
      dm_idx++;
    }
    pthread_mutex_unlock(&mutex);
    return retval;
  }

  ~DMDispenser(){
    delete progress; // deleting NULL is a no-op
    pthread_mutex_destroy(&mutex);
  }
};
// One Worker per GPU: pulls DM trials from the dispenser and runs the full
// acceleration-search pipeline (FFT, dereddening, resampling, harmonic
// summing, peak finding, distilling) on its assigned device.
// Results accumulate in dm_trial_cands.
class Worker {
private:
  DispersionTrials<unsigned char>& trials;
  DMDispenser& manager;
  CmdLineOptions& args;
  AccelerationPlan& acc_plan;
  unsigned int size;   // FFT transform length (samples)
  int device;          // GPU device index this worker binds to
  std::map<std::string,Stopwatch> timers; // reserved for benchmarking (unused)

public:
  CandidateCollection dm_trial_cands;

  Worker(DispersionTrials<unsigned char>& trials, DMDispenser& manager,
         AccelerationPlan& acc_plan, CmdLineOptions& args, unsigned int size, int device)
    :trials(trials),manager(manager),acc_plan(acc_plan),args(args),size(size),device(device){}

  // Main processing loop; runs until the dispenser is exhausted.
  void start(void)
  {
    cudaSetDevice(device);
    Stopwatch pass_timer;
    pass_timer.start();
    // When the transform length exceeds the data length, the tail of the
    // device time series is padded with the data mean per trial.
    bool padding = false;
    if (size > trials.get_nsamps())
      padding = true;
    CuFFTerR2C r2cfft(size);
    CuFFTerC2R c2rfft(size);
    float tobs = size*trials.get_tsamp();
    float bin_width = 1.0/tobs;
    DeviceFourierSeries<cufftComplex> d_fseries(size/2+1,bin_width);
    DedispersedTimeSeries<unsigned char> tim;
    ReusableDeviceTimeSeries<float,unsigned char> d_tim(size);
    DeviceTimeSeries<float> d_tim_r(size);
    TimeDomainResampler resampler;
    DevicePowerSpectrum<float> pspec(d_fseries);
    // Initialize to NULL so the pointer is never used/freed uninitialized
    // (it is only allocated when a zap file is supplied).
    Zapper* bzap = NULL;
    if (args.zapfilename!=""){
      if (args.verbose)
        std::cout << "Using zapfile: " << args.zapfilename << std::endl;
      bzap = new Zapper(args.zapfilename);
    }
    Dereddener rednoise(size/2+1);
    SpectrumFormer former;
    PeakFinder cand_finder(args.min_snr,args.min_freq,args.max_freq,size);
    HarmonicSums<float> sums(pspec,args.nharmonics);
    HarmonicFolder harm_folder(sums);
    std::vector<float> acc_list;
    HarmonicDistiller harm_finder(args.freq_tol,args.max_harm,false);
    AccelerationDistiller acc_still(tobs,args.freq_tol,true);
    float mean,std,rms;
    float padding_mean;
    int ii;
    PUSH_NVTX_RANGE("DM-Loop",0)
    while (true){
      ii = manager.get_dm_trial_idx();
      if (ii==-1)
        break; // all DM trials dispensed
      trials.get_idx(ii,tim);
      if (args.verbose)
        std::cout << "Copying DM trial to device (DM: " << tim.get_dm() << ")"<< std::endl;
      d_tim.copy_from_host(tim);
      if (padding){
        padding_mean = stats::mean<float>(d_tim.get_data(),trials.get_nsamps());
        d_tim.fill(trials.get_nsamps(),d_tim.get_nsamps(),padding_mean);
      }
      if (args.verbose)
        std::cout << "Generating accelration list" << std::endl;
      acc_plan.generate_accel_list(tim.get_dm(),acc_list);
      if (args.verbose)
        std::cout << "Searching "<< acc_list.size()<< " acceleration trials for DM "<< tim.get_dm() << std::endl;
      if (args.verbose)
        std::cout << "Executing forward FFT" << std::endl;
      r2cfft.execute(d_tim.get_data(),d_fseries.get_data());
      if (args.verbose)
        std::cout << "Forming power spectrum" << std::endl;
      former.form(d_fseries,pspec);
      if (args.verbose)
        std::cout << "Finding running median" << std::endl;
      rednoise.calculate_median(pspec);
      if (args.verbose)
        std::cout << "Dereddening Fourier series" << std::endl;
      rednoise.deredden(d_fseries);
      if (args.zapfilename!=""){
        if (args.verbose)
          std::cout << "Zapping birdies" << std::endl;
        bzap->zap(d_fseries);
      }
      if (args.verbose)
        std::cout << "Forming interpolated power spectrum" << std::endl;
      former.form_interpolated(d_fseries,pspec);
      if (args.verbose)
        std::cout << "Finding statistics" << std::endl;
      stats::stats<float>(pspec.get_data(),size/2+1,&mean,&rms,&std);
      if (args.verbose)
        std::cout << "Executing inverse FFT" << std::endl;
      c2rfft.execute(d_fseries.get_data(),d_tim.get_data());
      CandidateCollection accel_trial_cands;
      PUSH_NVTX_RANGE("Acceleration-Loop",1)
      // Inner loop: resample the dereddened time series for each trial
      // acceleration and search its spectrum for candidates.
      for (int jj=0;jj<acc_list.size();jj++){
        if (args.verbose)
          std::cout << "Resampling to "<< acc_list[jj] << " m/s/s" << std::endl;
        resampler.resample(d_tim,d_tim_r,size,acc_list[jj]);
        if (args.verbose)
          std::cout << "Execute forward FFT" << std::endl;
        r2cfft.execute(d_tim_r.get_data(),d_fseries.get_data());
        if (args.verbose)
          std::cout << "Form interpolated power spectrum" << std::endl;
        former.form_interpolated(d_fseries,pspec);
        if (args.verbose)
          std::cout << "Normalise power spectrum" << std::endl;
        stats::normalise(pspec.get_data(),mean*size,std*size,size/2+1);
        if (args.verbose)
          std::cout << "Harmonic summing" << std::endl;
        harm_folder.fold(pspec);
        if (args.verbose)
          std::cout << "Finding peaks" << std::endl;
        SpectrumCandidates trial_cands(tim.get_dm(),ii,acc_list[jj]);
        cand_finder.find_candidates(pspec,trial_cands);
        cand_finder.find_candidates(sums,trial_cands);
        if (args.verbose)
          std::cout << "Distilling harmonics" << std::endl;
        accel_trial_cands.append(harm_finder.distill(trial_cands.cands));
      }
      POP_NVTX_RANGE
      if (args.verbose)
        std::cout << "Distilling accelerations" << std::endl;
      dm_trial_cands.append(acc_still.distill(accel_trial_cands.cands));
    }
    POP_NVTX_RANGE
    delete bzap; // deleting NULL is a no-op
    if (args.verbose)
      std::cout << "DM processing took " << pass_timer.getTime() << " seconds"<< std::endl;
  }
};
// pthread entry point: unpack the Worker instance and run its search loop.
void* launch_worker_thread(void* ptr){
  Worker* worker = static_cast<Worker*>(ptr);
  worker->start();
  return NULL;
}
// Peasoup acceleration-search driver: read filterbank data, dedisperse on
// the CPU pool, fan the DM trials out to one Worker thread per GPU, then
// distill, score, fold and write out the surviving candidates.
int main(int argc, char **argv)
{
  std::map<std::string,Stopwatch> timers;
  timers["reading"]      = Stopwatch();
  timers["dedispersion"] = Stopwatch();
  timers["searching"]    = Stopwatch();
  timers["folding"]      = Stopwatch();
  timers["total"]        = Stopwatch();
  timers["total"].start();
  CmdLineOptions args;
  if (!read_cmdline_options(args,argc,argv))
    ErrorChecker::throw_error("Failed to parse command line arguments.");
  // One worker thread per GPU, capped by the user limit, minimum one.
  int nthreads = std::min(Utils::gpu_count(),args.max_num_threads);
  nthreads = std::max(1,nthreads);
  if (args.verbose)
    std::cout << "Using file: " << args.infilename << std::endl;
  std::string filename(args.infilename);
  if (args.progress_bar)
    printf("Reading data from %s\n",args.infilename.c_str());
  timers["reading"].start();
  SigprocFilterbank filobj(filename);
  timers["reading"].stop();
  if (args.progress_bar){
    printf("Complete (execution time %.2f s)\n",timers["reading"].getTime());
  }
  Dedisperser dedisperser(filobj,nthreads);
  if (args.killfilename!=""){
    if (args.verbose)
      std::cout << "Using killfile: " << args.killfilename << std::endl;
    dedisperser.set_killmask(args.killfilename);
  }
  if (args.verbose)
    std::cout << "Generating DM list" << std::endl;
  dedisperser.generate_dm_list(args.dm_start,args.dm_end,args.dm_pulse_width,args.dm_tol);
  std::vector<float> dm_list = dedisperser.get_dm_list();
  if (args.verbose){
    std::cout << dm_list.size() << " DM trials" << std::endl;
    for (int ii=0;ii<dm_list.size();ii++)
      std::cout << dm_list[ii] << std::endl;
    std::cout << "Executing dedispersion" << std::endl;
  }
  if (args.progress_bar)
    printf("Starting dedispersion...\n");
  timers["dedispersion"].start();
  PUSH_NVTX_RANGE("Dedisperse",3)
  DispersionTrials<unsigned char> trials = dedisperser.dedisperse();
  POP_NVTX_RANGE
  timers["dedispersion"].stop();
  if (args.progress_bar)
    printf("Complete (execution time %.2f s)\n",timers["dedispersion"].getTime());
  // Transform length: largest power of two that fits, unless overridden.
  unsigned int size;
  if (args.size==0)
    size = Utils::prev_power_of_two(filobj.get_nsamps());
  else
    size = args.size;
  if (args.verbose)
    std::cout << "Setting transform length to " << size << " points" << std::endl;
  AccelerationPlan acc_plan(args.acc_start, args.acc_end, args.acc_tol,
                            args.acc_pulse_width, size, filobj.get_tsamp(),
                            filobj.get_cfreq(), filobj.get_foff());
  // Launch one Worker per GPU; the dispenser hands out DM trials.
  timers["searching"].start();
  std::vector<Worker*> workers(nthreads);
  std::vector<pthread_t> threads(nthreads);
  DMDispenser dispenser(trials);
  if (args.progress_bar)
    dispenser.enable_progress_bar();
  for (int ii=0;ii<nthreads;ii++){
    workers[ii] = (new Worker(trials,dispenser,acc_plan,args,size,ii));
    pthread_create(&threads[ii], NULL, launch_worker_thread, (void*) workers[ii]);
  }
  DMDistiller dm_still(args.freq_tol,true);
  HarmonicDistiller harm_still(args.freq_tol,args.max_harm,true,false);
  CandidateCollection dm_cands;
  for (int ii=0; ii<nthreads; ii++){
    pthread_join(threads[ii],NULL);
    dm_cands.append(workers[ii]->dm_trial_cands.cands);
    delete workers[ii]; // fix leak: workers were never freed after join
  }
  timers["searching"].stop();
  if (args.verbose)
    std::cout << "Distilling DMs" << std::endl;
  dm_cands.cands = dm_still.distill(dm_cands.cands);
  dm_cands.cands = harm_still.distill(dm_cands.cands);
  CandidateScorer cand_scorer(filobj.get_tsamp(),filobj.get_cfreq(), filobj.get_foff(),
                              fabs(filobj.get_foff())*filobj.get_nchans());
  cand_scorer.score_all(dm_cands.cands);
  if (args.verbose)
    std::cout << "Setting up time series folder" << std::endl;
  MultiFolder folder(dm_cands.cands,trials);
  timers["folding"].start();
  if (args.progress_bar)
    folder.enable_progress_bar();
  if (args.npdmp > 0){
    if (args.verbose)
      std::cout << "Folding top "<< args.npdmp <<" cands" << std::endl;
    folder.fold_n(args.npdmp);
  }
  timers["folding"].stop();
  if (args.verbose)
    std::cout << "Writing output files" << std::endl;
  // Keep only the top args.limit candidates for output.
  int new_size = std::min(args.limit,(int) dm_cands.cands.size());
  dm_cands.cands.resize(new_size);
  CandidateFileWriter cand_files(args.outdir);
  cand_files.write_binary(dm_cands.cands,"candidates.peasoup");
  OutputFileWriter stats;
  stats.add_misc_info();
  stats.add_header(filename);
  stats.add_search_parameters(args);
  stats.add_dm_list(dm_list);
  std::vector<float> acc_list;
  acc_plan.generate_accel_list(0.0,acc_list);
  stats.add_acc_list(acc_list);
  std::vector<int> device_idxs;
  for (int device_idx=0;device_idx<nthreads;device_idx++)
    device_idxs.push_back(device_idx);
  stats.add_gpu_info(device_idxs);
  stats.add_candidates(dm_cands.cands,cand_files.byte_mapping);
  timers["total"].stop();
  stats.add_timing_info(timers);
  std::stringstream xml_filepath;
  xml_filepath << args.outdir << "/" << "overview.xml";
  stats.to_file(xml_filepath.str());
  return 0;
}
|
7652a8eeb7c84a1dbe4d223d47c5f76f7cc96cf3.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#ifdef USE_MPI
#include <algorithm>
#include <vector>
#include "caffe/common_layers.hpp"
#include "caffe/util/math_functions.hpp"
#include "caffe/util/mpi_functions.hpp"
namespace caffe {
// Elementwise batch-norm transform: y = (x - mean[c]) / sqrt(var[c] + eps)
// * scale[c] + bias[c], where c is the channel of each flattened element.
// Input layout is (num, channels, spatial_dim) flattened; one thread per
// element via the grid-stride CUDA_KERNEL_LOOP macro.
template <typename Dtype>
__global__ void kernel_test_forward(
    const int num, const int channels, const int spatial_dim,
    const Dtype* scale, const Dtype* bias, const Dtype* mean, const Dtype* var,
    const Dtype eps, const Dtype* bottom_data, Dtype* top_data) {
  CUDA_KERNEL_LOOP(index, num * channels * spatial_dim) {
    const int ch = (index / spatial_dim) % channels;
    const Dtype centered = bottom_data[index] - mean[ch];
    const Dtype denom = sqrt(var[ch] + eps);
    top_data[index] = centered / denom * scale[ch] + bias[ch];
  }
}
// Computes per-channel local (this-rank) first and second moments of the
// input: mean[c] = sum(x)/norm_factor, var[c] = sum(x^2)/norm_factor.
// Launch contract: one block per channel (gridDim.x == channels) with
// blockDim.x == CAFFE_CUDA_NUM_THREADS; the halving tree reduction below
// assumes blockDim.x is a power of two.
template <typename Dtype>
__global__ void kernel_local_stats(int num, int channels, int spatial_dim,
const Dtype norm_factor,
const Dtype* bottom_data, Dtype* mean, Dtype* var) {
// store local E[x] to mean, E[x^2] to var temporarily
__shared__ Dtype buffer1[CAFFE_CUDA_NUM_THREADS];
__shared__ Dtype buffer2[CAFFE_CUDA_NUM_THREADS];
const int tid = threadIdx.x;
const int c = blockIdx.x;
// load and accumulate data on each thread
buffer1[tid] = buffer2[tid] = 0;
// Strided walk over the (num, spatial) plane of channel c; `index` maps the
// flat (n, s) counter back into NCHW layout.
for (int i = tid; i < num * spatial_dim; i += blockDim.x) {
const int index = i / spatial_dim * channels * spatial_dim
+ c * spatial_dim + i % spatial_dim;
buffer1[tid] += bottom_data[index];
buffer2[tid] += bottom_data[index] * bottom_data[index];
}
// All partial sums must be written before the reduction reads them.
__syncthreads();
// do tree reduction
for (int s = blockDim.x / 2; s > 0; s >>= 1) {
if (tid < s) {
buffer1[tid] += buffer1[tid + s];
buffer2[tid] += buffer2[tid + s];
}
__syncthreads();
}
// save the result back
if (tid == 0) {
mean[c] = buffer1[0] / norm_factor;
var[c] = buffer2[0] / norm_factor;
}
}
// Computes the local (this-rank) gradients of the scale and bias params:
//   scale_diff[c] = sum(top_diff * x_hat), with x_hat = (x - mean)/sqrt(var+eps)
//   bias_diff[c]  = sum(top_diff)
// Launch contract: one block per channel, blockDim.x == CAFFE_CUDA_NUM_THREADS
// (power of two, required by the halving tree reduction).
template <typename Dtype>
__global__ void kernel_backward_scale_bias(
const int num, const int channels, const int spatial_dim,
const Dtype* mean, const Dtype* var, const Dtype eps,
const Dtype* top_diff, const Dtype* bottom_data,
Dtype* scale_diff, Dtype* bias_diff) {
__shared__ Dtype buffer1[CAFFE_CUDA_NUM_THREADS];
__shared__ Dtype buffer2[CAFFE_CUDA_NUM_THREADS];
const int tid = threadIdx.x;
const int c = blockIdx.x;
// load and accumulate data on each thread
buffer1[tid] = buffer2[tid] = 0;
for (int i = tid; i < num * spatial_dim; i += blockDim.x) {
// Map the flat (n, s) counter back into NCHW layout for channel c.
const int index = i / spatial_dim * channels * spatial_dim
+ c * spatial_dim + i % spatial_dim;
buffer1[tid] += top_diff[index] * (bottom_data[index] - mean[c])
/ sqrt(var[c] + eps);
buffer2[tid] += top_diff[index];
}
__syncthreads();
// do tree reduction
for (int s = blockDim.x / 2; s > 0; s >>= 1) {
if (tid < s) {
buffer1[tid] += buffer1[tid + s];
buffer2[tid] += buffer2[tid + s];
}
__syncthreads();
}
// save the result back
if (tid == 0) {
scale_diff[c] = buffer1[0];
bias_diff[c] = buffer2[0];
}
}
// Elementwise gradient w.r.t. the input of the batch-norm transform:
//   dx = scale/sqrt(var+eps) * (dy - (x_hat*scale_diff + bias_diff)/norm_factor)
// scale_diff/bias_diff must already hold the globally (cross-rank) reduced
// sums, and norm_factor the global per-channel element count.
// Note: the `bias` parameter is not used by this kernel.
template <typename Dtype>
__global__ void kernel_backward_bottom(
const int num, const int channels, const int spatial_dim,
const Dtype* scale, const Dtype* bias,
const Dtype* mean, const Dtype* var, const Dtype eps,
const Dtype norm_factor,
const Dtype* top_diff, const Dtype* scale_diff, const Dtype* bias_diff,
const Dtype* bottom_data, Dtype* bottom_diff) {
CUDA_KERNEL_LOOP(index, num * channels * spatial_dim) {
int c = (index / spatial_dim) % channels;
const Dtype inv_std = Dtype(1) / sqrt(var[c] + eps);
const Dtype x_norm = (bottom_data[index] - mean[c]) * inv_std;
bottom_diff[index] = scale[c] * inv_std *
(top_diff[index] - (x_norm * scale_diff[c] + bias_diff[c]) / norm_factor);
}
}
// Forward pass of the MPI-synchronized batch-norm layer.
// TEST phase: normalize with the stored running statistics (blobs_[2]/[3]).
// TRAIN phase: compute batch statistics across all MPI ranks (allreduce of
// local E[x]/E[x^2]), update the running statistics with momentum, then
// normalize with the fresh batch statistics.
template <typename Dtype>
void SyncBNLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
if (this->phase_ == TEST) {
hipLaunchKernelGGL(( kernel_test_forward), dim3(CAFFE_GET_BLOCKS(bottom[0]->count())),
dim3(CAFFE_CUDA_NUM_THREADS), 0, 0,
num_, channels_, height_ * width_,
this->blobs_[0]->gpu_data(),
this->blobs_[1]->gpu_data(),
this->blobs_[2]->gpu_data(),
this->blobs_[3]->gpu_data(),
bn_eps_,
bottom[0]->gpu_data(),
top[0]->mutable_gpu_data()
);
CUDA_POST_KERNEL_CHECK;
} else {
// m = global (all ranks) number of elements per channel.
const int m = num_ * height_ * width_ * Caffe::MPI_all_rank();
// compute local E[x] and E[x^2]
hipLaunchKernelGGL(( kernel_local_stats), dim3(channels_), dim3(CAFFE_CUDA_NUM_THREADS), 0, 0,
num_, channels_, height_ * width_,
static_cast<Dtype>(m),
bottom[0]->gpu_data(),
mean_buffer_.mutable_gpu_data(),
var_buffer_.mutable_gpu_data()
);
CUDA_POST_KERNEL_CHECK;
// sync E[x] and E[x^2]
mpi_force_synchronize();
caffe_iallreduce(mean_buffer_.mutable_cpu_data(), channels_);
caffe_iallreduce(var_buffer_.mutable_cpu_data(), channels_);
mpi_force_synchronize();
// var = (E[x^2] - E[x]^2) * bias_correction_factor
caffe_gpu_mul(channels_, mean_buffer_.gpu_data(), mean_buffer_.gpu_data(),
top[0]->mutable_gpu_data()); // reuse the top buffer
caffe_gpu_sub(channels_, var_buffer_.gpu_data(), top[0]->gpu_data(),
var_buffer_.mutable_gpu_data());
// Bessel correction m/(m-1); skipped for m == 1 to avoid division by zero.
if (m > 1) {
caffe_gpu_scal(channels_, Dtype(m) / (m-1),
var_buffer_.mutable_gpu_data());
}
// update running mean and var
caffe_gpu_axpby(mean_buffer_.count(),
Dtype(1) - bn_momentum_, mean_buffer_.gpu_data(),
bn_momentum_, this->blobs_[2]->mutable_gpu_data());
caffe_gpu_axpby(var_buffer_.count(),
Dtype(1) - bn_momentum_, var_buffer_.gpu_data(),
bn_momentum_, this->blobs_[3]->mutable_gpu_data());
// compute output
hipLaunchKernelGGL(( kernel_test_forward), dim3(CAFFE_GET_BLOCKS(bottom[0]->count())),
dim3(CAFFE_CUDA_NUM_THREADS), 0, 0,
num_, channels_, height_ * width_,
this->blobs_[0]->gpu_data(),
this->blobs_[1]->gpu_data(),
mean_buffer_.gpu_data(),
var_buffer_.gpu_data(),
bn_eps_,
bottom[0]->gpu_data(),
top[0]->mutable_gpu_data()
);
CUDA_POST_KERNEL_CHECK;
}
}
// Backward pass: computes local scale/bias gradients, allreduces them across
// MPI ranks (staged in mean_buffer_/var_buffer_ diffs), accumulates the
// rank-averaged sums into the param blobs, then computes the input gradient
// with the globally reduced statistics.
template <typename Dtype>
void SyncBNLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[0]) {
CHECK(this->param_propagate_down_[0] && this->param_propagate_down_[1])
<< "SyncBN layer params should backprop when the layer backprops";
// compute local scale and bias diff
hipLaunchKernelGGL(( kernel_backward_scale_bias), dim3(channels_), dim3(CAFFE_CUDA_NUM_THREADS), 0, 0,
num_, channels_, height_ * width_,
mean_buffer_.gpu_data(),
var_buffer_.gpu_data(),
bn_eps_,
top[0]->gpu_diff(),
bottom[0]->gpu_data(),
mean_buffer_.mutable_gpu_diff(), // temp use for local scale diff
var_buffer_.mutable_gpu_diff() // temp use for local bias diff
);
CUDA_POST_KERNEL_CHECK;
// sync scale and bias diff
mpi_force_synchronize();
caffe_iallreduce(mean_buffer_.mutable_cpu_diff(), channels_);
caffe_iallreduce(var_buffer_.mutable_cpu_diff(), channels_);
mpi_force_synchronize();
// add to param blobs diff (average over ranks: 1/MPI_all_rank of the sum)
caffe_gpu_axpy(channels_, Dtype(1) / Caffe::MPI_all_rank(),
mean_buffer_.gpu_diff(),
this->blobs_[0]->mutable_gpu_diff());
caffe_gpu_axpy(channels_, Dtype(1) / Caffe::MPI_all_rank(),
var_buffer_.gpu_diff(),
this->blobs_[1]->mutable_gpu_diff());
// compute bottom diff
hipLaunchKernelGGL(( kernel_backward_bottom), dim3(CAFFE_GET_BLOCKS(bottom[0]->count())),
dim3(CAFFE_CUDA_NUM_THREADS), 0, 0,
num_, channels_, height_ * width_,
this->blobs_[0]->gpu_data(),
this->blobs_[1]->gpu_data(),
mean_buffer_.gpu_data(),
var_buffer_.gpu_data(),
bn_eps_,
static_cast<Dtype>(num_ * height_ * width_ * Caffe::MPI_all_rank()),
top[0]->gpu_diff(),
mean_buffer_.gpu_diff(),
var_buffer_.gpu_diff(),
bottom[0]->gpu_data(),
bottom[0]->mutable_gpu_diff()
);
}
}
INSTANTIATE_LAYER_GPU_FUNCS(SyncBNLayer);
} // namespace caffe
#endif | 7652a8eeb7c84a1dbe4d223d47c5f76f7cc96cf3.cu | #ifdef USE_MPI
#include <algorithm>
#include <vector>
#include "caffe/common_layers.hpp"
#include "caffe/util/math_functions.hpp"
#include "caffe/util/mpi_functions.hpp"
namespace caffe {
// Elementwise batch-norm transform: y = (x - mean[c]) / sqrt(var[c] + eps)
// * scale[c] + bias[c], where c is the channel of each flattened element.
// Input layout is (num, channels, spatial_dim) flattened; one thread per
// element via the grid-stride CUDA_KERNEL_LOOP macro.
template <typename Dtype>
__global__ void kernel_test_forward(
    const int num, const int channels, const int spatial_dim,
    const Dtype* scale, const Dtype* bias, const Dtype* mean, const Dtype* var,
    const Dtype eps, const Dtype* bottom_data, Dtype* top_data) {
  CUDA_KERNEL_LOOP(index, num * channels * spatial_dim) {
    const int ch = (index / spatial_dim) % channels;
    const Dtype centered = bottom_data[index] - mean[ch];
    const Dtype denom = sqrt(var[ch] + eps);
    top_data[index] = centered / denom * scale[ch] + bias[ch];
  }
}
// Computes per-channel local (this-rank) first and second moments of the
// input: mean[c] = sum(x)/norm_factor, var[c] = sum(x^2)/norm_factor.
// Launch contract: one block per channel (gridDim.x == channels) with
// blockDim.x == CAFFE_CUDA_NUM_THREADS; the halving tree reduction below
// assumes blockDim.x is a power of two.
template <typename Dtype>
__global__ void kernel_local_stats(int num, int channels, int spatial_dim,
const Dtype norm_factor,
const Dtype* bottom_data, Dtype* mean, Dtype* var) {
// store local E[x] to mean, E[x^2] to var temporarily
__shared__ Dtype buffer1[CAFFE_CUDA_NUM_THREADS];
__shared__ Dtype buffer2[CAFFE_CUDA_NUM_THREADS];
const int tid = threadIdx.x;
const int c = blockIdx.x;
// load and accumulate data on each thread
buffer1[tid] = buffer2[tid] = 0;
// Strided walk over the (num, spatial) plane of channel c; `index` maps the
// flat (n, s) counter back into NCHW layout.
for (int i = tid; i < num * spatial_dim; i += blockDim.x) {
const int index = i / spatial_dim * channels * spatial_dim
+ c * spatial_dim + i % spatial_dim;
buffer1[tid] += bottom_data[index];
buffer2[tid] += bottom_data[index] * bottom_data[index];
}
// All partial sums must be written before the reduction reads them.
__syncthreads();
// do tree reduction
for (int s = blockDim.x / 2; s > 0; s >>= 1) {
if (tid < s) {
buffer1[tid] += buffer1[tid + s];
buffer2[tid] += buffer2[tid + s];
}
__syncthreads();
}
// save the result back
if (tid == 0) {
mean[c] = buffer1[0] / norm_factor;
var[c] = buffer2[0] / norm_factor;
}
}
// Computes the local (this-rank) gradients of the scale and bias params:
//   scale_diff[c] = sum(top_diff * x_hat), with x_hat = (x - mean)/sqrt(var+eps)
//   bias_diff[c]  = sum(top_diff)
// Launch contract: one block per channel, blockDim.x == CAFFE_CUDA_NUM_THREADS
// (power of two, required by the halving tree reduction).
template <typename Dtype>
__global__ void kernel_backward_scale_bias(
const int num, const int channels, const int spatial_dim,
const Dtype* mean, const Dtype* var, const Dtype eps,
const Dtype* top_diff, const Dtype* bottom_data,
Dtype* scale_diff, Dtype* bias_diff) {
__shared__ Dtype buffer1[CAFFE_CUDA_NUM_THREADS];
__shared__ Dtype buffer2[CAFFE_CUDA_NUM_THREADS];
const int tid = threadIdx.x;
const int c = blockIdx.x;
// load and accumulate data on each thread
buffer1[tid] = buffer2[tid] = 0;
for (int i = tid; i < num * spatial_dim; i += blockDim.x) {
// Map the flat (n, s) counter back into NCHW layout for channel c.
const int index = i / spatial_dim * channels * spatial_dim
+ c * spatial_dim + i % spatial_dim;
buffer1[tid] += top_diff[index] * (bottom_data[index] - mean[c])
/ sqrt(var[c] + eps);
buffer2[tid] += top_diff[index];
}
__syncthreads();
// do tree reduction
for (int s = blockDim.x / 2; s > 0; s >>= 1) {
if (tid < s) {
buffer1[tid] += buffer1[tid + s];
buffer2[tid] += buffer2[tid + s];
}
__syncthreads();
}
// save the result back
if (tid == 0) {
scale_diff[c] = buffer1[0];
bias_diff[c] = buffer2[0];
}
}
// Elementwise gradient w.r.t. the input of the batch-norm transform:
//   dx = scale/sqrt(var+eps) * (dy - (x_hat*scale_diff + bias_diff)/norm_factor)
// scale_diff/bias_diff must already hold the globally (cross-rank) reduced
// sums, and norm_factor the global per-channel element count.
// Note: the `bias` parameter is not used by this kernel.
template <typename Dtype>
__global__ void kernel_backward_bottom(
const int num, const int channels, const int spatial_dim,
const Dtype* scale, const Dtype* bias,
const Dtype* mean, const Dtype* var, const Dtype eps,
const Dtype norm_factor,
const Dtype* top_diff, const Dtype* scale_diff, const Dtype* bias_diff,
const Dtype* bottom_data, Dtype* bottom_diff) {
CUDA_KERNEL_LOOP(index, num * channels * spatial_dim) {
int c = (index / spatial_dim) % channels;
const Dtype inv_std = Dtype(1) / sqrt(var[c] + eps);
const Dtype x_norm = (bottom_data[index] - mean[c]) * inv_std;
bottom_diff[index] = scale[c] * inv_std *
(top_diff[index] - (x_norm * scale_diff[c] + bias_diff[c]) / norm_factor);
}
}
// Forward pass of the MPI-synchronized batch-norm layer.
// TEST phase: normalize with the stored running statistics (blobs_[2]/[3]).
// TRAIN phase: compute batch statistics across all MPI ranks (allreduce of
// local E[x]/E[x^2]), update the running statistics with momentum, then
// normalize with the fresh batch statistics.
template <typename Dtype>
void SyncBNLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
if (this->phase_ == TEST) {
kernel_test_forward<<<CAFFE_GET_BLOCKS(bottom[0]->count()),
CAFFE_CUDA_NUM_THREADS>>>(
num_, channels_, height_ * width_,
this->blobs_[0]->gpu_data(),
this->blobs_[1]->gpu_data(),
this->blobs_[2]->gpu_data(),
this->blobs_[3]->gpu_data(),
bn_eps_,
bottom[0]->gpu_data(),
top[0]->mutable_gpu_data()
);
CUDA_POST_KERNEL_CHECK;
} else {
// m = global (all ranks) number of elements per channel.
const int m = num_ * height_ * width_ * Caffe::MPI_all_rank();
// compute local E[x] and E[x^2]
kernel_local_stats<<<channels_, CAFFE_CUDA_NUM_THREADS>>>(
num_, channels_, height_ * width_,
static_cast<Dtype>(m),
bottom[0]->gpu_data(),
mean_buffer_.mutable_gpu_data(),
var_buffer_.mutable_gpu_data()
);
CUDA_POST_KERNEL_CHECK;
// sync E[x] and E[x^2]
mpi_force_synchronize();
caffe_iallreduce(mean_buffer_.mutable_cpu_data(), channels_);
caffe_iallreduce(var_buffer_.mutable_cpu_data(), channels_);
mpi_force_synchronize();
// var = (E[x^2] - E[x]^2) * bias_correction_factor
caffe_gpu_mul(channels_, mean_buffer_.gpu_data(), mean_buffer_.gpu_data(),
top[0]->mutable_gpu_data()); // reuse the top buffer
caffe_gpu_sub(channels_, var_buffer_.gpu_data(), top[0]->gpu_data(),
var_buffer_.mutable_gpu_data());
// Bessel correction m/(m-1); skipped for m == 1 to avoid division by zero.
if (m > 1) {
caffe_gpu_scal(channels_, Dtype(m) / (m-1),
var_buffer_.mutable_gpu_data());
}
// update running mean and var
caffe_gpu_axpby(mean_buffer_.count(),
Dtype(1) - bn_momentum_, mean_buffer_.gpu_data(),
bn_momentum_, this->blobs_[2]->mutable_gpu_data());
caffe_gpu_axpby(var_buffer_.count(),
Dtype(1) - bn_momentum_, var_buffer_.gpu_data(),
bn_momentum_, this->blobs_[3]->mutable_gpu_data());
// compute output
kernel_test_forward<<<CAFFE_GET_BLOCKS(bottom[0]->count()),
CAFFE_CUDA_NUM_THREADS>>>(
num_, channels_, height_ * width_,
this->blobs_[0]->gpu_data(),
this->blobs_[1]->gpu_data(),
mean_buffer_.gpu_data(),
var_buffer_.gpu_data(),
bn_eps_,
bottom[0]->gpu_data(),
top[0]->mutable_gpu_data()
);
CUDA_POST_KERNEL_CHECK;
}
}
// Backward pass: computes local scale/bias gradients, allreduces them across
// MPI ranks (staged in mean_buffer_/var_buffer_ diffs), accumulates the
// rank-averaged sums into the param blobs, then computes the input gradient
// with the globally reduced statistics.
template <typename Dtype>
void SyncBNLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[0]) {
CHECK(this->param_propagate_down_[0] && this->param_propagate_down_[1])
<< "SyncBN layer params should backprop when the layer backprops";
// compute local scale and bias diff
kernel_backward_scale_bias<<<channels_, CAFFE_CUDA_NUM_THREADS>>>(
num_, channels_, height_ * width_,
mean_buffer_.gpu_data(),
var_buffer_.gpu_data(),
bn_eps_,
top[0]->gpu_diff(),
bottom[0]->gpu_data(),
mean_buffer_.mutable_gpu_diff(), // temp use for local scale diff
var_buffer_.mutable_gpu_diff() // temp use for local bias diff
);
CUDA_POST_KERNEL_CHECK;
// sync scale and bias diff
mpi_force_synchronize();
caffe_iallreduce(mean_buffer_.mutable_cpu_diff(), channels_);
caffe_iallreduce(var_buffer_.mutable_cpu_diff(), channels_);
mpi_force_synchronize();
// add to param blobs diff (average over ranks: 1/MPI_all_rank of the sum)
caffe_gpu_axpy(channels_, Dtype(1) / Caffe::MPI_all_rank(),
mean_buffer_.gpu_diff(),
this->blobs_[0]->mutable_gpu_diff());
caffe_gpu_axpy(channels_, Dtype(1) / Caffe::MPI_all_rank(),
var_buffer_.gpu_diff(),
this->blobs_[1]->mutable_gpu_diff());
// compute bottom diff
kernel_backward_bottom<<<CAFFE_GET_BLOCKS(bottom[0]->count()),
CAFFE_CUDA_NUM_THREADS>>>(
num_, channels_, height_ * width_,
this->blobs_[0]->gpu_data(),
this->blobs_[1]->gpu_data(),
mean_buffer_.gpu_data(),
var_buffer_.gpu_data(),
bn_eps_,
static_cast<Dtype>(num_ * height_ * width_ * Caffe::MPI_all_rank()),
top[0]->gpu_diff(),
mean_buffer_.gpu_diff(),
var_buffer_.gpu_diff(),
bottom[0]->gpu_data(),
bottom[0]->mutable_gpu_diff()
);
}
}
INSTANTIATE_LAYER_GPU_FUNCS(SyncBNLayer);
} // namespace caffe
#endif |
60c8957e14bd5b8b930e6ada7463a5a8e5e30707.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
// Index helpers into the packed per-dimension (x/y/z) slices of the buffers.
// BUG fix: the original `out` macro ended with a semicolon, which turned every
// `out(...) = ...;` assignment into a syntax error. Arguments are now also
// parenthesized for macro hygiene.
#define in(i, dim) dev_G[(i) + (dim) * inputOffset]
#define out(i, dim) dev_out[(i) + (dim) * outputOffset]
// Horizontal-recursion build of the auxiliary G table, one pass per spatial
// dimension. NOTE(review): this kernel was clearly unfinished ("Figure out
// actual constant") — the recurrence constants and the dev_G[k + 1] source
// index should be confirmed against the reference implementation.
__global__ void computeG(int ni, int nj, int nk, int nl,
                         int3 constants, double *dev_G, double *dev_out) {
  short rank = ctaRank();
  int inputOffset = ::max(ni + nj - 1, nk + nl - 1);
  int outputOffset = ::max(ni, nk) * ::max(nj, nl);
  // Loop over the 3 dimensions: x, y, z
  for (int dim = 0; dim < 3; ++dim) {
    // BUG fix: `constants.dim` does not compile — int3 has members x/y/z,
    // not `dim`; select the component for the current dimension instead.
    const double cdim = (dim == 0) ? constants.x
                      : (dim == 1) ? constants.y
                                   : constants.z;
    for (int i = 0; i < ::max(ni, nk); ++i) {
      out(i, dim) = in(i, dim);
    }
    __syncthreads();
    for (int j = 1; j < nj; ++j) {
      for (int i = 0; i < ni + nj - 1 - j; ++i) {
        double temp = dev_G[i + 1];
        __syncthreads();
        in(i, dim) = temp + cdim * in(i, dim);
        __syncthreads();
        if (rank < ni) {
          out(i + j * ni, dim) = in(i, dim);
        }
      }
    }
    for (int l = 1; l < nl; ++l) {
      for (int k = 0; k < nk + nl - 1 - l; ++k) {
        // BUG fix: the original read dev_G[i + 1], but `i` is out of scope
        // here — the loop variable of this pass is `k`.
        double temp = dev_G[k + 1];
        __syncthreads();
        in(k, dim) = temp + cdim * in(k, dim);
        __syncthreads();
        if (rank < nk) {
          out(k + l * nk, dim) = in(k, dim);
        }
      }
    }
  }
}
| 60c8957e14bd5b8b930e6ada7463a5a8e5e30707.cu | #define in(i, dim) dev_G[i + dim * inputOffset]
// BUG fix: the original `out` macro ended with a semicolon, which turned every
// `out(...) = ...;` assignment into a syntax error.
#define out(i, dim) dev_out[(i) + (dim) * outputOffset]
// Horizontal-recursion build of the auxiliary G table, one pass per spatial
// dimension. NOTE(review): this kernel was clearly unfinished ("Figure out
// actual constant") — the recurrence constants and the dev_G[k + 1] source
// index should be confirmed against the reference implementation.
__global__ void computeG(int ni, int nj, int nk, int nl,
                         int3 constants, double *dev_G, double *dev_out) {
  short rank = ctaRank();
  // BUG fix: std::max is not callable from device code without
  // --expt-relaxed-constexpr; use the device-side ::max overloads instead.
  int inputOffset = ::max(ni + nj - 1, nk + nl - 1);
  int outputOffset = ::max(ni, nk) * ::max(nj, nl);
  // Loop over the 3 dimensions: x, y, z
  for (int dim = 0; dim < 3; ++dim) {
    // BUG fix: `constants.dim` does not compile — int3 has members x/y/z,
    // not `dim`; select the component for the current dimension instead.
    const double cdim = (dim == 0) ? constants.x
                      : (dim == 1) ? constants.y
                                   : constants.z;
    for (int i = 0; i < ::max(ni, nk); ++i) {
      out(i, dim) = in(i, dim);
    }
    __syncthreads();
    for (int j = 1; j < nj; ++j) {
      for (int i = 0; i < ni + nj - 1 - j; ++i) {
        double temp = dev_G[i + 1];
        __syncthreads();
        in(i, dim) = temp + cdim * in(i, dim);
        __syncthreads();
        if (rank < ni) {
          out(i + j * ni, dim) = in(i, dim);
        }
      }
    }
    for (int l = 1; l < nl; ++l) {
      for (int k = 0; k < nk + nl - 1 - l; ++k) {
        // BUG fix: the original read dev_G[i + 1], but `i` is out of scope
        // here — the loop variable of this pass is `k`.
        double temp = dev_G[k + 1];
        __syncthreads();
        in(k, dim) = temp + cdim * in(k, dim);
        __syncthreads();
        if (rank < nk) {
          out(k + l * nk, dim) = in(k, dim);
        }
      }
    }
  }
}
|
30063f54758ce34c282502e5069d1a95184d5e9e.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "cudaUtils.cuh"
#include <stdio.h>
#define NEIGHBORINDEXSTEP 1
// Estimates per-vertex normals for a w x h vertex grid stored in an
// interleaved VBO (ATTRIBUTESIZE / ATTRIBUTE_OFFSET_* come from
// cudaUtils.cuh). Each interior thread computes a central-difference normal
// from its 4 neighbours, flips it towards the viewpoint at the origin, and
// writes it back normalised. Border vertices (lacking a full neighbourhood)
// are left untouched. Expected launch: 2D grid covering w x h.
__global__ void planeVertexNormalEstimate_kernel(
float* vbo_data, unsigned int w, unsigned int h)
{
unsigned int x = blockIdx.x * blockDim.x + threadIdx.x;
unsigned int y = blockIdx.y * blockDim.y + threadIdx.y;
unsigned int index = y * w + x;
if (x < w - 1 && y < h - 1 && x > 0 && y > 0) {
// Load the vertex position of this thread and of its 4 grid neighbours.
float center[3] = {
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int leftindex = y * w + (x - 1);
float left[3] = {
vbo_data[leftindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[leftindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[leftindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int rightindex = y * w + (x + 1);
float right[3] = {
vbo_data[rightindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[rightindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[rightindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int topindex = (y - 1) * w + x;
float top[3] = {
vbo_data[topindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[topindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[topindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int downindex = (y + 1) * w + x;
float down[3] = {
vbo_data[downindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[downindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[downindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
//Diff to right
float dx1 = right[0] - left[0];
float dy1 = right[1] - left[1];
float dz1 = right[2] - left[2];
//Diff to bottom
float dx2 = down[0] - top[0];
float dy2 = down[1] - top[1];
float dz2 = down[2] - top[2];
//d1 cross d2
float normx = dy1 * dz2 - dz1 * dy2;
float normy = dz1 * dx2 - dx1 * dz2;
float normz = dx1 * dy2 - dy1 * dx2;
//if n dot p > 0, flip towards viewpoint
if (normx * center[0] + normy * center[1] + normz * center[2] > 0.0f)
{
//Flip towards camera
normx = -normx;
normy = -normy;
normz = -normz;
}
// NOTE(review): length is 0 for degenerate (coincident) neighbours, which
// would write NaN/Inf normals — confirm the input grid excludes this case.
float length = sqrt(normx * normx + normy * normy + normz * normz);
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_NORMAL + 0] = normx / length;
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_NORMAL + 1] = normy / length;
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_NORMAL + 2] = normz / length;
}
}
// Launches the per-vertex normal-estimation kernel over a w x h vertex grid.
// The kernel bounds-checks every thread, so over-provisioning the grid is safe.
void launch_kernel(float* pos, unsigned int w, unsigned int h)
{
    // execute the kernel
    dim3 block(8, 8, 1);
    // BUG fix: the original used truncating division (w / block.x), which
    // silently skipped the last partial tile whenever w or h was not a
    // multiple of 8; use ceil-division so every vertex gets a thread.
    dim3 grid((w + block.x - 1) / block.x, (h + block.y - 1) / block.y, 1);
    planeVertexNormalEstimate_kernel << < grid, block >> > (pos, w, h);
}
// Maps an OpenGL VBO into CUDA/HIP address space, runs the normal-estimation
// kernel over its w x h vertex grid, then unmaps it so GL can use it again.
// NOTE(review): the map/get-pointer/unmap return codes are not checked here —
// on failure dptr would be used uninitialised; consider adding error checks.
void CudaAlogrithm::planeVertexNormalEstimate(struct cudaGraphicsResource** vbo_resource,
unsigned int w, unsigned int h
)
{
// map OpenGL buffer object for writing from CUDA
float* dptr;
hipGraphicsMapResources(1, vbo_resource, 0);
size_t num_bytes;
hipGraphicsResourceGetMappedPointer((void**)&dptr, &num_bytes, *vbo_resource);
launch_kernel(dptr, w, h);
// unmap buffer object
hipGraphicsUnmapResources(1, vbo_resource, 0);
} | 30063f54758ce34c282502e5069d1a95184d5e9e.cu | #include "cudaUtils.cuh"
#include <stdio.h>
#define NEIGHBORINDEXSTEP 1
// Estimates per-vertex normals for a w x h vertex grid stored in an
// interleaved VBO (ATTRIBUTESIZE / ATTRIBUTE_OFFSET_* come from
// cudaUtils.cuh). Each interior thread computes a central-difference normal
// from its 4 neighbours, flips it towards the viewpoint at the origin, and
// writes it back normalised. Border vertices (lacking a full neighbourhood)
// are left untouched. Expected launch: 2D grid covering w x h.
__global__ void planeVertexNormalEstimate_kernel(
float* vbo_data, unsigned int w, unsigned int h)
{
unsigned int x = blockIdx.x * blockDim.x + threadIdx.x;
unsigned int y = blockIdx.y * blockDim.y + threadIdx.y;
unsigned int index = y * w + x;
if (x < w - 1 && y < h - 1 && x > 0 && y > 0) {
// Load the vertex position of this thread and of its 4 grid neighbours.
float center[3] = {
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int leftindex = y * w + (x - 1);
float left[3] = {
vbo_data[leftindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[leftindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[leftindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int rightindex = y * w + (x + 1);
float right[3] = {
vbo_data[rightindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[rightindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[rightindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int topindex = (y - 1) * w + x;
float top[3] = {
vbo_data[topindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[topindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[topindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
unsigned int downindex = (y + 1) * w + x;
float down[3] = {
vbo_data[downindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 0],
vbo_data[downindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 1],
vbo_data[downindex * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_VERTEX + 2]
};
//Diff to right
float dx1 = right[0] - left[0];
float dy1 = right[1] - left[1];
float dz1 = right[2] - left[2];
//Diff to bottom
float dx2 = down[0] - top[0];
float dy2 = down[1] - top[1];
float dz2 = down[2] - top[2];
//d1 cross d2
float normx = dy1 * dz2 - dz1 * dy2;
float normy = dz1 * dx2 - dx1 * dz2;
float normz = dx1 * dy2 - dy1 * dx2;
//if n dot p > 0, flip towards viewpoint
if (normx * center[0] + normy * center[1] + normz * center[2] > 0.0f)
{
//Flip towards camera
normx = -normx;
normy = -normy;
normz = -normz;
}
// NOTE(review): length is 0 for degenerate (coincident) neighbours, which
// would write NaN/Inf normals — confirm the input grid excludes this case.
float length = sqrt(normx * normx + normy * normy + normz * normz);
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_NORMAL + 0] = normx / length;
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_NORMAL + 1] = normy / length;
vbo_data[index * ATTRIBUTESIZE + ATTRIBUTE_OFFSET_NORMAL + 2] = normz / length;
}
}
// Launches the per-vertex normal-estimation kernel over a w x h vertex grid.
// The kernel bounds-checks every thread, so over-provisioning the grid is safe.
void launch_kernel(float* pos, unsigned int w, unsigned int h)
{
    // execute the kernel
    dim3 block(8, 8, 1);
    // BUG fix: the original used truncating division (w / block.x), which
    // silently skipped the last partial tile whenever w or h was not a
    // multiple of 8; use ceil-division so every vertex gets a thread.
    dim3 grid((w + block.x - 1) / block.x, (h + block.y - 1) / block.y, 1);
    planeVertexNormalEstimate_kernel << < grid, block >> > (pos, w, h);
}
// Maps an OpenGL VBO into CUDA address space, runs the normal-estimation
// kernel over its w x h vertex grid, then unmaps it so GL can use it again.
// NOTE(review): the map/get-pointer/unmap return codes are not checked here —
// on failure dptr would be used uninitialised; consider adding error checks.
void CudaAlogrithm::planeVertexNormalEstimate(struct cudaGraphicsResource** vbo_resource,
unsigned int w, unsigned int h
)
{
// map OpenGL buffer object for writing from CUDA
float* dptr;
cudaGraphicsMapResources(1, vbo_resource, 0);
size_t num_bytes;
cudaGraphicsResourceGetMappedPointer((void**)&dptr, &num_bytes, *vbo_resource);
launch_kernel(dptr, w, h);
// unmap buffer object
cudaGraphicsUnmapResources(1, vbo_resource, 0);
} |
69ce1d6f24321858922698289a2e5972415211e3.hip | // !!! This is a file automatically generated by hipify!!!
#include <stdio.h>
#include <stdlib.h>
#include <cutil.h>
#include <math.h>
// Includes
#include <stdio.h>
#include "../include/ContAcq-IntClk.h"
// includes, project
#include "../include/sdkHelper.h" // helper for shared functions common to CUDA SDK samples
//#include <shrQATest.h>
//#include <shrUtils.h>
// includes CUDA
#include <hip/hip_runtime.h>
#define THREADS_PER_BLOCK 256
#define NUM_OF_BLOCKS 15
#define F 2
#define ITERATIONS (unsigned)( 10000 )
#define ITERATIONS2 REPLACE_ITERATIONS
#define max_tid THREADS_PER_BLOCK*NUM_OF_BLOCKS
#define LINE_SIZE 128
#define SETS 64
#define ASSOC 6
#define SIMD_WIDTH 32
// Variables
int* h_A;
int* h_B;
int* h_C;
int* d_A;
int* d_B;
int* d_C;
bool noprompt = false;
unsigned int my_timer;
// Functions
void CleanupResources(void);
void RandomInit(int*, int);
void ParseArguments(int, char**);
////////////////////////////////////////////////////////////////////////////////
// These are CUDA Helper functions
// This will output the proper CUDA error strings in the event that a CUDA host call returns an error
#define checkCudaErrors(err) __checkCudaErrors (err, __FILE__, __LINE__)
// Aborts the process with file/line context if a runtime API call failed.
// Invoked through the checkCudaErrors(err) macro defined above.
inline void __checkCudaErrors(hipError_t err, const char *file, const int line ){
if(hipSuccess != err){
fprintf(stderr, "%s(%i) : CUDA Runtime API error %d: %s.\n",file, line, (int)err, hipGetErrorString( err ) );
exit(-1);
}
}
// This will output the proper error string when calling hipGetLastError
#define getLastCudaError(msg) __getLastCudaError (msg, __FILE__, __LINE__)
// Checks the sticky last-error state (catches kernel launch-config errors)
// and aborts with context on failure. Invoked via the getLastCudaError macro.
inline void __getLastCudaError(const char *errorMessage, const char *file, const int line ){
hipError_t err = hipGetLastError();
if (hipSuccess != err){
fprintf(stderr, "%s(%i) : getLastCudaError() CUDA error : %s : (%d) %s.\n",file, line, errorMessage, (int)err, hipGetErrorString( err ) );
exit(-1);
}
}
// end of CUDA Helper Functions
// Device code
// Power-measurement microbenchmark kernel: each thread repeatedly loads
// strided elements of A (overwriting m_sum each time, so only the last load
// of the inner loop survives) and stores the final value to C so the loads
// cannot be optimized away. ITERATIONS2 expands to REPLACE_ITERATIONS — a
// placeholder substituted by the benchmark generator before compilation.
__global__ void PowerKernal(int* A, int* C, int N){
int tid = blockDim.x * blockIdx.x + threadIdx.x;
//Do Some Computation
//int size = (LINE_SIZE*ASSOC*SETS)/sizeof(int);
//unsigned j=0, k=0;
int m_sum=N;
// m_sum = A[tid*F];
for (unsigned j=0; j<ITERATIONS2; j++){
for(unsigned k=0; k<ITERATIONS; ++k){
m_sum=A[((unsigned)(tid*F)+(unsigned)(k*max_tid*F))];
}
m_sum+=j;
}
C[tid]=m_sum;
// Barrier at kernel end; all per-thread stores above are independent.
__syncthreads();
}
// Host code
// Host driver: allocates and fills the input buffer, launches PowerKernal
// while the external DAQ power logger is running (LaunchDAQ/TurnOffDAQ from
// ContAcq-IntClk.h), then copies the result back so the work is observable.
int main(){
printf("Power Microbenchmarks\n");
//int N = LINE_SIZE*SETS*ASSOC;
// Total elements reachable by the strided access pattern in PowerKernal.
unsigned N =((unsigned)(max_tid*F)+(unsigned)(ITERATIONS*max_tid*F));
size_t size = N * sizeof(int);
// Allocate input vectors h_A and h_B in host memory
// NOTE(review): on malloc failure this calls CleanupResources() but then
// falls through and uses the null pointer — it should bail out instead.
h_A = (int*)malloc(size);
if (h_A == 0) CleanupResources();
//h_B = (float*)malloc(size);
//if (h_B == 0) CleanupResources();
h_C = (int*)malloc(size);
if (h_C == 0) CleanupResources();
// Initialize input vectors
RandomInit(h_A, N);
//RandomInit(h_B, N);
// Allocate vectors in device memory
checkCudaErrors( hipMalloc((void**)&d_A, size) );
//checkCudaErrors( hipMalloc((void**)&d_B, size) );
checkCudaErrors( hipMalloc((void**)&d_C, size) );
// Copy vectors from host memory to device memory
checkCudaErrors( hipMemcpy(d_A, h_A, size, hipMemcpyHostToDevice) );
//checkCudaErrors( hipMemcpy(d_B, h_B, size, hipMemcpyHostToDevice) );
//VecAdd<<<blocksPerGrid, threadsPerBlock>>>(d_A, d_B, d_C, N);
dim3 dimGrid(NUM_OF_BLOCKS,1);
dim3 dimBlock(THREADS_PER_BLOCK,1);
CUT_SAFE_CALL(cutCreateTimer(&my_timer));
// Start the power logger and the timer immediately around the kernel run.
TaskHandle taskhandle = LaunchDAQ();
CUT_SAFE_CALL(cutStartTimer(my_timer));
hipLaunchKernelGGL(( PowerKernal), dim3(dimGrid),dim3(dimBlock), 0, 0, d_A, d_C, N);
CUDA_SAFE_CALL( hipDeviceSynchronize() );
printf("execution time = %f\n", cutGetTimerValue(my_timer));
TurnOffDAQ(taskhandle, cutGetTimerValue(my_timer));
CUT_SAFE_CALL(cutStopTimer(my_timer));
CUT_SAFE_CALL(cutDeleteTimer(my_timer));
getLastCudaError("kernel launch failure");
#ifdef _DEBUG
checkCudaErrors( hipDeviceSynchronize() );
#endif
// Copy result from device memory to host memory
// h_C contains the result in host memory
checkCudaErrors( hipMemcpy(h_C, d_C, size, hipMemcpyDeviceToHost) );
CleanupResources();
return 0;
}
// Releases every device and host buffer allocated by main().
// Unallocated (null) handles are skipped, so this is safe to call from the
// early-exit paths before all allocations have happened.
void CleanupResources(void){
    // Device buffers first.
    if (d_A) hipFree(d_A);
    if (d_C) hipFree(d_C);
    // Then the host-side mirrors.
    if (h_A) free(h_A);
    if (h_C) free(h_C);
}
// Fills `data` with n pseudo-random non-negative integers.
// BUG fix: the original computed (int)(rand() / RAND_MAX) using *integer*
// division, which is 0 for every rand() value below RAND_MAX — the buffer
// was effectively all zeros instead of random.
void RandomInit(int* data, int n){
for (int i = 0; i < n; ++i)
data[i] = rand();
}
| 69ce1d6f24321858922698289a2e5972415211e3.cu | #include <stdio.h>
#include <stdlib.h>
#include <cutil.h>
#include <math.h>
// Includes
#include <stdio.h>
#include "../include/ContAcq-IntClk.h"
// includes, project
#include "../include/sdkHelper.h" // helper for shared functions common to CUDA SDK samples
//#include <shrQATest.h>
//#include <shrUtils.h>
// includes CUDA
#include <cuda_runtime.h>
#define THREADS_PER_BLOCK 256
#define NUM_OF_BLOCKS 15
#define F 2
#define ITERATIONS (unsigned)( 10000 )
#define ITERATIONS2 REPLACE_ITERATIONS
#define max_tid THREADS_PER_BLOCK*NUM_OF_BLOCKS
#define LINE_SIZE 128
#define SETS 64
#define ASSOC 6
#define SIMD_WIDTH 32
// Variables
int* h_A;
int* h_B;
int* h_C;
int* d_A;
int* d_B;
int* d_C;
bool noprompt = false;
unsigned int my_timer;
// Functions
void CleanupResources(void);
void RandomInit(int*, int);
void ParseArguments(int, char**);
////////////////////////////////////////////////////////////////////////////////
// These are CUDA Helper functions
// This will output the proper CUDA error strings in the event that a CUDA host call returns an error
#define checkCudaErrors(err) __checkCudaErrors (err, __FILE__, __LINE__)
// Aborts the process with file/line context if a runtime API call failed.
// Invoked through the checkCudaErrors(err) macro defined above.
inline void __checkCudaErrors(cudaError err, const char *file, const int line ){
if(cudaSuccess != err){
fprintf(stderr, "%s(%i) : CUDA Runtime API error %d: %s.\n",file, line, (int)err, cudaGetErrorString( err ) );
exit(-1);
}
}
// This will output the proper error string when calling cudaGetLastError
#define getLastCudaError(msg) __getLastCudaError (msg, __FILE__, __LINE__)
// Checks the sticky last-error state (catches kernel launch-config errors)
// and aborts with context on failure. Invoked via the getLastCudaError macro.
inline void __getLastCudaError(const char *errorMessage, const char *file, const int line ){
cudaError_t err = cudaGetLastError();
if (cudaSuccess != err){
fprintf(stderr, "%s(%i) : getLastCudaError() CUDA error : %s : (%d) %s.\n",file, line, errorMessage, (int)err, cudaGetErrorString( err ) );
exit(-1);
}
}
// end of CUDA Helper Functions
// Device code
// Power-measurement microbenchmark kernel: each thread repeatedly loads
// strided elements of A (overwriting m_sum each time, so only the last load
// of the inner loop survives) and stores the final value to C so the loads
// cannot be optimized away. ITERATIONS2 expands to REPLACE_ITERATIONS — a
// placeholder substituted by the benchmark generator before compilation.
__global__ void PowerKernal(int* A, int* C, int N){
int tid = blockDim.x * blockIdx.x + threadIdx.x;
//Do Some Computation
//int size = (LINE_SIZE*ASSOC*SETS)/sizeof(int);
//unsigned j=0, k=0;
int m_sum=N;
// m_sum = A[tid*F];
for (unsigned j=0; j<ITERATIONS2; j++){
for(unsigned k=0; k<ITERATIONS; ++k){
m_sum=A[((unsigned)(tid*F)+(unsigned)(k*max_tid*F))];
}
m_sum+=j;
}
C[tid]=m_sum;
// Barrier at kernel end; all per-thread stores above are independent.
__syncthreads();
}
// Host code
// Host driver: allocates and fills the input buffer, launches PowerKernal
// while the external DAQ power logger is running (LaunchDAQ/TurnOffDAQ from
// ContAcq-IntClk.h), then copies the result back so the work is observable.
int main(){
    printf("Power Microbenchmarks\n");
    // Total elements reachable by the strided access pattern in PowerKernal.
    unsigned N =((unsigned)(max_tid*F)+(unsigned)(ITERATIONS*max_tid*F));
    size_t size = N * sizeof(int);
    // Allocate the host buffers.
    // BUG fix: the original called CleanupResources() on malloc failure but
    // then fell through and dereferenced the null pointer; bail out instead.
    h_A = (int*)malloc(size);
    if (h_A == 0) { CleanupResources(); return -1; }
    h_C = (int*)malloc(size);
    if (h_C == 0) { CleanupResources(); return -1; }
    // Initialize the input vector.
    RandomInit(h_A, N);
    // Allocate device buffers and upload the input.
    checkCudaErrors( cudaMalloc((void**)&d_A, size) );
    checkCudaErrors( cudaMalloc((void**)&d_C, size) );
    checkCudaErrors( cudaMemcpy(d_A, h_A, size, cudaMemcpyHostToDevice) );
    dim3 dimGrid(NUM_OF_BLOCKS,1);
    dim3 dimBlock(THREADS_PER_BLOCK,1);
    CUT_SAFE_CALL(cutCreateTimer(&my_timer));
    // Start the power logger and the timer immediately around the kernel run.
    TaskHandle taskhandle = LaunchDAQ();
    CUT_SAFE_CALL(cutStartTimer(my_timer));
    PowerKernal<<<dimGrid,dimBlock>>>(d_A, d_C, N);
    // cudaThreadSynchronize() is deprecated; cudaDeviceSynchronize() is the
    // supported equivalent with the same semantics here.
    CUDA_SAFE_CALL( cudaDeviceSynchronize() );
    printf("execution time = %f\n", cutGetTimerValue(my_timer));
    TurnOffDAQ(taskhandle, cutGetTimerValue(my_timer));
    CUT_SAFE_CALL(cutStopTimer(my_timer));
    CUT_SAFE_CALL(cutDeleteTimer(my_timer));
    getLastCudaError("kernel launch failure");
#ifdef _DEBUG
    checkCudaErrors( cudaDeviceSynchronize() );
#endif
    // Copy the result back; h_C now holds the kernel output.
    checkCudaErrors( cudaMemcpy(h_C, d_C, size, cudaMemcpyDeviceToHost) );
    CleanupResources();
    return 0;
}
// Releases every buffer that was successfully allocated. The null checks make
// this safe to call from the partial-allocation error paths in main().
void CleanupResources(void){
    // Device-side buffers first.
    if (d_A) {
        cudaFree(d_A);
    }
    //if (d_B)
    //    cudaFree(d_B);
    if (d_C) {
        cudaFree(d_C);
    }
    // Then the host-side buffers.
    if (h_A) {
        free(h_A);
    }
    // if (h_B)
    //    free(h_B);
    if (h_C) {
        free(h_C);
    }
}
// Fills data[0..n) with pseudo-random non-negative ints (n == 0 is a no-op).
// BUG FIX: the original wrote (int)(rand() / RAND_MAX) — INTEGER division,
// which is 0 for every draw except rand() == RAND_MAX, so the whole input
// array was effectively zero. PowerKernal only streams the data, so the exact
// distribution is irrelevant; the raw rand() value is used instead.
void RandomInit(int* data, int n){
for (int i = 0; i < n; ++i)
data[i] = rand();
}
|
ca3676681889897c45daa46db0f54dc7390de51b.hip | // !!! This is a file automatically generated by hipify!!!
// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <thrust/transform.h>
#include "paddle/fluid/operators/cumprod_op.h"
#include "paddle/fluid/operators/math/complex_functors.h"
#include "paddle/fluid/operators/math/inclusive_scan.h"
#include "paddle/fluid/platform/for_range.h"
namespace paddle {
namespace operators {
// Binary product functor, usable on host and device; serves as the scan
// operator for the cumulative product and for elementwise products.
template <typename T>
struct MultiplyFunctor {
  HOSTDEVICE T operator()(T lhs, T rhs) const {
    return lhs * rhs;
  }
};
// Forward GPU kernel for cumprod: an inclusive scan with multiplication as
// the combining operator along the `dim` attribute.
template <typename T>
class CumprodOpCUDAKernel : public framework::OpKernel<T> {
 public:
  void Compute(const framework::ExecutionContext &ctx) const override {
    const auto *in_tensor = ctx.Input<framework::Tensor>("X");
    auto *out_tensor = ctx.Output<framework::Tensor>("Out");
    auto scan_dim = ctx.Attr<int>("dim");
    // Collapse the shape into (outer, mid, inner); `mid` is the scanned axis.
    size_t outer_dim, mid_dim, inner_dim;
    GetCumprodDimInfo(in_tensor->dims(), scan_dim, &outer_dim, &mid_dim,
                      &inner_dim);
    const auto *in_data = in_tensor->data<T>();
    auto *out_data = out_tensor->mutable_data<T>(ctx.GetPlace());
    const auto &dev_ctx =
        ctx.template device_context<platform::CUDADeviceContext>();
    // Identity element is 1; forward (non-reversed) inclusive scan.
    math::InclusiveScan<T, MultiplyFunctor<T>>(
        in_data, out_data, outer_dim, mid_dim, inner_dim, static_cast<T>(1),
        MultiplyFunctor<T>(), /*reverse=*/false, dev_ctx);
  }
};
// Predicate functor: true iff the element equals T(0). Feeds the zero-mask
// transform in the cumprod backward pass.
template <typename T>
struct IsZeroFunctor {
  HOSTDEVICE bool operator()(T value) const {
    return value == static_cast<T>(0);
  }
};
// Computes dL/dx for every position, except that the FIRST zero along the
// scanned (mid) dimension receives only a partial value (y[idx - inner_dim]);
// the remaining factor is multiplied in later by
// FillFirstZeroPositionGradFunctor. Also emits:
//   first_zero_idx[outer, inner] : mid-index of the first zero, or -1 if none
//   x_filled_one                 : x with entries at/before the first zero
//                                  replaced by 1 (input to a later cumprod)
// zero_mask is the cummax-ed "has a zero occurred yet" mask along mid.
template <typename T>
struct CumprodGradFunctorExceptFirstZero {
HOSTDEVICE CumprodGradFunctorExceptFirstZero(
const T *x, const T *y, const T *dy_mul_y_reversed_cumsum,
const uint8_t *zero_mask, size_t mid_dim, size_t inner_dim, T *dx,
int64_t *first_zero_idx, T *x_filled_one)
: x_(x),
y_(y),
dy_mul_y_reversed_cumsum_(dy_mul_y_reversed_cumsum),
zero_mask_(zero_mask),
mid_dim_(mid_dim),
inner_dim_(inner_dim),
dx_(dx),
first_zero_idx_(first_zero_idx),
x_filled_one_(x_filled_one) {}
// idx is a flat index over the (outer, mid, inner) layout.
HOSTDEVICE void operator()(size_t idx) const {
auto inner_idx = idx % inner_dim_;
auto outer_idx = idx / (mid_dim_ * inner_dim_);
auto mid_idx = (idx - inner_idx) / inner_dim_ % mid_dim_;
auto mask = zero_mask_[idx];
bool should_fill_one = true;
if (mask == 0) {
// No zero at or before this position: dx = sum_{j>=i}(dy[j]*y[j]) / x[i].
dx_[idx] = dy_mul_y_reversed_cumsum_[idx] / x_[idx];
if (mid_idx == mid_dim_ - 1) {
// record first zero position as -1, i.e., no zero
first_zero_idx_[outer_idx * inner_dim_ + inner_idx] = -1;
}
} else if (mid_idx > 0) { // mask > 0
if (zero_mask_[idx - inner_dim_] > 0) { // not first zero
// Strictly after the first zero: gradient is exactly 0, and x is kept
// (not replaced by 1) in x_filled_one.
dx_[idx] = 0;
should_fill_one = false;
} else {
// idx is the first zero position, it should be recorded
dx_[idx] = y_[idx - inner_dim_];
first_zero_idx_[outer_idx * inner_dim_ + inner_idx] = mid_idx;
}
} else { // the first zero position is index 0
dx_[idx] = 1;
first_zero_idx_[outer_idx * inner_dim_ + inner_idx] = 0;
}
x_filled_one_[idx] = should_fill_one ? 1 : x_[idx];
}
private:
const T *x_;
const T *y_;
const T *dy_mul_y_reversed_cumsum_;
const uint8_t *zero_mask_;
size_t mid_dim_;
size_t inner_dim_;
T *dx_;
int64_t *first_zero_idx_;
T *x_filled_one_;
};
// Second pass of the zero-position gradient: for each (outer, inner) column
// whose first_zero_idx is >= 0, multiply the partial dx stored at that
// position by grad_value (the reversed cumsum of dy * cumprod(x_filled_one)).
// Columns with no zero (first_zero_idx == -1) are left untouched.
template <typename T>
struct FillFirstZeroPositionGradFunctor {
HOSTDEVICE FillFirstZeroPositionGradFunctor(const int64_t *first_zero_idx,
const T *grad_value,
size_t mid_dim, size_t inner_dim,
T *dx)
: first_zero_idx_(first_zero_idx),
grad_value_(grad_value),
mid_dim_(mid_dim),
inner_dim_(inner_dim),
dx_(dx) {}
// idx is a flat index over (outer, inner) pairs — one entry per column.
HOSTDEVICE void operator()(size_t idx) const {
auto outer_idx = idx / inner_dim_;
auto inner_idx = idx % inner_dim_;
auto mid_idx = first_zero_idx_[idx];
if (mid_idx >= 0) {
// Expand (outer, mid, inner) back into the flat element index.
auto full_idx =
outer_idx * mid_dim_ * inner_dim_ + mid_idx * inner_dim_ + inner_idx;
dx_[full_idx] *= grad_value_[full_idx];
}
}
private:
const int64_t *first_zero_idx_;
const T *grad_value_;
size_t mid_dim_;
size_t inner_dim_;
T *dx_;
};
/*
Reference to
https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/native/ReduceOps.cpp
input: x, y, dL/dy
output: dL/dx
dL/dx[i] = sum{0<=j<n} (dL/dy[j])*(dy[j]/dx[i]) (1)
= sum{0<=j<n} (dL/dy[j])*(d(x[0]*x[1]*...*x[j])/dx[i])
if x[i] != 0, dL/dx[i] = sum{i<=j<n} (dL/dy[j])*(y[j]/x[i]) (2)
if x[i] == 0, the formula(2) can not be applied directly.
Suppose k is the first index of zero element, the formula will be:
i > k, dL/dx[i] = 0;
i < k, dL/dx[i] = 1/x[i]*sum{i<=j<n} (dL/dy[j]*y[j])
i = k, dL/dx[i] = y[i-1]*sum{i<=j<n} (dL/dy[j])*(x[i+1]*...*x[j])
First, we will show the main resolution.
We need to judge the relationship between i (current index) and k (index
which corresponds to the first element of 0).
To mark the relationship, we now introduce zero_mask and we also need to
mark the index of the first zero element.
zero_mask = cummax(x[i] == 0); //label whether x[i]==0 until the index.
zero_index = -1; //store the first zero element's index.
e.g. x = [1, 4, 5, 0, 2, 3, 0];
zero_mask = [0, 0, 0, 1, 1, 1, 1];
zero_index = 3;
When i < k, we need to calculate the result of sum{i<=j<n}(d_y[j]*y[j]), we can
use reversed cumsum to calculate it.
R = reversed_cumsum(dy[j]*y[j]); //store the calculation result of the
sum{i<=j<n}(d_y[j]*y[j]) and x[k+1],x[k+2],...,x[j] along the index k+1 ~ j.
When i = k, we need to calculate the result of prod{i<w<j}(x[w]).
To calculate it, we introduce x_filled_one, which fill 1 before x[k+1] along
the index 0 ~ k.
e.g. x = [1, 4, 5, 0, 2, 3, 0];
x_filled_one = [1, 1, 1, 1, 2, 3, 0];
Thus, we can use cumprod(x_filled_one[j]) to calculate the result of
prod{k<=w<j}(x[w]).
Then, we will show more detailed implementation.
for (int i = 0; i < numel; i++) {
if (zero_mask[i] == 0) { //case i < k
dx[i] = R[i] / x[i];
x_filled_one[i] = 1;
} else {
if (i == 0) { //case i = k
dx[i] = 1;
zero_index = i;
x_filled_one[i] = 1;
} else {
if (zero_mask[i-1] == 0) { //case i = k
dx[i] = y[i-1];
zero_index = i;
x_filled_one[i] = 1;
} else { //case i > k
dx[i] = 0;
x_filled_one[i] = x[i];
}
}
}
}
T = reversed_cumsum(dy[j]*cumprod(x_filled_one[j]));
if (zero_index != -1) {
dx[zero_index] *= T[zero_index];
}
*/
// Backward kernel for cumprod (hipified build). Implements the zero-aware
// gradient derived in the comment block above:
//   Step 1: zero_mask = cummax(x == 0) along the scanned dimension
//   Step 2: R = reversed_cumsum(dy * y)
//   Step 3: dx for all positions except the first zero; also records the
//           first zero index per column and builds x_filled_one
//   Step 4: cumprod(x_filled_one)
//   Step 5: T = reversed_cumsum(dy * cumprod(x_filled_one))
//   Step 6: dx[first_zero] *= T[first_zero]
// For complex T the conjugates of x and y are used.
template <typename T>
class CumprodGradOpCUDAKernel : public framework::OpKernel<T> {
 public:
  void Compute(const framework::ExecutionContext &ctx) const override {
    const auto *x = ctx.Input<framework::Tensor>("X");
    const auto *y = ctx.Input<framework::Tensor>("Out");
    const auto *dy =
        ctx.Input<framework::Tensor>(framework::GradVarName("Out"));
    auto *dx = ctx.Output<framework::Tensor>(framework::GradVarName("X"));
    auto dim = ctx.Attr<int>("dim");
    size_t outer_dim, mid_dim, inner_dim;
    GetCumprodDimInfo(x->dims(), dim, &outer_dim, &mid_dim, &inner_dim);
    // Empty tensor: nothing to do.
    if (outer_dim == 0 || mid_dim == 0 || inner_dim == 0) return;
    size_t numel = outer_dim * mid_dim * inner_dim;
    const auto *x_data = x->data<T>();
    const auto *y_data = y->data<T>();
    const auto *dy_data = dy->data<T>();
    auto place = BOOST_GET_CONST(platform::CUDAPlace, ctx.GetPlace());
    const auto &dev_ctx =
        ctx.template device_context<platform::CUDADeviceContext>();
    auto *dx_data = dx->mutable_data<T>(place);
    // For complex types the gradient is taken w.r.t. conj(x)/conj(y), so
    // conjugated copies are materialized once up front.
    const T *x_data_deal;
    const T *y_data_deal;
    memory::AllocationPtr x_conj;
    memory::AllocationPtr y_conj;
    if (framework::IsComplex<T>::value) {
      x_conj = memory::Alloc(place, numel * sizeof(T));
      auto *x_data_conj = reinterpret_cast<T *>(x_conj->ptr());
      y_conj = memory::Alloc(place, numel * sizeof(T));
      auto *y_data_conj = reinterpret_cast<T *>(y_conj->ptr());
      platform::ForRange<platform::CUDADeviceContext> for_range_x(dev_ctx,
                                                                  numel);
      math::ConjFunctor<T> functor_x(x_data, numel, x_data_conj);
      for_range_x(functor_x);
      platform::ForRange<platform::CUDADeviceContext> for_range_y(dev_ctx,
                                                                  numel);
      math::ConjFunctor<T> functor_y(y_data, numel, y_data_conj);
      for_range_y(functor_y);
      x_data_deal = x_data_conj;
      y_data_deal = y_data_conj;
    } else {
      x_data_deal = x_data;
      y_data_deal = y_data;
    }
    // Step 1: find cummax-ed zero mask of x
#ifdef PADDLE_WITH_CUDA
    // BUG FIX: the CUDA build must use the CUDA Thrust execution policy.
    // The hipified code had thrust::hip::par in BOTH branches, which is
    // wrong (and does not exist) when compiling with PADDLE_WITH_CUDA.
    const auto &exec_policy = thrust::cuda::par.on(dev_ctx.stream());
#else
    const auto &exec_policy = thrust::hip::par.on(dev_ctx.stream());
#endif
    auto zero_mask_without_cummax =
        memory::Alloc(place, numel * sizeof(uint8_t));
    auto *zero_mask_without_cummax_data =
        reinterpret_cast<uint8_t *>(zero_mask_without_cummax->ptr());
    thrust::transform(
        exec_policy, thrust::device_pointer_cast(x_data_deal),
        thrust::device_pointer_cast(x_data_deal) + numel,
        thrust::device_pointer_cast(zero_mask_without_cummax_data),
        IsZeroFunctor<T>());
    auto zero_mask = memory::Alloc(place, numel * sizeof(uint8_t));
    auto *zero_mask_data = reinterpret_cast<uint8_t *>(zero_mask->ptr());
    math::InclusiveScan<uint8_t, hipcub::Max>(
        zero_mask_without_cummax_data, zero_mask_data, outer_dim, mid_dim,
        inner_dim, static_cast<uint8_t>(0), hipcub::Max(), /*reverse=*/false,
        dev_ctx);
    // The un-cummaxed mask is no longer needed; release it early.
    zero_mask_without_cummax = nullptr;
    // Step 2: calculate reversed cumsum(dy * y)
    auto dy_mul_y = memory::Alloc(place, numel * sizeof(T));
    auto *dy_mul_y_data = reinterpret_cast<T *>(dy_mul_y->ptr());
    thrust::transform(exec_policy, thrust::device_pointer_cast(dy_data),
                      thrust::device_pointer_cast(dy_data) + numel,
                      thrust::device_pointer_cast(y_data_deal),
                      thrust::device_pointer_cast(dy_mul_y_data),
                      MultiplyFunctor<T>());
    auto dy_mul_y_reversed_cumsum = memory::Alloc(place, numel * sizeof(T));
    auto *dy_mul_y_reversed_cumsum_data =
        reinterpret_cast<T *>(dy_mul_y_reversed_cumsum->ptr());
    math::InclusiveScan<T, hipcub::Sum>(
        dy_mul_y_data, dy_mul_y_reversed_cumsum_data, outer_dim, mid_dim,
        inner_dim, static_cast<T>(0), hipcub::Sum(), /*reverse=*/true, dev_ctx);
    // Step 3: calculate the gradient value except the first zero position.
    // The gradient value of the first zero position is filled with out[idx-1],
    // while the gradient value of the other positions are calculated out
    // completely. This functor also:
    //   (1) finds the first zero index, i.e., first_zero_idx_data.
    //   (2) fills x_filled_one, which satisfies
    //       x_filled_one[i] = x[i], i > pos
    //       x_filled_one[i] = 1,    i <= pos
    auto first_zero_idx =
        memory::Alloc(place, outer_dim * inner_dim * sizeof(int64_t));
    auto *first_zero_idx_data =
        reinterpret_cast<int64_t *>(first_zero_idx->ptr());
    auto *x_filled_one_data = dy_mul_y_data;  // reuse former allocated memory
    platform::ForRange<platform::CUDADeviceContext> for_range(dev_ctx, numel);
    CumprodGradFunctorExceptFirstZero<T> functor_except_first_zero(
        x_data_deal, y_data_deal, dy_mul_y_reversed_cumsum_data, zero_mask_data,
        mid_dim, inner_dim, dx_data, first_zero_idx_data, x_filled_one_data);
    for_range(functor_except_first_zero);
    // Step 4: calculate cumprod of x_filled_one
    auto *x_filled_one_cumprod_data =
        dy_mul_y_reversed_cumsum_data;  // reuse former allocated memory
    math::InclusiveScan<T, MultiplyFunctor<T>>(
        x_filled_one_data, x_filled_one_cumprod_data, outer_dim, mid_dim,
        inner_dim, static_cast<T>(1), MultiplyFunctor<T>(), /*reverse=*/false,
        dev_ctx);
    // Step 5: calculate reversed cumsum(dy * x_filled_one_cumprod)
    auto *dy_mul_x_filled_one_cumprod =
        dy_mul_y_data;  // reuse former allocated memory
    thrust::transform(exec_policy, thrust::device_pointer_cast(dy_data),
                      thrust::device_pointer_cast(dy_data) + numel,
                      thrust::device_pointer_cast(x_filled_one_cumprod_data),
                      thrust::device_pointer_cast(dy_mul_x_filled_one_cumprod),
                      MultiplyFunctor<T>());
    auto *dy_mul_x_filled_one_cumprod_reversed_cumsum =
        dy_mul_y_reversed_cumsum_data;  // reuse former allocated memory
    math::InclusiveScan<T, hipcub::Sum>(
        dy_mul_x_filled_one_cumprod,
        dy_mul_x_filled_one_cumprod_reversed_cumsum, outer_dim, mid_dim,
        inner_dim, static_cast<T>(0), hipcub::Sum(),
        /*reverse=*/true, dev_ctx);
    // Step 6: fill zero pos gradient value (one entry per (outer, inner)).
    platform::ForRange<platform::CUDADeviceContext>
        for_range_fill_zero_pos_grad(dev_ctx, outer_dim * inner_dim);
    FillFirstZeroPositionGradFunctor<T> fill_first_zero_pos_grad_functor(
        first_zero_idx_data, dy_mul_x_filled_one_cumprod_reversed_cumsum,
        mid_dim, inner_dim, dx_data);
    for_range_fill_zero_pos_grad(fill_first_zero_pos_grad_functor);
  }
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
// Register the forward and backward cumprod kernels for the GPU place.
// Complex element types are registered because cumprod is defined for
// complex tensors (the grad kernel conjugates x and y accordingly).
REGISTER_OP_CUDA_KERNEL(
cumprod, ops::CumprodOpCUDAKernel<float>, ops::CumprodOpCUDAKernel<double>,
ops::CumprodOpCUDAKernel<int>, ops::CumprodOpCUDAKernel<int64_t>,
ops::CumprodOpCUDAKernel<paddle::platform::complex<float>>,
ops::CumprodOpCUDAKernel<paddle::platform::complex<double>>);
REGISTER_OP_CUDA_KERNEL(
cumprod_grad, ops::CumprodGradOpCUDAKernel<float>,
ops::CumprodGradOpCUDAKernel<double>, ops::CumprodGradOpCUDAKernel<int>,
ops::CumprodGradOpCUDAKernel<int64_t>,
ops::CumprodGradOpCUDAKernel<paddle::platform::complex<float>>,
ops::CumprodGradOpCUDAKernel<paddle::platform::complex<double>>);
| ca3676681889897c45daa46db0f54dc7390de51b.cu | // Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <thrust/transform.h>
#include "paddle/fluid/operators/cumprod_op.h"
#include "paddle/fluid/operators/math/complex_functors.h"
#include "paddle/fluid/operators/math/inclusive_scan.h"
#include "paddle/fluid/platform/for_range.h"
namespace paddle {
namespace operators {
// Host/device functor computing the product of two values; used as the
// combining operator for the cumulative-product scan and thrust transforms.
template <typename T>
struct MultiplyFunctor {
  HOSTDEVICE T operator()(T u, T v) const { return u * v; }
};
// Forward cumprod on GPU: inclusive multiply-scan along the `dim` attribute.
template <typename T>
class CumprodOpCUDAKernel : public framework::OpKernel<T> {
 public:
  void Compute(const framework::ExecutionContext &ctx) const override {
    const auto *input = ctx.Input<framework::Tensor>("X");
    auto *output = ctx.Output<framework::Tensor>("Out");
    auto axis = ctx.Attr<int>("dim");
    // Factor the tensor shape as (outer, mid, inner) with `mid` the scan axis.
    size_t outer_dim, mid_dim, inner_dim;
    GetCumprodDimInfo(input->dims(), axis, &outer_dim, &mid_dim, &inner_dim);
    const auto *src = input->data<T>();
    auto *dst = output->mutable_data<T>(ctx.GetPlace());
    const auto &dev_ctx =
        ctx.template device_context<platform::CUDADeviceContext>();
    // Inclusive scan, identity 1, scanning forward along `mid`.
    math::InclusiveScan<T, MultiplyFunctor<T>>(
        src, dst, outer_dim, mid_dim, inner_dim, static_cast<T>(1),
        MultiplyFunctor<T>(), /*reverse=*/false, dev_ctx);
  }
};
// Returns true when the element compares equal to zero; used to build the
// zero mask in the cumprod backward pass.
template <typename T>
struct IsZeroFunctor {
  HOSTDEVICE bool operator()(T v) const { return v == static_cast<T>(0); }
};
// Computes dL/dx for every position, except that the FIRST zero along the
// scanned (mid) dimension receives only a partial value (y[idx - inner_dim]);
// the remaining factor is multiplied in later by
// FillFirstZeroPositionGradFunctor. Also emits:
//   first_zero_idx[outer, inner] : mid-index of the first zero, or -1 if none
//   x_filled_one                 : x with entries at/before the first zero
//                                  replaced by 1 (input to a later cumprod)
// zero_mask is the cummax-ed "has a zero occurred yet" mask along mid.
template <typename T>
struct CumprodGradFunctorExceptFirstZero {
HOSTDEVICE CumprodGradFunctorExceptFirstZero(
const T *x, const T *y, const T *dy_mul_y_reversed_cumsum,
const uint8_t *zero_mask, size_t mid_dim, size_t inner_dim, T *dx,
int64_t *first_zero_idx, T *x_filled_one)
: x_(x),
y_(y),
dy_mul_y_reversed_cumsum_(dy_mul_y_reversed_cumsum),
zero_mask_(zero_mask),
mid_dim_(mid_dim),
inner_dim_(inner_dim),
dx_(dx),
first_zero_idx_(first_zero_idx),
x_filled_one_(x_filled_one) {}
// idx is a flat index over the (outer, mid, inner) layout.
HOSTDEVICE void operator()(size_t idx) const {
auto inner_idx = idx % inner_dim_;
auto outer_idx = idx / (mid_dim_ * inner_dim_);
auto mid_idx = (idx - inner_idx) / inner_dim_ % mid_dim_;
auto mask = zero_mask_[idx];
bool should_fill_one = true;
if (mask == 0) {
// No zero at or before this position: dx = sum_{j>=i}(dy[j]*y[j]) / x[i].
dx_[idx] = dy_mul_y_reversed_cumsum_[idx] / x_[idx];
if (mid_idx == mid_dim_ - 1) {
// record first zero position as -1, i.e., no zero
first_zero_idx_[outer_idx * inner_dim_ + inner_idx] = -1;
}
} else if (mid_idx > 0) { // mask > 0
if (zero_mask_[idx - inner_dim_] > 0) { // not first zero
// Strictly after the first zero: gradient is exactly 0, and x is kept
// (not replaced by 1) in x_filled_one.
dx_[idx] = 0;
should_fill_one = false;
} else {
// idx is the first zero position, it should be recorded
dx_[idx] = y_[idx - inner_dim_];
first_zero_idx_[outer_idx * inner_dim_ + inner_idx] = mid_idx;
}
} else { // the first zero position is index 0
dx_[idx] = 1;
first_zero_idx_[outer_idx * inner_dim_ + inner_idx] = 0;
}
x_filled_one_[idx] = should_fill_one ? 1 : x_[idx];
}
private:
const T *x_;
const T *y_;
const T *dy_mul_y_reversed_cumsum_;
const uint8_t *zero_mask_;
size_t mid_dim_;
size_t inner_dim_;
T *dx_;
int64_t *first_zero_idx_;
T *x_filled_one_;
};
// Second pass of the zero-position gradient: for each (outer, inner) column
// whose first_zero_idx is >= 0, multiply the partial dx stored at that
// position by grad_value (the reversed cumsum of dy * cumprod(x_filled_one)).
// Columns with no zero (first_zero_idx == -1) are left untouched.
template <typename T>
struct FillFirstZeroPositionGradFunctor {
HOSTDEVICE FillFirstZeroPositionGradFunctor(const int64_t *first_zero_idx,
const T *grad_value,
size_t mid_dim, size_t inner_dim,
T *dx)
: first_zero_idx_(first_zero_idx),
grad_value_(grad_value),
mid_dim_(mid_dim),
inner_dim_(inner_dim),
dx_(dx) {}
// idx is a flat index over (outer, inner) pairs — one entry per column.
HOSTDEVICE void operator()(size_t idx) const {
auto outer_idx = idx / inner_dim_;
auto inner_idx = idx % inner_dim_;
auto mid_idx = first_zero_idx_[idx];
if (mid_idx >= 0) {
// Expand (outer, mid, inner) back into the flat element index.
auto full_idx =
outer_idx * mid_dim_ * inner_dim_ + mid_idx * inner_dim_ + inner_idx;
dx_[full_idx] *= grad_value_[full_idx];
}
}
private:
const int64_t *first_zero_idx_;
const T *grad_value_;
size_t mid_dim_;
size_t inner_dim_;
T *dx_;
};
/*
Reference to
https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/native/ReduceOps.cpp
input: x, y, dL/dy
output: dL/dx
dL/dx[i] = sum{0<=j<n} (dL/dy[j])*(dy[j]/dx[i]) (1)
= sum{0<=j<n} (dL/dy[j])*(d(x[0]*x[1]*...*x[j])/dx[i])
if x[i] != 0, dL/dx[i] = sum{i<=j<n} (dL/dy[j])*(y[j]/x[i]) (2)
if x[i] == 0, the formula(2) can not be applied directly.
Suppose k is the first index of zero element, the formula will be:
i > k, dL/dx[i] = 0;
i < k, dL/dx[i] = 1/x[i]*sum{i<=j<n} (dL/dy[j]*y[j])
i = k, dL/dx[i] = y[i-1]*sum{i<=j<n} (dL/dy[j])*(x[i+1]*...*x[j])
First, we will show the main resolution.
We need to judge the relationship between i (current index) and k (index
which corresponds to the first element of 0).
To mark the relationship, we now introduce zero_mask and we also need to
mark the index of the first zero element.
zero_mask = cummax(x[i] == 0); //label whether x[i]==0 until the index.
zero_index = -1; //store the first zero element's index.
e.g. x = [1, 4, 5, 0, 2, 3, 0];
zero_mask = [0, 0, 0, 1, 1, 1, 1];
zero_index = 3;
When i < k, we need to calculate the result of sum{i<=j<n}(d_y[j]*y[j]), we can
use reversed cumsum to calculate it.
R = reversed_cumsum(dy[j]*y[j]); //store the calculation result of the
sum{i<=j<n}(d_y[j]*y[j]) and x[k+1],x[k+2],...,x[j] along the index k+1 ~ j.
When i = k, we need to calculate the result of prod{i<w<j}(x[w]).
To calculate it, we introduce x_filled_one, which fill 1 before x[k+1] along
the index 0 ~ k.
e.g. x = [1, 4, 5, 0, 2, 3, 0];
x_filled_one = [1, 1, 1, 1, 2, 3, 0];
Thus, we can use cumprod(x_filled_one[j]) to calculate the result of
prod{k<=w<j}(x[w]).
Then, we will show more detailed implementation.
for (int i = 0; i < numel; i++) {
if (zero_mask[i] == 0) { //case i < k
dx[i] = R[i] / x[i];
x_filled_one[i] = 1;
} else {
if (i == 0) { //case i = k
dx[i] = 1;
zero_index = i;
x_filled_one[i] = 1;
} else {
if (zero_mask[i-1] == 0) { //case i = k
dx[i] = y[i-1];
zero_index = i;
x_filled_one[i] = 1;
} else { //case i > k
dx[i] = 0;
x_filled_one[i] = x[i];
}
}
}
}
T = reversed_cumsum(dy[j]*cumprod(x_filled_one[j]));
if (zero_index != -1) {
dx[zero_index] *= T[zero_index];
}
*/
// Backward kernel for cumprod. Implements the zero-aware gradient derived in
// the comment block above:
//   Step 1: zero_mask = cummax(x == 0) along the scanned dimension
//   Step 2: R = reversed_cumsum(dy * y)
//   Step 3: dx for all positions except the first zero; records the first
//           zero index per column and builds x_filled_one
//   Step 4: cumprod(x_filled_one)
//   Step 5: T = reversed_cumsum(dy * cumprod(x_filled_one))
//   Step 6: dx[first_zero] *= T[first_zero]
// For complex T the conjugates of x and y are used. Several scratch buffers
// are deliberately reused between steps (see the "reuse" comments) — the
// statement order is load-bearing.
template <typename T>
class CumprodGradOpCUDAKernel : public framework::OpKernel<T> {
public:
void Compute(const framework::ExecutionContext &ctx) const override {
const auto *x = ctx.Input<framework::Tensor>("X");
const auto *y = ctx.Input<framework::Tensor>("Out");
const auto *dy =
ctx.Input<framework::Tensor>(framework::GradVarName("Out"));
auto *dx = ctx.Output<framework::Tensor>(framework::GradVarName("X"));
auto dim = ctx.Attr<int>("dim");
size_t outer_dim, mid_dim, inner_dim;
GetCumprodDimInfo(x->dims(), dim, &outer_dim, &mid_dim, &inner_dim);
// Empty tensor: nothing to compute.
if (outer_dim == 0 || mid_dim == 0 || inner_dim == 0) return;
size_t numel = outer_dim * mid_dim * inner_dim;
const auto *x_data = x->data<T>();
const auto *y_data = y->data<T>();
const auto *dy_data = dy->data<T>();
auto place = BOOST_GET_CONST(platform::CUDAPlace, ctx.GetPlace());
const auto &dev_ctx =
ctx.template device_context<platform::CUDADeviceContext>();
auto *dx_data = dx->mutable_data<T>(place);
// deal with complex: materialize conj(x)/conj(y) once, since the gradient
// for complex types is taken w.r.t. the conjugates.
const T *x_data_deal;
const T *y_data_deal;
memory::AllocationPtr x_conj;
memory::AllocationPtr y_conj;
if (framework::IsComplex<T>::value) {
x_conj = memory::Alloc(place, numel * sizeof(T));
auto *x_data_conj = reinterpret_cast<T *>(x_conj->ptr());
y_conj = memory::Alloc(place, numel * sizeof(T));
auto *y_data_conj = reinterpret_cast<T *>(y_conj->ptr());
platform::ForRange<platform::CUDADeviceContext> for_range_x(dev_ctx,
numel);
math::ConjFunctor<T> functor_x(x_data, numel, x_data_conj);
for_range_x(functor_x);
platform::ForRange<platform::CUDADeviceContext> for_range_y(dev_ctx,
numel);
math::ConjFunctor<T> functor_y(y_data, numel, y_data_conj);
for_range_y(functor_y);
x_data_deal = x_data_conj;
y_data_deal = y_data_conj;
} else {
x_data_deal = x_data;
y_data_deal = y_data;
}
// Step 1: find cummax-ed zero mask of x
// Pick the Thrust execution policy matching the build (CUDA vs ROCm) so
// all thrust calls below run on this op's stream.
#ifdef PADDLE_WITH_CUDA
const auto &exec_policy = thrust::cuda::par.on(dev_ctx.stream());
#else
const auto &exec_policy = thrust::hip::par.on(dev_ctx.stream());
#endif
auto zero_mask_without_cummax =
memory::Alloc(place, numel * sizeof(uint8_t));
auto *zero_mask_without_cummax_data =
reinterpret_cast<uint8_t *>(zero_mask_without_cummax->ptr());
thrust::transform(
exec_policy, thrust::device_pointer_cast(x_data_deal),
thrust::device_pointer_cast(x_data_deal) + numel,
thrust::device_pointer_cast(zero_mask_without_cummax_data),
IsZeroFunctor<T>());
auto zero_mask = memory::Alloc(place, numel * sizeof(uint8_t));
auto *zero_mask_data = reinterpret_cast<uint8_t *>(zero_mask->ptr());
math::InclusiveScan<uint8_t, cub::Max>(
zero_mask_without_cummax_data, zero_mask_data, outer_dim, mid_dim,
inner_dim, static_cast<uint8_t>(0), cub::Max(), /*reverse=*/false,
dev_ctx);
// The un-cummaxed mask is no longer needed; release it early.
zero_mask_without_cummax = nullptr;
// Step 2: calculate reversed cumsum(dy * y)
auto dy_mul_y = memory::Alloc(place, numel * sizeof(T));
auto *dy_mul_y_data = reinterpret_cast<T *>(dy_mul_y->ptr());
thrust::transform(exec_policy, thrust::device_pointer_cast(dy_data),
thrust::device_pointer_cast(dy_data) + numel,
thrust::device_pointer_cast(y_data_deal),
thrust::device_pointer_cast(dy_mul_y_data),
MultiplyFunctor<T>());
auto dy_mul_y_reversed_cumsum = memory::Alloc(place, numel * sizeof(T));
auto *dy_mul_y_reversed_cumsum_data =
reinterpret_cast<T *>(dy_mul_y_reversed_cumsum->ptr());
math::InclusiveScan<T, cub::Sum>(
dy_mul_y_data, dy_mul_y_reversed_cumsum_data, outer_dim, mid_dim,
inner_dim, static_cast<T>(0), cub::Sum(), /*reverse=*/true, dev_ctx);
// Step 3: calculate the gradient value except the first zero position.
// The gradient value of the first zero position is filled with out[idx-1],
// while the gradient value of the other positions are calculated out
// completely. This functor also:
// (1) find the first zero index, i.e., first_zero_idx_data.
// (2) fill x_filled_one, which satisfies
// x_filled_one[i] = x[i], i > pos
// x_filled_one[i] = 1, i <= pos
auto first_zero_idx =
memory::Alloc(place, outer_dim * inner_dim * sizeof(int64_t));
auto *first_zero_idx_data =
reinterpret_cast<int64_t *>(first_zero_idx->ptr());
auto *x_filled_one_data = dy_mul_y_data; // reuse former allocated memory
platform::ForRange<platform::CUDADeviceContext> for_range(dev_ctx, numel);
CumprodGradFunctorExceptFirstZero<T> functor_except_first_zero(
x_data_deal, y_data_deal, dy_mul_y_reversed_cumsum_data, zero_mask_data,
mid_dim, inner_dim, dx_data, first_zero_idx_data, x_filled_one_data);
for_range(functor_except_first_zero);
// Step 4: calculate cumprod of x_filled_one
auto *x_filled_one_cumprod_data =
dy_mul_y_reversed_cumsum_data; // reuse former allocated memory
math::InclusiveScan<T, MultiplyFunctor<T>>(
x_filled_one_data, x_filled_one_cumprod_data, outer_dim, mid_dim,
inner_dim, static_cast<T>(1), MultiplyFunctor<T>(), /*reverse=*/false,
dev_ctx);
// Step 5: calculate reversed cumsum(dy * x_filled_one_cumprod)
auto *dy_mul_x_filled_one_cumprod =
dy_mul_y_data; // reuse former allocated memory
thrust::transform(exec_policy, thrust::device_pointer_cast(dy_data),
thrust::device_pointer_cast(dy_data) + numel,
thrust::device_pointer_cast(x_filled_one_cumprod_data),
thrust::device_pointer_cast(dy_mul_x_filled_one_cumprod),
MultiplyFunctor<T>());
auto *dy_mul_x_filled_one_cumprod_reversed_cumsum =
dy_mul_y_reversed_cumsum_data; // reuse former allocated memory
math::InclusiveScan<T, cub::Sum>(
dy_mul_x_filled_one_cumprod,
dy_mul_x_filled_one_cumprod_reversed_cumsum, outer_dim, mid_dim,
inner_dim, static_cast<T>(0), cub::Sum(),
/*reverse=*/true, dev_ctx);
// Step 6: fill zero pos gradient value (one entry per (outer, inner) pair)
platform::ForRange<platform::CUDADeviceContext>
for_range_fill_zero_pos_grad(dev_ctx, outer_dim * inner_dim);
FillFirstZeroPositionGradFunctor<T> fill_first_zero_pos_grad_functor(
first_zero_idx_data, dy_mul_x_filled_one_cumprod_reversed_cumsum,
mid_dim, inner_dim, dx_data);
for_range_fill_zero_pos_grad(fill_first_zero_pos_grad_functor);
}
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
// Register the forward and backward cumprod kernels for the CUDA place.
// Complex element types are included because cumprod supports complex
// tensors (the grad kernel conjugates x and y accordingly).
REGISTER_OP_CUDA_KERNEL(
cumprod, ops::CumprodOpCUDAKernel<float>, ops::CumprodOpCUDAKernel<double>,
ops::CumprodOpCUDAKernel<int>, ops::CumprodOpCUDAKernel<int64_t>,
ops::CumprodOpCUDAKernel<paddle::platform::complex<float>>,
ops::CumprodOpCUDAKernel<paddle::platform::complex<double>>);
REGISTER_OP_CUDA_KERNEL(
cumprod_grad, ops::CumprodGradOpCUDAKernel<float>,
ops::CumprodGradOpCUDAKernel<double>, ops::CumprodGradOpCUDAKernel<int>,
ops::CumprodGradOpCUDAKernel<int64_t>,
ops::CumprodGradOpCUDAKernel<paddle::platform::complex<float>>,
ops::CumprodGradOpCUDAKernel<paddle::platform::complex<double>>);
|
58027d5cb5d96023ebaa68c1c931949cee024828.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/framework/eigen.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/operators/lookup_table_op.h"
#include "paddle/fluid/platform/assert.h"
#include "paddle/fluid/platform/cuda_primitives.h"
#include "paddle/fluid/platform/float16.h"
namespace paddle {
namespace operators {
// Embedding-lookup forward kernel. Each (blockIdx.x, threadIdx.y) pair walks
// the id list with stride BlockDimY * GridDimX; the x-threads then
// cooperatively copy the D-wide table row for that id into the output row.
// When PaddingFlag is set, rows whose id equals padding_idx are written as
// zeros instead of being read from the table.
//   N = number of table rows, K = number of ids, D = embedding width.
template <typename T, int BlockDimX, int BlockDimY, int GridDimX,
bool PaddingFlag>
__global__ void LookupTable(T *output, const T *table, const int64_t *ids,
const int64_t N, const int64_t K, const int64_t D,
const int64_t padding_idx) {
int idx = threadIdx.x;
int idy = blockIdx.x + threadIdx.y * GridDimX;
while (idy < K) {
int64_t id = ids[idy];
// Device-side bounds checks: id must lie in [0, N).
PADDLE_ASSERT_MSG(
id >= 0,
"Variable value (input) of OP(fluid.layers.embedding) "
"expected >= 0 and < %ld, but got %ld. Please check input value.",
N, id);
PADDLE_ASSERT_MSG(
id < N,
"Variable value (input) of OP(fluid.layers.embedding) "
"expected >= 0 and < %ld, but got %ld. Please check input value.",
N, id);
T *out = output + idy * D;
const T *tab = table + id * D;
// Copy one embedding row; consecutive x-threads touch consecutive elements.
for (int i = idx; i < D; i += BlockDimX) {
if (PaddingFlag) {
if (id == padding_idx)
out[i] = static_cast<T>(0);
else
out[i] = tab[i];
} else {
out[i] = tab[i];
}
}
idy += BlockDimY * GridDimX;
}
}
// Embedding-lookup backward kernel: scatters each output-gradient row into
// the table-gradient row selected by its id. Atomic adds are required because
// several ids in the batch may reference the same table row concurrently.
//   N = number of table rows, K = number of ids, D = embedding width.
template <typename T, int BlockDimX, int BlockDimY, int GridDimX>
__global__ void LookupTableGrad(T *table, const T *output, const int64_t *ids,
const int64_t N, const int64_t K,
const int64_t D) {
int idx = threadIdx.x;
int idy = blockIdx.x + threadIdx.y * GridDimX;
while (idy < K) {
int64_t id = ids[idy];
// Device-side bounds checks: id must lie in [0, N).
PADDLE_ASSERT_MSG(
id >= 0,
"Variable value (input) of OP(fluid.layers.embedding) "
"expected >= 0 and < %ld, but got %ld. Please check input value.",
N, id);
PADDLE_ASSERT_MSG(
id < N,
"Variable value (input) of OP(fluid.layers.embedding) "
"expected >= 0 and < %ld, but got %ld. Please check input value.",
N, id);
const T *out = output + idy * D;
T *tab = table + id * D;
for (int i = idx; i < D; i += BlockDimX) {
// Atomic accumulation: multiple idy rows may share the same id.
paddle::platform::CudaAtomicAdd(&tab[i], out[i]);
}
idy += BlockDimY * GridDimX;
}
}
// Forward op kernel for lookup_table (embedding). If the `epmap` attribute is
// non-empty the parameter lives on remote parameter servers and rows are
// prefetched over RPC (distributed build only); otherwise the local table is
// gathered on the GPU by the LookupTable kernel with a fixed 8x1 grid of
// 128x8 blocks, matching the <T, 128, 8, 8> template arguments below.
template <typename T>
class LookupTableCUDAKernel : public framework::OpKernel<T> {
public:
void Compute(const framework::ExecutionContext &context) const override {
auto *table_t = context.Input<LoDTensor>("W");
auto *ids_t = context.Input<LoDTensor>("Ids");
auto *output_t = context.Output<LoDTensor>("Out");
int64_t padding_idx = context.Attr<int64_t>("padding_idx");
auto id_name = context.Inputs("Ids").front();
auto out_name = context.Outputs("Out").front();
// for remote prefetch
auto epmap = context.Attr<std::vector<std::string>>("epmap");
auto height_sections =
context.Attr<std::vector<int64_t>>("height_sections");
auto table_names = context.Attr<std::vector<std::string>>("table_names");
if (!epmap.empty()) {
// if epmap is not empty, then the parameter will be fetched from remote
// parameter
// server
#ifdef PADDLE_WITH_DISTRIBUTE
operators::distributed::prefetch(id_name, out_name, table_names, epmap,
height_sections, context,
context.scope());
#else
PADDLE_THROW(
"paddle is not compiled with distribute support, can not do "
"parameter prefetch!");
#endif
} else {
// Local lookup: N table rows of width D, K ids to gather.
size_t N = table_t->dims()[0];
size_t D = table_t->dims()[1];
size_t K = ids_t->numel();
auto *ids = ids_t->data<int64_t>();
auto *table = table_t->data<T>();
auto *output = output_t->mutable_data<T>(context.GetPlace());
dim3 threads(128, 8);
dim3 grids(8, 1);
// padding_idx == -1 means "no padding row": pick the specialization that
// skips the per-element padding check.
if (padding_idx == -1)
hipLaunchKernelGGL(( LookupTable<T, 128, 8, 8, false>),
dim3(grids), dim3(threads), 0, context.cuda_device_context().stream(),
output, table, ids, N, K, D, padding_idx);
else
hipLaunchKernelGGL(( LookupTable<T, 128, 8, 8, true>),
dim3(grids), dim3(threads), 0, context.cuda_device_context().stream(),
output, table, ids, N, K, D, padding_idx);
}
}
};
// Backward kernel for the lookup_table op.
// Sparse path: emits a SelectedRows gradient whose rows are the raw ids and
// whose values are the corresponding rows of d(Out) (duplicates are kept,
// not merged here). Dense path: zero-fills d(W) and scatter-adds d(Out)
// rows into it with atomics.
template <typename T>
class LookupTableGradCUDAKernel : public framework::OpKernel<T> {
 public:
  void Compute(const framework::ExecutionContext &context) const override {
    auto &dev_ctx =
        context.template device_context<platform::CUDADeviceContext>();
    bool is_sparse = context.Attr<bool>("is_sparse");
    // Since paddings are not trainable and fixed in forward, the gradient of
    // paddings makes no sense and we don't deal with it in backward.
    if (is_sparse) {
      auto *ids = context.Input<LoDTensor>("Ids");
      auto *table = context.Input<LoDTensor>("W");
      auto *d_output = context.Input<LoDTensor>(framework::GradVarName("Out"));
      auto *d_table = context.Output<SelectedRows>(framework::GradVarName("W"));
      auto *ids_data = ids->data<int64_t>();
      int64_t ids_num = ids->numel();
      auto stream = dev_ctx.stream();
      // copy GPU memory to CPU pinned memory
      framework::Vector<int64_t> new_rows;
      new_rows.resize(ids_num);
      auto gpu_place = boost::get<platform::CUDAPlace>(context.GetPlace());
      // TODO(yuyang18): Strange code here.
      // Device-to-device copy of the ids into the SelectedRows row index.
      memory::Copy(gpu_place, new_rows.CUDAMutableData(context.GetPlace()),
                   gpu_place, ids_data, ids_num * sizeof(int64_t), stream);
      d_table->set_rows(new_rows);
      auto *d_table_value = d_table->mutable_value();
      // One gradient row per id; width taken from W's second dimension.
      d_table_value->Resize({ids_num, table->dims()[1]});
      d_table_value->mutable_data<T>(context.GetPlace());
      auto *d_table_data = d_table_value->data<T>();
      auto *d_output_data = d_output->data<T>();
      auto d_output_dims = d_output->dims();
      // d(Out) flattened to 2-D must match the (ids_num x D) gradient value.
      PADDLE_ENFORCE_EQ(
          d_table_value->dims(),
          framework::flatten_to_2d(d_output_dims, d_output_dims.size() - 1));
      memory::Copy(gpu_place, d_table_data, gpu_place, d_output_data,
                   d_output->numel() * sizeof(T), stream);
    } else {
      auto ids_t = context.Input<LoDTensor>("Ids");
      auto d_output_t = context.Input<LoDTensor>(framework::GradVarName("Out"));
      auto d_table_t = context.Output<LoDTensor>(framework::GradVarName("W"));
      int N = d_table_t->dims()[0];
      int D = d_table_t->dims()[1];
      int K = ids_t->numel();
      const int64_t *ids = ids_t->data<int64_t>();
      const T *d_output = d_output_t->data<T>();
      T *d_table = d_table_t->mutable_data<T>(context.GetPlace());
      // Zero the whole gradient table before accumulating into it.
      auto t = framework::EigenVector<T>::Flatten(*d_table_t);
      t.device(*dev_ctx.eigen_device()) = t.constant(static_cast<T>(0));
      // Launch shape must match the kernel template parameters <T, 128, 8, 8>.
      dim3 threads(128, 8);
      dim3 grids(8, 1);
      hipLaunchKernelGGL(( LookupTableGrad<T, 128, 8, 8>), dim3(grids), dim3(threads), 0, dev_ctx.stream(),
          d_table, d_output, ids, N, K, D);
    }
  }
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
namespace plat = paddle::platform;
REGISTER_OP_CUDA_KERNEL(lookup_table, ops::LookupTableCUDAKernel<float>,
ops::LookupTableCUDAKernel<double>,
ops::LookupTableCUDAKernel<plat::float16>);
REGISTER_OP_CUDA_KERNEL(lookup_table_grad,
ops::LookupTableGradCUDAKernel<float>,
ops::LookupTableGradCUDAKernel<double>,
ops::LookupTableGradCUDAKernel<plat::float16>);
| 58027d5cb5d96023ebaa68c1c931949cee024828.cu | /* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/framework/eigen.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/operators/lookup_table_op.h"
#include "paddle/fluid/platform/assert.h"
#include "paddle/fluid/platform/cuda_primitives.h"
#include "paddle/fluid/platform/float16.h"
namespace paddle {
namespace operators {
// Embedding lookup kernel: copies row table[ids[idy]] (length D) into
// output row idy, for all idy in [0, K).
// Launch layout: blockDim = (BlockDimX, BlockDimY), gridDim.x = GridDimX;
// each (blockIdx.x, threadIdx.y) pair owns one output row at a time and
// threads along x copy the D elements with stride BlockDimX.
// When PaddingFlag is true, rows whose id equals padding_idx are zero-filled.
// Ids outside [0, N) trip a device-side assert.
template <typename T, int BlockDimX, int BlockDimY, int GridDimX,
          bool PaddingFlag>
__global__ void LookupTable(T *output, const T *table, const int64_t *ids,
                            const int64_t N, const int64_t K, const int64_t D,
                            const int64_t padding_idx) {
  int idx = threadIdx.x;
  int idy = blockIdx.x + threadIdx.y * GridDimX;
  while (idy < K) {
    int64_t id = ids[idy];
    PADDLE_ASSERT_MSG(
        id >= 0,
        "Variable value (input) of OP(fluid.layers.embedding) "
        "expected >= 0 and < %ld, but got %ld. Please check input value.",
        N, id);
    PADDLE_ASSERT_MSG(
        id < N,
        "Variable value (input) of OP(fluid.layers.embedding) "
        "expected >= 0 and < %ld, but got %ld. Please check input value.",
        N, id);
    T *out = output + idy * D;
    const T *tab = table + id * D;
    for (int i = idx; i < D; i += BlockDimX) {
      if (PaddingFlag) {
        if (id == padding_idx)
          out[i] = static_cast<T>(0);
        else
          out[i] = tab[i];
      } else {
        out[i] = tab[i];
      }
    }
    // Advance to the next output row handled by this (x-row of) threads.
    idy += BlockDimY * GridDimX;
  }
}
// Embedding backward kernel: for each id row, atomically accumulates the
// corresponding d(Out) row into table row ids[idy]. Atomics are required
// because the same id may appear multiple times in `ids`.
// Same launch layout as the forward LookupTable kernel.
template <typename T, int BlockDimX, int BlockDimY, int GridDimX>
__global__ void LookupTableGrad(T *table, const T *output, const int64_t *ids,
                                const int64_t N, const int64_t K,
                                const int64_t D) {
  int idx = threadIdx.x;
  int idy = blockIdx.x + threadIdx.y * GridDimX;
  while (idy < K) {
    int64_t id = ids[idy];
    PADDLE_ASSERT_MSG(
        id >= 0,
        "Variable value (input) of OP(fluid.layers.embedding) "
        "expected >= 0 and < %ld, but got %ld. Please check input value.",
        N, id);
    PADDLE_ASSERT_MSG(
        id < N,
        "Variable value (input) of OP(fluid.layers.embedding) "
        "expected >= 0 and < %ld, but got %ld. Please check input value.",
        N, id);
    const T *out = output + idy * D;
    T *tab = table + id * D;
    for (int i = idx; i < D; i += BlockDimX) {
      paddle::platform::CudaAtomicAdd(&tab[i], out[i]);
    }
    idy += BlockDimY * GridDimX;
  }
}
// Forward kernel for the lookup_table (embedding) op: gathers rows of W
// (N x D) indexed by Ids (K entries) into Out (K x D), or prefetches them
// from remote parameter servers when "epmap" is set (distributed mode).
template <typename T>
class LookupTableCUDAKernel : public framework::OpKernel<T> {
 public:
  void Compute(const framework::ExecutionContext &context) const override {
    auto *table_t = context.Input<LoDTensor>("W");
    auto *ids_t = context.Input<LoDTensor>("Ids");
    auto *output_t = context.Output<LoDTensor>("Out");
    int64_t padding_idx = context.Attr<int64_t>("padding_idx");
    auto id_name = context.Inputs("Ids").front();
    auto out_name = context.Outputs("Out").front();
    // for remote prefetch
    auto epmap = context.Attr<std::vector<std::string>>("epmap");
    auto height_sections =
        context.Attr<std::vector<int64_t>>("height_sections");
    auto table_names = context.Attr<std::vector<std::string>>("table_names");
    if (!epmap.empty()) {
      // if epmap is not empty, then the parameter will be fetched from remote
      // parameter
      // server
#ifdef PADDLE_WITH_DISTRIBUTE
      operators::distributed::prefetch(id_name, out_name, table_names, epmap,
                                       height_sections, context,
                                       context.scope());
#else
      PADDLE_THROW(
          "paddle is not compiled with distribute support, can not do "
          "parameter prefetch!");
#endif
    } else {
      // Local lookup path. N = vocab rows, D = embedding width, K = #ids.
      size_t N = table_t->dims()[0];
      size_t D = table_t->dims()[1];
      size_t K = ids_t->numel();
      auto *ids = ids_t->data<int64_t>();
      auto *table = table_t->data<T>();
      auto *output = output_t->mutable_data<T>(context.GetPlace());
      // Launch shape is tied to the kernel's template arguments
      // <T, BlockDimX=128, BlockDimY=8, GridDimX=8, PaddingFlag>.
      dim3 threads(128, 8);
      dim3 grids(8, 1);
      // Dispatch on padding at compile time so the non-padding case pays
      // nothing for the padding branch.
      if (padding_idx == -1)
        LookupTable<T, 128, 8, 8, false><<<
            grids, threads, 0, context.cuda_device_context().stream()>>>(
            output, table, ids, N, K, D, padding_idx);
      else
        LookupTable<T, 128, 8, 8, true><<<
            grids, threads, 0, context.cuda_device_context().stream()>>>(
            output, table, ids, N, K, D, padding_idx);
    }
  }
};
// Backward kernel for the lookup_table op.
// Sparse path: builds a SelectedRows gradient (rows = raw ids, values =
// d(Out) rows; duplicate ids are not merged here). Dense path: zeroes d(W)
// and scatter-adds d(Out) rows into it via the LookupTableGrad kernel.
template <typename T>
class LookupTableGradCUDAKernel : public framework::OpKernel<T> {
 public:
  void Compute(const framework::ExecutionContext &context) const override {
    auto &dev_ctx =
        context.template device_context<platform::CUDADeviceContext>();
    bool is_sparse = context.Attr<bool>("is_sparse");
    // Since paddings are not trainable and fixed in forward, the gradient of
    // paddings makes no sense and we don't deal with it in backward.
    if (is_sparse) {
      auto *ids = context.Input<LoDTensor>("Ids");
      auto *table = context.Input<LoDTensor>("W");
      auto *d_output = context.Input<LoDTensor>(framework::GradVarName("Out"));
      auto *d_table = context.Output<SelectedRows>(framework::GradVarName("W"));
      auto *ids_data = ids->data<int64_t>();
      int64_t ids_num = ids->numel();
      auto stream = dev_ctx.stream();
      // copy GPU memory to CPU pinned memory
      framework::Vector<int64_t> new_rows;
      new_rows.resize(ids_num);
      auto gpu_place = boost::get<platform::CUDAPlace>(context.GetPlace());
      // TODO(yuyang18): Strange code here.
      // Device-to-device copy of ids into the SelectedRows row index.
      memory::Copy(gpu_place, new_rows.CUDAMutableData(context.GetPlace()),
                   gpu_place, ids_data, ids_num * sizeof(int64_t), stream);
      d_table->set_rows(new_rows);
      auto *d_table_value = d_table->mutable_value();
      // One gradient row per id, width from W's second dimension.
      d_table_value->Resize({ids_num, table->dims()[1]});
      d_table_value->mutable_data<T>(context.GetPlace());
      auto *d_table_data = d_table_value->data<T>();
      auto *d_output_data = d_output->data<T>();
      auto d_output_dims = d_output->dims();
      // Sanity check: d(Out) flattened to 2-D matches the gradient value.
      PADDLE_ENFORCE_EQ(
          d_table_value->dims(),
          framework::flatten_to_2d(d_output_dims, d_output_dims.size() - 1));
      memory::Copy(gpu_place, d_table_data, gpu_place, d_output_data,
                   d_output->numel() * sizeof(T), stream);
    } else {
      auto ids_t = context.Input<LoDTensor>("Ids");
      auto d_output_t = context.Input<LoDTensor>(framework::GradVarName("Out"));
      auto d_table_t = context.Output<LoDTensor>(framework::GradVarName("W"));
      int N = d_table_t->dims()[0];
      int D = d_table_t->dims()[1];
      int K = ids_t->numel();
      const int64_t *ids = ids_t->data<int64_t>();
      const T *d_output = d_output_t->data<T>();
      T *d_table = d_table_t->mutable_data<T>(context.GetPlace());
      // Zero the gradient table before the kernel accumulates into it.
      auto t = framework::EigenVector<T>::Flatten(*d_table_t);
      t.device(*dev_ctx.eigen_device()) = t.constant(static_cast<T>(0));
      // Launch shape must match kernel template parameters <T, 128, 8, 8>.
      dim3 threads(128, 8);
      dim3 grids(8, 1);
      LookupTableGrad<T, 128, 8, 8><<<grids, threads, 0, dev_ctx.stream()>>>(
          d_table, d_output, ids, N, K, D);
    }
  }
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
namespace plat = paddle::platform;
REGISTER_OP_CUDA_KERNEL(lookup_table, ops::LookupTableCUDAKernel<float>,
ops::LookupTableCUDAKernel<double>,
ops::LookupTableCUDAKernel<plat::float16>);
REGISTER_OP_CUDA_KERNEL(lookup_table_grad,
ops::LookupTableGradCUDAKernel<float>,
ops::LookupTableGradCUDAKernel<double>,
ops::LookupTableGradCUDAKernel<plat::float16>);
|
1d3c92e3993b469cd77e6aafd5e78f4228f59f85.hip | // !!! This is a file automatically generated by hipify!!!
#include <stdlib.h>
#include <stdio.h>
#include <hip/hip_runtime.h>
#include <hip/hip_runtime.h>
#include <assert.h>
// In-place kernel: subtracts 16 from every element of arrayIO.
// NOTE(review): there is no bounds guard, so the caller must launch exactly
// one thread per array element.
__global__ void cu_arrayDelete(int* arrayIO)
{
    const int tid = threadIdx.x + blockDim.x * blockIdx.x;
    arrayIO[tid] -= 16;
}
void checkCUDAError(const char *msg)
{
hipError_t err = hipGetLastError();
if (hipSuccess != err) {
printf("Cuda error: %s: %s./n", msg, hipGetErrorString(err));
exit(EXIT_FAILURE);
}
}
int page_locked_mem(int argc, char *argv[])
{
int* h_pData = NULL;
int* d_pData = NULL;
hipDeviceProp_t deviceProp;
hipGetDeviceProperties(&deviceProp, 0);
if (!deviceProp.canMapHostMemory) {
printf("Device %d cannot map host memory!/n");
}
hipSetDeviceFlags(hipDeviceMapHost);
checkCUDAError("hipSetDeviceFlags");
hipHostMalloc(&h_pData, 512, hipHostMallocMapped);
hipHostGetDevicePointer((void **)&d_pData, (void *)h_pData, 0);
for (int i = 0; i<128; i++)
{
h_pData[i] = 255;
}
cu_arrayDelete << <4, 32 >> >(d_pData);
hipDeviceSynchronize();
for (int i = 0; i<128; i++)
printf("%d/n", h_pData[0]);
return 0;
} | 1d3c92e3993b469cd77e6aafd5e78f4228f59f85.cu | #include <stdlib.h>
#include <stdio.h>
#include <cuda_runtime.h>
#include <cuda.h>
#include <assert.h>
// In-place kernel: subtracts 16 from every element of arrayIO.
// NOTE(review): there is no bounds guard, so the caller must launch exactly
// one thread per array element.
__global__ void cu_arrayDelete(int* arrayIO)
{
    const int tid = threadIdx.x + blockDim.x * blockIdx.x;
    arrayIO[tid] -= 16;
}
// Checks the most recent CUDA error state and aborts with a descriptive
// message when a prior API call or kernel launch failed.
// msg: caller-supplied label identifying the call site.
void checkCUDAError(const char *msg)
{
    cudaError_t err = cudaGetLastError();
    if (cudaSuccess != err) {
        // Fixed: the original format string used "/n" (a literal slash-n)
        // instead of the newline escape "\n".
        printf("Cuda error: %s: %s.\n", msg, cudaGetErrorString(err));
        exit(EXIT_FAILURE);
    }
}
// Demonstrates zero-copy (mapped page-locked) host memory: the buffer is
// written by the CPU, mutated in place by a kernel through its device alias,
// then read back by the CPU without an explicit copy.
// Returns 0 on success; exits via checkCUDAError on any CUDA failure.
int page_locked_mem(int argc, char *argv[])
{
    int* h_pData = NULL;
    int* d_pData = NULL;
    cudaDeviceProp deviceProp;
    cudaGetDeviceProperties(&deviceProp, 0);
    if (!deviceProp.canMapHostMemory) {
        // Fixed: the original printf had a "%d" with no matching argument
        // (undefined behavior) and "/n" instead of "\n".
        printf("Device %d cannot map host memory!\n", 0);
    }
    cudaSetDeviceFlags(cudaDeviceMapHost);
    checkCUDAError("cudaSetDeviceFlags");
    // 512 bytes == 128 ints, matching the 4 * 32 threads launched below.
    cudaHostAlloc(&h_pData, 512, cudaHostAllocMapped);
    checkCUDAError("cudaHostAlloc");
    cudaHostGetDevicePointer((void **)&d_pData, (void *)h_pData, 0);
    checkCUDAError("cudaHostGetDevicePointer");
    for (int i = 0; i < 128; i++)
    {
        h_pData[i] = 255;
    }
    cu_arrayDelete<<<4, 32>>>(d_pData);
    checkCUDAError("cu_arrayDelete launch");
    // cudaThreadSynchronize is deprecated; cudaDeviceSynchronize is the
    // modern equivalent. Synchronize before the host reads the mapped buffer.
    cudaDeviceSynchronize();
    checkCUDAError("cudaDeviceSynchronize");
    // Fixed: the original printed h_pData[0] 128 times and used "/n".
    for (int i = 0; i < 128; i++)
        printf("%d\n", h_pData[i]);
    // Fixed: the original leaked the pinned allocation.
    cudaFreeHost(h_pData);
    return 0;
} |
f035e009052c8107d341d09e35f38168d550a5e9.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*!
* Copyright 2017 XGBoost contributors
*/
#include <thrust/execution_policy.h>
#include <thrust/reduce.h>
#include <thrust/sequence.h>
#include <xgboost/tree_updater.h>
#include <algorithm>
#include <memory>
#include <queue>
#include <utility>
#include <vector>
#include "../common/compressed_iterator.h"
#include "../common/device_helpers.cuh"
#include "../common/hist_util.h"
#include "../common/host_device_vector.h"
#include "../common/timer.h"
#include "param.h"
#include "updater_gpu_common.cuh"
namespace xgboost {
namespace tree {
DMLC_REGISTRY_FILE_TAG(updater_gpu_hist);
typedef bst_gpair_precise gpair_sum_t;
// Block-wide sum of the histogram bins in [begin, end).
// Every thread of the block must call this (it contains a __syncthreads());
// the result is broadcast to all threads through shared memory.
// BLOCK_THREADS must equal the launch's blockDim.x.
template <int BLOCK_THREADS, typename reduce_t, typename temp_storage_t>
__device__ gpair_sum_t ReduceFeature(const gpair_sum_t* begin,
                                     const gpair_sum_t* end,
                                     temp_storage_t* temp_storage) {
  __shared__ cub::Uninitialized<gpair_sum_t> uninitialized_sum;
  gpair_sum_t& shared_sum = uninitialized_sum.Alias();
  gpair_sum_t local_sum = gpair_sum_t();
  // Sweep the range one BLOCK_THREADS-wide tile at a time; the CUB reduce
  // returns the tile total to thread 0, which accumulates across tiles.
  for (auto itr = begin; itr < end; itr += BLOCK_THREADS) {
    bool thread_active = itr + threadIdx.x < end;
    // Scan histogram
    gpair_sum_t bin = thread_active ? *(itr + threadIdx.x) : gpair_sum_t();
    local_sum += reduce_t(temp_storage->sum_reduce).Reduce(bin, hipcub::Sum());
  }
  // Only thread 0 holds the valid total; publish it for the whole block.
  if (threadIdx.x == 0) {
    shared_sum = local_sum;
  }
  __syncthreads();
  return shared_sum;
}
// Evaluates every split point of one feature against the node's statistics
// and records the best one in *best_split.
// hist holds per-bin gradient sums; the feature's bins occupy
// [feature_segments[fidx], feature_segments[fidx + 1]). A cooperative
// exclusive scan turns bin sums into left-child sums, gain is computed per
// candidate threshold, and the block arg-max picks the winner.
// Must be called by all threads of the block (contains __syncthreads()).
template <int BLOCK_THREADS, typename reduce_t, typename scan_t,
          typename max_reduce_t, typename temp_storage_t>
__device__ void EvaluateFeature(int fidx, const gpair_sum_t* hist,
                                const int* feature_segments, float min_fvalue,
                                const float* gidx_fvalue_map,
                                DeviceSplitCandidate* best_split,
                                const DeviceNodeStats& node,
                                const GPUTrainingParam& param,
                                temp_storage_t* temp_storage, int constraint,
                                const ValueConstraint& value_constraint) {
  int gidx_begin = feature_segments[fidx];
  int gidx_end = feature_segments[fidx + 1];
  // Total gradient mass observed for this feature (used to derive the
  // "missing" statistics below).
  gpair_sum_t feature_sum = ReduceFeature<BLOCK_THREADS, reduce_t>(
      hist + gidx_begin, hist + gidx_end, temp_storage);
  // prefix_op carries the running scan total across tile iterations.
  auto prefix_op = SumCallbackOp<gpair_sum_t>();
  for (int scan_begin = gidx_begin; scan_begin < gidx_end;
       scan_begin += BLOCK_THREADS) {
    bool thread_active = scan_begin + threadIdx.x < gidx_end;
    gpair_sum_t bin =
        thread_active ? hist[scan_begin + threadIdx.x] : gpair_sum_t();
    // After the scan, `bin` is the sum of all bins strictly before this one,
    // i.e. the left-child statistics for a split at this threshold.
    scan_t(temp_storage->scan).ExclusiveScan(bin, bin, hipcub::Sum(), prefix_op);
    // Calculate gain
    gpair_sum_t parent_sum = gpair_sum_t(node.sum_gradients);
    // Rows where this feature is missing: parent total minus feature total.
    gpair_sum_t missing = parent_sum - feature_sum;
    bool missing_left = true;
    const float null_gain = -FLT_MAX;
    float gain = null_gain;
    if (thread_active) {
      // Also decides the default direction for missing values
      // (written into missing_left).
      gain = loss_chg_missing(bin, missing, parent_sum, node.root_gain, param,
                              constraint, value_constraint, missing_left);
    }
    __syncthreads();
    // Find thread with best gain
    hipcub::KeyValuePair<int, float> tuple(threadIdx.x, gain);
    hipcub::KeyValuePair<int, float> best =
        max_reduce_t(temp_storage->max_reduce).Reduce(tuple, hipcub::ArgMax());
    __shared__ hipcub::KeyValuePair<int, float> block_max;
    if (threadIdx.x == 0) {
      block_max = best;
    }
    __syncthreads();
    // Best thread updates split
    if (threadIdx.x == block_max.key) {
      int gidx = scan_begin + threadIdx.x;
      // The split value is the upper bound of the previous bin, or the
      // feature minimum for the first bin.
      float fvalue =
          gidx == gidx_begin ? min_fvalue : gidx_fvalue_map[gidx - 1];
      gpair_sum_t left = missing_left ? bin + missing : bin;
      gpair_sum_t right = parent_sum - left;
      best_split->Update(gain, missing_left ? LeftDir : RightDir, fvalue, fidx,
                         left, right, param);
    }
    __syncthreads();
  }
}
// One thread block per feature (blockIdx.x == fidx): evaluates all split
// candidates of that feature for node `nidx` and writes the feature's best
// split into d_split[fidx]. The host side reduces across features.
template <int BLOCK_THREADS>
__global__ void evaluate_split_kernel(
    const gpair_sum_t* d_hist, int nidx, uint64_t n_features,
    DeviceNodeStats nodes, const int* d_feature_segments,
    const float* d_fidx_min_map, const float* d_gidx_fvalue_map,
    GPUTrainingParam gpu_param, DeviceSplitCandidate* d_split,
    ValueConstraint value_constraint, int* d_monotonic_constraints) {
  typedef hipcub::KeyValuePair<int, float> ArgMaxT;
  typedef hipcub::BlockScan<gpair_sum_t, BLOCK_THREADS, cub::BLOCK_SCAN_WARP_SCANS>
      BlockScanT;
  typedef hipcub::BlockReduce<ArgMaxT, BLOCK_THREADS> MaxReduceT;
  typedef hipcub::BlockReduce<gpair_sum_t, BLOCK_THREADS> SumReduceT;
  // Scan and reductions never run concurrently, so their shared-memory
  // scratch can alias each other.
  union TempStorage {
    typename BlockScanT::TempStorage scan;
    typename MaxReduceT::TempStorage max_reduce;
    typename SumReduceT::TempStorage sum_reduce;
  };
  __shared__ cub::Uninitialized<DeviceSplitCandidate> uninitialized_split;
  DeviceSplitCandidate& best_split = uninitialized_split.Alias();
  __shared__ TempStorage temp_storage;
  if (threadIdx.x == 0) {
    best_split = DeviceSplitCandidate();
  }
  __syncthreads();
  auto fidx = blockIdx.x;
  auto constraint = d_monotonic_constraints[fidx];
  EvaluateFeature<BLOCK_THREADS, SumReduceT, BlockScanT, MaxReduceT>(
      fidx, d_hist, d_feature_segments, d_fidx_min_map[fidx], d_gidx_fvalue_map,
      &best_split, nodes, gpu_param, &temp_storage, constraint,
      value_constraint);
  __syncthreads();
  if (threadIdx.x == 0) {
    // Record best loss
    d_split[fidx] = best_split;
  }
}
// Find a gidx value for a given feature otherwise return -1 if not found.
// Binary-searches positions [begin, end) of `data` (a row of bin indices;
// presumably sorted ascending within the row — the ELLPACK layout appears to
// guarantee this) for a value in [fidx_begin, fidx_end), i.e. a bin that
// belongs to one particular feature. Returns the bin VALUE, not its position.
template <typename gidx_iter_t>
__device__ int BinarySearchRow(bst_uint begin, bst_uint end, gidx_iter_t data,
                               int fidx_begin, int fidx_end) {
  bst_uint previous_middle = UINT32_MAX;
  while (end != begin) {
    auto middle = begin + (end - begin) / 2;
    // `begin = middle` below does not always shrink the range, so
    // termination relies on detecting a repeated midpoint here.
    if (middle == previous_middle) {
      break;
    }
    previous_middle = middle;
    auto gidx = data[middle];
    if (gidx >= fidx_begin && gidx < fidx_end) {
      return gidx;
    } else if (gidx < fidx_begin) {
      begin = middle;
    } else {
      end = middle;
    }
  }
  // Value is missing
  return -1;
}
// Per-device storage for gradient histograms: one contiguous array of
// n_bins gpair_sum_t entries per tree node.
struct DeviceHistogram {
  dh::bulk_allocator<dh::memory_type::DEVICE> ba;
  dh::dvec<gpair_sum_t> data;
  int n_bins;  // bins per node (histogram width)
  // Allocates max_nodes * n_bins entries on device `device_idx`.
  void Init(int device_idx, int max_nodes, int n_bins, bool silent) {
    this->n_bins = n_bins;
    ba.allocate(device_idx, silent, &data, size_t(max_nodes) * size_t(n_bins));
  }
  // Zero-fills all histograms (gpair_sum_t() is the zero pair).
  void Reset() { data.fill(gpair_sum_t()); }
  // Device pointer to the start of node nidx's histogram.
  gpair_sum_t* GetHistPtr(int nidx) { return data.data() + nidx * n_bins; }
  // Debug helper: copies the data to host and prints node nidx's bins.
  void PrintNidx(int nidx) const {
    auto h_data = data.as_vector();
    std::cout << "nidx " << nidx << ":\n";
    for (int i = n_bins * nidx; i < n_bins * (nidx + 1); i++) {
      std::cout << h_data[i] << " ";
    }
    std::cout << "\n";
  }
};
// Manage memory for a single GPU: holds the compressed (ELLPACK) quantised
// feature matrix for one contiguous row range of the input, the per-row
// gradient pairs and node assignments, and the per-node histograms.
struct DeviceShard {
  // Half-open range [begin, end) into ridx/position holding the rows
  // currently assigned to one tree node.
  struct Segment {
    size_t begin;
    size_t end;
    Segment() : begin(0), end(0) {}
    Segment(size_t begin, size_t end) : begin(begin), end(end) {
      CHECK_GE(end, begin);
    }
    size_t Size() const { return end - begin; }
  };
  int device_idx;
  int normalised_device_idx;  // Device index counting from param.gpu_id
  dh::bulk_allocator<dh::memory_type::DEVICE> ba;
  dh::dvec<common::compressed_byte_t> gidx_buffer;  // compressed ELLPACK bins
  dh::dvec<bst_gpair> gpair;
  dh::dvec2<bst_uint> ridx;  // Row index relative to this shard
  dh::dvec2<int> position;   // node id per row (double-buffered for sorting)
  std::vector<Segment> ridx_segments;  // per-node row ranges into ridx
  dh::dvec<int> feature_segments;      // per-feature bin offsets (cut row_ptr)
  dh::dvec<float> gidx_fvalue_map;     // bin id -> cut (upper) feature value
  dh::dvec<float> min_fvalue;          // per-feature minimum value
  dh::dvec<int> monotone_constraints;
  std::vector<bst_gpair> node_sum_gradients;
  common::CompressedIterator<uint32_t> gidx;  // decoder over gidx_buffer
  int row_stride;          // ELLPACK width: max #entries of any row
  bst_uint row_begin_idx;  // The row offset for this shard
  bst_uint row_end_idx;
  bst_uint n_rows;
  int n_bins;
  int null_gidx_value;  // sentinel bin id marking a missing entry
  DeviceHistogram hist;
  TrainParam param;
  int64_t* tmp_pinned;  // Small amount of staging memory
  std::vector<hipStream_t> streams;
  dh::CubMemory temp_memory;
  // Builds the ELLPACK representation of rows [row_begin, row_end) of gmat,
  // compresses it into gidx_buffer, copies the quantile cut data to the
  // device, and sizes all per-node buffers for the maximum tree size.
  DeviceShard(int device_idx, int normalised_device_idx,
              const common::GHistIndexMatrix& gmat, bst_uint row_begin,
              bst_uint row_end, int n_bins, TrainParam param)
      : device_idx(device_idx),
        normalised_device_idx(normalised_device_idx),
        row_begin_idx(row_begin),
        row_end_idx(row_end),
        n_rows(row_end - row_begin),
        n_bins(n_bins),
        null_gidx_value(n_bins),
        param(param) {
    // Convert to ELLPACK matrix representation
    int max_elements_row = 0;
    for (auto i = row_begin; i < row_end; i++) {
      max_elements_row =
          (std::max)(max_elements_row,
                     static_cast<int>(gmat.row_ptr[i + 1] - gmat.row_ptr[i]));
    }
    row_stride = max_elements_row;
    // Rows shorter than row_stride are padded with the null sentinel.
    std::vector<int> ellpack_matrix(row_stride * n_rows, null_gidx_value);
    for (auto i = row_begin; i < row_end; i++) {
      int row_count = 0;
      for (auto j = gmat.row_ptr[i]; j < gmat.row_ptr[i + 1]; j++) {
        ellpack_matrix[(i - row_begin) * row_stride + row_count] =
            gmat.index[j];
        row_count++;
      }
    }
    // Allocate
    // num_symbols includes the null sentinel in addition to the n_bins ids.
    int num_symbols = n_bins + 1;
    size_t compressed_size_bytes =
        common::CompressedBufferWriter::CalculateBufferSize(
            ellpack_matrix.size(), num_symbols);
    CHECK(!(param.max_leaves == 0 && param.max_depth == 0))
        << "Max leaves and max depth cannot both be unconstrained for "
           "gpu_hist.";
    int max_nodes =
        param.max_leaves > 0 ? param.max_leaves * 2 : n_nodes(param.max_depth);
    ba.allocate(device_idx, param.silent, &gidx_buffer, compressed_size_bytes,
                &gpair, n_rows, &ridx, n_rows, &position, n_rows,
                &feature_segments, gmat.cut->row_ptr.size(), &gidx_fvalue_map,
                gmat.cut->cut.size(), &min_fvalue, gmat.cut->min_val.size(),
                &monotone_constraints, param.monotone_constraints.size());
    gidx_fvalue_map = gmat.cut->cut;
    min_fvalue = gmat.cut->min_val;
    feature_segments = gmat.cut->row_ptr;
    monotone_constraints = param.monotone_constraints;
    node_sum_gradients.resize(max_nodes);
    ridx_segments.resize(max_nodes);
    // Compress gidx
    common::CompressedBufferWriter cbw(num_symbols);
    std::vector<common::compressed_byte_t> host_buffer(gidx_buffer.size());
    cbw.Write(host_buffer.data(), ellpack_matrix.begin(), ellpack_matrix.end());
    gidx_buffer = host_buffer;
    gidx =
        common::CompressedIterator<uint32_t>(gidx_buffer.data(), num_symbols);
    common::CompressedIterator<uint32_t> ci_host(host_buffer.data(),
                                                 num_symbols);
    // Init histogram
    hist.Init(device_idx, max_nodes, gmat.cut->row_ptr.back(), param.silent);
    // Pinned staging slot used by UpdatePosition for the left-child count.
    dh::safe_cuda(hipHostMalloc(&tmp_pinned, sizeof(int64_t)));
  }
  ~DeviceShard() {
    for (auto& stream : streams) {
      dh::safe_cuda(hipStreamDestroy(stream));
    }
    dh::safe_cuda(hipHostFree(tmp_pinned));
  }
  // Get vector of at least n initialised streams
  // Note: growing the pool destroys and recreates every existing stream.
  std::vector<hipStream_t>& GetStreams(int n) {
    if (n > streams.size()) {
      for (auto& stream : streams) {
        dh::safe_cuda(hipStreamDestroy(stream));
      }
      streams.clear();
      streams.resize(n);
      for (auto& stream : streams) {
        dh::safe_cuda(hipStreamCreate(&stream));
      }
    }
    return streams;
  }
  // Reset values for each update iteration: all rows back to node 0, row
  // indices back to identity order, this shard's slice of the gradients
  // copied in (then subsampled), histograms zeroed.
  void Reset(HostDeviceVector<bst_gpair>* dh_gpair, int device) {
    auto begin = dh_gpair->tbegin(device);
    dh::safe_cuda(hipSetDevice(device_idx));
    position.current_dvec().fill(0);
    std::fill(node_sum_gradients.begin(), node_sum_gradients.end(),
              bst_gpair());
    thrust::sequence(ridx.current_dvec().tbegin(), ridx.current_dvec().tend());
    std::fill(ridx_segments.begin(), ridx_segments.end(), Segment(0, 0));
    ridx_segments.front() = Segment(0, ridx.size());
    this->gpair.copy(begin + row_begin_idx, begin + row_end_idx);
    subsample_gpair(&gpair, param.subsample, row_begin_idx);
    hist.Reset();
  }
  // Accumulates the gradient histogram for node nidx: one thread per
  // (row, slot) element of the node's segment, atomic add per bin.
  void BuildHist(int nidx) {
    auto segment = ridx_segments[nidx];
    auto d_node_hist = hist.GetHistPtr(nidx);
    auto d_gidx = gidx;
    auto d_ridx = ridx.current();
    auto d_gpair = gpair.data();
    auto row_stride = this->row_stride;
    auto null_gidx_value = this->null_gidx_value;
    auto n_elements = segment.Size() * row_stride;
    dh::launch_n(device_idx, n_elements, [=] __device__(size_t idx) {
      int ridx = d_ridx[(idx / row_stride) + segment.begin];
      int gidx = d_gidx[ridx * row_stride + idx % row_stride];
      if (gidx != null_gidx_value) {
        AtomicAddGpair(d_node_hist + gidx, d_gpair[ridx]);
      }
    });
  }
  // hist[nidx_subtraction] = hist[nidx_parent] - hist[nidx_histogram]:
  // derives one sibling's histogram from the parent's instead of rebuilding.
  void SubtractionTrick(int nidx_parent, int nidx_histogram,
                        int nidx_subtraction) {
    auto d_node_hist_parent = hist.GetHistPtr(nidx_parent);
    auto d_node_hist_histogram = hist.GetHistPtr(nidx_histogram);
    auto d_node_hist_subtraction = hist.GetHistPtr(nidx_subtraction);
    dh::launch_n(device_idx, hist.n_bins, [=] __device__(size_t idx) {
      d_node_hist_subtraction[idx] =
          d_node_hist_parent[idx] - d_node_hist_histogram[idx];
    });
  }
  // Warp-aggregated counter: ballots `val == left_nidx` across the warp and
  // has lane 0 add the popcount to *d_count with a single atomic.
  __device__ void CountLeft(int64_t* d_count, int val, int left_nidx) {
    unsigned ballot = __ballot(val == left_nidx);
    if (threadIdx.x % 32 == 0) {
      atomicAdd(reinterpret_cast<unsigned long long*>(d_count),    // NOLINT
                static_cast<unsigned long long>(__popc(ballot)));  // NOLINT
    }
  }
  // Applies the chosen split of node nidx: assigns each of its rows to
  // left_nidx or right_nidx by comparing the row's bin for feature fidx
  // against split_gidx (missing values follow default_dir_left), then
  // partitions the segment so the left child's rows come first.
  void UpdatePosition(int nidx, int left_nidx, int right_nidx, int fidx,
                      int split_gidx, bool default_dir_left, bool is_dense,
                      int fidx_begin, int fidx_end) {
    dh::safe_cuda(hipSetDevice(device_idx));
    temp_memory.LazyAllocate(sizeof(int64_t));
    auto d_left_count = temp_memory.Pointer<int64_t>();
    dh::safe_cuda(hipMemset(d_left_count, 0, sizeof(int64_t)));
    auto segment = ridx_segments[nidx];
    auto d_ridx = ridx.current();
    auto d_position = position.current();
    auto d_gidx = gidx;
    auto row_stride = this->row_stride;
    dh::launch_n<1, 512>(
        device_idx, segment.Size(), [=] __device__(bst_uint idx) {
          idx += segment.begin;
          auto ridx = d_ridx[idx];
          auto row_begin = row_stride * ridx;
          auto row_end = row_begin + row_stride;
          auto gidx = -1;
          if (is_dense) {
            // Dense rows store one bin per feature at a fixed offset.
            gidx = d_gidx[row_begin + fidx];
          } else {
            gidx = BinarySearchRow(row_begin, row_end, d_gidx, fidx_begin,
                                   fidx_end);
          }
          int position;
          if (gidx >= 0) {
            // Feature is found
            position = gidx <= split_gidx ? left_nidx : right_nidx;
          } else {
            // Feature is missing
            position = default_dir_left ? left_nidx : right_nidx;
          }
          CountLeft(d_left_count, position, left_nidx);
          d_position[idx] = position;
        });
    // Stage the left-child count through pinned memory for the host.
    dh::safe_cuda(hipMemcpy(tmp_pinned, d_left_count, sizeof(int64_t),
                            hipMemcpyDeviceToHost));
    auto left_count = *tmp_pinned;
    SortPosition(segment, left_nidx, right_nidx);
    // dh::safe_cuda(hipStreamSynchronize(stream));
    ridx_segments[left_nidx] =
        Segment(segment.begin, segment.begin + left_count);
    ridx_segments[right_nidx] =
        Segment(segment.begin + left_count, segment.end);
  }
  // Stable radix-sort of the segment's (position, ridx) pairs by position,
  // so rows of left_nidx precede rows of right_nidx; results are sorted into
  // the back buffers and copied back into the current buffers.
  void SortPosition(const Segment& segment, int left_nidx, int right_nidx) {
    // Only the bits needed to distinguish the two node ids are sorted.
    int min_bits = 0;
    int max_bits = static_cast<int>(
        ::ceil(std::log2((std::max)(left_nidx, right_nidx) + 1)));
    // First call with a null buffer only queries the scratch size.
    size_t temp_storage_bytes = 0;
    hipcub::DeviceRadixSort::SortPairs(
        nullptr, temp_storage_bytes, position.current() + segment.begin,
        position.other() + segment.begin, ridx.current() + segment.begin,
        ridx.other() + segment.begin, segment.Size(), min_bits, max_bits);
    temp_memory.LazyAllocate(temp_storage_bytes);
    hipcub::DeviceRadixSort::SortPairs(
        temp_memory.d_temp_storage, temp_memory.temp_storage_bytes,
        position.current() + segment.begin, position.other() + segment.begin,
        ridx.current() + segment.begin, ridx.other() + segment.begin,
        segment.Size(), min_bits, max_bits);
    dh::safe_cuda(hipMemcpy(
        position.current() + segment.begin, position.other() + segment.begin,
        segment.Size() * sizeof(int), hipMemcpyDeviceToDevice));
    dh::safe_cuda(hipMemcpy(
        ridx.current() + segment.begin, ridx.other() + segment.begin,
        segment.Size() * sizeof(bst_uint), hipMemcpyDeviceToDevice));
  }
};
class GPUHistMaker : public TreeUpdater {
public:
struct ExpandEntry;
GPUHistMaker() : initialised(false) {}
~GPUHistMaker() {}
void Init(
const std::vector<std::pair<std::string, std::string>>& args) override {
param.InitAllowUnknown(args);
CHECK(param.n_gpus != 0) << "Must have at least one device";
n_devices = param.n_gpus;
dh::check_compute_capability();
if (param.grow_policy == TrainParam::kLossGuide) {
qexpand_.reset(new ExpandQueue(loss_guide));
} else {
qexpand_.reset(new ExpandQueue(depth_wise));
}
monitor.Init("updater_gpu_hist", param.debug_verbose);
}
void Update(const std::vector<bst_gpair>& gpair, DMatrix* dmat,
const std::vector<RegTree*>& trees) override {
monitor.Start("Update", dList);
// TODO(canonizer): move it into the class if this ever becomes a bottleneck
HostDeviceVector<bst_gpair> gpair_d(gpair.size(), param.gpu_id);
dh::safe_cuda(hipSetDevice(param.gpu_id));
thrust::copy(gpair.begin(), gpair.end(), gpair_d.tbegin(param.gpu_id));
Update(&gpair_d, dmat, trees);
monitor.Stop("Update", dList);
}
void Update(HostDeviceVector<bst_gpair>* gpair, DMatrix* dmat,
const std::vector<RegTree*>& trees) override {
monitor.Start("Update", dList);
UpdateHelper(gpair, dmat, trees);
monitor.Stop("Update", dList);
}
private:
void UpdateHelper(HostDeviceVector<bst_gpair>* gpair, DMatrix* dmat,
const std::vector<RegTree*>& trees) {
GradStats::CheckInfo(dmat->info());
// rescale learning rate according to size of trees
float lr = param.learning_rate;
param.learning_rate = lr / trees.size();
ValueConstraint::Init(¶m, dmat->info().num_col);
// build tree
try {
for (size_t i = 0; i < trees.size(); ++i) {
this->UpdateTree(gpair, dmat, trees[i]);
}
} catch (const std::exception& e) {
LOG(FATAL) << "GPU plugin exception: " << e.what() << std::endl;
}
param.learning_rate = lr;
}
public:
void InitDataOnce(DMatrix* dmat) {
info = &dmat->info();
monitor.Start("Quantiles", dList);
hmat_.Init(dmat, param.max_bin);
gmat_.cut = &hmat_;
gmat_.Init(dmat);
monitor.Stop("Quantiles", dList);
n_bins = hmat_.row_ptr.back();
int n_devices = dh::n_devices(param.n_gpus, info->num_row);
bst_uint row_begin = 0;
bst_uint shard_size =
::ceil(static_cast<double>(info->num_row) / n_devices);
dList.resize(n_devices);
for (int d_idx = 0; d_idx < n_devices; ++d_idx) {
int device_idx = (param.gpu_id + d_idx) % dh::n_visible_devices();
dList[d_idx] = device_idx;
}
reducer.Init(dList);
// Partition input matrix into row segments
std::vector<size_t> row_segments;
shards.resize(n_devices);
row_segments.push_back(0);
for (int d_idx = 0; d_idx < n_devices; ++d_idx) {
bst_uint row_end =
::min(static_cast<size_t>(row_begin + shard_size), info->num_row);
row_segments.push_back(row_end);
row_begin = row_end;
}
// Create device shards
omp_set_num_threads(shards.size());
#pragma omp parallel
{
auto cpu_thread_id = omp_get_thread_num();
shards[cpu_thread_id] = std::unique_ptr<DeviceShard>(
new DeviceShard(dList[cpu_thread_id], cpu_thread_id, gmat_,
row_segments[cpu_thread_id],
row_segments[cpu_thread_id + 1], n_bins, param));
}
initialised = true;
}
void InitData(HostDeviceVector<bst_gpair>* gpair, DMatrix* dmat,
const RegTree& tree) {
monitor.Start("InitDataOnce", dList);
if (!initialised) {
this->InitDataOnce(dmat);
}
monitor.Stop("InitDataOnce", dList);
column_sampler.Init(info->num_col, param);
// Copy gpair & reset memory
monitor.Start("InitDataReset", dList);
omp_set_num_threads(shards.size());
// TODO(canonizer): make it parallel again once HostDeviceVector is
// thread-safe
for (int shard = 0; shard < shards.size(); ++shard)
shards[shard]->Reset(gpair, param.gpu_id);
monitor.Stop("InitDataReset", dList);
}
void AllReduceHist(int nidx) {
for (auto& shard : shards) {
auto d_node_hist = shard->hist.GetHistPtr(nidx);
reducer.AllReduceSum(
shard->normalised_device_idx,
reinterpret_cast<gpair_sum_t::value_t*>(d_node_hist),
reinterpret_cast<gpair_sum_t::value_t*>(d_node_hist),
n_bins * (sizeof(gpair_sum_t) / sizeof(gpair_sum_t::value_t)));
}
reducer.Synchronize();
}
void BuildHistLeftRight(int nidx_parent, int nidx_left, int nidx_right) {
size_t left_node_max_elements = 0;
size_t right_node_max_elements = 0;
for (auto& shard : shards) {
left_node_max_elements = (std::max)(
left_node_max_elements, shard->ridx_segments[nidx_left].Size());
right_node_max_elements = (std::max)(
right_node_max_elements, shard->ridx_segments[nidx_right].Size());
}
auto build_hist_nidx = nidx_left;
auto subtraction_trick_nidx = nidx_right;
if (right_node_max_elements < left_node_max_elements) {
build_hist_nidx = nidx_right;
subtraction_trick_nidx = nidx_left;
}
for (auto& shard : shards) {
shard->BuildHist(build_hist_nidx);
}
this->AllReduceHist(build_hist_nidx);
for (auto& shard : shards) {
shard->SubtractionTrick(nidx_parent, build_hist_nidx,
subtraction_trick_nidx);
}
}
// Returns best loss
std::vector<DeviceSplitCandidate> EvaluateSplits(
const std::vector<int>& nidx_set, RegTree* p_tree) {
auto columns = info->num_col;
std::vector<DeviceSplitCandidate> best_splits(nidx_set.size());
std::vector<DeviceSplitCandidate> candidate_splits(nidx_set.size() *
columns);
// Use first device
auto& shard = shards.front();
dh::safe_cuda(hipSetDevice(shard->device_idx));
shard->temp_memory.LazyAllocate(sizeof(DeviceSplitCandidate) * columns *
nidx_set.size());
auto d_split = shard->temp_memory.Pointer<DeviceSplitCandidate>();
auto& streams = shard->GetStreams(static_cast<int>(nidx_set.size()));
// Use streams to process nodes concurrently
for (auto i = 0; i < nidx_set.size(); i++) {
auto nidx = nidx_set[i];
DeviceNodeStats node(shard->node_sum_gradients[nidx], nidx, param);
const int BLOCK_THREADS = 256;
hipLaunchKernelGGL(( evaluate_split_kernel<BLOCK_THREADS>)
, dim3(uint32_t(columns)), dim3(BLOCK_THREADS), 0, streams[i],
shard->hist.GetHistPtr(nidx), nidx, info->num_col, node,
shard->feature_segments.data(), shard->min_fvalue.data(),
shard->gidx_fvalue_map.data(), GPUTrainingParam(param),
d_split + i * columns, node_value_constraints_[nidx],
shard->monotone_constraints.data());
}
dh::safe_cuda(
hipMemcpy(candidate_splits.data(), shard->temp_memory.d_temp_storage,
sizeof(DeviceSplitCandidate) * columns * nidx_set.size(),
hipMemcpyDeviceToHost));
for (auto i = 0; i < nidx_set.size(); i++) {
auto nidx = nidx_set[i];
DeviceSplitCandidate nidx_best;
for (auto fidx = 0; fidx < columns; fidx++) {
auto& candidate = candidate_splits[i * columns + fidx];
if (column_sampler.ColumnUsed(candidate.findex,
p_tree->GetDepth(nidx))) {
nidx_best.Update(candidate_splits[i * columns + fidx], param);
}
}
best_splits[i] = nidx_best;
}
return std::move(best_splits);
}
  // Build the root node: compute the global gradient sum, build and
  // all-reduce the root histogram, record root statistics, and push the first
  // split candidate onto the expand queue.
  void InitRoot(RegTree* p_tree) {
    auto root_nidx = 0;
    // Sum gradients
    std::vector<bst_gpair> tmp_sums(shards.size());
    omp_set_num_threads(shards.size());
    // One CPU thread per device shard; each reduces its own gpair slice.
#pragma omp parallel
    {
      auto cpu_thread_id = omp_get_thread_num();
      dh::safe_cuda(hipSetDevice(shards[cpu_thread_id]->device_idx));
      tmp_sums[cpu_thread_id] =
          thrust::reduce(thrust::hip::par(shards[cpu_thread_id]->temp_memory),
                         shards[cpu_thread_id]->gpair.tbegin(),
                         shards[cpu_thread_id]->gpair.tend());
    }
    // Combine per-device partial sums on the host into a bst_gpair_precise.
    auto sum_gradient =
        std::accumulate(tmp_sums.begin(), tmp_sums.end(), bst_gpair_precise());
    // Generate root histogram
    for (auto& shard : shards) {
      shard->BuildHist(root_nidx);
    }
    this->AllReduceHist(root_nidx);
    // Remember root stats
    p_tree->stat(root_nidx).sum_hess = sum_gradient.GetHess();
    auto weight = CalcWeight(param, sum_gradient);
    p_tree->stat(root_nidx).base_weight = weight;
    (*p_tree)[root_nidx].set_leaf(param.learning_rate * weight);
    // Store sum gradients
    for (auto& shard : shards) {
      shard->node_sum_gradients[root_nidx] = sum_gradient;
    }
    // Initialise root constraint
    node_value_constraints_.resize(p_tree->GetNodes().size());
    // Generate first split
    auto splits = this->EvaluateSplits({root_nidx}, p_tree);
    qexpand_->push(
        ExpandEntry(root_nidx, p_tree->GetDepth(root_nidx), splits.front(), 0));
  }
  // Repartition rows after applying |candidate|'s split: recover the bin id
  // of the split value, then move each row into the left or right child on
  // every shard.
  void UpdatePosition(const ExpandEntry& candidate, RegTree* p_tree) {
    auto nidx = candidate.nid;
    auto left_nidx = (*p_tree)[nidx].cleft();
    auto right_nidx = (*p_tree)[nidx].cright();
    // convert floating-point split_pt into corresponding bin_id
    // split_cond = -1 indicates that split_pt is less than all known cut points
    auto split_gidx = -1;
    auto fidx = candidate.split.findex;
    auto default_dir_left = candidate.split.dir == LeftDir;
    auto fidx_begin = hmat_.row_ptr[fidx];
    auto fidx_end = hmat_.row_ptr[fidx + 1];
    // Linear scan over this feature's cut values to find the matching bin.
    for (auto i = fidx_begin; i < fidx_end; ++i) {
      if (candidate.split.fvalue == hmat_.cut[i]) {
        split_gidx = static_cast<int32_t>(i);
      }
    }
    auto is_dense = info->num_nonzero == info->num_row * info->num_col;
    omp_set_num_threads(shards.size());
    // One CPU thread drives the repartition on each device shard.
#pragma omp parallel
    {
      auto cpu_thread_id = omp_get_thread_num();
      shards[cpu_thread_id]->UpdatePosition(nidx, left_nidx, right_nidx, fidx,
                                            split_gidx, default_dir_left,
                                            is_dense, fidx_begin, fidx_end);
    }
  }
  // Materialise |candidate| in the tree: add the two children, record split
  // and node statistics, propagate monotone value constraints, and finally
  // repartition the rows.
  void ApplySplit(const ExpandEntry& candidate, RegTree* p_tree) {
    // Add new leaves
    RegTree& tree = *p_tree;
    tree.AddChilds(candidate.nid);
    auto& parent = tree[candidate.nid];
    parent.set_split(candidate.split.findex, candidate.split.fvalue,
                     candidate.split.dir == LeftDir);
    tree.stat(candidate.nid).loss_chg = candidate.split.loss_chg;
    // Set up child constraints
    node_value_constraints_.resize(tree.GetNodes().size());
    GradStats left_stats(param);
    left_stats.Add(candidate.split.left_sum);
    GradStats right_stats(param);
    right_stats.Add(candidate.split.right_sum);
    node_value_constraints_[candidate.nid].SetChild(
        param, parent.split_index(), left_stats, right_stats,
        &node_value_constraints_[parent.cleft()],
        &node_value_constraints_[parent.cright()]);
    // Configure left child
    auto left_weight =
        node_value_constraints_[parent.cleft()].CalcWeight(param, left_stats);
    tree[parent.cleft()].set_leaf(left_weight * param.learning_rate, 0);
    tree.stat(parent.cleft()).base_weight = left_weight;
    tree.stat(parent.cleft()).sum_hess = candidate.split.left_sum.GetHess();
    // Configure right child
    auto right_weight =
        node_value_constraints_[parent.cright()].CalcWeight(param, right_stats);
    tree[parent.cright()].set_leaf(right_weight * param.learning_rate, 0);
    tree.stat(parent.cright()).base_weight = right_weight;
    tree.stat(parent.cright()).sum_hess = candidate.split.right_sum.GetHess();
    // Store sum gradients
    for (auto& shard : shards) {
      shard->node_sum_gradients[parent.cleft()] = candidate.split.left_sum;
      shard->node_sum_gradients[parent.cright()] = candidate.split.right_sum;
    }
    this->UpdatePosition(candidate, p_tree);
  }
  // Grow one tree: initialise data and the root, then repeatedly pop the
  // highest-priority candidate from the expand queue, apply its split, and
  // evaluate/enqueue the resulting children until the queue is exhausted or
  // the depth/leaf budgets stop further expansion.
  void UpdateTree(HostDeviceVector<bst_gpair>* gpair, DMatrix* p_fmat,
                  RegTree* p_tree) {
    // Temporarily store number of threads so we can change it back later
    int nthread = omp_get_max_threads();
    auto& tree = *p_tree;
    monitor.Start("InitData", dList);
    this->InitData(gpair, p_fmat, *p_tree);
    monitor.Stop("InitData", dList);
    monitor.Start("InitRoot", dList);
    this->InitRoot(p_tree);
    monitor.Stop("InitRoot", dList);
    // Timestamps order entries created at the same depth/loss.
    auto timestamp = qexpand_->size();
    auto num_leaves = 1;
    while (!qexpand_->empty()) {
      auto candidate = qexpand_->top();
      qexpand_->pop();
      // Skip stale entries whose split no longer passes the validity checks.
      if (!candidate.IsValid(param, num_leaves)) continue;
      // std::cout << candidate;
      monitor.Start("ApplySplit", dList);
      this->ApplySplit(candidate, p_tree);
      monitor.Stop("ApplySplit", dList);
      num_leaves++;
      auto left_child_nidx = tree[candidate.nid].cleft();
      auto right_child_nidx = tree[candidate.nid].cright();
      // Only create child entries if needed
      if (ExpandEntry::ChildIsValid(param, tree.GetDepth(left_child_nidx),
                                    num_leaves)) {
        monitor.Start("BuildHist", dList);
        this->BuildHistLeftRight(candidate.nid, left_child_nidx,
                                 right_child_nidx);
        monitor.Stop("BuildHist", dList);
        monitor.Start("EvaluateSplits", dList);
        auto splits =
            this->EvaluateSplits({left_child_nidx, right_child_nidx}, p_tree);
        qexpand_->push(ExpandEntry(left_child_nidx,
                                   tree.GetDepth(left_child_nidx), splits[0],
                                   timestamp++));
        qexpand_->push(ExpandEntry(right_child_nidx,
                                   tree.GetDepth(right_child_nidx), splits[1],
                                   timestamp++));
        monitor.Stop("EvaluateSplits", dList);
      }
    }
    // Reset omp num threads
    omp_set_num_threads(nthread);
  }
  // Prediction-cache fast path is not implemented; returning false signals
  // that the cache was not updated.
  bool UpdatePredictionCache(const DMatrix* data,
                             std::vector<bst_float>* p_out_preds) override {
    return false;
  }
  // HostDeviceVector overload: same as above, cache update not supported.
  bool UpdatePredictionCache(
      const DMatrix* data, HostDeviceVector<bst_float>* p_out_preds) override {
    return false;
  }
struct ExpandEntry {
int nid;
int depth;
DeviceSplitCandidate split;
uint64_t timestamp;
ExpandEntry(int nid, int depth, const DeviceSplitCandidate& split,
uint64_t timestamp)
: nid(nid), depth(depth), split(split), timestamp(timestamp) {}
bool IsValid(const TrainParam& param, int num_leaves) const {
if (split.loss_chg <= rt_eps) return false;
if (split.left_sum.GetHess() == 0 || split.right_sum.GetHess() == 0)
return false;
if (param.max_depth > 0 && depth == param.max_depth) return false;
if (param.max_leaves > 0 && num_leaves == param.max_leaves) return false;
return true;
}
static bool ChildIsValid(const TrainParam& param, int depth,
int num_leaves) {
if (param.max_depth > 0 && depth == param.max_depth) return false;
if (param.max_leaves > 0 && num_leaves == param.max_leaves) return false;
return true;
}
friend std::ostream& operator<<(std::ostream& os, const ExpandEntry& e) {
os << "ExpandEntry: \n";
os << "nidx: " << e.nid << "\n";
os << "depth: " << e.depth << "\n";
os << "loss: " << e.split.loss_chg << "\n";
os << "left_sum: " << e.split.left_sum << "\n";
os << "right_sum: " << e.split.right_sum << "\n";
return os;
}
};
inline static bool depth_wise(ExpandEntry lhs, ExpandEntry rhs) {
if (lhs.depth == rhs.depth) {
return lhs.timestamp > rhs.timestamp; // favor small timestamp
} else {
return lhs.depth > rhs.depth; // favor small depth
}
}
inline static bool loss_guide(ExpandEntry lhs, ExpandEntry rhs) {
if (lhs.split.loss_chg == rhs.split.loss_chg) {
return lhs.timestamp > rhs.timestamp; // favor small timestamp
} else {
return lhs.split.loss_chg < rhs.split.loss_chg; // favor large loss_chg
}
}
  TrainParam param;             // training hyper-parameters
  common::HistCutMatrix hmat_;  // quantile cut points per feature
  common::GHistIndexMatrix gmat_;  // quantised (binned) feature matrix
  MetaInfo* info;     // borrowed pointer to the DMatrix meta info
  bool initialised;   // true once InitDataOnce has run
  int n_devices;      // number of GPUs in use
  int n_bins;         // total number of histogram bins across all features
  std::vector<std::unique_ptr<DeviceShard>> shards;  // per-GPU state
  ColumnSampler column_sampler;  // column (feature) sampling support
  // Priority queue of nodes to expand, ordered by the configured comparator
  // (depth_wise or loss_guide).
  typedef std::priority_queue<ExpandEntry, std::vector<ExpandEntry>,
                              std::function<bool(ExpandEntry, ExpandEntry)>>
      ExpandQueue;
  std::unique_ptr<ExpandQueue> qexpand_;
  common::Monitor monitor;  // phase timing instrumentation
  dh::AllReducer reducer;   // multi-GPU histogram reduction
  std::vector<ValueConstraint> node_value_constraints_;  // per-node constraints
  std::vector<int> dList;  // device ordinals used by this updater
};
// Register this updater so it can be selected as "grow_gpu_hist".
XGBOOST_REGISTER_TREE_UPDATER(GPUHistMaker, "grow_gpu_hist")
    .describe("Grow tree with GPU.")
    .set_body([]() { return new GPUHistMaker(); });
} // namespace tree
} // namespace xgboost
| f035e009052c8107d341d09e35f38168d550a5e9.cu | /*!
* Copyright 2017 XGBoost contributors
*/
#include <thrust/execution_policy.h>
#include <thrust/reduce.h>
#include <thrust/sequence.h>
#include <xgboost/tree_updater.h>
#include <algorithm>
#include <memory>
#include <queue>
#include <utility>
#include <vector>
#include "../common/compressed_iterator.h"
#include "../common/device_helpers.cuh"
#include "../common/hist_util.h"
#include "../common/host_device_vector.h"
#include "../common/timer.h"
#include "param.h"
#include "updater_gpu_common.cuh"
namespace xgboost {
namespace tree {
DMLC_REGISTRY_FILE_TAG(updater_gpu_hist);
// Histogram bins accumulate in the higher-precision gradient pair type.
typedef bst_gpair_precise gpair_sum_t;
// Block-wide sum of the histogram bins in [begin, end).
// Must be called by all threads of the block; the result is broadcast to
// every thread through shared memory.
template <int BLOCK_THREADS, typename reduce_t, typename temp_storage_t>
__device__ gpair_sum_t ReduceFeature(const gpair_sum_t* begin,
                                     const gpair_sum_t* end,
                                     temp_storage_t* temp_storage) {
  __shared__ cub::Uninitialized<gpair_sum_t> uninitialized_sum;
  gpair_sum_t& shared_sum = uninitialized_sum.Alias();
  gpair_sum_t local_sum = gpair_sum_t();
  for (auto itr = begin; itr < end; itr += BLOCK_THREADS) {
    bool thread_active = itr + threadIdx.x < end;
    // Scan histogram
    gpair_sum_t bin = thread_active ? *(itr + threadIdx.x) : gpair_sum_t();
    // Only thread 0 receives the valid block aggregate from cub; the other
    // threads' local_sum values are never read.
    local_sum += reduce_t(temp_storage->sum_reduce).Reduce(bin, cub::Sum());
    // CUB requires a barrier before the collective's temporary storage is
    // reused by the next loop iteration; the loop trip count is uniform
    // across the block, so this barrier is safe.
    __syncthreads();
  }
  if (threadIdx.x == 0) {
    shared_sum = local_sum;
  }
  __syncthreads();
  return shared_sum;
}
// Evaluate every split point of feature |fidx| for one node, updating
// |best_split| (shared by the block) with the best gain found. Uses an
// exclusive scan over the feature's histogram bins to obtain left-child sums,
// then a block-wide argmax to pick the winning thread.
template <int BLOCK_THREADS, typename reduce_t, typename scan_t,
          typename max_reduce_t, typename temp_storage_t>
__device__ void EvaluateFeature(int fidx, const gpair_sum_t* hist,
                                const int* feature_segments, float min_fvalue,
                                const float* gidx_fvalue_map,
                                DeviceSplitCandidate* best_split,
                                const DeviceNodeStats& node,
                                const GPUTrainingParam& param,
                                temp_storage_t* temp_storage, int constraint,
                                const ValueConstraint& value_constraint) {
  // Bin range [gidx_begin, gidx_end) belonging to this feature.
  int gidx_begin = feature_segments[fidx];
  int gidx_end = feature_segments[fidx + 1];
  // Total gradient mass present in this feature's bins.
  gpair_sum_t feature_sum = ReduceFeature<BLOCK_THREADS, reduce_t>(
      hist + gidx_begin, hist + gidx_end, temp_storage);
  // Carries the running prefix between successive scan tiles.
  auto prefix_op = SumCallbackOp<gpair_sum_t>();
  for (int scan_begin = gidx_begin; scan_begin < gidx_end;
       scan_begin += BLOCK_THREADS) {
    bool thread_active = scan_begin + threadIdx.x < gidx_end;
    gpair_sum_t bin =
        thread_active ? hist[scan_begin + threadIdx.x] : gpair_sum_t();
    // After the scan, |bin| holds the sum of all bins strictly below this one.
    scan_t(temp_storage->scan).ExclusiveScan(bin, bin, cub::Sum(), prefix_op);
    // Calculate gain
    gpair_sum_t parent_sum = gpair_sum_t(node.sum_gradients);
    // Rows with a missing value for this feature.
    gpair_sum_t missing = parent_sum - feature_sum;
    bool missing_left = true;
    const float null_gain = -FLT_MAX;
    float gain = null_gain;
    if (thread_active) {
      // Also sets |missing_left| to the better default direction.
      gain = loss_chg_missing(bin, missing, parent_sum, node.root_gain, param,
                              constraint, value_constraint, missing_left);
    }
    __syncthreads();
    // Find thread with best gain
    cub::KeyValuePair<int, float> tuple(threadIdx.x, gain);
    cub::KeyValuePair<int, float> best =
        max_reduce_t(temp_storage->max_reduce).Reduce(tuple, cub::ArgMax());
    __shared__ cub::KeyValuePair<int, float> block_max;
    if (threadIdx.x == 0) {
      block_max = best;
    }
    __syncthreads();
    // Best thread updates split
    if (threadIdx.x == block_max.key) {
      int gidx = scan_begin + threadIdx.x;
      // First bin splits at the feature minimum; others at the previous cut.
      float fvalue =
          gidx == gidx_begin ? min_fvalue : gidx_fvalue_map[gidx - 1];
      gpair_sum_t left = missing_left ? bin + missing : bin;
      gpair_sum_t right = parent_sum - left;
      best_split->Update(gain, missing_left ? LeftDir : RightDir, fvalue, fidx,
                         left, right, param);
    }
    __syncthreads();
  }
}
// One block per feature: block blockIdx.x evaluates all split points of that
// feature for node |nidx| and writes its best candidate to d_split[fidx].
// The host then reduces over features.
template <int BLOCK_THREADS>
__global__ void evaluate_split_kernel(
    const gpair_sum_t* d_hist, int nidx, uint64_t n_features,
    DeviceNodeStats nodes, const int* d_feature_segments,
    const float* d_fidx_min_map, const float* d_gidx_fvalue_map,
    GPUTrainingParam gpu_param, DeviceSplitCandidate* d_split,
    ValueConstraint value_constraint, int* d_monotonic_constraints) {
  typedef cub::KeyValuePair<int, float> ArgMaxT;
  typedef cub::BlockScan<gpair_sum_t, BLOCK_THREADS, cub::BLOCK_SCAN_WARP_SCANS>
      BlockScanT;
  typedef cub::BlockReduce<ArgMaxT, BLOCK_THREADS> MaxReduceT;
  typedef cub::BlockReduce<gpair_sum_t, BLOCK_THREADS> SumReduceT;
  // The three collectives are never live at the same time, so their
  // temporary storage shares one union.
  union TempStorage {
    typename BlockScanT::TempStorage scan;
    typename MaxReduceT::TempStorage max_reduce;
    typename SumReduceT::TempStorage sum_reduce;
  };
  __shared__ cub::Uninitialized<DeviceSplitCandidate> uninitialized_split;
  DeviceSplitCandidate& best_split = uninitialized_split.Alias();
  __shared__ TempStorage temp_storage;
  if (threadIdx.x == 0) {
    best_split = DeviceSplitCandidate();
  }
  __syncthreads();
  auto fidx = blockIdx.x;
  auto constraint = d_monotonic_constraints[fidx];
  EvaluateFeature<BLOCK_THREADS, SumReduceT, BlockScanT, MaxReduceT>(
      fidx, d_hist, d_feature_segments, d_fidx_min_map[fidx], d_gidx_fvalue_map,
      &best_split, nodes, gpu_param, &temp_storage, constraint,
      value_constraint);
  __syncthreads();
  if (threadIdx.x == 0) {
    // Record best loss
    d_split[fidx] = best_split;
  }
}
// Find a gidx value for a given feature otherwise return -1 if not found
// Binary search over the row slice [begin, end) of the compressed index for
// a bin id in [fidx_begin, fidx_end). Returns the bin id itself, not its
// position. The non-standard narrowing (`begin = middle` instead of
// `middle + 1`) terminates via the previous_middle guard.
template <typename gidx_iter_t>
__device__ int BinarySearchRow(bst_uint begin, bst_uint end, gidx_iter_t data,
                               int fidx_begin, int fidx_end) {
  bst_uint previous_middle = UINT32_MAX;
  while (end != begin) {
    auto middle = begin + (end - begin) / 2;
    if (middle == previous_middle) {
      break;
    }
    previous_middle = middle;
    auto gidx = data[middle];
    if (gidx >= fidx_begin && gidx < fidx_end) {
      return gidx;
    } else if (gidx < fidx_begin) {
      begin = middle;
    } else {
      end = middle;
    }
  }
  // Value is missing
  return -1;
}
// Per-device storage for node histograms: one contiguous buffer holding
// n_bins entries for each of max_nodes nodes.
struct DeviceHistogram {
  dh::bulk_allocator<dh::memory_type::DEVICE> ba;
  dh::dvec<gpair_sum_t> data;
  int n_bins;
  // Allocate max_nodes * n_bins histogram entries on |device_idx|.
  void Init(int device_idx, int max_nodes, int n_bins, bool silent) {
    this->n_bins = n_bins;
    ba.allocate(device_idx, silent, &data, size_t(max_nodes) * size_t(n_bins));
  }
  // Zero all histograms for a new boosting iteration.
  void Reset() { data.fill(gpair_sum_t()); }
  // Device pointer to the first bin of node |nidx|'s histogram.
  gpair_sum_t* GetHistPtr(int nidx) { return data.data() + nidx * n_bins; }
  // Debugging aid: dump node |nidx|'s bins to stdout (copies to host).
  void PrintNidx(int nidx) const {
    auto h_data = data.as_vector();
    std::cout << "nidx " << nidx << ":\n";
    for (int i = n_bins * nidx; i < n_bins * (nidx + 1); i++) {
      std::cout << h_data[i] << " ";
    }
    std::cout << "\n";
  }
};
// Manage memory for a single GPU
// Owns one device's slice of the rows [row_begin_idx, row_end_idx), the
// compressed ELLPACK index, per-node histograms and the row partitioning.
struct DeviceShard {
  // Half-open range [begin, end) into this shard's row index array.
  struct Segment {
    size_t begin;
    size_t end;
    Segment() : begin(0), end(0) {}
    Segment(size_t begin, size_t end) : begin(begin), end(end) {
      CHECK_GE(end, begin);
    }
    size_t Size() const { return end - begin; }
  };
  int device_idx;
  int normalised_device_idx;  // Device index counting from param.gpu_id
  dh::bulk_allocator<dh::memory_type::DEVICE> ba;
  dh::dvec<common::compressed_byte_t> gidx_buffer;  // compressed ELLPACK index
  dh::dvec<bst_gpair> gpair;  // this shard's gradient pairs
  dh::dvec2<bst_uint> ridx;  // Row index relative to this shard
  dh::dvec2<int> position;  // node id of each row (double-buffered)
  std::vector<Segment> ridx_segments;  // per-node row ranges into ridx
  dh::dvec<int> feature_segments;  // bin range boundaries per feature
  dh::dvec<float> gidx_fvalue_map;  // cut value for each bin
  dh::dvec<float> min_fvalue;  // per-feature minimum value
  dh::dvec<int> monotone_constraints;
  std::vector<bst_gpair> node_sum_gradients;
  common::CompressedIterator<uint32_t> gidx;
  int row_stride;  // max non-zeros in any row (ELLPACK width)
  bst_uint row_begin_idx;  // The row offset for this shard
  bst_uint row_end_idx;
  bst_uint n_rows;
  int n_bins;
  int null_gidx_value;  // sentinel bin id marking a missing entry
  DeviceHistogram hist;
  TrainParam param;
  int64_t* tmp_pinned;  // Small amount of staging memory
  std::vector<cudaStream_t> streams;
  dh::CubMemory temp_memory;
  // Build the shard: convert the row slice to ELLPACK, compress it, copy the
  // cut/constraint metadata to the device and initialise histograms.
  DeviceShard(int device_idx, int normalised_device_idx,
              const common::GHistIndexMatrix& gmat, bst_uint row_begin,
              bst_uint row_end, int n_bins, TrainParam param)
      : device_idx(device_idx),
        normalised_device_idx(normalised_device_idx),
        row_begin_idx(row_begin),
        row_end_idx(row_end),
        n_rows(row_end - row_begin),
        n_bins(n_bins),
        null_gidx_value(n_bins),
        param(param) {
    // Convert to ELLPACK matrix representation
    int max_elements_row = 0;
    for (auto i = row_begin; i < row_end; i++) {
      max_elements_row =
          (std::max)(max_elements_row,
                     static_cast<int>(gmat.row_ptr[i + 1] - gmat.row_ptr[i]));
    }
    row_stride = max_elements_row;
    // Rows shorter than row_stride are padded with the null bin id.
    std::vector<int> ellpack_matrix(row_stride * n_rows, null_gidx_value);
    for (auto i = row_begin; i < row_end; i++) {
      int row_count = 0;
      for (auto j = gmat.row_ptr[i]; j < gmat.row_ptr[i + 1]; j++) {
        ellpack_matrix[(i - row_begin) * row_stride + row_count] =
            gmat.index[j];
        row_count++;
      }
    }
    // Allocate
    int num_symbols = n_bins + 1;
    size_t compressed_size_bytes =
        common::CompressedBufferWriter::CalculateBufferSize(
            ellpack_matrix.size(), num_symbols);
    CHECK(!(param.max_leaves == 0 && param.max_depth == 0))
        << "Max leaves and max depth cannot both be unconstrained for "
           "gpu_hist.";
    int max_nodes =
        param.max_leaves > 0 ? param.max_leaves * 2 : n_nodes(param.max_depth);
    ba.allocate(device_idx, param.silent, &gidx_buffer, compressed_size_bytes,
                &gpair, n_rows, &ridx, n_rows, &position, n_rows,
                &feature_segments, gmat.cut->row_ptr.size(), &gidx_fvalue_map,
                gmat.cut->cut.size(), &min_fvalue, gmat.cut->min_val.size(),
                &monotone_constraints, param.monotone_constraints.size());
    gidx_fvalue_map = gmat.cut->cut;
    min_fvalue = gmat.cut->min_val;
    feature_segments = gmat.cut->row_ptr;
    monotone_constraints = param.monotone_constraints;
    node_sum_gradients.resize(max_nodes);
    ridx_segments.resize(max_nodes);
    // Compress gidx
    common::CompressedBufferWriter cbw(num_symbols);
    std::vector<common::compressed_byte_t> host_buffer(gidx_buffer.size());
    cbw.Write(host_buffer.data(), ellpack_matrix.begin(), ellpack_matrix.end());
    gidx_buffer = host_buffer;
    gidx =
        common::CompressedIterator<uint32_t>(gidx_buffer.data(), num_symbols);
    common::CompressedIterator<uint32_t> ci_host(host_buffer.data(),
                                                 num_symbols);
    // Init histogram
    hist.Init(device_idx, max_nodes, gmat.cut->row_ptr.back(), param.silent);
    dh::safe_cuda(cudaMallocHost(&tmp_pinned, sizeof(int64_t)));
  }
  ~DeviceShard() {
    for (auto& stream : streams) {
      dh::safe_cuda(cudaStreamDestroy(stream));
    }
    dh::safe_cuda(cudaFreeHost(tmp_pinned));
  }
  // Get vector of at least n initialised streams
  std::vector<cudaStream_t>& GetStreams(int n) {
    // Growing destroys and recreates all streams rather than appending.
    if (n > streams.size()) {
      for (auto& stream : streams) {
        dh::safe_cuda(cudaStreamDestroy(stream));
      }
      streams.clear();
      streams.resize(n);
      for (auto& stream : streams) {
        dh::safe_cuda(cudaStreamCreate(&stream));
      }
    }
    return streams;
  }
  // Reset values for each update iteration
  void Reset(HostDeviceVector<bst_gpair>* dh_gpair, int device) {
    auto begin = dh_gpair->tbegin(device);
    dh::safe_cuda(cudaSetDevice(device_idx));
    // All rows start at the root node (position 0, one segment).
    position.current_dvec().fill(0);
    std::fill(node_sum_gradients.begin(), node_sum_gradients.end(),
              bst_gpair());
    thrust::sequence(ridx.current_dvec().tbegin(), ridx.current_dvec().tend());
    std::fill(ridx_segments.begin(), ridx_segments.end(), Segment(0, 0));
    ridx_segments.front() = Segment(0, ridx.size());
    this->gpair.copy(begin + row_begin_idx, begin + row_end_idx);
    subsample_gpair(&gpair, param.subsample, row_begin_idx);
    hist.Reset();
  }
  // Accumulate the histogram for node |nidx| from its rows; one GPU thread
  // per (row, ELLPACK slot) pair, using atomics on the bins.
  void BuildHist(int nidx) {
    auto segment = ridx_segments[nidx];
    auto d_node_hist = hist.GetHistPtr(nidx);
    auto d_gidx = gidx;
    auto d_ridx = ridx.current();
    auto d_gpair = gpair.data();
    auto row_stride = this->row_stride;
    auto null_gidx_value = this->null_gidx_value;
    auto n_elements = segment.Size() * row_stride;
    dh::launch_n(device_idx, n_elements, [=] __device__(size_t idx) {
      int ridx = d_ridx[(idx / row_stride) + segment.begin];
      int gidx = d_gidx[ridx * row_stride + idx % row_stride];
      if (gidx != null_gidx_value) {
        AtomicAddGpair(d_node_hist + gidx, d_gpair[ridx]);
      }
    });
  }
  // Derive a sibling's histogram as parent - built sibling, bin by bin.
  void SubtractionTrick(int nidx_parent, int nidx_histogram,
                        int nidx_subtraction) {
    auto d_node_hist_parent = hist.GetHistPtr(nidx_parent);
    auto d_node_hist_histogram = hist.GetHistPtr(nidx_histogram);
    auto d_node_hist_subtraction = hist.GetHistPtr(nidx_subtraction);
    dh::launch_n(device_idx, hist.n_bins, [=] __device__(size_t idx) {
      d_node_hist_subtraction[idx] =
          d_node_hist_parent[idx] - d_node_hist_histogram[idx];
    });
  }
  // Count rows assigned to the left child: warp-ballot, then one atomic per
  // warp from lane 0.
  __device__ void CountLeft(int64_t* d_count, int val, int left_nidx) {
    unsigned ballot = __ballot(val == left_nidx);
    if (threadIdx.x % 32 == 0) {
      atomicAdd(reinterpret_cast<unsigned long long*>(d_count), // NOLINT
                static_cast<unsigned long long>(__popc(ballot))); // NOLINT
    }
  }
  // Assign each row of node |nidx| to the left or right child by comparing
  // its bin for feature |fidx| against |split_gidx|, then repartition the row
  // index array so each child owns a contiguous segment.
  void UpdatePosition(int nidx, int left_nidx, int right_nidx, int fidx,
                      int split_gidx, bool default_dir_left, bool is_dense,
                      int fidx_begin, int fidx_end) {
    dh::safe_cuda(cudaSetDevice(device_idx));
    temp_memory.LazyAllocate(sizeof(int64_t));
    auto d_left_count = temp_memory.Pointer<int64_t>();
    dh::safe_cuda(cudaMemset(d_left_count, 0, sizeof(int64_t)));
    auto segment = ridx_segments[nidx];
    auto d_ridx = ridx.current();
    auto d_position = position.current();
    auto d_gidx = gidx;
    auto row_stride = this->row_stride;
    dh::launch_n<1, 512>(
        device_idx, segment.Size(), [=] __device__(bst_uint idx) {
          idx += segment.begin;
          auto ridx = d_ridx[idx];
          auto row_begin = row_stride * ridx;
          auto row_end = row_begin + row_stride;
          auto gidx = -1;
          if (is_dense) {
            // Dense rows store feature fidx at a fixed column.
            gidx = d_gidx[row_begin + fidx];
          } else {
            gidx = BinarySearchRow(row_begin, row_end, d_gidx, fidx_begin,
                                   fidx_end);
          }
          int position;
          if (gidx >= 0) {
            // Feature is found
            position = gidx <= split_gidx ? left_nidx : right_nidx;
          } else {
            // Feature is missing
            position = default_dir_left ? left_nidx : right_nidx;
          }
          CountLeft(d_left_count, position, left_nidx);
          d_position[idx] = position;
        });
    dh::safe_cuda(cudaMemcpy(tmp_pinned, d_left_count, sizeof(int64_t),
                             cudaMemcpyDeviceToHost));
    auto left_count = *tmp_pinned;
    SortPosition(segment, left_nidx, right_nidx);
    // dh::safe_cuda(cudaStreamSynchronize(stream));
    ridx_segments[left_nidx] =
        Segment(segment.begin, segment.begin + left_count);
    ridx_segments[right_nidx] =
        Segment(segment.begin + left_count, segment.end);
  }
  // Stable-sort the segment's rows by child id so the left child's rows
  // precede the right child's; results are copied back into the current
  // buffers of the double-buffered arrays.
  void SortPosition(const Segment& segment, int left_nidx, int right_nidx) {
    // Radix-sort only the bits needed to distinguish the two node ids.
    int min_bits = 0;
    int max_bits = static_cast<int>(
        std::ceil(std::log2((std::max)(left_nidx, right_nidx) + 1)));
    size_t temp_storage_bytes = 0;
    // First call with nullptr only queries the temp storage size.
    cub::DeviceRadixSort::SortPairs(
        nullptr, temp_storage_bytes, position.current() + segment.begin,
        position.other() + segment.begin, ridx.current() + segment.begin,
        ridx.other() + segment.begin, segment.Size(), min_bits, max_bits);
    temp_memory.LazyAllocate(temp_storage_bytes);
    cub::DeviceRadixSort::SortPairs(
        temp_memory.d_temp_storage, temp_memory.temp_storage_bytes,
        position.current() + segment.begin, position.other() + segment.begin,
        ridx.current() + segment.begin, ridx.other() + segment.begin,
        segment.Size(), min_bits, max_bits);
    dh::safe_cuda(cudaMemcpy(
        position.current() + segment.begin, position.other() + segment.begin,
        segment.Size() * sizeof(int), cudaMemcpyDeviceToDevice));
    dh::safe_cuda(cudaMemcpy(
        ridx.current() + segment.begin, ridx.other() + segment.begin,
        segment.Size() * sizeof(bst_uint), cudaMemcpyDeviceToDevice));
  }
};
class GPUHistMaker : public TreeUpdater {
public:
  struct ExpandEntry;  // forward declaration
  GPUHistMaker() : initialised(false) {}
  ~GPUHistMaker() {}
  // Parse updater arguments and set up the expand queue with the comparator
  // matching the configured grow policy.
  void Init(
      const std::vector<std::pair<std::string, std::string>>& args) override {
    param.InitAllowUnknown(args);
    CHECK(param.n_gpus != 0) << "Must have at least one device";
    n_devices = param.n_gpus;
    dh::check_compute_capability();
    if (param.grow_policy == TrainParam::kLossGuide) {
      qexpand_.reset(new ExpandQueue(loss_guide));
    } else {
      qexpand_.reset(new ExpandQueue(depth_wise));
    }
    monitor.Init("updater_gpu_hist", param.debug_verbose);
  }
  // std::vector overload: stage the gradients into a HostDeviceVector and
  // delegate to the HostDeviceVector overload.
  void Update(const std::vector<bst_gpair>& gpair, DMatrix* dmat,
              const std::vector<RegTree*>& trees) override {
    monitor.Start("Update", dList);
    // TODO(canonizer): move it into the class if this ever becomes a bottleneck
    HostDeviceVector<bst_gpair> gpair_d(gpair.size(), param.gpu_id);
    dh::safe_cuda(cudaSetDevice(param.gpu_id));
    thrust::copy(gpair.begin(), gpair.end(), gpair_d.tbegin(param.gpu_id));
    Update(&gpair_d, dmat, trees);
    monitor.Stop("Update", dList);
  }
  // Main entry point: time the whole update and delegate to UpdateHelper.
  void Update(HostDeviceVector<bst_gpair>* gpair, DMatrix* dmat,
              const std::vector<RegTree*>& trees) override {
    monitor.Start("Update", dList);
    UpdateHelper(gpair, dmat, trees);
    monitor.Stop("Update", dList);
  }
private:
void UpdateHelper(HostDeviceVector<bst_gpair>* gpair, DMatrix* dmat,
const std::vector<RegTree*>& trees) {
GradStats::CheckInfo(dmat->info());
// rescale learning rate according to size of trees
float lr = param.learning_rate;
param.learning_rate = lr / trees.size();
ValueConstraint::Init(¶m, dmat->info().num_col);
// build tree
try {
for (size_t i = 0; i < trees.size(); ++i) {
this->UpdateTree(gpair, dmat, trees[i]);
}
} catch (const std::exception& e) {
LOG(FATAL) << "GPU plugin exception: " << e.what() << std::endl;
}
param.learning_rate = lr;
}
public:
  // One-time setup: compute quantile cuts and the binned matrix, choose the
  // devices, partition rows into contiguous segments, and build one
  // DeviceShard per device (in parallel, one CPU thread per shard).
  void InitDataOnce(DMatrix* dmat) {
    info = &dmat->info();
    monitor.Start("Quantiles", dList);
    hmat_.Init(dmat, param.max_bin);
    gmat_.cut = &hmat_;
    gmat_.Init(dmat);
    monitor.Stop("Quantiles", dList);
    n_bins = hmat_.row_ptr.back();
    int n_devices = dh::n_devices(param.n_gpus, info->num_row);
    bst_uint row_begin = 0;
    bst_uint shard_size =
        std::ceil(static_cast<double>(info->num_row) / n_devices);
    dList.resize(n_devices);
    for (int d_idx = 0; d_idx < n_devices; ++d_idx) {
      // Device ordinals start at gpu_id and wrap around the visible devices.
      int device_idx = (param.gpu_id + d_idx) % dh::n_visible_devices();
      dList[d_idx] = device_idx;
    }
    reducer.Init(dList);
    // Partition input matrix into row segments
    std::vector<size_t> row_segments;
    shards.resize(n_devices);
    row_segments.push_back(0);
    for (int d_idx = 0; d_idx < n_devices; ++d_idx) {
      bst_uint row_end =
          std::min(static_cast<size_t>(row_begin + shard_size), info->num_row);
      row_segments.push_back(row_end);
      row_begin = row_end;
    }
    // Create device shards
    omp_set_num_threads(shards.size());
#pragma omp parallel
    {
      auto cpu_thread_id = omp_get_thread_num();
      shards[cpu_thread_id] = std::unique_ptr<DeviceShard>(
          new DeviceShard(dList[cpu_thread_id], cpu_thread_id, gmat_,
                          row_segments[cpu_thread_id],
                          row_segments[cpu_thread_id + 1], n_bins, param));
    }
    initialised = true;
  }
  // Per-iteration setup: run one-time initialisation if needed, re-sample
  // columns, then copy gradients to each shard and reset its state.
  void InitData(HostDeviceVector<bst_gpair>* gpair, DMatrix* dmat,
                const RegTree& tree) {
    monitor.Start("InitDataOnce", dList);
    if (!initialised) {
      this->InitDataOnce(dmat);
    }
    monitor.Stop("InitDataOnce", dList);
    column_sampler.Init(info->num_col, param);
    // Copy gpair & reset memory
    monitor.Start("InitDataReset", dList);
    omp_set_num_threads(shards.size());
    // TODO(canonizer): make it parallel again once HostDeviceVector is
    // thread-safe
    for (int shard = 0; shard < shards.size(); ++shard)
      shards[shard]->Reset(gpair, param.gpu_id);
    monitor.Stop("InitDataReset", dList);
  }
  // Sum the histogram of node |nidx| across all devices, in place. Each
  // gpair_sum_t is reduced component-wise as value_t scalars.
  void AllReduceHist(int nidx) {
    for (auto& shard : shards) {
      auto d_node_hist = shard->hist.GetHistPtr(nidx);
      reducer.AllReduceSum(
          shard->normalised_device_idx,
          reinterpret_cast<gpair_sum_t::value_t*>(d_node_hist),
          reinterpret_cast<gpair_sum_t::value_t*>(d_node_hist),
          n_bins * (sizeof(gpair_sum_t) / sizeof(gpair_sum_t::value_t)));
    }
    reducer.Synchronize();
  }
  // Build the histogram for the cheaper (smaller) child directly and derive
  // the sibling's histogram with the subtraction trick.
  void BuildHistLeftRight(int nidx_parent, int nidx_left, int nidx_right) {
    // Largest per-shard row count of each child decides which to build.
    size_t left_node_max_elements = 0;
    size_t right_node_max_elements = 0;
    for (auto& shard : shards) {
      left_node_max_elements = (std::max)(
          left_node_max_elements, shard->ridx_segments[nidx_left].Size());
      right_node_max_elements = (std::max)(
          right_node_max_elements, shard->ridx_segments[nidx_right].Size());
    }
    auto build_hist_nidx = nidx_left;
    auto subtraction_trick_nidx = nidx_right;
    if (right_node_max_elements < left_node_max_elements) {
      build_hist_nidx = nidx_right;
      subtraction_trick_nidx = nidx_left;
    }
    for (auto& shard : shards) {
      shard->BuildHist(build_hist_nidx);
    }
    this->AllReduceHist(build_hist_nidx);
    for (auto& shard : shards) {
      shard->SubtractionTrick(nidx_parent, build_hist_nidx,
                              subtraction_trick_nidx);
    }
  }
// Returns best loss
std::vector<DeviceSplitCandidate> EvaluateSplits(
const std::vector<int>& nidx_set, RegTree* p_tree) {
auto columns = info->num_col;
std::vector<DeviceSplitCandidate> best_splits(nidx_set.size());
std::vector<DeviceSplitCandidate> candidate_splits(nidx_set.size() *
columns);
// Use first device
auto& shard = shards.front();
dh::safe_cuda(cudaSetDevice(shard->device_idx));
shard->temp_memory.LazyAllocate(sizeof(DeviceSplitCandidate) * columns *
nidx_set.size());
auto d_split = shard->temp_memory.Pointer<DeviceSplitCandidate>();
auto& streams = shard->GetStreams(static_cast<int>(nidx_set.size()));
// Use streams to process nodes concurrently
for (auto i = 0; i < nidx_set.size(); i++) {
auto nidx = nidx_set[i];
DeviceNodeStats node(shard->node_sum_gradients[nidx], nidx, param);
const int BLOCK_THREADS = 256;
evaluate_split_kernel<BLOCK_THREADS>
<<<uint32_t(columns), BLOCK_THREADS, 0, streams[i]>>>(
shard->hist.GetHistPtr(nidx), nidx, info->num_col, node,
shard->feature_segments.data(), shard->min_fvalue.data(),
shard->gidx_fvalue_map.data(), GPUTrainingParam(param),
d_split + i * columns, node_value_constraints_[nidx],
shard->monotone_constraints.data());
}
dh::safe_cuda(
cudaMemcpy(candidate_splits.data(), shard->temp_memory.d_temp_storage,
sizeof(DeviceSplitCandidate) * columns * nidx_set.size(),
cudaMemcpyDeviceToHost));
for (auto i = 0; i < nidx_set.size(); i++) {
auto nidx = nidx_set[i];
DeviceSplitCandidate nidx_best;
for (auto fidx = 0; fidx < columns; fidx++) {
auto& candidate = candidate_splits[i * columns + fidx];
if (column_sampler.ColumnUsed(candidate.findex,
p_tree->GetDepth(nidx))) {
nidx_best.Update(candidate_splits[i * columns + fidx], param);
}
}
best_splits[i] = nidx_best;
}
return std::move(best_splits);
}
  // Build the root node: compute the global gradient sum, build and
  // all-reduce the root histogram, record root statistics, and push the first
  // split candidate onto the expand queue.
  void InitRoot(RegTree* p_tree) {
    auto root_nidx = 0;
    // Sum gradients
    std::vector<bst_gpair> tmp_sums(shards.size());
    omp_set_num_threads(shards.size());
    // One CPU thread per device shard; each reduces its own gpair slice.
#pragma omp parallel
    {
      auto cpu_thread_id = omp_get_thread_num();
      dh::safe_cuda(cudaSetDevice(shards[cpu_thread_id]->device_idx));
      tmp_sums[cpu_thread_id] =
          thrust::reduce(thrust::cuda::par(shards[cpu_thread_id]->temp_memory),
                         shards[cpu_thread_id]->gpair.tbegin(),
                         shards[cpu_thread_id]->gpair.tend());
    }
    // Combine per-device partial sums on the host into a bst_gpair_precise.
    auto sum_gradient =
        std::accumulate(tmp_sums.begin(), tmp_sums.end(), bst_gpair_precise());
    // Generate root histogram
    for (auto& shard : shards) {
      shard->BuildHist(root_nidx);
    }
    this->AllReduceHist(root_nidx);
    // Remember root stats
    p_tree->stat(root_nidx).sum_hess = sum_gradient.GetHess();
    auto weight = CalcWeight(param, sum_gradient);
    p_tree->stat(root_nidx).base_weight = weight;
    (*p_tree)[root_nidx].set_leaf(param.learning_rate * weight);
    // Store sum gradients
    for (auto& shard : shards) {
      shard->node_sum_gradients[root_nidx] = sum_gradient;
    }
    // Initialise root constraint
    node_value_constraints_.resize(p_tree->GetNodes().size());
    // Generate first split
    auto splits = this->EvaluateSplits({root_nidx}, p_tree);
    qexpand_->push(
        ExpandEntry(root_nidx, p_tree->GetDepth(root_nidx), splits.front(), 0));
  }
  // Repartition rows after applying |candidate|'s split: recover the bin id
  // of the split value, then move each row into the left or right child on
  // every shard.
  void UpdatePosition(const ExpandEntry& candidate, RegTree* p_tree) {
    auto nidx = candidate.nid;
    auto left_nidx = (*p_tree)[nidx].cleft();
    auto right_nidx = (*p_tree)[nidx].cright();
    // convert floating-point split_pt into corresponding bin_id
    // split_cond = -1 indicates that split_pt is less than all known cut points
    auto split_gidx = -1;
    auto fidx = candidate.split.findex;
    auto default_dir_left = candidate.split.dir == LeftDir;
    auto fidx_begin = hmat_.row_ptr[fidx];
    auto fidx_end = hmat_.row_ptr[fidx + 1];
    // Linear scan over this feature's cut values to find the matching bin.
    for (auto i = fidx_begin; i < fidx_end; ++i) {
      if (candidate.split.fvalue == hmat_.cut[i]) {
        split_gidx = static_cast<int32_t>(i);
      }
    }
    auto is_dense = info->num_nonzero == info->num_row * info->num_col;
    omp_set_num_threads(shards.size());
    // One CPU thread drives the repartition on each device shard.
#pragma omp parallel
    {
      auto cpu_thread_id = omp_get_thread_num();
      shards[cpu_thread_id]->UpdatePosition(nidx, left_nidx, right_nidx, fidx,
                                            split_gidx, default_dir_left,
                                            is_dense, fidx_begin, fidx_end);
    }
  }
  // Materialise `candidate`'s split in the tree: add the two children, record
  // the split condition and statistics, propagate value constraints to the
  // children, set their leaf weights, and finally re-partition instances on
  // the devices. Constraints must be set before child weights are computed,
  // since CalcWeight reads them.
  void ApplySplit(const ExpandEntry& candidate, RegTree* p_tree) {
    // Add new leaves
    RegTree& tree = *p_tree;
    tree.AddChilds(candidate.nid);
    auto& parent = tree[candidate.nid];
    parent.set_split(candidate.split.findex, candidate.split.fvalue,
                     candidate.split.dir == LeftDir);
    tree.stat(candidate.nid).loss_chg = candidate.split.loss_chg;
    // Set up child constraints
    node_value_constraints_.resize(tree.GetNodes().size());
    GradStats left_stats(param);
    left_stats.Add(candidate.split.left_sum);
    GradStats right_stats(param);
    right_stats.Add(candidate.split.right_sum);
    node_value_constraints_[candidate.nid].SetChild(
        param, parent.split_index(), left_stats, right_stats,
        &node_value_constraints_[parent.cleft()],
        &node_value_constraints_[parent.cright()]);
    // Configure left child
    auto left_weight =
        node_value_constraints_[parent.cleft()].CalcWeight(param, left_stats);
    // Leaf value is the constrained optimal weight scaled by the learning rate.
    tree[parent.cleft()].set_leaf(left_weight * param.learning_rate, 0);
    tree.stat(parent.cleft()).base_weight = left_weight;
    tree.stat(parent.cleft()).sum_hess = candidate.split.left_sum.GetHess();
    // Configure right child
    auto right_weight =
        node_value_constraints_[parent.cright()].CalcWeight(param, right_stats);
    tree[parent.cright()].set_leaf(right_weight * param.learning_rate, 0);
    tree.stat(parent.cright()).base_weight = right_weight;
    tree.stat(parent.cright()).sum_hess = candidate.split.right_sum.GetHess();
    // Store sum gradients
    // Every shard keeps its own copy of the per-node gradient sums.
    for (auto& shard : shards) {
      shard->node_sum_gradients[parent.cleft()] = candidate.split.left_sum;
      shard->node_sum_gradients[parent.cright()] = candidate.split.right_sum;
    }
    this->UpdatePosition(candidate, p_tree);
  }
  // Grow one tree. Drives the expand loop: pop the best candidate from the
  // priority queue (ordered depth-wise or loss-guided), apply its split, then
  // build/evaluate child histograms and push the children back as new
  // candidates. Monitor calls bracket each phase for timing.
  void UpdateTree(HostDeviceVector<bst_gpair>* gpair, DMatrix* p_fmat,
                  RegTree* p_tree) {
    // Temporarily store number of threads so we can change it back later
    // (InitRoot/UpdatePosition set it to the number of shards).
    int nthread = omp_get_max_threads();
    auto& tree = *p_tree;
    monitor.Start("InitData", dList);
    this->InitData(gpair, p_fmat, *p_tree);
    monitor.Stop("InitData", dList);
    monitor.Start("InitRoot", dList);
    this->InitRoot(p_tree);
    monitor.Stop("InitRoot", dList);
    // Timestamps give queue entries a stable tie-break order; start after the
    // entries InitRoot already pushed.
    auto timestamp = qexpand_->size();
    auto num_leaves = 1;
    while (!qexpand_->empty()) {
      auto candidate = qexpand_->top();
      qexpand_->pop();
      // A candidate may have become invalid (e.g. leaf limit reached) since
      // it was enqueued; re-check before applying.
      if (!candidate.IsValid(param, num_leaves)) continue;
      // std::cout << candidate;
      monitor.Start("ApplySplit", dList);
      this->ApplySplit(candidate, p_tree);
      monitor.Stop("ApplySplit", dList);
      num_leaves++;
      auto left_child_nidx = tree[candidate.nid].cleft();
      auto right_child_nidx = tree[candidate.nid].cright();
      // Only create child entries if needed
      if (ExpandEntry::ChildIsValid(param, tree.GetDepth(left_child_nidx),
                                    num_leaves)) {
        monitor.Start("BuildHist", dList);
        this->BuildHistLeftRight(candidate.nid, left_child_nidx,
                                 right_child_nidx);
        monitor.Stop("BuildHist", dList);
        monitor.Start("EvaluateSplits", dList);
        auto splits =
            this->EvaluateSplits({left_child_nidx, right_child_nidx}, p_tree);
        qexpand_->push(ExpandEntry(left_child_nidx,
                                   tree.GetDepth(left_child_nidx), splits[0],
                                   timestamp++));
        qexpand_->push(ExpandEntry(right_child_nidx,
                                   tree.GetDepth(right_child_nidx), splits[1],
                                   timestamp++));
        monitor.Stop("EvaluateSplits", dList);
      }
    }
    // Reset omp num threads
    omp_set_num_threads(nthread);
  }
  // Prediction-cache fast path is not implemented by this updater; returning
  // false signals that the cache was not updated.
  // NOTE(review): contract inferred from the bool return value — confirm
  // against the TreeUpdater base-class documentation.
  bool UpdatePredictionCache(const DMatrix* data,
                             std::vector<bst_float>* p_out_preds) override {
    return false;
  }
  // HostDeviceVector overload of the prediction-cache hook; likewise not
  // implemented — always reports that the cache was left untouched.
  bool UpdatePredictionCache(
      const DMatrix* data, HostDeviceVector<bst_float>* p_out_preds) override {
    return false;
  }
struct ExpandEntry {
int nid;
int depth;
DeviceSplitCandidate split;
uint64_t timestamp;
ExpandEntry(int nid, int depth, const DeviceSplitCandidate& split,
uint64_t timestamp)
: nid(nid), depth(depth), split(split), timestamp(timestamp) {}
bool IsValid(const TrainParam& param, int num_leaves) const {
if (split.loss_chg <= rt_eps) return false;
if (split.left_sum.GetHess() == 0 || split.right_sum.GetHess() == 0)
return false;
if (param.max_depth > 0 && depth == param.max_depth) return false;
if (param.max_leaves > 0 && num_leaves == param.max_leaves) return false;
return true;
}
static bool ChildIsValid(const TrainParam& param, int depth,
int num_leaves) {
if (param.max_depth > 0 && depth == param.max_depth) return false;
if (param.max_leaves > 0 && num_leaves == param.max_leaves) return false;
return true;
}
friend std::ostream& operator<<(std::ostream& os, const ExpandEntry& e) {
os << "ExpandEntry: \n";
os << "nidx: " << e.nid << "\n";
os << "depth: " << e.depth << "\n";
os << "loss: " << e.split.loss_chg << "\n";
os << "left_sum: " << e.split.left_sum << "\n";
os << "right_sum: " << e.split.right_sum << "\n";
return os;
}
};
inline static bool depth_wise(ExpandEntry lhs, ExpandEntry rhs) {
if (lhs.depth == rhs.depth) {
return lhs.timestamp > rhs.timestamp; // favor small timestamp
} else {
return lhs.depth > rhs.depth; // favor small depth
}
}
inline static bool loss_guide(ExpandEntry lhs, ExpandEntry rhs) {
if (lhs.split.loss_chg == rhs.split.loss_chg) {
return lhs.timestamp > rhs.timestamp; // favor small timestamp
} else {
return lhs.split.loss_chg < rhs.split.loss_chg; // favor large loss_chg
}
}
  TrainParam param;             // training hyper-parameters
  common::HistCutMatrix hmat_;  // feature cut points (bin boundaries)
  common::GHistIndexMatrix gmat_;  // quantised (binned) input matrix
  MetaInfo* info;               // meta info of the current DMatrix (not owned)
  bool initialised;
  int n_devices;                // number of GPUs in use — TODO confirm
  int n_bins;                   // presumably total histogram bin count; verify
  std::vector<std::unique_ptr<DeviceShard>> shards;  // one shard per device
  ColumnSampler column_sampler;  // column (feature) sampling state
  typedef std::priority_queue<ExpandEntry, std::vector<ExpandEntry>,
                              std::function<bool(ExpandEntry, ExpandEntry)>>
      ExpandQueue;
  // Expansion queue; ordered by the depth_wise or loss_guide comparator.
  std::unique_ptr<ExpandQueue> qexpand_;
  common::Monitor monitor;      // phase timing instrumentation
  dh::AllReducer reducer;       // multi-GPU reduction helper — TODO confirm use
  std::vector<ValueConstraint> node_value_constraints_;  // per-node constraints
  std::vector<int> dList;       // presumably the device ordinal list; verify
};
// Register this updater so it can be selected by name ("grow_gpu_hist").
XGBOOST_REGISTER_TREE_UPDATER(GPUHistMaker, "grow_gpu_hist")
    .describe("Grow tree with GPU.")
    .set_body([]() { return new GPUHistMaker(); });
} // namespace tree
} // namespace xgboost
|
63d1abef7a278671f27cf8ede2963c0be0e91ea6.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
float h_A[]= {
0.9873887992938584, 0.7518116992518353, 0.8598940369927895, 0.7155885551816444, 0.8797425552930407, 0.587458783832594, 0.6267841600435446, 0.8217790907969758, 0.818894981367343, 0.7872291426722382, 0.6357493323386308, 0.5550871670133021, 0.8461501536000029, 0.7977451395282227, 0.7555389775190972, 0.5292387580096789, 0.5408085398597656, 0.8163708105099419, 0.5610129281465945, 0.5963355184694463, 0.6615023855648287, 0.8243805863473359, 0.5513643866625441, 0.6167492041543463, 0.6084409256700565, 0.9325979278891088, 0.7816587716738894, 0.6195649477892154, 0.8220997730263204, 0.5107161504409763, 0.7523904022882593, 0.5058736893901825, 0.606176072120739, 0.6434569290861027, 0.7960049633446116, 0.8391603079006115, 0.9028060550539083, 0.7012005336764833, 0.7196394099649538, 0.814032573839834, 0.9332142819092752, 0.7643479164281066, 0.8572039249712273, 0.6581116332456617, 0.8831502252329125, 0.735246302770505, 0.9052967234705973, 0.9386105386049532, 0.6445925301281801, 0.5809045836844399, 0.8756480705242975, 0.9547324386468568, 0.7634880643823876, 0.63017750870097, 0.9706364636854989, 0.684193584527232, 0.7099757252235765, 0.8856831456976266, 0.7217729723184566, 0.882024111590653, 0.7581537817900296, 0.6433412540275687, 0.6035784185185884, 0.915318950046528, 0.977512981758409, 0.5839927084681016, 0.9784115088385195, 0.6514832437172302, 0.8549174047743417, 0.5563793142977376, 0.605903117136309, 0.5537608356289365, 0.6169820307841094, 0.5519704501953835, 0.8571574272739233, 0.6131752749151639, 0.6292018912171328, 0.7613884907224823, 0.6483208000700782, 0.9676032497501634, 0.7211267524828632, 0.54161244110958, 0.5907961081074111, 0.6392989307993807, 0.5312485506823461, 0.6426775737775133, 0.6287493190025022, 0.6853275513104178, 0.801715250665508, 0.9806268492785526, 0.8335524143211881, 0.9523226273681229, 0.7979322321917615, 0.5761906857975251, 0.7609614590249472, 0.6962710813985764, 0.82329046975484, 0.6188244405105823, 0.6709106285130378, 0.747187208460914, 
0.518298561325272, 0.9366258706162892, 0.7308534791679668, 0.5523018767708051, 0.911051594106451, 0.673686766883247, 0.9271637293025594, 0.5400298883718841, 0.5075619683495305, 0.5631642672841077, 0.9856701528710157, 0.5029537179843656, 0.5873715239577169, 0.906177537898226, 0.807870656294317, 0.7406076258108997, 0.5309122479310391, 0.9634807267301542, 0.8388565572157327, 0.6516681227367411, 0.8040573439689465, 0.8426067325019722, 0.6862521693092227, 0.770866606504957, 0.966250695695041, 0.9914794295963107, 0.9940888515692078, 0.5943596829204034, 0.7675196897973086, 0.834066484777552, 0.6259818894895984, 0.7678372482110907, 0.7481588138043995, 0.9829697035966287, 0.5703672330089176, 0.6816343339468353, 0.7491289779751693, 0.6092983201326765, 0.9558026008937839, 0.8598379123250666, 0.5832153473618932, 0.9107107140854593, 0.8656108605804345, 0.5240922235248828, 0.7547578744669943, 0.7879631371868944, 0.9720893781370343, 0.706654435937283, 0.8687811722736392, 0.6128092834034508, 0.9832943506687434, 0.8960315888786147, 0.6574120018114272, 0.9184966104333021, 0.8322931005886973, 0.6598222681366639, 0.6905279159038262, 0.9429690741166823, 0.5738758425583128, 0.6074647566406836, 0.9379260564008778, 0.5144041845914757, 0.6805774821279678, 0.6560911555350712, 0.8029975623197818, 0.8386971428278434, 0.641583085490713, 0.6694589742843875, 0.9465007652728994, 0.677386529587717, 0.8581123594398272, 0.985885033034536, 0.5683837089240931, 0.6095121950923683, 0.5741860297389088, 0.925926847289021, 0.6160676124582172, 0.8874308495922196, 0.6979003989459664, 0.5069924947902891, 0.8551803770570955, 0.6942747048544508, 0.683759955437177, 0.9038374300290415, 0.6352176769559298, 0.7649224960110357, 0.8150810778809752, 0.622761078825717, 0.895941646999187, 0.8684371931043464, 0.5909344102941986, 0.7794499197136305, 0.5591928397823457, 0.9780197375505989, 0.855985506811566, 0.7469876487257581, 0.548399917437048, 0.5419548829095413, 0.85090366024281, 0.5982106334019789, 0.7363967412117727, 
0.5166940647358599, 0.7486316654216305, 0.9695241405093227, 0.702621234054025, 0.968417262184461, 0.5511685340822354, 0.7269181633451169, 0.7027686640332665, 0.5770704674256446, 0.610897596933887, 0.8086493414676644, 0.8716198458166471, 0.680927438041169, 0.7730517340807052, 0.8438397062201423, 0.7240004605973127, 0.7955193000746689, 0.9105209074583045, 0.5353457297015245, 0.5783003174045958, 0.8932656906865397, 0.7920268837944431, 0.6081406453190547, 0.5437838995685991, 0.5040520258200972, 0.7563145910619486, 0.9565505805524205, 0.5768297704393688, 0.8337801665797757, 0.692709644387637, 0.7037022751242846, 0.8250775049288817, 0.6982573030691286, 0.6385703054173311, 0.8901686884653965, 0.8691044027920404, 0.6434982985686291, 0.650907917102282, 0.7574465848489202, 0.9187222359931069, 0.9537230130118233, 0.7429676147773174, 0.5075474870857042, 0.5276533839836797, 0.8482832398730646, 0.6043779883537685, 0.9994343938372521, 0.7139737092667813, 0.852306786155933, 0.550973208912384, 0.9321179130951442, 0.935564718328989, 0.7608331778895917, 0.7526719187714597, 0.8644567394899946, 0.7627254656885976, 0.6729053192651407, 0.7559562853360888, 0.5382941977731497, 0.6826721482220992, 0.5915794348180616, 0.5154105459823537, 0.8728169991529307, 0.7247913050475217, 0.664857645536003, 0.7321998924143291, 0.6676480472824828, 0.6869264578933358, 0.8575117947360786, 0.6850691221716878, 0.891030255489857, 0.9763827707377857, 0.5810755825490929, 0.87481602764215, 0.6346417988604247, 0.9073063572906421, 0.6589032436094528, 0.6263181749160205, 0.5228315250585129, 0.8789869213331138, 0.8831761226579544, 0.5650108778015349, 0.9963376737661727, 0.5180604480066486, 0.5282625883289407, 0.5901106196379324, 0.5938981093934128, 0.7559324097538445, 0.5999583093892109, 0.8339015810263406, 0.9689040705777066, 0.6195002410591459, 0.6843522088798009, 0.9732095143116724, 0.7679268475703316, 0.8867255160700478, 0.9800235551340614, 0.7906231062110758, 0.5661002926346704, 0.6669240200603289, 
0.8533419253191146, 0.9281929293234508, 0.5703898433997759, 0.680833082738628, 0.7251064795681637, 0.7483648850281328, 0.9650960534927178, 0.7687464854531383, 0.6616253139648867, 0.7667357246285597, 0.781715650929798, 0.7131457212317911, 0.6512625095571962, 0.5551442985421178, 0.9645370054033653, 0.6370885504176269, 0.6512580260238058, 0.6857103114748357, 0.6980972466314304, 0.9613502135331046, 0.5168897955571674, 0.6454097143519825, 0.8658894491827238, 0.8039386964878721, 0.722655488761203, 0.804844537343654, 0.6512327522177668, 0.5117040480739877, 0.9543060465550064, 0.6857821765108075, 0.7100018880865389, 0.6976186251145577, 0.6964120095750104, 0.9415602339202684, 0.9831023561342398, 0.78095440772624, 0.55041336112988, 0.6065601574304764, 0.9429543218969325, 0.7674690881802468, 0.6161719984917517, 0.5803077366063498, 0.8760417817529479, 0.9494647471127876, 0.9099851304402415, 0.5497163863626613, 0.6305383524623622, 0.5437045612509942, 0.9136293525084882, 0.7318611584057553, 0.6371043868516049, 0.5333667745975267, 0.8379749091271791, 0.664820248911362, 0.5664260606918152, 0.7544060232997121, 0.9297524068181686, 0.9180898522091927, 0.6325339497089348, 0.8640845531533917, 0.5395553530054222, 0.6665549185681676, 0.6559016159838257, 0.5943869131177766, 0.9020683950098671, 0.706885505153577, 0.8533562559583731, 0.5680467947171077, 0.6379703254505011, 0.9275364488655524, 0.63427794235475, 0.5204359614285576, 0.5837202743611076, 0.5197531303207465, 0.733735555252905, 0.8635854087798438, 0.7308675941733167, 0.7496803428457608, 0.5326748369111625, 0.9735000921134471, 0.5840481121132972, 0.8251425137150197, 0.9862139226131214, 0.536967259258061, 0.9815426367547764, 0.5510447279690006, 0.93040031985411, 0.9978628113712606, 0.868372806575608, 0.5156173366795616, 0.8006590357455698, 0.5328998958678239, 0.6090429205693715, 0.5748196293438881, 0.6797380309528254, 0.5061406811129958, 0.7890955168021314, 0.7468284312386848, 0.7995851422972794, 0.9575932002007685, 
0.7574117631629482, 0.8990912995948614, 0.6706076462287267, 0.9256481849545741, 0.758708431923616, 0.8063524504521644, 0.6172003152794898, 0.7501484424264333, 0.7938735957844767, 0.7512640315127024, 0.9156461950750487, 0.6558783781435606, 0.9430122611716341, 0.7971646983654626, 0.5521716250093912, 0.7735692927966907, 0.5230714067203319, 0.7825264805439227, 0.6418739550138816, 0.7563683922854213, 0.9665634886212088, 0.7977156326453572, 0.5741836069248272, 0.8509994904088166, 0.949515272567327, 0.686998792130099, 0.6477396082624807, 0.9122659688563781, 0.7249348640361367, 0.9020419788588603, 0.8106359558438798, 0.8315111270709667, 0.64951842745161, 0.5134593054909165, 0.5566124339940084, 0.6701801055379537, 0.5196038579969711, 0.9748934454742821, 0.5651590840589593, 0.6714294366924799, 0.7863828727018196, 0.913858802503279, 0.809917271789527, 0.9081696787816929, 0.974176842501167, 0.8409665727274855, 0.8933133068985109, 0.5650237361290951, 0.8374208076823695, 0.9725178653476566, 0.691128992251266, 0.8751440072557719, 0.7053925890136412, 0.699350021715855, 0.8450247389883085, 0.8367710863473194, 0.9546152445128686, 0.6942381701549085, 0.6085674036186964, 0.6818954287371239, 0.8153217721476993, 0.5203522161638577, 0.6242281812220885, 0.7220591234127187, 0.8492281398417656, 0.9675597045379287, 0.6767304446653533, 0.7720794407118511, 0.8895473067326978, 0.6226386897238801, 0.7065902138335216, 0.7461493115792395, 0.5284147748971431, 0.7064138718899026, 0.8486717316806678, 0.7452348837799767, 0.9169223242439806, 0.7195800967525802, 0.7106975873561505, 0.7613378577150366, 0.7047971283007108, 0.5877299576665542, 0.8819016979608973, 0.6343976723670517, 0.9699223817830688, 0.734668422333335, 0.8380547270590863, 0.5682157108082594, 0.7005338944801736, 0.9889027074021162, 0.7697621933273877, 0.9157689647258015, 0.7030957793947004, 0.6488721900392699, 0.7921238459341038, 0.7081563009603621, 0.8529680145174764, 0.5501213819692206, 0.9806322253603919, 0.8203514994249395, 
0.6824905613670414, 0.5834639748665214, 0.9692467494754631, 0.5717500870275207, 0.8430790705155815, 0.7686240284288777, 0.6465376621086197, 0.5358453593209238, 0.6844386351683291, 0.6712332005145685, 0.871889730385432, 0.9544128184399707, 0.6730161393706668, 0.8206495391467982, 0.8440463809721327, 0.6228005335797644, 0.6862210880707785, 0.8101489856494852, 0.5115898755007777, 0.6861528142909818, 0.7000407491424825, 0.7835174551525574, 0.987838307216956, 0.8505623915916025, 0.7212200618116493, 0.6246698307070163, 0.8339384793662157, 0.6887862865213743, 0.7724905123233807, 0.9192473628267951, 0.7825012627754433, 0.6152579630310967, 0.7162371023580205, 0.7096045299412352, 0.553240285676697, 0.604787426542956, 0.5180470981007429, 0.9726713449986202, 0.7314275065565115, 0.5365298208676914, 0.7386709733166377, 0.5514026978702083, 0.784500668097285, 0.9567590029181687, 0.9819910783250801, 0.7440072840868148, 0.6125839365798385, 0.8905042172122765, 0.8817844435863067, 0.8512991451263401, 0.6917757960395037, 0.5211507582210355, 0.8320884658811438, 0.7275526589445434, 0.8362428074299335, 0.5861152847063916, 0.9515809256753556, 0.6765576461772067, 0.7415408152368544, 0.5135566288360056, 0.5662900491802862, 0.8877930986622692, 0.8559453596822503, 0.8690239842826766, 0.5932881374103105, 0.8477238865051469, 0.9935402879515457, 0.603950296096841, 0.8899416617718054, 0.6684252492290257, 0.6867161129076065, 0.5481489015637779, 0.8516605024137276, 0.6697455535196144, 0.5886181152760277, 0.7933374801185402, 0.5863378661993146, 0.5276553169576783, 0.6721302967261746, 0.6631416062977276, 0.9356530040897495, 0.5095006178486912, 0.7113885164699163, 0.7006150163125408, 0.6073329420764753, 0.9954589453604181, 0.9003499084389798, 0.7481295369947254, 0.7922804499615458, 0.85585361407768, 0.5142092130234189, 0.8477153580977111, 0.946324445939795, 0.7719735908963603, 0.91582336619714, 0.5002952381620078, 0.580959961104103, 0.6507166288630877, 0.8261153893589215, 0.9993180793249838, 
0.8974172499600944, 0.8719998871686087, 0.7947200176923674, 0.9608470893505232, 0.8110587203260455, 0.7725943496633663, 0.8659060687224153, 0.51702002479546, 0.7356929706162326, 0.7336955671347789, 0.6999823334612811, 0.7174106321234596, 0.9487653711955819, 0.810831626796905, 0.6180236171489433, 0.9669904726084952, 0.6852119630422148, 0.9063943898496873, 0.5849740987201735, 0.7002424241445055, 0.5038415122374568, 0.6015461030116267, 0.9748354167605575, 0.5110795731189022, 0.8100067744229016, 0.5733595197795776, 0.9534187536207501, 0.9515415950941613, 0.7830184920198511, 0.8753342069000705, 0.5255641041271396, 0.8532316620591187, 0.6953592663450643, 0.6210954292784914, 0.5074243607984454, 0.7206432346215552, 0.7448164616196414, 0.6006678973748265, 0.6819166572477112, 0.5132365595825821, 0.7587411675444842, 0.5233120096555277, 0.7910678385424161, 0.9218974386334835, 0.5718246717588121, 0.9554952723221313, 0.9048385902780773, 0.9376730838075168, 0.6574279541691859, 0.5854076250286624, 0.5008399456675952, 0.9749598658468381, 0.7221839977680268, 0.5431379037657579, 0.6334022485525458, 0.6417610270318581, 0.5910564287623148, 0.8653772929128911, 0.5350040710089436, 0.8769541228832378, 0.7356779217289777, 0.9052991423135377, 0.567185081334656, 0.982558035917428, 0.6630008789438311, 0.9043078404818499, 0.837863405090878, 0.7633462228976969, 0.6387379192710623, 0.5948447091272709, 0.7287043894535463, 0.5545002467465363, 0.925247311899189, 0.7896684213657962, 0.6584713403813098, 0.7709999832081317, 0.9954056096333697, 0.7374027827412089, 0.6621722413970081, 0.7785857447486094, 0.8445328743839065, 0.9158012277091989, 0.9575327105859605, 0.5257103009135398, 0.65428663952549, 0.5750848742950461, 0.9327447694891209, 0.9402892844192234, 0.6815840099317352, 0.7885855380336404, 0.9627699035684346, 0.9638362725045371, 0.9262143778696929, 0.7179475324764755, 0.6084850562812917, 0.7371555099760889, 0.9441719795879823, 0.7641697757574483, 0.9629044485771991, 0.648312345677337, 
0.5122006504170717, 0.7986999450352197, 0.5093200892525207, 0.9026511386735185, 0.6268841435153525, 0.863297627716402, 0.9906210977403493, 0.8910432016176726, 0.8665265113678873, 0.637797555206671, 0.5759838522378825, 0.8448416396683712, 0.5110223206151283, 0.5129018975342297, 0.6707369568705517, 0.6207191068862427, 0.5921760374852525, 0.7211631949014823, 0.942437645013692, 0.8841359460244997, 0.7205626915780556, 0.5666770681212792, 0.7601032560593831, 0.6862951945787716, 0.5300083435330237, 0.765042936737357, 0.8090517675044298, 0.6991627162332097, 0.8078110685697564, 0.6769017606894595, 0.6538504580064564, 0.7450865723003313, 0.5897220674899502, 0.6148351110857895, 0.5241240501300686, 0.8578694494753915, 0.5627688046821863, 0.6958871547432977, 0.7220639220692409, 0.9115616081554974, 0.7795972686919863, 0.9455834376852315, 0.9079674760205528, 0.6775286420205712, 0.887392658618295, 0.8432436205204912, 0.9478502109623947, 0.7848358642626148, 0.7764240901328686, 0.5299158340502093, 0.5623006443730525, 0.6618054156426989, 0.6382948266729133, 0.905648000248072, 0.600076481479098, 0.788846984017006, 0.942510863620593, 0.6154150975381208, 0.5383863593501123, 0.9386379702156725, 0.9279887422808115, 0.6547366327923393, 0.6298822932153483, 0.6139763723339837, 0.5638563626122246, 0.9542081160479825, 0.5596408079633629, 0.5820056998779202, 0.661611834221896, 0.9448067739513049, 0.7208292070602556, 0.71969207982869, 0.8691233167990817, 0.8209674235860165, 0.9403547329285875, 0.7001431321880657, 0.60332770388996, 0.8270608081778772, 0.904358089147654, 0.715491892015604, 0.5276054565505, 0.6280174877560531, 0.5778040088877303, 0.5013989150761256, 0.8343070266116003, 0.7247326370520998, 0.5583815164624097, 0.9492705617969692, 0.6471579441946751, 0.6795564462217385, 0.7897745095284089, 0.9770224498198543, 0.8957805235481333, 0.5423929899055222, 0.5497638242492726, 0.5584578950774022, 0.8124314765497315, 0.5187743037316634, 0.5015059790416996, 0.5382073041037504, 0.634683323472422, 
0.5883023522478059, 0.5439940807562798, 0.9215328505321165, 0.8758680035920889, 0.7998349225419288, 0.5417486685909989, 0.8227370868417367, 0.9652421499190844, 0.6649222967184754, 0.8357966322019649, 0.927708498219958, 0.7186819032636205, 0.9480469398913703, 0.6121704378660365, 0.7354519369469874, 0.852351017755436, 0.666415973006079, 0.5107955385229805, 0.8114386565673553, 0.6160931628157491, 0.7034629087580055, 0.7712681650618738, 0.931522156025228, 0.8290796674897871, 0.9710505246424211, 0.6728331737059129, 0.6383415298490998, 0.9000891844514854, 0.8539881255474087, 0.6841302548968518, 0.8492232503888326, 0.5222486896014752, 0.5297349586244403, 0.7773631870650417, 0.7222942504283876, 0.95172361271578, 0.5736861323320599, 0.6522021820809887, 0.584399582296829, 0.7080634820293137, 0.6761027116957701, 0.5513831189550765, 0.8402726003151456, 0.9355836214646376, 0.5248596232347256, 0.9289032888962825, 0.816142131142414, 0.5520877758136933, 0.9579246047894387, 0.710273173978247, 0.6833942905681154, 0.8047344403925312, 0.6454781832725036, 0.8791790016000758, 0.7323902841736332, 0.753223312358141, 0.8951747442789569, 0.7433258843796176, 0.5921862113661965, 0.5971416587757398, 0.7135692180643698, 0.9017003787263047, 0.6688884302707774, 0.9721573390545875, 0.8578080151024081, 0.583217888247113, 0.5899661197964297, 0.7019863197452507, 0.9293149287848175, 0.8383217634692846, 0.5107063141767907, 0.6541870061559776, 0.8413751038539888, 0.5667954800176009, 0.5324670287250333, 0.9101636713406163, 0.7977153682242727, 0.8287572352803227, 0.7714548830204764, 0.6507439191749751, 0.5017548276236616, 0.8656335932763315, 0.5043366900889235, 0.9722299516823705, 0.9936647365173927, 0.6679678268244218, 0.7017888788504192, 0.5083322355494563, 0.8187244421301461, 0.7797979423916968, 0.6742662486615709, 0.5188834711495063, 0.7617738961071028, 0.5721371094120598, 0.6097260059716954, 0.7370722634086724, 0.6711299176724804, 0.6044538735398881, 0.9738817575724046, 0.7070246538659721, 
0.852297793197975, 0.5426739017077423, 0.9245946431788739, 0.57930020099923, 0.7105064553769993, 0.8965556219427626, 0.5822520240985188, 0.6352382787724777, 0.8607344359514209, 0.8059193413998875, 0.8636140961150588, 0.9310910307548772, 0.8638246265062702, 0.6634730132744152, 0.8875100228321278, 0.5532720081256295, 0.835366110661536, 0.5589804561927798, 0.6738953026831025, 0.5069330772630096, 0.9811773255535292, 0.7523468533347598, 0.5038254299925531, 0.5163153834652874, 0.821636672741925, 0.6044713752347893, 0.8646379166772372, 0.7022417080010763, 0.9372837290910088, 0.5494301802419779, 0.8369631995981521, 0.6821936979926791, 0.8358279394274424, 0.6169579538443483, 0.5828107807220058, 0.6270741816036912, 0.5169275700352334, 0.7479553143958468, 0.8503664805186155, 0.8541561838996096, 0.5104887735620496, 0.5346812325414421, 0.8602484999771941, 0.5591968377104881, 0.6608154356032825, 0.6011866047781195, 0.6376354075151747, 0.6709696650959318, 0.626053819105727, 0.5770500237242404, 0.6202117450755023, 0.8519658297920467, 0.5126088506853511, 0.6766487314230607, 0.7734899634550203, 0.6138884991238456, 0.9707053155758603, 0.6895502145808594, 0.5739856656280006, 0.6197547692578926, 0.5562273578418818, 0.9085701668046209, 0.704456648007084, 0.5864907701542276, 0.814653597937131, 0.794305619244001, 0.7328913252282012, 0.7987260251520019, 0.6309803979959722, 0.7676347771937209, 0.8658128466490442, 0.6295921998966577, 0.5066593457071293, 0.5761235474438036, 0.8920850836573373, 0.6221973098796418, 0.8624486845421405, 0.6780002427886087, 0.5194145385071753, 0.5217080624827046, 0.6881501051777025, 0.7565100533172109, 0.8011067900756234, 0.6141950796415435, 0.6730379840341634, 0.6503075536685277, 0.7493405950077608, 0.6352471867547964, 0.5779925369493126, 0.7547809147142139, 0.6361599203148367, 0.5680089891160098, 0.6947286902696099, 0.9759290622446543, 0.9090167866327912, 0.9895913070291293, 0.6178474895219674, 0.5708131686829274, 0.9814425902984489, 0.5499475469832209, 
0.5714350423534797, 0.7985130117015344, 0.8035555938190182, 0.922333304625689, 0.7834668654589376, 0.8531368339810023, 0.6634081877142862, 0.6079291117718983, 0.6676135603114819, 0.6442202689869819, 0.6734866799555255, 0.5183209309242458, 0.6723718631467628, 0.9749627478098409, 0.906680697855986, 0.5355104193178007, 0.9258718694374322, 0.7631564009320196, 0.514835645587129, 0.8626967014573681, 0.5129921417563168, 0.9090552720718426, 0.7672500748697505, 0.9805477520082009, 0.6860282291368236, 0.9499832918674227, 0.6877318048511916, 0.7603607710030336, 0.8611811539995864, 0.7629044537491323, 0.8110572474405566, 0.7514310381470863, 0.998253118494552, 0.6681528143034332, 0.619219832575227, 0.7715641134585902, 0.9222377849483172, 0.906240691908414, 0.549208480742204, 0.9010636943269463, 0.8995010002391182, 0.6051045802206454, 0.9226902468713644, 0.7931084776437555, 0.6585914793534584, 0.5601240147318856, 0.5866716413600057, 0.7848700172561363, 0.680707714544049, 0.8395406788652708, 0.7823887507457589, 0.8496382135859313, 0.9493243949842407, 0.7889211875178391, 0.9117305097667605, 0.760447796377159, 0.9835484994521995, 0.9960579843210042, 0.7272420968306597, 0.5747042652366237, 0.5482793293596866, 0.8103912184221156, 0.8201310789702989, 0.9575389545849866, 0.5758837896142366, 0.6444761200517921, 0.8348750153091231, 0.6739935345157226, 0.9643880799292265, 0.5261763314806887, 0.7495078549856391, 0.7169058575719638, 0.6888556207133727, 0.9219216284054566, 0.9318075158718464, 0.946899745257167, 0.6812970971581149, 0.6922192571750592, 0.6275433755179708, 0.6358797576555584, 0.7022988517700277, 0.8284505040984136, 0.6976715755343152, 0.5160586248813319, 0.9548739335999195, 0.9004962034764263, 0.5831684961413828, 0.9140182838042485, 0.6342052028775893, 0.8432964674632332, 0.533659434576921, 0.5607394505251804, 0.6400592009336713, 0.8573548320401831, 0.5938438320515836, 0.8471809549116193, 0.8234543192143268, 0.5393429056477121, 0.9199756126062356, 0.6864680251157875, 
0.5524130510612235, 0.7676331589271698, 0.7655116025427776, 0.9953025921779459, 0.5570419938227202, 0.9375172692938039, 0.8853303360086092, 0.6175030299463964, 0.9610472210633798, 0.9793248335277396, 0.5191287014376579, 0.7753567788242528, 0.7955941019744179, 0.6177725343482777, 0.7633507462052056, 0.5284562292793976, 0.9701896681063125, 0.8072347072472058, 0.733189353883057, 0.5366567094934325, 0.7043852277373038, 0.5890813236043562, 0.5128749592266433, 0.662925715314866, 0.634118704998855, 0.6073286126450305, 0.6339794898714393, 0.9768593614997104, 0.9403310291891498, 0.6613222526244944, 0.8688283540651561, 0.6530872785635146, 0.6823556177341259, 0.8965312739261171, 0.9580323639255706, 0.7094752316152784, 0.918482500424982, 0.7982409299403675, 0.6484321851363647, 0.574382527592835, 0.756294658808444, 0.5565721021824525, 0.5645684831616065, 0.8739784566942836, 0.9873306332583123, 0.6252512468057462, 0.5613924201032876, 0.7389377280453467, 0.6643104747289368, 0.5304822919651617, 0.7979964493627698, 0.6923930751660302, 0.8323096466661393, 0.9735570617704836, 0.5891404986546582, 0.593401389404634, 0.8192316372367301, 0.9237068295025916, 0.6310494756440153, 0.5581849391728807, 0.8510849728043521, 0.5613400282745914, 0.9432726497815518, 0.7141715839921408, 0.9057275545421295, 0.5662998730997464, 0.6568582494975623, 0.6852846273228743, 0.8042002699574438, 0.5105769787671085, 0.7233736503660408, 0.5154833872497582, 0.568486672565546, 0.594443472026247, 0.7948245274034946, 0.5793937766879824, 0.8600956771572617, 0.5320358034617989, 0.9618695984661347, 0.6386498162402656, 0.7838922524187857, 0.8365156140047558, 0.7214695265549724, 0.5814916389385755, 0.7337945470416782, 0.9149735523963454, 0.7069045149880879, 0.5937918340524155, 0.5870837698037336, 0.7760239321042872, 0.5566682240873941, 0.8701858058033329, 0.559957905083224, 0.6868775103852829, 0.6049275948089552, 0.5078142933745745, 0.6561567686059698, 0.9215191193948356, 0.9987068896756399, 0.6503803107297433, 
0.9789303619630705, 0.8244886878099575, 0.533970627566881, 0.7823262533935302, 0.8337203859266198, 0.9186001666762071, 0.8612054541402141, 0.6980752332923608, 0.546064862925015, 0.5342341323827181, 0.5841958591588285, 0.7068364820395158, 0.7431607457789626, 0.9352208459670728, 0.8896984989878948, 0.665090196088792, 0.867544278291807, 0.8047371189358428, 0.7172106943146808, 0.5700442417707028, 0.6901429390349088, 0.5596744895598463, 0.7914726333497988, 0.7444565892434192, 0.681840507109756, 0.6227365016031077, 0.5698373434516414, 0.9585100158021241, 0.8330946677632858, 0.9784114892442951, 0.6981278505348296, 0.7700433741729185, 0.6792122274191101, 0.6366476335755539, 0.6942501550318357, 0.9253623727608254, 0.8743869264714181, 0.9846218259186119, 0.7381229735146361, 0.7868145252359038, 0.9022077208149559, 0.6412850102254031, 0.8843707959441309, 0.525813193409796, 0.9010322176428593, 0.8529121450082326, 0.8159254051643297, 0.8657496703058141, 0.7310938215212875, 0.6693534264381795, 0.6081908730780452, 0.5078327869929541, 0.6675349357039286, 0.8329594148439462, 0.6386257269545503, 0.9591838494332057, 0.5171191514607427, 0.8250080292591149, 0.8021918525827711, 0.6240963078146204, 0.6457766937961278, 0.5749080073475301, 0.6092564011025896, 0.7789152677146471, 0.6606618315608496, 0.7061804433153324, 0.5990220416748253, 0.7167414490738575, 0.9484018025959075, 0.9776373065899113, 0.5453540066057403, 0.6557048771442111, 0.9770257825095994, 0.8208287243995628, 0.8557261688043865, 0.9365278118615676, 0.838600723388286, 0.7691412774605069, 0.541459049890217, 0.7180585242481053, 0.5236421046496819, 0.6510866362800691, 0.7312937130588637, 0.6910996167770598, 0.6896662579893474, 0.6545135355239611, 0.9865893564246024, 0.7219395811393453, 0.767728835627317, 0.9742175933968511, 0.6423920162844072, 0.5030002267625844, 0.7805394429434502, 0.583170405113753, 0.8183732553789314, 0.8148633703880361, 0.8085945968220128, 0.9165476813354043, 0.7553756952814482, 0.5137261597678339, 
0.721347047682237, 0.6946931478569904, 0.9417279372625851, 0.7728628051376001, 0.7968472591646721, 0.9662044017616189, 0.7877198818958219, 0.7743699516371786, 0.7480903177961968, 0.6683205963221507, 0.5619766221505266, 0.8735973865084579, 0.7234470993613408, 0.6603994612739229, 0.8426060209887806, 0.6740302623469663, 0.9035155963656075, 0.6129785355306381, 0.6625335366056964, 0.6361148890307096, 0.7908923689539582, 0.8452976523762015, 0.5168369797047809, 0.9201647478523824, 0.8320107971030108, 0.7976045095905966, 0.8553390032399679, 0.9990966041766876, 0.5888320460009936, 0.8463714940095114, 0.6383322593457366, 0.5187035016222965, 0.8202002830813369, 0.793093255552435, 0.9691209268577288, 0.6216722337652812, 0.7984991700991679, 0.8407511368146738, 0.5638720447278417, 0.515413076038455, 0.5531994457273571, 0.9299593296115144, 0.5668408990302968, 0.5521774659220027, 0.9326922060082296, 0.9259433492611779, 0.6662838328584184, 0.799808380770201, 0.7374960491378271, 0.5505121481395759, 0.5208967641601245, 0.5700765120263961, 0.9625008398125984, 0.8181170003865718, 0.8207561789153588, 0.9245508365004179, 0.5261435565486274, 0.6247926339144809, 0.7522210450172375, 0.5772851722520327, 0.8917841795495183, 0.5555253768364711, 0.5781479272063148, 0.9487085818671304, 0.5041632945328296, 0.5081146910859649, 0.5791108143367838, 0.7580900802673299, 0.5678307011374661, 0.7354991004488236, 0.7259114791949398, 0.5515278298721178, 0.6865217434019175, 0.790982422313669, 0.6793471171481189, 0.8923010653841341, 0.674624407214061, 0.6385002046554912, 0.6736653988105925, 0.8023843444579186, 0.5955901079078598, 0.9279549580419815, 0.8089958775688229, 0.6201689744414038, 0.5257882065642188, 0.65454071137272, 0.8426701061394113, 0.8601234660163113, 0.7390534707305672, 0.8415096372410609, 0.6998523333068822, 0.5303867615610325, 0.5117662875829172, 0.6021839461879711, 0.6142739759881944, 0.6265751054421258, 0.8751803454950071, 0.6075217130472317, 0.8611795599872722, 0.7726667389659722, 
0.866747480914604, 0.7283244235173543, 0.5572137318096201, 0.5373894018489307, 0.9893486339143355, 0.9333162859252834, 0.6864114985291957, 0.6237082155546392, 0.7275215851983154, 0.919660250113697, 0.8908150520192266, 0.8911797988021533, 0.8224829578563083, 0.6712394924189643, 0.8249961661091952, 0.9727623967072361, 0.8884312685511355, 0.6922801866008519, 0.6031860144026449, 0.6527665466923591, 0.5753437286895564, 0.6197467156461114, 0.751226533722879, 0.7960781043721632, 0.8856852588373404, 0.6732690011045864, 0.6505248066373865, 0.7584456067966638, 0.5172164748801227, 0.5766599110641248, 0.7105342033937381, 0.9841448695215047, 0.6067888239328546, 0.5303088375088798, 0.6682100191795148, 0.7530130795877281, 0.7621049427010862, 0.7409890955884882, 0.9880016603229849, 0.9027377669984347, 0.7200918113259207, 0.9816907189261395, 0.8313937744851118, 0.8821471789743529, 0.8428474963856065, 0.686459563587485, 0.7236633950603435, 0.6279475578641325, 0.5530378153345542, 0.8280342069696734, 0.5831296815498765, 0.7309537297075459, 0.5497286650665941, 0.9496104973452879, 0.8913824938394426, 0.7708583858924642, 0.6775361148146136, 0.7627736681038344, 0.7341813047807181, 0.7802086396651471, 0.6111277939896712, 0.5490286952643797, 0.7562168283023454, 0.8647510556594236, 0.8784151847902903, 0.8426734605180034, 0.6829016492887707, 0.5518496793808838, 0.7512867947667048, 0.7097589102559632, 0.7200545520717985, 0.8969926214560151, 0.7166897444881704, 0.9477809956669567, 0.8696349258860455, 0.8836696136852488, 0.8751350632409691, 0.6147781634235743, 0.5841730785261305, 0.6889989172246067, 0.656643224060296, 0.9099243260043262, 0.7822208441723049, 0.6280269525255031, 0.9987036626435553, 0.5570499161369904, 0.7908236317372199, 0.8637018753327514, 0.5907701824553611, 0.5776935567960886, 0.8290296438327065, 0.7435613246129783, 0.6015286749755311, 0.5995235826720258, 0.9375453753476061, 0.7012002371260095, 0.7749991778621644, 0.9253704730883306, 0.6622140668868548, 0.8751943564013189, 
0.7848423373736819, 0.8198278792021207, 0.8355433698405764, 0.8469988242147546, 0.7106440610493622, 0.7717760644577925, 0.8482549381045632, 0.7034315994835767, 0.5270767228313629, 0.556492097409392, 0.5250211901533995, 0.8918169642699785, 0.5139485363551743, 0.6954633749790347, 0.987187423155844, 0.5315473511832489, 0.7285022880698455, 0.8661422408107078, 0.6516973230662462, 0.6354979750416763, 0.9830992736962674, 0.8691798460050983, 0.7686794233892067, 0.6525221134454748, 0.9123585394561009, 0.9303674491084057, 0.9794572793236938, 0.5550963053004957, 0.729361192447174, 0.8459382293093394, 0.6879934976664148, 0.9989854797851149, 0.9780797416837166, 0.7613314083550312, 0.9394825964922469, 0.8705502950910118, 0.8511169886138406, 0.9222937230627088, 0.6850968484654274, 0.9835874948069996, 0.9926888396330584, 0.8934012011583182, 0.8097093053680482, 0.9743344885060357, 0.748710060235414, 0.9123053268037992, 0.9856391728506535, 0.7783410740734211, 0.5082660591033406, 0.5874971284407258, 0.9889645244945021, 0.5733249199915006, 0.873712288378057, 0.7326146031690979, 0.842795099683517, 0.5744157743142706, 0.7014663162806251, 0.883831631303523, 0.5849015137361229, 0.9140526361301875, 0.8873271514852801, 0.5494967478417436, 0.874968099053878, 0.7421143213011119, 0.9011196518276421, 0.8632163794597882, 0.8969297013072101, 0.5624541337333624, 0.6778708609603378, 0.924162224627148, 0.5154289743993294, 0.7458758719331515, 0.5469403653409253, 0.9215395156228849, 0.7799415997385775, 0.7862252530612792, 0.9354661780047933, 0.605963339186317, 0.5527759922349667, 0.9182524165405077, 0.7943670702990648, 0.795223026566696, 0.8602001542136881, 0.922973558181093, 0.9179628291958914, 0.580328894939382, 0.9000257481154881, 0.718314169790058, 0.7041269434422357, 0.6900607086063142, 0.8922006735454748, 0.5505014482824976, 0.9037199616632444, 0.6410981719693336, 0.8973433098098043, 0.6861078292486007, 0.8827252899079499, 0.5892580771943755, 0.8016799510270737, 0.8323327364140867, 
0.7299127830481688, 0.7835154822704542, 0.6733228975055118, 0.6493436880123682, 0.9059927803158938, 0.7323349679516069, 0.5980729852183468, 0.6863097255040689, 0.6645852343485337, 0.620372040956285, 0.5458973103848912, 0.8657363881097485, 0.7905836114211817, 0.5036995450338966, 0.9195203447568345, 0.6544717617295861, 0.5396829356282755, 0.910378312688753, 0.9492269913122429, 0.8769485873961267, 0.7762565508713445, 0.6520835391671049, 0.5075913506076314, 0.7719496271787065, 0.544706117135028, 0.8568393309344577, 0.8830751740816742, 0.6504940690472749, 0.848284111674153, 0.6821736299621233, 0.9031641413921191, 0.9234596745505517, 0.5735460530083105, 0.8068274671523968, 0.9158344449291893, 0.9632170156195525, 0.5263199366027761, 0.5575760550518836, 0.6103518588063305, 0.8594017070491671, 0.6218180181133764, 0.9487769327859035, 0.8512721338475786, 0.5694329526333961, 0.5084531620731285, 0.7745145186520663, 0.8817808889809999, 0.8925934561560604, 0.5986434715766739, 0.9985606733713617, 0.5367805328811999, 0.826009252291035, 0.7103474443087405, 0.7462800449580925, 0.508985381797112, 0.9120763768249291, 0.9125693158409376, 0.5388215081457304, 0.8092770093092625, 0.6761982074455362, 0.6058736009585359, 0.7814833100226954, 0.8910713075053154, 0.8141887348167696, 0.5480861174366163, 0.6921464246606681, 0.7179643028257499, 0.5061217952170456, 0.968407520029003, 0.922271381525906, 0.7496451988390506, 0.6898703084799815, 0.6696405819330484, 0.9916512508767323, 0.5810620283619279, 0.944418680873119, 0.7048581369198572, 0.8859638380570314, 0.5203577185488292, 0.7776281930212128, 0.8545235567376395, 0.9914514761334134, 0.8169061415794385, 0.5714422667163903, 0.8732905547043115, 0.8270889941149437, 0.5547793537132076, 0.9439222152737667, 0.8142495853668739, 0.9749937837262095, 0.8759747666176125, 0.9555406770052443, 0.6380421063914359, 0.5359711291443892, 0.7155269673448915, 0.7155849643790209, 0.7816730574627351, 0.6798348300810456, 0.6055651759460232, 0.9727261953886397, 
0.7912306142888441, 0.8691910097617725, 0.6608557641727806, 0.9626008637892334, 0.6878650911076891, 0.7817239228547587, 0.6481065410657477, 0.9011145559825826, 0.7214803631424349, 0.8620379004461355, 0.933556015313342, 0.8574403831026188, 0.7509415129156659, 0.9211861455236182, 0.6168369960024921, 0.6633820162745845, 0.9616777603445013, 0.8788803736885484, 0.8726030255738653, 0.7217398658580272, 0.6894334283729915, 0.629486895990415, 0.8212938316801428, 0.8573172724235018, 0.6615680851391283, 0.8144552650380986, 0.6082748353307735, 0.8515528129677247, 0.5922861060491702, 0.5837782031173523, 0.8163436323267779, 0.8807466248349433, 0.8191767320897085, 0.6871803714128963, 0.7168929180390207, 0.6235046668715849, 0.8145340226638138, 0.5159909405886565, 0.5044284561596328, 0.89945438240737, 0.5321286412908908, 0.8122698694343966, 0.8228389391244573, 0.758352213812126, 0.8699261645657556, 0.5399257278312445, 0.9526232397688114, 0.6534382808519251, 0.8521659411140631, 0.7888843094987481, 0.9416064310050551, 0.9438150707471553, 0.5216865642431225, 0.7309841642152322, 0.9633209055166374, 0.8194455588639351, 0.73234483506185, 0.7616215726802608, 0.5623033400508781, 0.6825482091927366, 0.8145742683161044, 0.9419404708041773, 0.9961303437964112, 0.7479376090144425, 0.9449365544081527, 0.6154405378779453, 0.5129964237539779, 0.7413598000798663, 0.7343551074670996, 0.7580709330388471, 0.975871992369296, 0.9419762804546217, 0.6398191174701254, 0.7531338332217604, 0.5317905945879506, 0.7310179092115932, 0.9643180739180471, 0.796044811328371, 0.6237793050130007, 0.9597411477600923, 0.8878414142285018, 0.8493077875226656, 0.9336290387397532, 0.5179426739916106, 0.5723008243590332, 0.7757566233445711, 0.6840609486852346, 0.652234926049212, 0.5777603852263218, 0.9870331284699823, 0.7512833845741292, 0.9165848533158021, 0.6072714125597752, 0.6431100929146586, 0.9360107032015679, 0.5064702313656091, 0.6252754061352503, 0.5560803323923713, 0.7341948279094492, 0.6959076329434137, 
0.8771624849660196, 0.9681348668289143, 0.9419561971893968, 0.7389396976980893, 0.9176850633936793, 0.7154956216644301, 0.8514518164863573, 0.503210979401889, 0.8138140078539334, 0.7273733408912169, 0.5800344193772096, 0.8678376269051095, 0.8423802884234892, 0.582679274608986, 0.8455068386021287, 0.6359802052655221, 0.9615519937100085, 0.7529528123269568, 0.670191555622708, 0.8481756375788627, 0.8925147501438412, 0.6079072803712378, 0.8553564178531987, 0.9214619013634349, 0.9363278461829714, 0.6613962279357846, 0.9575202896793558, 0.8963647593658532, 0.7214945417027565, 0.6797724487084598, 0.5343306066833253, 0.7840111796104805, 0.5225340993323446, 0.6097697554134376, 0.6027957654053502, 0.5884168595428014, 0.7671935491829984, 0.7625906789963944, 0.5140993703895778, 0.5822805871870742, 0.522130975899081, 0.9314354495336391, 0.5786884035873763, 0.8803766455783981, 0.7274456169467368, 0.788104429418368, 0.7820909474796163, 0.9896442771561808, 0.6317233890195373, 0.789502051221912, 0.6273568669531393, 0.9888161679487676, 0.8705875644915162, 0.7766434403710174, 0.8439368725855813, 0.6838823760520569, 0.8398730379424205, 0.691944593241763, 0.9408130209777104, 0.6240421375854819, 0.9438181567003753, 0.9518497199840132, 0.8839368131749776, 0.7281228207058632, 0.7906076700014837, 0.5751293473149006, 0.6952627781175456, 0.5885996207662241, 0.848828798722356, 0.9778850911050707, 0.8049656021478138, 0.7443301670159348, 0.7943131275225426, 0.6351036904237062, 0.9454439445031071, 0.9535622655787213, 0.8171571753376112, 0.6710498720689988, 0.5057838386909665, 0.5964742308695565, 0.8239156898350537, 0.8458682681662731, 0.602397677496227, 0.9237023225436845, 0.6583706520485939, 0.9058575650144642, 0.9865083929946867, 0.9678433267188755, 0.9359345616332035, 0.6342738515744634, 0.9753849516125628, 0.521345505914977, 0.6832022715895418, 0.7544518346551683, 0.9427488674910933, 0.92749588426484, 0.6233320299507704, 0.7349059203336324, 0.7989041657227978, 0.6038435541096787, 
0.6098111990866749, 0.641435549487916, 0.9612985796044152, 0.6733649141176147, 0.5242265959657836, 0.7312717635440174, 0.517813635558183, 0.8180419968007417, 0.8956382248508811, 0.5257390895024322, 0.8234993774964166, 0.8659327428399957, 0.8242762576017832, 0.7680683896904879, 0.7420765338699009, 0.8789098051393052, 0.8107464849248789, 0.8397857153366257, 0.7175672759329513, 0.5499357062097117, 0.7630122507976369, 0.9171029061767833, 0.6933572549595917, 0.993779967646145, 0.9675645744734835, 0.9438460026203901, 0.750077048991092, 0.9932433435426168, 0.6535144138056252, 0.5532606664463415, 0.5807604913910591, 0.6591067629364077, 0.9321426923515623, 0.8602804591276665, 0.7117586120095738, 0.9595108399846513, 0.5546578676665417, 0.7429285429218706, 0.907752797107241, 0.7616888792605699, 0.6533834527948483, 0.9219994275762515, 0.7854194649690828, 0.6659721332603528, 0.5381204717143762, 0.9797029499871286, 0.9369598561407934, 0.5354225013328487, 0.9264853861676166, 0.5827959130681847, 0.5533849823812369, 0.7820105978407299, 0.8116206239684358, 0.7406275945655897, 0.942377702107255, 0.870008326657516, 0.8908852849517801, 0.730095712049561, 0.6223105937207494, 0.7895568415501113, 0.5081894632347548, 0.6265271274961686, 0.6559911014679852, 0.7959568063441748, 0.9583354324014506, 0.7807825635319703, 0.7536583161084436, 0.6196542995883767, 0.7880217834613394, 0.557755390107764, 0.6662984321334995, 0.7418885337537433, 0.5129307496427511, 0.6889549303561692, 0.5849011953042409, 0.5033954697417411, 0.5663993916435768, 0.7035009271348664, 0.5276575347789338, 0.8208531942019319, 0.7092756009221833, 0.8427099711730852, 0.7203772988012176, 0.8650949999317956, 0.9573511121067915, 0.6776408214328489, 0.8716436122448941, 0.707200546821056, 0.9921370941398087, 0.9976850049219468, 0.6047869372433134, 0.5389728309831665, 0.5038706549236316, 0.8054783547721096, 0.683087497253656, 0.5816598075836192, 0.798991842333269, 0.9899004632500551, 0.7815063164579186, 0.784621277222917, 
0.5441754750626858, 0.7064582379138946, 0.7849148238528161, 0.8168819218887211, 0.6824168704106084, 0.9498599252451023, 0.6636293529455216, 0.5036305750753175, 0.5431934333070235, 0.8336329816899479, 0.8523386752466892, 0.7301394707570665, 0.5068898679806332, 0.8911485295390987, 0.9284473803097646, 0.777456798602117, 0.5288240123707417, 0.6501697524785257, 0.6840813639452228, 0.6254913508337028, 0.5779870499922055, 0.8342319106343629, 0.9194128733156128, 0.8461565183313522, 0.5765929020399347, 0.5452626722558229, 0.5471054099973672, 0.6728169799539369, 0.9990512692562148, 0.7512401020183725, 0.8163683150572695, 0.5661950187869003, 0.8054136787514152, 0.8275800998064031, 0.6070113230208019, 0.8013315850783045, 0.753029455466661, 0.8822727253933713, 0.9386488438060794, 0.8592869051568598, 0.8224140990742601, 0.5584376686081944, 0.874287512487942, 0.6034046996054155, 0.6293232914353744, 0.9156059707741431, 0.9264787779312122, 0.5934619724323433, 0.5405231096162741, 0.6861139425595335, 0.9976821589013453, 0.6182426734586393, 0.5163627872904224, 0.5582709611032788, 0.8047765331939429, 0.8347931343470598, 0.8018693924217887, 0.6426330522892574, 0.709601899822681, 0.9885430699289512, 0.9531324494912703, 0.7969670086871179, 0.7473191231013494, 0.9017349576914258, 0.919481371596202, 0.8727721677276652, 0.6524529898821583, 0.7938274190855628, 0.9384058628828111, 0.9754218617812969, 0.8745035861515238, 0.9196014951134299, 0.5436080890871006, 0.6930196847209786, 0.8052396500248113, 0.9907365631214111, 0.6265410812501806, 0.5227252898131762, 0.6782288040296625, 0.8540090886627774, 0.720640513312178, 0.7150287267879748, 0.9932442594872414, 0.5789948348619187, 0.5694184185198192, 0.9304966140234037, 0.8553367547422092, 0.7623887177378854, 0.945390662648234, 0.9283148465319575, 0.762487296228556, 0.8988355354195373, 0.9323642639865879, 0.7561109958249421, 0.5802024526535317, 0.667554115530384, 0.8912211149647045, 0.8073465802320483, 0.9062287873294421, 0.6696503320728115, 
0.6152429455039171, 0.7037691180542587, 0.9868284579732365, 0.7522094391479366, 0.6088441028490653, 0.6718567186927763, 0.610334209396717, 0.6704597338427467, 0.8576108172349556, 0.960874099765807, 0.6232888497068088, 0.7214825142489882, 0.5787726406083071, 0.6245644752102442, 0.7807269310157239, 0.6226585470598576, 0.8668373989446609, 0.7966042342663935, 0.9529649152868666, 0.5246874246912082, 0.5516869759513185, 0.56649308863381, 0.9789745524951146, 0.9496852871950414, 0.5386779122261065, 0.6445260480802912, 0.9449786083521714, 0.9857149569129251, 0.7136782475089309, 0.8491391179059555, 0.5735115245229583, 0.8764115766898435, 0.8304735633239748, 0.9047207840723044, 0.8236401867685037, 0.8197004187345822, 0.8760787419677281, 0.7839040204620608, 0.8232783380771773, 0.771492062172041, 0.5485215445288206, 0.7101414964546033, 0.7468179716783991, 0.943076129629608, 0.6485582181810982, 0.7776612904434018, 0.6805493872505524, 0.5745717796597926, 0.5866427337649565, 0.724135611361682, 0.7967476937546305, 0.670410213867908, 0.8467577638374563, 0.6019924202801903, 0.7072093950235276, 0.7226107812259568, 0.7483741855845515, 0.7661283903603473, 0.7588226293506459, 0.7585694252451073, 0.6324189699058531, 0.7541006656692513, 0.7893782045464108, 0.9004883931112262, 0.7151894910235427, 0.7386513183293135, 0.5735710217806608, 0.9970499424762891, 0.5744690490242359, 0.5734831096704305, 0.7648374544830877, 0.6084138536456936, 0.8993814916528686, 0.6219328725619941, 0.6647038361940363, 0.9983769845124535, 0.6970367309588639, 0.748881763883876, 0.9302312016221947, 0.8559614670939176, 0.9264529736367809, 0.7842314534586547, 0.6144915054048794, 0.6659744791493221, 0.8607510057940433, 0.6216279200160648, 0.8465737307707903, 0.6192154893752491, 0.761410445626374, 0.6845537501589128, 0.5822004966775038, 0.7585151938962206, 0.5705334080810196, 0.8653205503388014, 0.5735437619255116, 0.957789518523954, 0.5611956880823774, 0.6267432190919702, 0.5730916287650138, 0.9013782578771603, 
0.7202175263172577, 0.8730267590171372, 0.606909376491054, 0.7681647833447562, 0.7639656043782035, 0.9704782350051895, 0.9867910550885812, 0.8764019921967688, 0.8946155800168443, 0.8241605835661041, 0.546719957710035, 0.613029757844382, 0.5198881398069312, 0.7041841178827698, 0.5120162595472617, 0.5483044819923304, 0.8641396867321545, 0.8045763034109598, 0.7857834902097532, 0.8945929109204546, 0.8437244013114102, 0.6390155933712549, 0.6328844768875551, 0.6540550854728389, 0.558521050438116, 0.8454703135827077, 0.8738389324751357, 0.9627675330358019, 0.6839433141547349, 0.5446194743290744, 0.5545464021712998, 0.8148212096320018, 0.5472656547976422, 0.6534928121579728, 0.5979760805615549, 0.9338403486903116, 0.8809093097970554, 0.7060206320105125, 0.7332839426558271, 0.9948041698338481, 0.6809615475371826, 0.8578776942938123, 0.5121570500552264, 0.7829225743418553, 0.767110894655811, 0.9345002673850284, 0.5238438401972527, 0.8912824324294575, 0.9839099479804796, 0.9045493868395353, 0.7757209228740327, 0.5422621902409396, 0.6791352597138329, 0.830002906583806, 0.5858233219129294, 0.6173472076730889, 0.8857523456453903, 0.7904803505381142, 0.8200041110072525, 0.550519872584246, 0.8576540285687243, 0.645541987795931, 0.7490207729244723, 0.9539237543461236, 0.7618302060158133, 0.9303987241789051, 0.9853066887221333, 0.5741385400540355, 0.6081142744971055, 0.6583938767262343, 0.7972013829293325, 0.7283149907299662, 0.6269339388294053, 0.7472870402742721, 0.6827034003943063, 0.7190514821296369, 0.6530402891678248, 0.9511876020023078, 0.6208507243431116, 0.9858895260262135, 0.9783557292526361, 0.8675352318529366, 0.7901701040229372, 0.7384341075225125, 0.5367854529855496, 0.9514713536731769, 0.9400296214134887, 0.8286294373372826, 0.5592204709973336, 0.6594761628946928, 0.5950606025035435, 0.7221371633435741, 0.6845300187404055, 0.6170660387797317, 0.9225705849189845, 0.6724326045766347, 0.8798433508040773, 0.603900263976634, 0.987502949288333, 0.79103953492612, 
0.6954819090243163, 0.7348708771383707, 0.5707428243098561, 0.8151629932987586, 0.8537667540209408, 0.9377805402154631, 0.8488724945849958, 0.9254917636933604, 0.8854222906172013, 0.7165710663371802, 0.8190160338491574, 0.51638476256942, 0.8811196729855673, 0.5568135002707819, 0.7995426431692553, 0.7566712913162694, 0.5680931671643223, 0.9118411314527897, 0.6056239997061084, 0.7096224146707301, 0.6259768433927364, 0.528086819144072, 0.7316810155778761, 0.9504330410336328, 0.8089135413573729, 0.7704511230428248, 0.5888265572735774, 0.5466739300639514, 0.520011037146751, 0.5535324403975586, 0.7758344818530785, 0.5295013213964835, 0.918319031907036, 0.9254405386441498, 0.9544414844085083, 0.9361466292500608, 0.8095249490891725, 0.9215225305006296, 0.8134777299738183, 0.8728984166239229, 0.6930275532601171, 0.5495887276646917, 0.7332665742716535, 0.5455524857629621, 0.7112191020785851, 0.5082324652641281, 0.8178900535899796, 0.9483264870112225, 0.7059249496675781, 0.9579831924249977, 0.8654557394176491, 0.8962471667054129, 0.7638444577085208, 0.5170223044997904, 0.7124446331320667, 0.8184624136060115, 0.9216749522627476, 0.9346204338082811, 0.5774703631673095, 0.6571942688367508, 0.6476085611240648, 0.6795376787309438, 0.8051189227716655, 0.7847324839482273, 0.5498890859956667, 0.503382356466693, 0.6278923728886376, 0.7405147383469359, 0.6272653080099597, 0.6044025302051528, 0.5075107291637935, 0.5769388842565275, 0.7439351079636309, 0.7375842935492922, 0.588433412698567, 0.897694604274381, 0.6538586549906362, 0.9322155651240338, 0.6812081653246791, 0.5159835576250117, 0.9159540670852794, 0.6801040336152755, 0.9508617737179149, 0.7081380758496996, 0.5657910644518125, 0.8948598847494887, 0.8435651138211895, 0.5835786599321598, 0.8157691373010741, 0.9366961607360489, 0.7718556728430339, 0.8904545469552713, 0.9330147427939863, 0.9459868363002704, 0.98472007324742, 0.580530349160258, 0.6230481862897828, 0.6664815067145218, 0.6405256504734773, 0.5092678856911496, 
0.8137112415688681, 0.5919497721138686, 0.7188548862401936, 0.990427901683935, 0.8031162773295735, 0.99443994036148, 0.8724913905480718, 0.9527807305153082, 0.9387583662421254, 0.9473135528521122, 0.7648110400555747, 0.9680545657918271, 0.8925418101594249, 0.6560798735095521, 0.8829964931560974, 0.6812947723753826, 0.7745557698657626, 0.7345052370297118, 0.7729711447297521, 0.8224376907160023, 0.53685459509166, 0.7560634240511975, 0.6230169375302815, 0.82351843136012, 0.7846261481430665, 0.8909830675097586, 0.9535249756865936, 0.8545122143642875, 0.5692994966809904, 0.7310055037602801, 0.561088421506988, 0.7041690300602963, 0.5598430231383706, 0.5138019489570651, 0.8082507801265302, 0.721553519821631, 0.9220932270276918, 0.6974537309754427, 0.6318665980954209, 0.5462624098524729, 0.9376459492711666, 0.8622290862984161, 0.5439099244805919, 0.7119091940786684, 0.9261366617182203, 0.9923356827322658, 0.9932512344698807, 0.8622930107350846, 0.8481662127645284, 0.9784631626259015, 0.7161237607738125, 0.625366362010542, 0.5356446207953458, 0.6819523171304591, 0.7858580899105843, 0.7567774179839761, 0.8375915010799317, 0.6225138573417882, 0.5020076615024974, 0.7787257491624826, 0.5993850142330275, 0.7128345282394499, 0.9644998399095244, 0.5250787290636614, 0.8804221289471544, 0.6697639348392562, 0.6067943250832789, 0.7731582741537109, 0.717586580765594, 0.765957704995815, 0.5048937198652932, 0.5293877833133667, 0.8950017844356397, 0.8649754752455019, 0.8635761481304147, 0.8033042375981699, 0.6573923683609801, 0.9933513565991562, 0.745556782748591, 0.5832627876709022, 0.804792784159251, 0.6703537391978178, 0.5345892589002121, 0.5266145758328914, 0.6936744273242503, 0.953977738807283, 0.5095025488472833, 0.6877211694946471, 0.7026991928767232, 0.7877850570334866, 0.9067944826279861, 0.9919713770217854, 0.7341272534713232, 0.5881305820830549, 0.874634971204568, 0.9408279265678616, 0.7129791441430033, 0.6135002411564466, 0.6093460970667846, 0.5512633305945001, 
0.5003194623163676, 0.6218836793251322, 0.9057959060801947, 0.8096462299484817, 0.5865201090301336, 0.6644266052697914, 0.5821255321620544, 0.6737694077609252, 0.9408490711169053, 0.9803262018634382, 0.5675815908419777, 0.8890497163427588, 0.6432480709358777, 0.7036995187158785, 0.6029989485540668, 0.9792933358020077, 0.6008615703088787, 0.7935856636868277, 0.691311055231286, 0.8487556202036233, 0.9088997172692466, 0.7937914728242491, 0.8724869684041543, 0.6461789687105394, 0.8500450208706863, 0.895341255800582, 0.8530365043071434, 0.7010778267959137, 0.9497554733714075, 0.853479067160362, 0.7229990805730497, 0.6880442344189868, 0.7093967323305108, 0.5991225040833598, 0.715637385684554, 0.6808893001243806, 0.6499857435970602, 0.8056052340525278, 0.8640777063972114, 0.7019517302757448, 0.9799679402584431, 0.5048635301426572, 0.9744777299413592, 0.8508266792134296, 0.7638146579387308, 0.927635423868534, 0.9060283786231293, 0.5074630254298911, 0.8038686031748743, 0.9435488355577161, 0.8892298673344221, 0.9703979283815609, 0.6690544470344505, 0.538499388506591, 0.8080789769711325, 0.7573409753967475, 0.7927860153966007, 0.8704800261549808, 0.8647935957063753, 0.5093314174529826, 0.7289534948285683, 0.9014352629665201, 0.5388101607801208, 0.84971139903142, 0.6435907324050032, 0.879654735626995, 0.9748421651815369, 0.8827517698501927, 0.5359806415048227, 0.722714900129343, 0.705895104741256, 0.8340399462483716, 0.5466525677397807, 0.7187620764664665, 0.9564777380911362, 0.6317675191514984, 0.8970731394229103, 0.7944013445000829, 0.5605021530201616, 0.5157465075844236, 0.6453092721562225, 0.6687950587314933, 0.5728515756978991, 0.6445214875803211, 0.5899554103739714, 0.8038675552722574, 0.6884094010600414, 0.5434490271303971, 0.8199926487364024, 0.6282235424064164, 0.6309820906617841, 0.8216629861268814, 0.7727832397671578, 0.6651715819874398, 0.9950743304160083, 0.5561086926716348, 0.5088603347640912, 0.9490916394488289, 0.5034562148516721, 0.9753852693248466, 
0.872522814463401, 0.8835354783777347, 0.5143753446374932, 0.8014353265499434, 0.9047694414763487, 0.7845335585820752, 0.83613890565, 0.8750747013552952, 0.6274905064089187, 0.7553997760967045, 0.7255365222169716, 0.6903866376396998, 0.6906804974947551, 0.7246132558682306, 0.6610932056299488, 0.9527014173679889, 0.6832438395059051, 0.9239922976151507, 0.605445939472842, 0.6518395962919392, 0.7825615221043225, 0.8783035633764338, 0.9296747215183203, 0.7650948085791414, 0.6254990259605775, 0.832310611542805, 0.9680099101321902, 0.8834640582615376, 0.5508787430665087, 0.6606917431830048, 0.8479342152676099, 0.5415722007583548, 0.9982898323655339, 0.741767822364219, 0.7734270060414674, 0.5730858189549403, 0.7027092762266414, 0.9840775867170788, 0.7185121421467697, 0.6166732540027338, 0.6386450396881844, 0.6903803994904731, 0.6689520016328263, 0.8724129291014617, 0.8072048182807294, 0.6111730665967825, 0.9979844678728165, 0.65856750010966, 0.6887128889669858, 0.8961592671158936, 0.5017454064829068, 0.5833744352227952, 0.5741302312893259, 0.9048704197522223, 0.811073774219659, 0.7758912868142562, 0.5101485581936804, 0.5804669577242665, 0.9154621796094586, 0.8568878660752544, 0.7044663650025234, 0.8473808302591502, 0.7587301963042391, 0.7597068898790127, 0.6639598059580916, 0.9526009445573413, 0.795388150139859, 0.8941119529143653, 0.509101138193803, 0.9823318255808773, 0.5200357228559707, 0.6148604760584369, 0.9512861625797759, 0.6232341214214554, 0.7817244359143392, 0.5243591197745496, 0.5668473521186234, 0.7429922625282936, 0.7527823755005327, 0.7635484901167766, 0.5033478891560843, 0.9207386027580222, 0.6904885512784903, 0.9358931820662346, 0.6714183271676126, 0.589922425619642, 0.9482513261264185, 0.8503872051326704, 0.6024235544886933, 0.828989988744226, 0.8983536980636105, 0.6315021035035568, 0.8691552181486967, 0.7961208723761972, 0.6222632627249577, 0.8091354571791829, 0.9372354789016715, 0.6682274433388158, 0.5783005493856882, 0.7854347539170581, 
0.7081432741705104, 0.9941566639963837, 0.8070382872828568, 0.91359297659777, 0.6458091453374808, 0.7407792288880943, 0.6811437685466257, 0.8238521043850613, 0.7066192949702286, 0.5384035105036746, 0.5942042981170953, 0.6303108367541523, 0.6475269568479717, 0.6236200485774424, 0.6926206627977627, 0.590816538989604, 0.8081660703290244, 0.565123856166814, 0.6947146863654641, 0.8515357889500433, 0.6744710401808556, 0.7292938711062995, 0.9287527117456098, 0.6679845943075561, 0.9353795594610438, 0.7784720876931496, 0.6023619109365209, 0.7344405096150775, 0.7899482896693228, 0.5671708415145532, 0.6381703927846225, 0.5442615677218919, 0.6978801896659872, 0.7479735202683223, 0.9176836717194179, 0.5647710141548132, 0.7798971223410167, 0.8874224358468299, 0.9664822822693326, 0.6812543184143471, 0.6085678255858078, 0.9121963336873223, 0.5320677368042718, 0.535500508987919, 0.5913043525007493, 0.5521535183666735, 0.9705626082504943, 0.9403353998353091, 0.5307004965537641, 0.5217895340905863, 0.6979431684326581, 0.5313635226767695, 0.9296276795583276, 0.8381995208014998, 0.9556625014720754, 0.8906374235043664, 0.774235300490324, 0.7270158592817408, 0.6950464683701295, 0.6825199334185159, 0.8397957120199837, 0.8196128412931585, 0.710882973122642, 0.9194178835384372, 0.6868705969740254, 0.541094981973014, 0.9961196978923738, 0.7267210232537534, 0.800807924763078, 0.5957861188663831, 0.9889453840301158, 0.5936898831957134, 0.8509805684447965, 0.9607123481746482, 0.8854528472342187, 0.8091253575590249, 0.5739854362795154, 0.8345139572745803, 0.5250513699550002, 0.6708977131352494, 0.9403296539095766, 0.6833304472476232, 0.6222121981004657, 0.8755863929456987, 0.7303905317732482, 0.8642803189800249, 0.5511735106365158, 0.9494937312798262, 0.5699385654872359, 0.505132359402374, 0.9974140881157458, 0.687856756480863, 0.9937739630920002, 0.8926179235492215, 0.774088375264701, 0.5080825657079377, 0.887694852523325, 0.8163659198470818, 0.5590779645969867, 0.967342533058992, 
0.9898446166097465, 0.8016808697598183, 0.5132767991828122, 0.5821496965526777, 0.5517834967537014, 0.5301365553268005, 0.9811128023354831, 0.881459202190904, 0.7298948089209265, 0.5220359131495351, 0.8178078336012076, 0.8764070986636849, 0.9783086929366673, 0.6630876095188032, 0.7958893916634266, 0.7122869250646933, 0.6228905218729481, 0.7150019795187803, 0.6552499669669585, 0.9181743638779631, 0.5844732798149832, 0.7233144383104835, 0.8749866356445268, 0.6901284928762337, 0.6309827687724614, 0.9953946089600276, 0.5081709869260094, 0.5740407500832568, 0.769492294268926, 0.8830697184307874, 0.8877228599025576, 0.5462369073378359, 0.8361847857578624, 0.9367314784929375, 0.9370194105664932, 0.9795646186224982, 0.9959463341341063, 0.7053447389234516, 0.7519259224933236, 0.5138788635651836, 0.7155195762567934, 0.5130675678446565, 0.7297787406186664, 0.9661081120616193, 0.6330577690524221, 0.6169934634479759, 0.7324269190974457, 0.5944887271653896, 0.5408755055512475, 0.753600871869109, 0.6399316269948929, 0.5858174527666515, 0.61192649746056, 0.8351784594897522, 0.9505497025658094, 0.5106552338023869, 0.8758524002188368, 0.6955561079322564, 0.8010202010159175, 0.9796121589169562, 0.6458954400105581, 0.9091065660710216, 0.6389045132510471, 0.9930467556160967, 0.5120225341159279, 0.8878910767677634, 0.753686480914693, 0.7966223279321847, 0.5784254322799792, 0.6538080542463702, 0.5532628417462575, 0.9039439619337051, 0.5650857715605462, 0.9378189813999607, 0.9119753328028852, 0.945944898264203, 0.9898316111491767, 0.6581655644324289, 0.8449689542771204, 0.9849227361736561, 0.5164754514770347, 0.584445991065059, 0.9427527517520122, 0.8432581927837204, 0.529691591666932, 0.6168893688066365, 0.9217468687817689, 0.7998550697736913, 0.9780229685111574, 0.5941080425682029, 0.7812007454618738, 0.8397371062631311, 0.7013568841279345, 0.6099666366165226, 0.8438033541861623, 0.7802228172174895, 0.9222522481432609, 0.6586713620024369, 0.9180105851375675, 0.502047413892468, 
0.7001174084775443, 0.9957326601147267, 0.6034505456124322, 0.571074101024529, 0.6594263196716281, 0.8913072633591923, 0.9240784146972987, 0.7722936180708144, 0.6853785758632799, 0.8044621933480027, 0.5044198594066838, 0.8307248894830055, 0.8217089837963765, 0.5029040070765708, 0.8563889877996372, 0.8082029740856036, 0.5331311627290833, 0.8442655577860678, 0.8618934485915544, 0.6116191451476536, 0.9557684393320957, 0.5230671632186423, 0.7078087242371024, 0.5935970854150886, 0.8614325649563856, 0.6915762812608897, 0.6784777026134367, 0.994905475867571, 0.8661050976729261, 0.5279002414935307, 0.8043799611248024, 0.8034805982474351, 0.7290757900378797, 0.714877210427596, 0.7286126007007971, 0.9694819105887185, 0.8027329320739969, 0.8878448867784556, 0.6083931445595379, 0.7138016945354584, 0.9496240821824656, 0.9984327258715082, 0.9137748205073943, 0.6513977950980288, 0.8623932012221595, 0.7329526065547971, 0.6510107026916572, 0.8587310223521458, 0.5582768717767246, 0.725646065204651, 0.6631143970643891, 0.5906931736913301, 0.7847578360249383, 0.5275823311859287, 0.8544204121858342, 0.6668779121414652, 0.9610960715200905, 0.5587516556825447, 0.7593879272392511, 0.7041264990162612, 0.5272732176654671, 0.9335916784983256, 0.9631760036211098, 0.7133572960360777, 0.5712201436194634, 0.6756100583359628, 0.8798201496038324, 0.8095126230976866, 0.6193487407771692, 0.6605388985717656, 0.9526269294707277, 0.80231213593335, 0.9159154135059735, 0.9551812372368164, 0.7390875429315035, 0.9542402778813668, 0.9873914508271509, 0.60451460707057, 0.6453633694161993, 0.93032336403016, 0.5641585112906244, 0.9763784304980054, 0.8061889789761565, 0.6039636083332457, 0.5106710337524447, 0.8900957257516561, 0.5819154221669627, 0.5694166984262967, 0.8016652750071396, 0.5994384475386708, 0.8016378333475619, 0.7180818188149096, 0.6145685094193055, 0.6421303269867241, 0.9707059499390657, 0.7804254022377699, 0.5956962652843056, 0.6784907179608899, 0.751788111183415, 0.7245531774166134, 
0.631322938747352, 0.728740473874387, 0.9435387380383469, 0.966485481358325, 0.7678934612663753, 0.991267847190441, 0.6171168878821345, 0.8835708123011248, 0.8508364595025586, 0.864771267513893, 0.9797203825898155, 0.6639151255188387, 0.5336277665926532, 0.5339478752858859, 0.8455502581869532, 0.8951670332433208, 0.8364871478790152, 0.8364029603007681, 0.5186007194607534, 0.5897407854431802, 0.5232170927452919, 0.7832615190670027, 0.7413716915181425, 0.502268796481854, 0.7907725396689351, 0.7565070338235693, 0.7572011664057978, 0.7476216507272591, 0.749813998820911, 0.9541729694335828, 0.9259174876889282, 0.9921611400319046, 0.923707361154409, 0.8801094367164899, 0.8425973089834287, 0.8394427414717618, 0.7189425653393354, 0.7631559725288615, 0.6131412652566017, 0.913635509417872, 0.8460890909620353, 0.8284503197879329, 0.5719388623401898, 0.5379751707192268, 0.8265144292528965, 0.8583950899967636, 0.9601851658421463, 0.6865991736158725, 0.6378873113055666, 0.8441352440677364, 0.9999664398492836, 0.7646791165007896, 0.8956953844667412, 0.850863880051047, 0.5136381082022257, 0.6872175973555201, 0.6014158816869614, 0.5162464305732939, 0.6032626997710836, 0.6706253258399999, 0.6734469788602336, 0.9393701419611216, 0.7597872541116862, 0.6621101054624523, 0.8267239205962977, 0.9410623824928368, 0.7849663401586624, 0.7573626273090079, 0.6662855067696019, 0.6264420345953654, 0.6589915148652108, 0.7613976436999736, 0.7220022991777333, 0.6461094857226959, 0.9616410017457053, 0.6897841993097822, 0.7582177648405184, 0.7691765806915324, 0.8504365879970939, 0.9946033113172452, 0.6206282227063742, 0.6839644074885809, 0.7936280396072264, 0.5928720052256209, 0.9323288856660945, 0.6253899677474146, 0.9609045692487485, 0.6888134670350534, 0.747600661212018, 0.9339787232858101, 0.9984996761358478, 0.816885273494616, 0.651919558526515, 0.6247067566035547, 0.7982063540215856, 0.9137225084747947, 0.997168048986594, 0.8617462982443338, 0.7116635662702646, 0.7901736499834725, 
0.8131802154142262, 0.6865400477039785, 0.7802096094709356, 0.9026817126288131, 0.7563332744489142, 0.7526718616384803, 0.8571568055183006, 0.543750714715775, 0.5047827533804088, 0.8946132980199477, 0.6462919164988017, 0.8939099855149525, 0.7244233365177866, 0.7735783615589767, 0.8186219112107798, 0.7077596119645232, 0.7168345548654707, 0.9293164998053256, 0.8464152039089698, 0.9239726717009955, 0.6075207654375128, 0.816042920925863, 0.6796370205641077, 0.5668961610309629, 0.8276424820299084, 0.8358431742205092, 0.896890834185003, 0.5706047639541183, 0.949714976662783, 0.5226564650683241, 0.8402915915779365, 0.7196399412697037, 0.6019976925438817, 0.8363591741624666, 0.7176290193729993, 0.7846155772010196, 0.9690550551243937, 0.9300534401074247, 0.564331591283334, 0.7426733651419322, 0.800576855920658, 0.8892646825801751, 0.9525076489223661, 0.5166703613825114, 0.5376437684413037, 0.5017491110322156, 0.8362386783232794, 0.598522970976798, 0.8587725635186281, 0.9269816524937021, 0.9344895357407395, 0.685186561978284, 0.9077348982460062, 0.9766783025627479, 0.8808598939587557, 0.5932891962206606, 0.8688068636697768, 0.5871983217647038, 0.8818282955561658, 0.9357222259910203, 0.7129055455094031, 0.7683912779676699, 0.812482452851095, 0.8898016535108264, 0.6696676159839067, 0.7995266925232933, 0.6423646471368172, 0.5746927980371244, 0.6296475793370377, 0.6015402201040969, 0.8805193801026857, 0.8432936733106147, 0.808878125738762, 0.8605739581202787, 0.5820395783532379, 0.8004253728290545, 0.7380041884090668, 0.5384235670434399, 0.6281403576917557, 0.5490106353983426, 0.6726721145054811, 0.9545713446915581, 0.806922246601143, 0.6947492827077411, 0.5132757721620862, 0.9657658990421145, 0.8794605316613481, 0.6525717246349103, 0.8889235111771776, 0.6355662007345888, 0.5311900689739271, 0.8057106465315735, 0.9357800227585769, 0.7610442743626524, 0.7707948831746624, 0.83071203409536, 0.7802100380603928, 0.8752737798305381, 0.51690326774977, 0.8008790417405324, 
0.8335526924701131, 0.9091206139088157, 0.59377831427522, 0.559093697068838, 0.5417818039078859, 0.7406629377617774, 0.9378470634303053, 0.9722800512669875, 0.9780862547677114, 0.5222013283677556, 0.6548313513352043, 0.7654896380991474, 0.6622934850186539, 0.6050244117786259, 0.8206844327907622, 0.6486181394841415, 0.8304687876400233, 0.6403731723877808, 0.7405349129365112, 0.6953008946844526, 0.6643987158980633, 0.6210511935508894, 0.5953400661964444, 0.6142412785330393, 0.6659325644050663, 0.74799320668601, 0.6088797834632385, 0.5466288526170267, 0.8687217404893687, 0.9906221944909617, 0.995529278549032, 0.9555917729732528, 0.9400720539879961, 0.850554898650826, 0.825194986962542, 0.7681056828227169, 0.7496478948123333, 0.5756829349435337, 0.8902988991371126, 0.6962430692694471, 0.8247111662738302, 0.5025695662532086, 0.5739773541680367, 0.6935579663815139, 0.9896756233931134, 0.6172456300180056, 0.7150037855218608, 0.8745150350172421, 0.5026827504518516, 0.8911789623812153, 0.7552286442965628, 0.928112836335794, 0.7051736454917445, 0.6576341391620677, 0.647018018815497, 0.5914414991023151, 0.9662074356918231, 0.9867625969400309, 0.9708899395082802, 0.9927808701731684, 0.7014982505908629, 0.7035744346067165, 0.8774459092884344, 0.6165577499879795, 0.9965598328840649, 0.8529241664031699, 0.9258074576763804, 0.9023455452816482, 0.9891836133398053, 0.8981532652294457, 0.7032072045301385, 0.8219453207695039, 0.9256157260867561, 0.6933853809236845, 0.7657723202736783, 0.9311053126834312, 0.5608888778936302, 0.8407405047831945, 0.5698989915547235, 0.7962075190607951, 0.928238224637056, 0.988614397025535, 0.7469438054411988, 0.760002599710114, 0.9571397820774552, 0.7641310244932458, 0.5158996353965655, 0.5833449593391113, 0.6463674186734731, 0.6993349779600868, 0.7807478069607097, 0.9665024479164243, 0.7580946274071965, 0.5477977122641207, 0.6632770427815335, 0.6509354836519371, 0.650348491687402, 0.9928258096771858, 0.961006882949901, 0.604637011758868, 
0.8165126229303641, 0.8220578062528626, 0.7243960205877988, 0.8349886717687633, 0.6350318759790843, 0.9354211970078474, 0.8302834776867861, 0.8141942424030251, 0.5195417322899896, 0.870549895325842, 0.7167632030736089, 0.9917397835598657, 0.509073445460271, 0.8434384875269924, 0.5981916862136367, 0.9011079733701952, 0.9734547515619536, 0.7475884068644976, 0.8614263131006505, 0.6308014421111223, 0.9882149729155085, 0.531323487184579, 0.9467724579699688, 0.8126174130199384, 0.5992557160656096, 0.9180439789774294, 0.8835287192782819, 0.8136405625592557, 0.6779135707638064, 0.6528048448425445, 0.7283135585791052, 0.5943559621292048, 0.5947035995831786, 0.8548258373464619, 0.8817026329879902, 0.8359965258416922, 0.930841845892859, 0.6990109655330008, 0.7820014058273201, 0.8279631510339156, 0.5632145627286731, 0.8699646273275121, 0.7326351173172747, 0.865244445469588, 0.8186825732811602, 0.8812283005158243, 0.8592274058104723, 0.6936435029049834, 0.5828773902632715, 0.5756134961686682, 0.7771178971898892, 0.5141994981656365, 0.7948968616832983, 0.936241202945161, 0.7777227925379387, 0.7504686688980853, 0.8841724635038831, 0.7416360032545463, 0.691200833393369, 0.760427965854024, 0.6634004261161615, 0.639707449340936, 0.5816598267394147, 0.7781577435825764, 0.6199100587925591, 0.6148036208870278, 0.8152035556413824, 0.8231155322019577, 0.8498771709174935, 0.8593959760506718, 0.8326517313258281, 0.5790650757896592, 0.7219902876935269, 0.962389546707777, 0.663232576905002, 0.6160446690684831, 0.7958280212958273, 0.8057654292630921, 0.8358309988916616, 0.8702603391787984, 0.5114295707165559, 0.6386463664768136, 0.9796995168734561, 0.9627509449241292, 0.7959860160884582, 0.6967844969464321, 0.9233058536605924, 0.9043227197889021, 0.8342288495244623, 0.6566798297080205, 0.6624586586493044, 0.9108223758089109, 0.5439188310220375, 0.5563382035191877, 0.8167401855856682, 0.7623761866826675, 0.6682570780739743, 0.9430044099956643, 0.9794465905731107, 0.9049306879263085, 
0.8456399480372239, 0.8383949389477243, 0.5157818769591855, 0.7719973913535443, 0.5017589002047858, 0.8194300825740211, 0.9664992599727042, 0.7714130204919518, 0.8239114879147778, 0.8336022260470644, 0.5407881768109732, 0.548888255554431, 0.7334923391124962, 0.9993260037325824, 0.5831055377139209, 0.7757473301537889, 0.5750233341657687, 0.9717280815510547, 0.9777089315525089, 0.6360150031038376, 0.7824605942486256, 0.7161245252172561, 0.5743866846774113, 0.5390030783270743, 0.9587063636244466, 0.5569046290519752, 0.6515396925701102, 0.6441144884710852, 0.8526825815972603, 0.7887903087846326, 0.7069091370171728, 0.9319984624678956, 0.8467985790722174, 0.9551739683545195, 0.9837581790619293, 0.6295353497591937, 0.9328025565315761, 0.9743276108400862, 0.747366562724519, 0.8954174799785074, 0.9159682828392663, 0.8347267932712678, 0.6183268642947315, 0.9701725202586081, 0.7658081185525788, 0.7785405276976569, 0.6318474264261338, 0.8974474199375724, 0.9169471755121856, 0.984889934202178, 0.7475771804450195, 0.7698561681380888, 0.9251931728478542, 0.5593314058598247, 0.7843782159726695, 0.8009835075367655, 0.7203101932749918, 0.5277322297595733, 0.9669937475426131, 0.9367155457509946, 0.5577690531788697, 0.9110369569644681, 0.7457074915286481, 0.7304186618651969, 0.8428894833509974, 0.5108224012630123, 0.9676943546082724, 0.8036713958888873, 0.885267727559069, 0.8144193069706074, 0.9596694569027019, 0.6311844659712574, 0.5751165823814123, 0.9022218778050708, 0.8036341059651337, 0.7571526285446812, 0.7606153966512067, 0.7542962261457582, 0.5417601321443573, 0.8702360202561994, 0.7792121247253588, 0.6257131771902875, 0.8211757746396372, 0.7401616375862154, 0.6119868894367452, 0.9280218504261188, 0.7830710375257124, 0.6786682161150337, 0.9572653019915751, 0.9259041021742431, 0.902591370866863, 0.7575094023921543, 0.7471903053588917, 0.5119248721276597, 0.7158750447258047, 0.7141502385957013, 0.773787381507377, 0.9733255733855358, 0.8449819195116596, 0.888260243831964, 
0.5309333435165842, 0.7865482264084442, 0.7138982672649223, 0.9035890053085944, 0.5899565807044973, 0.9711261039255104, 0.6959331899043497, 0.5791744334264666, 0.963352961113969, 0.6722253667855222, 0.9033882787120593, 0.6357881834191634, 0.7345409183945768, 0.6628193673467773, 0.7304911433631534, 0.7836367169637122, 0.7070814915091888, 0.8291139568137527, 0.8949011596512841, 0.9595996766716808, 0.7397001469162521, 0.6369420129137708, 0.5288878348055468, 0.718926175951268, 0.6337691299244369, 0.7204681941196361, 0.7477403492789927, 0.9293879847652688, 0.9141085264138719, 0.8976409115215362, 0.8960588192965895, 0.715086694005169, 0.7272749534911531, 0.7898210166072788, 0.7064541242765282, 0.614227686463447, 0.800727923179881, 0.554200733465494, 0.8620737405025363, 0.564742953574842, 0.8980283054021114, 0.6155927097545331, 0.7863255106630963, 0.9198563351920834, 0.7175342904663985, 0.8422087624225225, 0.7687567945114056, 0.6318111954066559, 0.5271827186404288, 0.9031241072332947, 0.7357033808424032, 0.7765302129661655, 0.8242812806622946, 0.5431929879854183, 0.7568232880765904, 0.7604351457091321, 0.8134446419821397, 0.520547685845391, 0.9454374490137045, 0.6428776902226172, 0.6680214832097082, 0.6113945147384074, 0.9185669482439611, 0.8364792311579381, 0.8128784206342594, 0.5565439552310651, 0.5723337717035396, 0.7801911127926036, 0.5114788880227492, 0.8866273280196026, 0.7321802959873767, 0.6854431400579164, 0.8343956065991975, 0.9947782458956891, 0.9267697476607493, 0.8987592220899997, 0.5276322248699591, 0.6087219628412242, 0.908070349821515, 0.8192029549057065, 0.5984122413533788, 0.656550184585949, 0.8899014219434909, 0.8992474236637527, 0.7940172799927752, 0.8578046286309946, 0.5512024884846598, 0.8829382311428956, 0.6764322408456509, 0.6910450470733505, 0.7506957104580123, 0.9652667801984417, 0.5576636065847722, 0.5001336034678397, 0.8612560427781049, 0.5455197392722368, 0.8964506943883528, 0.7124278434479127, 0.7230313338102714, 0.8542426301607602, 
0.9083460064094822, 0.7901843266987116, 0.766400088737837, 0.8027042026112892, 0.6313822814959918, 0.5110683664531606, 0.902451992815773, 0.5942052523011239, 0.5629642006125313, 0.8712873368077135, 0.7181788070395319, 0.5971766013482163, 0.5611294967883369, 0.734784944566613, 0.9972781791842404, 0.9245596742948867, 0.927086893235036, 0.987840376549887, 0.7488846440988799, 0.55683334981731, 0.7629506250688594, 0.7091880822559782, 0.7970036038741555, 0.6562893234373567, 0.7406886505645176, 0.7236960936065611, 0.6740988800906067, 0.6787385162263175, 0.5455168871936502, 0.7340961599637108, 0.8427801936021888, 0.6238685890576177, 0.5426474468161566, 0.668647060046739, 0.6412730153606091, 0.5598887810692226, 0.5826220027712274, 0.9929694423367394, 0.7436629279342584, 0.7719988383771945, 0.5580829607833628, 0.9086699564604771, 0.9560213463714858, 0.6561908394486992, 0.8432573404597397, 0.7202234558601825, 0.5605393491624695, 0.7044434978955626, 0.5109059377366789, 0.8854812170969821, 0.7709678389146355, 0.58041858625948, 0.5785701309250384, 0.7896047608525882, 0.9759049624063353, 0.683902704743939, 0.5995070074390483, 0.9510228035501807, 0.9271405020648498, 0.8222320013498718, 0.5246692530938826, 0.637397230951358, 0.6447789982434328, 0.7282379046222353, 0.7824910711013253, 0.803657055911326, 0.9242568313403299, 0.6650894760365497, 0.6725910335767853, 0.6839069197962451, 0.5913553298531732, 0.8575160972504131, 0.5373323425874945, 0.5803507889863313, 0.5351716353746337, 0.8960187466867161, 0.6498872023069457, 0.6242756340110243, 0.8975307052545897, 0.7732563722943215, 0.6445655583011023, 0.5185108714151397, 0.9388222228610057, 0.5459650272388292, 0.9661738406875342, 0.8388304695842718, 0.658000958165494, 0.620423605777592, 0.8078180511643807, 0.8594702252327011, 0.8936639603712191, 0.7333627444443636, 0.666846264842812, 0.7082045589817741, 0.7718997419321766, 0.7364174604361448, 0.5608298455695833, 0.7252935184103513, 0.9682841520044289, 0.6019851937384872, 
0.5284922203930709, 0.5716859243003886, 0.597105920340462, 0.8694275970439955, 0.6039315901874155, 0.6752766057743053, 0.6844526421146271, 0.5502475615540927, 0.6307611860874345, 0.6439314921889947, 0.8214214023626731, 0.9164985120291872, 0.8406706700486368, 0.7595000381625292, 0.9415600248718055, 0.8957190354285132, 0.6945613136005576, 0.6258690089418391, 0.5560166735039058, 0.9192160102423296, 0.7455708275092636, 0.6375403434920256, 0.8731102490459601, 0.7280785826867896, 0.7356306556657992, 0.7797719103919338, 0.6434667088810702, 0.5940722328901712, 0.6029958229329007, 0.8719703234718883, 0.8776645255695563, 0.9143051759775978, 0.903360424580822, 0.7684003049244632, 0.6745698158391327, 0.7693117249899923, 0.7538611810165996, 0.5713791502102805, 0.696036054288617, 0.9711223433220706, 0.5654051505104836, 0.6779858458828086, 0.7909977323319507, 0.7908020863177767, 0.9736634109850401, 0.6454849306664883, 0.9665809629458143, 0.6436451033434956, 0.8812244828225564, 0.8788460311224423, 0.8742197100709442, 0.9475290362464279, 0.604277102733094, 0.7082608729812053, 0.787594462632144, 0.8272940000969469, 0.7550623062487953, 0.6262436709712238, 0.8259613777315852, 0.9392351463052253, 0.6048080598993854, 0.5109544702056829, 0.7700297444645009, 0.6252727110667393, 0.8849156591927494, 0.6813893659398719, 0.6857736961523735, 0.7514313446813234, 0.8054492656879684, 0.914375539378816, 0.8589821586551027, 0.9027181738947685, 0.6655947526026252, 0.9800410231591039, 0.6147969671094635, 0.8256598947951299, 0.9994876614739396, 0.905034755321875, 0.5307929828562579, 0.9663827647166294, 0.6375519063456715, 0.8277776606388141, 0.9346118266718322, 0.5462172008323236, 0.9233637099718977, 0.8769175792312149, 0.7363687150535523, 0.5937258264543381, 0.9408963090779094, 0.9698390031170969, 0.9291674640675067, 0.5673521522839533, 0.886867439138874, 0.6842669631526668, 0.6347008199222254, 0.9992557652118594, 0.6379428139083138, 0.9389416582540386, 0.9170653066577314, 0.9683445369944117, 
0.5356595293973281, 0.6846048244343576, 0.8685290035737733, 0.8183282315370733, 0.9899951621425016, 0.7106655968227205, 0.5131770279100264, 0.5035138957654663, 0.8313219029892803, 0.8955043680461069, 0.7948879367279194, 0.7669779491508629, 0.6805258853921543, 0.9705489568177557, 0.6494872962960987, 0.5430616596794555, 0.9162288187009686, 0.7163861728835987, 0.7816786642304907, 0.9608059668829074, 0.9755238560145933, 0.808051405803614, 0.5769117012274392, 0.8861168818547742, 0.6671737147837389, 0.7282794186174393, 0.8183073989249189, 0.8104202366438874, 0.7531955636675762, 0.9104062364955002, 0.9321703352628311, 0.5273829163584178, 0.863262471747652, 0.7861153795472978, 0.9697074986285359, 0.9426129467281634, 0.8968024600652929, 0.8260756279191765, 0.6544961699506369, 0.8299875408538939, 0.8109471942706923, 0.7051534131979237, 0.5097263822075413, 0.558426099538821, 0.7009907972038316, 0.602602945613981, 0.9026092990669834, 0.6643551255929487, 0.7880131084583677, 0.84617505344902, 0.8427140711705565, 0.5947040343515664, 0.8804933257015441, 0.9765766081531451, 0.8077733111099449, 0.7552823816793716, 0.5713676539054826, 0.523391126209486, 0.7311165367890352, 0.6431962944604187, 0.7982595027939094, 0.9852662501245025, 0.9367025980892598, 0.8954180995012139, 0.5478590429900279, 0.6095153220462287, 0.8748812699870285, 0.6171960836669927, 0.9567820056547649, 0.815367008817353, 0.9041538851169574, 0.957423039532159, 0.8789806019296218, 0.575527709238939, 0.7926854974932998, 0.7603511143653392, 0.5067900767329327, 0.7751174958620849, 0.9239931441110503, 0.5397469274307606, 0.6307781118863305, 0.9842764700556035, 0.5875971004895171, 0.6518280553275178, 0.5687540505240836, 0.6309634561227406, 0.5506661201805493, 0.8683936583365313, 0.9704710744653563, 0.6248275123035869, 0.6334560228566761, 0.5251546938484566, 0.743404289620226, 0.5443608511072165, 0.5985666407364683, 0.5201726036946988, 0.8804871740518267, 0.5515474882186432, 0.8329696526869317, 0.621898989984784, 
0.8106953648623096, 0.6603213565579528, 0.5476280281015058, 0.5630939697958766, 0.7150414427188716, 0.6584786328325312, 0.8224563978784134, 0.5027059980528774, 0.6786464568761472, 0.8066290101341054, 0.7752156577857452, 0.7994321090188186, 0.7921193632882303, 0.9832298653439377, 0.9727390682015862, 0.9085632872837022, 0.7225949678132322, 0.8621331757503197, 0.5547413557217691, 0.7305126385021052, 0.684645767237667, 0.6770599053660074, 0.6486799970218013, 0.9897412059185828, 0.9084754527180561, 0.8295186327326727, 0.683972314863232, 0.7985078849229126, 0.5827918188443317, 0.7259038343843542, 0.9357877429749217, 0.7373146008171165, 0.9084252681858657, 0.6634762984884992, 0.6301098608340822, 0.611598102832037, 0.7076332035798896, 0.7033110561242844, 0.5489422849091968, 0.8300384159668306, 0.847033189254546, 0.6050733693486547, 0.5438123352227251, 0.9870310497778397, 0.6693467715262073, 0.86784415250135, 0.8888280190727151, 0.5751386529008136, 0.9364771006987794, 0.9903105879287664, 0.5128287749875002, 0.9612766087444218, 0.676452193440726, 0.8355552632590293, 0.8249289307670286, 0.7662646868096359, 0.829401600602881, 0.6031700945954391, 0.9570000441738263, 0.6480712325337591, 0.6641655582060739, 0.6175134884969018, 0.9344718965935604, 0.7585978651072163, 0.758530726994734, 0.5553511787610881, 0.9153133181108966, 0.7692729918581285, 0.9085699710502311, 0.7697226107049859, 0.6922865653472797, 0.9715937102591742, 0.6912871167429427, 0.7378484991576852, 0.5473902220201774, 0.7571352654375378, 0.8199565722287203, 0.8649376300160734, 0.6975412880863627, 0.667803697945502, 0.7131294929774812, 0.7437780692065276, 0.9115934737985216, 0.833570688514876, 0.9618480544452224, 0.9488596016581533, 0.5457111752704153, 0.5911318535529964, 0.6587139175566996, 0.529488824778573, 0.5217431752793664, 0.7630002613221223, 0.6256637223385811, 0.7182629318844993, 0.9848445514671771, 0.5108458772401279, 0.6873047980656173, 0.6729941849480114, 0.9830903371507416, 0.9244507193545356, 
0.5571174869989765, 0.8274671869886201, 0.7212867514664072, 0.5066716337347985, 0.6073642766071774, 0.5759583510910682, 0.6392699913141688, 0.8821419287471213, 0.7627986438514108, 0.9368771868264514, 0.5364579600933459, 0.8334061208433301, 0.872374307359634, 0.5412341658875819, 0.9630747848957222, 0.9004577751116024, 0.7970348309586386, 0.9390997088931234, 0.8588545494482696, 0.6688610320542108, 0.6337846139637908, 0.7015371150634714, 0.542555322343935, 0.5736183885424244, 0.7820830880078845, 0.8896579470173357, 0.6553068303070203, 0.7857488418417224, 0.8678468638236312, 0.538900480977592, 0.8042116031333993, 0.8421650320120525, 0.903673724335393, 0.8977917882433799, 0.8521333087649696, 0.905494854529006, 0.7958656090338774, 0.9389951584997205, 0.8211036204977693, 0.8961815657287313, 0.8741248936469235, 0.844058471817963, 0.9192326794207668, 0.9489503807752173, 0.5277962005196142, 0.7717221577141159, 0.8192811423415947, 0.6980922385642219, 0.8511163900074494, 0.9867113318044372, 0.598384413304989, 0.8489669064615251, 0.6546631678113445, 0.6926494398001981, 0.5768237212760474, 0.7578008831004295, 0.8657771764492432, 0.8742504096776054, 0.9558601835862719, 0.9788198246738857, 0.9271460742569815, 0.5616688905516399, 0.9351778776581972, 0.5273910301864844, 0.8897451714855891, 0.6595886269518045, 0.746190014200573, 0.6287770881094774, 0.8830018759892078, 0.570546846205167, 0.6718541160917998, 0.6525615345601636, 0.5073108797182075, 0.8942285659343384, 0.8959660614915126, 0.8391330777092527, 0.7087404643333085, 0.6279686950063307, 0.5544870399733262, 0.8375364043685447, 0.7248615624181125, 0.6030244962871678, 0.6891095335741182, 0.95597167840707, 0.9965407275970292, 0.5962327088385707, 0.9698858788541271, 0.8886426413395376, 0.6563022708108133, 0.5077242904534092, 0.7702622803156807, 0.8052120034506155, 0.9794115783759336, 0.7180879553854074, 0.7715566648033443, 0.9501201921467592, 0.9147434585386939, 0.8170467091528383, 0.7975684057140691, 0.8000332292605468, 
0.6992493764210201, 0.813848226117116, 0.7189673170562068, 0.580896885382431, 0.9758983852979767, 0.860145301925225, 0.5187466938011907, 0.9256934051185923, 0.681202711966026, 0.5129843708244533, 0.7787862876661878, 0.9005694406508705, 0.6760075707146902, 0.5599756293238809, 0.9542774790346125, 0.9539505648462129, 0.948673740566192, 0.9475567872853535, 0.5410278163886764, 0.6885953569841428, 0.5947732352007191, 0.5822112979985499, 0.880700556854966, 0.5226009808695022, 0.9130461206042946, 0.8708589378486544, 0.6400871395096277, 0.7538517292488903, 0.8731824133293549, 0.8940087179026057, 0.5038005316200165, 0.6819057659913471, 0.6084383301628409, 0.8253972319253389, 0.8022156291376722, 0.949646276940258, 0.6147257105017567, 0.768958505229071, 0.8194892977333649, 0.9717034457497509, 0.9349274046588762, 0.7922906651062107, 0.5133717202558533, 0.8646443852872374, 0.8895708573775845, 0.7958749948262864, 0.88834175898304, 0.7485483747891819, 0.6450008809454224, 0.9830978463183739, 0.7035677984402493, 0.904937252189091, 0.9327698952427328, 0.5415424666718529, 0.5340888441373757, 0.8966961943608454, 0.5997002859756368, 0.7582436416870799, 0.7419071283317895, 0.9970877300535752, 0.7110131560261155, 0.5056410741197108, 0.5396599519712753, 0.8980237206600448, 0.8676883248714351, 0.5508724988145814, 0.9751799776766691, 0.7243352714948192, 0.8273690690401767, 0.8131095766792205, 0.5753134015595363, 0.6145527978948102, 0.8044004257086064, 0.7019887573514607, 0.9944493470600588, 0.5411648157193809, 0.7383370419048048, 0.7436611051168809, 0.9639615878246787, 0.7393764123389583, 0.5988350037165315, 0.7586062331941319, 0.5686701865012188, 0.5719649010134908, 0.7691801273520934, 0.9822250889162654, 0.9756609526193615, 0.982363647753953, 0.5951363827966321, 0.7599154396525774, 0.8513924386235832, 0.877420945164737, 0.6124433299397685, 0.9583082628853181, 0.5871413941156409, 0.8997877500697009, 0.8529751033486126, 0.879615290426673, 0.861602587577484, 0.9619991691239644, 
0.6273065159107201, 0.8483070099273461, 0.806448841825495, 0.8063020366307576, 0.8698465418371276, 0.5909834338031927, 0.8937028156575402, 0.7052995625577643, 0.7412832953174442, 0.9976246129597981, 0.697163746836896, 0.9974830338854203, 0.5981707556793968, 0.9492901186610515, 0.9857570287308917, 0.8334562843343944, 0.7839655825523958, 0.8218662266548107, 0.5078246841944598, 0.5462843342921223, 0.8023973294418056, 0.7051423788344798, 0.9201600872259676, 0.7996118395282348, 0.6769380932617316, 0.5198249948465449, 0.7917545544425337, 0.562394489013309, 0.9409420845324046, 0.5711421848526925, 0.9978635044356181, 0.5967466647850618, 0.9588410433043884, 0.7805939382888769, 0.9661235971190982, 0.8363594045432248, 0.8194385247188662, 0.7702647321941387, 0.7908745454218515, 0.7727358129963176, 0.6772096121828708, 0.8017695839349496, 0.6051454917110829, 0.6928756225357249, 0.9795043661147482, 0.6004890832569472, 0.5817885540663513, 0.6511120496620849, 0.975784012359415, 0.6417566623007267, 0.9803740182729255, 0.572758781289773, 0.8711886006166697, 0.9085688136950738, 0.889801142533724, 0.7157565325244062, 0.8770220994366902, 0.7267977507951808, 0.5320327327560139, 0.6790819315574073, 0.7277861503805555, 0.6074138960847969, 0.8712484057998082, 0.5623476644806062, 0.767047595951271, 0.5331057144931326, 0.953760944995629, 0.727624724292464, 0.6816501922038616, 0.9446689797254766, 0.5221832796491161, 0.6215673673685829, 0.6911634672392547, 0.6993818463626181, 0.6199733710813371, 0.8163383776609259, 0.9719710079204597, 0.7823407312046144, 0.591475500590094, 0.7892306118865056, 0.5681091129567502, 0.9445409477824799, 0.5437843058359163, 0.844901177148836, 0.8506263992651755, 0.5494094538542607, 0.9511278838856514, 0.7825395703283897, 0.8526179990023592, 0.7448922818534519, 0.9182013309910455, 0.6466952907739364, 0.6155130835979976, 0.842278840114387, 0.9542197460646129, 0.8306374851616845, 0.6540356031032082, 0.9358511403720955, 0.986015716430377, 0.7159051642344116, 
0.777851937872202, 0.8533273583308871, 0.6150645895834719, 0.5586859480476404, 0.8467991930629166, 0.7040514806213705, 0.771163087921058, 0.7461307857566088, 0.5555797140652374, 0.9520281760907703, 0.9398792576860725, 0.7357913320730666, 0.7725509043351712, 0.7525952809414207, 0.5245976633653591, 0.8358848408121968, 0.6665231323754834, 0.898811378576037, 0.7157550111838775, 0.6310681153672546, 0.6336289771062926, 0.5870409790260966, 0.8300110302831794, 0.8990203985303973, 0.732286779065446, 0.5161258595955858, 0.8906158677372108, 0.881719748570164, 0.7425171104395281, 0.5153223098028785, 0.6403454916071163, 0.5010969559706748, 0.9624900656013632, 0.5296207748460224, 0.643972928250884, 0.789311771980802, 0.9832446466948478, 0.5618209450352603, 0.9463197822376137, 0.7122047804787885, 0.9409921727296384, 0.5861815657084631, 0.9406432286537747, 0.7518297261613988, 0.5221058348123369, 0.6368412161129671, 0.9718762964754651, 0.5756261685129601, 0.5938248830677837, 0.631437927302481, 0.7543444536857766, 0.6013261549460671, 0.5485774150802245, 0.9663505547170346, 0.9472535081891276, 0.74336956591351, 0.8969152982478908, 0.9071297612854089, 0.9988553291504345, 0.9083008694768588, 0.9832445348075345, 0.8376971582588302, 0.574074065019787, 0.6271352777137036, 0.87420158201666, 0.6860661876464433, 0.5422531973808841, 0.7050466743437009, 0.628215501217007, 0.9512912740426593, 0.5277154570828894, 0.7027990305507696, 0.6703282557749937, 0.9063591159209086, 0.6447658817282526, 0.825592247392927, 0.7481974072674842, 0.6783459811518568, 0.9531836823385675, 0.6854922632433431, 0.5068996441544156, 0.8422565307493477, 0.5387456696346975, 0.7944114485686848, 0.985216122403962, 0.843171988913989, 0.8486095062050252, 0.6877029306468021, 0.9153442827344144, 0.6878005127721023, 0.9784441275300263, 0.8831981046341608, 0.5153381492011975, 0.8418557019858006, 0.9437923127273671, 0.5185889912521449, 0.6750791070164642, 0.9873287617889113, 0.746292346056705, 0.6620372604551864, 
0.6773944451300689, 0.5339192198251799, 0.5432333707129327, 0.8645206274583248, 0.798025320653431, 0.9985642786403853, 0.6778369977093459, 0.5930656509902539, 0.9967674657195609, 0.9467266812818258, 0.9710583114574536, 0.8580410907496788, 0.8708937657757163, 0.9586556066642, 0.5142102606738312, 0.833296543696247, 0.5296117212747624, 0.6353451884055382, 0.8297507818874944, 0.5432639790172981, 0.5912603836667414, 0.7542631287488275, 0.7299020792331655, 0.9581032085542213, 0.8916247969034785, 0.9199354529977577, 0.7150075343984952, 0.9670571561701933, 0.6840448471091817, 0.8547273429827227, 0.9507954557305958, 0.9506283298099676, 0.8466999421378987, 0.7794367073923187, 0.6089496836937078, 0.9863852454970479, 0.7348000898661053, 0.8568530571352062, 0.6899602705477119, 0.8178726545426631, 0.7228297313949346, 0.9495790220083671, 0.8416231787353086, 0.9818617646609685, 0.5804898659610493, 0.7032024096453653, 0.5253914104485315, 0.6001327677368943, 0.9431256746535912, 0.6672752007710029, 0.7742166706547223, 0.6575360386427056, 0.7543611373539297, 0.6360277951629019, 0.5639969389740781, 0.6999010933802963, 0.6720793512072085, 0.567943432957245, 0.78856594607749, 0.7218201867665259, 0.9979858454372079, 0.9627634435305905, 0.5930223518544863, 0.7419302902669207, 0.8019829978350488, 0.7603713750842496, 0.7853626567062293, 0.7366560708386092, 0.9699941472063116, 0.9073919035643001, 0.7713984271180845, 0.8579319776798314, 0.567654637916118, 0.8394817318018596, 0.8349947849443335, 0.8239607711368281, 0.5109436438088244, 0.8793590559685529, 0.790379510048979, 0.8255556376714851, 0.9618511684676747, 0.9935470025827519, 0.5381697039823615, 0.7205556481519007, 0.5645666400895701, 0.5955219615647735, 0.982187812976567, 0.9943309256735963, 0.815779608299485, 0.539011956980312, 0.8860776145549837, 0.5646027702365493, 0.6583234203799238, 0.9478114094580077, 0.6898280721207322, 0.6816957739619807, 0.9956648421868209, 0.586579973411156, 0.6660185947348678, 0.6107984708520174, 
0.6245415828215732, 0.7129082160238543, 0.9814318270243487, 0.8262055196207452, 0.7960985912287948, 0.793866576128978, 0.6770261298823774, 0.677145387074513, 0.5857277235730532, 0.5165239593228295, 0.5627864036061102, 0.8138806065942239, 0.8592510598991027, 0.5671355236037516, 0.7079219595064885, 0.626124579692505, 0.5690197168668669, 0.6904603286760356, 0.5423468249186547, 0.7826949184796776, 0.676154018991832, 0.7239454743057088, 0.7687337251796165, 0.8910025136539439, 0.7196902167164678, 0.9136748672852456, 0.6857718584589886, 0.5221395193535193, 0.5250847105733526, 0.5858820991591019, 0.5556958843185388, 0.9797830429935083, 0.9362609537120896, 0.5415861955196428, 0.7929525460492699, 0.8727453243619607, 0.7573991149961978, 0.8297532361577785, 0.6926629163242397, 0.876970125753489, 0.862463789777703, 0.7159600833819445, 0.7739556445663951, 0.5076500922214575, 0.5649878721276727, 0.5255838482914401, 0.6905422313786471, 0.6720541710770958, 0.8558364334957782, 0.8896866308490958, 0.5579288460940572, 0.5998164198510771, 0.7098607353216231, 0.8027775326641822, 0.94100384904476, 0.9551931419726285, 0.9561168652241498, 0.5505916630761435, 0.6739312543889427, 0.6734845710533881, 0.8065948572052405, 0.5435247709716231, 0.5365534015038121, 0.9181202133545633, 0.7825286618404055, 0.8203385102599055, 0.8056116341645434, 0.575473372705372, 0.8105700737188023, 0.5114384470364404, 0.7018932212465012, 0.7723512992241972, 0.9034427371256772, 0.6331950374058064, 0.8026279163744874, 0.809307799847798, 0.7674336079314487, 0.8106761787794912, 0.9608923424974276, 0.8021229948616726, 0.876071980454499, 0.9310461233691565, 0.7898894314687062, 0.8128628771985961, 0.7596032944319986, 0.7344172038185359, 0.9323359569369912, 0.791109563871758, 0.6598891100003068, 0.5723780351604557, 0.9868685873049707, 0.7552130450385539, 0.8136781684878651, 0.9797097982070505, 0.9367749492899892, 0.9084403999674996, 0.8979813965319596, 0.9557207233761542, 0.5561547546652043, 0.8056218476172641, 
0.8116056273975298, 0.9370416017775722, 0.8050238137321206, 0.7449834606967906, 0.8168074189001426, 0.793908209518051, 0.6753613811995536, 0.9563288052568868, 0.5538285292535674, 0.5290602173550524, 0.7027379946478465, 0.5089662270512351, 0.9395402648216058, 0.8538859289082626, 0.5046232123504315, 0.9122267296289969, 0.5872586390559771, 0.8772278980247449, 0.7519129836110487, 0.5338820817101135, 0.9959622646109378, 0.6837972631824765, 0.9634434544549246, 0.8978022689885989, 0.8676496991767161, 0.6978899967295747, 0.9983728223248416, 0.8156244842744406, 0.5861666569985702, 0.8080121473132869, 0.6734355807336943, 0.8976377669359636, 0.9233654662914008, 0.5365773925976363, 0.5347693528222686, 0.8369425688051725, 0.795377073513881, 0.8581320702684482, 0.7897843754326357, 0.9228527435837008, 0.839680640417894, 0.5693032191994274, 0.8597316507059979, 0.9733688976178037, 0.9285256773984257, 0.7888520355746378, 0.7742655025231746, 0.9995030599051248, 0.7804059936703094, 0.8406892894848075, 0.8759535583985304, 0.9722484646241145, 0.7285979883664637, 0.7910441738699515, 0.6671077617178438, 0.5893432179457612, 0.8066849345504137, 0.6508954117588077, 0.8258402037135792, 0.8476868468280825, 0.9673376762668603, 0.9588189608714468, 0.6630021897501521, 0.6165019944995626, 0.5364916391181712, 0.9545039360639747, 0.8337734493303537, 0.796324485479156, 0.8695250403035228, 0.9972363916628966, 0.6282951222479424, 0.8517778279971411, 0.9095292341525492, 0.5452068841744167, 0.5173369306097473, 0.7646504151585249, 0.7321719376599234, 0.9106746829268809, 0.7642417315425629, 0.9328256105465984, 0.794583893795509, 0.6261639489244435, 0.9163279449570931, 0.9935363908275694, 0.6299322942715584, 0.6376016110758801, 0.9703696610842076, 0.9099836435414679, 0.9929542459629765, 0.5151871800788789, 0.7940459426464317, 0.8991581291733493, 0.5717482198395478, 0.7814930649761669, 0.6290843615820356, 0.7719328279491089, 0.8846583167738545, 0.7549149861322626, 0.9605081385471741, 0.8950715072900877, 
0.9489919038932879, 0.6457781633969703, 0.5211326993057397, 0.5856620178111913, 0.6176383060932618, 0.707855325787361, 0.6896463940370787, 0.8273214515887419, 0.8115939921174724, 0.8956159902499549, 0.9005496130248664, 0.8884410008021397, 0.5494633089602083, 0.775083446726425, 0.6835761604352686, 0.7967729427815374, 0.6035555291145629, 0.6408428275684868, 0.752001929862215, 0.603584761339244, 0.633750943044061, 0.8541867961901726, 0.5400259342968416, 0.7479868779694071, 0.5207718241464703, 0.8931222244568144, 0.5212996458939372, 0.6641410994630046, 0.6648588188350685, 0.7173141291178511, 0.8548215508517898, 0.618934045915059, 0.5653643022230057, 0.9161589530333558, 0.6359982362723728, 0.860201039158693, 0.7581130277193617, 0.5036551279003408, 0.5747324836560131, 0.7917287735780105, 0.910937950572779, 0.874650361705939, 0.9855282922839264, 0.5623972370391024, 0.813383438868597, 0.7575992639390989, 0.6732007875185035, 0.6255791031476721, 0.9469490297827741, 0.7345076277192495, 0.5410624991358772, 0.7045753818255991, 0.7288720628002912, 0.7037611638335839, 0.9710070997330117, 0.8022728542451314, 0.6310398238604729, 0.952034574936194, 0.6182836788806969, 0.66641903325216, 0.7694779980804999, 0.9891983845312803, 0.7347075046579122, 0.713850471015705, 0.6755054144140276, 0.5841446822598649, 0.6645289879729257, 0.7990245084587575, 0.9860326491135885, 0.8665715896946498, 0.885137235474478, 0.6695013770772626, 0.5343888769190845, 0.5525726082196901, 0.7653860836824844, 0.675556749997873, 0.5686548254835562, 0.7894511688748038, 0.9171691834213881, 0.6853958062611103, 0.8427754453212237, 0.857063911007617, 0.9404438402226549, 0.8126191258093736, 0.6809918265922235, 0.6470757787239868, 0.5876452279036182, 0.829879576410377, 0.7162384852645303, 0.6260549161649139, 0.710444479676583, 0.5451052704211538, 0.6002762323931472, 0.7446691178260008, 0.876145471605769, 0.9273397734492796, 0.7135532023146662, 0.5735515811287766, 0.6707526032524721, 0.9231366175113177, 0.5299503310883396, 
0.6102037625063448, 0.8548620397036211, 0.9662775008794792, 0.7047486309824791, 0.6598282550796781, 0.8186452549456976, 0.7307936711893506, 0.5796963669927085, 0.7153487965850837, 0.6976484306034063, 0.5538058932457308, 0.5157461955796203, 0.8389740243879036, 0.5185213921067179, 0.830047862624796, 0.7260511079100844, 0.8083399761137046, 0.8353305146130443, 0.9397350134339253, 0.922721361261817, 0.6451504647300298, 0.8319343974797508, 0.8883514234760221, 0.9920005374997083, 0.9161627620844868, 0.913915834872233, 0.8375277152774465, 0.8425135079037128, 0.7971326094375357, 0.6973283877213294, 0.8600447842159022, 0.524816287125067, 0.6741986244051593, 0.9770696957677317, 0.6651074225092114, 0.9865308917306403, 0.6913878168569552, 0.6265605065508788, 0.6885832044628699, 0.8855394512455851, 0.8355385169279774, 0.5600393846737515, 0.561788938636468, 0.5734494906387395, 0.6777384512300886, 0.8740514127925297, 0.5331328009856291, 0.6795788612787532, 0.7502081589669968, 0.9742505964718364, 0.7688371856213465, 0.9307962860161374, 0.6990847073319579, 0.9318477071933249, 0.8411117603170926, 0.669683430506105, 0.9134297771359725, 0.9393936374807517, 0.6439682109164526, 0.8995319404719655, 0.7246949126930551, 0.7375625972260613, 0.5483824042802935, 0.7168530932749351, 0.7869219312770118, 0.8191916059227775, 0.6520389803625781, 0.9615790344326561, 0.8981546868997321, 0.5763503228540976, 0.8508323271806302, 0.7246303941207156, 0.5733035755100346, 0.807452920605709, 0.7816183693779772, 0.9043788739695517, 0.931917051800496, 0.853885227208835, 0.9908708049116044, 0.9004444941868063, 0.7229825981616098, 0.5853520759486186, 0.6614469992984804, 0.640833008481379, 0.638935027042622, 0.7204602367782975, 0.8223946169594003, 0.7523393149576953, 0.5827255716149891, 0.8195806248721509, 0.5635362619083791, 0.6684091281615487, 0.7223512116138041, 0.8019469806231896, 0.699152723669537, 0.6647449280008157, 0.7968719046631176, 0.7175236141100166, 0.9009212248094265, 0.6275057220181938, 
0.9608758763865051, 0.8245378739210953, 0.8102616284597515, 0.52986843709753, 0.9899661060895855, 0.9959652401519554, 0.8649556855947742, 0.8422907114174498, 0.892369446895227, 0.7465148028670313, 0.6290369676114235, 0.5732713140423558, 0.876396297322728, 0.8378358417430698, 0.7886391937697059, 0.579914917674193, 0.7525939474208412, 0.9479939518109528, 0.8408257500511414, 0.7480701642385175, 0.6440105798001348, 0.921356437424961, 0.8662492727533518, 0.9723069972928817, 0.5464801602347928, 0.5463556785669488, 0.6468105979276256, 0.9245174044700407, 0.5713486922013681, 0.8970366321997925, 0.8513592958696983, 0.7128646488317867, 0.6997105955267222, 0.8100115737229295, 0.7373809163650213, 0.6470165817848742, 0.6963174758510762, 0.5274010888926779, 0.8975889365665966, 0.8077482533425522, 0.8625951116645407, 0.8559497391443734, 0.800421468791622, 0.742291701107636, 0.8249224897322418, 0.7310529854366228, 0.6556702748808696, 0.5329453351700871, 0.6569613862932132, 0.6614806726116169, 0.5125645474092451, 0.8774323763565144, 0.7142010433248891, 0.5157886257404565, 0.71172074677021, 0.5901800666292902, 0.7632796783891271, 0.7564712933310731, 0.6275467755995017, 0.555714163707107, 0.5232984329484609, 0.5858164114855389, 0.6691295382517765, 0.9879307116315642, 0.8316411522117396, 0.9644566556249771, 0.6239150064370961, 0.8740054518657987, 0.7141144091609906, 0.5174238310094156, 0.5824495372617196, 0.8458167674336057, 0.9699665137004154, 0.936831132380469, 0.5596464468297595, 0.7526481178861877, 0.8919111124733012, 0.745887583955118, 0.6382708786274751, 0.8551014958280978, 0.7599463457389295, 0.513422064782439, 0.7552045947933057, 0.7177106429926867, 0.8394835548470536, 0.5159875435852168, 0.5428751897014781, 0.5170081532880351, 0.5777588700692113, 0.5278637062743623, 0.8615262750068757, 0.6223905186234522, 0.5138209986195428, 0.8275406328102839, 0.83250636627959, 0.5638709770563579, 0.6956838067761415, 0.6315801351063457, 0.5304721356039349, 0.8321053649117072, 
0.5583566188287628, 0.8105660904874803, 0.8188693252317127, 0.7278980815718052, 0.7776468116842283, 0.8701592795034652, 0.6514998206636178, 0.6291536707351446, 0.7989005457556451, 0.7473357857139687, 0.8898318548377242, 0.9719293399206964, 0.9956308162218706, 0.651661991643838, 0.6902224302727491, 0.6957873646681374, 0.9365263275030906, 0.5835098903605566, 0.9432560033218156, 0.5848738745120371, 0.6294504239936859, 0.9833601846815168, 0.7711198296801145, 0.8132883447352448, 0.667566509089977, 0.5132598109364381, 0.5938004880928874, 0.9754214637536276, 0.7391286659600977, 0.5672932128794796, 0.8745533569539641, 0.9782993233592302, 0.7156249658037258, 0.7543155028580695, 0.9493102827378608, 0.8937006071472917, 0.7911447932580549, 0.9855381683923405, 0.8586052817521845, 0.7011472712991157, 0.8258136235440261, 0.5642270330473658, 0.9945697183459226, 0.8287928757168879, 0.7429517808377253, 0.9963970534253623, 0.8263644154100245, 0.592981974941447, 0.5399275922481565, 0.942384025710859, 0.961978736420062, 0.8631964717064504, 0.5410013157283347, 0.9561008555994224, 0.8212318894280478, 0.7646049689999906, 0.5728929527415028, 0.9691647555497627, 0.7459129202603499, 0.7441968696867587, 0.587967600124082, 0.5981387848245956, 0.7302061702625109, 0.7670092116832441, 0.8282232802583078, 0.9260691649173141, 0.5704608470512363, 0.8828981671100102, 0.8285353894086851, 0.6752371141436279, 0.7069452263381776, 0.6538969911592452, 0.963741242029586, 0.6266097238908028, 0.7724327193337337, 0.6332091642642583, 0.5393818200303799, 0.8204175713339327, 0.6561739521310066, 0.9127391379226872, 0.859230444508692, 0.7510902091607764, 0.9572991665360684, 0.989044560917625, 0.7307395551279501, 0.7796097869080746, 0.5053404824476303, 0.785468082580528, 0.8965354276764349, 0.5785230460792329, 0.5924894209610301, 0.9935439783676838, 0.702950313477561, 0.7492802775916603, 0.6782285856328405, 0.6636992593684625, 0.6413071793249896, 0.7127315056477913, 0.6717149807885753, 0.6800651661422467, 
0.5314709521717758, 0.619031794911634, 0.9220908209323071, 0.7795408405600301, 0.6663889490307411, 0.9367949677592188, 0.5125105652729556, 0.5791191769993037, 0.8987802213652216, 0.7518977878940583, 0.6477597194408394, 0.8349440847509481, 0.7588366672542189, 0.5216838889210402, 0.677521291477891, 0.7729767891939664, 0.8394243558966216, 0.5495148741254388, 0.7217335015167576, 0.8703058371134405, 0.6813693417295164, 0.5216629732123264, 0.6461322147729914, 0.8274025593586677, 0.5143245952205897, 0.6050128448806283, 0.7182118925448941, 0.5459193914765588, 0.5560811780944153, 0.5858193742696332, 0.8516208374866294, 0.8277106093948581, 0.5600161590818933, 0.9472122648505632, 0.7987881032103992, 0.7233940662856884, 0.6822130324651339, 0.5268912120708252, 0.5905966997695185, 0.5802535316619912, 0.7191134556488648, 0.9080316771932189, 0.6329606744414182, 0.8641359791728109, 0.9765125252438982, 0.9685931065462753, 0.6699639215823138, 0.8802026543538926, 0.9771938891565776, 0.7366587434536338, 0.8276317785122175, 0.9161736512830435, 0.7744885568500546, 0.505867798141662, 0.6242417196997347, 0.9528817689868889, 0.7798561263585949, 0.8258365514215982, 0.6733069965736135, 0.9978511727270735, 0.8706103113360352, 0.8396192707496899, 0.5304695688570411, 0.6516695200733587, 0.6871161616416605, 0.7913124779639764, 0.9201542187611528, 0.504636726120587, 0.6032929816378145, 0.584941294261315, 0.5078536898025123, 0.7422833547583874, 0.8078195495055125, 0.7438986163305175, 0.7596122002548324, 0.8952863826102007, 0.6656976827056534, 0.7882499060363626, 0.5396210312617837, 0.9343864188981543, 0.7729317294476681, 0.7192004960320767, 0.8243590827179699, 0.7809904810472734, 0.6541557734332233, 0.5708882931023527, 0.5845614828656698, 0.8649416085569746, 0.5717825690266664, 0.5192678536132578, 0.9867976049493608, 0.6581012196609466, 0.856269384798682, 0.5097993415440876, 0.558771252628327, 0.6715163805268409, 0.7475823337119814, 0.795761369824602, 0.8078417880140336, 0.6667115273132811, 
0.8847908372233617, 0.6147671282944352, 0.6702903608147446, 0.5633657914382212, 0.5833104277872495, 0.7493863894298907, 0.5473427957445709, 0.7079197058298325, 0.5094684360890881, 0.6494030138418128, 0.8971081867616094, 0.6387942034652503, 0.6102840522606132, 0.5606148759589, 0.8127051742020828, 0.9174846337979778, 0.8952880071161806, 0.8564621003514851, 0.5601157440121107, 0.9611408541605955, 0.810115390186791, 0.7154596055479665, 0.8266540448579287, 0.7105073411854586, 0.9310212882967593, 0.9486382713183564, 0.7200467194737756, 0.8742759464801076, 0.6754115828272063, 0.5031724758972638, 0.7312701989390804, 0.9201451571155081, 0.6840343237932598, 0.8252796833825833, 0.7540479629989906, 0.8060463352061322, 0.5483948252288107, 0.5552825813405071, 0.7616396609117158, 0.845448221417699, 0.5801568365595007, 0.553384195438791, 0.6129424847858133, 0.9658211048910856, 0.7811506286820906, 0.8359402118206284, 0.8453601594405038, 0.5920941360686369, 0.7877184151949626, 0.8740884330391974, 0.5264885217437107, 0.8506129335091841, 0.576270016265632, 0.5369416809265022, 0.9889506820988547, 0.995905910638684, 0.9228971364899929, 0.6089610303845692, 0.8881934480171148, 0.9605867689342578, 0.5445916802734022, 0.8004925249144923, 0.7736821428517642, 0.9674284444555421, 0.6000681164995152, 0.9805666885371555, 0.5045905913433131, 0.5941443579539708, 0.990891295373342, 0.5995310268036143, 0.8245494399670497, 0.9067837765132312, 0.5409548755572682, 0.6793534535902725, 0.9085698705164229, 0.7759448231163337, 0.6089709083079877, 0.671173175070489, 0.9116523919725262, 0.6735922107479115, 0.7522502946362316, 0.8489750575352031, 0.5558785729060405, 0.9638108610710946, 0.6344430516045432, 0.6021271447198198, 0.5616421882812073, 0.6263138867571627, 0.5650168896746738, 0.9548912666099727, 0.9224179432159643, 0.9982396978101751, 0.6624925167818947, 0.7275826857344327, 0.7428468916621833, 0.5113522621249005, 0.7099543077353638, 0.6136789895185205, 0.566176330932205, 0.9044058351706661, 
0.5222989942468068, 0.8691028364899727, 0.6406047504764982, 0.9999638962621761, 0.6086567419249724, 0.8299007577927128, 0.5008941885524376, 0.8264708394235982, 0.9164923073679949, 0.8883675052925453, 0.5726312285056617, 0.5258659090192845, 0.7280041464980949, 0.5419894535729457, 0.5854717612887758, 0.8660783618571757, 0.7660949996614441, 0.5905696803849455, 0.7024906457891481, 0.6382984275581485, 0.6563416428452286, 0.742698328968306, 0.8625016273383006, 0.5790231255312375, 0.7906981315030838, 0.8118233139047113, 0.6069129640598616, 0.7420972399092189, 0.6625821551425948, 0.5511387334031239, 0.5855885566868024, 0.5780197950994483, 0.930914970615489, 0.7769776764434414, 0.5195847460702825, 0.7560905098082302, 0.7810325930949806, 0.6248442056574811, 0.6893606450825218, 0.5400554979142027, 0.6084203764375882, 0.8737176334888201, 0.8388925472795412, 0.6326642722622087, 0.7234750053874823, 0.8669547704578986, 0.6852302002341, 0.8329989070144678, 0.585355950897354, 0.8213552400524653, 0.665553898316559, 0.6742760264666516, 0.5591688271276194, 0.6744201062132077, 0.5296093772485649, 0.6966389245532854, 0.7039997069381282, 0.8672442484056482, 0.8996482073362243, 0.7651940969700124, 0.7566574807660927, 0.8937708738139133, 0.6158672616840297, 0.5858255626715134, 0.5587518788639706, 0.7432256350582742, 0.6566931160184731, 0.5807551700899926, 0.6999807217149822, 0.5560024055517158, 0.5207620491613583, 0.8334856495345682, 0.7039588290850237, 0.5159760498945896, 0.6846647354871318, 0.94121436958637, 0.9566980518608961, 0.8598782459755683, 0.8507336242793094, 0.6420893454957929, 0.6361787704845162, 0.8477081929338466, 0.7197486425216039, 0.7155412770799895, 0.8809080334767825, 0.6305700582155724, 0.6128641525363621, 0.7035717590499397, 0.5759097620503701, 0.6109640740085936, 0.8680808816651573, 0.566432792309644, 0.910678914389057, 0.953958826473933, 0.7417122645877672, 0.7309871729388473, 0.7295136026427798, 0.9572752978335426, 0.9034699550730387, 0.7425787393804987, 
0.8790753768769771, 0.6255213094713524, 0.6817685641750417, 0.7138655137680499, 0.5144059864252973, 0.7648557350508789, 0.6768563836001743, 0.9555211660760419, 0.6402147656786543, 0.5549251876053078, 0.6073427721317193, 0.9469104751506763, 0.6608595284673631, 0.5431021971671928, 0.6589210176599933, 0.6433846933529379, 0.8502145585826172, 0.8358936901340516, 0.7489534329572841, 0.5433855677859045, 0.7990010821736313, 0.5202276717513219, 0.9216190816300708, 0.8213768464550064, 0.7645287316950993, 0.5966791134188189, 0.6139969902412653, 0.5335402734917094, 0.9834212167825132, 0.9790826283187544, 0.7423810552969724, 0.7685113381345016, 0.9422489410360663, 0.5568646064115031, 0.7910744665576259, 0.675715320202859, 0.5701441794778337, 0.7674049344275067, 0.8705887506615175, 0.9812709599077807, 0.7350018111579065, 0.7489180081434544, 0.6661191357571838, 0.5274016744220933, 0.9230994102388583, 0.774936010172619, 0.97987361670938, 0.7941351971148187, 0.8486760534445272, 0.6592755516430379, 0.8702930806498126, 0.662050495436882, 0.9669699203734436, 0.6971918123913624, 0.7816517532797163, 0.9962085818011526, 0.5439704358627122, 0.770441558227003, 0.8264843061876778, 0.6172410539719976, 0.7010154892747635, 0.5098522915300356, 0.8144951935127346, 0.7337270485967694, 0.553769739630537, 0.578387735557482, 0.74284345413514, 0.9036528026950206, 0.6231425574652864, 0.7677806412705706, 0.9140413471298745, 0.6980782128135046, 0.5747797523430984, 0.954184329798105, 0.6068396582204484, 0.7000764185471255, 0.7950976222335535, 0.8118936712473293, 0.6389225837564353, 0.9921694816218594, 0.9195860430833087, 0.9925726127071341, 0.6982078652849104, 0.8562201387620285, 0.7318217380990515, 0.7763606954594824, 0.6635644026145875, 0.5560275348368974, 0.8144072354650138, 0.5784904222150111, 0.6654202646069458, 0.8202756119980634, 0.8699006808518717, 0.8697572924647476, 0.8727584157032271, 0.7851207223127985, 0.767380522025157, 0.6778942317730191, 0.561742018649688, 0.7540547196419923, 
0.756306435656464, 0.9950489579304028, 0.598133180766286, 0.732140931496772, 0.755755642407592, 0.697597231772253, 0.9595690429885345, 0.721455320588205, 0.5886687952111814, 0.8591031598023079, 0.9343196558589785, 0.7723867451669088, 0.8718298429948594, 0.5487819312756759, 0.5175197282688156, 0.540429857258115, 0.7927245190234234, 0.8705746659653346, 0.7169622680223355, 0.6288778840907572, 0.8318545536081161, 0.9271328813380593, 0.9872669826008607, 0.9737561201385017, 0.6979219251730944, 0.548504992360259, 0.8987346768625405, 0.8504145996482984, 0.5886553273204204, 0.5886200087363622, 0.9658561342517473, 0.7817463148723814, 0.9216709710441051, 0.7418147192564939, 0.9797001928220264, 0.7892298719724157, 0.7266085060063657, 0.5512429161408603, 0.5453045684756637, 0.7794074963577964, 0.6788844750806828, 0.9444348302786665, 0.5799743775641748, 0.663395590011838, 0.7125616622227094, 0.7208713529932012, 0.7921407972349217, 0.7420850755745465, 0.9244302415944134, 0.5981866349562603, 0.9054037627758984, 0.6136734538772737, 0.9413777764555471, 0.544687491269382, 0.7390921959339645, 0.9327392704692592, 0.9944081958126352, 0.775968875168214, 0.9929348796991537, 0.5526803986717477, 0.8729718117629128, 0.8725684548472563, 0.5721658575590217, 0.9904184151131059, 0.7973360362696649, 0.5825997213783789, 0.840874856048067, 0.9718692161197253, 0.5634165458866285, 0.8507776256767259, 0.8183647247531189, 0.8935026905039205, 0.9297431626197505, 0.9997518655161526, 0.7566368785712132, 0.9966969547574145, 0.6217697464734366, 0.5988200976620154, 0.9854282967395169, 0.6531219755145137, 0.5254541300711761, 0.684929534722341, 0.5699171760454422, 0.7859511948030944, 0.7229088661180803, 0.9788086012552427, 0.5542711832201961, 0.7518786456318685, 0.7301249909350578, 0.5286314755222858, 0.5328995152103695, 0.9156860921511794, 0.6359632949544634, 0.8979400247721387, 0.6240484275668206, 0.8280349550547123, 0.7897695780637375, 0.9297950879127492, 0.94831461632572, 0.7890474354032722, 
0.9059930776892484, 0.6185128656775127, 0.7427826987921111, 0.6520258967731818, 0.5547015752189834, 0.5107305287443895, 0.5123826471894593, 0.7067358783265882, 0.5160790060356811, 0.9871997095794232, 0.5873747771651854, 0.8715889593028792, 0.8003421831672074, 0.5389060907198611, 0.6223982884236646, 0.6049328918119998, 0.8868953726413653, 0.6935289655004521, 0.5788769553502042, 0.7383768832331861, 0.8914646444669371, 0.5692454464866552, 0.5162187524641388, 0.7489696781199597, 0.8283414383839155, 0.7569821912526336, 0.5859686446538119, 0.5576487325882667, 0.9344223529403728, 0.7059371391258551, 0.7709710537597363, 0.8137673648063715, 0.9502318502201439, 0.56604559022925, 0.7008954382865661, 0.9868361046023131, 0.7977961353525549, 0.582644130574272, 0.7874431347279431, 0.9980584918995759, 0.789310122468776, 0.8407931245525555, 0.7450589217104148, 0.856496841303287, 0.7154242016166859, 0.8319566115246562, 0.6088978777159222, 0.969336674384272, 0.9705392789350298, 0.7169551697467809, 0.8906731667843457, 0.7900737130035047, 0.9315741678111638, 0.5532902419322685, 0.7776793325964888, 0.9544199942698819, 0.9175698103583669, 0.6829769405891046, 0.9753427249884719, 0.877455181497548, 0.945436155873872, 0.8862130736158472, 0.5239998382418403, 0.9951807845553589, 0.9373920663100584, 0.953155583754027, 0.5500753214220481, 0.5429302188383733, 0.6748398075333697, 0.5276516213693832, 0.6267422447447204, 0.9290637028379942, 0.9410184661520131, 0.8974467023840378, 0.5046667412064542, 0.9281950986915797, 0.7839086088974605, 0.757312408193934, 0.5227265463028353, 0.5085361175458687, 0.9552642910403837, 0.6400072724204298, 0.8718148032963335, 0.688454235978393, 0.8201961548826123, 0.5956856484262832, 0.6088169702183419, 0.6230693851788118, 0.7372471386999582, 0.7344219216435369, 0.677404468419645, 0.9436183621479366, 0.8131826761248, 0.5966499538889148, 0.8293835708996893, 0.5528687756498625, 0.8181638766320596, 0.5491017943632179, 0.7162943159925165, 0.9571709326418261, 
0.5566281390871448, 0.7634029415072986, 0.7267322764447188, 0.6800937485751964, 0.6459942134065051, 0.641956936445277, 0.8663002042380833, 0.9903672008160498, 0.6350961008943754, 0.5771002468792548, 0.6857680019781651, 0.671582504366157, 0.7495717879438077, 0.5052361627574906, 0.8918282934798505, 0.6932259436280963, 0.7733762853240962, 0.9464617862801776, 0.6317394871307492, 0.9915741686154645, 0.8173798291149876, 0.930846276830288, 0.7409715603793355, 0.863141241340979, 0.7083574061187335, 0.5870550567798098, 0.9064655419389714, 0.8702100081327866, 0.6178158020568172, 0.6133416189033155, 0.8629402381745959, 0.8371627610519305, 0.8240448342017411, 0.5217667100189181, 0.6967444994467542, 0.9309395731799461, 0.7771708240289832, 0.6386817886871949, 0.8060414636004307, 0.6709024534920768, 0.9478630112861427, 0.9416252122550615, 0.5893388949480625, 0.9297881120288859, 0.8705002102219389, 0.5936359457836178, 0.9827648363114123, 0.7058623931400307, 0.5004674246380886, 0.8364468316590703, 0.6785062952862357, 0.659331355168805, 0.8459222114876253, 0.5391456408951694, 0.9263318159482641, 0.6855885061874156, 0.6999265778882334, 0.732879303348767, 0.9021468979612064, 0.523417617574548, 0.6642788547252736, 0.5460133256460644, 0.9058939735781094, 0.8653454111195856, 0.9136304702867994, 0.7656502854182594, 0.8132355529653167, 0.750881964239325, 0.8112980055421954, 0.9846128486295681, 0.7200793790845998, 0.612076020682681, 0.7695077816139116, 0.5461866185288305, 0.6505660198907106, 0.6057007426539242, 0.6008796717637691, 0.5087036892615666, 0.6482333433922169, 0.7453839337875594, 0.67216967599966, 0.9120431541574102, 0.9819188731504498, 0.592712640552282, 0.8867869318909145, 0.5898619442530972, 0.6559715575536251, 0.9790330635490946, 0.7318194395851083, 0.666148786702437, 0.6154312001316915, 0.6121343161041068, 0.7731716845642279, 0.8139244302365114, 0.845788324764464, 0.7727181251506487, 0.9434019127452962, 0.932116124906694, 0.6544944053974786, 0.8106542559567544, 
0.9940561159106874, 0.7913202799917609, 0.6051945598054909, 0.6915049742501767, 0.6707422160445976, 0.5119733837228657, 0.7023173961484945, 0.9966167081108428, 0.9084701456403417, 0.7301848527519954, 0.8027385143354412, 0.964179001364555, 0.8449856004345195, 0.7435518846247348, 0.5985957605776905, 0.5427311964343167, 0.5213208581446283, 0.7718283886362429, 0.5107101676330407, 0.5863682606623853, 0.5262775105037584, 0.5099264212001844, 0.8096387894912906, 0.6506463553130811, 0.530695167132094, 0.9025801156754409, 0.5085543553220395, 0.7875605140241649, 0.5440091222206505, 0.5343367951947462, 0.5458568979265516, 0.5825812026814492, 0.9762262231096582, 0.8127175572038849, 0.9572465316496404, 0.8018377434260411, 0.8465443692218235, 0.9912223258934127, 0.5898740340722035, 0.8618218402289439, 0.8054568178386758, 0.8754167006687721, 0.8191801984394935, 0.5673753112483875, 0.6636869166295306, 0.7864600787546964, 0.7585968544529738, 0.9496592610818622, 0.6710023010285042, 0.8622178497899734, 0.8451582122111929, 0.7122899693458771, 0.5872263903515376, 0.8128140191149438, 0.8293619152538592, 0.8505607509397198, 0.7555420773413113, 0.8741703336610389, 0.6606455137158631, 0.9047547062376087, 0.8502786808846343, 0.7023652039309038, 0.9538283860765986, 0.7361363907537689, 0.647365722689624, 0.5626962753836757, 0.7404669455141434, 0.8899010177851683, 0.950879768067799, 0.5881523053138948, 0.5286524071446892, 0.9494193275337024, 0.8497543345740302, 0.7162081193825836, 0.8308799725808909, 0.6264084648854031, 0.783720667576471, 0.6239651014903616, 0.5266778154844676, 0.742103934001898, 0.5783037675746934, 0.580377811199722, 0.8959497909036691, 0.9574207270018958, 0.5062743974852837, 0.7064356673830092, 0.9890743335219413, 0.8339115515489721, 0.9077608286623969, 0.5378171611049112, 0.8085896526404055, 0.9155065699104976, 0.8287540477296389, 0.6084204239008288, 0.8430910329060356, 0.7288838954086436, 0.6964494335194742, 0.9684199255276797, 0.974263049281621, 0.5032965652572909, 
0.6796532169493623, 0.8818441667890418, 0.7193361213892565, 0.6580867656990395, 0.7421031429673486, 0.5161400844639802, 0.9807587314573347, 0.9228369259308642, 0.7858428705875412, 0.5793121228065565, 0.6686745445379274, 0.8661662908177137, 0.5042139946494313, 0.7308598244774647, 0.7481727077230486, 0.5700829456503385, 0.7662578480420535, 0.5062432358007942, 0.6612044185661856, 0.9480190161089823, 0.5248777006966632, 0.7564038751398339, 0.7180009449504492, 0.5280339636320365, 0.5049154033553802, 0.5223762471127515, 0.6235389026860633, 0.8074839406227879, 0.5236206338070156, 0.9501033244655243, 0.9166974341246319, 0.9979968536922361, 0.9694511497989351, 0.6577338154527081, 0.733420221320051, 0.911085808167339, 0.6088511329524893, 0.8436063570571029, 0.9860340378088093, 0.8000916384857386, 0.6879469704146447, 0.5518638155769762, 0.6095189135435156, 0.8540756356361728, 0.8012124845236758, 0.5412647071555323, 0.5332288548844843, 0.6964807946539167, 0.7663527391045285, 0.6791806267927301, 0.6899720609541509, 0.8258158324228292, 0.889705109165716, 0.9448683445401949, 0.5330246544356809, 0.523187471928688, 0.9467227892838266, 0.5063658801382236, 0.5618377947696565, 0.7435665126240874, 0.9171033798220989, 0.8668681377000924, 0.6639898529783348, 0.9232576676999282, 0.9451908822507905, 0.8647795478786077, 0.7010414952521387, 0.8522496652523712, 0.7893324603774525, 0.8631777683523909, 0.5274708332407461, 0.6308207413075009, 0.6785751453115113, 0.8737268910725705, 0.7031225075182687, 0.5178435669291306, 0.7586967265524305, 0.6681432455666952, 0.6001424321189532, 0.7900758444544713, 0.772579810303018, 0.8583167698124778, 0.9991204729144947, 0.9058311484857399, 0.785585857100012, 0.9596350202453292, 0.829207093965848, 0.7606933286933126, 0.6607744796560944, 0.7780366704654114, 0.7243791016268555, 0.6630329189362447, 0.797447856337036, 0.9562101626597415, 0.9950306254429521, 0.6909187229153623, 0.5305102919737905, 0.9062634995514816, 0.9684297571941274, 0.542456202767541, 
0.5013956374522406, 0.7027813165721821, 0.6610149756565437, 0.7844324333053139, 0.5068948757124958, 0.6764318047834862, 0.509124210715443, 0.6060818631912813, 0.7727987785279576, 0.9005322653805814, 0.6054511375920903, 0.7183600486928043, 0.7532639336766846, 0.8288972832608685, 0.7991053114934424, 0.6460717606878981, 0.5954982703956494, 0.797482150922149, 0.7893324881424203, 0.7199704332415435, 0.7947732035273407, 0.7756141867538251, 0.9374403123180506, 0.7836935664320515, 0.6892599595168971, 0.7935027876326375, 0.8912409642419916, 0.7467791117905522, 0.513447069671293, 0.7400167510964011, 0.7430355999340136, 0.6648681552944034, 0.7365008461322445, 0.929408732786793, 0.743102143589544, 0.5752932739633276, 0.5387026239642294, 0.5529986732193248, 0.9348943544076387, 0.5847856149105057, 0.764596202364437, 0.813281192874856, 0.6764366402738369, 0.7080147916267207, 0.6555647046469313, 0.5094971669252669, 0.8303947011435187, 0.5926718409002953, 0.8634538438306065, 0.631700375560344, 0.7415738418351976, 0.527448151036773, 0.6138124172609558, 0.7580249037370894, 0.5903859151219251, 0.7785907022599959, 0.7025817415475515, 0.5356134066577787, 0.7405400382174052, 0.8237882469811204, 0.9135196699866063, 0.9227127058441235, 0.5423186587874402, 0.938931275686331, 0.578302718124619, 0.8431050863678207, 0.7201973626378665, 0.7475947469607485, 0.9617197559006339, 0.935124773870109, 0.7046462727699845, 0.9905345410006821, 0.6020677338317568, 0.8325787188024278, 0.8660579856618855, 0.9251235908759732, 0.8140973897014125, 0.7821630707985789, 0.6256109694288028, 0.7194304674208734, 0.6420295204005506, 0.9950369669182579, 0.5730960234000713, 0.6752199087057882, 0.722434355438895, 0.8911607512422028, 0.6986476350055311, 0.526934641908096, 0.915622923572968, 0.5684923625200983, 0.739299422941662, 0.8839456957576175, 0.8232188452740963, 0.8809147786438902, 0.6551312228694844, 0.8346235007872929, 0.8678293108082842, 0.8811041908747075, 0.8097269246901037, 0.556952585209645, 
0.8225162713772683, 0.7808090781481491, 0.8307705835260529, 0.6727531117038972, 0.7136426214775007, 0.8238007199672936, 0.9321718347837872, 0.6076289277863675, 0.978173526452227, 0.5563655590669047, 0.6185687512213348, 0.9482684602009483, 0.976946958477734, 0.9108070873652383, 0.8352500155012024, 0.5130623982267974, 0.7801763582614247, 0.8003629033045574, 0.900234744844689, 0.7172305463566753, 0.9504561327587432, 0.8477789841252792, 0.7425500559595378, 0.5884451641671771, 0.859439384173212, 0.7274063071173862, 0.7079372579598433, 0.6208253240880307, 0.9219616763690401, 0.6851062563075763, 0.625778245593802, 0.6485176300622154, 0.5415634427638609, 0.8629856474985257, 0.9579187636198818, 0.6323474108385296, 0.9422036159564192, 0.9846337988670013, 0.8422040947091338, 0.7906133775318077, 0.7812985155992334, 0.5382677096942758, 0.7231520517909776, 0.927005825827101, 0.698165975632093, 0.9046519680817373, 0.6329640171384997, 0.7131771791372659, 0.5516687292976387, 0.6847723123069245, 0.8234327851151919, 0.926581893072377, 0.5279033994705327, 0.6847709104631574, 0.8914019456308394, 0.5023099877482639, 0.8964212098043833, 0.5445247880677921, 0.8813898307385049, 0.6279758630293667, 0.687399411999598, 0.9306631948432249, 0.7904630380760012, 0.6826491386510247, 0.9999452625957035, 0.6906171622721939, 0.7483332397285016, 0.6992663306674821, 0.9182271782890892, 0.6441658720253727, 0.6197349607321895, 0.6089405638533535, 0.5590959269391933, 0.6840233811555751, 0.8634167567396069, 0.5775629640345031, 0.7832431093882445, 0.5888850341725682, 0.7693280769535162, 0.545053649471704, 0.8155279253647312, 0.9908338478481195, 0.6824989642736443, 0.6099943513287431, 0.937030873324584, 0.5464096331053752, 0.6319861177127181, 0.6624695330652187, 0.6438209305903408, 0.5418411273570553, 0.6219289587508994, 0.8046536436815259, 0.85411806313106, 0.717884497140869, 0.7437030394199814, 0.905181481025213, 0.6376883293260783, 0.7317658689468882, 0.6699899593772414, 0.8110331652886894, 
0.6814058858897345, 0.8762460439884021, 0.9421250190019623, 0.6972391391240665, 0.8684692059141462, 0.8638722556653218, 0.8506112121885805, 0.6161897349007277, 0.5795208853240952, 0.8023234584346195, 0.682422489975136, 0.6987459912900349, 0.6316194084868895, 0.8008922729835422, 0.6391378875410442, 0.9456891243094805, 0.5844106492173343, 0.8081300833115906, 0.5912437738590028, 0.7826574887761839, 0.7083896203948024, 0.812361735537992, 0.9887329664184998, 0.709415691921031, 0.7931497480421082, 0.8432810052694459, 0.6974489175871607, 0.5700171933135946, 0.9051480537682468, 0.9815972390407963, 0.6935902894754422, 0.5936835907093121, 0.9621790943693485, 0.6736258072567722, 0.7460498717768458, 0.6984386200637751, 0.6244499272283864, 0.7234784881714736, 0.5449531565443306, 0.6462030627644535, 0.6436807804631188, 0.5592410093826248, 0.5534538427590218, 0.9741294800015223, 0.7154577709278317, 0.865233347248472, 0.6424675347992923, 0.8867138917108346, 0.9428887283649017, 0.6593080419962039, 0.9128707164056122, 0.7021874324556355, 0.7829099038206966, 0.6817970080679874, 0.5627182237922043, 0.8199655279747192, 0.6188031706586765, 0.8928157924265615, 0.7524285568030584, 0.5539400843751782, 0.5712323780526594, 0.7913035726014457, 0.7885494732366991, 0.9164421023794269, 0.6849948239348143, 0.6618981709898856, 0.8959951319968094, 0.5595001683659165, 0.850735943697977, 0.6419537977935379, 0.6486461563669346, 0.8847729026894345, 0.6360373974915556, 0.5661089609418488, 0.8292362834521492, 0.8595127769795088, 0.6498514451950568, 0.9066375496376333, 0.8274548795808813, 0.6027558984523318, 0.870966731648153, 0.8904695694395122, 0.8822081024860686, 0.5582985053174698, 0.903827000582362, 0.7570766537270583, 0.820025036590202, 0.8304134558081862, 0.8255318451476263, 0.8984194608262897, 0.6641892908296316, 0.5624115157989662, 0.5974086130357328, 0.7640844886735927, 0.763855406209373, 0.911871671578806, 0.719420178387592, 0.8780643770723058, 0.9831180760367884, 0.8095292005502779, 
0.9631776148806275, 0.5823854690724775, 0.6898506416526051, 0.667087313049116, 0.9474236869045132, 0.6767248580087356, 0.7842580315618826, 0.6110896308181384, 0.8246583776078205, 0.790694062171817, 0.5906757804387653, 0.5191590542354969, 0.8570511438432631, 0.7292086362771015, 0.5388660722543801, 0.5532835321194154, 0.5602260754073451, 0.7566407389528407, 0.9285366879983619, 0.7625209243809374, 0.870315144187709, 0.6006457241110744, 0.6683367799371698, 0.929115465269101, 0.7822982060756175, 0.8677464137073032, 0.7041909078850701, 0.9082406470593876, 0.6733816437664077, 0.7498250546917955, 0.5031211808751004, 0.8126748208066557, 0.575146851253114, 0.6378892037926742, 0.8861764646260932, 0.7368066267909135, 0.8653774327366743, 0.6449583607764507, 0.7833639354767062, 0.607798957680818, 0.9296321868373932, 0.5534689845542275, 0.5947332549153499, 0.9544291974562324, 0.8827581139441881, 0.5110324973269478, 0.8563563095583379, 0.9071393060542947, 0.983205871879089, 0.7808475031386465, 0.541823862880555, 0.787692142065263, 0.928024542859823, 0.6833637660084826, 0.9051045100934837, 0.8269257746323089, 0.9632361155278159, 0.8391572686300126, 0.8478620002343342, 0.5667229895515555, 0.9235672688253626, 0.5660228267083829, 0.8867440076246689, 0.9604958656318834, 0.720086291457054, 0.9163903859783578, 0.7184250351067835, 0.9452881033299697, 0.7930253179240495, 0.9919601765903948, 0.5635455558797542, 0.6833558507393956, 0.8939870533281535, 0.5559351872496681, 0.5901124892001779, 0.7894310502623622, 0.9954883077999146, 0.5419732573401446, 0.8812578547668756, 0.8869936540390354, 0.7601919090076369, 0.6167998803307115, 0.5473335592065931, 0.9584042255017253, 0.6641805095930514, 0.7918454873563452, 0.7097258280302767, 0.5950250411932116, 0.6963540648198678, 0.8214077421639374, 0.9268588289769741, 0.9674591384037634, 0.9089830885157808, 0.5635471606154849, 0.8323009467657474, 0.9111360551456011, 0.9633567638280351, 0.9504260577762278, 0.9971707231648652, 0.6892221744348523, 
0.8749622900411709, 0.7401975824251112, 0.8828778110066671, 0.5330689240468907, 0.9813974896135457, 0.937800272642374, 0.9550396499890136, 0.989887845466565, 0.6602682556364838, 0.5793352438753606, 0.9612596306843622, 0.5941039802528087, 0.5347565173017679, 0.7339232698468336, 0.6034719775995567, 0.9678002054464576, 0.6094155725938877, 0.8757728180211145, 0.8986644075215011, 0.9964237465715663, 0.8834850831326431, 0.6989393102912317, 0.9369691930108796, 0.8803795197232258, 0.6890791922609545, 0.8304238825141343, 0.8283871862073534, 0.6069074368475251, 0.5042718933732288, 0.874881564770181, 0.9930980836484282, 0.9396903652041853, 0.9976834144049032, 0.9916621966692621, 0.6824177017424251, 0.6185273236873059, 0.9492536996869148, 0.8090580427960965, 0.5882582629867509, 0.6662635232318606, 0.8123573940983425, 0.7211661790067311, 0.8033471268847697, 0.9347699961858271, 0.6141039594532915, 0.6690006230374781, 0.9168976110713629, 0.909917603760563, 0.9566498312523335, 0.7814995805480387, 0.6185788579489839, 0.7810700274710775, 0.9658290720274084, 0.6389062067736371, 0.6311493094485516, 0.9085436597719905, 0.8317191343674057, 0.5827164587402915, 0.7281115436684225, 0.5578029582254362, 0.7481954538411857, 0.5603283241339125, 0.7773146235591035, 0.89764552491498, 0.7177524277839147, 0.5232959109067884, 0.8785580302108122, 0.7487271856984389, 0.5551598735970031, 0.6880256474770938, 0.7226131462915846, 0.9748642061896929, 0.5308553938320528, 0.6278322927922046, 0.5889962486365423, 0.9489454836864955, 0.9052400645025782, 0.9177007175000185, 0.5336608745160745, 0.8926068340628924, 0.5264125722019598, 0.7643559529420353, 0.8948331964145936, 0.6189399982998951, 0.9803992428720437, 0.794764322343286, 0.8389209684733205, 0.7831581005968328, 0.9137141932240451, 0.5733079982374567, 0.764369685092105, 0.922427304908964, 0.9386782167301486, 0.56166289482561, 0.6741284010512363, 0.8516557299539492, 0.7413985936693184, 0.8941546529541589, 0.9760914582797859, 0.9913554156798294, 
0.9790321350528746, 0.7760494058050345, 0.9138021201738428, 0.5714595081206544, 0.9432359568344238, 0.9894719348834349, 0.7689746058166502, 0.7075574178416477, 0.7986718886660286, 0.6210559012116539, 0.5530190270019291, 0.7191343297811448, 0.8778433153774376, 0.5505485201549929, 0.7121988224592851, 0.5869273135070863, 0.8883141214293889, 0.9082703975123476, 0.8008183588689325, 0.8233184446584468, 0.8887252250485669, 0.5182019836488704, 0.6759597361815535, 0.7025404322933956, 0.5244607378255848, 0.7482285869170124, 0.7561332987177299, 0.6279703657083944, 0.9346770127160512, 0.6769013180533838, 0.5708513313947627, 0.8170190948240923, 0.7550830184478037, 0.6575681025635767, 0.839100656607624, 0.5973482297686862, 0.52490154811497, 0.5285118866285883, 0.9720034683804072, 0.7990417705826873, 0.5538223444203417, 0.8920554201470106, 0.8579497966641154, 0.6227632032627877, 0.6832892395134481, 0.9303614757244842, 0.7174538143055389, 0.7377284419245265, 0.5673916465554394, 0.7787800390013122, 0.5470647686277865, 0.5252973508210435, 0.7753804469798946, 0.803789463373563, 0.6480238580827269, 0.598473302228038, 0.738323501024617, 0.975202630017882, 0.7897916806398986, 0.6198515241353462, 0.9727023448681946, 0.5703324018924077, 0.6857607721693992, 0.705881827670054, 0.9114699846855032, 0.8203671026920278, 0.5962748531539066, 0.9350379905528302, 0.9443253701722936, 0.8122504917989792, 0.8183819516386279, 0.6298577062038122, 0.5568061870376015, 0.716461074498496, 0.5942876825396248, 0.53524554296635, 0.6274444373994248, 0.9375013930480551, 0.8243056006001276, 0.7494323214029268, 0.795052679387855, 0.8156265863355154, 0.5169420185555875, 0.8217750192397972, 0.9562363351393381, 0.7848110047254611, 0.6542061882173233, 0.8827829026756696, 0.9119916779439168, 0.940833509201703, 0.9761467713418084, 0.510276406365012, 0.5482997574546107, 0.819595248720465, 0.537018586781913, 0.7424126040948926, 0.9456767339686969, 0.5208277674743182, 0.5759212463088996, 0.9487802880023768, 
0.7039771278795187, 0.7229271552739895, 0.6334351421790536, 0.7612789773836135, 0.8393420230377456, 0.9519929703840919, 0.9119554094107654, 0.8741039011176726, 0.7346091985662366, 0.5546946548046046, 0.6694811499848641, 0.5548986862940739, 0.8070612313548778, 0.512038167556698, 0.6600174786057402, 0.9825434116902625, 0.748298441167299, 0.8843825547535398, 0.6136370639122226, 0.6272363102239724, 0.9808262612731685, 0.7261770194371997, 0.5129965125928413, 0.6522245127657433, 0.5332918229325918, 0.9250878292373019, 0.6737457976630284, 0.916349171291963, 0.5147544622634134, 0.7148286988195396, 0.5581486973956054, 0.6418883668506163, 0.8265318853083734, 0.604630506814558, 0.7490923311679105, 0.9323537909204127, 0.6639948177223299, 0.6385521840069479, 0.9710310858709015, 0.9351154623248409, 0.8648765717588821, 0.6465843991116359, 0.8608376556903408, 0.5489151576490092, 0.8828378853992773, 0.8499751625902094, 0.9196551206463954, 0.769966193341926, 0.9595166787920661, 0.7947530410036192, 0.5983740796415209, 0.995334413222436, 0.6247248030794774, 0.7679869496576972, 0.865528432177616, 0.6005541099028064, 0.998508272512023, 0.5985832374825104, 0.9884254221305551, 0.667570841685164, 0.9919014484941333, 0.7325973624197768, 0.7569059080880322, 0.9374108075712257, 0.8153577304685886, 0.6392366147549873, 0.9444765954401427, 0.6533708650881205, 0.9590909669316695, 0.8071336089654098, 0.5094451049486063, 0.8223383802547928, 0.5177110832814797, 0.7239497684358487, 0.9175950650149987, 0.9434025102449579, 0.8903905823468993, 0.6928333171577858, 0.9742734360694918, 0.9867521706827327, 0.6866235540972349, 0.7175797640767081, 0.5862610975533384, 0.6589536637296737, 0.962635761015789, 0.6706657041454611, 0.5591576999620039, 0.8195600231565325, 0.5066777643899201, 0.7370754893931711, 0.7729647030339319, 0.9861254342481719, 0.6773342809372254, 0.7869420844718428, 0.9767155889788706, 0.7468562222139745, 0.8720736997605765, 0.7114238682896715, 0.5867952643898958, 0.7479901545525189, 
0.745610061153996, 0.7591423527976298, 0.9980116331970934, 0.5863766666403107, 0.9032327165024643, 0.6704642203323161, 0.6135556514009155, 0.5575774418674295, 0.7926687865674387, 0.7547550575404812, 0.8037251512217296, 0.9843673726279532, 0.9716320865701586, 0.582717592709689, 0.5971303401883402, 0.8007563950481854, 0.7888516775276819, 0.9481422613366952, 0.9953238047752986, 0.9832623961737361, 0.7219799132434106, 0.8455872580620389, 0.5396499360459502, 0.830864177559174, 0.6908138819882927, 0.8987493684192644, 0.7076421621743866, 0.9661528469500238, 0.9312041828549229, 0.6413671002154083, 0.9724654144202969, 0.6240667456004841, 0.5689660047379268, 0.6049377353384531, 0.7807198997897379, 0.6489079078057904, 0.5851044683046951, 0.6885824750156613, 0.6039487440639157, 0.5581824861987503, 0.6456180501855515, 0.6158196774302943, 0.6274916215732927, 0.8348924287391706, 0.5441106572020864, 0.894715309392571, 0.8305970287354342, 0.9637392399961159, 0.9190942844438741, 0.5016427454144248, 0.5144831030216919, 0.8131114005593121, 0.9235623808057387, 0.7433570510214069, 0.6788725104334902, 0.7897516022310271, 0.5591477460779268, 0.6374299059386337, 0.9851708171764078, 0.6958858733753279, 0.9506119775748911, 0.513408319021233, 0.9600792348777095, 0.9156421620139505, 0.7704056651597084, 0.7976727497407938, 0.5605227718811078, 0.85460033139371, 0.8046598014039189, 0.6275195764182605, 0.8347207519213757, 0.7822643274074341, 0.5494853556067538, 0.8738579123535186, 0.9695555828488676, 0.8177476126986842, 0.5708728885530661, 0.9094723042388146, 0.5054529223538989, 0.5850958733737972, 0.9838063305246768, 0.5719445053050195, 0.6994726015708418, 0.8502696469341248, 0.7428894259622743, 0.6826896677414226, 0.6027978338710778, 0.7800765005289056, 0.9765381668566029, 0.564854911797127, 0.8752874163439239, 0.6905284723514119, 0.6052652823632128, 0.6848586050324097, 0.9888460476207576, 0.6230893611974677, 0.912382456290699, 0.5712182566088918, 0.5849434254286188, 0.7414472625346852, 
0.7669127394857238, 0.5044320275556798, 0.8962048770923021, 0.5993120061370527, 0.8614293534385289, 0.6658074171788023, 0.6940460442988545, 0.880751151565772, 0.7038726153451635, 0.8268573820726914, 0.9053628299807599, 0.8070574841760016, 0.8626528716928084, 0.5503410673404576, 0.8863535048138043, 0.8477611325650576, 0.772860133356615, 0.8136988274848229, 0.592896415600819, 0.5123098637893996, 0.963401058773848, 0.9319427506727758, 0.6956187042889097, 0.8434103881842847, 0.5220350562770247, 0.8009722151446066, 0.7662007735098311, 0.9262252944295426, 0.5278996911115839, 0.8407894762476906, 0.8844670228090393, 0.8136158807439549, 0.9807800358126109, 0.837110953261153, 0.5154474480920758, 0.952406893292888, 0.6698458291384572, 0.5408854699303349, 0.890081430179045, 0.9440424281254276, 0.5962738492181906, 0.8973035159181093, 0.9062553101845335, 0.9768469608130562, 0.9460822883473345, 0.6212213051448545, 0.8984890894148366, 0.5388684212609081, 0.87959633774549, 0.981890170002143, 0.6104089615748671, 0.6454584248994808, 0.8690959700722654, 0.8957998519622132, 0.8099108175749712, 0.8762051317716515, 0.7758419832206142, 0.7197542243049246, 0.9535408443157516, 0.7417841174855897, 0.8777900794909659, 0.9898081085644618, 0.5938629243032616, 0.7092309892503118, 0.6199333908313642, 0.6861806076875078, 0.6749749804826768, 0.70155085626105, 0.6874591588766809, 0.722193783005282, 0.5932780631854639, 0.7793315557161268, 0.5261053995243774, 0.527170872978121, 0.946964708980004, 0.6535994526831264, 0.9252854423144676, 0.7035979482773671, 0.6571328418390535, 0.820014118258533, 0.5204791927533091, 0.7708648981333386, 0.8674658206695594, 0.6772478243950472, 0.8456156424242888, 0.6415760694812144, 0.6202574071341787, 0.5866037938667894, 0.6313115618145384, 0.5076518548903715, 0.8114506757453084, 0.7949656356491281, 0.6547089947386857, 0.8299196838884373, 0.8733323945989682, 0.7673443877445063, 0.825239658492004, 0.5383727546109098, 0.8050521998876325, 0.693871784135162, 
0.790150321638138, 0.9671155474041429, 0.8345385625881119, 0.6390527760803983, 0.7196780196104722, 0.7297108404327401, 0.8223117239992879, 0.528106986167767, 0.6446182757243938, 0.5165324551297897, 0.6509791326293405, 0.7853669098369815, 0.9679597920857614, 0.9824172410721108, 0.7796542501574901, 0.7941097843890841, 0.5156948763900502, 0.7702891912684908, 0.5224639325317206, 0.5192254517083215, 0.9025875657577043, 0.7054778435741672, 0.6625312882702389, 0.8683453614331207, 0.6615061889572023, 0.6603044004518763, 0.7670858612413115, 0.741160477429066, 0.684320526340791, 0.5575127319239829, 0.8133989676585165, 0.5574323321901469, 0.7663061563581854, 0.9409797529459657, 0.7872306302857395, 0.799802740421403, 0.7858312375531022, 0.7444353176363804, 0.6703003238770929, 0.8606905197151649, 0.8440574790646648, 0.724360366762067, 0.7394832122322267, 0.7090461163394524, 0.7496717730216617, 0.6316884383157633, 0.5039833107046263, 0.9450922349533897, 0.610062832784547, 0.5387940337092456, 0.6111969582693622, 0.5055428578878255, 0.9027872268804821, 0.9214188233498157, 0.5309052871369206, 0.5844678153836728, 0.7830885206614093, 0.5974423205505255, 0.5948451493659399, 0.662791421425807, 0.7968092477987587, 0.5936390549630179, 0.8515458100812232, 0.8680827661127255, 0.7932305245109221, 0.5856393833207925, 0.7500465687373512, 0.5881770580874911, 0.6044073553604223, 0.9523191833734457, 0.6225366060908297, 0.8678223764538484, 0.7871588926823976, 0.7999148363228792, 0.9749887493097211, 0.7053317194838641, 0.6344310414983578, 0.7324744771609336, 0.5659629337798708, 0.698431396109491, 0.9033963509075388, 0.5278950159249328, 0.7473089235199109, 0.8490527089023755, 0.6944203887232383, 0.5882472123511306, 0.6860800718524298, 0.6387911133466985, 0.6012298887741834, 0.7805024508582848, 0.9978915332501517, 0.7861963094081712, 0.9812474806306246, 0.9809376323388178, 0.6559770188200376, 0.6092191913769209, 0.5561220239969688, 0.9005971759804936, 0.6114506497803747, 0.7985914537919179, 
0.6923451013806736, 0.7563653708065893, 0.7839784542222588, 0.8237486115443838, 0.648486271491687, 0.6596716651516512, 0.7148940242227969, 0.6453674331741157, 0.9784583716750441, 0.5980227288069259, 0.7485293376807676, 0.6761415092914707, 0.5514351340546952, 0.6209765916795988, 0.8441069373866432, 0.6901091822620243, 0.9024910870746738, 0.945710555481172, 0.9466459401095727, 0.6083913592362316, 0.6840524310856135, 0.5762033746123907, 0.9298961081823907, 0.7750168073162189, 0.5756845534710777, 0.6588004476343264, 0.9793058676608556, 0.76393660283477, 0.9079309409983044, 0.8914774648177928, 0.9664402213809136, 0.6051447739145932, 0.578133363834606, 0.877686817079703, 0.8312341706754509, 0.7094768456189575, 0.6653451110187316, 0.9251910038271268, 0.6233691979301765, 0.7563792024437674, 0.965869686612099, 0.5443736128093062, 0.5073109066723114, 0.9347044690784695, 0.6675689619510556, 0.9857494920491037, 0.7775083776016746, 0.9027519503551656, 0.5207379002086867, 0.725747874354862, 0.8905987738766159, 0.810107477528313, 0.6470813311433794, 0.5194327260229452, 0.689619761961218, 0.9126258159053682, 0.7949076969983372, 0.5608959792275199, 0.841981076932728, 0.8420994560898086, 0.7837619434980692, 0.8699305866547173, 0.505405542651282, 0.6930945016652467, 0.5700536975150028, 0.9041260540477529, 0.751923230439177, 0.7068263068532775, 0.9680622911788768, 0.7372092668996582, 0.9015323672981829, 0.5704829268942331, 0.6107583548361912, 0.6980067747702041, 0.9456155735311171, 0.6432826493933339, 0.6976459653375731, 0.7228300289984193, 0.8414555395059073, 0.7371788405055727, 0.8356300843687037, 0.8765181394543748, 0.8465709494438027, 0.9954526266145027, 0.6220581190266767, 0.8589713961590356, 0.6455874216149693, 0.5564142502284442, 0.6107029226710115, 0.8257671418626729, 0.7315280819186978, 0.8625522121012725, 0.9859049534471638, 0.7557558532587713, 0.9339832860855786, 0.6692369724383893, 0.6683005046703899, 0.6850307485405394, 0.7813189494349133, 0.9867910425719009, 
0.5503952321686082, 0.9155230999850068, 0.6371287507546585, 0.5567882464697098, 0.5447148068061822, 0.8190305552911765, 0.7901332718434291, 0.6046373499308118, 0.8746206550771225, 0.6865182564623735, 0.6534072187009643, 0.5629690352926577, 0.5958539693978888, 0.6326708898415155, 0.8584662772902594, 0.9214811686256287, 0.8773049870380747, 0.5333108821600224, 0.5852629299998672, 0.6434319310601955, 0.5960821763681785, 0.8988886386335222, 0.8043395009744014, 0.8631315233484782, 0.7465082133311616, 0.8129631282159757, 0.5133565595095768, 0.6957591266384959, 0.9492992812542964, 0.939578520472076, 0.9529090539091443, 0.8177521142278743, 0.960662890952337, 0.6149329331434714, 0.5459826016112364, 0.5748584816317996, 0.7401767543056044, 0.5534610521102783, 0.7527271959780862, 0.6343273019643734, 0.743192050402436, 0.9883986542463791, 0.9524490624902718, 0.7176272799238748, 0.5060353333580465, 0.9449673807786894, 0.6647259599629014, 0.5783045856991826, 0.9624497466786768, 0.6342406088266015, 0.5969861420756546, 0.5488953715633098, 0.5087529534564489, 0.8990078634803487, 0.8379652660457757, 0.6720411029069739, 0.9187910035306173, 0.5090907563571299, 0.7674626224131831, 0.7050732586677767, 0.8395259102101702, 0.9910482646192444, 0.9047715349277596, 0.8171338862220897, 0.5407596600225018, 0.966304393923187, 0.8189184859280818, 0.6062372609276672, 0.891150620190476, 0.754091002044613, 0.6874115245376372, 0.9680866679407073, 0.7665275467626154, 0.6468906272999635, 0.9889685878460008, 0.7499377992818319, 0.9265723468982825, 0.595851589249215, 0.7263375291416361, 0.6596460537107505, 0.8018437430853087, 0.7823258324919822, 0.5078050448397449, 0.919694537194782, 0.6280772372178982, 0.9109915352571805, 0.7028920427397833, 0.8820795170352813, 0.859285861920684, 0.5062074000009835, 0.7232103971626966, 0.704222482344265, 0.8366451991400203, 0.7043883739235066, 0.538535716405364, 0.6169643918635852, 0.9075756131442001, 0.5549909892886931, 0.7522799564357905, 0.7679477398748977, 
0.9550222736609467, 0.8892477913444338, 0.9177966755250088, 0.5421447152691174, 0.9588276568743854, 0.9394600047005173, 0.8220867582731144, 0.6191264059849204, 0.60320019826186, 0.6246685229652142, 0.826824720560491, 0.5477420383053443, 0.6689459502312625, 0.700489796052675, 0.6830944411377629, 0.9886090829230592, 0.5722621879882939, 0.9694060888294513, 0.822433124185574, 0.7044246774840524, 0.687058994957638, 0.884197721970515, 0.5397844859331613, 0.525890324136894, 0.9550868515559202, 0.6818209639415758, 0.6419109075739459, 0.6202969610347184, 0.5404418232394006, 0.8685547847904198, 0.7163211062228048, 0.5657448453305479, 0.8283191691877987, 0.6752995567803959, 0.8321080227589708, 0.7912151234169886, 0.6746075610247682, 0.9901614696191849, 0.5836490231459364, 0.8165756605338481, 0.7177047781602113, 0.5874066587787605, 0.6659407742997443, 0.6286495321059241, 0.9230631231337845, 0.5088931090052156, 0.9288270882228518, 0.9700570401689099, 0.8932195260379787, 0.6636138614676956, 0.6792991176362925, 0.7654488577122858, 0.9113309421179525, 0.8278994894280929, 0.7044883580506986, 0.9847648944944184, 0.8917782208587608, 0.5028125258216587, 0.7587315971908744, 0.8876301876278297, 0.720518973597809, 0.5894305544420884, 0.8047216382297624, 0.964229498291366, 0.8250342935691733, 0.5645338145239858, 0.690242817222331, 0.5888718802804452, 0.6222650676863535, 0.8861178404662651, 0.7768411057009335, 0.8466020298983852, 0.9281431413727381, 0.7473384369264363, 0.9346413557994653, 0.6564049782377739, 0.7488503344295097, 0.8167050417131545, 0.5602432133171374, 0.8304660625129281, 0.9474056475920469, 0.7568689786018029, 0.5657150837011993, 0.6500278000705799, 0.8873175474622383, 0.9170185649316052, 0.9669067249579808, 0.9968378934960915, 0.685132828639246, 0.889085078340348, 0.8391861798305398, 0.9557766901194606, 0.8900462605227575, 0.9144852619271934, 0.8520716828711437, 0.7464227931912865, 0.7001100396355224, 0.760791622080246, 0.966247984787129, 0.8591376273719322, 
0.6403613295624015, 0.8244465063744686, 0.8308469432587156, 0.7085095061990113, 0.6728417563309455, 0.7221674085679752, 0.6952198041687603, 0.9648841715979524, 0.9195476876194434, 0.549647355002929, 0.5029583248258827, 0.6604706090332493, 0.9986506976671685, 0.6364681283888003, 0.8371382248640302, 0.5618474772994698, 0.5718789929252271, 0.8951088048394519, 0.9237520506152026, 0.9242688171356082, 0.5777086668664515, 0.5163837926565245, 0.7738016043220457, 0.5301177689913029, 0.6270277866460948, 0.6732041806609235, 0.964470260249753, 0.8633151023273118, 0.976689414024152, 0.9380279321253322, 0.9687023728992579, 0.9981999327882047, 0.8313803185990843, 0.5435430752836788, 0.7699424750117423, 0.5337737693097313, 0.8776075524742454, 0.801248640365464, 0.7882263540221566, 0.7976888313431373, 0.6283284815457992, 0.7981696481150418, 0.8548808879007092, 0.7189474574758452, 0.6091056398973735, 0.8301564913658288, 0.8302103944385775, 0.7923165104875916, 0.6434724838772745, 0.9791388180212339, 0.886048166609346, 0.7494783665395961, 0.5372357218894965, 0.5604638430973171, 0.8875468166574859, 0.522274417212603, 0.6524781791128219, 0.8494438981649473, 0.9563794081295218, 0.7793080776751732, 0.6226376279800832, 0.6499647188975537, 0.9622780815638269, 0.6739950316794051, 0.5830503278051167, 0.9909384163906116, 0.9229574655000586, 0.8054957117964279, 0.8608790360648926, 0.7991250297747969, 0.6165930561587145, 0.9888462481714551, 0.7945958692961449, 0.5104918444845319, 0.752170056003338, 0.6451403172331823, 0.8016384397500439, 0.6427839459525895, 0.9329770798718509, 0.7115456583146487, 0.9799311201200058, 0.84294547185146, 0.9569046384035856, 0.8195571870282377, 0.8438428997638391, 0.7294674631855594, 0.9923149576991636, 0.6449055342365111, 0.6359822546487766, 0.6565602281089213, 0.6936070516014479, 0.5254073296032947, 0.6236960346923517, 0.6455597587203782, 0.9426439708622492, 0.5995835796678165, 0.628208360078927, 0.6056562856413128, 0.8902026014541716, 0.8120766384123927, 
0.8082944388928102, 0.576483395195688, 0.5141598664888682, 0.5621912204823203, 0.8290283168950268, 0.9680359912995997, 0.991184060321119, 0.5265172969824787, 0.6702792579120442, 0.9130995904382044, 0.701465872419702, 0.7571831440192661, 0.9767025191689747, 0.8872698257927935, 0.5185427778463663, 0.9991919792361301, 0.8683155876516152, 0.6816748755261264, 0.6614242600269429, 0.7723616011517824, 0.6650401037748832, 0.8417333992182424, 0.581271660855109, 0.7651535508109453, 0.7955689930378556, 0.6102312206498288, 0.7165647253545397, 0.9710788648696189, 0.8554902423650752, 0.8165469767199002, 0.8423265034780636, 0.9174888316756652, 0.6015218586806234, 0.5258050288293181, 0.5979659292935091, 0.907996363153721, 0.7651569215015597, 0.5148805041864857, 0.5665569938731914, 0.6737584960648161, 0.5539573285527013, 0.5046239722235524, 0.9044014056390547, 0.7457475786008743, 0.9350892429081559, 0.8646500134683862, 0.768025806359659, 0.7602333798765681, 0.8521832804839891, 0.617921448233006, 0.8505155006997371, 0.8948143232155232, 0.8155875301421517, 0.5959330292360399, 0.6479649193153777, 0.5287258525840279, 0.9130561459899074, 0.5080109231467653, 0.5038102361438119, 0.6159425276468844, 0.7245418772092773, 0.8292154941991707, 0.7217036683891297, 0.5381433680932408, 0.9589708784327151, 0.5842106793756365, 0.5583215838596737, 0.6406115912514614, 0.5158419601729264, 0.8571715095653699, 0.8624059021159243, 0.5982134751913961, 0.5640917335041823, 0.6069884150868534, 0.6129514130700521, 0.9711141369196864, 0.7694321035587373, 0.8947775848923183, 0.5364833541733806, 0.537254875088343, 0.9907842238761584, 0.9154885586586246, 0.9917301569376393, 0.6818228130335979, 0.5573134868249625, 0.9637548355837025, 0.8729569637477222, 0.6969828467279838, 0.9336814347500525, 0.6731479269003973, 0.726335197550745, 0.8741822198333687, 0.6851197566820009, 0.7708640276377672, 0.9969089771547412, 0.8356900641859437, 0.8131941569517843, 0.6209586182782544, 0.9350638798713068, 0.8414879088722093, 
0.5231827369733766, 0.815637438677355, 0.8825787831534658, 0.6401049832413874, 0.7724156119083736, 0.7860780943497805, 0.8460261978985318, 0.5405579110159194, 0.8692922848585638, 0.9388772240327381, 0.7673956466329798, 0.7215387696546396, 0.5897402663974423, 0.7122117621569706, 0.524711780761324, 0.6430890362748214, 0.8376461655296241, 0.8725517560553371, 0.7355345131211943, 0.9729313380546838, 0.6155183318390304, 0.7570880386036924, 0.559207334647746, 0.5493733042318032, 0.9913070756371591, 0.5038017353118773, 0.6975377462593072, 0.5204144279134195, 0.6839792423957542, 0.8171959775934675, 0.6846512193540126, 0.8245415404846141, 0.9253710460151601, 0.859710563450203, 0.8606908259471201, 0.7958713072934969, 0.5398786452591486, 0.7967053259427042, 0.8043306155312099, 0.9088934683221475, 0.6189399100761199, 0.9829670500769315, 0.9241275640442868, 0.766027761758076, 0.9006136559854094, 0.856298762439698, 0.9031670571522328, 0.7445183255546143, 0.9854305555636853, 0.7436526892680521, 0.8109720776395017, 0.5463933833455554, 0.7294192189564201, 0.6217811982606777, 0.5033030699422887, 0.7516109905640331, 0.7789017243226075, 0.9979510131191651, 0.9047157112297082, 0.6812041551992241, 0.7941495908911008, 0.8896530683246585, 0.9556079266097075, 0.9954991521442542, 0.8608608953316448, 0.9821841886452369, 0.7667348601668507, 0.9284655167162871, 0.6654524112179078, 0.7589625028688864, 0.7770424902283924, 0.9080740218051744, 0.7678943810352526, 0.9251704062728019, 0.8821007636331423, 0.829571629069423, 0.5928199267691109, 0.5536543864988481, 0.764053673424593, 0.7175594469065913, 0.9854492691348501, 0.695766851561757, 0.6235547249393035, 0.5937355292260879, 0.6039554377100558, 0.8227801431071882, 0.5034894618961586, 0.5023728374789327, 0.6410547607942583, 0.759198272808225, 0.8178520079093508, 0.9770707233032832, 0.863119221220069, 0.8186526142130977, 0.8119811083664223, 0.7240033945974819, 0.5811145513655792, 0.811087137046126, 0.7285625410676941, 0.5700142675844041, 
0.8843508282746113, 0.7763237708898778, 0.7036109846902916, 0.6212409754043421, 0.5919318229590185, 0.9746389515723032, 0.9041945788543153, 0.7867375857300155, 0.7191929286488987, 0.9057908119488471, 0.5439121453205171, 0.9360918762348331, 0.9444838032513845, 0.5414439745374948, 0.6250955079505139, 0.7708779291139141, 0.9329249065935243, 0.526826429772737, 0.9625811417429744, 0.9105125958926505, 0.5745946970360315, 0.8105766529958028, 0.9034233742176413, 0.6512340820266764, 0.8289118714816629, 0.5132026114167766, 0.9839861417164342, 0.7610915085388112, 0.542481156514804, 0.7428611198438744, 0.751977993605593, 0.7141652857329663, 0.7386521097501781, 0.9122343698149528, 0.7313173869657337, 0.5824336699151791, 0.9154733597240479, 0.7036875979875906, 0.6797892475719953, 0.809856701387482, 0.8025130700126737, 0.8583428352531792, 0.9368368999045835, 0.6370331903485782, 0.7409663257299841, 0.8432487777178637, 0.7854154686148724, 0.5075542991042616, 0.5010461678112081, 0.6915997746431641, 0.7618124977463983, 0.9213778067003572, 0.5859307143318153, 0.9485312348362845, 0.7259196400956403, 0.9654278201838141, 0.9198337047612068, 0.798315144297858, 0.5886921460127704, 0.8394727602606584, 0.7900529350233687, 0.709349395221804, 0.612208334094265, 0.5052554892156764, 0.8246831716423764, 0.5431941204086466, 0.8292489615149973, 0.5041272099190738, 0.9712516081000173, 0.7383550071328957, 0.7231548757211612, 0.8249380937465876, 0.748829833661411, 0.6607357852110103, 0.5133354028257245, 0.9724050116558659, 0.9372947724518432, 0.7741576273673098, 0.6320637175510366, 0.5559259639658614, 0.8052421844606892, 0.6408672422542894, 0.563854694177484, 0.5987712661927812, 0.9671748741372802, 0.5130852509683612, 0.7821728459084527, 0.5379121246846426, 0.5210759459057516, 0.6296531703872283, 0.8849822344122216, 0.9016138310836042, 0.9363979320588766, 0.7300698485274305, 0.644769662114963, 0.9115113001611215, 0.927634260405632, 0.9067985599416852, 0.7743693578895099, 0.94118442379976, 
0.7488240518751487, 0.6806669027847829, 0.8007390491267787, 0.9837697877112902, 0.5871701092692849, 0.9053625464710147, 0.5996722661548233, 0.5311924428664454, 0.8383195957001361, 0.7601458735311374, 0.5194601556931985, 0.6159926046992705, 0.5643928693116387, 0.717622153181485, 0.9414565788070259, 0.509359299428601, 0.9159594871361636, 0.6709034841769879, 0.5487947893898455, 0.5220571543626504, 0.9721026867046936, 0.7018731985849558, 0.6531016339701496, 0.5228161090222498, 0.669885708907058, 0.6598635135581082, 0.6660720425030038, 0.9439281786500697, 0.8349024624359194, 0.9190180190832482, 0.9453543481896801, 0.8352266132167678, 0.9113429016800408, 0.5410307381671615, 0.801591858514731, 0.8514521653564139, 0.6273742241486819, 0.8609824681824154, 0.9089594017784479, 0.8971486432425554, 0.6581217379463642, 0.6880773703380934, 0.970443047406129, 0.6895933218937774, 0.6119874335330988, 0.7640217471232325, 0.937300443758768, 0.9063328919357494, 0.8814883411574135, 0.7174517567289626, 0.819164200538619, 0.8408079581980816, 0.9330340139358866, 0.9832869183837234, 0.7526390982630653, 0.5904506711739095, 0.8466190615513784, 0.6070621452287174, 0.8408255371093087, 0.870080767776898, 0.7746675247456771, 0.660583892433952, 0.562371963012364, 0.9651501414182981, 0.5232250561084555, 0.696521931704013, 0.8382710969555247, 0.8578399468897053, 0.8537090521847699, 0.5077031586270213, 0.9733732233859858, 0.788453745673855, 0.6552080867822279, 0.8101841556809133, 0.7800259995699201, 0.7354640255688746, 0.9244218479662858, 0.5055706605379062, 0.5745734504532283, 0.8217384543127607, 0.8858133521361724, 0.5786153777481564, 0.8390407327914662, 0.8124136820068582, 0.6014878236681401, 0.9073006855339891, 0.8260264734406813, 0.9116143145074063, 0.9944155352534565, 0.6060509761791864, 0.949353687876273, 0.813174384740174, 0.7115992301326288, 0.9101580897416535, 0.5781935563050543, 0.806907870014486, 0.8863669517666597, 0.8030994178462638, 0.6034376068759153, 0.7459593643480318, 
0.5844654045731918, 0.7710178301853505, 0.7997821270849541, 0.7589225378363269, 0.6289754623045469, 0.795538758525274, 0.8795840825431552, 0.6414127148686513, 0.6013013192558978, 0.7334564682537627, 0.564010078837013, 0.8974218357793249, 0.8036524108981242, 0.5256703972266336, 0.6640677742982168, 0.6700648562783202, 0.8198626772341806, 0.9000099005246718, 0.6192191830462286, 0.6717935169183257, 0.7827192104956935, 0.7459959292006348, 0.5874794914315411, 0.7421103797588181, 0.901929901784125, 0.857052197731057, 0.7316367585324375, 0.627095119079192, 0.9646978882501422, 0.9881305235384918, 0.7064103522205463, 0.9222315980739525, 0.6273783184243349, 0.8076928929934373, 0.7029977796657974, 0.6923466982410972, 0.7600609311128235, 0.8330129855621684, 0.6745185299297737, 0.7007612052467347, 0.6329333684492683, 0.520824302249917, 0.702594720021837, 0.7848701967604381, 0.9797661382350698, 0.8225963324972114, 0.640428604240754, 0.9141139263872254, 0.9283180501247527, 0.8769244574041609, 0.7101227255249379, 0.7587246217371414, 0.583581225328399, 0.5103938765949867, 0.8522076157459395, 0.9762090651662043, 0.6013846707217283, 0.5550529066764261, 0.7474179068509649, 0.8174264210371761, 0.9493064790527297, 0.5706880729610262, 0.7110419447244343, 0.6183398517233838, 0.714109877826836, 0.674578299304873, 0.9568921332706481, 0.7444988406481031, 0.9113321153609969, 0.801760312996618, 0.6123331937366758, 0.8758125388352682, 0.855017139480351, 0.9672618635939918, 0.8449304824914279, 0.6242475639700553, 0.6348213813651111, 0.8500303273399108, 0.5049660988472583, 0.8927551792195951, 0.7314869448626017, 0.5849221317987516, 0.6981250576549837, 0.7898751704916, 0.6787804721355284, 0.9804132089032516, 0.8874934924614609, 0.5390152694238435, 0.6307348691395767, 0.5520221909557053, 0.52659984573034, 0.990182459159531, 0.723284714751628, 0.5814335072284014, 0.6459641098993985, 0.572524086060613, 0.9468867924375874, 0.5408346463141627, 0.9709439268347991, 0.8392383154629879, 0.5279227396157655, 
0.7791028158046647, 0.7973495864549182, 0.7903998628043261, 0.7370517818779858, 0.5546478287742559, 0.6308313415302487, 0.6826958350517203, 0.8392978305280332, 0.8152574418096666, 0.9502693266790441, 0.502269790246592, 0.9384095734427873, 0.8391041595052795, 0.9692379293552325, 0.9226674223307066, 0.6299964832733436, 0.7778062606416607, 0.8084726123194284, 0.5443077570605985, 0.6818440215431351, 0.947470069633493, 0.9906195364029584, 0.9920882020889541, 0.9617413244434452, 0.8979915032572936, 0.8027581505622108, 0.8714468410442227, 0.5235766089016123, 0.8759622121309891, 0.9092169696195753, 0.8940534140510451, 0.6014641815109796, 0.5723428612874331, 0.5868891428586795, 0.5910844800628822, 0.8369256797141947, 0.9937198251844269, 0.7633675628127174, 0.7672740505538562, 0.6550433225761527, 0.6861970984643666, 0.9782745076416961, 0.8050609929537902, 0.5945829767383457, 0.8445921972317367, 0.5658122573367235, 0.9358146780606683, 0.5744067068228575, 0.8149488097327207, 0.5411194085591801, 0.9264054894221538, 0.947557919112639, 0.74813289434302, 0.8108426399662989, 0.5013982310597229, 0.9819814521666967, 0.7632167367132521, 0.9706380371920609, 0.70807526277163, 0.9749228734160302, 0.5150517941293817, 0.59037097202703, 0.898515946236347, 0.5282876737136945, 0.506257090213091, 0.5697715566593443, 0.9207745249987829, 0.9735131906599973, 0.9257087206081238, 0.7171022273670649, 0.6806250479473042, 0.6937394267087875, 0.8902592401696097, 0.5758590902595675, 0.60933861137171, 0.9018533081764231, 0.8624351727877884, 0.6623986565534126, 0.6709932428200328, 0.5356910647504283, 0.8251995681036985, 0.8369875733741111, 0.9493221151027442, 0.7466150483693323, 0.6346165157237302, 0.6099922396780868, 0.5047434948484081, 0.5362454418531324, 0.8418825708185952, 0.9772767519476092, 0.8302883328812403, 0.9809363668886009, 0.7611394604804222, 0.5194735447676424, 0.9501614088877341, 0.8711708378402658, 0.8819613816301721, 0.5043482723680525, 0.5548184643249052, 0.6902503498095636, 
0.8850850820380134, 0.7297370472483857, 0.9644850756526453, 0.7527727155277386, 0.6994509359726416, 0.6903035029879929, 0.8896123863796819, 0.6888424451502102, 0.9443271516720726, 0.7783181416177724, 0.7894766097785287, 0.8373453397652817, 0.6839567644066487, 0.9198121670699277, 0.997454479884944, 0.6995201015412367, 0.7680223677668914, 0.5291837322478512, 0.780919315276073, 0.5489078118900181, 0.8214300161910846, 0.6578488576350723, 0.89725370088685, 0.581003597585297, 0.7463887728308907, 0.6927991059847226, 0.9024447570882199, 0.8646781013810896, 0.8131985468652656, 0.6937959759725554, 0.5066788938190259, 0.6017769285846237, 0.5005856192441145, 0.5104798751306023, 0.6372855932477461, 0.8708037261626824, 0.9185016626825628, 0.8326209961613668, 0.5692929377898216, 0.5511185204858656, 0.7241946683499132, 0.6646736933880473, 0.7386696653969449, 0.6585530961988567, 0.8753120919141952, 0.938136725570386, 0.7105736844753514, 0.5035667221353759, 0.5144153858336054, 0.8709646789844672, 0.6309245636573587, 0.8468275003473489, 0.88722909460463, 0.5419597858796016, 0.8698313812737006, 0.9498320101232165, 0.7750244888165683, 0.9404248076209409, 0.643072936885076, 0.8638458035138918, 0.8446221267921228, 0.9846269103692156, 0.9867147788784079, 0.8222060943782699, 0.6417663031822454, 0.51560599788167, 0.6683332029704998, 0.7507409091314823, 0.585565989815634, 0.543393938380803, 0.8852402826898624, 0.5673424732438195, 0.854237481500755, 0.718628743287321, 0.7157639809546689, 0.847429265774853, 0.6882511162940617, 0.790435868398663, 0.6369012872838766, 0.6051922978602646, 0.7845941490154438, 0.7760737350967675, 0.5163625175981419, 0.5652529234023986, 0.9269271384335279, 0.8736613736629031, 0.8768500069542229, 0.6436811193702837, 0.7540803469813451, 0.5339777358447885, 0.8336461306876831, 0.9446716222767333, 0.7052035935578115, 0.697793368018768, 0.5374339753160251, 0.9943225064073526, 0.8425171133662249, 0.5726878497609498, 0.7551602429527802, 0.8511633848822353, 
0.5555054278687432, 0.7094050807001984, 0.6293623409172266, 0.5559783508507372, 0.5263709899306948, 0.7264380632971423, 0.8771998930109449, 0.9720484998070197, 0.5132632097812933, 0.7912191469923227, 0.9045764800378457, 0.7013811574746447, 0.995393976067035, 0.9475881191943981, 0.9700166339320866, 0.7569919389153064, 0.9186299133739342, 0.6177328469445718, 0.6784283785361009, 0.7109504514187086, 0.5687196412037916, 0.5298049325968432, 0.6188795917028926, 0.9521483237796204, 0.775400741826022, 0.9630626088162003, 0.69139591676764, 0.6276158225274666, 0.5753253204322495, 0.8102903241493039, 0.5562320473952866, 0.881523863635707, 0.8512472358942289, 0.7532255333096325, 0.6477994579106783, 0.5391944356596201, 0.5051747741692787, 0.6237332671399846, 0.9921768930194709, 0.9469111174424818, 0.8661253963659128, 0.9630510308378062, 0.8862154916876455, 0.8600486946276191, 0.6070001724884632, 0.6508562737407875, 0.8588820218866781, 0.9057966742207499, 0.7080510777182774, 0.7562125853889038, 0.8798273362527562, 0.9470538156227091, 0.5506040395543239, 0.6258351370262554, 0.6862291754620427, 0.9335478266782042, 0.7309076691738292, 0.9328899626306342, 0.9600281187961766, 0.8618145719028919, 0.9100132335084499, 0.5845356609260903, 0.5826590351876035, 0.8770188362734007, 0.9806201443937809, 0.5058835571413053, 0.9816034964429605, 0.5009575651054831, 0.677901985946012, 0.5556655177318093, 0.9823312276081881, 0.6351630266631905, 0.7978814997198018, 0.8374226042373868, 0.9512451417331937, 0.9098585524977139, 0.6222855525882649, 0.8708253849371301, 0.7822693228013153, 0.608757840741897, 0.7471413614312605, 0.9946918612442477, 0.5820088997566983, 0.6880895050807136, 0.5376494208298923, 0.6867604851025064, 0.8357026266401634, 0.8820940583628254, 0.9806440497530631, 0.5550278431584902, 0.8115186445533049, 0.7658881135396349, 0.7695835736977696, 0.7537005825406946, 0.8909609582469415, 0.8847778524201197, 0.5453463385910424, 0.6923757984359737, 0.5749514852019149, 0.580566554132673, 
0.5446706216017035, 0.5822358783939323, 0.6639878088505022, 0.6984369685048839, 0.5954195722133007, 0.9895981211018994, 0.926323377703829, 0.5240050976769072, 0.763324136901913, 0.5233419361841511, 0.6447798783883433, 0.5609348513790751, 0.8503154182523299, 0.8060218548050088, 0.9268769671993392, 0.5625097750266526, 0.8108150707439894, 0.8389616739536372, 0.9688007323838552, 0.5760831755523644, 0.6268990859613558, 0.8223163355011647, 0.6586363546223969, 0.7821860995812859, 0.8894305059573833, 0.5887216989522674, 0.5033057547696105, 0.9368487348714156, 0.7249068959341529, 0.9040239849152019, 0.9413043565305232, 0.6884542903303477, 0.8302629017751373, 0.7806076379429538, 0.9917738244667949, 0.5714852810356259, 0.7973214350950706, 0.7068677572150744, 0.9596695650313554, 0.7451459647315075, 0.7259093125650953, 0.8863104244468041, 0.5758377288977246, 0.5260161183495193, 0.9897247652952286, 0.6634080137468426, 0.908996942339845, 0.6848513580578356, 0.9636109774034507, 0.5324637295556977, 0.5347195917791763, 0.8920373034065497, 0.6741144812617941, 0.7017638833980222, 0.8905845371150697, 0.8647698301517392, 0.7675478233608957, 0.8138616631189186, 0.690379788069674, 0.802042650480522, 0.5148090622851723, 0.7607772130729575, 0.553397955274721, 0.7439448828898523, 0.9648788951042672, 0.8616134285440674, 0.8564807260712901, 0.9033796941955646, 0.8544161874343952, 0.5175544756856811, 0.8603246634739357, 0.9774904578562648, 0.8669888270931063, 0.9101701408239417, 0.6200307611816107, 0.9425085367495392, 0.8704864264373716, 0.629615156593744, 0.5191587940616259, 0.6695325444448256, 0.7787219958589346, 0.7360909330696467, 0.9221676740215257, 0.9592698194750131, 0.893523088181046, 0.7708520889033046, 0.546723489919956, 0.6785838475195127, 0.5769816047744045, 0.6095308869187239, 0.6218049399520028, 0.826367226020052, 0.7806735194237294, 0.5074521833814633, 0.6302818756370565, 0.9740665789741263, 0.5695420254648662, 0.6980025778896471, 0.83849355414753, 0.5070416849412501, 
0.8634398754253203, 0.5940267007460027, 0.811167356145722, 0.5737921513998532, 0.6805676799324332, 0.7658354067409919, 0.9862088432259773, 0.5603403895417788, 0.971067145255522, 0.7695544381080934, 0.7024562363006956, 0.5521792493465754, 0.5765032228534068, 0.5227511224794537, 0.5187376691875838, 0.9564890066967324, 0.9143104141090457, 0.5459890442617137, 0.5105876383962179, 0.6607000805081411, 0.8017262944544206, 0.7914319916174193, 0.6984601016933913, 0.5267839051026739, 0.9895487818562132, 0.6967583846499653, 0.8456237305422473, 0.8505068250130096, 0.8404236892164845, 0.5223277026856246, 0.7350632751221642, 0.7478513360256124, 0.5709140645602554, 0.6497437589720589, 0.9315821874307785, 0.5830745417316923, 0.8794462532707762, 0.5825286993613203, 0.9132742966523724, 0.7589471073685652, 0.5664428295090553, 0.8680784165427746, 0.6124331557528492, 0.9282005527193711, 0.6135325695206864, 0.5162735805355139, 0.8720972558354385, 0.8300184555201418, 0.5714878412289728, 0.6516474216750903, 0.9618067653348494, 0.5576427532522531, 0.8766484852658711, 0.7928212551513687, 0.520549602515307, 0.962961542076622, 0.5546793878130727, 0.9165985272178452, 0.5813403921942605, 0.7089531175400872, 0.7982511001989829, 0.7921480304544117, 0.917203113878774, 0.7238324952912244, 0.5702157162017121, 0.7511408103579901, 0.9014957475225556, 0.6530536215092761, 0.7701605744145147, 0.9607636034513183, 0.9235082035057229, 0.7841238155583026, 0.65865027901259, 0.5662098593859496, 0.6583252341804349, 0.6353661893711172, 0.8928199638365821, 0.941021272046916, 0.678524064855992, 0.7251045786947012, 0.8185629356764466, 0.7311685099853377, 0.7684432953033302, 0.5559750395431002, 0.6312420769784525, 0.7486944285320045, 0.8314426954681502, 0.9243389193486644, 0.9780969008408669, 0.9191703830782073, 0.8861832173919919, 0.8719118375411996, 0.917653551103559, 0.5830659197456387, 0.5835380992036987, 0.6597280974405607, 0.7764654427676423, 0.8976094263416179, 0.7211088907643723, 0.5283497518915263, 
0.7505947314970762, 0.9191830529720662, 0.5665973386381853, 0.5756201404449801, 0.6557904841305604, 0.5501736268465386, 0.5446684972172702, 0.8866827592522293, 0.9684828917198345, 0.6045099392675662, 0.584753859657151, 0.6578694238181307, 0.9703409506083487, 0.8639807435571696, 0.53234876438837, 0.6069166748999125, 0.5575635313961063, 0.5742925775396357, 0.55127117982953, 0.6201177307466719, 0.5864774599804408, 0.8488886977481279, 0.8576015180619811, 0.9455106967284541, 0.5756384831752883, 0.9209915504205162, 0.7218364353276964, 0.6366873630590932, 0.6001966352881118, 0.6918575067526808, 0.9679114302575542, 0.5081625872474553, 0.9257480202189127, 0.8626307025112363, 0.6059244801838208, 0.6923157665478492, 0.8927648029816333, 0.9733456436185381, 0.888410548652653, 0.5584385641682417, 0.6990689813689233, 0.5854579806455369, 0.5105886776842032, 0.5361390169118683, 0.5127891704636875, 0.5792116786300805, 0.8544885792563428, 0.8634334301815674, 0.5850030941067896, 0.759491444578912, 0.6599862614345653, 0.5598895453156034, 0.9003522951318041, 0.7963644273769439, 0.731864624610999, 0.8110330488839842, 0.8277870006035701, 0.9972417001170639, 0.5368612411040885, 0.7875904919953299, 0.5864501480307132, 0.880158325027953, 0.8379646300149355, 0.7384864122799346, 0.7871775221191359, 0.7430808642457745, 0.7790317999194045, 0.7640648119403626, 0.7782988955335091, 0.6658993617441673, 0.6749291650040998, 0.853594993701126, 0.803271036160479, 0.7019067219602951, 0.8492424919171472, 0.6137011210521294, 0.5979878093590896, 0.737603861505636, 0.843536282794367, 0.7701264856420829, 0.5960804176057448, 0.5163149846209865, 0.5765355386008262, 0.5188159907748922, 0.7341663504279734, 0.8226079405751994, 0.5485749110023204, 0.6863450910995328, 0.6509804636675041, 0.828093545213138, 0.921899607624594, 0.7958666942505822, 0.561957391393225, 0.7510228672527333, 0.6842162401701943, 0.7608048179337645, 0.9922479259390893, 0.7543326179366345, 0.9505778859508318, 0.9094369410213934, 
0.607224035275532, 0.9489918794936751, 0.5617163001973768, 0.8674833419807976, 0.768760385169875, 0.711502305795564, 0.7932824692361335, 0.7737370915013346, 0.5368581527974092, 0.5954246579264835, 0.9394155400893004, 0.8498567471676559, 0.7351735667588897, 0.6831426660266782, 0.618604820047765, 0.8654878054426827, 0.9550472643205778, 0.8094100401416611, 0.7113990146698721, 0.6709142992827282, 0.8344426062757473, 0.7966392336847029, 0.9816070996329664, 0.7841384630992015, 0.7463061171453678, 0.5999864591071608, 0.7510655233338428, 0.6060525112870543, 0.5573144449990364, 0.7593518041548635, 0.541153608649809, 0.8808319537698909, 0.8320877963760502, 0.7912685181704213, 0.5187161150448036, 0.7332785324917369, 0.8096976758317269, 0.6995694025763073, 0.8790146909680786, 0.8112255878379765, 0.7250710155051223, 0.7229636208266996, 0.5831596308696787, 0.7286548575549461, 0.6214406924091763, 0.9861081312162732, 0.6056006355323809, 0.803670149484488, 0.6736238354921865, 0.9579653703946036, 0.6274745902060499, 0.5520993635372305, 0.7453950576084044, 0.8234273832681387, 0.9377494624070337, 0.9601218220312074, 0.7770846609051625, 0.912975084956219, 0.9943298495788071, 0.9420209906944768, 0.8793647342947826, 0.7256940311751703, 0.5328656408759596, 0.8036278294266349, 0.7170309169994542, 0.7948157145994988, 0.7467407442877284, 0.8485125491402866, 0.7223255982403344, 0.8546061668400584, 0.9703577271334014, 0.5846451310537677, 0.8380359528436829, 0.5569846212418139, 0.9222121075752167, 0.577411759438068, 0.9501948640649414, 0.7128070525407676, 0.5475740269186674, 0.62615909200518, 0.9563727555962954, 0.6060052570615294, 0.981075793445038, 0.684992675914549, 0.9958927372988307, 0.7100562696906836, 0.8696034029452102, 0.6939042486263106, 0.6650776326484488, 0.9351735080347285, 0.5389109092447024, 0.8945482674749194, 0.6151863563590173, 0.8259633226852874, 0.839392493265354, 0.6510055668812258, 0.5447884953122667, 0.9418795681982146, 0.6497553070995642, 0.5996133064477036, 
0.8755838854174027, 0.558426330621109, 0.8983461628132783, 0.592322998189703, 0.8561221676169963, 0.7186347220305191, 0.8659765667164572, 0.767356435537712, 0.6522320498156087, 0.5444765518303644, 0.8786344222540474, 0.9356774403172237, 0.5818521922754842, 0.5613806935040536, 0.524386149902654, 0.8567970538110911, 0.9006639475491149, 0.561743235387137, 0.5086518356756287, 0.7899999643890387, 0.8925980586985871, 0.5038780381836234, 0.6891285916020498, 0.5818171587540529, 0.7126604168352824, 0.9869625305282133, 0.6051505064483057, 0.9776972710431142, 0.8775549171471487, 0.927504766160183, 0.8388414490329836, 0.5304651792592603, 0.9808180561063358, 0.6569423729449981, 0.7050023644679321, 0.737966389521864, 0.804373032315107, 0.8309589922923386, 0.7664885336933998, 0.9041710894146179, 0.503990413027713, 0.9462585530688026, 0.858280044361649, 0.843983545922374, 0.7780605048866656, 0.8801263597521005, 0.5621556946043766, 0.8980832966930808, 0.6351894769263873, 0.9018634859798551, 0.695111102017991, 0.7530369068563314, 0.8798596530447123, 0.666698515036386, 0.6597151105457771, 0.5827450085655166, 0.766944762305096, 0.6567469681268033, 0.5956917404419437, 0.6461195489208349, 0.8773131714490252, 0.9138591400816174, 0.8212073525549424, 0.756470621560927, 0.5775336699859517, 0.8514145853481498, 0.9629592556981399, 0.508849476418975, 0.7094206948288075, 0.6715141363045688, 0.612511601744186, 0.523605489740357, 0.6843204152183133, 0.6688811259074918, 0.7565855763228813, 0.5709666473096875, 0.5146148579600189, 0.6056977772366801, 0.9822381996523488, 0.7967086769697316, 0.6781726640243266, 0.8586264117829994, 0.8531903046909504, 0.56748785610925, 0.5375868176887173, 0.6384547887588237, 0.896463706258016, 0.8769334222000204, 0.9035503871847661, 0.6136433968348743, 0.9909291446288357, 0.6057033990633245, 0.8835491815064885, 0.57350476116494, 0.994307789493899, 0.9937275380642681, 0.6178596871510693, 0.9936817040259434, 0.5140534279675837, 0.7418379009854936, 0.855722205510232, 
0.9380321691875562, 0.6536517574355186, 0.7724499877588942, 0.9988838202904361, 0.9417863757568519, 0.7854264992316816, 0.627145493182595, 0.9594990223074542, 0.5391001488960877, 0.888343644417205, 0.9599261291952079, 0.6871360836262242, 0.5338816597179432, 0.5885765471443426, 0.6667540779829335, 0.7381633567958801, 0.8069746514903227, 0.7346142218205988, 0.7131402767554853, 0.6322686323300708, 0.6629548592672683, 0.7849047392411077, 0.919135786747539, 0.6331086200906106, 0.9648504082639688, 0.8337344366070093, 0.7326703745313872, 0.5419813505820243, 0.5390875633958496, 0.631650027215827, 0.9239779227856287, 0.5102534989949137, 0.9078231262888208, 0.6562436137618972, 0.9955842376001659, 0.5255723411215891, 0.819435236319136, 0.8642021478469613, 0.8049753797489945, 0.5497313433564113, 0.7096950726510884, 0.8390427427537246, 0.6836548326691811, 0.7535401954756371, 0.7321320862515761, 0.7747691679075739, 0.7963985142779137, 0.8054598210776285, 0.5821569442990133, 0.6865349056101409, 0.8679973659659792, 0.5281573939819264, 0.5852040853290617, 0.7324912265194881, 0.7635768932497408, 0.6496014646786845, 0.9549479448459536, 0.7602004229246957, 0.500395049448696, 0.8996797820203148, 0.6657051368948376, 0.9862623189531949, 0.6849229777764538, 0.6652448372695348, 0.5660331456462786, 0.8459390167098861, 0.7104257299216901, 0.5593113753140054, 0.9007362196605504, 0.5620827236712584, 0.6463948960475381, 0.8387126451174769, 0.5690553724138053, 0.5476938043121762, 0.9144802007916777, 0.6224735873435555, 0.9989196701334264, 0.6942799385740233, 0.9020573539497969, 0.6555386598008095, 0.858810475180299, 0.6090489489703639, 0.8326878670896096, 0.9417396031978391, 0.6209598797241621, 0.6483004342500661, 0.7229518908669575, 0.64758822208357, 0.7812866514239434, 0.5800575554299143, 0.9443525207073085, 0.942318512567349, 0.5641141182648646, 0.5070077290761492, 0.7590327413460957, 0.929012598421424, 0.9369894808956706, 0.5505103070128217, 0.8909321876407844, 0.9879308863418634, 
0.6464894895456865, 0.6870308773686602, 0.7297543937542257, 0.832376462265212, 0.7132234569590689, 0.6100024375338885, 0.8965570543703556, 0.711065841582644, 0.8008049032024553, 0.7069141329859182, 0.895176741091624, 0.7676656953590153, 0.8867930732642746, 0.6082221824373839, 0.5267691987950277, 0.959261500170939, 0.663823471941531, 0.9373039098298896, 0.9869877152551745, 0.8963565427781626, 0.5059279981261998, 0.6327687168473952, 0.5545833019804041, 0.5439842710692638, 0.9842901216637396, 0.8234578050305009, 0.6899382605017323, 0.861703988754922, 0.7697300614363312, 0.8837628207876691, 0.9745364469822664, 0.9149159281710457, 0.6118783526162811, 0.8713456396670551, 0.7400816838737172, 0.7377646618780658, 0.8655925882569805, 0.5336051259766532, 0.56927581599836, 0.7911158030206095, 0.9912339059387534, 0.8022791030593989, 0.6211134027667142, 0.7717519996423194, 0.5300115753641752, 0.779632612167825, 0.8888742421924215, 0.5699443412887741, 0.503785702990222, 0.5191233779223856, 0.6994658881659587, 0.945190129137752, 0.7682806468359054, 0.7203260379319404, 0.7042049521215059, 0.7378476931319551, 0.6128992879476205, 0.8441954678091379, 0.6284531109168126, 0.9424044584476325, 0.6482349350277793, 0.9152618267101418, 0.8563623114412995, 0.5059719937217716, 0.8655926582313207, 0.6740974226185807, 0.6376229364967978, 0.8826911217142752, 0.5423777264499061, 0.5092548848879721, 0.6873553655508358, 0.8413593711568081, 0.5160712461149936, 0.6960860986365209, 0.9080811240415311, 0.7353122765388844, 0.5037718216272287, 0.6977459701292392, 0.6458291010752011, 0.605768878460019, 0.801056823584438, 0.6602765173860039, 0.6150786723854754, 0.7285470505164326, 0.6236320317353334, 0.5038179951078893, 0.5143867255267245, 0.9120739909624298, 0.7704674292866456, 0.561109825310252, 0.7047690588873117, 0.6154653667295957, 0.5073918122161962, 0.6115824617192982, 0.8674366980456347, 0.9408546137495409, 0.9321982240340728, 0.8203028934298107, 0.5341676456114237, 0.5135739056537687, 
0.5702991767551058, 0.683704823309798, 0.8645655955287876, 0.5910313862279928, 0.7877073786686619, 0.6191660343587433, 0.7829717815671775, 0.9799556571312161, 0.6669537355778862, 0.7750548799675708, 0.845985192280434, 0.8675210542542673, 0.8425389752312478, 0.5407897358070866, 0.8464502878616322, 0.6760734200565848, 0.8540805703350978, 0.9746630136757349, 0.7003470853245761, 0.8982772511260162, 0.6327407121246358, 0.8520162530433896, 0.5023400267060261, 0.6948488514185361, 0.9605133178519987, 0.6935154947341411, 0.8075575018453995, 0.5858481100033688, 0.5160135467125697, 0.5881137848332003, 0.6075278393757271, 0.8923972737644669, 0.5680926945545068, 0.7228773351963609, 0.8227704083070382, 0.8608003440474061, 0.5542175934097551, 0.7426867364440661, 0.51773277544231, 0.9120097824037312, 0.8765216502348128, 0.9522094595170274, 0.9613422330412948, 0.9181077188071041, 0.5397542685273189, 0.7821264501084136, 0.906847433727098, 0.6776056335699471, 0.84556557154876, 0.6754280556747422, 0.8199499285736269, 0.7933652079853039, 0.8315935725489223, 0.6254791887081658, 0.5720895849104424, 0.7832483487332824, 0.9705055276472447, 0.8403802497641408, 0.9941859450163093, 0.6111510254841188, 0.5223795199696954, 0.613413900013507, 0.7044580806115237, 0.7626567428613238, 0.802294112264313, 0.7530915127578439, 0.9322252511058462, 0.6537450941498497, 0.8660863882027461, 0.6629550750157506, 0.9822872271675317, 0.8866635186314539, 0.626043780982634, 0.7140722571585144, 0.9664189464158224, 0.8850435035521609, 0.9370492607064744, 0.9036959694611908, 0.9113669984364565, 0.9588678484403164, 0.6858296010667827, 0.789158918547276, 0.9882688907981019, 0.9483655352310835, 0.7393455018759894, 0.6738114363365343, 0.7767697903341031, 0.5551908859950679, 0.6972733487991571, 0.8452132485055824, 0.6106008585553313, 0.5709406620088153, 0.7252637744685182, 0.560009816083147, 0.5002988550559482, 0.8704899046125909, 0.8898463352384522, 0.7149982387206864, 0.5018858846784225, 0.6604486401092599, 
0.6108890400220219, 0.8811459568869418, 0.5465964940121235, 0.6354006264473788, 0.5296076271100947, 0.7772281447947261, 0.6165074275922287, 0.784220715782949, 0.5924299051425843, 0.5943364595493955, 0.5953408307381827, 0.7242740262046539, 0.677270151159592, 0.9201002327822019, 0.8685795568996109, 0.5949329645964908, 0.890892824669268, 0.805594254625939, 0.9331776387804307, 0.6414108911345016, 0.9820204855247692, 0.5549807260497168, 0.5726572312111382, 0.977092796988629, 0.7640591292778665, 0.8369025546948299, 0.8400303098881634, 0.629085443147372, 0.9743141833184309, 0.902747818284402, 0.5864039021211811, 0.6820535543777666, 0.8655866059113106, 0.9190777208482948, 0.6032466847219249, 0.7042527559997301, 0.8152396842323918, 0.6194464735708992, 0.5690592121408298, 0.6438227706289663, 0.8664312016184608, 0.5556793209301569, 0.8309529659375076, 0.8911603507982938, 0.6144164130683977, 0.9525897959242267, 0.5364463343230963, 0.8850441505828731, 0.8765076451207181, 0.9864735882045688, 0.6901146214054632, 0.5748246745145973, 0.9710504830367865, 0.8374559001496396, 0.8934725334254625, 0.6598920710706344, 0.8732089173929947, 0.5236722490862877, 0.6297275161368325, 0.9239762993291861, 0.585985953640172, 0.6428814969910739, 0.6865338613057339, 0.6794361199121405, 0.8573393139600005, 0.5195761587577319, 0.7765765105846756, 0.9642362060998738, 0.6454259148849095, 0.7193525836980681, 0.935437795555702, 0.5643001227379798, 0.7058222217048769, 0.988381634645902, 0.9545644122129379, 0.6553072881207077, 0.5265025986159857, 0.9984263691535213, 0.7806322169742492, 0.6674569312908449, 0.76516682716171, 0.6152425166453147, 0.6614430092691165, 0.5133697097132088, 0.9690349936271655, 0.8241940819575995, 0.7756016640516044, 0.6542392888413807, 0.5191487173856915, 0.9575228446255082, 0.6399981189890893, 0.6396077077706922, 0.7839140807518601, 0.8518331971197182, 0.9047955326428121, 0.9557036888524012, 0.6095748009888551, 0.7122881718906808, 0.6284968025543377, 0.5293802512893496, 
0.837687907039296, 0.8837726861409626, 0.7702730512176547, 0.7264153114996599, 0.7751311884869037, 0.6777656447418796, 0.5864615584024897, 0.597914300338257, 0.8796619210502794, 0.7163996363340591, 0.7458478741075064, 0.6707500675046836, 0.9678311057607853, 0.7051913171020374, 0.686124988542046, 0.6755085831540208, 0.8764003191960028, 0.7283610536878586, 0.8171317595020486, 0.7820399529547777, 0.8839033272537422, 0.9771774825960581, 0.8974280380975556, 0.5074115482952058, 0.5019211829945955, 0.9842334684762297, 0.9312705816486929, 0.8191294842461307, 0.5833723271043336, 0.9387150985111108, 0.8699975280400999, 0.5314302566756691, 0.5979940301055009, 0.6550873772170127, 0.6929661565898313, 0.8229216237366537, 0.8108636310481525, 0.6644635481144322, 0.8392929075307962, 0.6407149539144317, 0.5726695235795791, 0.8993258699190132, 0.617342415319417, 0.6173418072202581, 0.515949082478866, 0.7090577960441311, 0.701026910771775, 0.9758715523811565, 0.8224820272088452, 0.7847474270646511, 0.9320740132989348, 0.5809000074923425, 0.5441530564320736, 0.9301185504923262, 0.6721632073071573, 0.6239179741075087, 0.7922591070159724, 0.7713550874296944, 0.5431218096188757, 0.8351805809734039, 0.7983995997438005, 0.7360127681583297, 0.8120567256027345, 0.5383739112952283, 0.7970098446681161, 0.9461897439290297, 0.9795238814213973, 0.569971499065586, 0.7385968490165837, 0.9190632734078718, 0.983548946570505, 0.8552767320610067, 0.9245794731686738, 0.961645052803171, 0.6302086314551978, 0.9353674083904224, 0.5050418056895051, 0.6740025222187341, 0.8781654171625348, 0.8214454400196911, 0.7955020126058627, 0.75354366875817, 0.5231094927557202, 0.8730890771082633, 0.9947237691071347, 0.7117672401736501, 0.6751937074002503, 0.590072754782101, 0.6270894333735986, 0.8334622212327929, 0.6224529681483855, 0.6675881102518211, 0.6634454980528178, 0.6660975952031778, 0.7245171550859157, 0.5525347498321138, 0.9042775469658066, 0.7400723262228298, 0.9102205279139466, 0.7950624205595939, 
0.9470786809151643, 0.7286657119239952, 0.6086190580030413, 0.7002662108507376, 0.6339777605525609, 0.8953563920113836, 0.9442885888498399, 0.6641528143533735, 0.610152589049225, 0.7965103163124092, 0.8328336033195444, 0.7209430526224523, 0.9313917357845523, 0.9220372534160894, 0.5766834556052081, 0.6735912987245654, 0.9933369922223746, 0.6878815387500259, 0.5148709207998274, 0.9864745145456524, 0.9439602749589174, 0.8291674315062441, 0.9209096857623886, 0.9558402123873996, 0.9965099822521073, 0.9729876217822564, 0.5269081296437694, 0.7054546084939644, 0.7216637067330869, 0.8497349739565643, 0.6361786110888317, 0.7082656420529838, 0.5958059144419983, 0.7028561297700717, 0.8529664624973132, 0.6941372799048084, 0.8191939066606607, 0.8528305733676064, 0.5950937594617739, 0.9183594060375802, 0.8652379822113463, 0.913913931181286, 0.636157657887295, 0.9236209555134879, 0.8762023901841629, 0.5607393605058226, 0.6985582732114606, 0.6698366434207803, 0.6926477932512792, 0.8164371360139109, 0.8334002156119587, 0.8509843003387069, 0.5859545781450622, 0.9989106652507256, 0.5110825773714067, 0.7459507642084229, 0.8393679543861883, 0.7325597376433444, 0.8057925236868242, 0.9092572639964291, 0.8439470914650388, 0.8619185944242742, 0.6904872520889789, 0.7624444688315648, 0.849368119652341, 0.6573260238081765, 0.5786654552225146, 0.5720621615637556, 0.6322902275756419, 0.9717601844848069, 0.8743060150894112, 0.7809695985273781, 0.6958949058552815, 0.6776371448953288, 0.7813142452256873, 0.9524531609435704, 0.725417665595256, 0.628460822283234, 0.5144794809600131, 0.5075884890107529, 0.6510661087464835, 0.7755729063041169, 0.5273866438902078, 0.813594565078769, 0.9971686487538629, 0.9919169974140273, 0.6366567171756303, 0.9734999895404741, 0.8217934376608156, 0.8204864488101721, 0.9658312850406073, 0.5704603997159555, 0.81042942636474, 0.5224254281515808, 0.6240117112241167, 0.7006885904946953, 0.8676683617140664, 0.6848349965315454, 0.7851637204493828, 0.7042832955616833, 
0.8755139181804547, 0.987498591858453, 0.6241431838155631, 0.5746869942822617, 0.6379326266254486, 0.9950217442098481, 0.978940555100137, 0.6955938843987927, 0.7608925343217726, 0.6584757880537826, 0.6984056962156597, 0.8427813234571482, 0.978332559069222, 0.5188099397580892, 0.8019403052342151, 0.7147314031765011, 0.6818388379273022, 0.9607968701780054, 0.5522395240418694, 0.5477598249999037, 0.9495617607611586, 0.7934610112010256, 0.7189206030077868, 0.5424291596640514, 0.5117574031944878, 0.5483109057442089, 0.8642079447118715, 0.8254989706863742, 0.5865017075417158, 0.6467861199107032, 0.9481762300321965, 0.9522059849357611, 0.8192833491160638, 0.9175512592862551, 0.8457176738615239, 0.5587660546957667, 0.9020762634349468, 0.7498828416678095, 0.7621805288060839, 0.9132761277404602, 0.9244953025296085, 0.8297122621452933, 0.6644057761836575, 0.9426109272885232, 0.5071080250915068, 0.7988814866468554, 0.6158601244965634, 0.5483542489927651, 0.7544121622308155, 0.8011445844712405, 0.5267646209540433, 0.6325253879955648, 0.7210591876718127, 0.9447594869329872, 0.9887642369033545, 0.9823794788575655, 0.5995385470416126, 0.8614065671202878, 0.7465992266263953, 0.9285062696605683, 0.8398291690324572, 0.5127583891069295, 0.8857279825392406, 0.972977266590476, 0.7266717114448826, 0.8767740521607913, 0.9231618528525449, 0.9982364931314549, 0.9089856233942144, 0.7815615026471971, 0.7896595792768591, 0.5391321369011051, 0.774514034308962, 0.6325178325004266, 0.7522075363487735, 0.8579309458387521, 0.8857402693121141, 0.7438343008072901, 0.6057096502562739, 0.6289181629501175, 0.7777223010762986, 0.8505654243704575, 0.5066284441455398, 0.5170448203623286, 0.9726851743795579, 0.9508244925939939, 0.8768249297968697, 0.9380115875876727, 0.5178704441656308, 0.5253882470272493, 0.6260100775140045, 0.7513045042621982, 0.7819720352829094, 0.598005679575413, 0.7475276675699433, 0.9187521787335293, 0.8526545283136443, 0.5754048983393352, 0.9939015951990439, 0.6580168902129289, 
0.7819336616794952, 0.8717613294358033, 0.5094145413117634, 0.6803193578525999, 0.7272879559184384, 0.8295110416347915, 0.6028452394562757, 0.8526848855133113, 0.8970867463590959, 0.7810826014552731, 0.6455383764447334, 0.9076203420532147, 0.873517386738601, 0.942079572614273, 0.7585286167762516, 0.9697952540988184, 0.8641540950522425, 0.6960649484547496, 0.7918112686464687, 0.9135843072274434, 0.6151975497109392, 0.8672640158391486, 0.5779901645429907, 0.7430642965134457, 0.9787511164562028, 0.603988895858584, 0.9996881710381167, 0.6126243149276871, 0.6867229576233547, 0.5421378473808007, 0.8776635510397859, 0.9838753452584613, 0.8764657926280812, 0.6632160332271819, 0.7055271061037074, 0.8190420634383195, 0.5557937241838191, 0.7165827121388781, 0.9059393145899601, 0.9766084330570284, 0.6452216215444734, 0.8294425645989654, 0.7132486485935875, 0.785231434853445, 0.964904440713888, 0.9689890911472253, 0.5013222698968688, 0.7175793094677214, 0.6597600832940733, 0.7354100571551818, 0.5550730013142477, 0.9414425807175185, 0.8627324868945545, 0.9165654106369125, 0.84031318001093, 0.8217818774169887, 0.8755846396954176, 0.5888311036316136, 0.7985703522467688, 0.6891447764212351, 0.6297657746645835, 0.9211548374840011, 0.6389686451879681, 0.5791937171109812, 0.9488101840968483, 0.7997395996519749, 0.9991111010784487, 0.7107879138209948, 0.8697761967016673, 0.5011700697347992, 0.9955348788150797, 0.5847984930300185, 0.7925806891538721, 0.7134757448165714, 0.7945181829403748, 0.8070744961430065, 0.5048264965547702, 0.6362012337605427, 0.7830004913926295, 0.5625232436163183, 0.6891708636124461, 0.7772893107236363, 0.6690322186514596, 0.8129134091270663, 0.8556188927023859, 0.8345115022560367, 0.5635901936121988, 0.8765063913469471, 0.9573527788964689, 0.512015793413092, 0.7270065136298236, 0.9562019301136602, 0.9022310281813988, 0.7766298297758149, 0.6753310528758958, 0.8231955094454356, 0.7131497317713209, 0.8530205818344243, 0.9105442426646809, 0.9810123127597673, 
0.6206949447198191, 0.6498225747118067, 0.6243024818246932, 0.5959791564283246, 0.970794798905716, 0.5557725560764883, 0.7828660975643242, 0.5308393632283452, 0.615965235725796, 0.8315628272547874, 0.7172700668971504, 0.6943090736330719, 0.8572292013240121, 0.5398695992646744, 0.683698380224502, 0.6843211697742744, 0.6549681400806954, 0.628959113523625, 0.5720256771060206, 0.6067826214578179, 0.8081856241736809, 0.8910831009583976, 0.7660776481250977, 0.7287020717162749, 0.7711156898060247, 0.6022037361476555, 0.9307140831446984, 0.6190715579739503, 0.7175999816137207, 0.8165411348861529, 0.9516666883137002, 0.6873772473734228, 0.553395449238179, 0.9456043543919821, 0.596320053315911, 0.649141586676399, 0.737769495335018, 0.8065596397699167, 0.536277716217153, 0.6287917393162998, 0.8779492652763594, 0.7700242908383386, 0.5875793209466698, 0.804662329407458, 0.551394852755247, 0.9815328291373504, 0.6741533939125959, 0.6283364515358165, 0.6618574641268199, 0.8057777543526028, 0.695318476997167, 0.5759573625279029, 0.9389103614055501, 0.867996275475041, 0.9797727320329215, 0.9090202830774534, 0.8393463128283016, 0.9734424191343594, 0.9319395494299196, 0.9227105658025434, 0.5411865075337616, 0.8752472312450763, 0.8368475954234591, 0.6142431328981649, 0.7971589861145814, 0.5041630392769703, 0.6431124676838564, 0.7792898543280424, 0.6786126090024368, 0.614739835330925, 0.9370247100400129, 0.6802331787977383, 0.7745289907689384, 0.6247579304762141, 0.5094015538876908, 0.8623151996181369, 0.8323931854737929, 0.9621605588140931, 0.5564304662868038, 0.517123420959425, 0.8009113838638573, 0.8493109052295116, 0.650586746604862, 0.5156514704911584, 0.8973549287724435, 0.5897540776044471, 0.6229225841157188, 0.9722430247720271, 0.9556913379722397, 0.9125755108106639, 0.5926783795052869, 0.9297358603277658, 0.9569949221832248, 0.5689227581936549, 0.8177184321698518, 0.7916235764552148, 0.8554449927726228, 0.6314937227754823, 0.854302391442378, 0.7960716326156374, 
0.5166873082209469, 0.9232709930148624, 0.5971578462531368, 0.8611598892532397, 0.7864746138581098, 0.8178758824777492, 0.7696070423864945, 0.6706313734474336, 0.9588578775827514, 0.7978048174126441, 0.7108897796200999, 0.7705481285163588, 0.7674964698320862, 0.5059864840817575, 0.9694255383429801, 0.5480076147430462, 0.7387150789009342, 0.6754220177232635, 0.6201968245276501, 0.7490439716546873, 0.6457737033289088, 0.6203282877581834, 0.9936189944312053, 0.7821989393966555, 0.9709351945000992, 0.7036268345499361, 0.5279925112196816, 0.9167151986771864, 0.7278843752926718, 0.9766425692581138, 0.5110534439031481, 0.6015653759001813, 0.5756695460363366, 0.7625784847161581, 0.9524619279684949, 0.7540713711708649, 0.8914779334304, 0.9393171059802479, 0.5792805865730131, 0.9390064788124938, 0.8301885691408923, 0.5748836655329059, 0.9030576860960116, 0.7511096433705231, 0.806090732131312, 0.5855172680882345, 0.8736304190322837, 0.6559456738789438, 0.7103627810253832, 0.6661765199972453, 0.6461561618432691, 0.6652850840594213, 0.6419334317713793, 0.6893909569689854, 0.6162560524579399, 0.6598083180062573, 0.8876264248059333, 0.9079713937963764, 0.8646676610956108, 0.8510730702083682, 0.6897260110123409, 0.7385801075239578, 0.7731964611567061, 0.5301442181259393, 0.8134738047599213, 0.6504274304296309, 0.9466062323944449, 0.8485444189873848, 0.9761468231771553, 0.9074086788317322, 0.9466220732786312, 0.5236905341401845, 0.5730450871861954, 0.9243966308777234, 0.7990645269658774, 0.9830398812201169, 0.9660017121312767, 0.7276251315329868, 0.5349878716784594, 0.9925050334173526, 0.7348009694959676, 0.9410244956154374, 0.6412388494405387, 0.5666298679799027, 0.9375519054649977, 0.7505890207428767, 0.9776968545732554, 0.5955197301577464, 0.6259900795235716, 0.6782165694194369, 0.8470207147815978, 0.8493861522788533, 0.6522001035693636, 0.9511299787024978, 0.9699948639189766, 0.8244032155080091, 0.6520158436537509, 0.5168609015974183, 0.6502240411868032, 0.9525060325800916, 
0.8036783643586138, 0.7212276225966066, 0.9382508619700638, 0.5240020272306504, 0.7324350332006805, 0.9813140554204951, 0.9003543747141063, 0.5217231825657669, 0.5629508561265046, 0.6730368925810974, 0.9002282447908012, 0.6932536320819342, 0.8214080989264888, 0.8182238268303644, 0.8806726425549761, 0.8079679178094026, 0.5905927211495435, 0.8384596031831448, 0.9753014930977266, 0.5734144978222617, 0.8642382904074424, 0.7612210024476658, 0.6514620693497826, 0.597691820159839, 0.5499229756927886, 0.7986686146804258, 0.6954871859840197, 0.9193411917471384, 0.8899874867888005, 0.9852701658726961, 0.8555979497429976, 0.7605502312951475, 0.6774715887529084, 0.6439955870703195, 0.9449611701243414, 0.5264656886055306, 0.8193429705924711, 0.9329900565747629, 0.7636474379334581, 0.8370690074495729, 0.9394292757380118, 0.5340531533166586, 0.7345235265602639, 0.8548486586099333, 0.80961144180312, 0.8107153386098287, 0.889160550882558, 0.964209679556586, 0.6300519824221895, 0.5548242564204072, 0.9884296221493493, 0.8254433210606698, 0.8676024953147272, 0.8715123645074461, 0.7719301059950726, 0.9712579991953556, 0.6804708085362001, 0.9754761968101602, 0.820885725481511, 0.6127843337725647, 0.8233539299450792, 0.5108409679087875, 0.7290496460752197, 0.7590101109629589, 0.5583043750615384, 0.6028686080010386, 0.7494023425044154, 0.6210001804477117, 0.7089274927525921, 0.691574805276506, 0.5077979992317956, 0.9666443962623694, 0.897180517207296, 0.7935904212500038, 0.6542605624882754, 0.5205301090821796, 0.8653271283882731, 0.5286915069287332, 0.6464970177205023, 0.5857184849849802, 0.8992145470545718, 0.8503595759645951, 0.7177798968865436, 0.5873346299301578, 0.6379206920889113, 0.845154068650513, 0.7373645877118691, 0.8911432025473851, 0.8675431698808873, 0.5001460936005271, 0.7969907406791372, 0.8918128481734946, 0.6282226380154267, 0.9173477049008709, 0.526730497432457, 0.5398924336730382, 0.6681141364980683, 0.7078701740033191, 0.8448694884597503, 0.6049012198247122, 
0.9861776912214084, 0.6541116777027736, 0.6304212261825586, 0.5515770182208763, 0.5321556075229037, 0.5401952214463209, 0.6043018574871679, 0.9003968059601892, 0.8462492965050116, 0.6013268652880979, 0.8184270139839711, 0.9676929960982437, 0.5055220982615558, 0.549215174698464, 0.8964524117738853, 0.9022826547112742, 0.6177586796415726, 0.975283305502959, 0.6310891891759545, 0.5355093049720674, 0.7077897191524722, 0.5385846240236489, 0.8996162091190021, 0.7172590050269201, 0.8609997805411294, 0.8336103080494718, 0.8979316856457391, 0.9532308948104422, 0.5616974372500776, 0.8324772197672143, 0.7875870933842117, 0.7988504664119159, 0.5910589867028837, 0.9754230401914654, 0.760315185194161, 0.9998350745664946, 0.6697806358997829, 0.9691397125418715, 0.9919844842681314, 0.6701608368853383, 0.7380251914738399, 0.9079351151018011, 0.5537526839849789, 0.6271792119989523, 0.9549194003295025, 0.9029372823164187, 0.9727378928511281, 0.9272216345158699, 0.5655870439087404, 0.7872086516402128, 0.864484262442613, 0.8333880920137295, 0.8543236755807355, 0.5592885631133291, 0.9020790514668173, 0.7134953484316429, 0.6896455435998105, 0.8751536022682502, 0.9222078911294758, 0.971213149433584, 0.9269503949863162, 0.7659379675100805, 0.8493327714021233, 0.5112741987556582, 0.7636835836760265, 0.7881221938356416, 0.8398116712053876, 0.8684673801356255, 0.7858545698984838, 0.9011352922594992, 0.798426160164165, 0.9449812523444132, 0.5226148151791528, 0.5051944783238653, 0.5305452162111997, 0.9262830852737979, 0.5654969901238958, 0.5568639250753671, 0.9922587017380136, 0.5354733943901862, 0.8784484276949802, 0.9577825407232277, 0.9919969364586445, 0.848183347286922, 0.6765902214746805, 0.6790046526927083, 0.9169393935042762, 0.7907574458281208, 0.5419709928394273, 0.9791057236285325, 0.7453025771893238, 0.8352097485268257, 0.9261690665750111, 0.5092293215918513, 0.9859252016385147, 0.8300976682071375, 0.6858329332326445, 0.744759185816721, 0.6880187997140393, 0.8537153572942211, 
0.6917884399777294, 0.602654515897437, 0.9334368417429058, 0.5064514350767819, 0.6045753471771238, 0.8349455720424548, 0.745337788246808, 0.6723708291687415, 0.5431533077235751, 0.5931939596681384, 0.7463086357333053, 0.6331859262901023, 0.8798093653768164, 0.5684342489849379, 0.9513321405001768, 0.749753290215939, 0.9613572968577907, 0.8834910930957067, 0.5072662501595313, 0.8117600969048402, 0.6109590284654163, 0.6912363269371777, 0.6375477096101214, 0.7079754167450323, 0.9369777259729277, 0.5622086821138723, 0.5159428517894962, 0.5817858955683479, 0.936821564564889, 0.8829846313974147, 0.9214030480840772, 0.6519987782510848, 0.9422236931386736, 0.8382241303464693, 0.5638546710392349, 0.8157152982477439, 0.7438548338290241, 0.5822825479551803, 0.9757027886690821, 0.5696758352387656, 0.7684211788796, 0.8083443525595682, 0.5074533633153278, 0.5470684592493198, 0.5326389437989647, 0.892955331289295, 0.6120859811054101, 0.9042218778905419, 0.9373439648174409, 0.581716312210542, 0.8586402521255003, 0.5700851550004192, 0.9483850031572421, 0.5937785748654218, 0.8040849218487118, 0.5043514760228551, 0.8400254972757859, 0.596545508057013, 0.77776253385561, 0.878176387766555, 0.5385257351899336, 0.6788175738773825, 0.5276128201114227, 0.5253679530381776, 0.6566769924140404, 0.60635185367678, 0.6851159674653068, 0.9164401024509308, 0.5223530098235355, 0.5549770879917308, 0.8465542161985331, 0.5136230239759456, 0.5849255106275656, 0.8155412395475465, 0.8345176581193059, 0.6911763898352143, 0.9617341598838409, 0.7819001845261471, 0.9006620026789871, 0.7651235690695919, 0.5197982344315135, 0.8483610312411956, 0.807341918064338, 0.6446718621650069, 0.9464246217379262, 0.8731851649856529, 0.5852841633062593, 0.6226395757915923, 0.6062611664330723, 0.5031855118402778, 0.9852360873086231, 0.8453183874407348, 0.8036361070735273, 0.5969251423324935, 0.6256004196124241, 0.6694649593146178, 0.7250137671141796, 0.7182892670234547, 0.9344837861594111, 0.6501380230719386, 
0.702983345149558, 0.611693325921556, 0.5004674472877298, 0.6428753497662416, 0.9300330698037356, 0.5152879237617565, 0.8357881891489785, 0.6990191184047834, 0.7175368851399005, 0.5677628691790322, 0.9505914833420015, 0.7340871366795099, 0.921838285321497, 0.9101118714923464, 0.5426171251359844, 0.7945743831814782, 0.7345204606001551, 0.5071783760921966, 0.6902858242374234, 0.7865495502233271, 0.5018919913363901, 0.8147897177342058, 0.7527122531501751, 0.9694578647865159, 0.5747134548490442, 0.7930951095268917, 0.9026205186647425, 0.5526137353203876, 0.5735210420053765, 0.5353705851772004, 0.8770517873714587, 0.7751899348822371, 0.6948003781755565, 0.9504451652818802, 0.8349810962504094, 0.91039571855509, 0.7504648437312662, 0.9490445181594824, 0.5213242046408415, 0.6628765755399345, 0.7267215279617082, 0.819826838585604, 0.6237456045060839, 0.9982581947215254, 0.8627516951983798, 0.6712402967615231, 0.5583893386979701, 0.9010668397425341, 0.7890484991306601, 0.994161026456837, 0.5370054249172651, 0.6518544954010561, 0.7720461438514131, 0.8309698510130772, 0.9579206856274818, 0.9080350973365012, 0.6024040410460441, 0.7074435010253978, 0.6730413259251485, 0.7682746128786826, 0.8743352550771215, 0.9891378387210339, 0.8050775975206753, 0.724481307896681, 0.8139969069144883, 0.5299467393077928, 0.5479655829594857, 0.6600763615667558, 0.9254610651507811, 0.728017584384295, 0.6284811774969581, 0.8498206767329421, 0.5474916459016066, 0.8174573288137029, 0.7107010835633948, 0.9697952465084088, 0.922272293993702, 0.6676841393165858, 0.864122012364168, 0.6488831136614128, 0.6770731124732514, 0.6368362435078179, 0.6120298189900517, 0.9053210552119809, 0.7550587431291624, 0.9876562522155796, 0.53366620710658, 0.9319716203995057, 0.9961956515176476, 0.6826118234711609, 0.8815711933206754, 0.7065924539184366, 0.6577548062719158, 0.9033539014938669, 0.7319630943293384, 0.8359529477823275, 0.7175084118480706, 0.6386183640286505, 0.63831412383118, 0.6995685153064359, 
0.8840350283723093, 0.5603488719253833, 0.9662939307733498, 0.9597815937975918, 0.7731595044662712, 0.9413418178479915, 0.6349527709438507, 0.8035567623334465, 0.7222331559388301, 0.8750488509204001, 0.9278630800051124, 0.9902034789016922, 0.5062585147608087, 0.7748662689237409, 0.6052763157793787, 0.6538953820050042, 0.7474914117567013, 0.9288701985378154, 0.6655717314052103, 0.9363363540852343, 0.6875242922533249, 0.7587020748920936, 0.9123563513142707, 0.8491267499434676, 0.712876617533011, 0.7552804229323695, 0.9932914995348009, 0.7421041573286726, 0.9123711359391061, 0.5055172216981609, 0.8452159347097528, 0.8955154300013867, 0.7257877361176658, 0.8134652117310373, 0.5312284185390443, 0.8196363717813846, 0.6983387962929233, 0.9621077992699463, 0.6150562813191556, 0.8542436674379374, 0.5460203821315996, 0.8102493166632381, 0.8740240398346449, 0.7712112996206919, 0.5645932313245864, 0.6834288065448748, 0.8960764199136921, 0.7117873453955389, 0.8309099228675942, 0.5108280503025081, 0.8778934828712273, 0.5982680992611251, 0.7107553531598518, 0.933982467840536, 0.9891740652551382, 0.7061081914676502, 0.7346114197251592, 0.6623374121833983, 0.7093952005012708, 0.6363937521544679, 0.7255601875001841, 0.5971621085952324, 0.8823553783988984, 0.5107777083803589, 0.5644322639479459, 0.6431445673003727, 0.961134555411868, 0.5141350620067456, 0.748898990232646, 0.7514112969358766, 0.7684984417768546, 0.6007186459033209, 0.5167826513940413, 0.9542280545932242, 0.8923863866288283, 0.5759362141981992, 0.5643550498985692, 0.5684523643097347, 0.907999598825479, 0.7193195133566304, 0.5062823709453728, 0.6941903780676397, 0.6237885723758938, 0.6357684610949771, 0.8819052454295125, 0.6940710536634377, 0.8890733301457245, 0.8003635422396245, 0.8165966732084837, 0.7884534392389726, 0.5182212517702763, 0.9820730688728021, 0.7568702307856986, 0.8224781418807964, 0.766453665281009, 0.5383596113370459, 0.8271811186820868, 0.867430069306931, 0.5967930978867135, 0.9995263971455526, 
0.5422220399123409, 0.7611478434371299, 0.9133945505787316, 0.9823582880555863, 0.9681897128841668, 0.7076195590291467, 0.5594058209470308, 0.6000249937725215, 0.505695420344726, 0.7255622355185358, 0.9359124342277532, 0.9773544962834675, 0.8986352982766777, 0.5120598865689605, 0.6024991466518446, 0.5496814444534126, 0.7447043393857902, 0.991023251891007, 0.8319489514817973, 0.7528421371694575, 0.7864669161589726, 0.9553362215399512, 0.7460551928186692, 0.8960072978768919, 0.7541377501035389, 0.5198628947827011, 0.8817208452476633, 0.736490461373305, 0.7360243339197754, 0.7180342194694209, 0.578490114815507, 0.9945819909563369, 0.7168995437959638, 0.8769727935417968, 0.730781828089259, 0.5373412309987464, 0.9745426889567794, 0.5198991036642928, 0.9415485810264792, 0.5740101721441443, 0.7146122215552424, 0.614207438434992, 0.5545838396416696, 0.6958861485979186, 0.78285509590588, 0.5016887850060157, 0.7228575781606108, 0.5265533298215199, 0.5215703156497278, 0.6999544603352175, 0.7956116535355177, 0.546287925290027, 0.597678599232042, 0.8846788502847893, 0.6322299515066403, 0.5323812172610142, 0.9806411915666119, 0.9425477995280649, 0.7511579592477102, 0.7781673186791609, 0.8126995102356214, 0.6944184534384473, 0.8600113238895368, 0.9829246573262533, 0.9181058211408535, 0.806179863141874, 0.8521185347856611, 0.523732867222278, 0.6831174223016991, 0.6349145709840605, 0.8070347136377105, 0.5933690773079802, 0.798303543259895, 0.8035861007387308, 0.5194767279182548, 0.5876714377575138, 0.5998816338312308, 0.6607617634873977, 0.9010023928602761, 0.7092970857426185, 0.967909613871015, 0.8957784546479876, 0.7001376385947347, 0.716419033363163, 0.7488562930625045, 0.8343616454534679, 0.6859846694646831, 0.8873132739402707, 0.5629998000864833, 0.9767309246001922, 0.6945127764667778, 0.9925008762378507, 0.5958221235704579, 0.5979791579688558, 0.638662599256742, 0.8580347798733042, 0.6739333211726846, 0.9609229949859771, 0.5846027171920167, 0.6579486632547594, 
0.7619292962544139, 0.5763202385243843, 0.9844722007725412, 0.9980995167290445, 0.7494356215684506, 0.8497670584288747, 0.8829867575131294, 0.7096548562696231, 0.8440917303029388, 0.6155092530836765, 0.6554747623138648, 0.855157623416734, 0.6308051439829223, 0.7108404623499075, 0.7042803938687132, 0.939333446650733, 0.5439410543876008, 0.8080773954833966, 0.6127560106255089, 0.8169942695195915, 0.9048621075386143, 0.7660767695840212, 0.6989847461780637, 0.6025744931595831, 0.6070196809224504, 0.6365165758438276, 0.5356671205719408, 0.8492860465196099, 0.5635409563238629, 0.6544523324639846, 0.911319509034725, 0.7497824805263384, 0.7314086643263354, 0.7410596356701805, 0.8378051393799768, 0.6502220860661816, 0.9353879076364846, 0.9068042843150717, 0.5010678825227859, 0.6407876866015325, 0.9022135591051752, 0.7570097648658431, 0.963123057420161, 0.500911358234569, 0.856717645915491, 0.8133002577933919, 0.5797631878844306, 0.7682112187829461, 0.702388248713368, 0.9246435341463157, 0.7532147260775108, 0.8002778865652918, 0.9402261193912891, 0.8517303753255494, 0.988266708068056, 0.7372367051948614, 0.9127933513983603, 0.6332244638614524, 0.6499099577522616, 0.582220778065937, 0.7727680039948476, 0.5689207376572375, 0.5545257401910513, 0.9801626229279778, 0.8036267026287641, 0.8668959686743523, 0.5677030284947591, 0.6190271603416966, 0.7704101919086577, 0.6567339109712098, 0.6352322629774196, 0.530518344075291, 0.5946522340769984, 0.8501115377025843, 0.7522424972918569, 0.699565236545256, 0.9883312934057551, 0.6632918016375641, 0.5184331740426261, 0.9888908825070708, 0.9533793862313886, 0.8439796611103045, 0.9344601006241408, 0.9649121102405273, 0.7722240294318, 0.8314039121240637, 0.6403125375925239, 0.7399986366328792, 0.551223153058713, 0.7413107483297836, 0.6451059709809166, 0.7665045938715036, 0.8276853325459788, 0.6476705592754148, 0.7474529773643124, 0.7378589746483949, 0.657425389525355, 0.8228052537427442, 0.9876207344526475, 0.6041881131395088, 
0.5105916905329314, 0.7452243249644972, 0.8362996446518783, 0.7107187713025589, 0.5474450500274677, 0.6138268532346454, 0.8759143184797624, 0.6423634493168872, 0.9071394730996003, 0.9227820919681688, 0.6709462199064875, 0.6406632650558759, 0.8670267191527515, 0.5577388873257569, 0.6564524287498101, 0.8862075694741285, 0.5485911122421028, 0.5160424904314438, 0.5490746451951429, 0.7757978511804728, 0.7964567403440965, 0.6360291421378614, 0.552100819623339, 0.5102404529290275, 0.61903233721742, 0.5602929743958001, 0.8104205539486736, 0.7516434093902152, 0.9491892020570627, 0.809582716661314, 0.5350526724479039, 0.9376522829181213, 0.7980187292314701, 0.6711732289362182, 0.9638331767733775, 0.5192768598633815, 0.7054062145770722, 0.5707046159290405, 0.6793888214923139, 0.6705722637229512, 0.5178543902410787, 0.56613792191652, 0.9852291172354863, 0.8826279790482111, 0.6266176898966145, 0.9699913177143805, 0.9421534678837393, 0.7498272714650827, 0.7637785220835558, 0.6820202947098637, 0.5957392743204204, 0.9897783270959086, 0.6653188588285394, 0.9882916608638423, 0.54617069145447, 0.7171587665549799, 0.6386993847923874, 0.8542401233367106, 0.5667989338000348, 0.8387148798685321, 0.5190498645865427, 0.6773491237092091, 0.8613346702426312, 0.8310089593902775, 0.5657292643837617, 0.9162098982032155, 0.7380182327870012, 0.5839135312326849, 0.8553419592650713, 0.7708508010959578, 0.981282445063419, 0.6762198071084993, 0.8871989180992139, 0.9733942659197891, 0.9622744285013725, 0.6498734399943721, 0.8945204184723956, 0.5968594948879855, 0.559559520463623, 0.588627819049609, 0.90221550495004, 0.6318270062160596, 0.8993914744784094, 0.8264071437776128, 0.8980538183040774, 0.8872333656035583, 0.6103706292332663, 0.7410504302653795, 0.8690798678157714, 0.6564132971086496, 0.8537545102618045, 0.549186428548408, 0.666222277451994, 0.5315921388355423, 0.5762409462423292, 0.5414719734542626, 0.9725343985296608, 0.8997993607089361, 0.552074088246099, 0.8444614536413744, 
0.7547795083375085, 0.703146368904158, 0.5181690081524495, 0.5350265807506751, 0.6199824085477443, 0.9413794924122612, 0.7431466911973204, 0.7701478568876412, 0.5842518035020553, 0.8745823613522268, 0.6412291621273786, 0.5241891163897179, 0.5931074724394041, 0.5227881398468499, 0.581582110028511, 0.635505485684076, 0.5340600957566659, 0.8220731076026621, 0.5634944471936612, 0.7665433961514938, 0.568525961832018, 0.9104398166114798, 0.7726578057811215, 0.5900618745276619, 0.6965216439126583, 0.7534633765507077, 0.7486021324677488, 0.5657636926813837, 0.8230262442621338, 0.9650388658693315, 0.624822434328154, 0.5371458150275245, 0.545937459362923, 0.5020652961546233, 0.6813568014482896, 0.5733347662639132, 0.5288078758681949, 0.7279501780702446, 0.5005853349224327, 0.9039350718461755, 0.8284952117161213, 0.883604718388063, 0.7538039699723771, 0.6970753991439227, 0.6688356120228607, 0.950784238217671, 0.6109070492996412, 0.8387026554989451, 0.7159971230127539, 0.5684811993246566, 0.6848801781882324, 0.7573659895938796, 0.9080609841323737, 0.8087733262552482, 0.526167251740054, 0.5843020390057706, 0.7950546564769834, 0.9564387020285701, 0.7399191818310287, 0.5167695911834485, 0.7043441134480821, 0.8647735606518205, 0.805543831401889, 0.6788229826846823, 0.936484900849745, 0.8499027583145838, 0.9925723502611259, 0.7801493515709397, 0.6223313055850015, 0.6891695802604186, 0.6419504869469161, 0.9067593963031155, 0.8133798020027384, 0.699861969856153, 0.9132783178443987, 0.9635786810044835, 0.9837630737232537, 0.7190984113686374, 0.5068832790063472, 0.8025485790756941, 0.7234934676121572, 0.7204767250495104, 0.7596203032249237, 0.8909057350688042, 0.8217747340729487, 0.5379858197498273, 0.6082232041739084, 0.6180165193871625, 0.5176958720187006, 0.7678780001622663, 0.9833318632481636, 0.8698167608877534, 0.7787377179460828, 0.790181659533197, 0.9862973988944725, 0.8573298224771931, 0.7312516370354838, 0.6778916203227394, 0.9652980266235556, 0.762724957136334, 
0.534287650974718, 0.6483225024922047, 0.8031871909309451, 0.8365184246187242, 0.6660836055303921, 0.5422858087785087, 0.950918191841827, 0.5904758923102564, 0.6787030210507159, 0.6451030277442892, 0.6897884497630882, 0.6072669093896331, 0.8558937489505363, 0.9552923531987132, 0.9400529273118444, 0.729605780438537, 0.5572855538926249, 0.7166380035904023, 0.9505559785056462, 0.9974022073397573, 0.8139105910043838, 0.8182211528343335, 0.7922163673787705, 0.9884145045906232, 0.8768501518539151, 0.8909740209209359, 0.6196389999392471, 0.8562501669922118, 0.773438183655137, 0.8467135012302487, 0.7477396471776081, 0.8431902867028358, 0.6476058808518185, 0.7336659290734897, 0.875540950447818, 0.9316655004981974, 0.56353786855283, 0.7017737382792312, 0.5941221417244399, 0.7728823259477106, 0.6560157186267701, 0.7943106750320005, 0.6245381550656842, 0.5094326349098621, 0.87475632334551, 0.6332620227875889, 0.7292341742165769, 0.6305193401301257, 0.6434851662741208, 0.9219806964628621, 0.7819335306525359, 0.6509965526844219, 0.6621523112656733, 0.9614283703121764, 0.7459636281902615, 0.9533599481344296, 0.5743080481506482, 0.6609326613643105, 0.7708502837938821, 0.5355146526250714, 0.6758259519719573, 0.8061492971616336, 0.5524281563913104, 0.6485666194138588, 0.8652251095915611, 0.6799424058595207, 0.7208081144824017, 0.5955226110500577, 0.6899025855908881, 0.7354379136526205, 0.5523911306789493, 0.5157209813365808, 0.8702283032900038, 0.8440715372634212, 0.7296896589408493, 0.5987234600198093, 0.5505750377742955, 0.5965676211135829, 0.7478898715775867, 0.9592439801162957, 0.6119651303447076, 0.9209972899993574, 0.7560772334305053, 0.8694916687224481, 0.98274767814108, 0.9711723511918628, 0.705035703162124, 0.8213980480412714, 0.8396502166576072, 0.8623825060476598, 0.613176758033498, 0.8383250336550057, 0.6089469304264609, 0.6926288615863125, 0.9231773944455417, 0.9510483637419882, 0.8422814786729875, 0.7336203899188185, 0.9268287525498597, 0.5722259043115312, 
0.6122160143250082, 0.7115881668181033, 0.5258207459273239, 0.6897173642674074, 0.7545696555290331, 0.7028748142956739, 0.5920560915181436, 0.6305721983191825, 0.5599615295183049, 0.8094993557939454, 0.5006631246916906, 0.5676861935248256, 0.7817928392270017, 0.9858979070736338, 0.8597833198922227, 0.7359937308509391, 0.8771961512048072, 0.873736687278975, 0.7997728069750321, 0.954480632516743, 0.9809589473391367, 0.5871128382755648, 0.5090838873099327, 0.8544182629313449, 0.8431824269388658, 0.7859613795955045, 0.584125176959931, 0.6748094706785174, 0.7505898152815256, 0.9246019331824926, 0.9797787529411071, 0.620770710647897, 0.5034011056942463, 0.6191125811707758, 0.6177235736290275, 0.9018743359703183, 0.7638766460273363, 0.726816842025744, 0.6841568769404631, 0.9251892906049757, 0.7720537341909475, 0.7223988885963942, 0.523424011972349, 0.756104892050007, 0.9897174740508972, 0.5462348603286602, 0.9858704147792273, 0.9665027470532734, 0.5406307552997291, 0.9334026941396187, 0.6688979649485198, 0.6622858263985272, 0.7559290886007554, 0.8075955364936863, 0.683099701275909, 0.7683694217964472, 0.7943301999742723, 0.8626176470172711, 0.5321148288549546, 0.8621353411013707, 0.9769561732767487, 0.9832958338368774, 0.5067766003846665, 0.7162648220834398, 0.5690371111557195, 0.8071957948162862, 0.9148968808393183, 0.6632628030419498, 0.7288010747913035, 0.6368589185134463, 0.984915954570455, 0.8610395997768026, 0.6442803894691813, 0.5983510255292106, 0.6187861950259554, 0.7464328475873279, 0.9676985570860626, 0.5601891056914603, 0.5676105091840788, 0.7161435430830188, 0.8488994504446536, 0.8802194042081778, 0.8815484860546328, 0.7396252960298021, 0.8900264184413991, 0.8196929159159341, 0.6338341021796803, 0.9491573997291023, 0.7250017618605061, 0.7393374878977477, 0.6634888068239003, 0.5002133324294751, 0.9482445657140255, 0.7321922235191611, 0.6449813985713104, 0.5611403124824423, 0.5139518021534695, 0.8364938648387541, 0.5949106838029408, 0.7642236290606463, 
0.9867852199180102, 0.9001953236666607, 0.8338358292387249, 0.7326727835129385, 0.9289604184127332, 0.5818820924292092, 0.9196106633049921, 0.8247986169638404, 0.6428879717983158, 0.847921797555904, 0.8301431409551835, 0.8420152654644588, 0.6395464897143612, 0.6943363643691344, 0.800283702657308, 0.5699748525361599, 0.646212421871912, 0.7378543424870895, 0.9945879346759688, 0.73641866094961, 0.899063263656485, 0.9403173995126992, 0.6023969867161139, 0.8174013990877507, 0.687010325544798, 0.5996434278840846, 0.9568585624536974, 0.858380921698598, 0.5288403909812671, 0.8230958365898113, 0.8317200906444993, 0.6380012965323294, 0.5883999449684438, 0.6118507162998352, 0.9194031714440449, 0.6985664617199094, 0.9802188297451748, 0.9576292987914593, 0.8828809873970835, 0.6454046684597188, 0.9002598940951037, 0.549332828232475, 0.5313302308039773, 0.9328232913375565, 0.6171745165465009, 0.5567268356853643, 0.6913179438221718, 0.6622905942884645, 0.5783261660919164, 0.8367354070861979, 0.793393253453877, 0.9483358748606541, 0.8140213438190338, 0.9741312875029186, 0.974764012449906, 0.6311466110791517, 0.6070168020222835, 0.6569021372702208, 0.9853512292734277, 0.9912670149687963, 0.7082063712508785, 0.7504484053579168, 0.5493057237810139, 0.6443324643983244, 0.5247777864590919, 0.806932107477116, 0.6575990804008911, 0.5399787223761692, 0.5194173091983443, 0.9421427804611395, 0.8251284613489177, 0.671838299780877, 0.8383318199807208, 0.5181156918541963, 0.5688398331629057, 0.5106950037717746, 0.9831253641825289, 0.5753753129138872, 0.9174733590618147, 0.5006334003339404, 0.6169283116271267, 0.9548910664256063, 0.9686920569579851, 0.7313303493272103, 0.9064580188421869, 0.8750948612466151, 0.8795672043547182, 0.9585077995155626, 0.5430588317245382, 0.843282799883136, 0.7367198714104896, 0.7407154324533831, 0.5788869485115818, 0.9271611410340839, 0.5033095368122066, 0.839645517900971, 0.6989412337447727, 0.5512482404264752, 0.7462909391963041, 0.6747481899021995, 
0.6382162386636141, 0.5016095749233314, 0.9244356380050298, 0.525517429730898, 0.7892848065650835, 0.6744725835103031, 0.5877816201989499, 0.5351293655452608, 0.9428421579954995, 0.8588284456543451, 0.8205350277851527, 0.6579487357952919, 0.9683844525229272, 0.8294277571062018, 0.6261574420335612, 0.8085714240376758, 0.5413004877655526, 0.5248652583610904, 0.6651887201006249, 0.8569633275243704, 0.8132016019378598, 0.6670023477555598, 0.7416578177232844, 0.8479421430479994, 0.8135966441335819, 0.7944675102941006, 0.7954125901854512, 0.7478495619026826, 0.9206067487081031, 0.7791802899051199, 0.6046166525180403, 0.5442214097773505, 0.8339544320105079, 0.7638572678266764, 0.5604810675737664, 0.651780729823253, 0.7774095921213326, 0.8038750252945328, 0.8847082113799034, 0.9839361799717974, 0.806928653787206, 0.5568865556666117, 0.9333882111333471, 0.5689921886241716, 0.9179758682763774, 0.7036322751740154, 0.9517882613754789, 0.9184539956148371, 0.6156499710830381, 0.9002948435359714, 0.5437491215398236, 0.5087118492436296, 0.7581124451989631, 0.8046216917643716, 0.5888551001785721, 0.8428864139630373, 0.9819691231229266, 0.8874029530438055, 0.9481160404919116, 0.7301778269451996, 0.8007210774211346, 0.9875857287079328, 0.6477440832709934, 0.8451952040792712, 0.713045043334034, 0.5575542775096058, 0.6062838180531767, 0.8177267328320132, 0.809469284447822, 0.8950358838556532, 0.5297537635508214, 0.9376804481630359, 0.8538648007422315, 0.7065942505020391, 0.5665228449355488, 0.5228223772071805, 0.5423031980166386, 0.607217849048903, 0.6533138747894829, 0.8231053059145119, 0.6781137108080908, 0.8199231897345745, 0.6232125817139806, 0.8870960992574561, 0.8865291445470856, 0.6708518075661645, 0.8772691317811334, 0.5554318222652628, 0.757163529639906, 0.603094294880153, 0.8270636252902517, 0.5756403152426864, 0.5591352214168444, 0.5347459090121057, 0.5594076819820335, 0.9583150281758726, 0.7366972374247882, 0.9031893024012025, 0.5040667828246848, 0.9963425744905081, 
0.5961566310561349, 0.9325727773120126, 0.7055607659966745, 0.5465757470595469, 0.834254897407683, 0.8426215002199884, 0.6902307723602101, 0.5385999706277449, 0.5891312612050861, 0.7314259389858543, 0.7750079583840197, 0.6922920637022764, 0.6042398649550216, 0.8311768527492058, 0.7255041097152661, 0.7091958217048322, 0.8965424355676423, 0.8874828429178564, 0.6346367688262227, 0.6771334767672541, 0.5030978709083329, 0.7932085535832518, 0.8090073536554385, 0.9276470697917962, 0.863492440403969, 0.9540487636052711, 0.9979044116261868, 0.874426262087324, 0.6209845122023903, 0.5198117267825215, 0.7596010991114961, 0.6068816050869283, 0.8574044181754588, 0.9260263969922107, 0.794729410308298, 0.9535337464467168, 0.6720529206895146, 0.8428869858405797, 0.6451747583832961, 0.8200086578777286, 0.8860360661879199, 0.8196967914636301, 0.578151495039846, 0.8935200466826546, 0.6446373538422607, 0.5052022938424945, 0.9956835877991598, 0.826139232594911, 0.7380467608985413, 0.9855521218632064, 0.7474812575129022, 0.7466608262822564, 0.9195414640626944, 0.752632406412755, 0.5621881940322164, 0.5704602961659846, 0.9514318356153415, 0.9772552616344494, 0.5302083863710287, 0.9407377601962742, 0.5888039834375854, 0.8594303058828564, 0.7064955173583567, 0.7082545186828353, 0.7312958081479153, 0.6489791590127216, 0.8579933269475633, 0.7272272113144742, 0.9330648484649944, 0.9952361179392788, 0.7378593913006237, 0.8912814795088153, 0.9682964060587385, 0.729313041964013, 0.8009444550565927, 0.7264340552044897, 0.8331305369425474, 0.8664551450346678, 0.7657398067593949, 0.5371753005003992, 0.7299137430168663, 0.9694946736075795, 0.55331776265035, 0.6445032801597834, 0.7340397196310149, 0.9489200607523061, 0.8050946527795853, 0.8590708055062088, 0.9079232102868682, 0.5648513297739572, 0.774600402861403, 0.532759765345661, 0.7385416640467504, 0.9244484483442308, 0.977721626522797, 0.8429151846753038, 0.7684286975489876, 0.5103172615544174, 0.7371665578204386, 0.8274208183909645, 
0.892980306351551, 0.6230659759562733, 0.5042029029205659, 0.6268966537828067, 0.6838890701015994, 0.6175414431778754, 0.8790002098902838, 0.5323216596531026, 0.9575957369202353, 0.9740918443914875, 0.8257288272254149, 0.8863112910667941, 0.9497354724880929, 0.9480834226983833, 0.6551481238154622, 0.7207744037538291, 0.690800023806602, 0.9779940687140459, 0.6559961849988476, 0.8041515215359362, 0.6953377892793442, 0.5628741689431128, 0.8543536026494406, 0.8670254924639698, 0.8851426942397442, 0.7980586814110158, 0.7162483417183623, 0.7954094731104109, 0.575692941040346, 0.8879975034786929, 0.8031850733093131, 0.8814121587619183, 0.7766552066972554, 0.6418224446295058, 0.6780076155815886, 0.594016394795644, 0.8382025447968245, 0.9240069640185535, 0.7725024393805898, 0.7071985091391314, 0.6175965120889639, 0.7564592116702091, 0.5915212984007185, 0.9901054185845639, 0.9298939723531907, 0.8040734157222291, 0.6400329474296991, 0.6766570142406114, 0.6990236075272397, 0.9573790116003464, 0.8443822793201834, 0.6347101122929197, 0.6634743358176058, 0.9653887406313275, 0.6649421803486476, 0.9040612313704776, 0.7642695199737348, 0.6113464446186639, 0.56513964405314, 0.7825180171618015, 0.6215556025334783, 0.906198726943308, 0.9062670303755653, 0.7058078621837556, 0.7563876244904701, 0.7173995522357026, 0.8992003148344568, 0.9891158109416034, 0.7081827651516076, 0.8552852114925678, 0.5010960531584929, 0.5966687071289287, 0.9383065848803498, 0.8699244646449283, 0.8151704944137104, 0.8757804350274532, 0.8191342283799983, 0.9628724632221091, 0.8604748821000365, 0.7711735179263042, 0.6172476794434352, 0.7325772066916841, 0.5651464746469759, 0.797623009846303, 0.9082364121582304, 0.501988037716937, 0.7688904962756095, 0.6781373818568983, 0.8811996846393295, 0.9107032373253574, 0.6499384426659945, 0.9358528937119429, 0.5884720029639867, 0.6742114807619507, 0.6808918155961305, 0.9626827814426796, 0.7947058720637844, 0.7207659778363282, 0.709694547476996, 0.8101367507347847, 
0.5187460462465433, 0.5457232908806744, 0.7080378543458337, 0.9876310882410557, 0.9530365341392568, 0.53275739336059, 0.9893943544032042, 0.8657433510066055, 0.5134422190027759, 0.5476263797984994, 0.9097252245205186, 0.7818469079161963, 0.7905011853070476, 0.7810908569595391, 0.6611346697691549, 0.8270056053170407, 0.8356362594460801, 0.8051210204282475, 0.6619503863611347, 0.7932138517161758, 0.7377249981298051, 0.9240722087257494, 0.5705018424916228, 0.5529281207608724, 0.6551230125875507, 0.9116132412079875, 0.9973071087977619, 0.7794050585425514, 0.8870287301650124, 0.6240574331703597, 0.5342837662229488, 0.6037246590045198, 0.9721117990855233, 0.5198391670771836, 0.5421981918441617, 0.7624535652001616, 0.7207617779447755, 0.9136276233777787, 0.8307801444177153, 0.7991840759456349, 0.8822559958758872, 0.9460652434687777, 0.6364615402034793, 0.7027421205989768, 0.9572896406868436, 0.6176575787253783, 0.9900934191831146, 0.7951286453303787, 0.5609102478491942, 0.7518646080227525, 0.7622018487015555, 0.8759834603195136, 0.6207099771356002, 0.6924116404991272, 0.5623463539096542, 0.5841855360191542, 0.6198045896934621, 0.6939445783893425, 0.9861374881171533, 0.9994766180546557, 0.7339357728132625, 0.7574721583446796, 0.8749928256510482, 0.6725428933796713, 0.7206490286090763, 0.9040920787925146, 0.946221925256375, 0.901153608934848, 0.8862529054719965, 0.5044261431851884, 0.9584112416018697, 0.9116168938533191, 0.9632948049413328, 0.5174303963787432, 0.539675030293061, 0.9501058661192042, 0.6007326223193812, 0.8773217696568403, 0.976876064623956, 0.7885629368148013, 0.5529885019688923, 0.6255036662331821, 0.8086128938298847, 0.9094270473462549, 0.5690175025703094, 0.6977901523185961, 0.8002117603529333, 0.9705866465972526, 0.7810357970139113, 0.5860453273490902, 0.9007923600230889, 0.8381993241797996, 0.5168071169330894, 0.8336859640407193, 0.7201311645088593, 0.7648320114831477, 0.9281847529575721, 0.8307109345418286, 0.909105000364339, 0.859290784325224, 
0.6574331473100168, 0.5143455092953935, 0.7109392020262444, 0.7822099380146375, 0.8018694786308795, 0.6504231088397695, 0.5503077085057297, 0.6146658710313486, 0.7248217726390247, 0.7113754212583717, 0.7333285168470127, 0.9296420533798888, 0.7092036146875796, 0.8548623427110593, 0.9884957594757189, 0.66017040265349, 0.5538939617599619, 0.9078240110213871, 0.6332554843292403, 0.8373139522846597, 0.6913389415298188, 0.739076883819859, 0.5161520756452378, 0.5529293688853709, 0.5813048141728102, 0.7316773906633746, 0.8546816183933157, 0.9886688027162323, 0.8548141278880632, 0.7025340312492581, 0.7501370402401862, 0.824399994525453, 0.7110161686653536, 0.9173218457983414, 0.8940861386002706, 0.8947941581836978, 0.5008144257324039, 0.9139545002036649, 0.5755914971902065, 0.661454930951926, 0.5566891685675657, 0.92128613578492, 0.6402927574690132, 0.8393019720417653, 0.6909735101584056, 0.6445263680457776, 0.5898725619266544, 0.8020347549907376, 0.8497660681478771, 0.764601338894326, 0.6691609459951591, 0.6758844350960023, 0.7106424646488643, 0.5643459291407574, 0.592490580950918, 0.6279683368207046, 0.5993305373596496, 0.6446225691773166, 0.6633497983417052, 0.7894722966329022, 0.9917286018920588, 0.8165535148386154, 0.573811617506149, 0.524920614538786, 0.8740742105589654, 0.8413864545903355, 0.5144495512189307, 0.5601261042958363, 0.5319972051780772, 0.9053001686091575, 0.6596962583257554, 0.8702729754905397, 0.6259449827351603, 0.8223985195014694, 0.7675126109373529, 0.6820555596419806, 0.6531211648038937, 0.8726318041872132, 0.6952617147546319, 0.5823533386183327, 0.9411899101825318, 0.7936127848794488, 0.8221282782835981, 0.8364577181950286, 0.8937235169435944, 0.9665555333129181, 0.532427032331159, 0.6674442082773014, 0.5519704340346911, 0.7856921675927673, 0.7731863246622179, 0.5840456712095214, 0.8977931554101175, 0.5612619915722874, 0.7576019679903843, 0.5395595889894131, 0.8309058176314418, 0.871095201603334, 0.9204840177064066, 0.5138189828708883, 
0.9551121459311971, 0.8049195685235655, 0.959559003870059, 0.7178824932463901, 0.8654772794462269, 0.942203469502217, 0.6098932444736604, 0.6170410027969834, 0.9633509913952504, 0.9036000390585597, 0.842458129659925, 0.5053479228430853, 0.8924557396970325, 0.9568472491787171, 0.7047834641132019, 0.80039234773462, 0.6352214683892842, 0.9775300619308847, 0.8689286541698134, 0.5438233636327985, 0.8586079486729855, 0.7646132844102016, 0.6780864140497006, 0.9935077189591468, 0.7573439535135984, 0.7621635170723553, 0.9481130350194634, 0.8605045873653961, 0.596093083348427, 0.6081310242217013, 0.9054355958059104, 0.588475654245137, 0.9976674881827121, 0.7638845326230261, 0.612958358409772, 0.6984926298118808, 0.8073261917557664, 0.8014430836897595, 0.996654862374653, 0.9853637908699728, 0.8075019367660864, 0.739513641372262, 0.593340669525357, 0.5252851815274876, 0.8538878982000262, 0.6460051451946003, 0.8045268225349425, 0.7487099455936291, 0.8950382234044014, 0.6253568524548552, 0.9649213361980873, 0.7586779906896218, 0.8819149442644025, 0.9267701531277033, 0.5683557920909426, 0.5486194826600297, 0.7680245771044192, 0.5710978791049075, 0.8351226037004109, 0.6176041466561417, 0.7401150499754716, 0.8035680759272954, 0.9333327619522963, 0.529268901912302, 0.8564920798805422, 0.6744871101832443, 0.7154243865864798, 0.5025404265868024, 0.8658558459816417, 0.5402953125914545, 0.9904045796427889, 0.5698405369397582, 0.8423790849781099, 0.6686561898255599, 0.9729889920954644, 0.6510311301284666, 0.5357195866370923, 0.8879950121102802, 0.963604757056892, 0.8029629046757016, 0.9526649145918784, 0.9635183539878931, 0.5825389269103081, 0.7791498741173992, 0.5860768976907487, 0.6480645597837306, 0.5489627842686593, 0.847667571579112, 0.6303597949098513, 0.8454704076903414, 0.5192265297702889, 0.6790175711190853, 0.5002859364845635, 0.9010596430396682, 0.9629351892642266, 0.6012565167803172, 0.7094113014524047, 0.8095900063873727, 0.5225638360939665, 0.5864812378529753, 
0.7019414687987109, 0.900919974469822, 0.9528487081433082, 0.7421431967466539, 0.7952198262255108, 0.5200798136364118, 0.7327141529814345, 0.6578720476213162, 0.5654435593303385, 0.8207314516422188, 0.9512945665180402, 0.5554862832381562, 0.5424151503579611, 0.6411303855857391, 0.8708378962615992, 0.9170765640502352, 0.7592023340145106, 0.7255877006682352, 0.7103543066917855, 0.9671032409531548, 0.9489780505687015, 0.6680889075308347, 0.7911414431315255, 0.5735849626853744, 0.9301721021906866, 0.6703121698058587, 0.87213000974803, 0.7884531767048069, 0.9373454480232152, 0.6803417672436087, 0.5178927622622829, 0.7327124321806318, 0.734825981250777, 0.6552248337317764, 0.945667156698831, 0.8946624680176238, 0.9132626624031277, 0.5300554957250388, 0.9342996495585694, 0.9354313940490537, 0.6073816071379791, 0.7392373120031241, 0.9014442968453957, 0.8542413928149912, 0.5446946791795899, 0.5480014101053915, 0.6784767342108114, 0.6307110646618381, 0.5525438444088975, 0.8040743438735165, 0.6655107175361498, 0.6518946650403372, 0.9226639783217867, 0.9375516007752335, 0.5256075892426182, 0.5758158728895216, 0.5215882190246933, 0.9937380978632739, 0.7953061406913093, 0.9735685121243791, 0.9344831035560708, 0.5913456235056349, 0.6532297091634316, 0.8540641844979103, 0.7536029903966908, 0.9733436247284957, 0.7208370017625696, 0.8283998504588451, 0.5847006855802246, 0.879234136133531, 0.7322187668157178, 0.8457006555501608, 0.7179056270390978, 0.8488457644007341, 0.8854590198028798, 0.7545269027744843, 0.6477787204116452, 0.673780881464239, 0.5642603183693908, 0.8674624356669987, 0.6699202753697991, 0.5026167475466667, 0.8793850480471641, 0.6930136858935904, 0.9788298555177557, 0.8415310331291482, 0.6859326103944168, 0.9943334645780448, 0.5639575461063693, 0.6098772150852901, 0.512072714826915, 0.872188756029891, 0.7108120585350551, 0.6820685850221546, 0.931862498212394, 0.8001149017706455, 0.5325040102975783, 0.7289460903890271, 0.992106810501705, 0.7855460287987073, 
0.7954882074027406, 0.9724207550553996, 0.7543280371717662, 0.6094438674649733, 0.9916057288569733, 0.5044935829714525, 0.7378288049225992, 0.5715680343551812, 0.5956324399435835, 0.8110289634967489, 0.7890034698706654, 0.8830610983301034, 0.5383439184925249, 0.6149245541806085, 0.6570312093547024, 0.8324364382889153, 0.560270807339283, 0.7009567666184946, 0.7685298597703141, 0.6380494856021371, 0.9757000817414397, 0.8742027063095026, 0.8581115858347776, 0.7327524467847046, 0.9682627723710371, 0.9421171787284253, 0.9498812545997373, 0.8121433205756099, 0.7402943891750922, 0.6518519250732016, 0.7414994134640374, 0.5860220948699515, 0.6042256620089361, 0.9980803011851321, 0.5376666584265652, 0.8774200146155485, 0.5572822722365318, 0.9659129798942958, 0.8530742354777403, 0.9440257005103037, 0.8146702119070843, 0.5425807563712093, 0.7525964829217188, 0.5067266496823191, 0.8982738548729531, 0.8640593623038115, 0.6024755839420011, 0.5472539562413374, 0.7310357613127889, 0.7345061141530311, 0.5514432738839602, 0.8222571543952855, 0.5985267806223528, 0.6732918763930583, 0.5126243328989664, 0.602796282627811, 0.9038545848411776, 0.5562992497109132, 0.7283594768493142, 0.9670759303445331, 0.5216873204510885, 0.6241677935340629, 0.7871071485298972, 0.841720312119644, 0.9058795299614906, 0.810518375040237, 0.6297791485875859, 0.9013005167493369, 0.723277442337037, 0.6425209146068098, 0.7604141137233287, 0.5304294712548273, 0.75856439304786, 0.6524766883604416, 0.9639691500910417, 0.8618385452938757, 0.5567898430475899, 0.8381824102683619, 0.7792243049176761, 0.8408723605500423, 0.9424684770208771, 0.807956516881617, 0.5740012003734094, 0.8074108870587978, 0.7889621965497904, 0.7301711435985905, 0.6271688760019598, 0.7378048545617066, 0.7088228581417728, 0.742920402008657, 0.7036521840415506, 0.8274060410637236, 0.8086861857039643, 0.5332580927481602, 0.74115365870658, 0.8559384111588787, 0.899952057459892, 0.7140958216052637, 0.833086475541988, 0.9516759901812106, 
0.9911054820308663, 0.5291949776766393, 0.960874555869114, 0.6935351299678651, 0.7328514708920983, 0.8860194775565537, 0.8895181134238533, 0.7413263223134978, 0.6499565797012212, 0.6731080883878361, 0.5815320299416548, 0.571400315997216, 0.867849094038181, 0.9669198492749282, 0.5325756053658685, 0.9168638011193779, 0.7401761492162675, 0.7471763649367035, 0.7091181549585397, 0.8471896309903535, 0.9560409000387446, 0.8833557822366912, 0.8828767520365803, 0.6314127309325901, 0.7911161202614427, 0.9730997540607103, 0.8889054760589943, 0.5369510319562425, 0.8317484651900302, 0.6007777792604667, 0.5039725482444919, 0.8991921461070849, 0.6793840184287465, 0.7752310242605054, 0.6145210124418097, 0.8168572468844643, 0.9119425150454119, 0.6879030535411663, 0.9310346081871232, 0.595779677193236, 0.5192613422104018, 0.7110934661573849, 0.5709080375000151, 0.9364641540684039, 0.6718330224892378, 0.8303723184388033, 0.7975777299941886, 0.7982935607573688, 0.8743955628722373, 0.8732118688931778, 0.8150494660786098, 0.9624585917440953, 0.7577797651035538, 0.6882277779996213, 0.699923189667233, 0.5190616044159753, 0.9652602928562288, 0.8900749256426256, 0.9494196024779727, 0.6627513407472663, 0.7876589221303525, 0.8484076340520995, 0.9174413327401914, 0.9485058556233936, 0.7814688772509077, 0.663044185019569, 0.824597324495491, 0.5269046526923491, 0.5568788682220607, 0.9641361239791033, 0.641622181307105, 0.6829348630338041, 0.5444225408310093, 0.6033644874796169, 0.8007338825810714, 0.5014431396129797, 0.701479849527594, 0.8476462596511152, 0.7810154834519312, 0.7897013803579702, 0.7998700275421238, 0.8682668149727999, 0.8355594308095591, 0.6262694162723943, 0.8421018490111611, 0.5176977486784391, 0.7106495203472245, 0.7171781612217167, 0.6043502417887314, 0.7256638988405997, 0.7405708612090589, 0.7898958374508326, 0.9834379865857401, 0.8241143937749215, 0.994286703234658, 0.6464920801319386, 0.7090127885331459, 0.8275568888542515, 0.9934175355776296, 0.5853115211382919, 
0.5079279897718372, 0.6144374343827109, 0.6923463424204155, 0.5839217276823967, 0.6901761505227715, 0.8016219066543114, 0.799418360025058, 0.7226090406901493, 0.7585231414246715, 0.9196491312109709, 0.9934552489724622, 0.6597077112160836, 0.964582477964262, 0.7394979322535291, 0.9020581194005117, 0.6650645644693916, 0.6198910086549245, 0.82693982034643, 0.7583803147756455, 0.8763641663105386, 0.7963197256219849, 0.5302979939165374, 0.8504726857907318, 0.9102717707533177, 0.5409788550064714, 0.8296611349255703, 0.9729423159372723, 0.9580459476622217, 0.8026624512823642, 0.9700083453669249, 0.558263776602713, 0.7835469912854751, 0.704240761274481, 0.5855543032210517, 0.6267552285098794, 0.8630917050223377, 0.5132867092349506, 0.9405822827724857, 0.5876536361925431, 0.6785655031621978, 0.7227235303076278, 0.8145186024755074, 0.8937518042037041, 0.8614231843870808, 0.5774819718136622, 0.5254661531451869, 0.6990178624384116, 0.7362328496487699, 0.8570423329145194, 0.6227801206455277, 0.5255950338377362, 0.5129393346497606, 0.5388260432232255, 0.8062493209840043, 0.7538526491672672, 0.9707362910007535, 0.6866577551241418, 0.9314098030297059, 0.5758154344981989, 0.7438560549154873, 0.8934312573080347, 0.9178478088008833, 0.6548190683302479, 0.9974534093096936, 0.7433525208825447, 0.816165042268614, 0.9242934494018713, 0.5146979468157928, 0.5181073064698134, 0.8652069764597382, 0.8062466737145421, 0.8495706986520164, 0.7641522134703127, 0.9792274857782173, 0.8597714432378919, 0.8943467163999336, 0.6130565074562011, 0.9003697851507304, 0.8191735864928249, 0.9168564641409518, 0.9656020557939216, 0.7106585865255974, 0.6169676716588577, 0.6643804287966677, 0.9690239069420783, 0.8487796705520214, 0.7303914647686736, 0.9770095750155487, 0.9184769086434745, 0.5599792975718205, 0.9703232676458453, 0.7824688847837878, 0.6067095276962122, 0.6329120386895555, 0.5251385283920103, 0.7920832308802662, 0.8586985439510537, 0.7073158822122031, 0.5999828918311103, 0.7103185722663545, 
0.5624902184071257, 0.6030405694404699, 0.9878505145947867, 0.7567111183987024, 0.7080670858605848, 0.8787488560983544, 0.7730528953338005, 0.5126478023475725, 0.913669870344701, 0.999918708188424, 0.6439796753824638, 0.9629493997388605, 0.8530886846037006, 0.9765071949527935, 0.7438723268594354, 0.5535275894329275, 0.744998710162694, 0.7554275980743763, 0.5974271074174917, 0.7623752602963731, 0.7356251412423941, 0.9254977635687465, 0.5994495913987649, 0.794882397012932, 0.5400976597785363, 0.7197586142413799, 0.7029026605779529, 0.6997736864662375, 0.6540198322863952, 0.9207143663382353, 0.7126159795133034, 0.8072817314429824, 0.5789717832710446, 0.7468691489238302, 0.9597486578385206, 0.8309388441119931, 0.9943375615290231, 0.7400398995543178, 0.8739745050970784, 0.5018169505980701, 0.8499321425213002, 0.5381469573712689, 0.6119259760101172, 0.9394372818513517, 0.9664829940288728, 0.756428816248109, 0.62229996172531, 0.6467299627265314, 0.6184692529255922, 0.5724447248958674, 0.9504113319493129, 0.6560636707934493, 0.9798337408257287, 0.5925485343149435, 0.9210214847352864, 0.9290637083112595, 0.5185794209023, 0.6595389889503143, 0.7839143397548436, 0.9344650931573959, 0.8107941537447956, 0.8977047906415296, 0.8061546159615478, 0.6422334729338721, 0.8000986368846914, 0.837570266653199, 0.9479697466126247, 0.9769436369121411, 0.7591514659860515, 0.5217794934901705, 0.6631037906089404, 0.7014453278559694, 0.7056375901029361, 0.7038344343752501, 0.7229378357223195, 0.8985101740364086, 0.6093419157157274, 0.7599888985998874, 0.8139368753100757, 0.8843076396783626, 0.9711546153449142, 0.5131260354819585, 0.6553766156309444, 0.7333930133407842, 0.9719615967033552, 0.698593789946456, 0.5234581055395304, 0.619225168708301, 0.6445926153622051, 0.6470665078131208, 0.5579855498360009, 0.9190774067944413, 0.7277594278451477, 0.8544711446206477, 0.9712278434262664, 0.545939071755383, 0.9333308592218919, 0.7867782716922255, 0.7301251346348249, 0.952514657228656, 
0.9523613611783056, 0.6452672002485345, 0.8044399353937917, 0.6138550762125795, 0.5665285575287085, 0.8469881596276206, 0.6954085305129096, 0.5222198081759082, 0.7408868776476987, 0.9576262365238187, 0.8063626506541061, 0.9253587463590214, 0.9543898361869803, 0.8267814969360034, 0.7466903787156184, 0.6337929180048658, 0.8688805024935999, 0.8394161251036802, 0.5282105074497467, 0.9324429290650151, 0.6141618400789814, 0.974476253327296, 0.5906030290210037, 0.644461727507643, 0.884180528648969, 0.9210048887406175, 0.5195156907723093, 0.6996159790368073, 0.5062985126463901, 0.565542536970985, 0.7525659120727259, 0.8870428752357264, 0.7274871212181603, 0.7150558729994017, 0.7115355524511637, 0.8425320417187991, 0.8707635400745819, 0.8421704677831912, 0.7450197373758904, 0.5475605571254497, 0.9772406408403227, 0.6792523721778114, 0.5149403218385082, 0.5095467469544753, 0.65398220543415, 0.5514566508631242, 0.9697800682253603, 0.6708792526830798, 0.5189015299298219, 0.7246172123907746, 0.8273118586226627, 0.8397315710387439, 0.8581979726569777, 0.5642585957281734, 0.8930215548344725, 0.806961656340923, 0.7646812317260139, 0.6988759290851154, 0.6492313752057677, 0.5686666386545727, 0.7717383051237483, 0.8815174827061891, 0.6385456097120372, 0.9933262472779425, 0.9190152714489946, 0.5459438717254639, 0.6557845379795828, 0.5457781804710642, 0.8835665071244001, 0.8201825646156689, 0.5165740572898071, 0.7526957870601729, 0.7921442376453898, 0.9553669155828027, 0.7679951135530914, 0.5207401602988775, 0.8719544026746546, 0.9405527028112612, 0.6405986472272339, 0.8808614231280492, 0.9697745002832792, 0.5686493881866914, 0.9858924954763013, 0.7858382148589275, 0.750461473755045, 0.8192813683801623, 0.5238130356469359, 0.548474461158597, 0.8446346533047451, 0.7856658179813648, 0.9785386461022298, 0.6358673587576635, 0.6483837289148124, 0.5685160830054268, 0.7567524739180483, 0.8462027346105097, 0.6525058748540521, 0.6032819198111774, 0.8578762917208491, 0.8561329419377326, 
0.5375403953962847, 0.8490108251080078, 0.5291565593754215, 0.584176862653752, 0.6915874037293241, 0.9801527323460046, 0.7227781774341981, 0.8068401329768774, 0.6705277351440266, 0.7266884861567869, 0.6416769271588623, 0.6742031294754965, 0.5359402445715485, 0.5050377736672225, 0.9813349413268445, 0.5868868391463163, 0.6033336940883192, 0.8662263817808378, 0.54496082752472, 0.7027708564229318, 0.730546256161734, 0.7502766849234022, 0.9699206394269683, 0.9456798110696089, 0.9980807935744795, 0.9015077791088165, 0.7854239811815253, 0.9977055997713169, 0.5275427567627384, 0.7872186478894312, 0.594035843970514, 0.9136259285599106, 0.8480678598518452, 0.6980773241928999, 0.7531282996536157, 0.8431805824709304, 0.8795131348649945, 0.5111480769628107, 0.5862298930620226, 0.7024153513731018, 0.9931076827101044, 0.5749410092915832, 0.7520412423449729, 0.6327851774984369, 0.6173204609709366, 0.5642466971865178, 0.8826667585530081, 0.5016799794916267, 0.8288560095894906, 0.7291524893613474, 0.9467608512608573, 0.9013399573314715, 0.9100336611907787, 0.8525980534566415, 0.5162044291014531, 0.7739587829533754, 0.5256048106616635, 0.9287200417214254, 0.7873635630755309, 0.7496198946936583, 0.6999027824410239, 0.7514597799148746, 0.9938029958394521, 0.6470667100656019, 0.5742818942872387, 0.9522484916180212, 0.9277595228354928, 0.5718186338384794, 0.9015461275137444, 0.5545338135268492, 0.9735821263214064, 0.8540634871454303, 0.6018289422923582, 0.5694399684473765, 0.92775795376036, 0.989784237393845, 0.756177743223176, 0.9619846840514625, 0.7950335875164432, 0.5330454345564335, 0.8297251483458634, 0.5955972671049539, 0.530669659722911, 0.8020476129309966, 0.7346568838483789, 0.7311313118468726, 0.9186772851202187, 0.5643540708016965, 0.5231991993684042, 0.5976666010080475, 0.7376144986282676, 0.9588868176879566, 0.6348555199309298, 0.7072103733941362, 0.5679864906033341, 0.5810957413246497, 0.8685830219198101, 0.7333995531808541, 0.6730107650444994, 0.6370864243325524, 
0.5149755411418743, 0.7865191768810916, 0.5565926324861752, 0.9675705288778039, 0.6531013790726727, 0.9021643409721294, 0.5886573398868753, 0.872254839227059, 0.7765472519313995, 0.6189227217589996, 0.6515166675126096, 0.8023179559385887, 0.7713287876049438, 0.9346392307857893, 0.9148259647917611, 0.925684485038944, 0.6073439871916215, 0.6042807642994896, 0.5087079954356757, 0.5763032402179011, 0.740440850517291, 0.6463142449062094, 0.9674180823922037, 0.7477562803607278, 0.829830385851762, 0.6495336532713432, 0.9906676368887066, 0.6892883720666934, 0.8060183682298594, 0.9809144203707811, 0.8949563458064839, 0.615416589219284, 0.9741570677861509, 0.9414763236222583, 0.6208922531639671, 0.7861770142501945, 0.5464447616059116, 0.8961359522161199, 0.7580976420038936, 0.6663955323223925, 0.9777511091469184, 0.519838818355672, 0.8873207826885912, 0.8593502222317541, 0.8056522280709026, 0.7626029777193443, 0.9847503577622619, 0.6568733520137819, 0.6762807058369137, 0.776802742806078, 0.8405290029909902, 0.636024734820595, 0.8676205621366939, 0.6395419769905688, 0.5831339968468363, 0.8617030625943154, 0.7491400731488127, 0.7894596433812662, 0.6449057394342517, 0.7340708898177761, 0.8301306680685023, 0.699571299257205, 0.8433843841134683, 0.8585711222866501, 0.836909622960043, 0.6563471542857119, 0.6825844790134186, 0.5322180234359055, 0.7174614075307884, 0.6280622689946939, 0.7687271936412682, 0.6228701067166494, 0.711502168700284, 0.7654664880746473, 0.9230703164240847, 0.8799599320028477, 0.9845245618708536, 0.6765474183979379, 0.7423062471732199, 0.5327672155543122, 0.5502633838541371, 0.873628189614569, 0.6707539466892094, 0.8624279960373109, 0.7305898875841086, 0.6269046293225824, 0.7131627891837911, 0.7031686542506326, 0.5538022124742048, 0.5273086261528364, 0.8002385766659954, 0.9678263545537451, 0.9601132550728375, 0.7378865982042333, 0.6753627413897032, 0.6115348897402084, 0.99353711118999, 0.8903567706080096, 0.5660648694192894, 0.6392911820679104, 
0.5519514865028583, 0.5032544209417813, 0.5058080767985631, 0.7492712811112723, 0.6204104527384638, 0.5772441852196428, 0.9168657740507214, 0.6711472817538271, 0.5569968034672016, 0.9914832604507591, 0.7664916980893017, 0.6983003846134761, 0.6220336529938084, 0.7163522370467093, 0.9086071288872859, 0.9206007621124379, 0.9815561696567198, 0.649987701735794, 0.8486644504371357, 0.5091022712514166, 0.5170113866586117, 0.8125974002529215, 0.9103372995765238, 0.5840513529453529, 0.974663130034603, 0.5447148528664671, 0.7850234799470661, 0.8000090974103926, 0.6186770676963365, 0.9685388826645669, 0.5427125961573154, 0.8108143305295429, 0.6868065820670468, 0.9201464367126359, 0.7805295790088425, 0.7981295656252361, 0.7825198489864529, 0.77136167823369, 0.5411351185982114, 0.7490848069026367, 0.9109960710191649, 0.6954221061482209, 0.876132313964646, 0.7839188535479105, 0.5959065916127222, 0.5899831055866356, 0.6216391538050359, 0.8164872475100551, 0.5276525080441619, 0.6039237663923909, 0.862575503358074, 0.6559233160488537, 0.8958656123095456, 0.8182065388499276, 0.7357542640222553, 0.8269994064343824, 0.5962880899775413, 0.9457610659174791, 0.6935933741215725, 0.5055466827680736, 0.6586174511031547, 0.8425559153201212, 0.8546397564450383, 0.9406903556570017, 0.7455708325410179, 0.5903310171084577, 0.7192930814583582, 0.8240498696426326, 0.7111323193013765, 0.8405592345761767, 0.7933556706308176, 0.9641798353772905, 0.5651267541300569, 0.573692854175055, 0.6180623280652198, 0.7935515255468684, 0.8176465163657425, 0.6275453984807688, 0.80490807851976, 0.8578686849524206, 0.5986439572121993, 0.55931038684341, 0.6075778825653244, 0.8457178426778365, 0.7234331634575818, 0.9508054903140561, 0.9498119019715562, 0.8425917777828119, 0.7742461050967493, 0.8365273689748934, 0.7463650485870141, 0.9834727292610299, 0.6106010796959975, 0.9678572380873057, 0.6058223853710492, 0.8705510293687646, 0.6714905710294655, 0.7953661400520085, 0.8089756433564692, 0.8943386722407773, 
0.6424397651169985, 0.8520094605458445, 0.6334585115398452, 0.6343299703752228, 0.7396958429020348, 0.7668931344019784, 0.7632806778951264, 0.7501548878383469, 0.6337698827979461, 0.5252165285680201, 0.7058042357843524, 0.6810396789742044, 0.9721825906151087, 0.9626393867262901, 0.889827485610013, 0.7072260034489506, 0.9780115765918089, 0.587366632876523, 0.6825849011112375, 0.6433272096689883, 0.9113476885846181, 0.7056442033130179, 0.9630690619813721, 0.756938784827289, 0.9874013432499165, 0.7454150849254997, 0.974164390821676, 0.7263047774834615, 0.7877879239879233, 0.6805929555515541, 0.947796401346259, 0.5726513858244506, 0.6363891927087915, 0.9404317843981893, 0.715321152804641, 0.7650750391907679, 0.9559957688000648, 0.8167712126317009, 0.5874156961558132, 0.9085578930414407, 0.584923259899486, 0.6235291252501154, 0.9133829149687095, 0.7349480498783013, 0.7971030211269032, 0.6702063918959227, 0.9265274896363288, 0.9717341245036597, 0.6129753829400857, 0.6071019604306955, 0.8357802588453267, 0.9986910845148199, 0.5630112332614445, 0.6274575925889969, 0.6906982006150431, 0.808235042870699, 0.8261967873814995, 0.5036665713233555, 0.6536172150670374, 0.55115671064338, 0.8990436548854673, 0.5256096317163559, 0.832644728188767, 0.903187648561397, 0.6993563282549742, 0.7157118350063527, 0.925752246243872, 0.5069545573839858, 0.5353574902846634, 0.9206161558639334, 0.9755952005110591, 0.5678410624660037, 0.7415779544399272, 0.5947700049894925, 0.9665958552000725, 0.54366100801582, 0.8579532738516268, 0.58367786997006, 0.8337782685753433, 0.7009971554626981, 0.6792505371014149, 0.6177472542333593, 0.8911329366141083, 0.5850689718843878, 0.6085805938835258, 0.7071765634168191, 0.6473786028793748, 0.6314677345996935, 0.6408777537568077, 0.7244459489363309, 0.5911796738744892, 0.7985259579515935, 0.8718930640089426, 0.8201030951177775, 0.6902256912509769, 0.5282534318121187, 0.9225735495617371, 0.833829548484009, 0.9455471753495501, 0.9252898750103586, 
0.5984139856460557, 0.6155605137966652, 0.7052337003470504, 0.8098645151335981, 0.94320301620167, 0.6355610223931125, 0.6404968336345396, 0.5045659630434205, 0.5420438670118931, 0.9959378006175407, 0.6411545782540233, 0.8408145431170332, 0.7691090547086514, 0.855767000234607, 0.9248903507333931, 0.8925761018610521, 0.5940014271569838, 0.8462231891302229, 0.5883799583157838, 0.7325672483503616, 0.5777006130191813, 0.9487874731857067, 0.5416918080479723, 0.9803377317391954, 0.5932186772864233, 0.9911022899910384, 0.5225095642917901, 0.8748355786306568, 0.9347664112468328, 0.7671170768480544, 0.7511230630359951, 0.7926115328668601, 0.8011434755831824, 0.7467222965439742, 0.6426497493832706, 0.6129566829781676, 0.8787372384880384, 0.5768515028922611, 0.9667655444049905, 0.9123178521240429, 0.5680971847574203, 0.7475333115241547, 0.6414305613973571, 0.8948625503204752, 0.9556017568043409, 0.9301942242041991, 0.5475397491397354, 0.6304767514373419, 0.988121133768731, 0.5665144162486427, 0.8833129806580517, 0.594975288247557, 0.6333362194315971, 0.6140528177155711, 0.8875606535144782, 0.5517504461306182, 0.8545663093950058, 0.695873110489079, 0.9516770547087208, 0.9222799392232177, 0.6602375459435779, 0.8690730779586152, 0.7117527856601786, 0.7851518045040519, 0.9130485543631865, 0.6905647902572492, 0.7965035515360728, 0.5141507983349579, 0.5590169103592779, 0.5049195777044297, 0.8630657490034381, 0.7387210079032935, 0.6715606781435074, 0.858423738855004, 0.6384357014068033, 0.5907157761998877, 0.7445476104946116, 0.8532177664939484, 0.7184615722019274, 0.6853257044334231, 0.7345146737422521, 0.5172140360683788, 0.7033465125950249, 0.5007533818957697, 0.6130814298362306, 0.5387190009105154, 0.5421705292439344, 0.6317835652559851, 0.9877640471838478, 0.5870847910475099, 0.6516696734399904, 0.8297474617092623, 0.8882217848519547, 0.7883723213597544, 0.9021543930452809, 0.7877273139220583, 0.673863101581449, 0.9555531556416397, 0.6065748000706792, 0.6755250214595314, 
0.5918257499357787, 0.8115770535397091, 0.9007669235883902, 0.6903202485308408, 0.9506956866027716, 0.918307234163708, 0.8036804452827575, 0.7400459582910954, 0.5588660057289621, 0.7604899449093107, 0.8062702729823931, 0.6830616815283569, 0.6286794899869983, 0.6933959832869562, 0.8330725694837178, 0.5693679021573969, 0.9502972631054072, 0.89914944049091, 0.8434275498115091, 0.6121178735892528, 0.7255298827801085, 0.8343301955644362, 0.9721438709705632, 0.8706972822431123, 0.6163216988608904, 0.655399875644882, 0.7982833080564797, 0.7937541281420173, 0.749688167362051, 0.8821104700944893, 0.9564456980717951, 0.8585891585166625, 0.5573059636407333, 0.519034503858953, 0.7729522759547981, 0.8560497054910678, 0.8673816914892809, 0.8725992071746302, 0.7670855905010547, 0.8893910129784266, 0.9777219155279919, 0.6964699498248106, 0.7043095417782133, 0.5727946803172594, 0.9459731046295441, 0.7440043325494348, 0.9345777057148157, 0.8534085723845773, 0.7015423460088273, 0.7983207949432753, 0.6640041993223922, 0.9764924802923738, 0.8818664702177808, 0.8730142113937751, 0.9861947514151108, 0.5725962158723181, 0.7796298220005412, 0.6639611901761944, 0.8855697096499042, 0.6704378660455509, 0.9878727329331081, 0.977527002293068, 0.9714034830362712, 0.8991829189966053, 0.9736859724417996, 0.688642527335108, 0.9979614364915447, 0.5706440319954017, 0.8614862123550622, 0.8446957295876409, 0.9471830934482874, 0.6483903000061115, 0.7106941512375664, 0.8836144114597386, 0.7880472109107439, 0.9531622041226668, 0.6764464531052417, 0.5213211535905957, 0.9621362947442855, 0.732048831438278, 0.884660047907015, 0.5202631187718056, 0.6824875339779084, 0.8484004531828524, 0.8361601205007829, 0.7282176865170877, 0.7424773913834777, 0.938213245883879, 0.9869894750065208, 0.5347434378431777, 0.6591200173878077, 0.5109397572974913, 0.6106578115914241, 0.5027925312909345, 0.777064505761124, 0.6327574956355382, 0.941746806747922, 0.8656732050945184, 0.8141801367698507, 0.8118076520924107, 
0.6187681906507045, 0.8425873979275722, 0.9513368012081322, 0.5242310569379307, 0.9898108642689323, 0.909979319227292, 0.8508640245062338, 0.5133700311369802, 0.719332821717892, 0.6550689103409484, 0.8396774597806604, 0.7510881479373304, 0.8753172900414826, 0.9437719347063167, 0.7278744706003106, 0.6213384169307443, 0.6844171199082414, 0.5743665208829508, 0.6383078943236511, 0.8478110306071993, 0.826162396745944, 0.5101984768386849, 0.6126308070339861, 0.9261591917767587, 0.7283855217463024, 0.8323663837688424, 0.8463730221656547, 0.8175356547290589, 0.9230610460209312, 0.5046086434482555, 0.7186177376590411, 0.5890844163136248, 0.8439280523933059, 0.6921449251158873, 0.6400413411383321, 0.6972802741636357, 0.7611148220709487, 0.8228524555649714, 0.9544484688742333, 0.6309709475332279, 0.8839302388871653, 0.7902581189190847, 0.9583248207119079, 0.6397738619566, 0.7824907837686412, 0.8873083795368126, 0.6461890165433894, 0.8221563477767504, 0.7408830610085715, 0.5530215243206251, 0.9376623748174655, 0.9657797287740992, 0.8088349219191759, 0.7314938308252014, 0.6211945331216485, 0.7437624188995072, 0.6714226259076879, 0.7253198534831313, 0.5877802014054729, 0.757750060782773, 0.6502912169377244, 0.6750404250503503, 0.6322983518465561, 0.6674427647608943, 0.7346185122743434, 0.6513341048292389, 0.7412201151715954, 0.9594290766679617, 0.6676546556562747, 0.8045082652492705, 0.9922498975070639, 0.7807297813926581, 0.7424227134732531, 0.934667164873048, 0.8443354408428665, 0.8396120342141151, 0.5404113473170972, 0.5105534628133782, 0.7515251180779455, 0.7382693045243429, 0.9344050225909937, 0.7392266624997746, 0.9449629279567924, 0.519879736465028, 0.515809303814043, 0.6694155816177072, 0.7067335194593036, 0.5424437996805058, 0.6952919653225159, 0.7151547246339938, 0.8041545797485579, 0.7567808535150029, 0.8296603804725975, 0.5685382123526341, 0.7658350815573991, 0.8992268355126907, 0.6022349266955578, 0.9546857611367081, 0.9880862634889409, 0.6734058523870653, 
0.9696333243761182, 0.7713909983050764, 0.5661739335449703, 0.6295565921494818, 0.6771722793723769, 0.825533874799297, 0.7833985134069192, 0.587957099405501, 0.791968326202732, 0.945893284508747, 0.7880829147377657, 0.9255891479090314, 0.861502205970888, 0.9618754848457224, 0.5122788590783685, 0.5272463850474715, 0.7635671928783161, 0.7911052790554383, 0.9095953153517233, 0.9932590100192707, 0.6417056571022876, 0.933609685603735, 0.669330907762275, 0.9299488789131969, 0.8276591531394367, 0.7674309095811542, 0.9149630752460179, 0.9675729330022993, 0.6893783470378614, 0.5272059937202579, 0.7978420633898413, 0.7165306632553137, 0.5968797987021945, 0.5553764561490638, 0.5549516357146942, 0.8394045763511495, 0.7946291863129015, 0.5468355256947136, 0.9372172228655438, 0.7951317274198018, 0.8357140291290894, 0.7676027820701444, 0.7679126143898248, 0.7740061425992015, 0.5967962825800966, 0.9949641988170355, 0.9906662085135637, 0.7377911358064834, 0.6271134827750835, 0.6515599709519271, 0.9344978654000964, 0.9784029535883694, 0.7867583196632044, 0.9549592340892026, 0.7621428691571058, 0.849051833119072, 0.7354306659477683, 0.5263750900132045, 0.654879674725963, 0.665367877913772, 0.8149724137056145, 0.9497042691702391, 0.6737130488682326, 0.9844954381954247, 0.5948859703312691, 0.959602703503232, 0.7076818677776335, 0.7496246590412597, 0.9890241921714451, 0.9236520988240682, 0.6052923634511564, 0.5660739228853422, 0.5956032072035256, 0.7649903834483839, 0.9833139766861053, 0.9402580861134466, 0.7997461103260314, 0.7874360725818331, 0.5976630238869478, 0.5816224999546779, 0.5323414072417287, 0.9942026079552231, 0.8869467553748134, 0.6956286633464144, 0.9571858650701957, 0.7729678002037065, 0.6860701852983284, 0.620842413209661, 0.8493481981436828, 0.6623161658478056, 0.549378824890326, 0.9983781900991624, 0.6548200907178018, 0.8713222649024717, 0.7252468931022844, 0.6601145935643616, 0.689361729020435, 0.9651338512235167, 0.7416007466501786, 0.5125232397258115, 
0.97396494697536, 0.6689601692909601, 0.8416732299601938, 0.8787259628063264, 0.7662842215643924, 0.5279813205771746, 0.8588195130146132, 0.9313300499070123, 0.9853355008959279, 0.6822596728874775, 0.8531924301928814, 0.6547752596245604, 0.8305920670927194, 0.5966074925559552, 0.6140109846716453, 0.9492112062964276, 0.6531848822349442, 0.8473438125454132, 0.9082972014599877, 0.8712970128702077, 0.9896324667140708, 0.9685873039978985, 0.5116755846412043, 0.6410447949789866, 0.8833401550793627, 0.9287563684739071, 0.9805092681153245, 0.5389071349940572, 0.7118957418964539, 0.7180361814322411, 0.9493926981513543, 0.6499707979713432, 0.6775976654799096, 0.5173281609636753, 0.7359929573990659, 0.6652353172726306, 0.8777113113291319, 0.5409182374802826, 0.8549808176641516, 0.9447268735723326, 0.5133931635804281, 0.9958655033538839, 0.8244566710763147, 0.6543750967073062, 0.5718112141275475, 0.7452079326973338, 0.5317386013589689, 0.7903162820039236, 0.5710392972756247, 0.7631125903477863, 0.6072812488054216, 0.7848818911421602, 0.59505707959764, 0.5159547213665323, 0.820240443378272, 0.9175641583701688, 0.8173616413271905, 0.8255880046964772, 0.8653740892847746, 0.6531100623950427, 0.9751227222463836, 0.9913003903202153, 0.9256064871563654, 0.6509488932636903, 0.8554145467994025, 0.9804641683897758, 0.748250910594633, 0.6582623468530945, 0.8189544042310368, 0.568160673226402, 0.7920505348753366, 0.9148073663373868, 0.9716243878883549, 0.6145598110821899, 0.8541957877269507, 0.6209906334889206, 0.5947268092386847, 0.5694237828855945, 0.8175712006272982, 0.6044164008318396, 0.5569086873810285, 0.9967777130570172, 0.5360505231014863, 0.62128780258382, 0.827435774557901, 0.7383696755786276, 0.8826855912267114, 0.5884562264985531, 0.9462698959045904, 0.5091499977759634, 0.7179351713544457, 0.97116029838411, 0.8677287022722455, 0.7451957583325988, 0.7262711898239054, 0.70244243356845, 0.8509472462440444, 0.7551387606282918, 0.5878745906995309, 0.8628163622323318, 
0.7572732324965594, 0.9382515045039481, 0.5955325447448585, 0.7358004569910537, 0.5020647938897007, 0.7471379268961114, 0.7361210931264659, 0.6902582215022155, 0.7252929647017177, 0.5170629597634017, 0.6176413546407968, 0.9248001044184418, 0.9133398616745327, 0.8453781317900185, 0.5130317693134496, 0.918991901634383, 0.7219709374245697, 0.5405014957349894, 0.7918010123019348, 0.9268402641940823, 0.5998488710512144, 0.5334922078372708, 0.7735504446863721, 0.551138719200235, 0.750310209398845, 0.5835908300973157, 0.9012222203049042, 0.8283390305348182, 0.5015791882039342, 0.8927408129802443, 0.9602211577082627, 0.8524012806776068, 0.7317319097049662, 0.6889779344147263, 0.8118467207352766, 0.9550263458909671, 0.5791904134853487, 0.5288897123145302, 0.9714958322590528, 0.6989029722953104, 0.7648692537111037, 0.9763697200630952, 0.7423837349901403, 0.9832807524885242, 0.9844248077284387, 0.7852738390530735, 0.6707821544011834, 0.5066512142812076, 0.5757673632207636, 0.5698578893014934, 0.7062010907758454, 0.9577541588394863, 0.9050390561508159, 0.5689261491906998, 0.5674755072792318, 0.6407001216547903, 0.6163518364114516, 0.9388744494914778, 0.77899262419674, 0.6052485930601657, 0.856569917286215, 0.5202592526992593, 0.5247302440013556, 0.8809486896490608, 0.5108436959399503, 0.777821057875828, 0.5615231844339984, 0.917479011090887, 0.7122310416154309, 0.7056416668389531, 0.543767119421809, 0.8189983377976505, 0.886994469755521, 0.860377112589684, 0.707350517105379, 0.6871723268042726, 0.6149660689145493, 0.5946349330067251, 0.967554276633743, 0.9480643129855213, 0.694388467466232, 0.8284521679003217, 0.6957864703316914, 0.7620883810911093, 0.7468694837304405, 0.7074403968097327, 0.7723816867849707, 0.9936890237595074, 0.9483269081541742, 0.5553606656578731, 0.9799152230548833, 0.6421944683520071, 0.7962004662133597, 0.7444748545925658, 0.5984196283005223, 0.9576949034916397, 0.6606861684106478, 0.8281609121158005, 0.7639135368370864, 0.7292639297938417, 
0.9361434482485281, 0.9230142144446354, 0.7110174200860992, 0.5187287338128823, 0.88376103704641, 0.6845284824622359, 0.585838199372753, 0.5569849561487312, 0.6660857359767516, 0.9224489222107146, 0.5315897448505826, 0.82949068046183, 0.6517346426280031, 0.887224018334924, 0.5184960987723074, 0.5349395060807042, 0.8070930466460358, 0.7423716753725516, 0.7664352166336168, 0.7060901681494651, 0.5494041113697843, 0.9104274954920842, 0.9363391217110448, 0.9938692397128828, 0.5873776376696898, 0.7343904397778451, 0.6783512258596318, 0.7401055103691121, 0.8432102542895126, 0.835216636761799, 0.5488272183337337, 0.5689042066207364, 0.9302467102245138, 0.8406238271973785, 0.8141122974328535, 0.5047185811609332, 0.6667191293044692, 0.7676961537597873, 0.5814293779285339, 0.5211762367026731, 0.8978739921060555, 0.7637964716462842, 0.9996701327656257, 0.7482864646481605, 0.5814683505953564, 0.8944524740163835, 0.9300122581340386, 0.951320892850327, 0.8184795651222116, 0.9331320066280017, 0.9159136969254686, 0.8550669044299256, 0.6431590597532315, 0.8624897733404665, 0.9970078150638182, 0.8196346249540585, 0.5824997362045099, 0.9335978848099897, 0.6699327909661604, 0.7078601115363486, 0.811331032587868, 0.5561992451066126, 0.5542368954468722, 0.852006915542344, 0.9211534381580031, 0.8730347848429313, 0.6029130518711818, 0.7406826056483083, 0.6956951898299826, 0.6760711803829247, 0.7827058833455223, 0.9728093957086771, 0.7258719033003886, 0.6810507960245664, 0.5106475308992868, 0.9749915053383338, 0.6562716411969776, 0.7837939011895791, 0.8616524841227671, 0.6736024891172883, 0.7394923361147343, 0.6328523688925389, 0.5056689748943327, 0.8515571055308842, 0.983697365074383, 0.8922111335267743, 0.7354543157717671, 0.6192889468381435, 0.5234239890139356, 0.6937459604898086, 0.5041453420520396, 0.6245753004713895, 0.6850735630074253, 0.7924072657326906, 0.5266638591727487, 0.9582449239143274, 0.8433764457247972, 0.7190457560839258, 0.6088239266810928, 0.6387592442630036, 
0.5403362275292576, 0.7172531232122431, 0.6806253020763153, 0.9883402493551129, 0.6537004234947659, 0.8487236502987262, 0.6927825731795145, 0.7302991282612378, 0.9955947551998894, 0.9703661242712394, 0.813988612653624, 0.8380023735867745, 0.678369692000466, 0.8250674043878861, 0.8784189575139485, 0.8652757291026196, 0.7022565026183005, 0.8957314911383156, 0.9048599581500196, 0.5060270849526625, 0.9785893242472247, 0.976335601183387, 0.7846698195025158, 0.8599352980203638, 0.9232583554273079, 0.5270932983289931, 0.7997702234326293, 0.6614308200662173, 0.9080244380539391, 0.9429442284971115, 0.978725186098746, 0.6247490867804166, 0.8473492472266633, 0.7000774499806137, 0.8796686904041409, 0.965403850878565, 0.5879353962341326, 0.7439381156529752, 0.86339305460274, 0.7076339561073769, 0.9089309734426156, 0.8241164734866248, 0.6381555070268727, 0.7267710368079152, 0.7283253526658762, 0.699124418181267, 0.5405862022686985, 0.942962568737819, 0.9051677164180509, 0.6283708128907433, 0.8556253866897645, 0.7891931890745979, 0.7206734403506707, 0.9979761466037349, 0.6094900270989043, 0.8923271217322917, 0.5327539399855896, 0.7213453068832871, 0.9664109926907024, 0.8907413278731279, 0.7550223511231609, 0.6334851671198913, 0.8460593670628929, 0.9374805673458662, 0.9252330892332654, 0.8542382786278837, 0.8951489507313468, 0.6557178137639451, 0.5987742542731741, 0.599428268289369, 0.753951712884225, 0.7004396668058719, 0.8041160273862841, 0.5444133211405577, 0.5324703687203403, 0.7625054979440904, 0.5498988057858383, 0.9217830144425803, 0.5726731398538574, 0.8606196426693193, 0.6386925983858358, 0.881840092227669, 0.5031699611289897, 0.7569810995365548, 0.6700155229245, 0.5983524504231575, 0.7170145888328379, 0.8634520995764516, 0.8069277854417909, 0.7910626996432562, 0.6955286866866383, 0.7774285729510972, 0.6700820998002754, 0.7502142679179526, 0.9406159663404732, 0.9674200647567774, 0.7456629314064694, 0.5352200062993078, 0.5439495030685587, 0.9892348644916379, 
0.5323619923165288, 0.6231826391690451, 0.9671374204424792, 0.8969476845558555, 0.9034267823457647, 0.817016738160679, 0.9759719232206601, 0.5504127148328135, 0.8824012296220627, 0.902491073820954, 0.7029902054300493, 0.9032935171795118, 0.9882215014531425, 0.52505065364266, 0.6150368919154874, 0.8136200531261819, 0.5182639444388634, 0.6927569962336538, 0.9379596251775844, 0.6109069605085446, 0.9345341337549098, 0.6841260724987529, 0.5450160078126683, 0.6964851488517937, 0.9383401258411519, 0.6566097758529471, 0.572267988122926, 0.5708111676277625, 0.8550278609554709, 0.7305087302429221, 0.650333866592846, 0.5221629666947061, 0.8564558228790162, 0.901624689082497, 0.6310768748570621, 0.7920871350479193, 0.5728474414413178, 0.5687979538322416, 0.5496253710654464, 0.7425658509129134, 0.8409147347385486, 0.7227516995004013, 0.5303900417986132, 0.747416254452677, 0.503250322109901, 0.9680119960286357, 0.7354168339608061, 0.7029176967599489, 0.9039132008783681, 0.6432669843691858, 0.9494016972238097, 0.7382668629337623, 0.939885464640732, 0.647300630511157, 0.5538788228194519, 0.9479736160685451, 0.5115954458105253, 0.7394559517975281, 0.8079275477970141, 0.5861958307002769, 0.7429002891680235, 0.5902555070661993, 0.795438675026202, 0.5500592522732718, 0.5726678441277792, 0.5421212615187225, 0.8007524270250612, 0.9633149630900928, 0.9575486439424676, 0.5858831922858345, 0.6727110873400768, 0.6663892501046171, 0.7458316222870764, 0.567256312527662, 0.9276514691912878, 0.8954786222112894, 0.8591746730318648, 0.989740256564736, 0.8716817742298673, 0.5632235933210413, 0.9394291131531951, 0.6300297232751093, 0.9633238872715182, 0.9516675355344046, 0.8948913731033377, 0.5359843809649707, 0.5823324501542979, 0.5519874272131329, 0.754278059040619, 0.5106009510928, 0.5054828285626866, 0.7908634189821557, 0.9470354365503235, 0.9870013718243247, 0.6752089233562393, 0.5494596156166001, 0.524945925873846, 0.6875043974736387, 0.8397533483546602, 0.9318323260046483, 
0.5829502064205077, 0.5249156587171724, 0.5472976817106434, 0.9051928973902466, 0.5200880265660839, 0.7674714890415983, 0.7674533816582105, 0.8147759309261648, 0.8592429575607672, 0.7720702704386437, 0.7109997525162242, 0.8453784282610025, 0.5354320705628635, 0.8771477455831613, 0.6503582926368646, 0.7904556484418994, 0.8360845231083132, 0.9936911614929915, 0.9191394782053255, 0.9997108060138429, 0.756868604891024, 0.9458715190721072, 0.881895001951479, 0.629697880472107, 0.8085785729433281, 0.7463239615469885, 0.5449903530654256, 0.9202825068508769, 0.6581250701263357, 0.7277985798751843, 0.7430779720184768, 0.7721715307006058, 0.9041148867585407, 0.5694660309091276, 0.6599195525744342, 0.5230125987399639, 0.9935798184822237, 0.7845426736140133, 0.7238660181306922, 0.5162510882435711, 0.9414620273772247, 0.9493361031013223, 0.9250219575715827, 0.9436543623329385, 0.5195229337348333, 0.8120281821951856, 0.7504694145595917, 0.6989362404057503, 0.9505113958626975, 0.5546269805628171, 0.8072608878750538, 0.6703806542094584, 0.7835672254497643, 0.7774682633671551, 0.8856391589461003, 0.7640826398544762, 0.8612832564369503, 0.8042416783038377, 0.8095924950373594, 0.7225648176160842, 0.7538459521539426, 0.5022517753226947, 0.959549341236969, 0.8128477425968362, 0.979697206169701, 0.6536401333742646, 0.9903391741142655, 0.8944390257065558, 0.6837219377366518, 0.8843673934207564, 0.6422303997899761, 0.8334514158591377, 0.6819220639256356, 0.8324659880525933, 0.8042237396684129, 0.876749618524544, 0.9003409455761971, 0.7679053722821358, 0.8457169230648305, 0.6771304825563456, 0.7243923354735384, 0.6753598355193806, 0.7962208022316213, 0.96021749277897, 0.6009109057109878, 0.5089732398588172, 0.6437203193458254, 0.8156927456480185, 0.6222738173349209, 0.8001474435140241, 0.9103284013609165, 0.7132094048320691, 0.9740468819783081, 0.5975715574662117, 0.5951739785476684, 0.6900207050133464, 0.5194415234826876, 0.8745277651003185, 0.8697264207665802, 0.9616591568278985, 
0.5913561757247847, 0.6662644256744552, 0.8871277200081713, 0.8430380145715685, 0.5614994789801107, 0.6438031299862801, 0.932614364318129, 0.9103378844632585, 0.7586637165700286, 0.9496535709850195, 0.7077590230197268, 0.9327096622197601, 0.5090205479547105, 0.6875248727770196, 0.9629801886648701, 0.5291042111011951, 0.7318625456163282, 0.8922485579726929, 0.5328924305393162, 0.6360806076585432, 0.9507053115483265, 0.6723443401453597, 0.8748931232675252, 0.5110484330999676, 0.891259033183217, 0.8251635759600849, 0.9356969944058524, 0.683647573122731, 0.710401323380361, 0.8031918462587397, 0.8036664288959889, 0.6791361826848318, 0.6443588166544139, 0.6042577822251713, 0.7001688612213066, 0.846107530337044, 0.8839699105813503, 0.9314405412877006, 0.6027760362771782, 0.8693561131843666, 0.6221043116730598, 0.8607020773643287, 0.8770269726932126, 0.5065318074232659, 0.8135415353568549, 0.8581408861655095, 0.7173179215706429, 0.8872756965301565, 0.7405505953434839, 0.917903331949478, 0.5360663558644425, 0.6330553983367058, 0.8076804910392079, 0.9282424228768693, 0.5066105825253024, 0.8865432689071195, 0.7553905884064944, 0.5334416000264531, 0.7348566051935188, 0.5693801077733233, 0.8686582452547993, 0.926458006729881, 0.6463567151329663, 0.8235507171975225, 0.5081398137517226, 0.9490627675166219, 0.8149748314073859, 0.9308525570697097, 0.5343970082038276, 0.8432990032030294, 0.6388188751930824, 0.819568593366333, 0.9180346520257816, 0.6535278526059163, 0.744274859612418, 0.5792907610153912, 0.9003594191924336, 0.8498826340995325, 0.54260202680788, 0.7239058165455026, 0.56280072630326, 0.8733953835547152, 0.6009920513378229, 0.838210158945921, 0.7732883309903154, 0.534338558764933, 0.9239397985663762, 0.6274148288885035, 0.7436578637732165, 0.6120979928978032, 0.8395997621065037, 0.5727888812840303, 0.8981148825027938, 0.7216909233994175, 0.5440526272691553, 0.6490480383199015, 0.7371733839110077, 0.5309837336193385, 0.911205805941631, 0.6135455319716627, 
0.9455103112462493, 0.5455529895568935, 0.8292019939086602, 0.9542844725654218, 0.823137308187675, 0.917119006373954, 0.554583233979806, 0.7496857961111092, 0.9047608097044433, 0.7707432867722575, 0.8430211752031473, 0.871510791352035, 0.8715546235803515, 0.5878581638227152, 0.5279208791132977, 0.5495564040103198, 0.8633508137324646, 0.7311212644035529, 0.6731746379546557, 0.8303585425564641, 0.7091247009659816, 0.8534716132039443, 0.6952213265734877, 0.9233900162244564, 0.8852047641656889, 0.8995041579582304, 0.5535313943560587, 0.7125398716606071, 0.9519781224001678, 0.6886712490948241, 0.6307673358398552, 0.9676660569353659, 0.9286773414775522, 0.5771912584870778, 0.6303764592870227, 0.6224732135628139, 0.587874989493494, 0.7981572702780241, 0.9856312963958009, 0.8568638245955562, 0.8148285026255537, 0.5972151953725953, 0.6313441506208026, 0.5916099494055579, 0.8987532751178772, 0.8485269265717199, 0.8385657015689096, 0.8238179638702743, 0.5252586534743733, 0.5595108609223918, 0.9533761863424886, 0.6173674713425912, 0.9254518730360977, 0.6002916609180879, 0.9479327355364423, 0.8070911519280003, 0.6573725109932331, 0.5427684368608436, 0.7356729619856339, 0.7925365666487685, 0.9541802262571117, 0.745197422818148, 0.8489192737261049, 0.9822240528906638, 0.5551818165676696, 0.548713142629028, 0.9223964439377499, 0.9947184892930079, 0.8482257974351359, 0.5919697830621584, 0.8414521461193776, 0.7570321568000173, 0.7401319624261391, 0.7317191568406143, 0.5010765895301859, 0.9661902290693248, 0.9090594649974777, 0.7320991234985228, 0.8880670759368288, 0.9854461823680787, 0.6492683545303297, 0.9680572996125916, 0.7088908867021231, 0.7292015374421914, 0.8195211263519313, 0.9819059762380196, 0.8547140668291493, 0.5001919910460377, 0.8452299703514194, 0.5237187655124695, 0.7332753907260237, 0.5002675843333758, 0.5570987886269929, 0.7116748266910675, 0.7119993382038367, 0.7078866364881864, 0.9784338485490467, 0.8890589829520655, 0.9596437182918276, 0.8099399797199245, 
0.7659058440541853, 0.6038234808070502, 0.9633838373117354, 0.6584121825425486, 0.6461498003148242, 0.6318085250864389, 0.523564213400133, 0.8185310890056452, 0.8518225077140272, 0.5858543096502866, 0.6981161915838443, 0.5554301663179835, 0.5596312336792709, 0.936146533399872, 0.9010945669419965, 0.8184317983242588, 0.8417269427129945, 0.8572793073141916, 0.6609640497807869, 0.9019040158131688, 0.6170872003883656, 0.7663917477815194, 0.6083766605082885, 0.9197142761487394, 0.5758475701272423, 0.8359036441751355, 0.8395703136023571, 0.8703754303392663, 0.5715560054464053, 0.9776592118169302, 0.8653008439765006, 0.6074887686014806, 0.9071417673686484, 0.9099396008409713, 0.7435389527548693, 0.8388390287467222, 0.990031987607209, 0.5743361482836808, 0.7483465665941489, 0.8797660631187392, 0.9600809242212449, 0.6736343672556557, 0.6179150874457315, 0.6666461804372749, 0.9596041822273376, 0.726544596023039, 0.7250255641737688, 0.6529358159241512, 0.5778601008987923, 0.677667276408966, 0.967142877290657, 0.8709451119749212, 0.8617239131194382, 0.7076750477644922, 0.8437228728990607, 0.8622839921386125, 0.7899890494940633, 0.9658545976778634, 0.5907622624307489, 0.9379239398926424, 0.9134402701275139, 0.5457025662390356, 0.6930627058900751, 0.5963501773117819, 0.8967859509761852, 0.8915027881314701, 0.7130093115915805, 0.9417614486989895, 0.6705809728758956, 0.9865368667569798, 0.9041934505558886, 0.7570340736124279, 0.9016433049380661, 0.6305229999760478, 0.7646393191990671, 0.7690685766984223, 0.5897278789155327, 0.8189621864513721, 0.8947039595985113, 0.8208988626539566, 0.7659181934539182, 0.8343983186708968, 0.7139124505609165, 0.6294388883843887, 0.6371334959416384, 0.7402056227004399, 0.57811373917259, 0.8716726111366291, 0.6707811007468705, 0.8420063172405982, 0.9379240350197012, 0.6124072295331157, 0.9918505211743132, 0.6013956125116559, 0.5695019869935365, 0.9397094811157003, 0.6923189606202846, 0.5965823734197184, 0.603864922542172, 0.8317230758377578, 
0.6079119544053078, 0.8098986352058803, 0.7728726903226556, 0.856609885576662, 0.534934908524715, 0.7182362017475978, 0.6884504985863846, 0.9770317587493619, 0.9238796965355298, 0.6257545554645731, 0.8595133625692812, 0.8515141736343026, 0.8530681671089508, 0.9883471930158194, 0.8232878990181589, 0.6843627676706499, 0.8256571254008812, 0.9212904528151343, 0.5598344596013302, 0.8271230098228421, 0.697449298733564, 0.7198789199531745, 0.6999913193035551, 0.6950389834955947, 0.7210971679056206, 0.8725332782875135, 0.9906341306886367, 0.8911899609459182, 0.7238772810196441, 0.6713927472585235, 0.7880650925650681, 0.8149563001484751, 0.6367379029286075, 0.536320457790124, 0.5188652559279731, 0.75717380220809, 0.6694084884949354, 0.8500650807365921, 0.7599519735631817, 0.7914465392624968, 0.8339160402305674, 0.8149237764786896, 0.7246270642416208, 0.6557225812971468, 0.6498280658711797, 0.6905656542527914, 0.7274145501910039, 0.9204391112667425, 0.8797999579459693, 0.9809436082942427, 0.739623327697916, 0.9868881761673968, 0.7035869774997601, 0.5994445047263126, 0.9842868391167456, 0.6507109472715845, 0.5233470121967255, 0.9632312610166427, 0.5773762529922528, 0.9685819032554913, 0.9305151070337487, 0.5377372979106767, 0.7523785326563084, 0.7593582163525627, 0.6644237572778873, 0.8105593841521526, 0.5354647455750365, 0.5248822368860631, 0.6344227402918634, 0.9550789140894109, 0.5759588599809551, 0.695955734138771, 0.7013591622277824, 0.9193770380112353, 0.7019502365594561, 0.5800719083178398, 0.8774296876449357, 0.8228356776594488, 0.7963922149784137, 0.7997571250016482, 0.6975569061300322, 0.8631800430230436, 0.7600392104729214, 0.5961224335402444, 0.599174308996147, 0.8201217143510203, 0.6098363947154055, 0.8856755480312839, 0.6257494406434567, 0.6793374340968334, 0.734527271826901, 0.7010263415792775, 0.5417472915192215, 0.722559815817592, 0.6192879843862154, 0.8167111170543475, 0.6178319613801877, 0.9975258513307164, 0.6150837341629469, 0.712294454576075, 
0.6265602956096077, 0.763583415501647, 0.8834434923479741, 0.5481883871129645, 0.566130583405451, 0.9668398207078497, 0.845443014650263, 0.8226685685594096, 0.8297573626045827, 0.8975194412552796, 0.7980579343939219, 0.6724568246734264, 0.5509015035729953, 0.9259492683981364, 0.5038483354414469, 0.689455952880528, 0.9838310738362588, 0.8625707852837226, 0.5972986930690213, 0.8133681491545282, 0.5850368975726663, 0.9874051593473326, 0.7611168199051446, 0.9612134227354303, 0.5457735955221172, 0.6529018990330377, 0.793810485200132, 0.7846638461803441, 0.9753187806079724, 0.8813954915791266, 0.6258602198875896, 0.5957488706179525, 0.5596895336321174, 0.560334031272805, 0.6524024492449543, 0.571469723163627, 0.7362202763680905, 0.6314938641448926, 0.8762976384233954, 0.6801937129218505, 0.572290291748957, 0.9821016854385454, 0.6870307064800496, 0.6040373860657068, 0.9938160490954875, 0.9228351587732821, 0.8777693483837028, 0.5801430368357818, 0.5670360968736152, 0.5018190289595287, 0.9176946286727767, 0.6294299026189129, 0.7877868368703617, 0.5686510507364229, 0.7100717138858371, 0.8284136826976698, 0.7760015046622153, 0.5284264672953611, 0.7094346465899929, 0.851179056088719, 0.6090869900326652, 0.8255506210271901, 0.6254086370234067, 0.9225118343753056, 0.6542977626068898, 0.8621253451299491, 0.7322243386536684, 0.89314459655352, 0.99186685540296, 0.8885671634262813, 0.882636922191278, 0.9211751623317582, 0.7703784786369213, 0.9213989548950225, 0.7506007355406015, 0.5652979165865422, 0.9054538254334973, 0.8280509130053384, 0.6534997565822991, 0.7035844351074101, 0.8786213223919563, 0.5002622680298601, 0.9196491062734166, 0.621969475111839, 0.5900073223367661, 0.9257157991526146, 0.8466652695511823, 0.8057217296607049, 0.9700339202189824, 0.9755305502547167, 0.8907764072380082, 0.829322081019049, 0.9243767822042652, 0.6386501678250907, 0.8083394739827247, 0.8292693507447831, 0.6152639715332868, 0.7834564797830301, 0.6351548398556361, 0.7709067246991628, 
0.5063990288418727, 0.7195109599250681, 0.8692945607383379, 0.8026113136122033, 0.8525507636313043, 0.7191981863880199, 0.698954370781712, 0.7778545903882806, 0.9050291324002481, 0.8252380726004185, 0.6697069078069675, 0.510732185698751, 0.7285912960341605, 0.6686070239780529, 0.9477352596626458, 0.5270396596225668, 0.5523404466387184, 0.9213768701055932, 0.6463435528342942, 0.9896291980595978, 0.754611423847984, 0.784692589326482, 0.676917978305079, 0.9395716412706485, 0.5697769986643388, 0.7358062810072697, 0.84407465721453, 0.5733472974333713, 0.6882053138366622, 0.8092771455275645, 0.5054626226372774, 0.7555974596403034, 0.7871300872319758, 0.922095354717079, 0.7200022579477379, 0.8108543889973014, 0.6208019879947237, 0.8128531951442257, 0.8864422962749832, 0.9949727379650514, 0.8140669641469285, 0.6040181583813033, 0.7654129400932397, 0.9469859647325307, 0.7283047325598728, 0.8663279538201877, 0.914251981126085, 0.7415014292210734, 0.5551218306420135, 0.8499395990316685, 0.8199980574328417, 0.8604539980793853, 0.697632410788736, 0.5717748744174089, 0.7351373766310042, 0.9063292743138935, 0.7906732863299288, 0.8591303276611542, 0.8720484978052556, 0.878330219499994, 0.8444238198743392, 0.9688140891171336, 0.6464594649056853, 0.6353426915757203, 0.6596794785450979, 0.7486480762810894, 0.7278415550134789, 0.7830463210065692, 0.8869415123279181, 0.8989054491843984, 0.9464270367445593, 0.624551981132227, 0.5092169110246041, 0.6147418084226464, 0.548995468313392, 0.8817579427073924, 0.9553657655093459, 0.6209159926895798, 0.8728398620000358, 0.8057402786730579, 0.8707806891925278, 0.6693458319922526, 0.5290444428569943, 0.6404376986117952, 0.5063439385573243, 0.5880530699113931, 0.7854470339620684, 0.6938528624160932, 0.5884418261451894, 0.879470381476402, 0.8380641863691269, 0.8601040444363444, 0.5406621431704167, 0.8235231522669397, 0.6584342598149804, 0.9053349932336556, 0.9010295856056476, 0.9328266491017219, 0.8699294307672004, 0.6654825124083519, 
0.8929244040118703, 0.8987472243757435, 0.7652559388894355, 0.6082240249568855, 0.8679090358491541, 0.8918391365756224, 0.925650798877639, 0.9394088519679353, 0.7980263417392327, 0.9020988617944756, 0.814312922745758, 0.5496699253328989, 0.579034471196649, 0.7066901691702194, 0.5032697112759446, 0.7463659026335441, 0.7366594835952226, 0.7386064455549383, 0.8754143516412421, 0.9300365724980599, 0.9487341281269664, 0.5720187313182905, 0.6898738043153317, 0.8108884538033547, 0.532553766021189, 0.9546739445772359, 0.737660989879929, 0.941231646347479, 0.7838573942002796, 0.6416676294725769, 0.6266124277749099, 0.891447556939266, 0.9318735158964565, 0.8487166086098987, 0.6332776217174128, 0.5619968553228676, 0.6788368342524369, 0.913557050304491, 0.5663368698107717, 0.5684391091538139, 0.7484133393441603, 0.9181868781072946, 0.9963104165935384, 0.5790323949318685, 0.8630475268475988, 0.7101504506514861, 0.5557608305832116, 0.9344406320497918, 0.6994968428747592, 0.7691000741103, 0.932170295897798, 0.6469728258645577, 0.5358167768865234, 0.6774118547408581, 0.8658416341015767, 0.9931766534503468, 0.6342085207833337, 0.9294110152463781, 0.6812170070929462, 0.9445358478877848, 0.77980492911586, 0.6936986391566373, 0.5231642704972512, 0.5242102342332418, 0.6825645426605559, 0.6380560169610061, 0.5203077124396525, 0.7335474693976765, 0.7042575337662922, 0.939075706872667, 0.6532267705779444, 0.5145936861549685, 0.6544793252692663, 0.9848015525744973, 0.9383662967327824, 0.6225797149672913, 0.8314485790966457, 0.6037461637489013, 0.9810135248279988, 0.5883961699458089, 0.85984655069748, 0.967923293847639, 0.7138506274367453, 0.935592575389645, 0.95859102496783, 0.6004434369117359, 0.6414630967858177, 0.8731364850057799, 0.8059444079495045, 0.5071774609788526, 0.8412650657844363, 0.8273731857784714, 0.9179942174203419, 0.5313591028084552, 0.5366937837452341, 0.9759065397930753, 0.8184876097630014, 0.7483668765325533, 0.8129246134924277, 0.8454562265116412, 0.8571586228631605, 
0.5934572513346849, 0.5958944369277269, 0.7902251805166294, 0.946322511730954, 0.9060146358055648, 0.6974267279089563, 0.9373356627270346, 0.6554319032664524, 0.9290127518918248, 0.6720098615894343, 0.5773513255023872, 0.7294858716268897, 0.5699362756507433, 0.6982829460262744, 0.7741045639055804, 0.9686850052346936, 0.8377695022292413, 0.5085833705276828, 0.8718130320811324, 0.8146788281692112, 0.9089570503586755, 0.6933395555435059, 0.7214909942478068, 0.8712887962743158, 0.9328608606660285, 0.7454036404942891, 0.819088161523567, 0.6936381501507735, 0.5755181526256923, 0.5762463298886263, 0.9322943028213184, 0.8921372498613127, 0.9151074050481909, 0.9132004408894685, 0.8829948290583975, 0.6881621065137493, 0.5602301988050824, 0.8098164994251436, 0.5801714810205715, 0.9919085820753104, 0.7304073952146082, 0.8879258507196011, 0.9517345204383761, 0.8712577545588546, 0.7942291626964373, 0.6936814186816014, 0.6252377411277974, 0.8804326579098559, 0.6857656474374987, 0.5773709121372876, 0.9700685409749432, 0.7905126122984611, 0.934451617240993, 0.7299079251844932, 0.9813702470437848, 0.755754068561012, 0.5363775224287579, 0.8899482434738628, 0.8514660030958761, 0.8937644692165438, 0.8332932153552433, 0.936481144309697, 0.907365140412276, 0.6944667069507959, 0.8470491509946576, 0.5196965102959252, 0.6232798602626008, 0.9079543414539137, 0.9792285682626908, 0.8899424940030569, 0.6062049219393977, 0.5116308883022502, 0.6358133334873683, 0.7462572040780625, 0.5779117785976842, 0.6961681482879001, 0.6679779105667063, 0.7459418411874663, 0.8500858059109204, 0.7836777656961976, 0.6412676257181826, 0.7002818854515703, 0.8875136432569686, 0.870743045055081, 0.8862172562087383, 0.7544110182110199, 0.7769079537914624, 0.8744392940331157, 0.6995542575493949, 0.96756872887658, 0.8659980179743741, 0.6813504946259897, 0.945984983409238, 0.884123471742815, 0.8892331933418398, 0.6154926707064852, 0.5314440321261372, 0.7908072917231381, 0.6787708598476818, 0.6479974659125484, 
0.8282887038264117, 0.9385841840092874, 0.9890142643710187, 0.569926860366947, 0.9822368160120056, 0.7532410907150756, 0.7751516957872714, 0.7165484831966095, 0.644602823735265, 0.9965141262161649, 0.5536435091704918, 0.9758477336536578, 0.8748721792816425, 0.6546945913187501, 0.8394746391876223, 0.6406052986749842, 0.8689401936390726, 0.86702731020933, 0.8680804686357925, 0.5936932933808515, 0.6525446993877912, 0.5525202801240094, 0.5665065206217348, 0.9235933385078359, 0.6353642341492539, 0.7722257994536738, 0.8425837195718471, 0.9294472116262056, 0.6240230364478249, 0.7998762873872352, 0.802023000656183, 0.9503572511888485, 0.6377817952503599, 0.5755981242898598, 0.7654824444524457, 0.8458176880843108, 0.7247248836289101, 0.5480671466666179, 0.6765792771850871, 0.6276647332968965, 0.9054208746796488, 0.899505801829026, 0.631874122055393, 0.7137305293474405, 0.9652677798132492, 0.9422810075666137, 0.7196776283796811, 0.7276060045309921, 0.7761078956020362, 0.846040290949595, 0.8180133389317968, 0.6372344973175124, 0.7208088063389941, 0.6620583378619909, 0.8314206206671365, 0.7206236302251262, 0.9808155300743373, 0.6423145425655168, 0.6917933146829285, 0.6088829428473109, 0.628495869596509, 0.9177827352178074, 0.5448016418787337, 0.8789005779416436, 0.5895210020082906, 0.7527498029289019, 0.6502138323528538, 0.5755506030119268, 0.657845784970061, 0.8014372311309008, 0.8605404752192274, 0.8885206651939175, 0.504697436862318, 0.6985036604563393, 0.5741777087842082, 0.9693259697537402, 0.8579287034168623, 0.7230017728679223, 0.7160935055268138, 0.9016806570796924, 0.9301590751985429, 0.986694454978214, 0.6994623798084253, 0.963089373539157, 0.8264828968068575, 0.8159564237150068, 0.8441843742303872, 0.7651718254053662, 0.5316950240730565, 0.8911100979876814, 0.5502682715007543, 0.6221600069223253, 0.6194959919179438, 0.8874207950364286, 0.642469130784818, 0.9529627393760566, 0.6601893549886589, 0.7667154200234481, 0.7528848890321487, 0.9518619371724941, 
0.5053605280044817, 0.8402712045476693, 0.913646153991235, 0.5366872275872266, 0.7838638680690067, 0.5268660350552707, 0.7485119378438744, 0.7046631466380575, 0.6060979829801671, 0.9337944246214678, 0.71972630057489, 0.6225778805609192, 0.6382212725799861, 0.5782568077688623, 0.8945069924760021, 0.8154081587055575, 0.7097990349860837, 0.5173025302805005, 0.8483005918918864, 0.5009217295488378, 0.5523240059916473, 0.6119293628645588, 0.623241408035738, 0.9871597820431952, 0.921854814192075, 0.742867953050623, 0.522306125408798, 0.9754751622018948, 0.5693391689548466, 0.7237274769889732, 0.8071007634484024, 0.7927208174527345, 0.9461433005537063, 0.5829505704623301, 0.7120305454924842, 0.8434731962086107, 0.8805935910995606, 0.9816074894135689, 0.6867184357513225, 0.8772947335997604, 0.6045964678026401, 0.9772138180979945, 0.5316850962814412, 0.8561055872279952, 0.8606083537024988, 0.8981934707172539, 0.73226797816047, 0.8296181697188578, 0.7192468935417173, 0.9303660344985301, 0.7788780115192184, 0.5145129763134144, 0.8372166506589418, 0.8332496199582448, 0.9113834905454494, 0.663849958706458, 0.598461054574396, 0.7144852298345242, 0.5655853882349011, 0.9585249396304119, 0.6702358440534328, 0.8825355389073131, 0.6064727546507727, 0.5090048127601292, 0.9101697489598053, 0.5237493053855733, 0.8913547224897025, 0.9077501651760949, 0.6607417240867757, 0.6525855701321379, 0.6036174959688273, 0.5156862770511335, 0.5670935724315327, 0.9123829051465445, 0.9391932815009898, 0.5600845066419246, 0.6262427230195646, 0.8049067463943487, 0.6310683391134254, 0.5500576025120758, 0.7987117572532403, 0.8777358203958683, 0.9799708356965583, 0.8945173424852917, 0.971553034417135, 0.5448171076528603, 0.7162747898900904, 0.7381164289078417, 0.8182175972028088, 0.8086698850672467, 0.8446317273980307, 0.8954885251363656, 0.5043713661090332, 0.7320774540817886, 0.6934448556664115, 0.6364279029565802, 0.6261720855253188, 0.7313784804398242, 0.6886900815263787, 0.9428258107700518, 
0.6650569184409043, 0.5008197649449992, 0.5285263038841622, 0.713203885084617, 0.924072371213154, 0.5921661459018377, 0.6963285680564366, 0.8163754688343632, 0.8507722533165127, 0.6026483515167427, 0.7270539451007435, 0.924401060030186, 0.6777622573793854, 0.7685052924336775, 0.8823807727853561, 0.5710420829061305, 0.6907618072608626, 0.5367303276060931, 0.8115298892531734, 0.6673427750675556, 0.6039079731669543, 0.9811396285897305, 0.8960056655884889, 0.5419095461419736, 0.986992781154586, 0.5956884472882465, 0.8307485281914686, 0.9573097754580868, 0.8881319261712346, 0.6367708671526264, 0.6426245553546756, 0.6745105360554788, 0.9227928866145358, 0.7392969037252086, 0.934810440852729, 0.5232600928271176, 0.9837136144115068, 0.7104567306829652, 0.827287005890277, 0.8679184720742357, 0.6823820536546974, 0.5514283526922968, 0.8484382157382288, 0.6995511535246917, 0.6046461926121218, 0.898684405568067, 0.8006954809658918, 0.9645056906718927, 0.7061137506150457, 0.7854159045338056, 0.5199596079698164, 0.510783239891246, 0.820738433408156, 0.8046840269813915, 0.6955492316255776, 0.8006886956106973, 0.8226686588252492, 0.5066287535076102, 0.6720479155096375, 0.7687810480328479, 0.758756419383932, 0.5962616296102722, 0.7215994169805456, 0.8299313045843191, 0.697828582659427, 0.747690712707612, 0.803434129796558, 0.9392789809800473, 0.8903635103591012, 0.5688283378366967, 0.6430600706528449, 0.5992143512166934, 0.8173821079978172, 0.8171177692944582, 0.9281412377720009, 0.9453048895672894, 0.5985809964443454, 0.5904804599578916, 0.8070482727496618, 0.9547868535799804, 0.9601070149313242, 0.8073472975369453, 0.8811182115760151, 0.7557946975078904, 0.8932696429963148, 0.5979002771980465, 0.6143306838016174, 0.7003491543831273, 0.9706558677545312, 0.8345052827152688, 0.6043590663509073, 0.7534357295169616, 0.825693150701581, 0.9368469569740768, 0.6765046561859618, 0.8275597962982628, 0.7052759828617714, 0.5113812452602686, 0.859846943001501, 0.7248171550945293, 
0.577943080990916, 0.6944239854758485, 0.803694566802411, 0.8927325479013575, 0.7269005811171001, 0.6372315616993336, 0.7919942279331202, 0.5374270793208369, 0.9336385087021728, 0.8258264662033288, 0.9089269234481692, 0.8444052506270987, 0.5095547344422691, 0.9138323035981581, 0.781552825068054, 0.739357717869308, 0.95617864326298, 0.6661452546442292, 0.8624675424929229, 0.9198550507911829, 0.5060347596378691, 0.9351827253675272, 0.7817429918939869, 0.6669631270383325, 0.5654394251714668, 0.7201706438713744, 0.7342471135866857, 0.8777257827267055, 0.5218291958506822, 0.6370804743572227, 0.7349898108582831, 0.9837625596754564, 0.8087118287710444, 0.729797390589807, 0.7470146648309424, 0.5977955030359361, 0.9282967248061811, 0.5668448210208202, 0.5857799722161341, 0.6446618105702766, 0.8497010908938512, 0.8236345294929699, 0.7393477624538376, 0.6007446657759112, 0.7850265486750115, 0.544238587761736, 0.5538856160067437, 0.8111966912298059, 0.6806086653214072, 0.7226450580728447, 0.6565964028089295, 0.8657338377767876, 0.7957583217992465, 0.6584339148871559, 0.8706894013964432, 0.5110608469390159, 0.7889622931684609, 0.7002080432580475, 0.8828410868409673, 0.9443012350042692, 0.6826085506813105, 0.8116906307663019, 0.7068564468887504, 0.6929013493324293, 0.9215950366493268, 0.9831463025133085, 0.578244612076138, 0.929945016238592, 0.9287771125414088, 0.6104813722053676, 0.7149426366911438, 0.9500669020348942, 0.5225889473156284, 0.7361802623858815, 0.9182563325242139, 0.8012710050239102, 0.9666120786648764, 0.6843271753346931, 0.6997884024488636, 0.7246412705368658, 0.8918468203389381, 0.8378188362632849, 0.829358043551502, 0.9575368660181574, 0.7652349358427792, 0.589630299838575, 0.6497442204229729, 0.6332364052280572, 0.9808047168181049, 0.6181527180242847, 0.6937639596474915, 0.5398481678352309, 0.5408391321937773, 0.707552119444441, 0.5186960243475969, 0.7633731316739147, 0.585850117877341, 0.9291123194591197, 0.5543474421366852, 0.5010102939000097, 
0.7291358740131062, 0.6111487804316194, 0.6350312470594712, 0.7094644103530963, 0.5824088123695201, 0.5058475341383775, 0.954371859054518, 0.8387003981409975, 0.679834269101285, 0.904760706527558, 0.65912678876593, 0.5737417536138796, 0.5494570433747031, 0.7252616281414295, 0.601071313123927, 0.9853960503910091, 0.9159316699022979, 0.5586162143132106, 0.906515682810115, 0.9629405140391729, 0.5671523302725427, 0.5373915646382306, 0.9925161740677075, 0.6136157512625664, 0.6755398340203194, 0.9382692915998012, 0.6459507370951898, 0.9212619631693029, 0.9214167711821708, 0.6625079296306995, 0.8216577849492726, 0.6400175030835098, 0.7255841950639178, 0.8457287431648677, 0.9212334363004152, 0.8619838352955513, 0.9965670496981753, 0.937074305364326, 0.9792565059150025, 0.9296871314849058, 0.7815232490586057, 0.8270093147375583, 0.680413613653737, 0.8541457990119107, 0.6019844643046897, 0.8129059907032314, 0.6766881558627598, 0.6453907131265963, 0.9036867389568202, 0.8220751157803439, 0.8588217446284641, 0.800687727689506, 0.8667867306114545, 0.7992686645536142, 0.7119206037546809, 0.928184834933865, 0.7017662761754369, 0.5855293400783592, 0.5103348385656353, 0.8104774258914332, 0.8097155441155661, 0.960534579534743, 0.5745136849480499, 0.9872047518490732, 0.8526273116330593, 0.9872155789852602, 0.704377764957298, 0.700261001933158, 0.8149935489753289, 0.9223244800802561, 0.9199153517889798, 0.6536025134809145, 0.9607487595775027, 0.9180800343692364, 0.5500480234477544, 0.5690598531524578, 0.5235329768143471, 0.8847420902859047, 0.8459851813539597, 0.5554714738227784, 0.7345974771877974, 0.9470722722451532, 0.8454641187746459, 0.5861008508456326, 0.9165776476617067, 0.9707510112573625, 0.6818010541418731, 0.732862613296853, 0.9352892717007105, 0.7586378464292858, 0.9328317031753112, 0.6494307251278756, 0.8850860627482279, 0.5291896498891226, 0.7980067161813527, 0.5674013723620981, 0.501804614541191, 0.8039564808990349, 0.8396240421298976, 0.9340453060005437, 
0.9613499929453269, 0.887873112924209, 0.580286958536088, 0.8924571136863934, 0.5525422189320257, 0.5817844757018871, 0.6433498755343848, 0.6384144907437633, 0.9788282708483573, 0.5888932108410001, 0.5063216243243223, 0.691714999710124, 0.6203549592478863, 0.5443010214947974, 0.8168494740453971, 0.8173398258167437, 0.5648446658618881, 0.6501550431692101, 0.5598246901600388, 0.8117952376991919, 0.8710087813366333, 0.7744836749878097, 0.9339784955659863, 0.9322028787590706, 0.7602687389282472, 0.5221340374501722, 0.5849142333824029, 0.6018237954597535, 0.6044301351142533, 0.7100916037231741, 0.7278721173575009, 0.5616504723384774, 0.9194888137065569, 0.9980917760244059, 0.5918343242821408, 0.518055477413587, 0.8610131875593439, 0.9391613177219105, 0.7551303376901097, 0.6394010537241595, 0.841719102840123, 0.8815818829703372, 0.8454979778684353, 0.7652497352627352, 0.5432407907533959, 0.9413402260485355, 0.626018320496792, 0.514296291409247, 0.9824020169689043, 0.5826054759919537, 0.5821591959357435, 0.5384641534199771, 0.7150673338962302, 0.7227180658808705, 0.7423114229861261, 0.5079485733651261, 0.7200793361176809, 0.5240874924517691, 0.8827730584609181, 0.7611517670005998, 0.7261639658494563, 0.8244267329582455, 0.7699812734424862, 0.7672603916387513, 0.7502390778799506, 0.5565813434634632, 0.9174250163811881, 0.9361934932764155, 0.8993255505064174, 0.5748655265450534, 0.7850924266128367, 0.8208493139787343, 0.5653320956170416, 0.7778864268189238, 0.5522409994980007, 0.8479900454517142, 0.7869067614543279, 0.9069007903502726, 0.5609002059278448, 0.9372075885568765, 0.9654380121949285, 0.7620013008717111, 0.5344566057633835, 0.6694980491632667, 0.6537052045077048, 0.8648769521135958, 0.818724691125107, 0.7054145079056782, 0.5187392273003997, 0.7171697897883642, 0.7675361719746989, 0.7807295216698074, 0.6416790042496008, 0.5814354436379123, 0.9770344446389401, 0.6910725901174375, 0.7279765206476698, 0.9552200558068759, 0.5448138165474341, 0.5354216316851583, 
0.7433782309602714, 0.8953129151683852, 0.805726686411463, 0.8662358479689999, 0.8264434169329369, 0.5672128268602199, 0.6703545155849401, 0.8492678827042711, 0.6200230947551246, 0.9501488143093397, 0.8049162932356457, 0.5956506436643882, 0.9187919722019803, 0.9490427016389626, 0.5094101145317544, 0.9433309394955347, 0.71901091520548, 0.8670586130389584, 0.5307944818017456, 0.9266713906366744, 0.8172707634060714, 0.6912013252853286, 0.8296209782455062, 0.7851428778045789, 0.6726275490977538, 0.6131090906183514, 0.549138349627065, 0.9298524401993814, 0.9086542909860191, 0.516076700729678, 0.7626594182816999, 0.7682710345909604, 0.5892828817905763, 0.7562344897044883, 0.6705042001383732, 0.8195554939204085, 0.5785726167620542, 0.6587536995889962, 0.824068641568459, 0.6106726739205528, 0.9385686090808221, 0.5381230612053538, 0.7728556332135625, 0.5293773714263048, 0.5473732048007478, 0.7572327410791762, 0.6233348472678646, 0.9544789368187834, 0.7797762208440862, 0.5691995916647099, 0.5560260925886724, 0.8181710165097844, 0.5866468754971805, 0.6231898109570044, 0.8195607193087007, 0.8994095957061641, 0.6138943091178989, 0.5387187209812139, 0.8947273968148315, 0.6779697154105471, 0.5495519725054899, 0.623217732924805, 0.8860108608549999, 0.7336650834838911, 0.7336045806686481, 0.7069769316619007, 0.9031293717768276, 0.7488142795354035, 0.9171159460288453, 0.5719574832635872, 0.6121947492641258, 0.7165662971740419, 0.7540067848815454, 0.9925388388229216, 0.5058925009309874, 0.6744290621363491, 0.8993875655725099, 0.6577543893048403, 0.6994512912452544, 0.8135240546092297, 0.9749484501465884, 0.7125987780451927, 0.5744350566868435, 0.5797320929000025, 0.8054643287562131, 0.6284259267383185, 0.7660724135978039, 0.9702103920515848, 0.6927669858744101, 0.8516292246109416, 0.591387785010644, 0.8169041585967426, 0.6392328229888922, 0.8361528164095303, 0.8924693740316749, 0.7307491960782482, 0.9236214122628688, 0.8303091463382949, 0.7535288381237668, 0.9189938400428435, 
0.7476437508149341, 0.7939391841438821, 0.7080080382380015, 0.6696750432252534, 0.5531033379396197, 0.7922135375873472, 0.8063902601089512, 0.9511481389816467, 0.9603077475090473, 0.8156494660183116, 0.6762565949755422, 0.5635744445363029, 0.6859553166812113, 0.5986727167701833, 0.8622839333166493, 0.6540539557722994, 0.5931668376571586, 0.6614597019296573, 0.5204832650572606, 0.9708100950596977, 0.8934987364141608, 0.9262666211305699, 0.8733315216809402, 0.6263421449552287, 0.547815591473194, 0.7836166452266253, 0.9778183266632787, 0.5015223255795536, 0.8158401343390014, 0.9573364308546022, 0.9944381720328365, 0.9333030115301115, 0.8673581015538037, 0.7399447933339833, 0.5380895222676113, 0.9165368112798763, 0.5238585917867291, 0.6792657837727658, 0.8900301648727886, 0.6807635976443359, 0.9807423966194158, 0.9916443249659614, 0.8171149566717312, 0.9819294189248793, 0.5536478141060576, 0.8784249407223808, 0.7298227927643457, 0.6917012063996453, 0.5194448387194714, 0.9879785222614108, 0.7818221564247472, 0.5215628058996142, 0.7851164614972422, 0.7353513977669848, 0.6725453040247473, 0.9056912418357372, 0.6431844281928047, 0.6094376135232016, 0.5552696913870567, 0.7933005987960309, 0.7809579361551753, 0.6068959551739526, 0.7057622985968406, 0.7909927734528304, 0.8981386018170148, 0.9039473477154409, 0.9080808903322466, 0.5589510948028189, 0.9442747743723179, 0.6317949107355636, 0.7501694509382109, 0.905769893359669, 0.5596322057954084, 0.5997193838304051, 0.9918021220034815, 0.8702740116841037, 0.5982550356754845, 0.8257397982589274, 0.9832839376604717, 0.6553288006595597, 0.7968838633288003, 0.6806822604466776, 0.6180568270504645, 0.6006866713046872, 0.8373563029406477, 0.8723082403793632, 0.8992433624745335, 0.8506934561302821, 0.6346706769547703, 0.5381690588306538, 0.8019060810215302, 0.70756561476592, 0.7406549217162186, 0.9106456021134568, 0.9315555213042366, 0.5838573044509356, 0.5921076772935208, 0.9830229441049899, 0.6091802687981522, 0.8808193115544318, 
0.5121738865240186, 0.7382054941197458, 0.8225807999454189, 0.9662821345788324, 0.783328742184834, 0.6100425583040143, 0.8083319474499165, 0.9478149377467171, 0.5887962422495199, 0.662776658129458, 0.9119949498325581, 0.5646511921686301, 0.6105476026868892, 0.9137849226290309, 0.6676848691492069, 0.9916546904669725, 0.7071153331775715, 0.880356472360275, 0.670473895195578, 0.5874350023164503, 0.6691283857569259, 0.8676692429157332, 0.8355742790634211, 0.790429290674606, 0.5179519296419617, 0.706143486022673, 0.6613124243594701, 0.8513807888709114, 0.9642632994452718, 0.672812093727439, 0.6003552493416822, 0.5193018025130905, 0.9391806250604582, 0.9128595068893728, 0.5635989822132025, 0.601900685011939, 0.642937980191853, 0.9464067424484208, 0.6357986212359624, 0.9605281203088403, 0.6265624236762644, 0.6841186373615582, 0.5706833200137444, 0.5117014391946626, 0.9807465238329962, 0.6925888782807452, 0.8948987793034926, 0.7760488365945022, 0.8006271821533772, 0.7777935685879073, 0.5834647912596274, 0.6390857320007233, 0.6059441691717313, 0.5254953474980226, 0.8695298453180756, 0.9445792774767746, 0.8495389133296328, 0.8430772508504265, 0.8024386019104166, 0.8859539089682905, 0.8617765059585353, 0.9556095227662997, 0.8386130936909042, 0.992854096089629, 0.9781431382421417, 0.9740621158192262, 0.9795839424576225, 0.7316123943316077, 0.637500956534508, 0.5744873077544087, 0.8900003115075965, 0.7588619773158123, 0.7971313305850782, 0.7655277419619404, 0.8080211815930484, 0.879829058569263, 0.7678219209760455, 0.8007457513515248, 0.8440218053999587, 0.9358740157233272, 0.8954157149651811, 0.8898239228759193, 0.62084171348521, 0.8058211257288226, 0.5986780264665617, 0.5316494294950777, 0.6143144776559449, 0.8982369786109139, 0.6412085901179733, 0.9557438387907651, 0.6288504538194435, 0.5238027621059875, 0.9828250774301893, 0.5930770661323328, 0.7869844993027729, 0.9028095140015056, 0.9322755068299916, 0.5645258596307741, 0.671451861400594, 0.6179949192762324, 
0.9378382935172491, 0.5170692087009898, 0.9272370458809852, 0.8532495243050184, 0.7404546548595912, 0.8648470202018697, 0.741351991600657, 0.7099183435720557, 0.8214263220945159, 0.6640872514628752, 0.9295238794285094, 0.8447926357584428, 0.8868957721121788, 0.5581567659353635, 0.6030084887203805, 0.73816140428827, 0.9738786609641676, 0.63667766310822, 0.7633682910604083, 0.6230613315997334, 0.6725817681949531, 0.5365515195277178, 0.6729438769902798, 0.7770421530280182, 0.5692562242277834, 0.7362597722488464, 0.9967598105974301, 0.5867994391298361, 0.7546119077158426, 0.9079048335745924, 0.716839554982337, 0.6932236269609025, 0.6533014925315919, 0.8205611415485256, 0.8270058316272428, 0.873385801767504, 0.9531440376231459, 0.5106003626690268, 0.5756267517745504, 0.9863427966037137, 0.5506769356085484, 0.7254828442509949, 0.5116212727258098, 0.6186230391817611, 0.8586570534517697, 0.9532610130363958, 0.9180573295406202, 0.9946379045695151, 0.6392890641730955, 0.5844113979621328, 0.7566016077021036, 0.9562296618599735, 0.5440891072962203, 0.5892149746313978, 0.8810600488448812, 0.7602006291655237, 0.9373782649150524, 0.7114035591603116, 0.9753169116365457, 0.7280777360098114, 0.6821595569586887, 0.7051750063771887, 0.8853001434547524, 0.7979726242602916, 0.8261955215855878, 0.5330031946551241, 0.7389997000484372, 0.7350256101282162, 0.5098200865449231, 0.7493262376281986, 0.6996428674210832, 0.9749463634553068, 0.6071408647178612, 0.6040630766354176, 0.8691430784397196, 0.954261239809522, 0.9087684233431155, 0.7968022013253276, 0.8863948513046718, 0.5952017814082411, 0.5891800982791624, 0.8279367299686065, 0.649047221115377, 0.8330134767147563, 0.7483006446927826, 0.6622247039878666, 0.5104615312118881, 0.6961062797354989, 0.6044977490851379, 0.5200566651402432, 0.5336001171363012, 0.6481540373072839, 0.7824494454613985, 0.890568614945058, 0.887483563559037, 0.6284119500276946, 0.8264907597872868, 0.6280917370870286, 0.8798332971796903, 0.5531821013795257, 
0.667983380472517, 0.8884915839201823, 0.7686466788408275, 0.5108638052232215, 0.6111279498327642, 0.7558804615949186, 0.6001590743321701, 0.9649507385850997, 0.6998249737671545, 0.9660039881618034, 0.7972753125126381, 0.8822113493199573, 0.7224076703047217, 0.8063506486031418, 0.8295362880836274, 0.8766525688304758, 0.6104269873048054, 0.8780467320312644, 0.8836927263075003, 0.8433099878185824, 0.5859276428038894, 0.898090845145721, 0.9514058657028159, 0.5790627442895877, 0.6988804585023938, 0.5963067805874516, 0.7944495642003595, 0.9765649915349922, 0.7357959887057839, 0.9195920725798357, 0.7951247927944047, 0.8277599535340755, 0.9499986384953341, 0.9007908851012366, 0.9617700965817559, 0.7342141439569473, 0.9032385335052477, 0.9298877256105538, 0.5263179008338112, 0.8206208178736214, 0.6984123326374202, 0.7373826526646536, 0.8253583819214596, 0.7145041105298012, 0.8314144274808882, 0.7641099637675486, 0.555850294728095, 0.5987921045080615, 0.5877123834599385, 0.6102891502306875, 0.6262030053748175, 0.9190591603543772, 0.6753552388618456, 0.6941668201180827, 0.8424638534984955, 0.5087523831702235, 0.9554338504895508, 0.8493980657650739, 0.6878567846076095, 0.6673045300696474, 0.5657361875847682, 0.8798863022320624, 0.9929071065839855, 0.7828741905838716, 0.5314326531118494, 0.9808085897404591, 0.8169039932855212, 0.7931398260097962, 0.513967869949689, 0.9006486510302826, 0.5368508478593494, 0.8671488524528435, 0.7409100122368453, 0.7090615477367499, 0.7423345045994588, 0.6132251820892181, 0.6280419804159559, 0.5842311790950155, 0.7766670776772406, 0.9497395396032123, 0.609587451543607, 0.7854174674020289, 0.9162471203411267, 0.970203869079852, 0.5001077913234084, 0.7471308725317996, 0.5355209510624983, 0.7705109864096444, 0.5689056823756423, 0.9988781564675742, 0.5670518536920683, 0.6741522033778033, 0.7945686264065945, 0.7672732170001308, 0.7257390072134765, 0.92608195988571, 0.5027112617648226, 0.6018089581581929, 0.8219393504341785, 0.6208764652367729, 
0.9612497586270571, 0.9883161088566469, 0.5232401721275393, 0.5104225499023938, 0.5077575588931924, 0.8335767840380756, 0.7959511503091753, 0.5352222836242408, 0.6511147333649907, 0.9567803674046308, 0.9866157959660984, 0.6100309816983829, 0.6421645973394098, 0.5806784490837507, 0.9636815728916079, 0.8480289763547508, 0.7460411464819453, 0.656942029016816, 0.9795514871391415, 0.6080935950465294, 0.6971731105769332, 0.609014603973723, 0.9896267864314611, 0.5667960195388214, 0.6100503426980004, 0.6033996274766369, 0.7884592023409424, 0.9926674677955019, 0.530820588347308, 0.5149821070000781, 0.7461596422300791, 0.8398438100243715, 0.615181371677159, 0.6070164864344438, 0.8876056890276789, 0.6485849621866591, 0.5829167661639467, 0.7017394128365209, 0.7638445597933172, 0.6476979742022393, 0.6484755000637785, 0.8553840599280442, 0.6370734356072674, 0.8953573884461131, 0.5739232892197037, 0.9757461416855726, 0.9893236256035285, 0.7950013080046234, 0.816888932609179, 0.912134601491003, 0.5864819849851688, 0.9220468892364075, 0.8891371909682344, 0.5549494298831095, 0.922101690130134, 0.6774924866530108, 0.6578190083806826, 0.8114093397387124, 0.8632462207017746, 0.5196112444627099, 0.8139056378110145, 0.932823465145063, 0.9506175294112928, 0.7729018655284537, 0.7663535411865742, 0.6013872580811671, 0.6407279222680933, 0.9075664258356486, 0.9936085968064035, 0.9058038459554446, 0.6105426331756547, 0.5852071045610002, 0.6147934566583941, 0.90040133913682, 0.9350872785860924, 0.67397024907991, 0.6261533628476448, 0.9086845641383878, 0.8297941668665715, 0.8338863368028818, 0.9789117758368311, 0.5067277273110224, 0.7025364732797106, 0.5799260062883949, 0.526676196187154, 0.5336837565134911, 0.9400003946973048, 0.7584676783138947, 0.8585385525255365, 0.5809574595682784, 0.573418269998746, 0.603520602801052, 0.8754864643072038, 0.9025398432611202, 0.7034825596645171, 0.7880578166151615, 0.549926115135047, 0.9775060349157777, 0.9926911838378207, 0.8512498196772168, 
0.949504186615385, 0.6220922650463347, 0.671450867340028, 0.904569232306978, 0.5056095909222976, 0.7671893065942637, 0.5894131599014745, 0.952292674336187, 0.9634137038434161, 0.7388973376391076, 0.8770693932369702, 0.6024595147502296, 0.9583324581372875, 0.5862566939963755, 0.9904601759109344, 0.8138276493846204, 0.754440443410336, 0.7264152528503482, 0.6698674721080063, 0.8884350006207464, 0.7777244289964946, 0.5843924819383784, 0.7087505081398215, 0.5116956063344552, 0.8308285303262801, 0.647771147639093, 0.970177336793457, 0.6800351322249889, 0.9391932068703879, 0.5150880777823545, 0.909086875804257, 0.5282664500058354, 0.8652836257436988, 0.7549252173857661, 0.8586864542140499, 0.7532460063496969, 0.7363669565359923, 0.6732446722288692, 0.6033693904537701, 0.935920939748808, 0.5443711548646133, 0.508167632015564, 0.9895305515994335, 0.5367064059373066, 0.8893495992005211, 0.8636900988532821, 0.9366349319339944, 0.8409800324377763, 0.5982949955976071, 0.9260852817503642, 0.7637855207715174, 0.5525908627461599, 0.5747974067981491, 0.7457594718137939, 0.768964114326153, 0.7274350393428974, 0.6533388379848588, 0.9031107399173428, 0.5842183690293414, 0.8065167303017156, 0.5275518394247172, 0.8809785951517772, 0.9265902428374679, 0.8553838026551426, 0.8505861083483168, 0.9322917869847243, 0.8019246652654337, 0.8199613036198672, 0.9157168053995585, 0.8496867045987623, 0.7870528970165103, 0.5649310020383024, 0.7375437008278796, 0.984777520983459, 0.6440158066098562, 0.6333047874595591, 0.9525244084209348, 0.9614248019318086, 0.6859398152404146, 0.862200669060869, 0.9645026869825488, 0.6849663999160116, 0.5591452271800644, 0.7159192248862378, 0.6617733790802944, 0.62650921232497, 0.5522922665821568, 0.8689029307521416, 0.5569186993858304, 0.5005153163285683, 0.7930489776167897, 0.6555441799787574, 0.6881506969232936, 0.6999104660651791, 0.6520685478277732, 0.8846334854754535, 0.8331891851920623, 0.826085227806438, 0.6594839405039785, 0.6457383420610981, 
0.7802823728728494, 0.6429101352403399, 0.9937098138673386, 0.7496120789724958, 0.5027326542547299, 0.7397422628674488, 0.9569007304711585, 0.6442846343125499, 0.6561915422481485, 0.9203871241487018, 0.9732227969571839, 0.9766960716612716, 0.7497333684130689, 0.9188664050539851, 0.9812466588605451, 0.9116764847538489, 0.7198374065496713, 0.5986146934981549, 0.6638660580483118, 0.6656548392213165, 0.7000846088543592, 0.6790366220319749, 0.887441087183533, 0.8428774807347288, 0.9924065066244201, 0.9964653283593661, 0.9431135093499841, 0.938375267740317, 0.9610242740305264, 0.9622654101060852, 0.766800011018764, 0.6651595015140515, 0.5248700255702683, 0.8932086365563614, 0.7418141415529029, 0.5723537930876963, 0.5916266174070797, 0.511249812484809, 0.8183847097213011, 0.9847909364091945, 0.5072155697041554, 0.9312710444664858, 0.5630675614149057, 0.9923650730826388, 0.6187504651290718, 0.7230566802275064, 0.6539261624655841, 0.5768514287571231, 0.5084405243391019, 0.6908400040824, 0.5126799776188905, 0.5254756999848434, 0.9323363457472048, 0.8695482010937409, 0.8530410775881112, 0.8282913844381703, 0.9266284721903882, 0.5756639230307617, 0.9849052855470803, 0.8757777580149644, 0.8058671101565849, 0.646489174518726, 0.8603010664942259, 0.5650932317353499, 0.6063807062972439, 0.7010414217151526, 0.7419738942147474, 0.509973692901851, 0.5909383679432383, 0.5207576531842327, 0.7384856283789779, 0.8232025903388719, 0.7740666236765349, 0.9849587442556228, 0.8154500850054734, 0.6860720564432513, 0.7313584105348412, 0.985356169950417, 0.5014922106636961, 0.5751347595966856, 0.8714996229912217, 0.7231544528416598, 0.7784171920787816, 0.5147248086199119, 0.5237420037568199, 0.7744833206631607, 0.5407634280638618, 0.8805983230739471, 0.779184789534105, 0.9976023267817777, 0.6010858426128272, 0.8766635459822778, 0.6912205329917829, 0.9625043142824259, 0.9325279418902659, 0.8036640610433555, 0.9266994939974034, 0.6986485187560085, 0.5870602119740085, 0.729016920965841, 
0.5681766518140237, 0.6895428983187974, 0.6953653821301782, 0.5800925105987058, 0.7090850594925923, 0.5965642657996979, 0.6866002043306021, 0.9879227377471946, 0.8829015593403515, 0.8897886051328652, 0.988940967703487, 0.682227259118608, 0.6439080578262575, 0.7258804400829034, 0.9483540713972664, 0.9013657005108984, 0.5538720057050319, 0.7240750722452398, 0.5947016621300524, 0.5859054896422802, 0.5028927366474962, 0.8624378725612251, 0.862109904675565, 0.8408581889356344, 0.542347264471424, 0.6384974906243308, 0.9109356791867668, 0.9721697431727869, 0.6946997762198643, 0.6771874706321697, 0.8629534706077234, 0.587157770836181, 0.6278416483055564, 0.755252479607486, 0.8001940216490813, 0.6897838837204391, 0.7682367901360949, 0.7374969714897545, 0.6692761161734813, 0.8307896128994712, 0.5987759068152645, 0.5267400948648391, 0.8209915729944635, 0.9354457588993365, 0.7346336391154463, 0.5208950879936357, 0.5152458580787012, 0.5162693958365707, 0.6007480855526313, 0.7567277918588743, 0.7286123020428749, 0.9722739314877813, 0.9910387210797802, 0.6373178536198866, 0.8795360558094407, 0.5073743383155735, 0.7762255855657092, 0.9200367130626725, 0.5414025048605998, 0.6910788069317417, 0.9764425689056941, 0.9290925303767923, 0.8665151933665369, 0.8713800884969309, 0.5228937941741174, 0.8374958541493844, 0.5209200719489682, 0.8031673156887926, 0.5699565669446751, 0.8173689270038537, 0.9628011941070238, 0.6007234688053097, 0.8194579043497145, 0.6308715393632731, 0.9133753382608989, 0.8841176180925738, 0.7589274497138447, 0.8842235760926966, 0.9245602199466619, 0.844932764728551, 0.5918149474602655, 0.8162803504609809, 0.8502697951364524, 0.5577625103123148, 0.6338482882930698, 0.8475893775011948, 0.8294700438679016, 0.590612940440558, 0.7220519584807517, 0.720998561279346, 0.5364785869288524, 0.6324350402468186, 0.6797461328098662, 0.7150357493549208, 0.8126797759105548, 0.9615183399957616, 0.5664315932841029, 0.7386383132793328, 0.6923160555925448, 0.5812429078200578, 
0.5240781620501637, 0.7158931531471068, 0.9800609309095221, 0.9381751653844411, 0.9569425440815218, 0.6989147390869284, 0.7138616730857947, 0.8609427234090132, 0.6037951748198764, 0.8617018041584844, 0.6877064071662848, 0.5194064315381073, 0.8921922457470632, 0.9251025171684291, 0.8866837775202792, 0.7391840048065557, 0.8507733292699444, 0.9576579724524135, 0.9501159491930236, 0.7006578159450552, 0.731548987778654, 0.5318013466540317, 0.6391027272936769, 0.5484466300337825, 0.8544119762441061, 0.8920871048873813, 0.8691866100528673, 0.8713115509885052, 0.9982734939667268, 0.846050307501981, 0.5900835353013174, 0.6741664986943938, 0.8316978468278676, 0.9958713919192317, 0.7773946242859386, 0.9519630475564447, 0.5743788676940488, 0.6834036029343544, 0.5575384054929318, 0.5916462696953225, 0.6608656674932359, 0.6171211459272401, 0.5633585013839235, 0.546244789847371, 0.5879765854497712, 0.7888252502327275, 0.7361865076391902, 0.6626996949708714, 0.8012277618212593, 0.813295360947399, 0.8986058834569033, 0.629694384057869, 0.533937252184638, 0.9030684133166259, 0.5580448874988113, 0.917382683038561, 0.5393857888605292, 0.6806711320823102, 0.6097303920924761, 0.87543072927103, 0.8208807980936934, 0.5627574429127506, 0.7220667168997277, 0.9788826630470595, 0.8495284977248745, 0.8870089294912713, 0.744670564972224, 0.6670068334186505, 0.9763674995123103, 0.9426513136863462, 0.7982047975703189, 0.9282844262200249, 0.8946757211242724, 0.5545024043520332, 0.854504306560756, 0.7321718083400874, 0.9504929068439297, 0.6704042203903242, 0.8781150453812689, 0.6296619507920407, 0.5963254058286369, 0.7752050906098102, 0.7579872033525521, 0.7897689779029856, 0.8735232646087325, 0.9403768014370739, 0.619792934156036, 0.9958765621100101, 0.7245435609407784, 0.8975078595785051, 0.6181007980109336, 0.7936566693294603, 0.6142982571440926, 0.986306048823961, 0.6887424946038161, 0.6014109440196064, 0.8104625498229934, 0.6332024099396935, 0.6265170945139777, 0.7804634971340563, 
0.9725628249164371, 0.6432146886890275, 0.9327880969422266, 0.614716360791742, 0.6099905889416517, 0.9008243080962429, 0.812755956249082, 0.5982938326050673, 0.6008988971418594, 0.8611805764140765, 0.8958317383333332, 0.7644763648594979, 0.5596466500746113, 0.6154910223781517, 0.7509078644714049, 0.9773966140940897, 0.556922465627413, 0.7496379636539334, 0.8248489646429209, 0.6106481049018282, 0.8857120205077571, 0.5980205793783464, 0.8486535854013247, 0.5967336785361519, 0.7092803751746117, 0.8484195016629228, 0.9554495197630495, 0.7808571637871959, 0.6854793199132342, 0.5023760141103455, 0.8319359636596395, 0.5988864629055742, 0.9070641318963879, 0.760161468803567, 0.8132713645843868, 0.725732999677553, 0.6877025387734634, 0.8756165331397308, 0.7325536154166636, 0.5330136445478102, 0.5494694698726035, 0.9896389987655843, 0.9261230546730739, 0.8270971074029774, 0.9229345532867236, 0.7947193174924847, 0.6051723619499199, 0.6573239977355712, 0.918437165155914, 0.7612218976209681, 0.8769713639800476, 0.9942218936993565, 0.8599864603057725, 0.6678161051554549, 0.96566581374995, 0.797718152170851, 0.9630315615545855, 0.5220830862013065, 0.5436483739239337, 0.9238800349980816, 0.6322734364995453, 0.8422508052110824, 0.8288857980193218, 0.7378726766983306, 0.793627599753062, 0.5818603234883037, 0.5543089596469717, 0.8648697157606338, 0.7172351299406766, 0.7335318218890651, 0.84289214703081, 0.6347663243335435, 0.7354594471651297, 0.6447097530043265, 0.5083084172616474, 0.6715298370993449, 0.8829288233864638, 0.6088077196041822, 0.9407348034672658, 0.5401318951551903, 0.6719096960411514, 0.8914979974267347, 0.6379766584243111, 0.8332111546572223, 0.7086482463168271, 0.894024677054385, 0.861830705584811, 0.6080674737186377, 0.8864450656447257, 0.9926100951267988, 0.9365776074610255, 0.8658521475169587, 0.9951544080181769, 0.9111630295300589, 0.9317136542910582, 0.8629326585124106, 0.6277889543812196, 0.9536307516073739, 0.6781690109766044, 0.6528278873897136, 
0.9659092436861738, 0.7450753670137623, 0.8236393384677134, 0.9766317282408963, 0.8905256976157563, 0.5771623025525665, 0.7305687237360632, 0.8271459845336397, 0.5046171107124436, 0.5084636485886664, 0.9230318624667173, 0.7314006846703984, 0.7508341822131658, 0.8635801423772156, 0.5505696670599269, 0.5198872566059642, 0.6357803225021852, 0.6655032684220468, 0.575315668856279, 0.6130667502376471, 0.6048784281293965, 0.6735812888024371, 0.5393215053363495, 0.6901395072182567, 0.8107703026304062, 0.5780360353444264, 0.8750599482359597, 0.969836968942128, 0.9949978991148865, 0.7394184538190778, 0.876923506923901, 0.8749724271209025, 0.9959833886897901, 0.5946749628935264, 0.8630066853079273, 0.6045541479242412, 0.6093056354171189, 0.5407858991381196, 0.6630883231548675, 0.9150805966745916, 0.6356531336063166, 0.7707351115882846, 0.8451548330576568, 0.5792300907579675, 0.8037397626964997, 0.8452987786236947, 0.5001049371347607, 0.9653707575202087, 0.5104426566089337, 0.9338694798354858, 0.7088954955401505, 0.7306917234830347, 0.583628795912216, 0.8703434394173266, 0.7122168826429262, 0.9351436879204507, 0.5579797421393533, 0.9264755915770924, 0.994895075775918, 0.7867571067920697, 0.5527260568482943, 0.8121109650052776, 0.6506275040672831, 0.7597614788948351, 0.5829524254778413, 0.989638150510799, 0.8446367892717019, 0.9114227781967834, 0.7783937023247071, 0.8996414643191961, 0.7534455517932817, 0.9722682787539343, 0.7898404575168421, 0.7871846652034293, 0.720258413054604, 0.9783681918037124, 0.6219661066835587, 0.7939522309419589, 0.8674315647498054, 0.6401888502423487, 0.7024200806826043, 0.6280884838502879, 0.6137891191618068, 0.9290269162550782, 0.5210218802581632, 0.7505164107381102, 0.8819968754397312, 0.5199607952551983, 0.9455055147855291, 0.5998223144237302, 0.5750758248440155, 0.5277525472348243, 0.5766644971891486, 0.9153661113072205, 0.878172199917149, 0.8680300704264579, 0.8083304085629923, 0.7036312693323872, 0.836247665348488, 0.8387633126278156, 
0.9638961276598534, 0.9610135723484353, 0.6450094218223643, 0.527896925636012, 0.854628976324487, 0.9493328084349651, 0.9836230150693288, 0.7900358522985655, 0.7221307853188477, 0.8533235205128917, 0.6155346354720634, 0.6681443421464842, 0.9499841986282038, 0.9560910189100138, 0.6340850774713702, 0.6138743697618192, 0.5424015123998764, 0.9694548734220323, 0.9718870054300206, 0.923862870024328, 0.9832774911735007, 0.5286528363973282, 0.6441473646805325, 0.8869586791099198, 0.9456319666221495, 0.95748434547416, 0.8625109066402763, 0.9769327243493382, 0.7629591726896536, 0.7127677853954424, 0.6871252648473529, 0.6717682379594729, 0.6962149505060752, 0.9394914095102198, 0.5134293390075373, 0.9793994810637391, 0.6614010008876348, 0.7542254720211434, 0.774661702063538, 0.6693573744214153, 0.7665268167913708, 0.9769479763836038, 0.7313121136708929, 0.6280581244863137, 0.8902778262426377, 0.9347267343131227, 0.7262244579506973, 0.663096997615913, 0.8112436064148648, 0.7264525121580883, 0.7730931913394221, 0.9699925263949603, 0.5342511204209792, 0.7421698609873034, 0.9437572744222591, 0.529309523549663, 0.9459754810765224, 0.944272188130256, 0.8731883214923744, 0.567791578844798, 0.7361467548949141, 0.501119838901559, 0.6819846480815484, 0.8490872370389784, 0.8020605422790728, 0.5475532167819923, 0.6687061017653687, 0.6934772369658809, 0.619454566668237, 0.8231477927531579, 0.7970690204414381, 0.5403531740071976, 0.5731136306224034, 0.8992533821828224, 0.5320860576029478, 0.6092350893606375, 0.5740938046472922, 0.9337773423819651, 0.7428036575780705, 0.8012309380410311, 0.562811675067627, 0.881949390418958, 0.604865011031052, 0.7268149514700009, 0.6743165696057445, 0.5913894176663095, 0.5352581437741399, 0.6021175410394202, 0.5381205350080014, 0.9647204120640933, 0.6035908425853351, 0.6487166851709993, 0.7524578859060108, 0.9583618140653417, 0.8362828452281004, 0.564616377801481, 0.5655328441731231, 0.7547750399834279, 0.88877003072104, 0.5706587765257691, 
0.8881339021443573, 0.9660456482819386, 0.7832747569150654, 0.8374040792445226, 0.9914699660271556, 0.8114178855493205, 0.5639005318460607, 0.8916940058121576, 0.6757067353162709, 0.6877664033238768, 0.5761010193240725, 0.6674993679070078, 0.9395421156103043, 0.5798457837911739, 0.9983987830051435, 0.7071884944746065, 0.8256760383666366, 0.6460438793937959, 0.6714470067679281, 0.9906639738832455, 0.8025248712180177, 0.863141745559328, 0.559325988243583, 0.5245995831358898, 0.9073766792674052, 0.7008705186644293, 0.6078353505644609, 0.5336652533698281, 0.9617224453939192, 0.5986546989815029, 0.6883011375024359, 0.9203714323448899, 0.8390065157324615, 0.6591435171678895, 0.9392665534268031, 0.782791937890579, 0.9135709855629803, 0.9908261210944602, 0.6841900912176888, 0.7762256007857546, 0.9901665332947485, 0.9794746556813303, 0.9464693086495546, 0.5588584023377541, 0.657363113366515, 0.8492017751845609, 0.997548594078151, 0.7605701687248002, 0.9636452815015037, 0.5291694367081701, 0.6628540692264258, 0.8571326467852118, 0.6077733612897798, 0.889001965714332, 0.6966414828652824, 0.8359198462528628, 0.8385069740041668, 0.5290524635290386, 0.9633891947834591, 0.9213116274325535, 0.9430503077300825, 0.6824618787744492, 0.5329359452245918, 0.6832073429066335, 0.6709374551701558, 0.830343045254301, 0.6126331472800396, 0.5158651450428671, 0.6953098232241226, 0.5837688743307526, 0.53873305721889, 0.7120053560526478, 0.5883817199951319, 0.6427011723670903, 0.8410989220683847, 0.8841825761003963, 0.7411728644047801, 0.9110514286462196, 0.574511189946177, 0.6402034734943416, 0.9328854632090081, 0.6219657213353631, 0.9248066161740306, 0.9956689442662165, 0.5031993992698266, 0.5540837064884941, 0.5713108808453706, 0.5409148072497503, 0.7581135798922998, 0.9706703124196017, 0.5946962335463815, 0.9431103666163755, 0.9382339646500049, 0.7667904006618882, 0.9472520225366493, 0.6049130081848644, 0.8643161464721132, 0.7752475076540872, 0.9235330641037861, 0.9602990648508699, 
0.9391513290425617, 0.6107586474146771, 0.959102072029905, 0.8077274973626447, 0.9420182971098144, 0.5846194596402036, 0.9224922141737522, 0.6468536189141743, 0.5107251392834443, 0.9875954866390352, 0.988886464113754, 0.999128524199684, 0.7511508615546936, 0.5827027203069446, 0.7300474276953147, 0.9702254708682891, 0.9938889250266276, 0.9353939616250462, 0.6670280325662341, 0.5693441060847221, 0.6657122775260471, 0.9160901393944223, 0.5015158751130294, 0.8564273779763915, 0.6269930956038059, 0.9788042676660584, 0.8788628288536022, 0.6837860965953013, 0.7268199388075041, 0.9121921636206214, 0.5318593298785799, 0.9478997464160536, 0.8496146602594765, 0.5306632796262026, 0.6529091636619895, 0.754332048121442, 0.7540832590198174, 0.913130736456073, 0.9777680415486281, 0.7933144448035119, 0.5698877479784985, 0.7049560524985807, 0.9660310970129298, 0.8730871405647098, 0.5022359668290797, 0.8361452193742491, 0.8789347828745644, 0.8803637683880439, 0.8303946443774581, 0.9199073667156497, 0.5592118904531931, 0.7788191617926258, 0.7181952124561519, 0.5394584538946618, 0.8792124915731894, 0.5705889666759183, 0.9028454886140406, 0.5269217709162939, 0.7835534820554899, 0.7973912893059065, 0.8350301333734604, 0.6173946642243562, 0.912225947750313, 0.7613676359484377, 0.8666410708070279, 0.7433306322151733, 0.7986153816707307, 0.9418625418560324, 0.7781039617280509, 0.8988599939381468, 0.9609747217738431, 0.6109910351883645, 0.6380297784290965, 0.6055740346750813, 0.925880326310319, 0.7814199412256441, 0.6661429249660642, 0.8646617878694394, 0.7937580890021599, 0.805820435600122, 0.5548969409168725, 0.7646065979143399, 0.635066039687797, 0.5158411710457242, 0.8398644995184579, 0.9122442698412118, 0.5779472593159217, 0.5324577850535944, 0.7387657363609752, 0.9173982058528497, 0.7587292703150919, 0.9204360593112825, 0.7858183093225538, 0.8930991285567282, 0.7344354509736029, 0.8523650883294467, 0.6718650951354465, 0.7040880994441097, 0.6985732923356214, 0.7595406220891134, 
0.7117473924071003, 0.5377095572519146, 0.508496937913319, 0.6088301119738412, 0.5440897735065158, 0.9925308627141868, 0.5727636927083487, 0.6568539541242226, 0.9159509206177514, 0.6709768961943048, 0.886917772275823, 0.5805273399549014, 0.9102522936513038, 0.6457088394408899, 0.7341910322727467, 0.8440365566470693, 0.6712389259342945, 0.6588564310627341, 0.6699261202530095, 0.6356987456437819, 0.5767476484141116, 0.9023056671480393, 0.908809231105673, 0.5771672065969221, 0.6904679210475118, 0.9257998075781795, 0.939693820710982, 0.6255934519740256, 0.8593446193206129, 0.9331695121308919, 0.6158449787900016, 0.7121422437811906, 0.5905986152459961, 0.9235831723050871, 0.8086297800373792, 0.7763046137517042, 0.6144977591436239, 0.9066660342169339, 0.8686774346667927, 0.8020727598929596, 0.8661968485039668, 0.5317893910382823, 0.5055058015989973, 0.8974581431385158, 0.9229003052302918, 0.545230014805189, 0.6862339349920704, 0.8087335196194918, 0.8568632308299993, 0.6446490479223607, 0.9019208330441122, 0.9977441779202174, 0.8053722584889235, 0.8157482842550148, 0.8110724125215207, 0.7775227737896604, 0.7577211504052248, 0.6267527925010816, 0.8024796643678551, 0.5069584559467297, 0.9743648577206413, 0.8235144685342601, 0.5444160634233013, 0.5331905565177204, 0.5627994194325795, 0.6876184328805781, 0.7376105192728734, 0.740598599085917, 0.8780639088287092, 0.5500131431766575, 0.6716226086555825, 0.5360703717640731, 0.657707038427384, 0.9916963783227621, 0.5737691718231144, 0.7161124713950369, 0.6389137018642257, 0.8174411238515575, 0.9127808825045742, 0.8653678758256087, 0.8954960243979542, 0.6419279792815611, 0.6584797238009628, 0.7833864847848482, 0.9760066930766084, 0.7485300347455834, 0.9884731242844362, 0.5568357118993823, 0.868969306200195, 0.7221886406505018, 0.9536402688060611, 0.6701037821191091, 0.8085265052250152, 0.9931941293304539, 0.8910317113453939, 0.7255748710244365, 0.5768869825977552, 0.7622853200999105, 0.7990271843703116, 0.6874753075877574, 
0.7169638039864927, 0.7201170302090745, 0.8768716895308791, 0.7139387670575805, 0.7262156990102899, 0.5078539847443538, 0.6825698770392494, 0.6328570933720068, 0.6388821902675493, 0.6599520626821146, 0.5625116802608923, 0.7324032161449936, 0.5205007486569978, 0.7701403984515194, 0.9570710502878808, 0.8898502784233658, 0.5512356082019727, 0.5064446264287277, 0.8990731552776915, 0.8684046945320483, 0.6975130089875201, 0.7043659318813005, 0.6932812804185962, 0.7998014063846279, 0.7875362087956033, 0.872680931590418, 0.9339676524628638, 0.518986742332526, 0.9586933108860793, 0.8292282264396341, 0.9267108451075718, 0.827000882228903, 0.6829822497336355, 0.8056138192454605, 0.6847452386737695, 0.8857001358518253, 0.9836479557331272, 0.5438371485763621, 0.8041982216052506, 0.7127156741600928, 0.5690128036709612, 0.9335964037478017, 0.7478652929745271, 0.5203927672240416, 0.6190372154980266, 0.7590017578009732, 0.9224158988010482, 0.6434574186058744, 0.7653545440212672, 0.8524415986643952, 0.5176542190821016, 0.7404148670637212, 0.5842084915765599, 0.5180537157111598, 0.572017013718523, 0.5588002379999379, 0.7791839484613869, 0.8165640844276504, 0.5829001142676878, 0.8699364084940925, 0.7204962714522614, 0.6490824760133743, 0.8832645766562235, 0.5271665320660437, 0.9224603687469346, 0.9780094908866144, 0.6595515234645046, 0.9945798089582172, 0.5119227881274448, 0.5714722668282363, 0.8662525262371915, 0.8097715794551488, 0.6784397510998249, 0.9713071265382353, 0.5942636694031491, 0.9425047594712187, 0.8830466277214701, 0.6096415217753139, 0.8655782286754452, 0.8370143694675711, 0.7619027294010448, 0.8531256257351945, 0.989984182723862, 0.6977224847501801, 0.5265197574276371, 0.505271628780527, 0.9827645143159802, 0.8487614797462973, 0.5262107904373793, 0.8208110831262713, 0.6321990718210003, 0.9174513624271482, 0.5443595928032439, 0.9914127768864583, 0.6007701077868521, 0.698501911781718, 0.6055264279968526, 0.8342303285865502, 0.6260236959678184, 0.765857265694642, 
0.8543767624820197, 0.5595244234877692, 0.7652825444586732, 0.5244524766072682, 0.5824947831552241, 0.7505017093481471, 0.6439090949950373, 0.6034146339062469, 0.8911796351334358, 0.5857851191242187, 0.9786854470522823, 0.6734047803245933, 0.9228186593478598, 0.9404570161335738, 0.9926509934032259, 0.5147906734751659, 0.59753913343529, 0.5431690125819801, 0.9755529615905001, 0.6368605278117863, 0.5599955002538435, 0.6071570546465792, 0.7847002472794575, 0.6019007644434755, 0.5197872399342747, 0.7206496847215327, 0.6269358055996912, 0.6628330707632195, 0.8367824452566546, 0.5163093868390709, 0.9482899366411253, 0.6317798931446121, 0.7814465637261034, 0.7739164216653627, 0.9745182140339803, 0.6479981184115878, 0.9290668947397578, 0.7635842499924838, 0.7283369045211436, 0.6563301684366629, 0.5351995038848518, 0.9418424463820771, 0.6158203245620251, 0.7664412687103574, 0.5922880098063183, 0.683827936211759, 0.6407773877496161, 0.7239313410309494, 0.5950984928603514, 0.6706669146256363, 0.7646240572491172, 0.521728944452631, 0.9274743354191357, 0.9024016856985297, 0.8652510688901921, 0.5078873880391597, 0.6706622681211505, 0.615496485853271, 0.605162721166922, 0.8705877234123856, 0.9850222573934676, 0.7259545483547576, 0.9377920066269823, 0.9104869155875125, 0.8451911743868028, 0.9304170304935692, 0.7606084947117299, 0.9994851268251944, 0.9280530436793701, 0.5756704969683611, 0.7302228679140619, 0.7577897191649279, 0.7494328641586734, 0.9058187378099181, 0.5704378096146449, 0.8680861734015508, 0.5902829276173948, 0.7215348436667526, 0.6188155574536989, 0.5093198203881067, 0.9054565941566697, 0.6798526575382959, 0.9687139023337543, 0.719902718911374, 0.5775124308951177, 0.6135097016023643, 0.752184869970272, 0.8482653892565428, 0.9682814846144081, 0.5263322003947282, 0.5513579906173498, 0.7756323731354249, 0.5604320288315894, 0.8990027678603424, 0.576241852014543, 0.8532421452891362, 0.9374811943298395, 0.8722849163158974, 0.8730305618312975, 0.5500099247444153, 
0.5142533537895289, 0.987583733032257, 0.7485904231041761, 0.5304211198968328, 0.7849624838766822, 0.9323613802231593, 0.9724150826861191, 0.5797430343754679, 0.955176170850734, 0.9913995348045034, 0.8508290744198354, 0.7917833854005378, 0.9248189037294872, 0.714665056349017, 0.5487685033567022, 0.8393070770052148, 0.9199879376291988, 0.8323490438680774, 0.933238216289273, 0.9744679686738166, 0.7862234948180713, 0.9560845258293993, 0.528524416656819, 0.6948280747192335, 0.8405835547663963, 0.8048494655611536, 0.8287718261961008, 0.8375139487401841, 0.6469251488020429, 0.5926722291140718, 0.5581991650501407, 0.68225332403746, 0.9580744749468568, 0.8421622600829135, 0.5194900901821802, 0.8336270136780056, 0.9276114160803508, 0.6626584540575065, 0.5403859771942585, 0.8531329187325178, 0.5637544320904068, 0.6987768269898261, 0.9527788123798944, 0.6433991932708388, 0.6877846818183168, 0.9374437405259599, 0.6151533681615675, 0.5704518871297708, 0.8920486473918239, 0.8676105400032761, 0.7786818210984977, 0.9821569418816641, 0.6722396915756141, 0.62153080536385, 0.7445885773533947, 0.6797743754861881, 0.8807381328153923, 0.505461725081236, 0.6159742298304187, 0.8249045783455491, 0.9092092347635592, 0.7140878739212886, 0.9621689041776074, 0.922583959871037, 0.5189620041940193, 0.7183000283616761, 0.5501550381409948, 0.8270557267479248, 0.6362123414580534, 0.9724321774813887, 0.5976866929868196, 0.8317399851682332, 0.5929961986270168, 0.5833087692466963, 0.5969001978750569, 0.8020899188828122, 0.9562329822399562, 0.9083005255596819, 0.604751910509324, 0.6256851481834282, 0.656459338064894, 0.9897222634989712, 0.5019374428947282, 0.7873395634768123, 0.629859576210666, 0.9855009764501927, 0.9512019539168605, 0.8676071009663353, 0.9999774250795208, 0.760640987266507, 0.7195442797195608, 0.9960228838797002, 0.6159353726518506, 0.6553897052772657, 0.9293659934909615, 0.7876343995315683, 0.8163301539375496, 0.5451602089683572, 0.9193306140949336, 0.775662775302384, 
0.6177981117456984, 0.711232714129105, 0.5406245907328964, 0.9850619886430564, 0.7688628114133249, 0.9851013729939717, 0.898890666045394, 0.6698892347587144, 0.7695315039704123, 0.8014326886619136, 0.8026705272480148, 0.9554683387248024, 0.7927906739122454, 0.9513252545616433, 0.5583748304712671, 0.7858327157191018, 0.6796911975357001, 0.6858848137132365, 0.8075843826588807, 0.6644092707123079, 0.6471210466628707, 0.9181738467248485, 0.6406944454003772, 0.9394965532151454, 0.5639762626623858, 0.5118348800817215, 0.7225877951199275, 0.947360420430391, 0.9606037751934948, 0.6444631210385297, 0.8984729784720482, 0.5109984391527393, 0.7978697012501457, 0.5444475161752657, 0.7763952918669244, 0.6345612036325404, 0.8327451238108659, 0.8924912978199484, 0.5039206000282631, 0.6870252181414209, 0.6633430962701836, 0.7217054293661156, 0.5013272760731053, 0.6635517297393985, 0.6272472784505518, 0.7823628975301169, 0.7945818640966602, 0.7433663624833196, 0.8681189185377732, 0.821032913392513, 0.7556619480437063, 0.7423392621394114, 0.7913239269134973, 0.9814804972226786, 0.9907591871892574, 0.6580371999369794, 0.6684985119931101, 0.8318633391470769, 0.9130541422678072, 0.9636641247720316, 0.5594218978815797, 0.8923725856741409, 0.7519039702088162, 0.6655319347594945, 0.6629016905515412, 0.6906329500508592, 0.9345345276957204, 0.5151871703619326, 0.8214006325628158, 0.8039247093564152, 0.9055284484703978, 0.6825634647562393, 0.8138334631722521, 0.7775115448282964, 0.5183461110739542, 0.7559249889788832, 0.8099482139623844, 0.9675187998106507, 0.7507840203114416, 0.5333897353659749, 0.9215866698676763, 0.8579760790284859, 0.5600512876497092, 0.8938952825200117, 0.7961221843515032, 0.5438277439752797, 0.5496852514495915, 0.5333075298328729, 0.6187926978479243, 0.708280464155822, 0.7225052739340487, 0.5957404883641293, 0.635706087634931, 0.5103617889722701, 0.6281723036903588, 0.5805623117354277, 0.5516131592626556, 0.7108379352935396, 0.5658843974024206, 0.8868427699677788, 
0.7041305189519829, 0.5754465214278747, 0.9276959842602779, 0.510843449246049, 0.9976779553141659, 0.6491425548921557, 0.5421772926742012, 0.7537215077841725, 0.9028006410131948, 0.5869239801112562, 0.7417068119420782, 0.7610482751847713, 0.6243523927664607, 0.8811677656242266, 0.7267856265438216, 0.6824964320212823, 0.6623633071646956, 0.8546002060867981, 0.6829797492224212, 0.7330502459938577, 0.56314843869452, 0.6242536187099437, 0.7179663387202555, 0.8928784742094524, 0.5539807956725318, 0.8202636628422189, 0.5934573513472396, 0.5278616550704691, 0.8910985191693424, 0.5166626241597116, 0.7820279304314096, 0.6117405399467083, 0.655517941350608, 0.5990413232681231, 0.7277218031159405, 0.8881811832131111, 0.6253899095673823, 0.5022719689255692, 0.7565879749631125, 0.8287532017758004, 0.5113635109353638, 0.5938116997945808, 0.8074694498880592, 0.7445948788642843, 0.7518312595773975, 0.653475796946003, 0.5773481168936356, 0.7687814588786883, 0.9400237791801073, 0.7291165198628897, 0.9080613101516024, 0.5364166302728399, 0.8293215407425376, 0.5895220002171028, 0.608902662607063, 0.6564632370797786, 0.782063294272668, 0.6162326180713753, 0.8304540120314123, 0.6584459814192004, 0.7624270884362871, 0.9527791991634176, 0.718775138127759, 0.873206804833677, 0.86849384235325, 0.6310796669511953, 0.5441817907135311, 0.6440271974766812, 0.9379708604413772, 0.9270794727647482, 0.7555673692767391, 0.9942280293641161, 0.8255441774307202, 0.8114400448785974, 0.7163285025384967, 0.9307712166283605, 0.8782711070580267, 0.8968004072456401, 0.7549042036801079, 0.6514234565745659, 0.7258464065999979, 0.7882073558605801, 0.8141282259599119, 0.8486698986814731, 0.5350817344502337, 0.6135456239693732, 0.7173055874302068, 0.6688574399512488, 0.6678946748953268, 0.845921998765806, 0.6058458013290207, 0.7824779073989613, 0.6374462476682488, 0.5854171065940504, 0.8390176597016082, 0.6142741713385914, 0.5746760557963155, 0.7630400479143152, 0.6561635137188541, 0.7111629793811978, 
0.8782398885108742, 0.8790379252126461, 0.9157072415689174, 0.5024634439484983, 0.8391036370371256, 0.9790076151891898, 0.9587814784216695, 0.7054486253294767, 0.6668526815472458, 0.5137353822027135, 0.6509597053834795, 0.7283636280795134, 0.5356573075256076, 0.7596088498660876, 0.7042549964579398, 0.8799527419314296, 0.5143585816603526, 50000.0};
int h_B[]= {
3, 5, 7, 9, 11, 13, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 145, 147, 149, 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 251, 253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 287, 289, 291, 293, 295, 297, 299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343, 345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 373, 375, 377, 379, 381, 383, 385, 387, 389, 391, 393, 395, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435, 437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 476, 478, 480, 482, 485, 487, 489, 491, 493, 495, 497, 499, 501, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527, 529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 559, 561, 563, 565, 567, 569, 571, 573, 575, 577, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619, 621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 642, 644, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665, 668, 670, 672, 674, 677, 679, 681, 683, 687, 689, 691, 693, 695, 697, 700, 702, 704, 706, 711, 713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757, 759, 761, 763, 765, 768, 770, 773, 775, 778, 780, 782, 784, 786, 788, 790, 792, 795, 797, 800, 802, 805, 807, 809, 811, 813, 815, 817, 819, 822, 824, 827, 829, 831, 833, 836, 838, 840, 842, 846, 848, 
850, 852, 854, 856, 858, 860, 862, 864, 867, 869, 872, 874, 877, 879, 881, 883, 885, 887, 890, 892, 894, 896, 898, 900, 903, 905, 908, 910, 913, 915, 918, 920, 923, 925, 928, 930, 933, 935, 938, 940, 942, 944, 946, 948, 951, 953, 955, 957, 959, 961, 964, 966, 968, 970, 972, 974, 977, 979, 982, 984, 987, 989, 992, 994, 996, 998, 1001, 1003, 1005, 1007, 1010, 1012, 1016, 1018, 1020, 1022, 1025, 1027, 1030, 1032, 1035, 1037, 1040, 1042, 1045, 1047, 1050, 1052, 1055, 1057, 1060, 1062, 1065, 1067, 1070, 1072, 1075, 1077, 1080, 1082, 1085, 1087, 1090, 1092, 1095, 1097, 1100, 1102, 1104, 1106, 1108, 1110, 1113, 1115, 1118, 1120, 1123, 1125, 1128, 1130, 1133, 1135, 1138, 1140, 1143, 1145, 1148, 1150, 1153, 1155, 1158, 1160, 1163, 1165, 1168, 1170, 1172, 1174, 1176, 1178, 1181, 1183, 1186, 1188, 1191, 1193, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216, 1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1289, 1291, 1293, 1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1326, 1328, 1330, 1332, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1355, 1357, 1359, 1361, 1363, 1365, 1367, 1369, 1371, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1389, 1391, 1393, 1395, 1397, 1399, 1401, 1403, 1405, 1407, 1409, 1411, 1413, 1415, 1418, 1420, 1423, 1425, 1428, 1430, 1433, 1435, 1438, 1440, 1443, 1445, 1448, 1450, 1453, 1455, 1457, 1459, 1461, 1463, 1466, 1468, 1471, 1473, 1475, 1477, 1479, 1481, 1483, 1485, 1488, 1490, 1492, 1494, 1497, 1499, 1501, 1503, 1506, 1508, 1512, 1514, 1516, 1518, 1520, 1522, 1525, 1527, 1530, 1532, 1537, 1539, 1541, 1543, 1545, 1547, 1550, 1552, 1555, 1557, 1560, 1562, 1565, 1567, 1569, 1571, 1573, 1575, 1578, 1580, 1583, 1585, 1588, 1590, 1593, 1595, 1598, 1600, 1603, 1605, 1608, 1610, 1613, 1615, 1618, 1620, 1623, 1625, 1628, 1630, 
1633, 1635, 1637, 1639, 1641, 1643, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1661, 1663, 1665, 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1693, 1695, 1697, 1699, 1701, 1703, 1705, 1707, 1709, 1711, 1713, 1715, 1717, 1719, 1721, 1723, 1725, 1727, 1729, 1731, 1733, 1735, 1738, 1740, 1742, 1744, 1747, 1749, 1752, 1754, 1757, 1759, 1762, 1764, 1767, 1769, 1772, 1774, 1777, 1779, 1781, 1783, 1785, 1787, 1790, 1792, 1795, 1797, 1800, 1802, 1805, 1807, 1810, 1812, 1815, 1817, 1820, 1822, 1825, 1827, 1830, 1832, 1835, 1837, 1840, 1842, 1845, 1847, 1850, 1852, 1855, 1857, 1860, 1862, 1865, 1867, 1869, 1871, 1873, 1875, 1878, 1880, 1883, 1885, 1888, 1890, 1893, 1895, 1898, 1900, 1903, 1905, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938, 1940, 1943, 1945, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976, 1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014, 2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052, 2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090, 2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2109, 2111, 2113, 2115, 2117, 2119, 2121, 2123, 2125, 2127, 2129, 2131, 2133, 2135, 2137, 2139, 2141, 2143, 2145, 2147, 2149, 2151, 2153, 2155, 2157, 2159, 2161, 2163, 2165, 2167, 2169, 2171, 2174, 2176, 2178, 2180, 2182, 2184, 2186, 2188, 2190, 2192, 2194, 2196, 2199, 2201, 2203, 2205, 2208, 2210, 2212, 2214, 2217, 2219, 2221, 2223, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242, 2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280, 2282, 2284, 2287, 2289, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318, 2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 
2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356, 2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394, 2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432, 2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470, 2473, 2475, 2477, 2479, 2481, 2483, 2485, 2487, 2489, 2491, 2493, 2495, 2497, 2499, 2501, 2503, 2505, 2507, 2509, 2511, 2513, 2515, 2517, 2519, 2521, 2523, 2525, 2527, 2529, 2531, 2533, 2535, 2537, 2539, 2541, 2543, 2545, 2547, 2549, 2551, 2553, 2555, 2557, 2559, 2562, 2564, 2566, 2568, 2571, 2573, 2575, 2577, 2579, 2581, 2583, 2585, 2587, 2589, 2592, 2594, 2596, 2598, 2601, 2603, 2605, 2607, 2610, 2612, 2615, 2617, 2621, 2623, 2625, 2627, 2629, 2631, 2633, 2635, 2637, 2639, 2641, 2643, 2645, 2647, 2649, 2651, 2653, 2655, 2658, 2660, 2662, 2664, 2667, 2669, 2672, 2674, 2677, 2679, 2682, 2684, 2687, 2689, 2691, 2693, 2695, 2697, 2700, 2702, 2705, 2707, 2710, 2712, 2715, 2717, 2720, 2722, 2725, 2727, 2729, 2731, 2733, 2735, 2738, 2740, 2743, 2745, 2748, 2750, 2753, 2755, 2758, 2760, 2763, 2765, 2768, 2770, 2773, 2775, 2778, 2780, 2783, 2785, 2788, 2790, 2793, 2795, 2798, 2800, 2803, 2805, 2808, 2810, 2813, 2815, 2817, 2819, 2821, 2823, 2826, 2828, 2831, 2833, 2836, 2838, 2841, 2843, 2846, 2848, 2851, 2853, 2856, 2858, 2861, 2863, 2865, 2867, 2869, 2871, 2874, 2876, 2879, 2881, 2884, 2886, 2889, 2891, 2894, 2896, 2899, 2901, 2904, 2906, 2909, 2911, 2914, 2916, 2919, 2921, 2924, 2926, 2929, 2931, 2934, 2936, 2939, 2941, 2944, 2946, 2949, 2951, 2954, 2956, 2959, 2961, 2964, 2966, 2969, 2971, 2974, 2976, 2979, 2981, 2984, 2986, 2989, 2991, 2994, 2996, 2999, 3001, 3004, 3006, 3009, 3011, 3013, 3015, 3017, 3019, 3022, 3024, 3027, 3029, 3032, 3034, 3037, 3039, 3041, 3043, 3046, 3048, 3051, 3053, 3059, 3061, 3063, 3065, 3067, 3069, 3072, 3074, 3077, 3079, 3082, 3084, 3087, 3089, 3092, 3094, 
3097, 3099, 3101, 3103, 3105, 3107, 3109, 3111, 3113, 3115, 3117, 3119, 3122, 3124, 3127, 3129, 3132, 3134, 3137, 3139, 3142, 3144, 3147, 3149, 3155, 3157, 3159, 3161, 3163, 3165, 3168, 3170, 3172, 3174, 3176, 3178, 3181, 3183, 3186, 3188, 3194, 3196, 3199, 3201, 3204, 3206, 3208, 3210, 3213, 3215, 3217, 3219, 3224, 3226, 3228, 3230, 3232, 3234, 3236, 3238, 3241, 3243, 3245, 3247, 3249, 3251, 3254, 3256, 3259, 3261, 3264, 3266, 3269, 3271, 3273, 3275, 3277, 3279, 3281, 3283, 3285, 3287, 3290, 3292, 3295, 3297, 3300, 3302, 3305, 3307, 3310, 3312, 3315, 3317, 3320, 3322, 3325, 3327, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344, 3346, 3348, 3351, 3353, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382, 3385, 3387, 3389, 3391, 3393, 3395, 3398, 3400, 3403, 3405, 3407, 3409, 3411, 3413, 3416, 3418, 3421, 3423, 3426, 3428, 3431, 3433, 3436, 3438, 3441, 3443, 3446, 3448, 3451, 3453, 3456, 3458, 3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3497, 3499, 3501, 3503, 3505, 3507, 3510, 3512, 3515, 3517, 3519, 3521, 3523, 3525, 3528, 3530, 3533, 3535, 3538, 3540, 3543, 3545, 3547, 3549, 3551, 3553, 3556, 3558, 3561, 3563, 3566, 3568, 3571, 3573, 3576, 3578, 3582, 3584, 3586, 3588, 3593, 3595, 3598, 3600, 3603, 3605, 3608, 3610, 3613, 3615, 3617, 3619, 3622, 3624, 3627, 3629, 3641, 3643, 3646, 3648, 3651, 3653, 3656, 3658, 3661, 3663, 3665, 3667, 3669, 3671, 3674, 3676, 3679, 3681, 3684, 3686, 3689, 3691, 3694, 3696, 3699, 3701, 3703, 3705, 3708, 3710, 3713, 3715, 3721, 3723, 3725, 3727, 3729, 3731, 3734, 3736, 3739, 3741, 3744, 3746, 3749, 3751, 3753, 3755, 3757, 3759, 3762, 3764, 3767, 3769, 3772, 3774, 3777, 3779, 3781, 3783, 3785, 3787, 3790, 3792, 3795, 3797, 3800, 3802, 3805, 3807, 3810, 3812, 3815, 3817, 3820, 3822, 3825, 3827, 3829, 3831, 3834, 3836, 3839, 3841, 3847, 3849, 3852, 3854, 3857, 3859, 3862, 3864, 3867, 3869, 3872, 3874, 3877, 3879, 3882, 3884, 3887, 3889, 3891, 
3893, 3895, 3897, 3900, 3902, 3905, 3907, 3910, 3912, 3915, 3917, 3920, 3922, 3925, 3927, 3930, 3932, 3935, 3937, 3939, 3941, 3943, 3945, 3948, 3950, 3953, 3955, 3958, 3960, 3963, 3965, 3967, 3969, 3971, 3973, 3975, 3977, 3980, 3982, 3984, 3986, 3990, 3992, 3995, 3997, 4000, 4002, 4005, 4007, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028, 4030, 4032, 4034, 4036, 4039, 4041, 4044, 4046, 4049, 4051, 4054, 4056, 4059, 4061, 4064, 4066, 4069, 4071, 4074, 4076, 4079, 4081, 4084, 4086, 4089, 4091, 4093, 4095, 4097, 4099, 4102, 4104, 4107, 4109, 4112, 4114, 4117, 4119, 4122, 4124, 4127, 4129, 4132, 4134, 4137, 4139, 4142, 4144, 4147, 4149, 4152, 4154, 4157, 4159, 4161, 4163, 4165, 4167, 4170, 4172, 4175, 4177, 4180, 4182, 4185, 4187, 4189, 4191, 4193, 4195, 4197, 4199, 4202, 4204, 4207, 4209, 4215, 4217, 4219, 4221, 4223, 4225, 4228, 4230, 4233, 4235, 4238, 4240, 4243, 4245, 4248, 4250, 4253, 4255, 4258, 4260, 4263, 4265, 4267, 4269, 4271, 4273, 4275, 4277, 4279, 4281, 4283, 4285, 4287, 4289, 4291, 4293, 4295, 4297, 4299, 4301, 4303, 4305, 4307, 4309, 4311, 4313, 4315, 4317, 4319, 4321, 4323, 4325, 4327, 4329, 4331, 4333, 4335, 4337, 4339, 4341, 4343, 4345, 4347, 4349, 4351, 4353, 4355, 4357, 4359, 4361, 4363, 4365, 4367, 4369, 4371, 4373, 4375, 4377, 4379, 4381, 4383, 4385, 4387, 4389, 4391, 4393, 4395, 4397, 4399, 4401, 4403, 4405, 4407, 4409, 4411, 4413, 4415, 4417, 4419, 4421, 4424, 4426, 4428, 4430, 4433, 4435, 4437, 4439, 4442, 4444, 4446, 4448, 4451, 4453, 4456, 4458, 4460, 4462, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4480, 4482, 4484, 4486, 4488, 4490, 4492, 4494, 4496, 4498, 4500, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4518, 4521, 4523, 4525, 4527, 4530, 4532, 4534, 4536, 4538, 4540, 4542, 4544, 4546, 4548, 4550, 4552, 4554, 4556, 4558, 4560, 4562, 4564, 4566, 4568, 4570, 4572, 4575, 4577, 4579, 4581, 4584, 4586, 4589, 4591, 4593, 4595, 4597, 4599, 4601, 4603, 4606, 4608, 4610, 4612, 4617, 4619, 4621, 4623, 4625, 4627, 4630, 4632, 
4635, 4637, 4640, 4642, 4645, 4647, 4650, 4652, 4655, 4657, 4660, 4662, 4665, 4667, 4670, 4672, 4675, 4677, 4680, 4682, 4685, 4687, 4689, 4691, 4694, 4696, 4699, 4701, 4707, 4709, 4711, 4713, 4715, 4717, 4720, 4722, 4725, 4727, 4730, 4732, 4735, 4737, 4740, 4742, 4745, 4747, 4750, 4752, 4754, 4756, 4758, 4760, 4763, 4765, 4768, 4770, 4773, 4775, 4778, 4780, 4782, 4784, 4786, 4788, 4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 4808, 4810, 4812, 4814, 4816, 4818, 4820, 4822, 4824, 4826, 4828, 4830, 4832, 4834, 4836, 4838, 4840, 4842, 4844, 4847, 4849, 4851, 4853, 4855, 4857, 4860, 4862, 4865, 4867, 4870, 4872, 4875, 4877, 4879, 4881, 4884, 4886, 4888, 4890, 4894, 4896, 4899, 4901, 4904, 4906, 4909, 4911, 4913, 4915, 4918, 4920, 4922, 4924, 4928, 4930, 4933, 4935, 4938, 4940, 4943, 4945, 4948, 4950, 4953, 4955, 4958, 4960, 4962, 4964, 4967, 4969, 4972, 4974, 4980, 4982, 4984, 4986, 4989, 4991, 4993, 4995, 4998, 5000, 5004, 5006, 5008, 5010, 5013, 5015, 5018, 5020, 5023, 5025, 5028, 5030, 5032, 5034, 5037, 5039, 5042, 5044, 5047, 5049, 5051, 5053, 5055, 5057, 5060, 5062, 5065, 5067, 5070, 5072, 5075, 5077, 5080, 5082, 5085, 5087, 5090, 5092, 5095, 5097, 5099, 5101, 5103, 5105, 5108, 5110, 5113, 5115, 5118, 5120, 5123, 5125, 5127, 5129, 5131, 5133, 5136, 5138, 5141, 5143, 5146, 5148, 5151, 5153, 5155, 5157, 5159, 5161, 5163, 5165, 5167, 5169, 5171, 5173, 5175, 5177, 5179, 5181, 5183, 5185, 5188, 5190, 5193, 5195, 5198, 5200, 5202, 5204, 5206, 5208, 5211, 5213, 5215, 5217, 5219, 5221, 5224, 5226, 5228, 5230, 5232, 5234, 5237, 5239, 5242, 5244, 5247, 5249, 5252, 5254, 5257, 5259, 5262, 5264, 5267, 5269, 5272, 5274, 5276, 5278, 5280, 5282, 5285, 5287, 5289, 5291, 5293, 5295, 5297, 5299, 5301, 5303, 5305, 5307, 5309, 5311, 5313, 5315, 5317, 5319, 5321, 5323, 5325, 5327, 5330, 5332, 5334, 5336, 5339, 5341, 5344, 5346, 5352, 5354, 5356, 5358, 5360, 5362, 5365, 5367, 5370, 5372, 5375, 5377, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396, 5398, 5401, 5403, 5405, 
5407, 5409, 5411, 5413, 5415, 5418, 5420, 5423, 5425, 5431, 5433, 5436, 5438, 5441, 5443, 5446, 5448, 5450, 5452, 5454, 5456, 5458, 5460, 5462, 5464, 5467, 5469, 5472, 5474, 5477, 5479, 5482, 5484, 5487, 5489, 5492, 5494, 5497, 5499, 5501, 5503, 5505, 5507, 5510, 5512, 5514, 5516, 5518, 5520, 5523, 5525, 5528, 5530, 5533, 5535, 5538, 5540, 5542, 5544, 5547, 5549, 5552, 5554, 5557, 5559, 5561, 5563, 5565, 5567, 5569, 5571, 5573, 5575, 5577, 5579, 5581, 5583, 5585, 5587, 5589, 5591, 5593, 5595, 5597, 5599, 5601, 5603, 5605, 5607, 5609, 5611, 5613, 5615, 5617, 5619, 5621, 5623, 5625, 5627, 5629, 5631, 5633, 5635, 5637, 5639, 5641, 5643, 5645, 5647, 5649, 5651, 5653, 5655, 5657, 5659, 5661, 5663, 5666, 5668, 5671, 5673, 5675, 5677, 5679, 5681, 5683, 5685, 5687, 5689, 5691, 5693, 5695, 5697, 5699, 5701, 5703, 5705, 5707, 5709, 5711, 5713, 5715, 5717, 5719, 5721, 5723, 5725, 5727, 5729, 5731, 5733, 5735, 5737, 5739, 5741, 5743, 5745, 5747, 5749, 5751, 5753, 5755, 5757, 5760, 5762, 5764, 5766, 5769, 5771, 5773, 5775, 5778, 5780, 5782, 5784, 5786, 5788, 5790, 5792, 5794, 5796, 5798, 5800, 5802, 5804, 5806, 5808, 5810, 5812, 5815, 5817, 5819, 5821, 5823, 5825, 5828, 5830, 5832, 5834, 5837, 5839, 5841, 5843, 5846, 5848, 5850, 5852, 5854, 5856, 5858, 5860, 5862, 5864, 5866, 5868, 5870, 5872, 5874, 5876, 5878, 5880, 5882, 5884, 5886, 5888, 5890, 5892, 5895, 5897, 5899, 5901, 5904, 5906, 5908, 5910, 5912, 5914, 5917, 5919, 5921, 5923, 5925, 5927, 5929, 5931, 5933, 5935, 5937, 5939, 5941, 5943, 5945, 5947, 5949, 5951, 5953, 5955, 5957, 5959, 5961, 5963, 5965, 5967, 5969, 5971, 5973, 5975, 5977, 5979, 5981, 5983, 5985, 5987, 5989, 5991, 5993, 5995, 5998, 6000, 6002, 6004, 6006, 6008, 6010, 6012, 6014, 6016, 6019, 6021, 6027, 6029, 6032, 6034, 6037, 6039, 6041, 6043, 6046, 6048, 6051, 6053, 6059, 6061, 6063, 6065, 6067, 6069, 6072, 6074, 6077, 6079, 6082, 6084, 6087, 6089, 6091, 6093, 6095, 6097, 6100, 6102, 6105, 6107, 6110, 6112, 6115, 6117, 6120, 6122, 6125, 6127, 6129, 6131, 
6133, 6135, 6138, 6140, 6143, 6145, 6148, 6150, 6153, 6155, 6157, 6159, 6162, 6164, 6166, 6168, 6172, 6174, 6177, 6179, 6182, 6184, 6187, 6189, 6192, 6194, 6197, 6199, 6202, 6204, 6207, 6209, 6212, 6214, 6217, 6219, 6222, 6224, 6227, 6229, 6231, 6233, 6235, 6237, 6239, 6241, 6243, 6245, 6247, 6249, 6252, 6254, 6256, 6258, 6261, 6263, 6266, 6268, 6273, 6275, 6278, 6280, 6286, 6288, 6291, 6293, 6296, 6298, 6301, 6303, 6306, 6308, 6310, 6312, 6314, 6316, 6319, 6321, 6324, 6326, 6329, 6331, 6334, 6336, 6338, 6340, 6342, 6344, 6347, 6349, 6352, 6354, 6356, 6358, 6360, 6362, 6365, 6367, 6369, 6371, 6374, 6376, 6378, 6380, 6383, 6385, 6389, 6391, 6393, 6395, 6398, 6400, 6403, 6405, 6408, 6410, 6412, 6414, 6416, 6418, 6421, 6423, 6426, 6428, 6431, 6433, 6436, 6438, 6441, 6443, 6446, 6448, 6451, 6453, 6456, 6458, 6461, 6463, 6466, 6468, 6471, 6473, 6476, 6478, 6481, 6483, 6485, 6487, 6489, 6491, 6494, 6496, 6499, 6501, 6504, 6506, 6509, 6511, 6514, 6516, 6519, 6521, 6524, 6526, 6529, 6531, 6533, 6535, 6537, 6539, 6542, 6544, 6547, 6549, 6552, 6554, 6557, 6559, 6562, 6564, 6567, 6569, 6572, 6574, 6577, 6579, 6582, 6584, 6587, 6589, 6592, 6594, 6597, 6599, 6602, 6604, 6607, 6609, 6612, 6614, 6617, 6619, 6621, 6623, 6626, 6628, 6630, 6632, 6637, 6639, 6641, 6643, 6645, 6647, 6650, 6652, 6655, 6657, 6660, 6662, 6665, 6667, 6669, 6671, 6673, 6675, 6677, 6679, 6681, 6683, 6685, 6687, 6689, 6691, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6719, 6721, 6724, 6726, 6729, 6731, 6734, 6736, 6739, 6741, 6744, 6746, 6749, 6751, 6754, 6756, 6759, 6761, 6766, 6768, 6771, 6773, 6776, 6778, 6781, 6783, 6785, 6787, 6789, 6791, 6794, 6796, 6799, 6801, 6804, 6806, 6809, 6811, 6814, 6816, 6818, 6820, 6823, 6825, 6828, 6830, 6836, 6838, 6841, 6843, 6846, 6848, 6851, 6853, 6856, 6858, 6860, 6862, 6865, 6867, 6869, 6871, 6875, 6877, 6880, 6882, 6885, 6887, 6890, 6892, 6894, 6896, 6899, 6901, 6904, 6906, 6912, 6914, 6916, 6918, 6921, 6923, 6926, 6928, 6934, 6936, 6938, 
6940, 6942, 6944, 6947, 6949, 6952, 6954, 6957, 6959, 6962, 6964, 6966, 6968, 6970, 6972, 6975, 6977, 6979, 6981, 6983, 6985, 6988, 6990, 6992, 6994, 6996, 6998, 7001, 7003, 7006, 7008, 7011, 7013, 7016, 7018, 7020, 7022, 7024, 7026, 7029, 7031, 7034, 7036, 7039, 7041, 7044, 7046, 7048, 7050, 7053, 7055, 7058, 7060, 7066, 7068, 7070, 7072, 7074, 7076, 7079, 7081, 7084, 7086, 7089, 7091, 7094, 7096, 7098, 7100, 7102, 7104, 7107, 7109, 7112, 7114, 7117, 7119, 7122, 7124, 7126, 7128, 7130, 7132, 7135, 7137, 7140, 7142, 7145, 7147, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7179, 7181, 7183, 7185, 7187, 7189, 7192, 7194, 7197, 7199, 7202, 7204, 7207, 7209, 7212, 7214, 7217, 7219, 7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258, 7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296, 7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334, 7336, 7338, 7340, 7343, 7345, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372, 7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410, 7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448, 7450, 7452, 7454, 7457, 7459, 7461, 7463, 7465, 7467, 7469, 7471, 7474, 7476, 7478, 7480, 7483, 7485, 7487, 7489, 7491, 7493, 7495, 7497, 7499, 7501, 7503, 7505, 7507, 7509, 7511, 7513, 7516, 7518, 7520, 7522, 7525, 7527, 7529, 7531, 7533, 7535, 7537, 7539, 7542, 7544, 7547, 7549, 7552, 7554, 7557, 7559, 7561, 7563, 7565, 7567, 7570, 7572, 7575, 7577, 7579, 7581, 7583, 7585, 7588, 7590, 7593, 7595, 7597, 7599, 7602, 7604, 7607, 7609, 7615, 7617, 7620, 7622, 7625, 7627, 7629, 7631, 7633, 7635, 7637, 7639, 7641, 7643, 7645, 7647, 7649, 7651, 7653, 7655, 7657, 7659, 7661, 7663, 7666, 7668, 
7670, 7672, 7674, 7676, 7679, 7681, 7684, 7686, 7688, 7690, 7692, 7694, 7697, 7699, 7701, 7703, 7705, 7707, 7710, 7712, 7715, 7717, 7720, 7722, 7725, 7727, 7730, 7732, 7734, 7736, 7739, 7741, 7743, 7745, 7749, 7751, 7754, 7756, 7759, 7761, 7763, 7765, 7767, 7769, 7771, 7773, 7775, 7777, 7779, 7781, 7783, 7785, 7787, 7789, 7791, 7793, 7795, 7797, 7800, 7802, 7804, 7806, 7810, 7812, 7815, 7817, 7820, 7822, 7825, 7827, 7829, 7831, 7833, 7835, 7838, 7840, 7843, 7845, 7848, 7850, 7853, 7855, 7858, 7860, 7863, 7865, 7868, 7870, 7873, 7875, 7878, 7880, 7886, 7888, 7890, 7892, 7894, 7896, 7899, 7901, 7904, 7906, 7909, 7911, 7914, 7916, 7919, 7921, 7924, 7926, 7929, 7931, 7933, 7935, 7937, 7939, 7942, 7944, 7947, 7949, 7952, 7954, 7957, 7959, 7962, 7964, 7966, 7968, 7970, 7972, 7975, 7977, 7980, 7982, 7985, 7987, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 6196, 6191, 6196, 6191, 6196, 6191, 8012, 8014, 8016, 8018, 8020, 8022, 5236, 5251, 3637, 3635, 3640, 3638, 3637, 3635, 3640, 3638, 5236, 5251, 4979, 4977, 4979, 4977, 1907, 1737, 1632, 1627, 1632, 1627, 2988, 2983, 3008, 3003, 7065, 7063, 8169, 8171, 8173, 8175, 8177, 8179, 8181, 8183, 8185, 8187, 8189, 8191, 6908, 6903, 6908, 6903, 6026, 6024, 6026, 6024, 6221, 6226, 6221, 6226, 6636, 6634, 6649, 6649, 6636, 6634, 6460, 6460, 6465, 6465, 6465, 6460, 6465, 6460, 6908, 6903, 6908, 6903, 6911, 6909, 8386, 8388, 8390, 8392, 8394, 8396, 8398, 8400, 2802, 2797, 2802, 2797, 3771, 3766, 3771, 3766, 3637, 3635, 3640, 3638, 3637, 3635, 3640, 3638, 3621, 3633, 845, 845, 1907, 1737, 1824, 1824, 1907, 1737, 1388, 1907, 1737, 1388, 1536, 1524, 1524, 1536, 1909, 1909, 2968, 2963, 2968, 2963, 2988, 2983, 2988, 2983, 3008, 3003, 2968, 2963, 2968, 2963, 2988, 2983, 2988, 2983, 3008, 3003, 3289, 3289, 3258, 3253, 3258, 3253, 3402, 3397, 3402, 3397, 3455, 3455, 3058, 3056, 3058, 3056, 3154, 3152, 3154, 3152, 3193, 3191, 3193, 3191, 3223, 3221, 3223, 3221, 3637, 3635, 3640, 3638, 3637, 3635, 3592, 3590, 3592, 3590, 3621, 3633, 3637, 
3635, 3640, 3638, 3637, 3635, 3640, 3638, 3720, 3718, 3720, 3718, 3846, 3844, 3846, 3844, 3989, 3989, 4212, 4214, 4214, 4212, 4706, 4704, 4706, 4704, 4719, 4734, 4706, 4704, 4706, 4704, 4719, 4734, 4977, 4977, 4979, 4979, 5027, 5022, 5027, 5022, 4706, 4704, 4706, 4704, 4719, 4734, 4719, 4734, 4777, 4777, 4616, 4614, 4616, 4614, 4706, 4704, 4706, 4704, 4893, 4893, 4927, 4927, 4979, 4977, 4979, 4977, 5430, 5428, 5430, 5428, 5351, 5349, 5351, 5349, 5430, 5428, 5430, 5428, 5430, 5428, 5430, 5428, 6058, 6056, 6058, 6056, 6221, 6226, 6221, 6226, 6636, 6634, 6636, 6634, 5777, 6780, 6780, 6793, 6793, 5777, 6026, 6024, 6026, 6024, 6152, 6147, 6152, 6147, 6221, 6226, 6221, 6226, 6285, 6283, 6221, 6226, 6221, 6226, 6285, 6283, 6026, 6024, 6026, 6024, 6058, 6056, 6058, 6056, 6171, 6171, 6285, 6283, 6272, 6272, 6285, 6283, 6636, 6634, 6636, 6634, 6765, 6765, 6835, 6833, 6835, 6833, 6874, 6874, 6911, 6909, 6911, 6909, 6933, 6931, 6933, 6931, 7065, 7063, 7065, 7063, 7614, 7612, 7614, 7612, 7614, 7612, 7913, 7898, 7974, 7974, 7883, 7885, 7913, 7898, 7614, 7612, 7614, 7612, 7748, 7748, 7809, 7809, 7885, 7883, 10416, 10418, 10420, 10422, 10424, 10426, 10428, 10430, 10432, 10434, 10437, 10439, 10442, 10444, 10446, 10448, 10450, 10452, 10454, 10456, 10458, 10460, 10462, 10464, 10467, 10469, 10471, 10473, 10476, 10478, 10481, 10483, 10489, 10491, 10493, 10495, 10497, 10499, 10502, 10504, 10507, 10509, 10512, 10514, 10517, 10519, 10521, 10523, 10526, 10528, 10531, 10533, 10539, 10541, 10543, 10545, 10547, 10549, 10552, 10554, 10556, 10558, 10560, 10562, 10565, 10567, 10570, 10572, 10575, 10577, 10580, 10582, 10585, 10587, 10589, 10591, 10593, 10595, 10598, 10600, 10603, 10605, 10608, 10610, 10613, 10615, 10617, 10619, 10622, 10624, 10627, 10629, 10635, 10637, 10639, 10641, 10643, 10645, 10648, 10650, 10652, 10654, 10656, 10658, 10660, 10662, 10664, 10666, 10668, 10670, 10672, 10674, 10677, 10679, 10681, 10683, 10685, 10687, 10690, 10692, 10694, 10696, 10698, 10700, 10702, 10704, 10706, 
10708, 10710, 10712, 10714, 10716, 10718, 10720, 10722, 10724, 10727, 10729, 10732, 10734, 10737, 10739, 10741, 10743, 10746, 10748, 10751, 10753, 10759, 10761, 10764, 10766, 10769, 10771, 10774, 10776, 10779, 10781, 10783, 10785, 10788, 10790, 10792, 10794, 10798, 10800, 10803, 10805, 10808, 10810, 10813, 10815, 10817, 10819, 10821, 10823, 10826, 10828, 10830, 10832, 10834, 10836, 10838, 10840, 10842, 10844, 10846, 10848, 10850, 10852, 10854, 10856, 10859, 10861, 10863, 10865, 10868, 10870, 10872, 10874, 10876, 10878, 10881, 10883, 10885, 10887, 10889, 10891, 10894, 10896, 10899, 10901, 10904, 10906, 10909, 10911, 10914, 10916, 10919, 10921, 10924, 10926, 10929, 10931, 10934, 10936, 10939, 10941, 10944, 10946, 10949, 10951, 10954, 10956, 10962, 10964, 10966, 10968, 10970, 10972, 10974, 10976, 10978, 10980, 10982, 10984, 10986, 10988, 10990, 10992, 10994, 10996, 10998, 11000, 11003, 11005, 11007, 11009, 11012, 11014, 11016, 11018, 11020, 11022, 11024, 11026, 11028, 11030, 11033, 11035, 11037, 11039, 11042, 11044, 11046, 11048, 11050, 11052, 11054, 11056, 11058, 11060, 11062, 11064, 11066, 11068, 11070, 11072, 11074, 11076, 11079, 11081, 11083, 11085, 11088, 11090, 11092, 11094, 11097, 11099, 11101, 11103, 11105, 11107, 11109, 11111, 11113, 11115, 11117, 11119, 11121, 11123, 11125, 11127, 11129, 11131, 11133, 11135, 11137, 11139, 11141, 11143, 11145, 11147, 11150, 11152, 11156, 11158, 11160, 11162, 11164, 11166, 11169, 11171, 11174, 11176, 11179, 11181, 11184, 11186, 11189, 11191, 11194, 11196, 11199, 11201, 11204, 11206, 11209, 11211, 11214, 11216, 11218, 11220, 11225, 11227, 11230, 11232, 11235, 11237, 11239, 11241, 11243, 11245, 11248, 11250, 11253, 11255, 11258, 11260, 11263, 11265, 11268, 11270, 11273, 11275, 11278, 11280, 11283, 11285, 11288, 11290, 11293, 11295, 11298, 11300, 11303, 11305, 11307, 11309, 11312, 11314, 11317, 11319, 11324, 11326, 11328, 11330, 11332, 11334, 11336, 11338, 11340, 11342, 11344, 11346, 11349, 11351, 11353, 11355, 11357, 11359, 
11361, 11363, 11366, 11368, 11370, 11372, 11374, 11376, 11378, 11380, 11382, 11384, 11386, 11388, 11390, 11392, 11394, 11396, 11398, 11400, 11402, 11404, 11406, 11408, 11410, 11412, 11414, 11416, 11418, 11420, 11422, 11424, 11426, 11428, 11431, 11433, 11435, 11437, 11439, 11441, 11443, 11445, 11447, 11449, 11451, 11453, 11456, 11458, 11460, 11462, 11465, 11467, 11469, 11471, 11474, 11476, 11479, 11481, 11487, 11489, 11491, 11493, 11496, 11498, 11501, 11503, 11509, 11511, 11513, 11515, 11517, 11519, 11521, 11523, 11525, 11527, 11529, 11531, 11534, 11536, 11539, 11541, 11544, 11546, 11549, 11551, 11554, 11556, 11559, 11561, 11567, 11569, 11572, 11574, 11577, 11579, 11582, 11584, 11587, 11589, 11591, 11593, 11596, 11598, 11601, 11603, 11608, 11610, 11612, 11614, 11616, 11618, 11621, 11623, 11626, 11628, 11631, 11633, 11636, 11638, 11640, 11642, 11644, 11646, 11649, 11651, 11654, 11656, 11659, 11661, 10731, 10726, 10923, 10928, 10923, 10928, 10961, 10959, 11741, 11743, 11745, 11747, 11750, 11752, 11754, 11756, 11759, 11761, 11763, 11765, 11767, 11769, 11771, 11773, 11775, 11777, 11779, 11781, 11783, 11785, 11787, 11789, 11791, 11793, 11795, 11797, 11799, 11801, 11803, 11805, 11807, 11809, 11811, 11813, 11816, 11818, 11821, 11823, 11826, 11828, 11830, 11832, 11834, 11836, 11838, 11840, 11843, 11845, 11848, 11850, 11852, 11854, 11858, 11860, 11862, 11864, 11866, 11868, 11870, 11872, 11874, 11876, 11878, 11880, 11882, 11884, 10903, 10898, 10961, 10959, 10903, 10898, 11486, 11484, 11505, 11500, 11508, 11506, 11505, 11500, 11508, 11506, 11486, 11484, 11486, 11484, 11505, 11500, 11505, 11500, 11505, 11500, 11508, 11506, 11566, 11564, 10536, 10538, 10634, 10632, 10488, 10486, 10488, 10486, 10538, 10536, 10538, 10536, 10634, 10632, 10634, 10632, 10756, 10758, 10758, 10756, 10731, 10726, 10758, 10756, 10758, 10756, 10812, 10731, 10726, 10758, 10756, 10758, 10756, 10812, 10758, 10756, 10758, 10756, 10797, 10797, 10923, 10928, 10923, 10928, 10961, 10959, 10961, 10959, 11224, 
11222, 11173, 11168, 11173, 11168, 11282, 11277, 11282, 11277, 11193, 11188, 11193, 11188, 11224, 11222, 11224, 11222, 11323, 11323, 11484, 11486, 11486, 11484, 11505, 11500, 11508, 11506, 11505, 11500, 11508, 11506, 11486, 11484, 11486, 11484, 11505, 11500, 11506, 11505, 11500, 11508, 11505, 11500, 11508, 11506, 11566, 11564, 11566, 11564, 11595, 11607, 11648, 11648, 11486, 11484, 11486, 11484, 11508, 11506, 11508, 11506, 11566, 11564, 11566, 11564, 11595, 11607, 13310, 13312, 13314, 13316, 13318, 13320, 13322, 13324, 13326, 13328, 13331, 13333, 13336, 13338, 13341, 13343, 13346, 13348, 13351, 13353, 13356, 13358, 13361, 13363, 13366, 13368, 13370, 13372, 13374, 13376, 13379, 13381, 13384, 13386, 13389, 13391, 13394, 13396, 13398, 13400, 13402, 13404, 13406, 13408, 13410, 13412, 13414, 13416, 13418, 13420, 13422, 13424, 13426, 13428, 13431, 13433, 13436, 13438, 13441, 13443, 13446, 13448, 13451, 13453, 13456, 13458, 13461, 13463, 13465, 13467, 13469, 13471, 13474, 13476, 13479, 13481, 13484, 13486, 13489, 13491, 13494, 13496, 13499, 13501, 13504, 13506, 13509, 13511, 13514, 13516, 13519, 13521, 13524, 13526, 13528, 13530, 13532, 13534, 13537, 13539, 13542, 13544, 13547, 13549, 13552, 13554, 13557, 13559, 13562, 13564, 13567, 13569, 13572, 13574, 13577, 13579, 13582, 13584, 13587, 13589, 13592, 13594, 13596, 13598, 13600, 13602, 13605, 13607, 13610, 13612, 13615, 13617, 13620, 13622, 13624, 13626, 13629, 13631, 13634, 13636, 13642, 13644, 13646, 13648, 13651, 13653, 13656, 13658, 13664, 13666, 13668, 13670, 13672, 13674, 13677, 13679, 13682, 13684, 13687, 13689, 13692, 13694, 13697, 13699, 13702, 13704, 13706, 13708, 13710, 13712, 13714, 13716, 13719, 13721, 13723, 13725, 13728, 13730, 13734, 13736, 13738, 13740, 13743, 13745, 13748, 13750, 13753, 13755, 13758, 13760, 13762, 13764, 13766, 13768, 13771, 13773, 13776, 13778, 13781, 13783, 13786, 13788, 13790, 13792, 13795, 13797, 13799, 13801, 13805, 13807, 13810, 13812, 13815, 13817, 13820, 13822, 13824, 13826, 
13828, 13830, 13833, 13835, 13838, 13840, 13843, 13845, 13848, 13850, 13852, 13854, 13856, 13858, 13861, 13863, 13866, 13868, 13871, 13873, 13876, 13878, 13881, 13883, 13886, 13888, 13891, 13893, 13896, 13898, 13900, 13902, 13905, 13907, 13910, 13912, 13918, 13920, 13923, 13925, 13928, 13930, 13933, 13935, 13938, 13940, 13943, 13945, 13948, 13950, 13953, 13955, 13957, 13959, 13961, 13963, 13966, 13968, 13970, 13972, 13975, 13977, 13980, 13982, 13988, 13990, 13993, 13995, 13998, 14000, 14003, 14005, 14008, 14010, 14013, 14015, 14018, 14020, 14023, 14025, 14028, 14030, 14033, 14035, 14038, 14040, 14043, 14045, 14048, 14050, 14052, 14054, 14057, 14059, 14062, 14064, 14125, 14127, 14129, 14131, 14133, 14135, 14137, 14139, 14141, 14143, 14145, 14147, 14149, 14151, 14153, 14155, 13478, 13473, 13483, 13488, 13478, 13473, 13483, 13488, 13483, 13488, 13860, 13875, 13860, 13875, 14218, 14220, 14222, 14224, 14226, 14228, 14230, 14232, 14234, 14236, 14238, 14240, 14242, 14244, 14246, 14248, 14250, 14252, 14254, 14256, 14258, 14260, 14262, 14264, 14267, 14269, 14271, 14273, 14275, 14277, 14279, 14281, 14283, 14285, 14287, 14289, 14291, 14293, 14295, 14297, 14299, 14301, 14303, 14305, 14307, 14309, 14311, 14313, 14315, 14317, 14319, 14321, 14323, 14325, 14327, 14329, 11857, 11856, 14368, 14370, 14372, 14374, 14376, 14378, 14380, 14382, 14384, 14386, 14388, 14390, 14393, 14395, 14397, 14399, 14401, 14403, 14405, 14407, 14409, 14411, 14413, 14415, 14418, 14420, 14422, 14424, 14426, 14428, 14430, 14432, 14434, 14436, 14438, 14440, 14443, 14445, 14447, 14449, 14452, 14454, 14456, 14458, 14461, 14463, 14465, 14467, 14469, 14471, 14473, 14475, 14477, 14479, 14481, 14483, 14485, 14487, 13641, 13639, 13641, 13639, 13663, 13661, 13663, 13661, 13804, 13804, 13917, 13915, 13917, 13915, 13987, 13985, 13987, 13985, 14069, 14067, 14069, 14067, 15219, 15221, 15223, 15225, 15227, 15229, 15232, 15234, 15236, 15238, 15240, 15242, 15244, 15246, 15248, 15250, 15252, 15254, 15256, 15258, 15260, 
15262, 15264, 15266, 15268, 15270, 15273, 15275, 15278, 15280, 15283, 15285, 15288, 15290, 15293, 15295, 15298, 15300, 15303, 15305, 15308, 15310, 15313, 15315, 15321, 15323, 15326, 15328, 15331, 15333, 15336, 15338, 15341, 15343, 15346, 15348, 15351, 15353, 15356, 15358, 15361, 15363, 15365, 15367, 15369, 15371, 15374, 15376, 15379, 15381, 15384, 15386, 15389, 15391, 15393, 15395, 15398, 15400, 15402, 15404, 15408, 15410, 15412, 15414, 15416, 15418, 15320, 15318, 15360, 15355, 15360, 15355, 15388, 15373, 15272, 15272, 15360, 15355, 15360, 15355, 15388, 15373, 15307, 15307, 15360, 15355, 15360, 15355, 15388, 15373, 15320, 15318, 15320, 15318, 15407, 15407, 15974, 15972, 15974, 15972, 16411, 16413, 16416, 16418, 16420, 16422, 16431, 16433, 16444, 16446, 16448, 16450, 16452, 16454, 16456, 16458, 16659, 16661, 16663, 16665, 16667, 16669, 16672, 16674, 16677, 16679, 16682, 16684, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17024, 17026, 17028, 17030, 17032, 17034, 17036, 17038, 17040, 17042, 17044, 17046, 17048, 17050, 17052, 17054, 17056, 17058, 17060, 17062, 17064, 17066, 17068, 17070, 17072, 17074, 17076, 17078, 17080, 17082, 17084, 17086, 17088, 17090, 17092, 17094, 17096, 17098, 17100, 17102, 17104, 17106, 17108, 17110, 17112, 17114, 17116, 17118, 17120, 17122, 17124, 17126, 17128, 17130, 17132, 17134, 17136, 17138, 17140, 17142, 17144, 17146, 17148, 17150, 17152, 17154, 17156, 17158, 17160, 17162, 17164, 17166, 17168, 17170, 17172, 17174, 17176, 17178, 17180, 17182, 17184, 17186, 17188, 17190, 17192, 17194, 17196, 17198, 17200, 17202, 17204, 17206, 17208, 17210, 17212, 17214, 17216, 17218, 17220, 17222, 17224, 17226, 17228, 17230, 17232, 17234, 17236, 17238, 17240, 17242, 17244, 17246, 17248, 17250, 17252, 17254, 17256, 17258, 17260, 17262, 17264, 17266, 17268, 17270, 17272, 17274, 17276, 17278, 17280, 17282, 17284, 17286, 17288, 17290, 17292, 17294, 17296, 17298, 17300, 17302, 17304, 17306, 17308, 17310, 17312, 17314, 17316, 17318, 17320, 17322, 17324, 
17326, 17328, 17330, 17332, 17334, 17336, 17338, 17340, 17342, 17344, 17346, 17348, 17350, 17352, 17354, 17356, 17358, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17376, 17378, 17380, 17382, 17384, 17386, 17388, 17390, 17392, 17394, 17396, 17398, 17400, 17402, 17404, 17406, 17408, 17410, 17412, 17414, 17416, 17418, 17420, 17422, 17424, 17426, 17428, 17430, 17432, 17434, 17436, 17438, 17440, 17442, 17444, 17446, 17448, 17450, 17452, 17454, 17456, 17458, 17460, 17462, 17464, 17466, 17468, 17470, 17472, 17474, 17476, 17478, 17480, 17482, 17484, 17486, 17488, 17490, 17492, 17494, 17496, 17498, 17500, 17502, 17504, 17506, 17508, 17510, 17512, 17514, 17516, 17518, 17520, 17522, 17524, 17526, 17528, 17530, 17532, 17534, 17536, 17538, 17540, 17542, 17544, 17546, 17548, 17550, 17552, 17554, 17556, 17558, 17560, 17562, 17564, 17566, 17568, 17570, 17572, 17574, 17576, 17578, 17580, 17582, 17584, 17586, 17588, 17590, 17592, 17594, 17596, 17598, 17600, 17602, 17604, 17606, 17608, 17610, 17612, 17614, 17616, 17618, 17620, 17622, 17624, 17626, 17628, 17630, 17632, 17634, 17636, 17638, 17640, 17642, 17644, 17646, 17648, 17650, 17652, 17654, 17656, 17658, 17660, 17662, 17664, 17666, 17668, 17670, 17672, 17674, 17676, 17678, 17680, 17682, 17684, 17686, 17688, 17690, 17692, 17694, 17696, 17698, 17700, 17702, 17704, 17706, 17708, 17710, 17712, 17714, 17716, 17718, 17720, 17722, 17724, 17726, 17728, 17730, 17732, 17734, 17736, 17738, 17740, 17742, 17744, 17746, 17748, 17750, 17752, 17754, 17756, 17758, 17760, 17762, 17764, 17766, 17768, 17770, 17772, 17774, 17776, 17778, 17780, 17782, 17784, 17786, 17788, 17790, 17792, 17794, 17796, 17798, 17800, 17802, 17804, 17806, 17808, 17810, 17812, 17814, 17816, 17818, 17820, 17822, 17824, 17826, 17828, 17830, 17832, 17834, 17836, 17838, 17840, 17842, 17844, 17846, 17848, 17850, 17852, 17854, 17856, 17858, 17860, 17862, 17864, 17866, 17868, 17870, 17872, 17874, 17876, 17878, 17880, 17882, 17884, 17886, 17888, 17890, 17892, 17894, 
17896, 17898, 17900, 17902, 17904, 17906, 17908, 17910, 17912, 17914, 17916, 17918, 17920, 17922, 17924, 17926, 17928, 17930, 17932, 17934, 17936, 17938, 17940, 17942, 17944, 17946, 17948, 17950, 17952, 17954, 17956, 17958, 17960, 17962, 17964, 17966, 17968, 17970, 17972, 17974, 17976, 17978, 17980, 17982, 17984, 17986, 17988, 17990, 17992, 17994, 17996, 17998, 18000, 18002, 18004, 18006, 18008, 18010, 18012, 18014, 18016, 18018, 18020, 18022, 18024, 18026, 18028, 18030, 18032, 18034, 18036, 18038, 18040, 18042, 18044, 18046, 18048, 18050, 18052, 18054, 18056, 18058, 18060, 18062, 18064, 18066, 18068, 18070, 18072, 18074, 18076, 18078, 18080, 18082, 18084, 18086, 18088, 18090, 18092, 18094, 18096, 18098, 18100, 18102, 18104, 18106, 18108, 18110, 18112, 18114, 18116, 18118, 18120, 18122, 18124, 18126, 18128, 18130, 18132, 18134, 18136, 18138, 18140, 18142, 18144, 18146, 18148, 18150, 18152, 18154, 18156, 18158, 18160, 18162, 18164, 18166, 18168, 18170, 18172, 18174, 18176, 18178, 18180, 18182, 18184, 18186, 18188, 18190, 18192, 18194, 18196, 18198, 18200, 18202, 18204, 18206, 18208, 18210, 18212, 18214, 18216, 18218, 18220, 18222, 18224, 18226, 18228, 18230, 18232, 18234, 18236, 18238, 18240, 18242, 18244, 18246, 18248, 18250, 18252, 18254, 18256, 18258, 18260, 18262, 18264, 18266, 18268, 18270, 18272, 18274, 18276, 18278, 18280, 18282, 18284, 18286, 18288, 18290, 18292, 18294, 18296, 18298, 18300, 18302, 18304, 18306, 18308, 18310, 18312, 18314, 18316, 18318, 18320, 18322, 18324, 18326, 18328, 18330, 18332, 18334, 18336, 18338, 18340, 18342, 18344, 18346, 18348, 18350, 18352, 18354, 18356, 18358, 18360, 18362, 18364, 18366, 18368, 18370, 18372, 18374, 18376, 18378, 18380, 18382, 18384, 18386, 18388, 18390, 18392, 18394, 18396, 18398, 18400, 18402, 18404, 18406, 18408, 18410, 18412, 18414, 18416, 18418, 18420, 18422, 18424, 18426, 18428, 18430, 18432, 18434, 18436, 18438, 18440, 18442, 18444, 18446, 18448, 18450, 18452, 18454, 18456, 18458, 18460, 18462, 18464, 
18466, 18468, 18470, 18472, 18474, 18476, 18478, 18480, 18482, 18484, 18486, 18488, 18490, 18492, 18494, 18496, 18498, 18500, 18502, 18504, 18506, 18508, 18510, 18512, 18514, 18516, 18518, 18520, 18522, 18524, 18526, 18528, 18530, 18532, 18534, 18536, 18538, 18540, 18542, 18544, 18546, 18548, 18550, 18552, 18554, 18556, 18558, 18560, 18562, 18564, 18566, 18568, 18570, 18572, 18574, 18576, 18578, 18580, 18582, 18584, 18586, 18588, 18590, 18592, 18594, 18596, 18598, 18600, 18602, 18604, 18606, 18608, 18610, 18612, 18614, 18616, 18618, 18620, 18622, 18624, 18626, 18628, 18630, 18632, 18634, 18636, 18638, 18640, 18642, 18644, 18646, 18648, 18650, 18652, 18654, 18656, 18658, 18660, 18662, 18664, 18666, 18668, 18670, 18672, 18674, 18676, 18678, 18680, 18682, 18684, 18686, 18688, 18690, 18692, 18694, 18696, 18698, 18700, 18702, 18704, 18706, 18708, 18710, 18712, 18714, 18716, 18718, 18720, 18722, 18724, 18726, 18728, 18730, 18732, 18734, 18736, 18738, 18740, 18742, 18744, 18746, 18748, 18750, 18752, 18754, 18756, 18758, 18760, 18762, 18764, 18766, 18768, 18770, 18772, 18774, 18776, 18778, 18780, 18782, 18784, 18786, 18788, 18790, 18792, 18794, 18796, 18798, 18800, 18802, 18804, 18806, 18808, 18810, 18812, 18814, 18816, 18818, 18820, 18822, 18824, 18826, 18828, 18830, 18832, 18834, 18836, 18838, 18840, 18842, 18844, 18846, 18848, 18850, 18852, 18854, 18856, 18858, 18860, 18862, 18864, 18866, 18868, 18870, 18872, 18874, 18876, 18878, 18880, 18882, 18884, 18886, 18888, 18890, 18892, 18894, 18896, 18898, 18900, 18902, 18904, 18906, 18908, 18910, 18912, 18914, 18916, 18918, 18920, 18922, 18924, 18926, 18928, 18930, 18932, 18934, 18936, 18938, 18940, 18942, 18944, 18946, 18948, 18950, 18952, 18954, 18956, 18958, 18960, 18962, 18964, 18966, 18968, 18970, 18972, 18974, 18976, 18978, 18980, 18982, 18984, 18986, 18988, 18990, 18992, 18994, 18996, 18998, 19000, 19002, 19004, 19006, 19008, 19010, 19012, 19014, 19016, 19018, 19020, 19022, 19024, 19026, 19028, 19030, 19032, 19034, 
19036, 19038, 19040, 19042, 19044, 19046, 19048, 19050, 19052, 19054, 19056, 19058, 19060, 19062, 19064, 19066, 19068, 19070, 19072, 19074, 19076, 19078, 19080, 19082, 19084, 19086, 19088, 19090, 19092, 19094, 19096, 19098, 19100, 19102, 19104, 19106, 19108, 19110, 19112, 19114, 19116, 19118, 19120, 19122, 19124, 19126, 19128, 19130, 19132, 19134, 19136, 19138, 19140, 19142, 19144, 19146, 19148, 19150, 19152, 19154, 19156, 19158, 19160, 19162, 19164, 19166, 19168, 19170, 19172, 19174, 19176, 19178, 19180, 19182, 19184, 19186, 19188, 19190, 19192, 19194, 19196, 19198, 19200, 19202, 19204, 19206, 19208, 19210, 19212, 19214, 19216, 19218, 19220, 19222, 19224, 19226, 19228, 19230, 19232, 19234, 19236, 19238, 19240, 19242, 19244, 19246, 19248, 19250, 19252, 19254, 19256, 19258, 19260, 19262, 19264, 19266, 19268, 19270, 19272, 19274, 19276, 19278, 19280, 19282, 19284, 19286, 19288, 19290, 19292, 19294, 19296, 19298, 19300, 19302, 19304, 19306, 19308, 19310, 19312, 19314, 19316, 19318, 19320, 19322, 19324, 19326, 19328, 19330, 19332, 19334, 19336, 19338, 19340, 19342, 19344, 19346, 19348, 19350, 19352, 19354, 19356, 19358, 19360, 19362, 19364, 19366, 19368, 19370, 19372, 19374, 19376, 19378, 19380, 19382, 19384, 19386, 19388, 19390, 19392, 19394, 19396, 19398, 19400, 19402, 19404, 19406, 19408, 19410, 19412, 19414, 19416, 19418, 19420, 19422, 19424, 19426, 19428, 19430, 19432, 19434, 19436, 19438, 19440, 19442, 19444, 19446, 19448, 19450, 19452, 19454, 19456, 19458, 19460, 19462, 19464, 19466, 19468, 19470, 19472, 19474, 19476, 19478, 19480, 19482, 19484, 19486, 19488, 19490, 19492, 19494, 19496, 19498, 19500, 19502, 19504, 19506, 19508, 19510, 19512, 19514, 19516, 19518, 19520, 19522, 19524, 19526, 19528, 19530, 19532, 19534, 19536, 19538, 19540, 19542, 19544, 19546, 19548, 19550, 19552, 19554, 19556, 19558, 19560, 19562, 19564, 19566, 19568, 19570, 19572, 19574, 19576, 19578, 19580, 19582, 19584, 19586, 19588, 19590, 19592, 19594, 19596, 19598, 19600, 19602, 19604, 
19606, 19608, 19610, 19612, 19614, 19616, 19618, 19620, 19622, 19624, 19626, 19628, 19630, 19632, 19634, 19636, 19638, 19640, 19642, 19644, 19646, 19648, 19650, 19652, 19654, 19656, 19658, 19660, 19662, 19664, 19666, 19668, 19670, 19672, 19674, 19676, 19678, 19680, 19682, 19684, 19686, 19688, 19690, 19692, 19694, 19696, 19698, 19700, 19702, 19704, 19706, 19708, 19710, 19712, 19714, 19716, 19718, 19720, 19722, 19724, 19726, 19728, 19730, 19732, 19734, 19736, 19738, 19740, 19742, 19744, 19746, 19748, 19750, 19752, 19754, 19756, 19758, 19760, 19762, 19764, 19766, 19768, 19770, 19772, 19774, 19776, 19778, 19780, 19782, 19784, 19786, 19788, 19790, 19792, 19794, 19796, 19798, 19800, 19802, 19804, 19806, 19808, 19810, 19812, 19814, 19816, 19818, 19820, 19822, 19824, 19826, 19828, 19830, 19832, 19834, 19836, 19838, 19840, 19842, 19844, 19846, 19848, 19850, 19852, 19854, 19856, 19858, 19860, 19862, 19864, 19866, 19868, 19870, 19872, 19874, 19876, 19878, 19880, 19882, 19884, 19886, 19888, 19890, 19892, 19894, 19896, 19898, 19900, 19902, 19904, 19906, 19908, 19910, 19912, 19914, 19916, 19918, 19920, 19922, 19924, 19926, 19928, 19930, 19932, 19934, 19936, 19938, 19940, 19942, 19944, 19946, 19948, 19950, 19952, 19954, 19956, 19958, 19960, 19962, 19964, 19966, 19968, 19970, 19972, 19974, 19976, 19978, 19980, 19982, 19984, 19986, 19988, 19990, 19992, 19994, 19996, 19998, 20000, 20002, 20004, 20006, 20008, 20010, 20012, 20014, 20016, 20018, 20020, 20022, 20024, 20026, 20028, 20030, 20032, 20034, 20036, 20038, 20040, 20042, 20044, 20046, 20048, 20050, 20052, 20054, 20056, 20058, 20060, 20062, 20064, 20066, 20068, 20070, 20072, 20074, 20076, 20078, 20080, 20082, 20084, 20086, 20088, 20090, 20092, 20094, 20096, 20098, 20100, 20102, 20104, 20106, 20108, 20110, 20112, 20114, 20116, 20118, 20120, 20122, 20124, 20126, 20128, 20130, 20132, 20134, 20136, 20138, 20140, 20142, 20144, 20146, 20148, 20150, 20152, 20154, 20156, 20158, 20160, 20162, 20164, 20166, 20168, 20170, 20172, 20174, 
20176, 20178, 20180, 20182, 20184, 20186, 20188, 20190, 20192, 20194, 20196, 20198, 20200, 20202, 20204, 20206, 20208, 20210, 20212, 20214, 20216, 20218, 20220, 20222, 20224, 20226, 20228, 20230, 20232, 20234, 20236, 20238, 20240, 20242, 20244, 20246, 20248, 20250, 20252, 20254, 20256, 20258, 20260, 20262, 20264, 20266, 20268, 20270, 20272, 20274, 20276, 20278, 20280, 20282, 20284, 20286, 20288, 20290, 20292, 20294, 20296, 20298, 20300, 20302, 20304, 20306, 20308, 20310, 20312, 20314, 20316, 20318, 20320, 20322, 20324, 20326, 20328, 20330, 20332, 20334, 20336, 20338, 20340, 20342, 20344, 20346, 20348, 20350, 20352, 20354, 20356, 20358, 20360, 20362, 20364, 20366, 20368, 20370, 20372, 20374, 20376, 20378, 20380, 20382, 20384, 20386, 20388, 20390, 20392, 20394, 20396, 20398, 20400, 20402, 20404, 20406, 20408, 20410, 20412, 20414, 20416, 20418, 20420, 20422, 20424, 20426, 20428, 20430, 20432, 20434, 20436, 20438, 20440, 20442, 20444, 20446, 20448, 20450, 20452, 20454, 20456, 20458, 20460, 20462, 20464, 20466, 20468, 20470, 20472, 20474, 20476, 20478, 20480, 20482, 20484, 20486, 20488, 20490, 20492, 20494, 20496, 20498, 20500, 20502, 20504, 20506, 20508, 20510, 20512, 20514, 20516, 20518, 20520, 20522, 20524, 20526, 20528, 20530, 20532, 20534, 20536, 20538, 20540, 20542, 20544, 20546, 20548, 20550, 20552, 20554, 20556, 20558, 20560, 20562, 20564, 20566, 20568, 20570, 20572, 20574, 20576, 20578, 20580, 20582, 20584, 20586, 20587, 20588, 20589, 20590, 20591, 20592, 20594, 20596, 20598, 20599, 20600, 20601, 20602, 20603, 20604, 20605, 20606, 20607, 20608, 20609, 20610, 20611, 20612, 20613, 20614, 20615, 20616, 20617, 20618, 20619, 20620, 20621, 20622, 20623, 20624, 20625, 20626, 20628, 20630, 20632, 20634, 20636, 20638, 20639, 20640, 20641, 20642, 20643, 20644, 20645, 20646, 20647, 20648, 20649, 20650, 20651, 20652, 20653, 20654, 20655, 20656, 20657, 20658, 20659, 20660, 20661, 20662, 20663, 20664, 20665, 20666, 20667, 20668, 20669, 20670, 20672, 20674, 20676, 20678, 
20679, 20680, 20681, 20682, 20683, 20684, 20685, 20686, 20687, 20688, 20689, 20690, 20691, 20692, 20693, 20694, 20695, 20696, 20697, 20698, 20699, 20700, 20701, 20702, 20703, 20704, 20705, 20706, 20707, 20708, 20709, 20710, 20711, 20712, 20713, 20714, 20715, 20716, 20717, 20718, 20719, 20720, 20721, 20722, 20723, 20724, 20725, 20726, 20727, 20728, 20729, 20730, 20731, 20732, 20733, 20734, 20735, 20736, 20737, 20738, 20739, 20740, 20741, 20742, 20743, 20744, 20745, 20746, 20747, 20748, 20749, 20750, 20751, 20752, 20753, 20754, 20755, 20756, 20757, 20758, 20759, 20760, 20761, 20762, 20763, 20764, 20765, 20766, 20767, 20768, 20769, 20770, 20771, 20772, 20773, 20774, 20775, 20776, 20777, 20778, 20779, 20780, 20781, 20782, 20783, 20784, 20785, 20786, 20787, 20788, 20789, 20790, 20791, 20792, 20793, 20794, 20795, 20796, 20797, 20798, 20799, 20800, 20801, 20802, 20803, 20804, 20805, 20806, 20807, 20808, 20809, 20810, 20811, 20812, 20813, 20814, 20815, 20816, 20817, 20818, 20819, 20820, 20821, 20822, 20823, 20824, 20825, 20826, 20827, 20828, 20829, 20830, 20831, 20832, 20833, 20834, 20835, 20836, 20837, 20838, 20839, 20840, 20841, 20842, 20843, 20844, 20845, 20846, 20847, 20848, 20849, 20850, 20851, 20852, 20853, 20854, 20855, 20856, 20857, 20858, 20859, 20860, 20861, 20862, 20863, 20864, 20865, 20866, 20867, 20868, 20869, 20870, 20871, 20872, 20873, 20874, 20875, 20876, 20877, 20878, 20879, 20880, 20881, 20882, 20883, 20884, 20885, 20886, 20887, 20888, 20889, 20890, 20891, 20892, 20893, 20894, 20895, 20896, 20897, 20898, 20899, 20900, 20901, 20902, 20903, 20904, 20905, 20906, 20907, 20908, 20909, 20910, 20911, 20912, 20913, 20914, 20915, 20916, 20917, 20918, 20919, 20920, 20921, 20922, 20923, 20924, 20925, 20926, 20927, 20928, 20929, 20930, 20931, 20932, 20933, 20934, 20935, 20936, 20937, 20938, 20939, 20940, 20941, 20942, 20943, 20944, 20945, 20946, 20947, 20948, 20949, 20950, 20951, 20952, 20953, 20954, 20955, 20956, 20957, 20958, 20959, 20960, 20962, 20964, 20966, 
20968, 20970, 20972, 20974, 20976, 20978, 20980, 20982, 20984, 20986, 20988, 20990, 20992, 20994, 20996, 20998, 21000, 21002, 21004, 21006, 21008, 21010, 21012, 21014, 21016, 21018, 21020, 21022, 21024, 21026, 21028, 21030, 21032, 21034, 21036, 21038, 21040, 21042, 21044, 21046, 21048, 21050, 21052, 21054, 21056, 21058, 21060, 21062, 21064, 21066, 21068, 21070, 21072, 21074, 21076, 21078, 21080, 21082, 21084, 21086, 21088, 21090, 21092, 21094, 21096, 21098, 21100, 21102, 21104, 21106, 21108, 21110, 21112, 21114, 21116, 21118, 21120, 21122, 21124, 21126, 21128, 21130, 21132, 21134, 21136, 21138, 21140, 21142, 21144, 21146, 21148, 21150, 21152, 21154, 21156, 21158, 21160, 21162, 21164, 21166, 21168, 21170, 21172, 21174, 21176, 21178, 21180, 21182, 21184, 21186, 21188, 21190, 21192, 21194, 21196, 21198, 21200, 21202, 21204, 21206, 21208, 21210, 21212, 21214, 21216, 21218, 21220, 21222, 21224, 21226, 21228, 21230, 21232, 21234, 21236, 21238, 21240, 21242, 21244, 21246, 21248, 21250, 21252, 21254, 21256, 21258, 21260, 21262, 21264, 21266, 21268, 21270, 21272, 21274, 21276, 21278, 21280, 21282, 21284, 21286, 21288, 21290, 21292, 21294, 21296, 21298, 21300, 21302, 21304, 21306, 21308, 21310, 21312, 21314, 21316, 21318, 21320, 21322, 21324, 21326, 21328, 21330, 21332, 21334, 21336, 21338, 21340, 21342, 21344, 21346, 21348, 21350, 21352, 21354, 21356, 21358, 21360, 21362, 21364, 21366, 21368, 21370, 21372, 21374, 21376, 21378, 21380, 21382, 21384, 21386, 21388, 21390, 21392, 21394, 21396, 21398, 21400, 21402, 21404, 21406, 21408, 21410, 21412, 21414, 21416, 21418, 21420, 21422, 21424, 21426, 21428, 21430, 21432, 21434, 21436, 21438, 21440, 21442, 21444, 21446, 21448, 21450, 21452, 21454, 21456, 21458, 21460, 21462, 21464, 21466, 21468, 21470, 21472, 21474, 21476, 21478, 21480, 21482, 21484, 21486, 21488, 21490, 21492, 21494, 21496, 21498, 21500, 21502, 21503, 21504, 21505, 21506, 21507, 21508, 21509, 21510, 21512, 21514, 21516, 21518, 21520, 21522, 21524, 21526, 21528, 
21530, 21532, 21534, 21536, 21538, 21540, 21542, 21544, 21546, 21548, 21550, 21552, 21554, 21556, 21558, 21560, 21562, 21564, 21566, 21568, 21570, 21572, 21574, 21576, 21578, 21579, 21580, 21581, 21582, 21583, 21584, 21585, 21586, 21587, 21588, 21589, 21590, 21591, 21592, 21593, 21594, 21595, 21596, 21597, 21598, 21599, 21600, 21601, 21602, 21603, 21604, 21605, 21606, 21607, 21608, 21609, 21610, 21611, 21612, 21613, 21614, 21615, 21616, 21617, 21618, 21619, 21620, 21621, 21622, 21623, 21624, 21625, 21626, 21627, 21628, 21629, 21630, 21631, 21632, 21633, 21634, 21635, 21636, 21637, 21638, 21639, 21640, 21641, 21642, 21643, 21644, 21645, 21646, 21647, 21648, 21649, 21650, 21651, 21652, 21653, 21654, 21655, 21656, 21657, 21658, 21659, 21660, 21661, 21662, 21663, 21664, 21665, 21666, 21667, 21668, 21669, 21670, 21671, 21672, 21673, 21674, 21675, 21676, 21677, 21678, 21679, 21680, 21681, 21682, 21683, 21684, 21685, 21686, 21687, 21688, 21689, 21690, 21691, 21692, 21693, 21694, 21695, 21696, 21697, 21698, 21699, 21700, 21701, 21702, 21703, 21704, 21705, 21706, 21707, 21708, 21709, 21710, 21711, 21712, 21713, 21714, 21715, 21716, 21717, 21718, 21719, 21720, 21721, 21722, 21723, 21724, 21726, 21728, 21730, 21732, 21734, 21736, 21738, 21740, 21742, 21744, 21746, 21748, 21750, 21752, 21754, 21756, 21758, 21760, 21762, 21764, 21766, 21768, 21770, 21772, 21774, 21776, 21778, 21780, 21782, 21784, 21786, 21788, 21790, 21792, 21794, 21796, 21798, 21800, 21802, 21804, 21806, 21808, 21810, 21812, 21814, 21816, 21818, 21820, 21822, 21824, 21826, 21828, 21830, 21832, 21834, 21836, 21838, 21840, 21842, 21844, 21846, 21848, 21850, 21852, 21854, 21856, 21858, 21860, 21862, 21864, 21866, 21868, 21870, 21872, 21874, 21876, 21878, 21880, 21882, 21884, 21886, 21888, 21890, 21892, 21894, 21896, 21898, 21900, 21902, 21904, 21906, 21908, 21910, 21912, 21914, 21916, 21918, 21920, 21922, 21924, 21926, 21928, 21930, 21932, 21934, 21936, 21938, 21940, 21942, 21944, 21946, 21948, 21950, 21952, 
21954, 21956, 21958, 21960, 21962, 21964, 21966, 21968, 21970, 21972, 21974, 21976, 21978, 21980, 21982, 21984, 21986, 21988, 21990, 21992, 21994, 21996, 21998, 22000, 22002, 22004, 22006, 22008, 22010, 22012, 22014, 22016, 22018, 22020, 22022, 22024, 22026, 22028, 22030, 22032, 22034, 22036, 22038, 22040, 22042, 22044, 22046, 22048, 22050, 22052, 22054, 22055, 22056, 22057, 22058, 22059, 22060, 22061, 22062, 22063, 22064, 22065, 22066, 22067, 22068, 22070, 22072, 22074, 22076, 22078, 22080, 22082, 22084, 22086, 22088, 22090, 22092, 22094, 22096, 22098, 22100, 22102, 22104, 22106, 22108, 22110, 22112, 22114, 22116, 22118, 22120, 22122, 22124, 22125, 22126, 22128, 22130, 22132, 22134, 22136, 22138, 22140, 22142, 22144, 22146, 22148, 22150, 22152, 22154, 22156, 22158, 22160, 22162, 22164, 22166, 22168, 22170, 22172, 22174, 22176, 22178, 22180, 22182, 22184, 22185, 22186, 22187, 22188, 22189, 22190, 22191, 22192, 22193, 22194, 22195, 22196, 22197, 22198, 22199, 22200, 22201, 22202, 22203, 22204, 22205, 22206, 22208, 22210, 22212, 22214, 22216, 22218, 22220, 22222, 22224, 22226, 22228, 22230, 22232, 22234, 22236, 22238, 22240, 22242, 22244, 22246, 22248, 22250, 22252, 22254, 22256, 22258, 22260, 22262, 22264, 22266, 22268, 22270, 22272, 22274, 22276, 22278, 22280, 22282, 22284, 22286, 22288, 22290, 22292, 22293, 22294, 22295, 22296, 22297, 22298, 22299, 22300, 22301, 22302, 22303, 22304, 22305, 22306, 22307, 22308, 22309, 22310, 22311, 22312, 22313, 22314, 22315, 22316, 22317, 22318, 22319, 22320, 22321, 22322, 22323, 22324, 22325, 22326, 22328, 22330, 22332, 22334, 22336, 22338, 22340, 22342, 22344, 22346, 22348, 22350, 22352, 8, 9, 10, 11, 12, 13, 14, 15, 24149, 24151, 24153, 5246, 5241, 3631, 3626, 3631, 3626, 24160, 24162, 24164, 24166, 5246, 5241, 4976, 4971, 24170, 4976, 4971, 24172, 5002, 4997, 23486, 5002, 4997, 23489, 5017, 5012, 5027, 5022, 5041, 5036, 5041, 5046, 777, 772, 826, 821, 22380, 950, 22383, 963, 1069, 1064, 1079, 1074, 1089, 1084, 1099, 1094, 
1122, 1117, 1112, 1122, 1117, 1127, 1137, 1132, 1147, 1142, 1157, 1152, 1162, 1167, 1190, 1185, 1180, 1190, 1185, 1195, 1834, 1839, 22396, 22798, 1877, 22397, 1864, 1882, 1887, 1892, 1897, 24174, 1388, 1452, 1470, 1465, 1534, 1529, 24176, 1447, 1442, 1534, 1529, 1549, 1577, 1612, 1607, 24178, 1839, 1834, 1849, 1844, 22763, 1877, 1854, 1859, 1864, 1887, 1882, 1897, 1892, 1737, 1909, 2860, 2855, 2873, 2898, 2893, 2908, 2903, 24180, 2993, 2998, 24182, 7038, 7033, 7028, 7062, 7057, 24184, 24192, 24194, 22430, 22432, 24196, 6031, 24198, 6036, 6081, 6076, 6071, 6081, 6076, 6086, 6109, 6104, 6099, 6109, 6104, 6114, 6124, 6119, 5827, 5814, 6196, 6191, 6201, 6211, 6216, 24200, 6211, 6216, 24202, 6270, 6265, 5916, 6277, 6282, 6285, 6283, 6295, 6290, 6305, 6300, 22461, 6351, 6346, 6402, 6397, 6364, 6328, 6323, 6333, 6318, 6455, 6450, 6455, 6450, 6470, 6475, 685, 6503, 6498, 6508, 6493, 6518, 6513, 6523, 6528, 6581, 6586, 6596, 6591, 6606, 6601, 6616, 6611, 5670, 5665, 24204, 6654, 6659, 6654, 6659, 6581, 6586, 6596, 6591, 6606, 6601, 6616, 6611, 5670, 5665, 24208, 6659, 6654, 6664, 22489, 6718, 6733, 6743, 6738, 6748, 6753, 22497, 6775, 6770, 646, 641, 6425, 6420, 22508, 6445, 6440, 6455, 6450, 6455, 6450, 6470, 6475, 685, 646, 641, 6425, 6420, 22508, 6445, 6440, 6455, 6450, 6455, 6450, 6470, 6475, 685, 6503, 6498, 22515, 6518, 6513, 6523, 6528, 6551, 6546, 6556, 6541, 6566, 6561, 6576, 6571, 646, 641, 6425, 6420, 22529, 6445, 6440, 6455, 6450, 24214, 6455, 6450, 24216, 6475, 6470, 685, 24218, 24220, 24222, 7753, 7665, 7758, 2747, 2742, 24228, 2812, 2807, 24230, 24232, 24234, 3776, 3761, 2850, 2845, 2860, 2855, 2883, 2878, 2888, 3631, 3626, 3631, 3626, 24236, 24238, 24240, 24242, 3650, 3645, 3660, 3655, 23198, 3678, 3673, 3542, 3537, 3565, 3560, 3570, 3555, 23162, 3592, 3590, 3631, 3626, 777, 772, 826, 821, 871, 866, 22571, 22557, 22575, 871, 866, 876, 799, 794, 804, 912, 907, 902, 912, 907, 917, 22565, 826, 821, 871, 866, 22571, 22573, 22575, 871, 866, 876, 22580, 889, 912, 
907, 902, 912, 907, 917, 927, 922, 937, 932, 22593, 950, 22596, 963, 986, 981, 976, 986, 981, 991, 1014, 1009, 22606, 1014, 1009, 22609, 1029, 1024, 1039, 1034, 1049, 1044, 1059, 1054, 1069, 1064, 1079, 1074, 1089, 1084, 1099, 1094, 1122, 1117, 1112, 1122, 1117, 1127, 1137, 1132, 1147, 1142, 1157, 1152, 1167, 1162, 1190, 1185, 1180, 1190, 1185, 1195, 22647, 1437, 1432, 1761, 1756, 1776, 22654, 1799, 1794, 1809, 1804, 1819, 1814, 1824, 1834, 1839, 22663, 1854, 1859, 1877, 22666, 1864, 1882, 1887, 1897, 1892, 24248, 1388, 1761, 1756, 1799, 1794, 1809, 1804, 1819, 1814, 1819, 1814, 1819, 1814, 1829, 1834, 1839, 1849, 1844, 22798, 1877, 22688, 1864, 1882, 1887, 1892, 1897, 24252, 24255, 22694, 1437, 1432, 1447, 1442, 1417, 1427, 1422, 1437, 1432, 1447, 1442, 1452, 1510, 1505, 1470, 1465, 1534, 1529, 1534, 1529, 1510, 1505, 22717, 1510, 1505, 22720, 1534, 1529, 1534, 1529, 1559, 1554, 1549, 1559, 1554, 1564, 1587, 1582, 1577, 1587, 1582, 1592, 1602, 1597, 1612, 1607, 1622, 1617, 1632, 1627, 1751, 1746, 1761, 1756, 1789, 1799, 1794, 1809, 1804, 1819, 1814, 1829, 1824, 1839, 1834, 1849, 1844, 22763, 1887, 1882, 1892, 1887, 1882, 1897, 1737, 1909, 1751, 1746, 1761, 1756, 1771, 1766, 1776, 22780, 1789, 1799, 1794, 1809, 1804, 1819, 1814, 1829, 1824, 1839, 1834, 1849, 1844, 1859, 1854, 1864, 22798, 1877, 1887, 1882, 1897, 1892, 1907, 1907, 22807, 2686, 2681, 1947, 1942, 2699, 1947, 1942, 2704, 2714, 2709, 2719, 2714, 2709, 2724, 2747, 2742, 2752, 2747, 2742, 2737, 2762, 2757, 2772, 2767, 2782, 2777, 2787, 2782, 2777, 2792, 2850, 2845, 2860, 2855, 2883, 2878, 2888, 2883, 2878, 2873, 2898, 2893, 2908, 2903, 2918, 2913, 2923, 2928, 2938, 2933, 2943, 2938, 2933, 2948, 2953, 2958, 24264, 2938, 2933, 2948, 2943, 2958, 2953, 24266, 2978, 2973, 24268, 2978, 2973, 24270, 2998, 2993, 24272, 2898, 2893, 2908, 2903, 2918, 2913, 2928, 2923, 2938, 2933, 2943, 2938, 2933, 2948, 2958, 2953, 24274, 2938, 2933, 2948, 2943, 2958, 2953, 24276, 2978, 2973, 24278, 2978, 2973, 24280, 2998, 2993, 
24282, 3031, 3026, 3021, 3055, 3050, 22889, 2291, 2286, 3076, 3071, 3086, 3081, 3091, 2291, 2286, 22900, 3086, 3081, 3096, 3131, 3126, 3141, 3136, 3151, 3146, 3152, 22907, 2472, 3191, 3203, 3198, 3131, 3126, 3141, 3136, 3151, 3146, 3152, 22920, 2472, 3193, 3203, 3198, 3021, 3055, 3050, 22929, 22931, 22933, 3131, 3126, 3141, 3136, 3151, 3146, 3154, 3185, 3180, 2472, 3193, 3191, 3203, 3198, 3268, 3263, 22951, 3299, 3294, 3289, 3299, 3294, 3304, 3314, 3309, 3319, 3329, 3324, 22959, 3268, 3263, 3223, 3221, 3299, 3294, 3299, 3294, 3299, 3294, 3304, 3314, 3309, 3240, 3324, 3329, 24286, 3324, 3329, 24288, 2619, 2614, 24290, 2619, 2614, 24292, 3355, 3350, 3420, 3415, 23141, 3397, 3402, 22984, 3420, 3415, 3425, 3430, 3435, 3440, 3450, 3445, 23141, 3402, 3397, 23145, 3420, 3415, 3430, 3425, 3440, 3435, 3450, 3445, 2676, 2671, 2686, 2681, 22995, 2704, 2699, 2714, 2709, 2724, 2719, 2747, 2742, 2737, 2747, 2742, 2752, 2762, 2757, 2772, 2767, 2782, 2777, 2792, 2787, 2802, 2797, 2812, 2807, 2835, 2830, 2825, 2835, 2830, 2840, 2850, 2845, 2860, 2855, 2878, 2883, 2873, 2883, 2878, 2888, 2898, 2893, 2908, 2903, 2918, 2913, 2928, 2923, 2938, 2933, 2948, 2943, 2958, 2953, 2968, 2963, 2978, 2973, 2988, 2983, 2998, 2993, 3008, 3003, 3031, 3026, 3021, 3031, 3026, 3036, 3055, 3050, 24296, 3055, 3050, 24298, 23071, 3076, 3071, 3086, 3081, 3096, 3091, 3131, 3126, 3141, 3136, 3151, 3146, 24300, 3131, 3126, 3141, 3136, 3151, 3146, 24302, 3185, 3180, 3190, 24304, 3203, 3198, 3185, 3180, 3190, 24306, 3203, 3198, 3268, 3263, 24308, 3268, 3263, 24310, 3299, 3294, 3289, 3299, 3294, 3304, 3314, 3309, 3240, 3324, 3329, 3258, 3253, 3268, 3263, 23115, 3299, 3294, 3289, 3299, 3294, 3304, 3314, 3309, 3319, 3329, 3324, 23128, 23130, 3402, 3397, 3355, 3350, 3420, 3415, 3425, 3430, 3440, 3435, 3445, 3450, 3384, 23141, 3402, 3397, 23145, 3420, 3415, 3430, 3425, 3440, 3435, 3450, 3445, 3455, 3532, 3527, 3542, 3537, 3565, 3560, 3570, 3555, 23162, 3592, 3590, 3602, 3597, 3612, 3607, 24312, 24314, 24316, 3650, 
3645, 3514, 3509, 23170, 3532, 3527, 3542, 3537, 3565, 3560, 3555, 3565, 3560, 3570, 3580, 3575, 24318, 23184, 24320, 3602, 3597, 3612, 3607, 3631, 3626, 3631, 3626, 24324, 24326, 24328, 24330, 3650, 3645, 3660, 3655, 23198, 3678, 3673, 3688, 3683, 3698, 3693, 3717, 3712, 24332, 3717, 3712, 24334, 3743, 3738, 3733, 3743, 3738, 3748, 3771, 3766, 3761, 3771, 3766, 3776, 3799, 3794, 3789, 3799, 3794, 3804, 3814, 3809, 3824, 3819, 3843, 3838, 24336, 3843, 3838, 24338, 3856, 3851, 3866, 3861, 3876, 3871, 3886, 3881, 23244, 3899, 3909, 3904, 3914, 3924, 3919, 3934, 3929, 3957, 3952, 3947, 3957, 3952, 3962, 3999, 3994, 4009, 4004, 23260, 23272, 4048, 4043, 4038, 4048, 4043, 4053, 4063, 4058, 4063, 4058, 3999, 3994, 4009, 4004, 23270, 23272, 4048, 4043, 4038, 4048, 4043, 4053, 4063, 4058, 4068, 4078, 4073, 4088, 4083, 4111, 4106, 4101, 4111, 4106, 4116, 4126, 4121, 4136, 4131, 4146, 4141, 4156, 4151, 4179, 4174, 4169, 4179, 4174, 4184, 4211, 4206, 4211, 4206, 4211, 4206, 24344, 23313, 4227, 4237, 4232, 4242, 4252, 4247, 4262, 4257, 4455, 4450, 4616, 4614, 4639, 4634, 4629, 4639, 4634, 4644, 4654, 4649, 4664, 4659, 4674, 4669, 4679, 4674, 4669, 4684, 4698, 4703, 24346, 4698, 4703, 24348, 4729, 4724, 4729, 4724, 4729, 4724, 24350, 4574, 4744, 4574, 4749, 4767, 4762, 4767, 4777, 4698, 4703, 24352, 4698, 4703, 24354, 4729, 4724, 4729, 4724, 4729, 4724, 24356, 4574, 4744, 4574, 4749, 4772, 4762, 4772, 4777, 4869, 4864, 4874, 4859, 5002, 4997, 23343, 5002, 4997, 23346, 4574, 4744, 4574, 4749, 4903, 4898, 23354, 4952, 4947, 4846, 4976, 4971, 5017, 5012, 24362, 5017, 5012, 24364, 4441, 4441, 5036, 4455, 4450, 4616, 4614, 4654, 4649, 4664, 4659, 4684, 4679, 4698, 4703, 24366, 4698, 4703, 24368, 4729, 4724, 4729, 4724, 4729, 4724, 24372, 4574, 4744, 4574, 4749, 4767, 4762, 4767, 4777, 4574, 4744, 4574, 4749, 4772, 4772, 4772, 4762, 23404, 24376, 23406, 24378, 4639, 4634, 4629, 4639, 4634, 4644, 4654, 4649, 4664, 4659, 4674, 4669, 4684, 4679, 4703, 4698, 24380, 4703, 4698, 24382, 
4729, 4724, 4719, 4729, 4724, 4734, 4739, 4749, 4744, 4772, 4767, 4762, 4772, 4767, 4777, 4869, 4864, 4874, 4859, 4903, 4898, 23447, 4932, 4937, 4927, 4932, 4937, 4942, 4952, 4947, 4846, 4869, 4864, 4859, 4869, 4864, 4874, 4903, 4898, 4903, 4898, 4903, 4898, 4908, 4937, 4932, 4937, 4932, 4937, 4932, 4942, 4952, 4947, 4957, 4976, 4971, 24388, 4976, 4971, 24390, 5002, 4997, 23486, 5002, 4997, 23489, 5017, 5012, 5027, 5022, 5041, 5036, 5041, 5046, 5069, 5064, 5059, 5069, 5064, 5074, 5084, 5079, 5094, 5089, 5117, 5112, 5107, 5117, 5112, 5122, 5145, 5140, 5135, 5145, 5140, 5150, 23520, 5210, 23535, 5223, 5246, 5241, 5236, 5246, 5241, 5251, 5261, 5256, 5271, 5266, 5192, 5187, 5197, 23532, 5210, 23535, 5223, 5246, 5241, 5236, 5246, 5241, 5251, 5261, 5256, 5271, 5266, 23548, 5284, 5348, 5343, 24392, 24394, 5435, 5445, 5440, 5486, 5481, 5496, 5491, 5329, 5329, 5348, 5343, 24396, 5348, 5343, 24398, 5374, 5369, 5364, 5374, 5369, 5379, 5427, 5422, 24400, 5427, 5422, 24402, 5400, 23577, 5427, 5422, 24404, 5427, 5422, 24406, 5435, 5445, 5440, 5486, 5481, 5496, 5491, 5471, 5466, 5476, 5486, 5481, 5496, 5491, 23597, 5509, 23600, 5522, 5532, 5527, 5537, 5551, 5546, 5551, 5556, 6055, 6050, 24408, 6055, 6050, 24410, 6081, 6076, 6071, 6081, 6076, 6086, 6196, 6191, 6201, 6196, 6191, 6206, 6211, 6216, 24412, 6211, 6216, 24414, 6270, 6265, 5916, 6277, 6282, 6285, 6283, 6503, 6498, 23628, 6518, 6513, 6528, 6523, 6586, 6581, 6591, 6596, 6606, 6601, 6616, 6611, 5670, 5665, 24416, 6659, 6654, 6649, 6659, 6654, 6664, 6581, 6586, 6596, 6591, 6606, 6601, 6616, 6611, 23847, 24418, 6659, 6654, 6649, 6659, 6654, 6664, 23646, 6718, 6733, 6743, 6738, 6748, 6753, 6763, 6758, 23654, 23656, 23658, 23660, 6763, 6758, 23664, 23666, 24426, 6031, 24428, 6036, 6124, 6119, 5814, 6124, 6119, 5827, 6142, 6137, 24430, 6142, 6137, 24432, 6196, 6191, 6201, 6196, 6191, 6206, 6211, 6216, 24434, 6211, 6216, 24436, 6270, 6265, 5916, 6282, 6277, 24438, 6196, 6191, 6201, 6196, 6191, 6206, 6211, 6216, 24440, 6211, 6216, 
24442, 6265, 6270, 5916, 6282, 6277, 24444, 6295, 6290, 6305, 6300, 6328, 6323, 6333, 6318, 6402, 6397, 6364, 6402, 6397, 6407, 23713, 23715, 24446, 6036, 6031, 23719, 6023, 6018, 24448, 6036, 6031, 6055, 6050, 24450, 6055, 6050, 24452, 6081, 6076, 6071, 6081, 6076, 6086, 6109, 6104, 6099, 6109, 6104, 6114, 6124, 6119, 23743, 6142, 6137, 6152, 6147, 6181, 6176, 6181, 6176, 6181, 6176, 6186, 6196, 6191, 6206, 6201, 6216, 6211, 6226, 6221, 6270, 6265, 6270, 6265, 6282, 6277, 24456, 6270, 6265, 6270, 6265, 6282, 6277, 24460, 6295, 6290, 6305, 6300, 6328, 6323, 6318, 6328, 6323, 6333, 23786, 6351, 6346, 6402, 6397, 6364, 6387, 6382, 23794, 6387, 6382, 23797, 6402, 6397, 6407, 23802, 6425, 6420, 6435, 6430, 6445, 6440, 6455, 6450, 6465, 6460, 6475, 6470, 6480, 6503, 6498, 6493, 6503, 6498, 6508, 6518, 6513, 6528, 6523, 6551, 6546, 6541, 6551, 6546, 6556, 6566, 6561, 6576, 6571, 6586, 6581, 6596, 6591, 6606, 6601, 6616, 6611, 23845, 24462, 23847, 24464, 6659, 6654, 6649, 6659, 6654, 6664, 23855, 6718, 6733, 6743, 6738, 6748, 6753, 6763, 6758, 23862, 6780, 23880, 6793, 6728, 6723, 6718, 6728, 6723, 6733, 6743, 6738, 6753, 6748, 6763, 6758, 6775, 6770, 6780, 23880, 6793, 6803, 6798, 6813, 6808, 6832, 6827, 24468, 6832, 6827, 24470, 6845, 6840, 6855, 6850, 6884, 6879, 6884, 6879, 6884, 6879, 6889, 6908, 6903, 24474, 6908, 6903, 24476, 6930, 6925, 24478, 6930, 6925, 24480, 6956, 6951, 6946, 6956, 6951, 6961, 23916, 6974, 23919, 6987, 7010, 7005, 7000, 7010, 7005, 7015, 7038, 7033, 7028, 7038, 7033, 7043, 7062, 7057, 24482, 7062, 7057, 24484, 7088, 7083, 7078, 7088, 7083, 7093, 7116, 7111, 7106, 7116, 7111, 7121, 7144, 7139, 7134, 7144, 7139, 7149, 7201, 7196, 7206, 7191, 7216, 7211, 7178, 7201, 7196, 7191, 7201, 7196, 7206, 7216, 7211, 7221, 7556, 7551, 7574, 7569, 24486, 7624, 7619, 7719, 7714, 7665, 7546, 7541, 7592, 7587, 24488, 7611, 7606, 24490, 7696, 7719, 7714, 7729, 7724, 23991, 7546, 7541, 7556, 7551, 23997, 7574, 7569, 7347, 7342, 7592, 7587, 7611, 7606, 24007, 
7729, 7719, 7714, 24012, 7882, 7877, 7908, 7903, 7961, 7956, 7984, 7979, 7824, 24023, 7847, 7842, 7852, 7872, 7867, 7951, 7946, 7984, 7979, 7984, 7979, 7819, 7814, 24038, 7882, 7877, 7882, 7877, 7908, 7903, 7923, 7918, 7928, 7546, 7541, 7556, 7551, 24053, 7574, 7569, 24057, 7592, 7587, 7611, 7606, 24500, 7611, 7606, 24502, 24084, 7709, 7624, 7619, 7696, 7719, 7714, 7724, 7719, 7714, 7729, 24074, 7665, 7753, 7758, 7683, 7678, 24081, 7696, 24084, 7709, 7719, 7714, 7729, 7724, 24091, 24093, 7758, 7753, 24097, 7819, 7814, 7824, 7819, 7814, 7837, 7847, 7842, 24107, 7819, 7814, 7824, 24112, 7837, 7847, 7842, 7852, 7862, 7857, 7872, 7867, 7882, 7877, 24508, 7908, 7903, 7898, 7908, 7903, 7913, 7923, 7918, 7928, 24133, 7941, 7951, 7946, 7961, 7956, 7984, 7979, 7974, 7984, 7979, 7989, 10802, 10802, 24781, 10736, 10802, 10802, 10802, 10802, 10802, 10802, 10802, 10903, 10898, 10893, 10903, 10898, 10908, 10918, 10913, 24783, 10918, 10913, 24785, 10938, 10933, 10948, 10943, 10958, 10953, 24787, 10807, 10807, 10807, 10807, 10807, 10807, 10807, 10807, 10807, 11576, 11571, 11581, 11576, 11571, 11586, 11605, 11600, 11576, 11571, 11586, 11581, 11605, 11600, 11630, 11625, 11620, 11630, 11625, 11635, 11658, 11653, 11663, 11658, 11653, 11658, 11653, 24823, 10938, 10933, 10948, 10943, 10958, 10953, 24825, 24827, 10908, 10893, 11483, 11478, 11483, 11478, 11483, 11478, 24829, 24831, 24833, 24835, 24837, 11483, 11478, 24839, 11483, 11478, 24841, 24843, 24845, 24847, 24849, 11543, 11538, 24851, 10535, 10530, 10535, 10530, 10441, 10436, 10551, 10574, 10569, 24855, 24520, 10466, 10485, 10480, 24857, 10485, 10480, 24859, 10511, 10506, 10501, 10511, 10506, 10516, 10535, 10530, 24861, 10535, 10530, 24863, 24537, 10551, 24540, 10564, 10574, 10569, 10584, 10579, 10607, 10602, 10597, 10607, 10602, 10612, 10631, 10626, 24865, 10631, 10626, 24867, 24557, 10647, 10750, 10755, 10750, 10755, 10755, 10750, 24871, 10768, 10763, 10778, 10773, 10807, 10802, 10802, 10807, 24873, 10736, 10750, 10755, 24875, 
10755, 10750, 24877, 10768, 10763, 10778, 10773, 10807, 10802, 10807, 10802, 24880, 10736, 10750, 10755, 24882, 10755, 10750, 24884, 10768, 10763, 10778, 10773, 10802, 10807, 10802, 10807, 10731, 10726, 10736, 10755, 10750, 24887, 10755, 10750, 24889, 10768, 10763, 10778, 10773, 10807, 10802, 10807, 10802, 10807, 10802, 10812, 24596, 10825, 10903, 10898, 10893, 10903, 10898, 10908, 10918, 10913, 24893, 10918, 10913, 24895, 10938, 10933, 10948, 10943, 10953, 10958, 24897, 10903, 10898, 10893, 10903, 10898, 10908, 10918, 10913, 10928, 10923, 10938, 10933, 10948, 10943, 10958, 10953, 24899, 11183, 11178, 11193, 11188, 11183, 11178, 11154, 11149, 24901, 11234, 11229, 24903, 11234, 11229, 24905, 11154, 11149, 11252, 11247, 11262, 11257, 11272, 11267, 24907, 11272, 11267, 24909, 11292, 11287, 11297, 11292, 11287, 11302, 11321, 11316, 11078, 11183, 11178, 24911, 11183, 11178, 24913, 11198, 24661, 11183, 11178, 11188, 11183, 11178, 11193, 11203, 11213, 11208, 11154, 11149, 24915, 11234, 11229, 11173, 11168, 11183, 11178, 11193, 11188, 11203, 11198, 11213, 11208, 24686, 24917, 11234, 11229, 24690, 11252, 11247, 11262, 11257, 11272, 11267, 11282, 11277, 11292, 11287, 11302, 11297, 11321, 11316, 11321, 11316, 11483, 11478, 11483, 11478, 11483, 11478, 24923, 24925, 24927, 24929, 24931, 11483, 11478, 24933, 11483, 11478, 24935, 24937, 24940, 24943, 24945, 11543, 11538, 11553, 11548, 11563, 11558, 24947, 11543, 11538, 11553, 11548, 11563, 11558, 24949, 11576, 11571, 11581, 11576, 11571, 11586, 11605, 11600, 11576, 11571, 11586, 11581, 11605, 11600, 11630, 11625, 11658, 11653, 11658, 11653, 11658, 11653, 11483, 11478, 24955, 11483, 11478, 24957, 11505, 11500, 24959, 11505, 11500, 24961, 11543, 11538, 11553, 11548, 11563, 11558, 24963, 11543, 11538, 11553, 11548, 11563, 11558, 24965, 11576, 11571, 11586, 11581, 11605, 11600, 11605, 11600, 11630, 11625, 11620, 11630, 11625, 11635, 11658, 11653, 11648, 11658, 11653, 11663, 13435, 13440, 13450, 13445, 13460, 13455, 24990, 25134, 
25136, 13498, 13493, 13430, 13440, 13435, 13450, 13445, 13460, 13455, 24990, 25138, 25140, 13498, 13493, 13430, 13435, 13440, 13450, 13445, 13460, 13455, 24790, 13478, 13473, 25142, 13498, 13493, 13430, 13885, 13880, 13895, 13890, 13561, 13556, 13571, 13566, 13581, 13576, 13586, 13581, 13576, 13591, 11825, 11820, 24809, 13335, 13330, 13345, 13340, 13355, 13350, 13365, 13360, 13388, 13383, 13378, 13388, 13383, 13393, 13701, 13696, 11847, 11842, 13727, 13732, 25055, 13732, 13727, 25176, 13742, 13747, 13757, 13752, 13780, 13775, 13785, 13770, 24970, 24972, 13335, 13330, 13345, 13340, 13355, 13350, 13365, 13360, 13388, 13383, 13378, 13388, 13383, 13393, 13435, 13440, 13450, 13445, 13460, 13455, 24990, 13478, 13473, 13483, 13488, 13498, 13493, 13430, 13440, 13435, 13450, 13445, 13460, 13455, 25003, 13478, 13473, 13488, 13483, 13498, 13493, 13503, 13513, 13508, 13523, 13518, 13546, 13541, 13536, 13546, 13541, 13551, 13561, 13556, 13571, 13566, 13581, 13576, 13591, 13586, 13614, 13609, 13604, 13614, 13609, 13619, 13638, 13633, 25207, 13638, 13633, 25209, 13660, 13655, 25211, 13660, 13655, 25213, 13686, 13681, 13676, 13686, 13681, 13691, 13701, 13696, 25052, 13732, 13727, 25055, 13732, 13727, 25058, 13747, 13742, 13757, 13752, 13780, 13775, 13770, 13780, 13775, 13785, 13814, 13809, 13814, 13809, 13814, 13809, 13819, 13842, 13837, 13832, 13842, 13837, 13847, 13870, 13865, 13860, 13870, 13865, 13875, 13885, 13880, 13895, 13890, 13914, 13909, 25217, 13914, 13909, 25219, 13927, 13922, 13937, 13932, 13947, 13942, 13952, 25104, 13965, 13984, 13979, 25221, 13984, 13979, 25223, 13997, 13992, 14007, 14002, 14017, 14012, 14027, 14022, 14037, 14032, 14047, 14042, 14066, 14061, 25225, 14066, 14061, 25227, 15282, 15277, 15292, 15287, 15302, 15297, 15302, 15297, 14157, 14157, 14157, 15317, 15312, 25272, 15325, 15335, 15340, 15350, 15345, 25274, 15350, 15345, 25276, 15383, 15378, 25278, 14266, 14266, 14266, 15302, 15297, 15282, 15277, 15292, 15287, 15302, 15297, 15297, 15302, 15330, 
15330, 15350, 15345, 25282, 15350, 15345, 25284, 15383, 15378, 25286, 15282, 15277, 15292, 15287, 15302, 15297, 15282, 15277, 15292, 15287, 15302, 15297, 15325, 15335, 15340, 15325, 15350, 15345, 25290, 15350, 15345, 25292, 15383, 15378, 25294, 25201, 25203, 25205, 15420, 15282, 15277, 15292, 15287, 15302, 15297, 15272, 15282, 15277, 15292, 15287, 15302, 15297, 15307, 15317, 15312, 25296, 15330, 15325, 15335, 15340, 15282, 15277, 15292, 15287, 15302, 15297, 15272, 15282, 15277, 15292, 15287, 15302, 15297, 15307, 15317, 15312, 25298, 15330, 15325, 15340, 15335, 15350, 15345, 15360, 15355, 15383, 15378, 15373, 15383, 15378, 15388, 25266, 25268, 25270, 15420, 25302, 25304, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16681, 16676, 16671, 16681, 16676, 16686, 14, 15, 25331, 25332, 25333, 25334, 25335, 25336, 25341, 25342, 25343, 25344, 25346, 25347, 25349, 25350, 25351, 25352, 25353, 25354, 25355, 25356, 25357, 25358, 25359, 25360, 25361, 25362, 25363, 25364, 25365, 25366, 25367, 25368, 25369, 25370, 25371, 25372, 25373, 25374, 25375, 25376, 25377, 25378, 25379, 25380, 25381, 25382, 25383, 25384, 25385, 25386, 25387, 25388, 25389, 25390, 25391, 25392, 25393, 25394, 25395, 25396, 25397, 25398, 25399, 25400, 25401, 25402, 25403, 25404, 25405, 25406, 25407, 25408, 25409, 25411, 25412, 25413, 25414, 25415, 25416, 25418, 25419, 25420, 25421, 25422, 25423, 25424, 25425, 25427, 25428, 25429, 25430, 25431, 25432, 25433, 25434, 25435, 25436, 25437, 25438, 25439, 25440, 25441, 25442, 25443, 25444, 25445, 25446, 25447, 25448, 25450, 25451, 25453, 25454, 25455, 25456, 25457, 25461, 25462, 25464, 25466, 25467, 25468, 25469, 25470, 25471, 25472, 25473, 25474, 25475, 25476, 25477, 25478, 25479, 25480, 25481, 25482, 25483, 25484, 25485, 25486, 25487, 25489, 25490, 25492, 25493, 25494, 25495, 25496, 25497, 25498, 25499, 
25500, 25501, 25502, 25503, 25504, 25505, 25506, 25507, 25508, 25509, 25510, 25511, 25512, 25513, 25514, 25515, 25516, 25517, 25518, 25519, 25520, 25521, 25522, 25523, 25524, 25525, 25526, 25527, 25528, 25529, 25530, 25531, 25532, 25533, 25534, 25535, 25536, 25537, 25539, 25540, 25541, 25542, 25543, 25544, 25545, 25546, 25547, 25548, 25549, 25550, 25551, 25552, 25554, 25555, 25556, 25557, 25558, 25559, 25560, 25561, 25562, 25563, 25564, 25565, 25566, 25567, 25568, 25569, 25570, 25571, 25572, 25573, 25574, 25575, 25576, 25577, 25578, 25579, 25580, 25581, 25582, 25583, 25584, 25585, 25586, 25587, 25588, 25589, 25590, 25591, 25592, 25593, 25594, 25595, 25596, 25597, 25598, 25599, 25600, 25601, 25602, 25603, 25604, 25605, 25606, 25607, 25608, 25609, 25610, 25611, 25612, 25613, 25614, 25615, 25616, 25617, 25618, 25620, 25621, 25623, 25624, 25625, 25629, 25630, 25631, 25632, 25633, 25635, 25636, 25640, 25641, 25642, 25643, 25644, 25645, 25646, 25647, 25648, 25649, 25650, 25651, 25652, 25657, 25658, 25659, 25660, 25661, 25662, 25663, 25664, 25665, 25666, 25667, 25668, 25669, 25670, 25671, 25672, 25673, 25674, 25675, 25676, 25677, 25678, 25679, 25680, 25681, 25682, 25683, 25684, 25685, 25686, 25687, 25688, 25689, 25690, 25691, 25692, 25693, 25694, 25695, 25696, 25697, 25698, 25699, 25700, 25701, 25702, 25703, 25704, 25705, 25706, 25707, 25708, 25709, 25710, 25711, 25712, 25713, 25714, 25715, 25716, 25717, 25718, 25719, 25720, 25721, 25722, 25723, 25724, 25725, 25726, 25727, 25728, 25729, 25730, 25731, 25732, 25733, 25734, 25735, 25736, 25737, 25738, 25739, 25740, 25741, 25742, 25743, 25744, 25745, 25746, 25747, 25748, 25749, 25750, 25751, 25752, 25753, 25754, 25755, 25756, 25757, 25758, 25759, 25760, 25761, 25762, 25763, 25764, 25765, 25766, 25767, 25768, 25769, 25770, 25771, 25772, 25773, 25774, 25775, 25776, 25777, 25778, 25779, 25780, 25781, 25782, 25783, 25784, 25785, 25786, 25787, 25788, 25789, 25790, 25791, 25792, 25793, 25794, 25795, 25796, 25798, 25799, 25800, 
25801, 25802, 25803, 25804, 25805, 25806, 25807, 25808, 25809, 25810, 25811, 25812, 25813, 25814, 25815, 25816, 25817, 25818, 25819, 25820, 25821, 25822, 25823, 25826, 25827, 25828, 25829, 25830, 25831, 25832, 25833, 25834, 25835, 25836, 25837, 25838, 25839, 25840, 25841, 25842, 25843, 25844, 25845, 25846, 25847, 25848, 25849, 25850, 25851, 25852, 25853, 25854, 25855, 25856, 25857, 25858, 25859, 25860, 25861, 25862, 25863, 25864, 25865, 25866, 25867, 25868, 25869, 25870, 25871, 25872, 25873, 25874, 25875, 25876, 25877, 25878, 25879, 25880, 25881, 25882, 25883, 25884, 25885, 25886, 25887, 25888, 25889, 25890, 25891, 25892, 25893, 25894, 25895, 25896, 25897, 25898, 25899, 25900, 25901, 25902, 25903, 25904, 25905, 25906, 25907, 25908, 25909, 25910, 25911, 25912, 25913, 25914, 25915, 25916, 25917, 25918, 25919, 25920, 25921, 25922, 25923, 25924, 25925, 25926, 25927, 25928, 25929, 25930, 25931, 25932, 25933, 25934, 25935, 25936, 25937, 25938, 25939, 25940, 25941, 25942, 25943, 25944, 25945, 25946, 25947, 25948, 25949, 25950, 25951, 25952, 25953, 25954, 25955, 25956, 25957, 25958, 25959, 25960, 25961, 25962, 25963, 25964, 25965, 25966, 25967, 25968, 25969, 25970, 25971, 25972, 25973, 25974, 25975, 25976, 25977, 25978, 25979, 25980, 25981, 25982, 25983, 25984, 25985, 25986, 25987, 25988, 25989, 25990, 25991, 25993, 25994, 25995, 25996, 25997, 25998, 26000, 26001, 26003, 26004, 26006, 26007, 26009, 26010, 26011, 26012, 26013, 26014, 26015, 26016, 26017, 26018, 26019, 26020, 26021, 26022, 26023, 26024, 26026, 26027, 26028, 26029, 26030, 26031, 26033, 26034, 26036, 26037, 26039, 26040, 26042, 26043, 26044, 26045, 26046, 26047, 26048, 26049, 26050, 26051, 26052, 26053, 26054, 26055, 26056, 26057, 26058, 26059, 26060, 26061, 26062, 26063, 26064, 26065, 26066, 26067, 26068, 26069, 26070, 26071, 26072, 26073, 26074, 26075, 26076, 26077, 26078, 26079, 26080, 26081, 26082, 26083, 26084, 26085, 26086, 26087, 26088, 26089, 26090, 26091, 26092, 26093, 26094, 26095, 26096, 26097, 
26098, 26099, 26100, 26101, 26102, 26103, 26104, 26105, 26106, 26107, 26108, 26109, 26110, 26111, 26112, 26113, 26114, 26115, 26116, 26117, 26118, 26119, 26120, 26121, 26122, 26123, 26124, 26125, 26126, 26127, 26128, 26129, 26130, 26131, 26132, 26133, 26134, 26135, 26137, 26138, 26140, 26141, 26143, 26144, 26146, 26147, 26148, 26149, 26150, 26151, 26152, 26153, 26154, 26155, 26156, 26157, 26158, 26159, 26160, 26161, 26162, 26163, 26164, 26165, 26166, 26167, 26168, 26169, 26170, 26171, 26172, 26173, 26174, 26175, 26176, 26177, 26178, 26179, 26180, 26181, 26182, 26183, 26184, 26185, 26186, 26187, 26188, 26189, 26190, 26191, 26192, 26193, 26194, 26195, 26196, 26197, 26198, 26199, 26200, 26201, 26202, 26203, 26204, 26205, 26206, 26207, 26208, 26209, 26210, 26211, 26212, 26213, 26214, 26215, 26216, 26217, 26218, 26219, 26220, 26221, 26222, 26223, 26224, 26225, 26226, 26227, 26228, 26229, 26230, 26231, 26232, 26233, 26234, 26235, 26236, 26237, 26238, 26239, 26240, 26241, 26242, 26243, 26244, 26245, 26246, 26247, 26248, 26249, 26250, 26252, 26253, 26255, 26256, 26257, 26258, 26259, 26260, 26261, 26262, 26263, 26264, 26265, 26266, 26267, 26269, 26270, 26271, 26272, 26273, 26274, 26276, 26277, 26278, 26280, 26281, 26282, 26283, 26284, 26286, 26287, 26288, 26289, 26291, 26292, 26294, 26295, 26296, 26297, 26298, 26299, 26300, 26301, 26302, 26303, 26304, 26305, 26306, 26307, 26308, 26309, 26310, 26311, 26312, 26313, 26314, 26315, 26316, 26317, 26318, 26319, 26320, 26321, 26322, 26323, 26324, 26325, 26326, 26327, 26328, 26329, 26330, 26331, 26332, 26333, 26334, 26335, 26336, 26337, 26338, 26339, 26340, 26341, 26342, 26343, 26344, 26345, 26346, 26347, 26348, 26349, 26350, 26351, 26352, 26353, 26354, 26355, 26356, 26357, 26358, 26359, 26360, 26361, 26362, 26363, 26367, 26368, 26369, 26370, 26371, 26372, 26373, 26374, 26375, 26376, 26377, 26378, 26379, 26380, 26381, 26382, 26383, 26385, 26387, 26388, 26389, 26390, 26391, 26392, 26393, 26394, 26399, 26400, 26401, 26402, 26403, 
26404, 26405, 26406, 26407, 26408, 26409, 26410, 26411, 26413, 26414, 26416, 26417, 26418, 26419, 26420, 26421, 26422, 26423, 26424, 26425, 26426, 26427, 26428, 26429, 26430, 26431, 26432, 26433, 26434, 26435, 26436, 26437, 26438, 26439, 26441, 26442, 26444, 26445, 26446, 26447, 26448, 26449, 26450, 26451, 26452, 26453, 26454, 26455, 26456, 26457, 26458, 26459, 26460, 26461, 26462, 26463, 26464, 26465, 26466, 26467, 26468, 26469, 26470, 26471, 26472, 26473, 26474, 26475, 26476, 26477, 26478, 26479, 26480, 26481, 26482, 26483, 26484, 26485, 26486, 26487, 26488, 26489, 26490, 26491, 26492, 26493, 26494, 26495, 26496, 26497, 26498, 26499, 26500, 26501, 26502, 26503, 26504, 26505, 26506, 26507, 26508, 26509, 26510, 26511, 26512, 26513, 26514, 26515, 26516, 26517, 26518, 26519, 26520, 26521, 26522, 26523, 26524, 26525, 26526, 26527, 26529, 26530, 26531, 26532, 26533, 26534, 26535, 26536, 26537, 26538, 26539, 26540, 26541, 26542, 26543, 26544, 26545, 26546, 26547, 26548, 26549, 26550, 26551, 26552, 26553, 26554, 26555, 26556, 26557, 26558, 26559, 26561, 26562, 26564, 26565, 26566, 26567, 26568, 26569, 26571, 26572, 26573, 26574, 26575, 26576, 26577, 26578, 26579, 26580, 26582, 26583, 26585, 26586, 26587, 26588, 26589, 26590, 26592, 26593, 26594, 26595, 26596, 26597, 26598, 26599, 26600, 26601, 26602, 26603, 26604, 26605, 26606, 26607, 26608, 26609, 26610, 26611, 26612, 26613, 26614, 26615, 26616, 26617, 26618, 26619, 26620, 26621, 26622, 26623, 26625, 26626, 26628, 26629, 26630, 26631, 26632, 26633, 26634, 26635, 26636, 26637, 26638, 26639, 26640, 26641, 26642, 26644, 26645, 26647, 26648, 26649, 26650, 26651, 26652, 26654, 26655, 26656, 26657, 26658, 26659, 26660, 26661, 26662, 26663, 26664, 26665, 26666, 26667, 26668, 26669, 26670, 26672, 26674, 26675, 26676, 26677, 26678, 26679, 26680, 26681, 26682, 26683, 26684, 26685, 26686, 26687, 26688, 26689, 26691, 26692, 26694, 26695, 26696, 26697, 26698, 26699, 26700, 26701, 26702, 26703, 26704, 26705, 26706, 26707, 26708, 
26709, 26710, 26711, 26712, 26713, 26714, 26715, 26716, 26717, 26718, 26719, 26720, 26721, 26722, 26723, 26724, 26725, 26726, 26727, 26728, 26729, 26730, 26731, 26732, 26733, 26734, 26735, 26736, 26737, 26738, 26739, 26740, 26741, 26742, 26743, 26744, 26745, 26746, 26747, 26748, 26749, 26751, 26752, 26754, 26755, 26756, 26757, 26758, 26759, 26760, 26761, 26762, 26763, 26764, 26765, 26766, 26767, 26768, 26769, 26770, 26771, 26772, 26773, 26774, 26775, 26776, 26777, 26778, 26779, 26780, 26781, 26782, 26783, 26784, 26785, 26786, 26787, 26788, 26789, 26790, 26791, 26792, 26793, 26794, 26795, 26796, 26797, 26798, 26799, 26800, 26801, 26802, 26803, 26804, 26805, 26806, 26807, 26808, 26809, 26810, 26811, 26812, 26813, 26814, 26815, 26816, 26817, 26818, 26819, 26820, 26821, 26822, 26823, 26824, 26827, 26828, 26829, 26830, 26831, 26832, 26833, 26834, 26835, 26836, 26837, 26839, 26840, 26842, 26843, 26844, 26845, 26846, 26847, 26848, 26849, 26851, 26852, 26854, 26855, 26856, 26857, 26859, 26860, 26862, 26863, 26864, 26865, 26866, 26867, 26868, 26869, 26870, 26871, 26872, 26873, 26874, 26875, 26876, 26877, 26878, 26879, 26880, 26881, 26882, 26883, 26884, 26885, 26886, 26887, 26888, 26890, 26891, 26893, 26894, 26895, 26896, 26897, 26898, 26899, 26900, 26901, 26902, 26903, 26904, 26905, 26906, 26908, 26909, 26911, 26912, 26913, 26914, 26915, 26916, 26917, 26918, 26919, 26920, 26921, 26922, 26923, 26924, 26925, 26926, 26927, 26928, 26929, 26930, 26931, 26932, 26933, 26934, 26936, 26937, 26938, 26939, 26940, 26941, 26942, 26943, 26944, 26945, 26946, 26947, 26948, 26949, 26950, 26952, 26953, 26954, 26955, 26956, 26957, 26958, 26959, 26960, 26961, 26962, 26963, 26964, 26965, 26966, 26967, 26968, 26969, 26970, 26971, 26972, 26973, 26974, 26976, 26978, 26979, 26980, 26981, 26982, 26983, 26984, 26985, 26986, 26988, 26989, 26991, 26992, 26993, 26994, 26995, 26996, 26997, 26998, 27000, 27001, 27003, 27004, 27005, 27006, 27007, 27009, 27010, 27011, 27012, 27013, 27014, 27015, 27016, 
27018, 27019, 27021, 27022, 27023, 27024, 27025, 27027, 27028, 27029, 27030, 27031, 27032, 27033, 27034, 27035, 27036, 27037, 27038, 27039, 27040, 27041, 27042, 27044, 27045, 27046, 27047, 27048, 27050, 27051, 27052, 27053, 27055, 27056, 27058, 27059, 27060, 27061, 27062, 27063, 27064, 27065, 27066, 27067, 27068, 27069, 27070, 27071, 27072, 27073, 27074, 27075, 27076, 27077, 27078, 27079, 27080, 27081, 27082, 27083, 27084, 27085, 27086, 27087, 27088, 27089, 27090, 27091, 27092, 27093, 27094, 27095, 27096, 27097, 27099, 27100, 27101, 27102, 27103, 27104, 27106, 27107, 27108, 27109, 27110, 27111, 27112, 27113, 27114, 27115, 27116, 27117, 27118, 27119, 27120, 27121, 27122, 27123, 27124, 27125, 27126, 27127, 27128, 27129, 27130, 27131, 27132, 27133, 27134, 27135, 27136, 27137, 27138, 27139, 27140, 27141, 27142, 27143, 27144, 27145, 27146, 27147, 27148, 27149, 27150, 27151, 27152, 27153, 27154, 27155, 27156, 27157, 27158, 27159, 27160, 27161, 27162, 27163, 27164, 27165, 27166, 27167, 27168, 27169, 27170, 27171, 27172, 27173, 27175, 27177, 27178, 27179, 27180, 27181, 27182, 27183, 27184, 27185, 27186, 27187, 27188, 27189, 27190, 27191, 27192, 27193, 27194, 27195, 27196, 27197, 27198, 27199, 27200, 27201, 27202, 27203, 27204, 27205, 27206, 27207, 27208, 27209, 27210, 27211, 27212, 27213, 27214, 27215, 27216, 27217, 27218, 27220, 27221, 27223, 27224, 27225, 27226, 27227, 27228, 27229, 27230, 27231, 27232, 27233, 27234, 27235, 27237, 27238, 27240, 27241, 27243, 27244, 27246, 27247, 27248, 27249, 27250, 27251, 27252, 27253, 27254, 27255, 27256, 27257, 27258, 27259, 27260, 27261, 27262, 27263, 27264, 27265, 27266, 27267, 27268, 27269, 27271, 27272, 27274, 27275, 27276, 27277, 27278, 27279, 27280, 27281, 27282, 27283, 27284, 27285, 27286, 27287, 27288, 27289, 27290, 27291, 27292, 27293, 27294, 27295, 27296, 27297, 27298, 27299, 27300, 27301, 27302, 27303, 27304, 27305, 27306, 27307, 27308, 27309, 27310, 27311, 27313, 27314, 27315, 27316, 27317, 27318, 27319, 27320, 27321, 
27323, 27324, 27326, 27327, 27328, 27329, 27330, 27331, 27332, 27333, 27334, 27335, 27336, 27337, 27338, 27339, 27340, 27341, 27342, 27343, 27344, 27345, 27346, 27347, 27348, 27349, 27350, 27351, 27352, 27353, 27354, 27355, 27356, 27357, 27358, 27359, 27360, 27361, 27362, 27363, 27364, 27365, 27366, 27367, 27368, 27369, 27370, 27371, 27372, 27373, 27374, 27375, 27376, 27377, 27378, 27379, 27380, 27381, 27382, 27383, 27384, 27385, 27386, 27387, 27388, 27389, 27390, 27391, 27392, 27393, 27394, 27396, 27397, 27399, 27400, 27401, 27402, 27403, 27404, 27405, 27406, 27407, 27408, 27409, 27410, 27411, 27412, 27413, 27414, 27415, 27416, 27417, 27418, 27419, 27420, 27421, 27422, 27423, 27424, 27425, 27426, 27427, 27428, 27429, 27430, 27431, 27432, 27433, 27434, 27435, 27436, 27437, 27438, 27439, 27440, 27441, 27442, 27443, 27444, 27445, 27446, 27447, 27448, 27449, 27450, 27451, 27453, 27454, 27455, 27456, 27457, 27458, 27459, 27460, 27461, 27462, 27463, 27464, 27465, 27466, 27467, 27468, 27469, 27470, 27471, 27472, 27473, 27474, 27475, 27477, 27478, 27479, 27480, 27481, 27482, 27483, 27484, 27485, 27486, 27487, 27488, 27489, 27490, 27491, 27492, 27494, 27495, 27497, 27498, 27499, 27500, 27501, 27502, 27504, 27505, 27506, 27507, 27508, 27509, 27510, 27511, 27512, 27513, 27514, 27515, 27516, 27517, 27518, 27519, 27520, 27521, 27522, 27523, 27524, 27525, 27526, 27527, 27528, 27529, 27530, 27531, 27532, 27533, 27534, 27535, 27536, 27537, 27538, 27539, 25340, 25338, 27541, 27542, 27543, 27544, 27545, 27546, 27239, 27236, 25628, 27239, 27549, 27550, 27551, 27552, 27553, 27554, 27555, 27556, 27562, 27563, 27565, 27566, 27572, 27573, 25656, 25654, 24257, 24254, 26398, 26365, 26398, 26396, 27575, 27576, 27577, 27578, 27579, 27580, 27581, 27582, 27583, 27585, 27586, 27587, 27588, 27590, 27591, 27593, 27594, 27595, 27596, 27597, 27598, 27599, 27600, 27602, 27603, 27605, 27606, 27607, 27608, 27609, 27610, 27611, 27612, 27613, 27614, 27615, 27616, 27617, 27618, 27619, 27620, 27622, 
27623, 27625, 27626, 27627, 27628, 27629, 27630, 27631, 27632, 27634, 27635, 27636, 27637, 27638, 27639, 27640, 27641, 27643, 27644, 27645, 27647, 27648, 27650, 27651, 27652, 27653, 27654, 27655, 27656, 27657, 27659, 27660, 27661, 27663, 27664, 27666, 27667, 27668, 27669, 27670, 27671, 27672, 27673, 27674, 27675, 27676, 27677, 27678, 27680, 27681, 27683, 27684, 27685, 27686, 27687, 27688, 27689, 27690, 27691, 27692, 27693, 27694, 27695, 27696, 27697, 27698, 27699, 27700, 27701, 27702, 27703, 27705, 27706, 27708, 27709, 27710, 27711, 27712, 27713, 27715, 27716, 27717, 27718, 27719, 27720, 27721, 27722, 27723, 27724, 27725, 27726, 27727, 27728, 27729, 27730, 27732, 27733, 27734, 27735, 27736, 27737, 27738, 27739, 27741, 27742, 27744, 27745, 27747, 27748, 27749, 27750, 27751, 27752, 27753, 27754, 27756, 27757, 27759, 27760, 27761, 27762, 27763, 27764, 27765, 27766, 27767, 27768, 27769, 27771, 27772, 27774, 27775, 27776, 27777, 27778, 27779, 27780, 27781, 27782, 27783, 27784, 27785, 27786, 27788, 27789, 27790, 27791, 27792, 27793, 27794, 27795, 27796, 27797, 27798, 27799, 27800, 27802, 27803, 27804, 27805, 27806, 27807, 27808, 27809, 27810, 27811, 27812, 27813, 27814, 27815, 27816, 27817, 27818, 27819, 27820, 27821, 27822, 27823, 27824, 27825, 27826, 27832, 27833, 27835, 27836, 27842, 27843, 27844, 27845, 27846, 27847, 27849, 27850, 27851, 27852, 27853, 27854, 27856, 27857, 27858, 27859, 27860, 27861, 27862, 27863, 27864, 27865, 27866, 27867, 27868, 27869, 27870, 27871, 27872, 27873, 27874, 27875, 27876, 27877, 27878, 27879, 27881, 27882, 27884, 27885, 27887, 27888, 27890, 27891, 27892, 27893, 27894, 27895, 27897, 27898, 27899, 27900, 27901, 27902, 27904, 27905, 27906, 27907, 27908, 27909, 27910, 27911, 27912, 27913, 27914, 27915, 27916, 27917, 27918, 27919, 27920, 27921, 27922, 27923, 27924, 27925, 27926, 27927, 27928, 27929, 27930, 27933, 27934, 27935, 27936, 27937, 27938, 27939, 27940, 27941, 27942, 27945, 27946, 27947, 27948, 27949, 27950, 27951, 27952, 27953, 
27954, 27955, 27956, 27958, 27959, 27960, 27961, 27962, 27963, 27964, 27965, 27966, 27967, 27968, 27969, 27970, 27971, 27972, 27973, 27974, 27975, 27976, 27977, 27978, 27979, 27980, 27981, 27982, 27983, 27984, 27985, 27986, 27987, 27988, 27989, 27990, 27991, 27992, 27993, 27994, 27995, 27996, 27997, 27998, 27999, 28000, 28002, 28003, 28004, 28005, 28006, 28007, 28008, 28009, 27561, 27559, 27571, 24942, 24939, 27831, 27829, 27841, 24942, 24939, 28010, 28011, 28012, 28013, 28014, 28015, 28016, 28017, 28018, 28019, 28020, 28021, 28022, 28023, 28024, 28025, 28026, 28027, 28028, 28029, 28030, 28031, 28032, 28033, 28034, 28035, 28036, 28037, 28038, 28039, 28040, 28041, 28042, 28043, 28044, 28045, 28046, 28047, 28048, 28049, 28050, 28051, 28052, 28053, 28054, 28055, 28056, 28057, 28058, 28059, 28060, 28061, 28062, 28063, 28064, 28065, 28066, 28067, 28068, 28069, 28070, 28071, 28072, 28073, 28074, 28075, 28076, 28077, 28078, 28079, 28081, 28082, 28084, 28085, 28087, 28088, 28090, 28091, 28092, 28093, 28094, 28095, 28096, 28097, 28098, 28099, 28100, 28101, 28102, 28103, 28104, 28105, 28106, 28107, 28108, 28109, 28110, 28111, 28112, 28113, 28114, 28115, 28116, 28117, 28118, 28119, 28120, 28121, 28122, 28123, 28124, 28125, 28126, 28127, 28128, 28129, 28130, 28131, 28132, 28133, 28134, 28135, 28136, 28137, 28138, 28139, 28141, 28142, 28144, 28145, 28146, 28147, 28148, 28149, 28150, 28151, 28152, 28153, 28154, 28156, 28157, 28159, 28160, 28161, 28162, 28163, 28164, 28165, 28166, 28167, 28168, 28169, 28170, 28171, 28172, 28174, 28175, 28177, 28178, 28179, 28180, 28181, 28182, 28183, 28184, 28185, 28186, 28187, 27932, 27944, 28188, 28189, 28191, 28192, 28193, 28194, 28195, 28197, 28198, 28200, 28201, 28203, 28204, 28205, 28206, 28207, 28208, 28209, 28210, 28211, 28212, 28213, 28214, 28215, 28216, 28217, 28218, 28219, 28221, 28222, 28224, 28225, 28227, 28228, 28229, 28230, 28231, 28232, 28233, 28234, 28235, 28236, 28237, 28238, 28239, 28240, 28241, 28242, 28243, 28244, 28246, 
28247, 28249, 28250, 28252, 28253, 28254, 28255, 28256, 28257, 28258, 28259, 28260, 28261, 28262, 28263, 28264, 28265, 28266, 28267, 28268, 28269, 28270, 28271, 28273, 28274, 28275, 28276, 28277, 28278, 28279, 28280, 28281, 28282, 28283, 28284, 28285, 28286, 28287, 28288, 28289, 28290, 28291, 28292, 28294, 28295, 28296, 28297, 28298, 28299, 28300, 28301, 28302, 28303, 28304, 28305, 28306, 28307, 28308, 28309, 28310, 28311, 28314, 28315, 28316, 28317, 28318, 28319, 28320, 28321, 28322, 28323, 28324, 28325, 28326, 28327, 28328, 28329, 28330, 28331, 28332, 28333, 28334, 28335, 28336, 28337, 28338, 28339, 28340, 28341, 28342, 28343, 28344, 28345, 28346, 28347, 28348, 28349, 13, 14, 15, 28352, 28354, 28356, 28358, 28360, 28362, 28364, 28367, 28370, 28372, 28378, 28380, 28386, 28388, 28390, 28392, 28394, 28397, 28400, 28402, 28404, 28406, 28408, 28411, 28414, 28421, 28423, 28427, 28429, 28431, 28433, 28437, 28439, 28441, 28445, 28448, 28450, 28454, 28457, 28459, 28461, 28463, 28466, 28472, 28475, 28478, 28481, 28484, 28486, 28488, 28491, 28493, 28495, 28498, 28500, 28502, 28504, 28507, 28509, 28512, 28514, 28516, 28518, 28520, 28523, 28525, 28527, 28529, 28531, 28533, 28535, 28537, 28539, 28541, 28543, 28545, 28547, 28549, 28551, 28553, 28555, 28559, 28561, 28563, 28566, 28568, 28570, 28573, 28575, 28577, 28579, 28582, 28584, 28587, 28589, 28591, 28593, 28596, 28599, 28601, 28603, 28605, 28607, 28609, 28611, 28613, 28616, 28618, 28620, 28622, 28628, 28630, 28632, 28634, 28636, 28638, 28641, 28643, 28645, 28647, 28650, 28652, 28654, 28656, 28659, 28661, 28663, 28665, 28667, 28672, 28675, 28678, 28681, 28685, 28687, 28692, 28697, 28700, 28703, 28705, 28711, 28714, 28717, 28720, 28723, 28725, 28727, 28729, 28731, 28733, 28735, 28737, 28739, 28742, 28745, 28747, 28749, 28751, 28753, 28756, 28760, 28762, 28766, 28768, 28770, 28773, 28776, 28781, 28783, 28786, 28788, 28790, 28792, 28794, 28796, 28799, 28801, 28807, 28809, 28812, 28814, 28817, 28819, 28821, 28824, 28826, 28828, 
28830, 28832, 28835, 28838, 28840, 28842, 28845, 28848, 28851, 28854, 28856, 28858, 28860, 28862, 28864, 28867, 28869, 28871, 28873, 28875, 28877, 28880, 28883, 28888, 28890, 28892, 28897, 28899, 28901, 28903, 28905, 28907, 28909, 28914, 28916, 28921, 28923, 28926, 28929, 28932, 28935, 28938, 28941, 28943, 28945, 28948, 28951, 28953, 28955, 28958, 28961, 28963, 28965, 28967, 28969, 28972, 28975, 28977, 28979, 28981, 28983, 28985, 28987, 28989, 28991, 28993, 28995, 28997, 29000, 29003, 29005, 29007, 29009, 29011, 29013, 29015, 29017, 29020, 29023, 29025, 29027, 29030, 29033, 29036, 29038, 29040, 29046, 29048, 29050, 29052, 29058, 29061, 29066, 29068, 29070, 29073, 29076, 29078, 29080, 29083, 29086, 29089, 29092, 29095, 29097, 29099, 29101, 29103, 29106, 29109, 29111, 29113, 29115, 29117, 29119, 29122, 29125, 29127, 29129, 29131, 29134, 29137, 29139, 29141, 29143, 29145, 29147, 29150, 29152, 29154, 29156, 29159, 29162, 29164, 29166, 29168, 29170, 29172, 29174, 29177, 29180, 29182, 29184, 29187, 29190, 29192, 29194, 29196, 29198, 29200, 29202, 29204, 29206, 29208, 29210, 29212, 29214, 29217, 29220, 29222, 29225, 29227, 29229, 29231, 29233, 29235, 29237, 29239, 29241, 29243, 29246, 29248, 29251, 29253, 29255, 29257, 29260, 29263, 29266, 29268, 29270, 29273, 29276, 29279, 29282, 29286, 29288, 29290, 29292, 29294, 29296, 29300, 29303, 29305, 29307, 29309, 29312, 29314, 29316, 29318, 29321, 29323, 29325, 29327, 29329, 29332, 29334, 29336, 29339, 29342, 29345, 29347, 29349, 29351, 29353, 29355, 29358, 29360, 29362, 29364, 29366, 29368, 29371, 29374, 29377, 29380, 29383, 29386, 29388, 29390, 29392, 29394, 29396, 29398, 29400, 29404, 29407, 29409, 29411, 29414, 29417, 29419, 29423, 29426, 29429, 29431, 29433, 29435, 29439, 29442, 29445, 29448, 29450, 29452, 29455, 29458, 29460, 29462, 29464, 29466, 29469, 29472, 29474, 29476, 29480, 29483, 29485, 29487, 29489, 29491, 29494, 29497, 29499, 29501, 29504, 29507, 29509, 29511, 29513, 29515, 29525, 29527, 29529, 29531, 29533, 
29543, 29545, 29547, 29550, 29557, 29560, 29563, 29565, 29567, 29572, 29574, 29576, 29578, 29580, 29582, 29584, 29586, 29588, 29590, 29610, 29613, 29616, 29618, 29620, 29622, 29624, 29626, 29628, 29631, 29635, 29637, 29640, 29643, 29645, 29647, 29650, 29653, 29656, 29659, 29662, 29665, 29667, 29669, 29672, 29674, 29676, 29679, 29682, 29684, 29686, 29689, 29692, 29694, 29700, 29703, 29706, 29708, 29710, 29713, 29716, 29719, 29726, 29729, 29732, 29734, 29736, 29743, 29746, 29749, 29751, 29755, 29758, 29760, 29762, 29766, 29768, 29770, 29773, 29776, 29778, 29782, 29784, 29787, 29789, 29791, 29793, 29796, 29798, 29804, 29811, 29813, 29815, 29818, 29821, 29824, 29827, 29829, 29831, 29834, 29836, 29838, 29841, 29843, 29845, 29847, 29849, 29851, 29853, 29855, 29858, 29861, 29863, 29865, 29867, 29870, 29873, 29877, 29879, 29881, 29883, 29889, 29895, 29898, 29901, 29903, 29905, 29908, 29911, 29913, 29915, 29918, 29920, 29923, 29926, 29928, 29930, 29933, 29935, 29937, 29939, 29941, 29943, 29946, 29951, 29954, 29956, 29958, 29960, 29962, 29965, 29968, 29971, 29974, 29977, 29979, 29981, 29983, 29985, 29988, 29990, 29992, 29994, 29996, 29998, 30000, 30002, 30004, 30006, 30008, 30010, 30012, 30015, 30019, 30021, 30024, 30027, 30030, 30034, 30036, 30038, 30040, 30042, 30044, 30047, 30050, 30053, 30055, 30057, 30060, 30063, 30065, 30067, 30069, 30071, 30073, 30077, 30080, 30084, 30086, 30088, 30090, 30096, 30099, 30102, 30104, 30106, 30108, 30113, 30115, 30117, 30119, 30121, 30123, 30125, 30127, 30129, 30132, 30134, 30136, 30138, 30140, 30143, 30150, 30153, 30156, 30159, 30162, 30164, 30166, 30169, 30172, 30175, 30178, 30181, 30184, 30186, 30188, 30191, 30194, 30197, 30200, 30202, 30204, 30206, 30209, 30211, 30213, 30216, 30218, 30221, 30223, 30226, 30228, 30230, 30232, 30236, 30239, 30241, 30243, 30245, 30249, 30252, 30254, 30256, 30258, 30260, 30263, 30265, 30267, 30269, 30272, 30274, 30277, 30280, 30282, 30284, 30288, 30291, 30294, 30299, 30301, 30307, 30309, 30313, 30316, 
30319, 30322, 30325, 30330, 30333, 30335, 30337, 30339, 30342, 30345, 30350, 30352, 30354, 30357, 30370, 30373, 30376, 30378, 30380, 30382, 30384, 29826, 29823, 29826, 29823, 30395, 30398, 30401, 30403, 30405, 30407, 30409, 30412, 30415, 30418, 30420, 30422, 30423, 29725, 29742, 28377, 28375, 29723, 29740, 28385, 28383, 28420, 28418, 28425, 28444, 28453, 30424, 30426, 30428, 30430, 30431, 28469, 28471, 28470, 24423, 24421, 24423, 24424, 24422, 24424, 30432, 30433, 28626, 28626, 30434, 30436, 30438, 30440, 30442, 30444, 30446, 30448, 30449, 24247, 28671, 24247, 28691, 28696, 28710, 28708, 28896, 28780, 28785, 28866, 28806, 28804, 30450, 30451, 28866, 28913, 28887, 28896, 28913, 24263, 24262, 29044, 29056, 29065, 30452, 30453, 26386, 30454, 30455, 29403, 29422, 29438, 29479, 29520, 29518, 29524, 29522, 29538, 29536, 29542, 29540, 29556, 29554, 29571, 29699, 29595, 29593, 29599, 29597, 29603, 29601, 29607, 24375, 24374, 26673, 26671, 29699, 29697, 29725, 29723, 29742, 29740, 29754, 29781, 29801, 29803, 29810, 29808, 29781, 29801, 29803, 29810, 29808, 26951, 24424, 24423, 24422, 24421, 29892, 29894, 29893, 29950, 27176, 27174, 30095, 30093, 30112, 30149, 30147, 30208, 30215, 24505, 24504, 30287, 30298, 30306, 24505, 24504, 24506, 24507, 30329, 30349, 24506, 24507, 30329, 24507, 30329, 30349, 30287, 30298, 30306, 30304, 24505, 24504, 24507, 30329, 30349, 30456, 30458, 30460, 30463, 30467, 30469, 30471, 30474, 30477, 30479, 30485, 30487, 30489, 30492, 30495, 30497, 30501, 30503, 30505, 30507, 30509, 30511, 30513, 30516, 30518, 30520, 30522, 30524, 30526, 30529, 30531, 30533, 30535, 30537, 30539, 30541, 30544, 30546, 30548, 30550, 30552, 30554, 30556, 30561, 30564, 30567, 30569, 30571, 30573, 30575, 30577, 30580, 30583, 30585, 30587, 30589, 30591, 30593, 30595, 30597, 30599, 30601, 30603, 30605, 30607, 30609, 30611, 30613, 30615, 30618, 30621, 30624, 30626, 30630, 30633, 30637, 30639, 30641, 30643, 30645, 30647, 30649, 30651, 30654, 30657, 30659, 30661, 30663, 30665, 
30667, 30669, 30671, 30673, 30675, 30677, 30679, 30681, 30683, 30685, 30687, 30689, 30691, 30693, 30695, 30698, 30701, 30703, 30705, 30707, 30709, 30711, 30713, 30715, 30717, 30719, 30721, 30723, 30725, 30727, 30729, 30731, 30733, 30735, 30737, 30739, 30741, 30743, 30745, 30748, 30751, 30754, 24891, 24879, 30362, 24891, 24879, 24892, 24886, 30558, 24892, 24891, 24891, 24879, 24891, 24879, 24892, 24886, 30558, 24892, 24891, 30757, 30759, 30761, 30764, 30767, 30769, 30771, 30774, 30777, 30779, 30781, 30784, 30786, 30789, 30791, 30793, 30795, 30797, 30800, 30803, 30806, 30808, 30810, 30812, 30814, 30817, 30820, 30822, 30824, 30827, 30829, 30831, 30833, 30835, 30837, 30838, 30839, 30840, 30841, 30484, 30466, 30484, 30482, 30500, 30515, 30528, 30560, 30629, 30629, 27801, 30842, 30843, 30844, 30845, 30846, 30849, 30851, 30853, 30855, 30857, 30860, 30863, 30865, 30867, 30870, 30872, 30874, 30877, 30879, 30881, 30884, 30886, 30888, 30891, 30893, 30895, 30898, 30901, 30903, 30905, 30907, 30909, 30912, 30915, 30917, 30919, 30921, 30923, 30926, 30929, 30932, 30935, 30938, 30940, 30942, 30945, 30948, 30950, 30952, 30955, 30958, 30961, 30964, 30967, 30969, 30971, 30973, 30975, 30977, 30979, 30984, 30986, 30988, 30990, 30992, 30994, 30996, 30998, 31000, 31002, 31004, 31006, 31008, 31010, 31015, 31016, 31017, 31020, 31022, 31024, 31026, 31031, 31033, 31035, 31037, 31039, 31043, 31045, 31047, 31049, 31051, 31053, 31055, 31057, 31059, 31062, 31065, 31067, 31069, 30848, 30983, 31075, 31077, 31079, 31082, 31084, 31086, 31089, 31091, 31093, 31095, 31097, 31099, 31102, 31104, 31106, 31109, 31111, 31113, 31115, 31117, 31119, 31122, 31074, 25301, 25300, 31074, 25301, 25300, 31074, 25301, 25300, 31128, 25301, 25300, 31129, 31132, 31135, 31138, 31141, 31144, 31147, 31150, 31153, 31156, 31159, 31162, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 31946, 31947, 31948, 31949, 24159, 24158, 24159, 24158, 24244, 24245, 31961, 31963, 24169, 24168, 31964, 24169, 24168, 25348, 25345, 28369, 28366, 
31177, 31965, 31966, 31967, 31968, 31179, 31969, 31970, 31181, 31183, 28399, 28396, 31187, 31189, 28413, 28410, 28416, 31971, 31972, 31194, 31973, 31347, 28816, 31350, 28426, 31195, 24259, 24258, 28837, 28834, 24260, 24261, 28847, 28435, 28436, 28853, 31199, 25417, 31347, 28816, 31350, 28823, 31353, 24259, 24258, 28837, 28834, 24260, 24261, 28847, 28435, 28436, 28853, 31199, 25426, 31201, 28447, 31974, 31204, 31975, 31205, 28456, 29189, 31207, 31421, 29002, 28999, 26025, 31426, 26032, 26038, 25449, 25452, 30155, 30152, 30161, 28465, 25458, 27270, 30171, 30168, 30177, 30174, 30183, 30180, 31875, 30190, 30196, 30193, 30199, 31979, 31981, 31982, 31983, 28477, 28474, 28483, 28480, 31216, 28490, 25491, 25488, 28497, 31222, 31224, 31225, 28511, 29826, 31228, 24211, 24210, 28522, 31233, 31235, 31237, 31239, 25538, 24207, 24206, 31244, 31246, 25553, 28557, 31249, 31251, 31984, 31985, 31986, 24421, 31254, 31255, 24211, 24210, 28581, 31260, 31261, 24213, 24212, 28595, 28598, 31267, 31269, 31271, 31987, 31988, 31989, 24422, 31273, 31274, 25622, 25619, 28624, 31990, 31880, 31881, 31913, 27322, 27312, 30290, 30293, 30296, 31992, 31920, 31880, 31881, 31913, 27322, 27312, 30290, 30293, 30296, 31993, 31920, 31390, 28928, 28925, 28934, 28931, 28937, 28940, 31398, 28950, 28947, 31279, 29179, 29176, 31390, 28928, 28925, 28934, 28931, 28940, 28937, 31398, 28950, 28947, 31492, 29179, 29176, 31280, 31280, 31481, 31482, 31484, 29161, 29158, 31488, 31490, 31492, 29179, 29176, 31282, 28640, 29186, 31500, 31502, 31504, 31506, 31508, 31510, 24244, 24245, 32001, 31287, 31288, 31289, 31291, 31292, 31566, 24245, 24244, 31295, 32003, 24246, 32004, 28674, 28677, 28683, 28680, 31301, 32005, 24246, 32006, 28694, 32007, 28702, 28699, 31307, 32008, 32009, 28716, 28713, 28722, 28719, 31313, 31315, 31317, 31319, 28744, 28741, 31323, 31325, 28758, 28755, 31328, 31329, 32010, 28764, 31331, 28798, 28772, 28775, 32011, 28778, 31336, 32012, 31337, 28894, 32013, 31339, 28798, 24251, 24250, 31344, 32014, 
32015, 31346, 32016, 31347, 28816, 31350, 28823, 31353, 24259, 24258, 28837, 28834, 24261, 24260, 28847, 28844, 28853, 28850, 31365, 31367, 31369, 28894, 32018, 31371, 31373, 31375, 32019, 28911, 28885, 28882, 32020, 31379, 32021, 28894, 31382, 31384, 31386, 32022, 28911, 31389, 32023, 32024, 31390, 28928, 28925, 28934, 28931, 28940, 28937, 31398, 28950, 28947, 31402, 28960, 28957, 31406, 31408, 28974, 28971, 25992, 31413, 25999, 26005, 26002, 26008, 31496, 29189, 29186, 31419, 31421, 29002, 28999, 26025, 31426, 26032, 26038, 26035, 26041, 29219, 29019, 29022, 31434, 29029, 29032, 29035, 31439, 29042, 32025, 31441, 31443, 29054, 32026, 31445, 29219, 29060, 29063, 32027, 31517, 31448, 29072, 29075, 31452, 29082, 29088, 29085, 29091, 29094, 31459, 29105, 24285, 24284, 29108, 26139, 26136, 26145, 26142, 31469, 31470, 31471, 31473, 24294, 31475, 31476, 31478, 24295, 31481, 31482, 31484, 29161, 29158, 31488, 31490, 31492, 29179, 29176, 31496, 29189, 29186, 31500, 31502, 31504, 31506, 31508, 31510, 29219, 29216, 26254, 26251, 31515, 31517, 31519, 26268, 31522, 26275, 29245, 31525, 29250, 31527, 26293, 26290, 29262, 29259, 29265, 31534, 29272, 29278, 29275, 29281, 29284, 31540, 31542, 31544, 29298, 31546, 31547, 31549, 29311, 31552, 31554, 31555, 31557, 24323, 24322, 32028, 31570, 29331, 31561, 29341, 29338, 32030, 26384, 31566, 24323, 24322, 32031, 31570, 31571, 31573, 26415, 26412, 29373, 29370, 29379, 29376, 29385, 29382, 31583, 26443, 26440, 31587, 31589, 29406, 32033, 31592, 29416, 29413, 31596, 32034, 29428, 29425, 24341, 24340, 31602, 32035, 29444, 29441, 29447, 31607, 29457, 29454, 31611, 31613, 29471, 29468, 26528, 24343, 24342, 29482, 32036, 31621, 31623, 29496, 29493, 31627, 29506, 29503, 26563, 26560, 26570, 24371, 24370, 32037, 32038, 32039, 32040, 26584, 26581, 26591, 24371, 24370, 32041, 32042, 32043, 32044, 31641, 24359, 24358, 29552, 29549, 32045, 32046, 29559, 29562, 24361, 24360, 26627, 26624, 32047, 32048, 31650, 29615, 29612, 31652, 31653, 26646, 
26643, 26653, 24371, 24370, 32049, 32050, 32051, 32052, 32053, 32054, 32055, 32056, 32057, 32058, 32059, 29615, 29612, 31662, 31664, 26693, 26690, 29633, 29630, 31669, 29642, 29639, 31673, 29649, 29655, 29652, 29658, 29664, 29661, 29671, 24385, 24384, 29678, 24387, 24386, 29681, 26753, 26750, 29691, 29688, 31692, 32060, 32061, 29705, 29702, 31696, 29715, 29712, 29721, 29718, 32062, 32063, 29731, 29728, 31704, 29738, 32064, 32065, 29748, 29745, 31709, 32066, 26841, 26838, 29775, 29772, 26853, 26850, 32067, 26826, 26825, 31711, 31724, 29795, 31713, 32068, 29806, 32069, 32070, 32071, 26841, 26838, 29775, 29772, 26853, 26850, 32072, 26861, 26858, 31722, 31724, 29795, 31727, 32073, 29806, 32074, 32075, 32076, 26892, 26889, 29820, 29817, 29826, 29823, 26910, 26907, 29833, 31739, 29840, 31742, 31744, 31746, 26935, 29860, 29857, 31751, 31753, 32077, 29875, 29872, 31756, 31758, 24420, 32078, 32079, 32080, 32081, 24425, 32082, 32083, 32084, 29900, 29897, 26990, 26987, 29910, 29907, 27002, 26999, 29917, 27008, 29925, 29922, 27020, 27017, 29932, 27026, 31778, 31780, 29945, 29948, 32085, 31783, 31784, 31785, 27057, 27054, 29967, 29964, 29973, 29970, 29976, 31794, 29987, 24455, 24454, 31799, 31801, 24459, 24458, 27098, 24459, 24458, 27105, 31809, 30017, 30014, 31812, 30023, 30029, 30026, 30032, 31817, 31819, 31821, 30046, 30052, 30049, 31826, 30062, 30059, 31830, 31832, 31834, 32086, 32087, 30082, 30079, 31837, 31839, 24466, 32088, 32089, 30101, 30098, 31844, 24467, 32090, 30110, 31848, 27222, 27219, 31852, 30131, 24473, 24472, 27239, 27236, 27245, 27242, 30145, 30142, 32091, 32092, 30155, 30152, 30161, 30158, 27273, 27270, 30171, 30168, 30177, 30174, 30183, 30180, 31875, 30190, 30196, 30193, 30199, 31880, 31881, 31913, 27322, 27312, 30290, 30293, 30296, 32093, 31920, 31911, 31912, 31885, 27325, 27322, 32094, 31888, 32095, 32096, 30315, 31890, 31891, 31893, 27398, 27395, 32097, 30293, 30235, 32098, 31920, 32099, 31922, 32100, 32101, 30238, 30321, 30247, 32102, 32103, 32104, 
30327, 30251, 31901, 24497, 24496, 24492, 24493, 32105, 31898, 30356, 30359, 30321, 30247, 32106, 32107, 32108, 30327, 30251, 31901, 24497, 24496, 24493, 24492, 30347, 31936, 30359, 24495, 24494, 30321, 30318, 32109, 24506, 32110, 30327, 30332, 31930, 24497, 24496, 24499, 24498, 30271, 32111, 31936, 30359, 30356, 31911, 31912, 31913, 27398, 27395, 30290, 32112, 30296, 30293, 32113, 31920, 32114, 32115, 31922, 32116, 32117, 30315, 30321, 30318, 32118, 24506, 32119, 30327, 30332, 31930, 27452, 30344, 30341, 32120, 30347, 31936, 30359, 30356, 32252, 32253, 32254, 32255, 32256, 32257, 32258, 32259, 32260, 32261, 30375, 30372, 27496, 27493, 31944, 27503, 32262, 32263, 32264, 32265, 32266, 32267, 32268, 32269, 32270, 30400, 30397, 24951, 31954, 24952, 30414, 30411, 24954, 24953, 30417, 31994, 31977, 27547, 31994, 27557, 24922, 24921, 32305, 27567, 27564, 32307, 32239, 27574, 32242, 27903, 27592, 27589, 30476, 30473, 27604, 24854, 24853, 32310, 30462, 32132, 30494, 30491, 27624, 27584, 32311, 27592, 27589, 30476, 30473, 27604, 27601, 32312, 32313, 32132, 30494, 30491, 27624, 27621, 32314, 27633, 24870, 24869, 32141, 24891, 24879, 32315, 27649, 27646, 32147, 24891, 24879, 32316, 27665, 27662, 32153, 24892, 24886, 30543, 27682, 27679, 32160, 30558, 24892, 24891, 32317, 30566, 30563, 27707, 27704, 32169, 27714, 30582, 30579, 32174, 32176, 27731, 32179, 32318, 30635, 30632, 32196, 27740, 27746, 27743, 32201, 32203, 27801, 30656, 32186, 27758, 27755, 30620, 30617, 30623, 27773, 27770, 32319, 30635, 30632, 32196, 27787, 32199, 32201, 32203, 32320, 30656, 32206, 32208, 32210, 24920, 24919, 27827, 24922, 24921, 32321, 27837, 27834, 32323, 32219, 27848, 32222, 27855, 30700, 30697, 24951, 32228, 24952, 30750, 30747, 24954, 24953, 30756, 27883, 27880, 27889, 27886, 32239, 27896, 32242, 27903, 32245, 24968, 24967, 30750, 30747, 30756, 30753, 32272, 30763, 30766, 32276, 30773, 30776, 32280, 30783, 27957, 30788, 25145, 25144, 32285, 32287, 30802, 30799, 25147, 25146, 30805, 32292, 
32294, 30819, 30816, 32298, 28001, 30826, 32302, 32304, 32420, 32327, 32329, 30862, 30859, 32333, 30869, 32336, 30876, 32339, 30883, 32342, 30890, 32345, 30900, 30897, 32349, 32351, 30914, 30911, 28083, 28080, 28089, 28086, 30928, 30925, 30931, 30937, 30934, 32364, 30947, 30944, 30954, 25216, 25215, 30960, 30957, 30966, 30963, 32375, 28143, 28140, 32379, 32421, 30981, 28158, 28155, 32384, 32386, 32388, 28176, 28173, 32392, 25281, 25280, 32444, 32445, 32446, 28190, 32398, 28199, 28196, 28202, 32447, 32448, 32449, 25288, 32404, 25281, 25280, 32430, 32439, 28223, 28220, 28226, 32411, 25288, 32414, 25289, 32416, 32439, 28248, 28245, 28251, 32450, 32451, 32452, 32423, 31081, 32426, 31088, 28272, 32430, 32432, 31101, 32435, 31108, 28293, 32439, 32441, 31124, 31121, 32453, 32454, 32455, 31134, 31131, 31140, 31137, 31146, 31143, 31152, 31149, 31158, 31155, 31164, 31161, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 32480, 32482, 32484, 32485, 32486, 32487, 32488, 32489, 32492, 32493, 32495, 32496, 32497, 32498, 32499, 32500, 32501, 32502, 32506, 32507, 32509, 32510, 32511, 32512, 32513, 32514, 32515, 32516, 32517, 32518, 32520, 32522, 32523, 32524, 32525, 32526, 32527, 32528, 32529, 32530, 32531, 32532, 32533, 32534, 32535, 32536, 32537, 32538, 32539, 32540, 32541, 32542, 32543, 32544, 32545, 32546, 32547, 32548, 32549, 32550, 32551, 32552, 32553, 32554, 32555, 32556, 32557, 32559, 32561, 32562, 32563, 32564, 32565, 32566, 32567, 32568, 32569, 32570, 32571, 32572, 32573, 32574, 32575, 32576, 32577, 32578, 32579, 32580, 32581, 32582, 32583, 32584, 32585, 32586, 32587, 32588, 32589, 32590, 32593, 32595, 32596, 32597, 32598, 32599, 32600, 32601, 32602, 32603, 32604, 32605, 32606, 32607, 32608, 32609, 32610, 32611, 32612, 32613, 32614, 32615, 32616, 32617, 32618, 32619, 32620, 32621, 32622, 32623, 32624, 32625, 32626, 32629, 32630, 32631, 32632, 32633, 32634, 32635, 32636, 32637, 32638, 32639, 32640, 32641, 32642, 32643, 32644, 32647, 32648, 32649, 32650, 32651, 32652, 32654, 32655, 
32656, 32657, 32658, 32659, 32660, 32661, 32663, 32664, 32665, 32666, 32667, 32668, 32669, 32670, 32671, 32673, 32674, 32675, 32676, 32677, 32678, 32679, 32680, 32681, 32682, 32683, 32684, 32685, 32686, 32687, 32688, 32689, 32690, 32691, 32692, 32693, 32694, 32695, 32696, 32697, 32698, 32699, 32700, 32701, 32702, 32703, 32704, 32705, 32706, 32707, 32708, 32709, 32710, 32711, 32712, 32713, 32714, 32715, 32716, 32717, 32718, 32719, 32720, 32721, 32722, 32724, 32725, 32726, 32727, 32728, 32729, 32730, 32731, 32732, 32734, 32736, 32737, 32738, 32739, 32740, 32742, 32744, 32746, 32747, 32748, 32749, 32751, 32752, 32753, 32754, 32755, 32756, 32757, 32758, 32759, 32760, 32761, 32762, 32763, 32764, 32765, 32766, 32768, 32769, 32770, 32771, 32772, 32774, 32775, 32777, 32778, 32780, 32781, 32782, 32783, 32784, 32785, 32787, 32789, 32790, 32791, 32792, 32793, 32794, 32795, 32796, 32797, 32798, 32799, 32800, 32801, 32802, 32803, 32804, 32805, 32806, 32807, 32809, 32810, 32811, 32813, 32814, 32815, 32817, 32819, 32820, 32821, 32822, 32824, 32825, 32826, 32828, 32829, 32830, 32831, 32832, 32833, 32834, 32835, 32836, 32837, 32838, 32839, 32840, 32841, 32842, 32843, 32844, 32845, 32846, 32847, 32848, 32849, 32850, 32851, 32852, 32853, 32854, 32855, 32856, 32857, 32858, 32859, 32860, 32861, 32862, 32863, 32864, 32865, 32866, 32867, 32868, 32869, 32870, 32871, 32872, 32874, 32875, 32876, 32878, 32879, 32880, 32881, 32883, 32884, 32885, 32886, 32887, 32888, 32889, 32890, 32891, 32892, 32893, 32894, 32895, 32896, 32897, 32898, 32899, 32900, 32901, 32902, 32903, 32904, 32905, 32906, 32907, 32908, 32909, 32910, 32911, 32912, 32913, 32914, 32915, 32916, 32917, 32918, 32919, 32920, 32921, 32922, 32923, 32924, 32925, 32926, 32927, 32928, 32929, 32930, 32931, 32932, 32933, 32934, 32935, 32936, 32937, 32938, 32939, 32940, 32941, 32942, 32943, 32944, 32945, 32946, 32947, 32948, 32949, 32950, 32951, 32952, 32953, 32954, 32955, 32956, 32957, 32958, 32959, 32960, 32961, 32962, 32963, 32964, 
32965, 32966, 32967, 32968, 32970, 32971, 32972, 32973, 32974, 32976, 32977, 32978, 32979, 32981, 32982, 32983, 32984, 32985, 32986, 32987, 32988, 32989, 32990, 32991, 32992, 32993, 32994, 32995, 32996, 32997, 32999, 33000, 33001, 33002, 33004, 33005, 33006, 33007, 33008, 33010, 33011, 33012, 33013, 33014, 33015, 33016, 33017, 33018, 33019, 33020, 33021, 33022, 33023, 33025, 33026, 33027, 33028, 33029, 33030, 33031, 33032, 33033, 33034, 33035, 33036, 33037, 33039, 33041, 33042, 33043, 33044, 33045, 33046, 33048, 33050, 33051, 33052, 33053, 33054, 33055, 33057, 33058, 33059, 33060, 33061, 33062, 33063, 33065, 33066, 33067, 33068, 33069, 33070, 33071, 33072, 33073, 33074, 33075, 33077, 33079, 33081, 33084, 33086, 33087, 33088, 33089, 33090, 33091, 33092, 33093, 33094, 33095, 33096, 33097, 33098, 33099, 33100, 33101, 33102, 33103, 33104, 33105, 33106, 33107, 33108, 33109, 33110, 33111, 33112, 33113, 33114, 33115, 33116, 33118, 33119, 33120, 33121, 33122, 33123, 33124, 33125, 33127, 33128, 33129, 33130, 33131, 33133, 33134, 33135, 33137, 33138, 33139, 33140, 33141, 33142, 33144, 33145, 33146, 33147, 33148, 33149, 33151, 33153, 33155, 33156, 33157, 33158, 33159, 33160, 33162, 33163, 33164, 33165, 33166, 33167, 33169, 33171, 33173, 33174, 33175, 33176, 33177, 33178, 33179, 33180, 33181, 33182, 33183, 33184, 33185, 33186, 33187, 33188, 33189, 33190, 33191, 33193, 33194, 33195, 33196, 33197, 33198, 33200, 33202, 33204, 33206, 33207, 33208, 33209, 33210, 33211, 33212, 33213, 33214, 33215, 33216, 33217, 33218, 33219, 33220, 33221, 33222, 33223, 33224, 33225, 33227, 33228, 33229, 33230, 33231, 33232, 33233, 33234, 33235, 33236, 33237, 33238, 33239, 33240, 33241, 33242, 33243, 33244, 33245, 33246, 33247, 33248, 33249, 33250, 33251, 33252, 33253, 33254, 33255, 33256, 33257, 33258, 33259, 33260, 33261, 33262, 33263, 33264, 33265, 33266, 33267, 33268, 33269, 33271, 33272, 33273, 33274, 33275, 33276, 33278, 33279, 33280, 33281, 33283, 33284, 33285, 33286, 33287, 33288, 33289, 
33290, 33291, 33292, 33293, 33294, 33295, 33296, 33297, 33299, 33300, 33301, 33302, 33303, 33304, 33305, 33306, 33307, 33308, 33309, 33310, 33311, 33312, 33313, 33314, 33315, 33316, 33317, 33318, 33319, 33320, 33321, 33322, 33323, 33325, 33326, 33327, 33328, 33329, 33330, 33332, 33333, 33335, 33336, 33337, 33338, 33339, 33340, 33342, 33343, 33345, 33347, 33348, 33350, 33351, 33352, 33353, 33356, 33357, 33358, 33359, 33360, 33361, 33362, 33364, 33365, 33366, 33367, 33368, 33369, 33372, 33373, 33374, 33375, 33376, 33377, 33378, 33379, 33380, 33381, 33382, 33383, 33384, 33385, 33387, 33389, 33390, 33391, 33392, 33393, 33394, 33395, 33396, 33398, 33399, 33400, 33401, 33402, 33403, 33404, 33405, 33406, 33408, 33409, 33411, 33412, 33414, 33415, 33417, 33418, 33419, 33421, 33423, 33424, 33425, 33426, 33427, 33428, 33430, 33431, 33432, 33433, 33434, 33437, 33439, 33441, 33444, 33445, 33446, 33447, 33448, 33449, 33450, 33452, 33454, 33456, 33459, 33460, 33461, 33462, 33463, 33464, 33465, 33466, 33467, 33468, 33469, 33470, 33471, 33472, 33473, 33474, 33475, 33477, 33478, 33479, 33480, 33481, 33482, 33483, 33484, 33485, 33486, 33487, 33488, 33489, 33490, 33492, 33493, 33494, 33495, 33496, 33497, 33499, 33500, 33501, 33502, 33503, 33504, 33505, 33507, 33508, 33509, 33510, 33511, 33513, 33514, 33515, 33516, 33517, 33518, 33520, 33521, 33522, 33523, 33524, 33526, 33527, 33528, 33529, 33530, 33531, 33532, 33533, 33534, 33535, 33536, 33537, 33539, 33540, 33541, 33542, 33543, 33544, 33545, 33546, 33547, 33548, 33549, 33550, 33552, 33553, 33554, 33555, 33556, 33557, 33558, 33559, 33560, 33561, 33562, 33563, 33564, 33565, 33566, 33567, 33568, 33569, 33571, 33572, 33573, 33574, 33575, 33576, 33577, 33579, 33580, 33581, 33582, 33583, 33584, 33585, 33586, 33587, 33589, 33590, 33591, 33592, 33593, 33594, 33595, 33596, 33597, 33598, 33599, 33600, 33601, 33602, 33603, 33604, 33605, 33606, 33607, 33608, 33609, 33610, 33611, 33612, 33613, 33614, 33615, 33616, 33617, 33618, 33619, 33620, 
33621, 33622, 33623, 33624, 33625, 33626, 33627, 33628, 33629, 33630, 33631, 33632, 33633, 33634, 33635, 33636, 33637, 33638, 33639, 33640, 33641, 33642, 33643, 33644, 33645, 33646, 33647, 33648, 33650, 33651, 33652, 33653, 33654, 33655, 33656, 33657, 33658, 33659, 33660, 33661, 33662, 33663, 33664, 33665, 33666, 33667, 33668, 33669, 33670, 33671, 33672, 33673, 33674, 33675, 33676, 33677, 33678, 33679, 33680, 33681, 33682, 33683, 33684, 33685, 33686, 33687, 33688, 33689, 33690, 33691, 33693, 33694, 33695, 33696, 33697, 33698, 33699, 33700, 33701, 33702, 33703, 33704, 33707, 33708, 33709, 33710, 33711, 33712, 33715, 33716, 33717, 33718, 33719, 33720, 33721, 33722, 33723, 33724, 33725, 33726, 33727, 33728, 33729, 33730, 33731, 33732, 33733, 33736, 33737, 33738, 33739, 33740, 33741, 33742, 33743, 33744, 33745, 33746, 33747, 33748, 33749, 33750, 33751, 33754, 33755, 33756, 33757, 33758, 33759, 33760, 33761, 33762, 33763, 33764, 33765, 9, 10, 11, 12, 13, 14, 15, 33778, 33780, 33782, 33784, 33786, 33788, 33790, 33798, 33802, 33812, 33814, 33816, 33818, 33820, 33829, 33831, 33833, 33835, 33837, 33842, 33845, 33849, 33854, 33857, 33859, 33861, 33863, 33865, 33867, 33871, 33875, 33877, 33881, 33890, 33898, 32628, 33910, 33915, 32646, 33926, 33932, 33935, 33941, 33944, 33948, 33950, 33952, 33955, 33958, 33961, 33963, 33965, 33968, 33971, 33978, 33983, 33986, 33994, 34002, 32733, 34008, 32741, 34013, 34017, 34019, 34025, 34029, 32767, 34035, 32773, 34041, 34043, 34054, 34056, 34058, 34060, 34062, 34067, 32812, 34072, 32818, 32823, 34083, 34085, 34087, 34090, 34093, 34097, 34102, 34106, 34110, 34115, 34118, 34131, 34140, 34145, 34149, 34151, 34165, 34170, 34173, 34181, 34183, 34195, 34197, 34202, 34218, 34223, 32975, 34227, 34232, 34234, 34236, 34238, 34241, 34245, 34247, 34250, 34252, 34255, 34259, 34263, 34265, 34268, 34271, 34274, 34276, 34278, 34283, 34285, 34291, 34293, 34298, 34300, 34304, 34308, 34310, 34316, 34318, 34322, 34324, 34327, 34331, 34334, 34336, 34339, 
34343, 34345, 34349, 34352, 34354, 34357, 34362, 34365, 34367, 34369, 34371, 34377, 34379, 34381, 34383, 34385, 34391, 34393, 34395, 34397, 34399, 34408, 34412, 34417, 34421, 34423, 34425, 34427, 34431, 34433, 34444, 34446, 34448, 34452, 34457, 34460, 34464, 34468, 34475, 34478, 34484, 34490, 33282, 34496, 34499, 34502, 34504, 34506, 34509, 34511, 34513, 34515, 34517, 34519, 34523, 34529, 34532, 34538, 34546, 34548, 34554, 33355, 34560, 34562, 34565, 34567, 33371, 34573, 34575, 34579, 34582, 33386, 33388, 34588, 34590, 34592, 34594, 34599, 34601, 34602, 34609, 33420, 33422, 34616, 33429, 34620, 34625, 34626, 34628, 34443, 34420, 34402, 34438, 34439, 34443, 34420, 34402, 34438, 34439, 34635, 34636, 34641, 34643, 33793, 34006, 34012, 33795, 34022, 33797, 33805, 32521, 33810, 33808, 33823, 33827, 33825, 33840, 32560, 33848, 33853, 33870, 34443, 33874, 33884, 33889, 33887, 34443, 34420, 34402, 33889, 34439, 33894, 33896, 33901, 33903, 33905, 33906, 33909, 33914, 33919, 33921, 33922, 33925, 33930, 33937, 33939, 33946, 34650, 34653, 33976, 33981, 33989, 33991, 33993, 33997, 33999, 34001, 34006, 34012, 34016, 34022, 34024, 34052, 34050, 34065, 32776, 34047, 32788, 34052, 34050, 34065, 34069, 34077, 34081, 34096, 34101, 34109, 34114, 34124, 34122, 34126, 34127, 34129, 34130, 34134, 34136, 34138, 34143, 34155, 34157, 34159, 34161, 34163, 34168, 34176, 34178, 34180, 34186, 34190, 34188, 34194, 34192, 34200, 34205, 34207, 34209, 34211, 34213, 34215, 34217, 34221, 34230, 33003, 33009, 34262, 34282, 34289, 34321, 34296, 34307, 34314, 34321, 34296, 34307, 34314, 34321, 34330, 34348, 34360, 33136, 33150, 34375, 33168, 34389, 34402, 34404, 34406, 34411, 34415, 34443, 34420, 34430, 34436, 34438, 34439, 34443, 34441, 34456, 34467, 34472, 34474, 34482, 34487, 34489, 34522, 34527, 34534, 34536, 34540, 34542, 34544, 34550, 34551, 34553, 34597, 34604, 34606, 34608, 34615, 34660, 34662, 34664, 33491, 34669, 34671, 34673, 34675, 34677, 34681, 34683, 34685, 34689, 34691, 34694, 34696, 
34699, 34702, 34705, 34708, 34710, 34714, 34720, 34724, 34731, 34733, 34736, 34738, 34749, 34751, 34754, 34761, 34766, 34768, 34771, 34773, 34780, 34782, 34784, 33436, 33525, 34701, 34631, 33519, 33525, 34701, 34640, 34796, 34800, 34802, 34807, 34810, 34716, 34648, 34716, 34659, 34657, 34713, 34718, 33551, 34727, 34729, 34742, 34744, 34745, 34747, 34760, 34758, 34765, 34778, 34776, 34816, 34827, 34831, 34833, 34835, 34837, 34840, 34843, 34845, 34848, 34850, 34853, 33692, 34857, 34862, 34865, 34867, 34787, 34788, 34790, 34791, 34793, 34795, 34870, 34873, 34876, 34880, 34805, 34813, 34889, 34892, 34814, 34819, 34821, 34823, 34825, 34830, 34860, 34906, 34908, 34869, 34900, 34886, 34874, 34878, 34879, 34886, 34884, 34887, 34902, 34888, 34896, 34894, 34898, 34902, 34900, 34904, 34909, 34911, 34913, 34915, 34917, 34919, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 34999, 35023, 35050, 35055, 35057, 35064, 35072, 35073, 35107, 35118, 35143, 35163, 35164, 35092, 35098, 35094, 35165, 35166, 34440, 35167, 35168, 35169, 35092, 35098, 35094, 35170, 35171, 34440, 35172, 35176, 34928, 34929, 32490, 34931, 34932, 34934, 35177, 35079, 35080, 35178, 34987, 34988, 35179, 34989, 34990, 35180, 34992, 35181, 35182, 33800, 34936, 35183, 35184, 35185, 35186, 34939, 34937, 34941, 35187, 35188, 35189, 34944, 34942, 34946, 35190, 34947, 35191, 34948, 35192, 35193, 33851, 33856, 34952, 34954, 34956, 33873, 35194, 35120, 35195, 35196, 34958, 33879, 34960, 35197, 35198, 35199, 34440, 35200, 35201, 35092, 35098, 35094, 35202, 35203, 34440, 35204, 33892, 35205, 35206, 34962, 35207, 35208, 35209, 35210, 34963, 35211, 33912, 35212, 33917, 35213, 35214, 35215, 34966, 35216, 33928, 35120, 35217, 34968, 34969, 35218, 35219, 34970, 34971, 35220, 35221, 34972, 34974, 34975, 34976, 34977, 34979, 34980, 34981, 33973, 33974, 35223, 34982, 35224, 34983, 34984, 35225, 35226, 35227, 32723, 35228, 35229, 35230, 32969, 35231, 34987, 34988, 35232, 34989, 34990, 35233, 34992, 35234, 35235, 34027, 34994, 35236, 35237, 
35002, 35000, 35004, 35238, 34995, 34996, 34997, 35239, 34998, 35240, 35241, 35242, 35243, 35002, 35000, 35004, 35244, 35005, 35245, 35006, 32816, 35008, 35246, 35009, 35247, 35010, 35012, 35013, 35014, 35248, 35249, 34099, 34104, 35017, 35250, 35251, 34112, 34117, 34120, 35252, 35253, 35254, 35255, 35256, 35257, 34133, 35258, 35259, 35260, 35022, 35261, 35024, 34153, 35262, 35263, 35264, 35265, 35266, 35026, 35267, 35027, 35028, 35268, 35269, 35270, 35030, 35271, 35272, 35273, 35274, 35275, 35032, 35276, 35033, 35277, 35278, 35279, 35280, 35281, 35282, 35283, 32969, 35284, 35035, 34226, 32980, 35285, 35038, 35040, 34240, 34243, 35043, 35044, 35286, 35046, 35287, 34257, 35048, 35288, 34269, 35052, 35053, 35289, 35290, 35066, 35291, 35068, 35069, 35292, 34297, 35059, 34302, 35062, 35293, 35065, 35294, 35066, 35295, 35068, 35069, 35296, 34297, 35075, 34302, 35062, 35297, 35065, 35298, 35066, 35299, 35068, 35069, 35300, 34333, 35075, 35301, 34351, 35078, 35079, 35302, 35080, 35303, 35082, 34373, 33143, 35304, 35305, 34378, 35087, 34387, 33161, 35306, 35307, 34392, 35092, 35094, 35308, 35309, 35310, 35095, 35311, 35096, 35312, 35097, 35116, 35313, 35314, 35105, 35098, 35101, 35315, 35103, 35316, 35317, 34440, 35318, 35319, 35320, 35105, 34450, 35321, 34462, 34459, 35110, 34470, 35322, 35323, 35324, 34477, 34480, 35325, 35114, 35326, 35327, 34492, 35116, 35117, 35120, 34508, 35123, 35125, 35127, 34525, 35328, 35329, 35129, 35130, 35330, 35331, 35131, 35332, 35333, 35334, 35132, 35133, 35335, 35336, 35337, 34558, 34556, 35136, 33363, 35138, 34571, 34569, 35141, 34577, 34586, 35145, 35147, 35149, 35150, 35338, 35151, 35153, 35339, 35340, 35341, 34613, 35155, 35342, 35158, 35159, 35345, 35354, 35361, 35372, 35376, 35382, 35383, 35384, 33538, 35162, 35385, 35386, 35387, 35388, 33538, 35389, 34638, 35395, 35396, 35397, 34655, 35398, 35399, 35344, 35347, 33498, 35350, 34679, 35352, 33512, 33519, 34693, 33525, 34698, 34701, 34704, 35363, 35400, 34716, 35401, 34722, 35402, 
35366, 35403, 35404, 35367, 34735, 34740, 33570, 35405, 35406, 35407, 35408, 35371, 34756, 35409, 35410, 35411, 34763, 35378, 35412, 35413, 35379, 35381, 35422, 35431, 35432, 35433, 35434, 35435, 35436, 34798, 35391, 34852, 35441, 35393, 35394, 35442, 35445, 35414, 35446, 35447, 35448, 35449, 35415, 35450, 35417, 35419, 35420, 35421, 34852, 34855, 35427, 35451, 35428, 35429, 28312, 35439, 35454, 35455, 34872, 28312, 34882, 35439, 35456, 35457, 35458, 35459, 34882, 34891, 35460, 35461, 35462, 35463, 35464, 34891, 28312, 35465, 35466, 35467, 35468, 35469, 35470, 35452, 28313, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 35499, 35501, 35502, 35496, 35503, 35506, 35508, 35510, 35511, 35496, 35512, 35515, 35518, 35519, 35520, 35521, 35522, 35523, 35525, 35526, 35528, 35529, 35531, 35532, 35534, 35537, 35538, 35541, 35543, 35544, 35545, 35547, 35549, 35550, 35551, 35553, 35555, 35558, 35559, 35560, 35561, 35562, 35563, 35565, 35566, 35568, 35569, 35496, 35570, 35574, 35575, 35577, 35578, 35496, 35579, 35582, 35584, 35587, 35592, 35594, 35596, 35600, 35602, 35603, 35605, 35606, 35609, 35610, 35613, 35614, 35615, 35616, 35617, 35618, 35619, 35620, 35621, 35622, 35624, 35626, 35627, 35631, 35635, 35637, 35638, 35640, 35641, 35643, 35646, 35647, 35648, 35650, 35651, 35652, 35654, 35655, 35656, 35658, 35488, 35661, 35663, 35664, 35665, 35667, 35669, 35670, 35671, 35673, 35675, 35676, 35677, 35678, 35681, 35682, 35683, 35686, 35687, 35688, 35689, 35695, 35699, 35489, 35701, 35702, 35708, 35710, 35711, 35715, 35717, 35719, 35721, 35723, 35731, 35733, 35734, 35735, 35737, 35738, 35739, 35740, 35741, 35742, 35744, 35746, 35747, 35490, 35749, 35750, 35751, 35491, 35492, 35754, 35756, 35757, 35759, 35494, 34342, 35760, 35761, 35762, 35493, 35764, 35766, 35768, 35769, 35771, 35494, 34342, 35772, 35773, 35774, 35493, 35776, 35778, 35780, 35781, 35783, 35494, 34342, 35784, 35786, 35787, 35788, 35790, 35792, 35793, 35794, 35795, 35797, 35798, 35799, 35800, 35801, 35803, 35804, 
35805, 35809, 35811, 35813, 35814, 35815, 35817, 35818, 35496, 35819, 35821, 35824, 35826, 35828, 35829, 35496, 35831, 35832, 35833, 35834, 35838, 35839, 35841, 35844, 35845, 35846, 35497, 35847, 35848, 35849, 35850, 35851, 35852, 35855, 35856, 35859, 35863, 35864, 35868, 35869, 35870, 35871, 35872, 35873, 35874, 35875, 35876, 35498, 35877, 35878, 35879, 35880, 35881, 35883, 35884, 35888, 35889, 35891, 35892, 35901, 35902, 35907, 35909, 35517, 35536, 35540, 35726, 35706, 35704, 35728, 35598, 35589, 35591, 35598, 35599, 35913, 33476, 35914, 35694, 35692, 35706, 35704, 35698, 35706, 35704, 35630, 35634, 35645, 35660, 35694, 35692, 35698, 35706, 35704, 35714, 35728, 35726, 35730, 35837, 35837, 35843, 35861, 35867, 35887, 35916, 35346, 35917, 35918, 35919, 35920, 35921, 35922, 34688, 35923, 35924, 35925, 35926, 35927, 35928, 33538, 35929, 35931, 35933, 35935, 35938, 35939, 35940, 35941, 35946, 35947, 33588, 35948, 35951, 35897, 35952, 35953, 35955, 35956, 35964, 35965, 35966, 35968, 35969, 35911, 35932, 35937, 35944, 35972, 35977, 35979, 35980, 35981, 35982, 35423, 35983, 35984, 35985, 35987, 35988, 35989, 35976, 35963, 35961, 35959, 35976, 35974, 35990, 35993, 35994, 35995, 35996, 36001, 36002, 36003, 36008, 36009, 35976, 35974, 36010, 36013, 36016, 36017, 36007, 36007, 36007, 12, 13, 14, 15, 36035, 36037, 36041, 36043, 35527, 35530, 36060, 36064, 35557, 36074, 36079, 35573, 36085, 36087, 35636, 35639, 36123, 36130, 36132, 35680, 35685, 36153, 36177, 36181, 36182, 36187, 36188, 36192, 36193, 36198, 36199, 36203, 36204, 36209, 36210, 36217, 36222, 36235, 36238, 36242, 36243, 36246, 36253, 36259, 36265, 36270, 36274, 36275, 36282, 36033, 35504, 36039, 35513, 35908, 36290, 35791, 35789, 35736, 35791, 35789, 35524, 35791, 35789, 36056, 36291, 36058, 36292, 35546, 35552, 35554, 35726, 36293, 35556, 36294, 36295, 36296, 36072, 36255, 36077, 35571, 36083, 35580, 36240, 36092, 36088, 36297, 36229, 36298, 36089, 36090, 36299, 36226, 35806, 36240, 36092, 36091, 36300, 36229, 
36228, 36093, 36301, 36094, 36255, 36096, 35607, 36098, 35611, 36303, 36141, 36157, 35679, 35684, 36150, 36305, 36306, 35700, 36307, 36308, 35726, 36101, 36103, 36105, 36107, 35679, 35684, 35696, 36309, 35700, 36310, 36311, 35726, 36108, 36109, 36110, 36111, 35628, 36312, 35632, 36313, 35732, 36119, 36314, 36121, 35653, 36127, 35657, 36315, 35666, 35668, 36137, 35672, 35674, 36141, 36157, 35679, 35684, 36150, 36316, 36317, 35696, 36318, 35700, 36319, 36320, 35726, 36156, 36157, 35712, 36321, 35716, 36161, 35724, 35722, 36322, 36323, 36324, 35732, 36166, 35736, 36169, 36171, 36173, 36175, 36174, 35748, 36180, 35755, 36185, 36186, 36190, 35763, 35767, 36196, 36197, 36201, 35775, 35779, 36207, 36208, 35785, 36213, 35791, 35789, 36220, 36225, 36226, 35806, 36240, 36325, 36248, 36229, 36228, 36231, 36230, 36233, 35822, 35820, 36240, 36326, 36248, 36249, 36251, 36327, 36255, 36257, 36260, 35857, 36262, 36328, 36263, 36329, 35865, 36269, 36279, 36280, 36330, 35885, 36285, 36332, 36339, 36346, 36349, 36353, 36357, 35950, 36360, 36286, 35900, 35899, 35903, 36288, 35906, 35905, 35932, 35930, 36370, 36371, 36334, 36336, 36338, 36344, 36342, 35932, 35930, 36372, 36352, 36373, 36355, 36362, 36364, 36380, 36368, 36387, 36388, 36389, 36390, 36366, 36377, 35970, 36383, 36384, 36368, 36391, 36392, 36366, 36377, 35970, 36383, 36384, 36397, 36398, 36368, 35970, 36374, 36403, 36404, 35978, 36377, 36379, 36383, 36384, 36409, 35991, 36386, 36410, 35991, 36395, 36007, 36402, 36000, 36411, 36005, 36402, 36015, 36012, 36408, 12, 13, 14, 15, 36465, 36416, 36466, 36417, 36467, 36418, 36468, 36419, 36471, 36472, 36473, 36474, 36475, 36476, 36477, 36478, 36055, 36053, 36479, 36481, 36422, 36483, 36423, 36484, 36485, 36486, 36488, 36070, 36489, 36492, 36425, 36493, 36494, 36426, 36495, 36427, 36496, 36428, 36497, 36429, 36498, 36455, 36456, 36457, 36499, 36500, 36502, 36504, 36505, 36507, 36453, 36508, 36454, 36509, 36455, 36456, 36457, 36510, 36511, 36513, 36514, 36515, 36517, 36518, 36519, 
36520, 36521, 36522, 36302, 36524, 36525, 36526, 36145, 36527, 36148, 36528, 36529, 36154, 36531, 36534, 36532, 36535, 36536, 36537, 36538, 36539, 36145, 36540, 36148, 36541, 36154, 36543, 36546, 36544, 36547, 36548, 36549, 36550, 36551, 36553, 36555, 36118, 36116, 36556, 36558, 36432, 36559, 36560, 36561, 36433, 36434, 36563, 36564, 36565, 36566, 36567, 36568, 36569, 36570, 36145, 36571, 36148, 36572, 36573, 36575, 36154, 36577, 36580, 36578, 36581, 36582, 36583, 36585, 36586, 36587, 36588, 36589, 36592, 36593, 36594, 36595, 36596, 36597, 36598, 36599, 36600, 36178, 36601, 35753, 35752, 36602, 36603, 36442, 36604, 36605, 36606, 36444, 36607, 36608, 36446, 36609, 36610, 36611, 36448, 36612, 36613, 36450, 36614, 36615, 36616, 36617, 36618, 36451, 36619, 36452, 36620, 36621, 36453, 36622, 36454, 36623, 36455, 36456, 36457, 36625, 36626, 36627, 36628, 36629, 36630, 36453, 36631, 36632, 36454, 36633, 36455, 36456, 36457, 36635, 36636, 36637, 36458, 36639, 36640, 36459, 36641, 36642, 36643, 36645, 36647, 36267, 36648, 36272, 36462, 36277, 36649, 36650, 36652, 35890, 36653, 36356, 36662, 36663, 36664, 35898, 36665, 36666, 36667, 36668, 35904, 36470, 36669, 36670, 36654, 36673, 36674, 36675, 36656, 36676, 36677, 36340, 36678, 36679, 36350, 36681, 35942, 36683, 36661, 36684, 36685, 36687, 36688, 36690, 36692, 36693, 36694, 36365, 36695, 36696, 36697, 36698, 36700, 36701, 36702, 36367, 36703, 36704, 36705, 36707, 36708, 36709, 36710, 36712, 36713, 36714, 36381, 36715, 36716, 36718, 36719, 36721, 36722, 36723, 36724, 36725, 36402, 36727, 36728, 36729, 36730, 36731, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36737, 36739, 36741, 36743, 36744, 36747, 36750, 36752, 36753, 36756, 36758, 36763, 36766, 36769, 36771, 36773, 36775, 36777, 36778, 36779, 36780, 36782, 36784, 36786, 36788, 36790, 36791, 36792, 36793, 36795, 36797, 36808, 36810, 36813, 36816, 36822, 36824, 36826, 36829, 36837, 36838, 36841, 36845, 36846, 36855, 36857, 36861, 36864, 36870, 36879, 36882, 36884, 36885, 
36888, 36892, 36895, 36899, 36902, 36906, 36908, 36910, 36913, 36915, 36917, 36918, 36919, 36921, 36923, 36926, 36927, 36929, 36931, 36932, 36933, 36936, 36937, 36940, 36646, 36946, 36948, 36949, 36950, 36651, 36954, 36960, 36957, 36965, 36962, 36966, 36746, 36836, 36877, 36887, 36894, 36901, 36836, 36746, 36877, 36887, 36894, 36901, 36835, 36836, 36877, 36887, 36894, 36901, 36755, 36482, 36851, 36760, 36853, 36576, 36859, 36833, 36552, 36869, 36840, 36844, 36851, 36849, 36853, 36576, 36859, 36833, 36552, 36869, 36934, 36644, 36942, 36934, 36644, 36803, 36801, 36304, 36806, 36812, 36820, 36818, 36542, 36877, 36877, 36833, 36552, 36836, 36835, 36887, 36894, 36901, 36840, 36844, 36851, 36849, 36853, 36576, 36859, 36866, 36584, 36869, 36875, 36873, 36877, 36887, 36894, 36901, 36920, 36934, 36644, 36942, 36969, 36973, 36976, 36977, 36979, 36981, 36358, 36983, 36972, 36985, 36972, 36985, 36987, 36992, 37000, 36972, 36985, 36972, 36985, 36972, 36985, 37011, 36990, 36994, 36996, 36998, 37002, 36717, 36990, 36994, 36996, 36998, 37002, 36720, 37007, 37009, 37013, 36005, 37007, 37009, 37013, 35999, 37021, 37007, 37009, 37013, 36726, 37007, 37009, 37013, 37024, 12, 13, 14, 15, 37047, 37061, 37073, 37077, 37079, 37086, 37091, 37093, 37095, 37097, 36959, 37040, 37041, 37042, 37043, 36964, 37129, 37130, 37131, 37089, 37090, 37132, 37133, 37094, 37134, 37096, 37044, 36911, 36909, 37135, 37136, 37137, 37089, 37090, 37138, 37139, 37094, 37140, 37096, 37045, 36911, 36909, 37141, 37142, 37143, 37089, 37090, 37144, 37145, 37094, 37146, 37096, 37046, 36911, 36909, 37147, 36842, 37148, 36562, 36759, 36757, 37149, 37150, 37151, 37085, 37084, 37152, 37153, 37154, 37155, 37156, 36487, 37157, 36842, 36562, 37158, 36847, 37159, 37160, 37161, 37085, 37051, 37162, 37163, 37164, 37165, 37166, 36491, 37052, 37053, 37054, 37055, 37056, 37057, 37059, 36501, 37063, 37064, 37065, 37067, 36512, 37070, 37108, 37110, 37111, 37113, 37167, 37114, 36767, 37116, 37117, 37168, 37169, 36951, 37120, 36947, 
37122, 36955, 37053, 37054, 37055, 37056, 37057, 37059, 36501, 37063, 37064, 37065, 37067, 36512, 37070, 37108, 37110, 37111, 37113, 37170, 37114, 36799, 37116, 37117, 37171, 37172, 37173, 36951, 37120, 36947, 37122, 36955, 37174, 37175, 37072, 37071, 37176, 37177, 37178, 37076, 37075, 37179, 37180, 37181, 37094, 37096, 37182, 37183, 37184, 37185, 37186, 37187, 37188, 37189, 36842, 36562, 37190, 36847, 37191, 37192, 37193, 37085, 37084, 37194, 37195, 37196, 37197, 37198, 36872, 37199, 37200, 37201, 37089, 37090, 37202, 37203, 37094, 37204, 37096, 37098, 36911, 36909, 37101, 37102, 37103, 37105, 37205, 37107, 37108, 37110, 37111, 37113, 37206, 37114, 36938, 37116, 37117, 37207, 37208, 36951, 37120, 36947, 37122, 36955, 37210, 36975, 36680, 36682, 37215, 37217, 36970, 37218, 37219, 36970, 37220, 37224, 36970, 37225, 37226, 36970, 37227, 37228, 36970, 37229, 37221, 37231, 37222, 37232, 37233, 37234, 37223, 37235, 37221, 37237, 37222, 37238, 37239, 37240, 37223, 37241, 37243, 37244, 37230, 37245, 37246, 37247, 37248, 37230, 37249, 37250, 37252, 37253, 37230, 37254, 37256, 37257, 37230, 37258, 37015, 37017, 37023, 37026, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37125, 37275, 37276, 37277, 37278, 37127, 37280, 37283, 37284, 37270, 36890, 37287, 36897, 37289, 36749, 37290, 37291, 37292, 37293, 37296, 37297, 37270, 36890, 37300, 36897, 37302, 36904, 37303, 37304, 37305, 37306, 37309, 37310, 37270, 36890, 37313, 36897, 37315, 36749, 37316, 37317, 37318, 36754, 37320, 37322, 37323, 37324, 37325, 37328, 37329, 37330, 36761, 37335, 36839, 37337, 37338, 37340, 37341, 37344, 37345, 37346, 36764, 37351, 37352, 37353, 37354, 37355, 37356, 37357, 37358, 37359, 37062, 37360, 37361, 37362, 37363, 37364, 37365, 37366, 37367, 37368, 37369, 37371, 37372, 37373, 37374, 37377, 37378, 37379, 37380, 37381, 37382, 37383, 37384, 37385, 37386, 37387, 37388, 37062, 37389, 37390, 37391, 37392, 37393, 37394, 37395, 37396, 37397, 37398, 37400, 37401, 37402, 37403, 37405, 37407, 37408, 37409, 37410, 
37411, 37414, 37415, 37074, 37417, 37419, 37420, 37078, 37270, 37424, 37425, 37428, 36839, 37434, 37435, 37437, 37438, 37441, 37442, 37443, 37087, 37448, 37449, 37452, 37453, 37270, 36890, 37456, 36897, 37458, 36904, 37459, 37460, 37461, 37462, 37463, 37464, 37465, 37467, 37468, 37469, 37470, 37471, 37473, 37474, 37475, 37476, 37479, 37480, 37481, 37482, 37483, 37484, 37490, 36982, 36980, 37128, 37493, 36982, 36980, 37128, 37333, 37349, 37496, 36982, 36980, 37216, 37499, 36982, 36980, 37216, 37427, 37427, 37446, 37502, 36982, 36980, 37216, 37504, 37506, 37510, 37512, 37514, 37518, 37522, 37018, 37527, 37020, 37532, 37536, 37509, 37538, 37517, 37539, 37521, 37526, 37531, 37540, 37535, 37541, 10, 11, 12, 13, 14, 15, 37561, 37562, 37286, 37564, 37288, 37566, 37568, 37573, 37574, 37299, 37576, 37301, 37578, 37580, 37585, 37586, 37312, 37588, 37314, 37590, 37592, 37594, 37321, 37597, 37600, 37603, 37605, 37607, 37610, 37613, 37623, 37637, 37638, 37650, 37664, 37666, 37671, 37673, 37675, 37677, 37678, 37682, 37684, 37687, 37690, 37695, 37696, 37455, 37698, 37457, 37700, 37702, 37716, 37717, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37615, 37642, 37648, 37646, 37644, 37660, 37658, 37661, 37663, 37670, 37489, 36961, 37724, 37725, 37726, 37621, 37619, 37617, 37627, 37556, 37554, 37629, 37633, 37631, 37634, 37615, 37642, 37492, 36968, 37728, 37729, 37730, 36967, 37282, 37560, 37295, 37572, 37308, 37584, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37615, 37642, 37648, 37646, 37644, 37660, 37658, 37661, 37663, 37670, 36968, 37731, 37604, 37609, 37732, 37614, 37451, 37694, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37615, 37642, 37648, 37646, 37644, 37663, 37670, 37495, 36671, 37734, 37735, 37736, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37636, 37642, 37648, 37646, 37644, 37654, 37652, 37656, 37660, 37658, 37661, 37663, 37670, 37498, 36672, 37738, 37739, 37740, 37686, 37741, 37691, 37422, 37694, 
37423, 37686, 37742, 37451, 37694, 37686, 37743, 37691, 37451, 37694, 37707, 37705, 37708, 37712, 37710, 37713, 37715, 37721, 37501, 37212, 37745, 37746, 37747, 37505, 37507, 37760, 37511, 37513, 37515, 37762, 37519, 37764, 37523, 37019, 37765, 37528, 37251, 37766, 37533, 37768, 37537, 35471, 35472, 35475, 35476, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37285, 37298, 37311, 37807, 37808, 37810, 37811, 37454, 37828, 37829, 37830, 37831, 37832, 37806, 37833, 37834, 37835, 37836, 37837, 37838, 37839, 37840, 37841, 37842, 37843, 37809, 37844, 37845, 37846, 37847, 37848, 37850, 37851, 37491, 37854, 37855, 37856, 37806, 37857, 37858, 37859, 37860, 37861, 37862, 37863, 37864, 37865, 37867, 37868, 37494, 37871, 37872, 37873, 37781, 37779, 37782, 37874, 37875, 37788, 37786, 37789, 37876, 37877, 37795, 37793, 37796, 37878, 37879, 37880, 37806, 37881, 37882, 37883, 37884, 37885, 37886, 37887, 37888, 37889, 37890, 37891, 37809, 37892, 37893, 37894, 37895, 37896, 37897, 37319, 37599, 37798, 37800, 37801, 37899, 37336, 37900, 37803, 37804, 37805, 37902, 37903, 37904, 37826, 37824, 37827, 37905, 37906, 37907, 37806, 37908, 37909, 37910, 37911, 37912, 37913, 37914, 37915, 37916, 37917, 37918, 37809, 37919, 37920, 37922, 37923, 37497, 37926, 37927, 37928, 37806, 37929, 37930, 37931, 37932, 37933, 37934, 37935, 37936, 37937, 37938, 37939, 37809, 37940, 37941, 37942, 37943, 37944, 37945, 37946, 37947, 37949, 37950, 37500, 37433, 37953, 37818, 37812, 37813, 37814, 37815, 37955, 37956, 37957, 37826, 37824, 37822, 37827, 37958, 37826, 37824, 37822, 37433, 37959, 37818, 37819, 37820, 37961, 37962, 37826, 37824, 37822, 37827, 37433, 37963, 37818, 37819, 37820, 37965, 37966, 37967, 37826, 37824, 37827, 37968, 37969, 37970, 37971, 37972, 37973, 37974, 37975, 37977, 37978, 37503, 37981, 37982, 37984, 37985, 37986, 37988, 37990, 37991, 37993, 37994, 37996, 37998, 37999, 38000, 38001, 38002, 12, 13, 14, 15, 38026, 38029, 38030, 38033, 38020, 38038, 38041, 38042, 38022, 38050, 38053, 38054, 
38058, 38020, 38069, 38070, 37777, 38071, 38074, 38075, 37784, 38076, 38079, 38080, 37791, 38081, 38082, 38085, 38086, 38089, 38020, 38094, 38097, 38098, 38022, 38104, 38105, 38106, 38107, 38108, 38110, 38112, 38113, 38114, 38118, 38119, 37822, 38120, 38121, 38124, 38125, 38128, 38020, 38133, 38136, 38022, 38142, 38145, 38146, 38149, 38020, 38154, 38157, 38158, 38161, 38022, 38169, 38171, 38172, 38173, 38174, 38175, 38179, 38180, 38181, 38182, 38184, 38185, 38186, 38187, 38189, 38190, 38191, 38194, 38195, 38196, 38197, 38198, 38200, 38201, 38202, 38206, 38207, 37822, 38208, 38209, 38212, 38025, 38047, 38049, 38063, 38065, 38066, 38219, 38109, 38115, 38068, 38073, 38078, 38203, 38205, 38103, 38219, 38109, 38115, 38117, 38203, 38205, 38139, 38141, 38166, 38168, 38176, 38178, 38193, 38203, 38193, 38203, 38205, 38217, 38219, 38222, 38221, 38225, 38224, 38226, 35473, 38228, 35474, 38230, 38231, 13, 14, 15, 38240, 38244, 38245, 38248, 38249, 38251, 38253, 38256, 38254, 38260, 38258, 38264, 38262, 38266, 38270, 38271, 38274, 38276, 38111, 38286, 38284, 38288, 38292, 38293, 38295, 38296, 38300, 38301, 38305, 38170, 38312, 38316, 38188, 38323, 38199, 38333, 38331, 38337, 38035, 38032, 38044, 38160, 38338, 38339, 38060, 38340, 38341, 38091, 38088, 38100, 38160, 38342, 38343, 38344, 38279, 38345, 38283, 38346, 38347, 38348, 38349, 38330, 38350, 38091, 38088, 38100, 38160, 38351, 38352, 38353, 38279, 38354, 38283, 38355, 38356, 38330, 38357, 38130, 38127, 38163, 38160, 38358, 38359, 38151, 38148, 38163, 38160, 38360, 38361, 38362, 38311, 38309, 38363, 38364, 38365, 38322, 38366, 38367, 38330, 38368, 38214, 38211, 38369, 38370, 38371, 38372, 38373, 38374, 38375, 38376, 38377, 38378, 38379, 38380, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38037, 38046, 38062, 38392, 38394, 38396, 38093, 38102, 38404, 38132, 38138, 38153, 38165, 38414, 38415, 38417, 38420, 38216, 38422, 38423, 38241, 38424, 38425, 38246, 38428, 38057, 38250, 38431, 38432, 38267, 38433, 38434, 38272, 38401, 
38438, 38402, 38440, 38418, 38445, 38447, 38448, 38267, 38449, 38450, 38272, 38401, 38454, 38402, 38456, 38418, 38459, 38461, 38462, 38289, 38463, 38464, 38294, 38467, 38468, 38297, 38469, 38470, 38302, 38413, 38474, 38475, 38416, 38479, 38418, 38482, 38484, 38485, 38427, 38430, 38436, 38487, 38452, 38466, 38472, 38487, 38488, 38490, 38234, 38495, 38493, 38235, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38532, 38530, 38512, 38535, 38533, 38513, 38537, 38538, 38514, 38541, 38539, 38518, 38544, 38542, 38519, 38545, 38437, 38547, 38439, 38257, 38261, 38265, 38549, 38444, 38334, 38553, 38551, 38518, 38556, 38554, 38519, 38557, 38453, 38559, 38455, 38287, 38561, 38458, 38334, 38565, 38563, 38521, 38568, 38566, 38522, 38571, 38569, 38523, 38574, 38572, 38524, 38575, 38473, 38315, 38326, 38578, 38478, 38326, 38580, 38481, 38334, 38582, 38529, 38584, 38585, 38586, 38587, 38588, 38589, 38590, 38591, 38594, 38595, 38233, 38596, 38232, 38597, 13, 14, 15, 38609, 38610, 38612, 38613, 38536, 38616, 38618, 38619, 38621, 38622, 38627, 38628, 38629, 38632, 38634, 38635, 38637, 38638, 38643, 38646, 38648, 38649, 38651, 38652, 38654, 38655, 38657, 38658, 38660, 38661, 38662, 38665, 38668, 38670, 38626, 38624, 38631, 38642, 38640, 38645, 38626, 38624, 38631, 38642, 38640, 38645, 38664, 38667, 38681, 38683, 38679, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38692, 38722, 38723, 38700, 38699, 38698, 38724, 38701, 38691, 38689, 38715, 38725, 38726, 38706, 38727, 38707, 38697, 38695, 38716, 38717, 38721, 38728, 38729, 38700, 38699, 38698, 38730, 38701, 38705, 38703, 38731, 38732, 38706, 38733, 38707, 38711, 38709, 38715, 38713, 38716, 38717, 38718, 38734, 38719, 38735, 38720, 38721, 38736, 38753, 38755, 38756, 38757, 38759, 38760, 38761, 38762, 38693, 38763, 38765, 38767, 38768, 38769, 38770, 38771, 38772, 38773, 38775, 38776, 38777, 38779, 38780, 38781, 38782, 38784, 38786, 38787, 38788, 38789, 38790, 38791, 38792, 38793, 38795, 38797, 38798, 38738, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38801, 
38805, 38808, 38812, 38818, 38822, 38827, 38829, 38804, 38811, 38810, 38835, 38834, 38833, 38815, 38674, 38821, 38826, 38825, 38835, 38834, 38833, 38832, 38678, 38837, 9, 10, 11, 12, 13, 14, 15, 38848, 38807, 38852, 38856, 38671, 38857, 38858, 38673, 38859, 38860, 38861, 38862, 38863, 38864, 38675, 38865, 38866, 38677, 38676, 38867, 38868, 38869, 38870, 38871, 8, 9, 10, 11, 12, 13, 14, 15, 38880, 38672, 38884, 38885, 38887, 38888, 38890, 38882, 38894, 38895, 38897, 38898, 38899, 38901, 14, 15, 38912, 38913, 38917, 38919, 38922, 38924, 38916, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38883, 38929, 38893, 38892, 38934, 38903, 38932, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38947, 38945, 38949, 38950, 38920, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38961, 38960, 38964, 38962, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38977, 38979, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38684, 38872, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 39008, 39009, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 39024, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
int h_C[]= {
4, 6, 8, 10, 12, 14, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113, 115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 176, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296, 298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342, 344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388, 390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434, 436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 477, 479, 481, 483, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526, 528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572, 574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618, 620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 643, 645, 648, 650, 652, 654, 656, 658, 660, 662, 664, 666, 669, 671, 673, 675, 678, 680, 682, 684, 688, 690, 692, 694, 696, 698, 701, 703, 705, 707, 712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756, 758, 760, 762, 764, 766, 769, 771, 774, 776, 779, 781, 783, 785, 787, 789, 791, 793, 796, 798, 801, 803, 806, 808, 810, 812, 814, 816, 818, 820, 823, 825, 828, 830, 832, 834, 837, 839, 841, 843, 847, 849, 
851, 853, 855, 857, 859, 861, 863, 865, 868, 870, 873, 875, 878, 880, 882, 884, 886, 888, 891, 893, 895, 897, 899, 901, 904, 906, 909, 911, 914, 916, 919, 921, 924, 926, 929, 931, 934, 936, 939, 941, 943, 945, 947, 949, 952, 954, 956, 958, 960, 962, 965, 967, 969, 971, 973, 975, 978, 980, 983, 985, 988, 990, 993, 995, 997, 999, 1002, 1004, 1006, 1008, 1011, 1013, 1017, 1019, 1021, 1023, 1026, 1028, 1031, 1033, 1036, 1038, 1041, 1043, 1046, 1048, 1051, 1053, 1056, 1058, 1061, 1063, 1066, 1068, 1071, 1073, 1076, 1078, 1081, 1083, 1086, 1088, 1091, 1093, 1096, 1098, 1101, 1103, 1105, 1107, 1109, 1111, 1114, 1116, 1119, 1121, 1124, 1126, 1129, 1131, 1134, 1136, 1139, 1141, 1144, 1146, 1149, 1151, 1154, 1156, 1159, 1161, 1164, 1166, 1169, 1171, 1173, 1175, 1177, 1179, 1182, 1184, 1187, 1189, 1192, 1194, 1197, 1199, 1201, 1203, 1205, 1207, 1209, 1211, 1213, 1215, 1217, 1219, 1221, 1223, 1225, 1227, 1229, 1231, 1233, 1235, 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, 1287, 1290, 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1327, 1329, 1331, 1333, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368, 1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406, 1408, 1410, 1412, 1414, 1416, 1419, 1421, 1424, 1426, 1429, 1431, 1434, 1436, 1439, 1441, 1444, 1446, 1449, 1451, 1454, 1456, 1458, 1460, 1462, 1464, 1467, 1469, 1472, 1474, 1476, 1478, 1480, 1482, 1484, 1486, 1489, 1491, 1493, 1495, 1498, 1500, 1502, 1504, 1507, 1509, 1513, 1515, 1517, 1519, 1521, 1523, 1526, 1528, 1531, 1533, 1538, 1540, 1542, 1544, 1546, 1548, 1551, 1553, 1556, 1558, 1561, 1563, 1566, 1568, 1570, 1572, 1574, 1576, 1579, 1581, 1584, 1586, 1589, 1591, 1594, 1596, 1599, 1601, 1604, 1606, 1609, 1611, 1614, 1616, 1619, 1621, 1624, 1626, 1629, 1631, 
1634, 1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710, 1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1739, 1741, 1743, 1745, 1748, 1750, 1753, 1755, 1758, 1760, 1763, 1765, 1768, 1770, 1773, 1775, 1778, 1780, 1782, 1784, 1786, 1788, 1791, 1793, 1796, 1798, 1801, 1803, 1806, 1808, 1811, 1813, 1816, 1818, 1821, 1823, 1826, 1828, 1831, 1833, 1836, 1838, 1841, 1843, 1846, 1848, 1851, 1853, 1856, 1858, 1861, 1863, 1866, 1868, 1870, 1872, 1874, 1876, 1879, 1881, 1884, 1886, 1889, 1891, 1894, 1896, 1899, 1901, 1904, 1906, 1911, 1913, 1915, 1917, 1919, 1921, 1923, 1925, 1927, 1929, 1931, 1933, 1935, 1937, 1939, 1941, 1944, 1946, 1949, 1951, 1953, 1955, 1957, 1959, 1961, 1963, 1965, 1967, 1969, 1971, 1973, 1975, 1977, 1979, 1981, 1983, 1985, 1987, 1989, 1991, 1993, 1995, 1997, 1999, 2001, 2003, 2005, 2007, 2009, 2011, 2013, 2015, 2017, 2019, 2021, 2023, 2025, 2027, 2029, 2031, 2033, 2035, 2037, 2039, 2041, 2043, 2045, 2047, 2049, 2051, 2053, 2055, 2057, 2059, 2061, 2063, 2065, 2067, 2069, 2071, 2073, 2075, 2077, 2079, 2081, 2083, 2085, 2087, 2089, 2091, 2093, 2095, 2097, 2099, 2101, 2103, 2105, 2107, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128, 2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166, 2168, 2170, 2172, 2175, 2177, 2179, 2181, 2183, 2185, 2187, 2189, 2191, 2193, 2195, 2197, 2200, 2202, 2204, 2206, 2209, 2211, 2213, 2215, 2218, 2220, 2222, 2224, 2227, 2229, 2231, 2233, 2235, 2237, 2239, 2241, 2243, 2245, 2247, 2249, 2251, 2253, 2255, 2257, 2259, 2261, 2263, 2265, 2267, 2269, 2271, 2273, 2275, 2277, 2279, 2281, 2283, 2285, 2288, 2290, 2293, 2295, 2297, 2299, 2301, 2303, 2305, 2307, 2309, 2311, 2313, 2315, 2317, 2319, 2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 
2343, 2345, 2347, 2349, 2351, 2353, 2355, 2357, 2359, 2361, 2363, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395, 2397, 2399, 2401, 2403, 2405, 2407, 2409, 2411, 2413, 2415, 2417, 2419, 2421, 2423, 2425, 2427, 2429, 2431, 2433, 2435, 2437, 2439, 2441, 2443, 2445, 2447, 2449, 2451, 2453, 2455, 2457, 2459, 2461, 2463, 2465, 2467, 2469, 2471, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508, 2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546, 2548, 2550, 2552, 2554, 2556, 2558, 2560, 2563, 2565, 2567, 2569, 2572, 2574, 2576, 2578, 2580, 2582, 2584, 2586, 2588, 2590, 2593, 2595, 2597, 2599, 2602, 2604, 2606, 2608, 2611, 2613, 2616, 2618, 2622, 2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2659, 2661, 2663, 2665, 2668, 2670, 2673, 2675, 2678, 2680, 2683, 2685, 2688, 2690, 2692, 2694, 2696, 2698, 2701, 2703, 2706, 2708, 2711, 2713, 2716, 2718, 2721, 2723, 2726, 2728, 2730, 2732, 2734, 2736, 2739, 2741, 2744, 2746, 2749, 2751, 2754, 2756, 2759, 2761, 2764, 2766, 2769, 2771, 2774, 2776, 2779, 2781, 2784, 2786, 2789, 2791, 2794, 2796, 2799, 2801, 2804, 2806, 2809, 2811, 2814, 2816, 2818, 2820, 2822, 2824, 2827, 2829, 2832, 2834, 2837, 2839, 2842, 2844, 2847, 2849, 2852, 2854, 2857, 2859, 2862, 2864, 2866, 2868, 2870, 2872, 2875, 2877, 2880, 2882, 2885, 2887, 2890, 2892, 2895, 2897, 2900, 2902, 2905, 2907, 2910, 2912, 2915, 2917, 2920, 2922, 2925, 2927, 2930, 2932, 2935, 2937, 2940, 2942, 2945, 2947, 2950, 2952, 2955, 2957, 2960, 2962, 2965, 2967, 2970, 2972, 2975, 2977, 2980, 2982, 2985, 2987, 2990, 2992, 2995, 2997, 3000, 3002, 3005, 3007, 3010, 3012, 3014, 3016, 3018, 3020, 3023, 3025, 3028, 3030, 3033, 3035, 3038, 3040, 3042, 3044, 3047, 3049, 3052, 3054, 3060, 3062, 3064, 3066, 3068, 3070, 3073, 3075, 3078, 3080, 3083, 3085, 3088, 3090, 3093, 3095, 
3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116, 3118, 3120, 3123, 3125, 3128, 3130, 3133, 3135, 3138, 3140, 3143, 3145, 3148, 3150, 3156, 3158, 3160, 3162, 3164, 3166, 3169, 3171, 3173, 3175, 3177, 3179, 3182, 3184, 3187, 3189, 3195, 3197, 3200, 3202, 3205, 3207, 3209, 3211, 3214, 3216, 3218, 3220, 3225, 3227, 3229, 3231, 3233, 3235, 3237, 3239, 3242, 3244, 3246, 3248, 3250, 3252, 3255, 3257, 3260, 3262, 3265, 3267, 3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3291, 3293, 3296, 3298, 3301, 3303, 3306, 3308, 3311, 3313, 3316, 3318, 3321, 3323, 3326, 3328, 3331, 3333, 3335, 3337, 3339, 3341, 3343, 3345, 3347, 3349, 3352, 3354, 3357, 3359, 3361, 3363, 3365, 3367, 3369, 3371, 3373, 3375, 3377, 3379, 3381, 3383, 3386, 3388, 3390, 3392, 3394, 3396, 3399, 3401, 3404, 3406, 3408, 3410, 3412, 3414, 3417, 3419, 3422, 3424, 3427, 3429, 3432, 3434, 3437, 3439, 3442, 3444, 3447, 3449, 3452, 3454, 3457, 3459, 3461, 3463, 3465, 3467, 3469, 3471, 3473, 3475, 3477, 3479, 3481, 3483, 3485, 3487, 3489, 3491, 3493, 3495, 3498, 3500, 3502, 3504, 3506, 3508, 3511, 3513, 3516, 3518, 3520, 3522, 3524, 3526, 3529, 3531, 3534, 3536, 3539, 3541, 3544, 3546, 3548, 3550, 3552, 3554, 3557, 3559, 3562, 3564, 3567, 3569, 3572, 3574, 3577, 3579, 3583, 3585, 3587, 3589, 3594, 3596, 3599, 3601, 3604, 3606, 3609, 3611, 3614, 3616, 3618, 3620, 3623, 3625, 3628, 3630, 3642, 3644, 3647, 3649, 3652, 3654, 3657, 3659, 3662, 3664, 3666, 3668, 3670, 3672, 3675, 3677, 3680, 3682, 3685, 3687, 3690, 3692, 3695, 3697, 3700, 3702, 3704, 3706, 3709, 3711, 3714, 3716, 3722, 3724, 3726, 3728, 3730, 3732, 3735, 3737, 3740, 3742, 3745, 3747, 3750, 3752, 3754, 3756, 3758, 3760, 3763, 3765, 3768, 3770, 3773, 3775, 3778, 3780, 3782, 3784, 3786, 3788, 3791, 3793, 3796, 3798, 3801, 3803, 3806, 3808, 3811, 3813, 3816, 3818, 3821, 3823, 3826, 3828, 3830, 3832, 3835, 3837, 3840, 3842, 3848, 3850, 3853, 3855, 3858, 3860, 3863, 3865, 3868, 3870, 3873, 3875, 3878, 3880, 3883, 3885, 3888, 3890, 3892, 
3894, 3896, 3898, 3901, 3903, 3906, 3908, 3911, 3913, 3916, 3918, 3921, 3923, 3926, 3928, 3931, 3933, 3936, 3938, 3940, 3942, 3944, 3946, 3949, 3951, 3954, 3956, 3959, 3961, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3981, 3983, 3985, 3987, 3991, 3993, 3996, 3998, 4001, 4003, 4006, 4008, 4011, 4013, 4015, 4017, 4019, 4021, 4023, 4025, 4027, 4029, 4031, 4033, 4035, 4037, 4040, 4042, 4045, 4047, 4050, 4052, 4055, 4057, 4060, 4062, 4065, 4067, 4070, 4072, 4075, 4077, 4080, 4082, 4085, 4087, 4090, 4092, 4094, 4096, 4098, 4100, 4103, 4105, 4108, 4110, 4113, 4115, 4118, 4120, 4123, 4125, 4128, 4130, 4133, 4135, 4138, 4140, 4143, 4145, 4148, 4150, 4153, 4155, 4158, 4160, 4162, 4164, 4166, 4168, 4171, 4173, 4176, 4178, 4181, 4183, 4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4203, 4205, 4208, 4210, 4216, 4218, 4220, 4222, 4224, 4226, 4229, 4231, 4234, 4236, 4239, 4241, 4244, 4246, 4249, 4251, 4254, 4256, 4259, 4261, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294, 4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332, 4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370, 4372, 4374, 4376, 4378, 4380, 4382, 4384, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408, 4410, 4412, 4414, 4416, 4418, 4420, 4422, 4425, 4427, 4429, 4431, 4434, 4436, 4438, 4440, 4443, 4445, 4447, 4449, 4452, 4454, 4457, 4459, 4461, 4463, 4465, 4467, 4469, 4471, 4473, 4475, 4477, 4479, 4481, 4483, 4485, 4487, 4489, 4491, 4493, 4495, 4497, 4499, 4501, 4503, 4505, 4507, 4509, 4511, 4513, 4515, 4517, 4519, 4522, 4524, 4526, 4528, 4531, 4533, 4535, 4537, 4539, 4541, 4543, 4545, 4547, 4549, 4551, 4553, 4555, 4557, 4559, 4561, 4563, 4565, 4567, 4569, 4571, 4573, 4576, 4578, 4580, 4582, 4585, 4587, 4590, 4592, 4594, 4596, 4598, 4600, 4602, 4604, 4607, 4609, 4611, 4613, 4618, 4620, 4622, 4624, 4626, 4628, 4631, 4633, 
4636, 4638, 4641, 4643, 4646, 4648, 4651, 4653, 4656, 4658, 4661, 4663, 4666, 4668, 4671, 4673, 4676, 4678, 4681, 4683, 4686, 4688, 4690, 4692, 4695, 4697, 4700, 4702, 4708, 4710, 4712, 4714, 4716, 4718, 4721, 4723, 4726, 4728, 4731, 4733, 4736, 4738, 4741, 4743, 4746, 4748, 4751, 4753, 4755, 4757, 4759, 4761, 4764, 4766, 4769, 4771, 4774, 4776, 4779, 4781, 4783, 4785, 4787, 4789, 4791, 4793, 4795, 4797, 4799, 4801, 4803, 4805, 4807, 4809, 4811, 4813, 4815, 4817, 4819, 4821, 4823, 4825, 4827, 4829, 4831, 4833, 4835, 4837, 4839, 4841, 4843, 4845, 4848, 4850, 4852, 4854, 4856, 4858, 4861, 4863, 4866, 4868, 4871, 4873, 4876, 4878, 4880, 4882, 4885, 4887, 4889, 4891, 4895, 4897, 4900, 4902, 4905, 4907, 4910, 4912, 4914, 4916, 4919, 4921, 4923, 4925, 4929, 4931, 4934, 4936, 4939, 4941, 4944, 4946, 4949, 4951, 4954, 4956, 4959, 4961, 4963, 4965, 4968, 4970, 4973, 4975, 4981, 4983, 4985, 4987, 4990, 4992, 4994, 4996, 4999, 5001, 5005, 5007, 5009, 5011, 5014, 5016, 5019, 5021, 5024, 5026, 5029, 5031, 5033, 5035, 5038, 5040, 5043, 5045, 5048, 5050, 5052, 5054, 5056, 5058, 5061, 5063, 5066, 5068, 5071, 5073, 5076, 5078, 5081, 5083, 5086, 5088, 5091, 5093, 5096, 5098, 5100, 5102, 5104, 5106, 5109, 5111, 5114, 5116, 5119, 5121, 5124, 5126, 5128, 5130, 5132, 5134, 5137, 5139, 5142, 5144, 5147, 5149, 5152, 5154, 5156, 5158, 5160, 5162, 5164, 5166, 5168, 5170, 5172, 5174, 5176, 5178, 5180, 5182, 5184, 5186, 5189, 5191, 5194, 5196, 5199, 5201, 5203, 5205, 5207, 5209, 5212, 5214, 5216, 5218, 5220, 5222, 5225, 5227, 5229, 5231, 5233, 5235, 5238, 5240, 5243, 5245, 5248, 5250, 5253, 5255, 5258, 5260, 5263, 5265, 5268, 5270, 5273, 5275, 5277, 5279, 5281, 5283, 5286, 5288, 5290, 5292, 5294, 5296, 5298, 5300, 5302, 5304, 5306, 5308, 5310, 5312, 5314, 5316, 5318, 5320, 5322, 5324, 5326, 5328, 5331, 5333, 5335, 5337, 5340, 5342, 5345, 5347, 5353, 5355, 5357, 5359, 5361, 5363, 5366, 5368, 5371, 5373, 5376, 5378, 5381, 5383, 5385, 5387, 5389, 5391, 5393, 5395, 5397, 5399, 5402, 5404, 5406, 
5408, 5410, 5412, 5414, 5416, 5419, 5421, 5424, 5426, 5432, 5434, 5437, 5439, 5442, 5444, 5447, 5449, 5451, 5453, 5455, 5457, 5459, 5461, 5463, 5465, 5468, 5470, 5473, 5475, 5478, 5480, 5483, 5485, 5488, 5490, 5493, 5495, 5498, 5500, 5502, 5504, 5506, 5508, 5511, 5513, 5515, 5517, 5519, 5521, 5524, 5526, 5529, 5531, 5534, 5536, 5539, 5541, 5543, 5545, 5548, 5550, 5553, 5555, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586, 5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 5606, 5608, 5610, 5612, 5614, 5616, 5618, 5620, 5622, 5624, 5626, 5628, 5630, 5632, 5634, 5636, 5638, 5640, 5642, 5644, 5646, 5648, 5650, 5652, 5654, 5656, 5658, 5660, 5662, 5664, 5667, 5669, 5672, 5674, 5676, 5678, 5680, 5682, 5684, 5686, 5688, 5690, 5692, 5694, 5696, 5698, 5700, 5702, 5704, 5706, 5708, 5710, 5712, 5714, 5716, 5718, 5720, 5722, 5724, 5726, 5728, 5730, 5732, 5734, 5736, 5738, 5740, 5742, 5744, 5746, 5748, 5750, 5752, 5754, 5756, 5758, 5761, 5763, 5765, 5767, 5770, 5772, 5774, 5776, 5779, 5781, 5783, 5785, 5787, 5789, 5791, 5793, 5795, 5797, 5799, 5801, 5803, 5805, 5807, 5809, 5811, 5813, 5816, 5818, 5820, 5822, 5824, 5826, 5829, 5831, 5833, 5835, 5838, 5840, 5842, 5844, 5847, 5849, 5851, 5853, 5855, 5857, 5859, 5861, 5863, 5865, 5867, 5869, 5871, 5873, 5875, 5877, 5879, 5881, 5883, 5885, 5887, 5889, 5891, 5893, 5896, 5898, 5900, 5902, 5905, 5907, 5909, 5911, 5913, 5915, 5918, 5920, 5922, 5924, 5926, 5928, 5930, 5932, 5934, 5936, 5938, 5940, 5942, 5944, 5946, 5948, 5950, 5952, 5954, 5956, 5958, 5960, 5962, 5964, 5966, 5968, 5970, 5972, 5974, 5976, 5978, 5980, 5982, 5984, 5986, 5988, 5990, 5992, 5994, 5996, 5999, 6001, 6003, 6005, 6007, 6009, 6011, 6013, 6015, 6017, 6020, 6022, 6028, 6030, 6033, 6035, 6038, 6040, 6042, 6044, 6047, 6049, 6052, 6054, 6060, 6062, 6064, 6066, 6068, 6070, 6073, 6075, 6078, 6080, 6083, 6085, 6088, 6090, 6092, 6094, 6096, 6098, 6101, 6103, 6106, 6108, 6111, 6113, 6116, 6118, 6121, 6123, 6126, 6128, 6130, 6132, 
6134, 6136, 6139, 6141, 6144, 6146, 6149, 6151, 6154, 6156, 6158, 6160, 6163, 6165, 6167, 6169, 6173, 6175, 6178, 6180, 6183, 6185, 6188, 6190, 6193, 6195, 6198, 6200, 6203, 6205, 6208, 6210, 6213, 6215, 6218, 6220, 6223, 6225, 6228, 6230, 6232, 6234, 6236, 6238, 6240, 6242, 6244, 6246, 6248, 6250, 6253, 6255, 6257, 6259, 6262, 6264, 6267, 6269, 6274, 6276, 6279, 6281, 6287, 6289, 6292, 6294, 6297, 6299, 6302, 6304, 6307, 6309, 6311, 6313, 6315, 6317, 6320, 6322, 6325, 6327, 6330, 6332, 6335, 6337, 6339, 6341, 6343, 6345, 6348, 6350, 6353, 6355, 6357, 6359, 6361, 6363, 6366, 6368, 6370, 6372, 6375, 6377, 6379, 6381, 6384, 6386, 6390, 6392, 6394, 6396, 6399, 6401, 6404, 6406, 6409, 6411, 6413, 6415, 6417, 6419, 6422, 6424, 6427, 6429, 6432, 6434, 6437, 6439, 6442, 6444, 6447, 6449, 6452, 6454, 6457, 6459, 6462, 6464, 6467, 6469, 6472, 6474, 6477, 6479, 6482, 6484, 6486, 6488, 6490, 6492, 6495, 6497, 6500, 6502, 6505, 6507, 6510, 6512, 6515, 6517, 6520, 6522, 6525, 6527, 6530, 6532, 6534, 6536, 6538, 6540, 6543, 6545, 6548, 6550, 6553, 6555, 6558, 6560, 6563, 6565, 6568, 6570, 6573, 6575, 6578, 6580, 6583, 6585, 6588, 6590, 6593, 6595, 6598, 6600, 6603, 6605, 6608, 6610, 6613, 6615, 6618, 6620, 6622, 6624, 6627, 6629, 6631, 6633, 6638, 6640, 6642, 6644, 6646, 6648, 6651, 6653, 6656, 6658, 6661, 6663, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688, 6690, 6692, 6695, 6697, 6699, 6701, 6703, 6705, 6707, 6709, 6711, 6713, 6715, 6717, 6720, 6722, 6725, 6727, 6730, 6732, 6735, 6737, 6740, 6742, 6745, 6747, 6750, 6752, 6755, 6757, 6760, 6762, 6767, 6769, 6772, 6774, 6777, 6779, 6782, 6784, 6786, 6788, 6790, 6792, 6795, 6797, 6800, 6802, 6805, 6807, 6810, 6812, 6815, 6817, 6819, 6821, 6824, 6826, 6829, 6831, 6837, 6839, 6842, 6844, 6847, 6849, 6852, 6854, 6857, 6859, 6861, 6863, 6866, 6868, 6870, 6872, 6876, 6878, 6881, 6883, 6886, 6888, 6891, 6893, 6895, 6897, 6900, 6902, 6905, 6907, 6913, 6915, 6917, 6919, 6922, 6924, 6927, 6929, 6935, 6937, 6939, 
6941, 6943, 6945, 6948, 6950, 6953, 6955, 6958, 6960, 6963, 6965, 6967, 6969, 6971, 6973, 6976, 6978, 6980, 6982, 6984, 6986, 6989, 6991, 6993, 6995, 6997, 6999, 7002, 7004, 7007, 7009, 7012, 7014, 7017, 7019, 7021, 7023, 7025, 7027, 7030, 7032, 7035, 7037, 7040, 7042, 7045, 7047, 7049, 7051, 7054, 7056, 7059, 7061, 7067, 7069, 7071, 7073, 7075, 7077, 7080, 7082, 7085, 7087, 7090, 7092, 7095, 7097, 7099, 7101, 7103, 7105, 7108, 7110, 7113, 7115, 7118, 7120, 7123, 7125, 7127, 7129, 7131, 7133, 7136, 7138, 7141, 7143, 7146, 7148, 7151, 7153, 7155, 7157, 7159, 7161, 7163, 7165, 7167, 7169, 7171, 7173, 7175, 7177, 7180, 7182, 7184, 7186, 7188, 7190, 7193, 7195, 7198, 7200, 7203, 7205, 7208, 7210, 7213, 7215, 7218, 7220, 7223, 7225, 7227, 7229, 7231, 7233, 7235, 7237, 7239, 7241, 7243, 7245, 7247, 7249, 7251, 7253, 7255, 7257, 7259, 7261, 7263, 7265, 7267, 7269, 7271, 7273, 7275, 7277, 7279, 7281, 7283, 7285, 7287, 7289, 7291, 7293, 7295, 7297, 7299, 7301, 7303, 7305, 7307, 7309, 7311, 7313, 7315, 7317, 7319, 7321, 7323, 7325, 7327, 7329, 7331, 7333, 7335, 7337, 7339, 7341, 7344, 7346, 7349, 7351, 7353, 7355, 7357, 7359, 7361, 7363, 7365, 7367, 7369, 7371, 7373, 7375, 7377, 7379, 7381, 7383, 7385, 7387, 7389, 7391, 7393, 7395, 7397, 7399, 7401, 7403, 7405, 7407, 7409, 7411, 7413, 7415, 7417, 7419, 7421, 7423, 7425, 7427, 7429, 7431, 7433, 7435, 7437, 7439, 7441, 7443, 7445, 7447, 7449, 7451, 7453, 7455, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7475, 7477, 7479, 7481, 7484, 7486, 7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7517, 7519, 7521, 7523, 7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7543, 7545, 7548, 7550, 7553, 7555, 7558, 7560, 7562, 7564, 7566, 7568, 7571, 7573, 7576, 7578, 7580, 7582, 7584, 7586, 7589, 7591, 7594, 7596, 7598, 7600, 7603, 7605, 7608, 7610, 7616, 7618, 7621, 7623, 7626, 7628, 7630, 7632, 7634, 7636, 7638, 7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7667, 7669, 
7671, 7673, 7675, 7677, 7680, 7682, 7685, 7687, 7689, 7691, 7693, 7695, 7698, 7700, 7702, 7704, 7706, 7708, 7711, 7713, 7716, 7718, 7721, 7723, 7726, 7728, 7731, 7733, 7735, 7737, 7740, 7742, 7744, 7746, 7750, 7752, 7755, 7757, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790, 7792, 7794, 7796, 7798, 7801, 7803, 7805, 7807, 7811, 7813, 7816, 7818, 7821, 7823, 7826, 7828, 7830, 7832, 7834, 7836, 7839, 7841, 7844, 7846, 7849, 7851, 7854, 7856, 7859, 7861, 7864, 7866, 7869, 7871, 7874, 7876, 7879, 7881, 7887, 7889, 7891, 7893, 7895, 7897, 7900, 7902, 7905, 7907, 7910, 7912, 7915, 7917, 7920, 7922, 7925, 7927, 7930, 7932, 7934, 7936, 7938, 7940, 7943, 7945, 7948, 7950, 7953, 7955, 7958, 7960, 7963, 7965, 7967, 7969, 7971, 7973, 7976, 7978, 7981, 7983, 7986, 7988, 7991, 7993, 7995, 7997, 7999, 8001, 8003, 8005, 0, 0, 1, 1, 1, 1, 8013, 8015, 8017, 8019, 8021, 8023, 2, 2, 3636, 3636, 3634, 3634, 3636, 3636, 3639, 3639, 15, 15, 4966, 4966, 4978, 4978, 1288, 1288, 144, 144, 177, 177, 2207, 2207, 2108, 2108, 7064, 7064, 8170, 8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 250, 250, 250, 250, 5997, 5997, 5997, 5997, 5894, 5894, 5903, 5903, 6625, 6625, 475, 484, 6625, 6625, 667, 676, 667, 676, 667, 667, 676, 676, 686, 686, 686, 686, 6898, 6898, 8387, 8389, 8391, 8393, 8395, 8397, 8399, 8401, 699, 699, 708, 708, 709, 709, 710, 710, 3636, 3636, 3634, 3634, 3636, 3636, 3639, 3639, 767, 767, 835, 844, 1288, 1288, 1325, 1334, 1387, 1387, 1902, 1387, 1387, 1908, 1487, 1487, 1535, 1535, 1902, 1908, 2173, 2173, 2198, 2198, 2207, 2207, 2216, 2216, 2108, 2108, 2173, 2173, 2198, 2198, 2207, 2207, 2216, 2216, 2225, 2225, 2561, 2570, 2591, 2591, 2600, 2600, 2609, 2609, 2620, 2620, 2657, 2666, 3045, 3045, 3057, 3057, 3121, 3121, 3153, 3153, 3167, 3167, 3192, 3192, 3212, 3212, 3222, 3222, 3496, 3496, 3634, 3634, 3496, 3496, 3581, 3581, 3591, 3591, 3632, 3632, 3636, 3636, 3634, 3634, 3636, 3636, 3639, 3639, 3707, 3707, 3719, 
3719, 3833, 3833, 3845, 3845, 3979, 3988, 4201, 4201, 4213, 4213, 4693, 4693, 4705, 4705, 4529, 4529, 4693, 4693, 4705, 4705, 4529, 4529, 4966, 4978, 4966, 4978, 4423, 4423, 4432, 4432, 4693, 4693, 4705, 4705, 4520, 4520, 4529, 4529, 4583, 4588, 4605, 4605, 4615, 4615, 4693, 4693, 4705, 4705, 4883, 4892, 4917, 4926, 4966, 4966, 4978, 4978, 5429, 5429, 5417, 5417, 5338, 5338, 5350, 5350, 5417, 5417, 5429, 5429, 5417, 5417, 5429, 5429, 6045, 6045, 6057, 6057, 5894, 5894, 5903, 5903, 6625, 6625, 6635, 6635, 6693, 5759, 5768, 5759, 5768, 6764, 5997, 5997, 5997, 5997, 5836, 5836, 5845, 5845, 5894, 5894, 5903, 5903, 6251, 6251, 5894, 5894, 5903, 5903, 6284, 6284, 5997, 5997, 6025, 6025, 6045, 6045, 6057, 6057, 6161, 6170, 6251, 6251, 6260, 6271, 6284, 6284, 6625, 6625, 6635, 6635, 6693, 6764, 6822, 6822, 6834, 6834, 6864, 6873, 6898, 6898, 6910, 6910, 6920, 6920, 6932, 6932, 7052, 7052, 7064, 7064, 7601, 7601, 7613, 7613, 7601, 7601, 7456, 7456, 7473, 7482, 7515, 7515, 7524, 7524, 7601, 7601, 7613, 7613, 7738, 7747, 7799, 7808, 7884, 7884, 10417, 10419, 10421, 10423, 10425, 10427, 10429, 10431, 10433, 10435, 10438, 10440, 10443, 10445, 10447, 10449, 10451, 10453, 10455, 10457, 10459, 10461, 10463, 10465, 10468, 10470, 10472, 10474, 10477, 10479, 10482, 10484, 10490, 10492, 10494, 10496, 10498, 10500, 10503, 10505, 10508, 10510, 10513, 10515, 10518, 10520, 10522, 10524, 10527, 10529, 10532, 10534, 10540, 10542, 10544, 10546, 10548, 10550, 10553, 10555, 10557, 10559, 10561, 10563, 10566, 10568, 10571, 10573, 10576, 10578, 10581, 10583, 10586, 10588, 10590, 10592, 10594, 10596, 10599, 10601, 10604, 10606, 10609, 10611, 10614, 10616, 10618, 10620, 10623, 10625, 10628, 10630, 10636, 10638, 10640, 10642, 10644, 10646, 10649, 10651, 10653, 10655, 10657, 10659, 10661, 10663, 10665, 10667, 10669, 10671, 10673, 10675, 10678, 10680, 10682, 10684, 10686, 10688, 10691, 10693, 10695, 10697, 10699, 10701, 10703, 10705, 10707, 10709, 10711, 10713, 10715, 10717, 10719, 10721, 10723, 
10725, 10728, 10730, 10733, 10735, 10738, 10740, 10742, 10744, 10747, 10749, 10752, 10754, 10760, 10762, 10765, 10767, 10770, 10772, 10775, 10777, 10780, 10782, 10784, 10786, 10789, 10791, 10793, 10795, 10799, 10801, 10804, 10806, 10809, 10811, 10814, 10816, 10818, 10820, 10822, 10824, 10827, 10829, 10831, 10833, 10835, 10837, 10839, 10841, 10843, 10845, 10847, 10849, 10851, 10853, 10855, 10857, 10860, 10862, 10864, 10866, 10869, 10871, 10873, 10875, 10877, 10879, 10882, 10884, 10886, 10888, 10890, 10892, 10895, 10897, 10900, 10902, 10905, 10907, 10910, 10912, 10915, 10917, 10920, 10922, 10925, 10927, 10930, 10932, 10935, 10937, 10940, 10942, 10945, 10947, 10950, 10952, 10955, 10957, 10963, 10965, 10967, 10969, 10971, 10973, 10975, 10977, 10979, 10981, 10983, 10985, 10987, 10989, 10991, 10993, 10995, 10997, 10999, 11001, 11004, 11006, 11008, 11010, 11013, 11015, 11017, 11019, 11021, 11023, 11025, 11027, 11029, 11031, 11034, 11036, 11038, 11040, 11043, 11045, 11047, 11049, 11051, 11053, 11055, 11057, 11059, 11061, 11063, 11065, 11067, 11069, 11071, 11073, 11075, 11077, 11080, 11082, 11084, 11086, 11089, 11091, 11093, 11095, 11098, 11100, 11102, 11104, 11106, 11108, 11110, 11112, 11114, 11116, 11118, 11120, 11122, 11124, 11126, 11128, 11130, 11132, 11134, 11136, 11138, 11140, 11142, 11144, 11146, 11148, 11151, 11153, 11157, 11159, 11161, 11163, 11165, 11167, 11170, 11172, 11175, 11177, 11180, 11182, 11185, 11187, 11190, 11192, 11195, 11197, 11200, 11202, 11205, 11207, 11210, 11212, 11215, 11217, 11219, 11221, 11226, 11228, 11231, 11233, 11236, 11238, 11240, 11242, 11244, 11246, 11249, 11251, 11254, 11256, 11259, 11261, 11264, 11266, 11269, 11271, 11274, 11276, 11279, 11281, 11284, 11286, 11289, 11291, 11294, 11296, 11299, 11301, 11304, 11306, 11308, 11310, 11313, 11315, 11318, 11320, 11325, 11327, 11329, 11331, 11333, 11335, 11337, 11339, 11341, 11343, 11345, 11347, 11350, 11352, 11354, 11356, 11358, 11360, 11362, 11364, 11367, 11369, 11371, 11373, 11375, 11377, 
11379, 11381, 11383, 11385, 11387, 11389, 11391, 11393, 11395, 11397, 11399, 11401, 11403, 11405, 11407, 11409, 11411, 11413, 11415, 11417, 11419, 11421, 11423, 11425, 11427, 11429, 11432, 11434, 11436, 11438, 11440, 11442, 11444, 11446, 11448, 11450, 11452, 11454, 11457, 11459, 11461, 11463, 11466, 11468, 11470, 11472, 11475, 11477, 11480, 11482, 11488, 11490, 11492, 11494, 11497, 11499, 11502, 11504, 11510, 11512, 11514, 11516, 11518, 11520, 11522, 11524, 11526, 11528, 11530, 11532, 11535, 11537, 11540, 11542, 11545, 11547, 11550, 11552, 11555, 11557, 11560, 11562, 11568, 11570, 11573, 11575, 11578, 11580, 11583, 11585, 11588, 11590, 11592, 11594, 11597, 11599, 11602, 11604, 11609, 11611, 11613, 11615, 11617, 11619, 11622, 11624, 11627, 11629, 11632, 11634, 11637, 11639, 11641, 11643, 11645, 11647, 11650, 11652, 11655, 11657, 11660, 11662, 10676, 10676, 10858, 10858, 10867, 10867, 10880, 10880, 11742, 11744, 11746, 11748, 11751, 11753, 11755, 11757, 11760, 11762, 11764, 11766, 11768, 11770, 11772, 11774, 11776, 11778, 11780, 11782, 11784, 11786, 11788, 11790, 11792, 11794, 11796, 11798, 11800, 11802, 11804, 11806, 11808, 11810, 11812, 11814, 11817, 11819, 11822, 11824, 11827, 11829, 11831, 11833, 11835, 11837, 11839, 11841, 11844, 11846, 11849, 11851, 11853, 11855, 11859, 11861, 11863, 11865, 11867, 11869, 11871, 11873, 11875, 11877, 11879, 11881, 11883, 11885, 8168, 8168, 10960, 10960, 8385, 8385, 11473, 11473, 11348, 11348, 11495, 11495, 11348, 11348, 11507, 11507, 11473, 11473, 11485, 11485, 11365, 11365, 11365, 11365, 11365, 11365, 11495, 11495, 11533, 11533, 10525, 10525, 10621, 10621, 10475, 10475, 10487, 10487, 10525, 10525, 10537, 10537, 10621, 10621, 10633, 10633, 10745, 10745, 10757, 10757, 10676, 10676, 10745, 10745, 10757, 10757, 10787, 10689, 10689, 10745, 10745, 10757, 10757, 10796, 10745, 10745, 10757, 10757, 10787, 10796, 10858, 10858, 10867, 10867, 10880, 10880, 10960, 10960, 11155, 11155, 11002, 11002, 11011, 11011, 11032, 11032, 11041, 11041, 
11087, 11087, 11096, 11096, 11155, 11155, 11223, 11223, 11311, 11322, 11485, 11485, 11473, 11473, 11348, 11348, 11495, 11495, 11348, 11348, 11507, 11507, 11473, 11473, 11485, 11485, 11365, 11365, 11507, 11365, 11365, 11507, 11365, 11365, 11495, 11495, 11533, 11533, 11565, 11565, 11430, 11430, 11455, 11464, 11473, 11473, 11485, 11485, 11495, 11495, 11507, 11507, 11533, 11533, 11565, 11565, 11606, 11606, 13311, 13313, 13315, 13317, 13319, 13321, 13323, 13325, 13327, 13329, 13332, 13334, 13337, 13339, 13342, 13344, 13347, 13349, 13352, 13354, 13357, 13359, 13362, 13364, 13367, 13369, 13371, 13373, 13375, 13377, 13380, 13382, 13385, 13387, 13390, 13392, 13395, 13397, 13399, 13401, 13403, 13405, 13407, 13409, 13411, 13413, 13415, 13417, 13419, 13421, 13423, 13425, 13427, 13429, 13432, 13434, 13437, 13439, 13442, 13444, 13447, 13449, 13452, 13454, 13457, 13459, 13462, 13464, 13466, 13468, 13470, 13472, 13475, 13477, 13480, 13482, 13485, 13487, 13490, 13492, 13495, 13497, 13500, 13502, 13505, 13507, 13510, 13512, 13515, 13517, 13520, 13522, 13525, 13527, 13529, 13531, 13533, 13535, 13538, 13540, 13543, 13545, 13548, 13550, 13553, 13555, 13558, 13560, 13563, 13565, 13568, 13570, 13573, 13575, 13578, 13580, 13583, 13585, 13588, 13590, 13593, 13595, 13597, 13599, 13601, 13603, 13606, 13608, 13611, 13613, 13616, 13618, 13621, 13623, 13625, 13627, 13630, 13632, 13635, 13637, 13643, 13645, 13647, 13649, 13652, 13654, 13657, 13659, 13665, 13667, 13669, 13671, 13673, 13675, 13678, 13680, 13683, 13685, 13688, 13690, 13693, 13695, 13698, 13700, 13703, 13705, 13707, 13709, 13711, 13713, 13715, 13717, 13720, 13722, 13724, 13726, 13729, 13731, 13735, 13737, 13739, 13741, 13744, 13746, 13749, 13751, 13754, 13756, 13759, 13761, 13763, 13765, 13767, 13769, 13772, 13774, 13777, 13779, 13782, 13784, 13787, 13789, 13791, 13793, 13796, 13798, 13800, 13802, 13806, 13808, 13811, 13813, 13816, 13818, 13821, 13823, 13825, 13827, 13829, 13831, 13834, 13836, 13839, 13841, 13844, 13846, 13849, 
13851, 13853, 13855, 13857, 13859, 13862, 13864, 13867, 13869, 13872, 13874, 13877, 13879, 13882, 13884, 13887, 13889, 13892, 13894, 13897, 13899, 13901, 13903, 13906, 13908, 13911, 13913, 13919, 13921, 13924, 13926, 13929, 13931, 13934, 13936, 13939, 13941, 13944, 13946, 13949, 13951, 13954, 13956, 13958, 13960, 13962, 13964, 13967, 13969, 13971, 13973, 13976, 13978, 13981, 13983, 13989, 13991, 13994, 13996, 13999, 14001, 14004, 14006, 14009, 14011, 14014, 14016, 14019, 14021, 14024, 14026, 14029, 14031, 14034, 14036, 14039, 14041, 14044, 14046, 14049, 14051, 14053, 14055, 14058, 14060, 14063, 14065, 14126, 14128, 14130, 14132, 14134, 14136, 14138, 14140, 14142, 14144, 14146, 14148, 14150, 14152, 14154, 14156, 11738, 11738, 11740, 11740, 11739, 11739, 11740, 11740, 11749, 11749, 11758, 11758, 11815, 11815, 14219, 14221, 14223, 14225, 14227, 14229, 14231, 14233, 14235, 14237, 14239, 14241, 14243, 14245, 14247, 14249, 14251, 14253, 14255, 14257, 14259, 14261, 14263, 14265, 14268, 14270, 14272, 14274, 14276, 14278, 14280, 14282, 14284, 14286, 14288, 14290, 14292, 14294, 14296, 14298, 14300, 14302, 14304, 14306, 14308, 14310, 14312, 14314, 14316, 14318, 14320, 14322, 14324, 14326, 14328, 14330, 13733, 13733, 14369, 14371, 14373, 14375, 14377, 14379, 14381, 14383, 14385, 14387, 14389, 14391, 14394, 14396, 14398, 14400, 14402, 14404, 14406, 14408, 14410, 14412, 14414, 14416, 14419, 14421, 14423, 14425, 14427, 14429, 14431, 14433, 14435, 14437, 14439, 14441, 14444, 14446, 14448, 14450, 14453, 14455, 14457, 14459, 14462, 14464, 14466, 14468, 14470, 14472, 14474, 14476, 14478, 14480, 14482, 14484, 14486, 14488, 13628, 13628, 13640, 13640, 13650, 13650, 13662, 13662, 13794, 13803, 13904, 13904, 13916, 13916, 13974, 13974, 13986, 13986, 14056, 14056, 14068, 14068, 15220, 15222, 15224, 15226, 15228, 15230, 15233, 15235, 15237, 15239, 15241, 15243, 15245, 15247, 15249, 15251, 15253, 15255, 15257, 15259, 15261, 15263, 15265, 15267, 15269, 15271, 15274, 15276, 15279, 15281, 
15284, 15286, 15289, 15291, 15294, 15296, 15299, 15301, 15304, 15306, 15309, 15311, 15314, 15316, 15322, 15324, 15327, 15329, 15332, 15334, 15337, 15339, 15342, 15344, 15347, 15349, 15352, 15354, 15357, 15359, 15362, 15364, 15366, 15368, 15370, 15372, 15375, 15377, 15380, 15382, 15385, 15387, 15390, 15392, 15394, 15396, 15399, 15401, 15403, 15405, 15409, 15411, 15413, 15415, 15417, 15419, 15231, 15231, 14442, 14442, 14451, 14451, 14460, 14460, 14392, 14417, 14442, 14442, 14451, 14451, 14331, 14331, 14392, 14417, 14442, 14442, 14451, 14451, 14460, 14460, 15231, 15231, 15319, 15319, 15397, 15406, 15565, 15565, 15973, 15973, 16412, 16414, 16417, 16419, 16421, 16423, 16432, 16434, 16445, 16447, 16449, 16451, 16453, 16455, 16457, 16459, 16660, 16662, 16664, 16666, 16668, 16670, 16673, 16675, 16678, 16680, 16683, 16685, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17025, 17027, 17029, 17031, 17033, 17035, 17037, 17039, 17041, 17043, 17045, 17047, 17049, 17051, 17053, 17055, 17057, 17059, 17061, 17063, 17065, 17067, 17069, 17071, 17073, 17075, 17077, 17079, 17081, 17083, 17085, 17087, 17089, 17091, 17093, 17095, 17097, 17099, 17101, 17103, 17105, 17107, 17109, 17111, 17113, 17115, 17117, 17119, 17121, 17123, 17125, 17127, 17129, 17131, 17133, 17135, 17137, 17139, 17141, 17143, 17145, 17147, 17149, 17151, 17153, 17155, 17157, 17159, 17161, 17163, 17165, 17167, 17169, 17171, 17173, 17175, 17177, 17179, 17181, 17183, 17185, 17187, 17189, 17191, 17193, 17195, 17197, 17199, 17201, 17203, 17205, 17207, 17209, 17211, 17213, 17215, 17217, 17219, 17221, 17223, 17225, 17227, 17229, 17231, 17233, 17235, 17237, 17239, 17241, 17243, 17245, 17247, 17249, 17251, 17253, 17255, 17257, 17259, 17261, 17263, 17265, 17267, 17269, 17271, 17273, 17275, 17277, 17279, 17281, 17283, 17285, 17287, 17289, 17291, 17293, 17295, 17297, 17299, 17301, 17303, 17305, 17307, 17309, 17311, 17313, 17315, 17317, 17319, 17321, 17323, 17325, 17327, 17329, 17331, 17333, 17335, 17337, 17339, 17341, 17343, 
17345, 17347, 17349, 17351, 17353, 17355, 17357, 17359, 17361, 17363, 17365, 17367, 17369, 17371, 17373, 17375, 17377, 17379, 17381, 17383, 17385, 17387, 17389, 17391, 17393, 17395, 17397, 17399, 17401, 17403, 17405, 17407, 17409, 17411, 17413, 17415, 17417, 17419, 17421, 17423, 17425, 17427, 17429, 17431, 17433, 17435, 17437, 17439, 17441, 17443, 17445, 17447, 17449, 17451, 17453, 17455, 17457, 17459, 17461, 17463, 17465, 17467, 17469, 17471, 17473, 17475, 17477, 17479, 17481, 17483, 17485, 17487, 17489, 17491, 17493, 17495, 17497, 17499, 17501, 17503, 17505, 17507, 17509, 17511, 17513, 17515, 17517, 17519, 17521, 17523, 17525, 17527, 17529, 17531, 17533, 17535, 17537, 17539, 17541, 17543, 17545, 17547, 17549, 17551, 17553, 17555, 17557, 17559, 17561, 17563, 17565, 17567, 17569, 17571, 17573, 17575, 17577, 17579, 17581, 17583, 17585, 17587, 17589, 17591, 17593, 17595, 17597, 17599, 17601, 17603, 17605, 17607, 17609, 17611, 17613, 17615, 17617, 17619, 17621, 17623, 17625, 17627, 17629, 17631, 17633, 17635, 17637, 17639, 17641, 17643, 17645, 17647, 17649, 17651, 17653, 17655, 17657, 17659, 17661, 17663, 17665, 17667, 17669, 17671, 17673, 17675, 17677, 17679, 17681, 17683, 17685, 17687, 17689, 17691, 17693, 17695, 17697, 17699, 17701, 17703, 17705, 17707, 17709, 17711, 17713, 17715, 17717, 17719, 17721, 17723, 17725, 17727, 17729, 17731, 17733, 17735, 17737, 17739, 17741, 17743, 17745, 17747, 17749, 17751, 17753, 17755, 17757, 17759, 17761, 17763, 17765, 17767, 17769, 17771, 17773, 17775, 17777, 17779, 17781, 17783, 17785, 17787, 17789, 17791, 17793, 17795, 17797, 17799, 17801, 17803, 17805, 17807, 17809, 17811, 17813, 17815, 17817, 17819, 17821, 17823, 17825, 17827, 17829, 17831, 17833, 17835, 17837, 17839, 17841, 17843, 17845, 17847, 17849, 17851, 17853, 17855, 17857, 17859, 17861, 17863, 17865, 17867, 17869, 17871, 17873, 17875, 17877, 17879, 17881, 17883, 17885, 17887, 17889, 17891, 17893, 17895, 17897, 17899, 17901, 17903, 17905, 17907, 17909, 17911, 17913, 
17915, 17917, 17919, 17921, 17923, 17925, 17927, 17929, 17931, 17933, 17935, 17937, 17939, 17941, 17943, 17945, 17947, 17949, 17951, 17953, 17955, 17957, 17959, 17961, 17963, 17965, 17967, 17969, 17971, 17973, 17975, 17977, 17979, 17981, 17983, 17985, 17987, 17989, 17991, 17993, 17995, 17997, 17999, 18001, 18003, 18005, 18007, 18009, 18011, 18013, 18015, 18017, 18019, 18021, 18023, 18025, 18027, 18029, 18031, 18033, 18035, 18037, 18039, 18041, 18043, 18045, 18047, 18049, 18051, 18053, 18055, 18057, 18059, 18061, 18063, 18065, 18067, 18069, 18071, 18073, 18075, 18077, 18079, 18081, 18083, 18085, 18087, 18089, 18091, 18093, 18095, 18097, 18099, 18101, 18103, 18105, 18107, 18109, 18111, 18113, 18115, 18117, 18119, 18121, 18123, 18125, 18127, 18129, 18131, 18133, 18135, 18137, 18139, 18141, 18143, 18145, 18147, 18149, 18151, 18153, 18155, 18157, 18159, 18161, 18163, 18165, 18167, 18169, 18171, 18173, 18175, 18177, 18179, 18181, 18183, 18185, 18187, 18189, 18191, 18193, 18195, 18197, 18199, 18201, 18203, 18205, 18207, 18209, 18211, 18213, 18215, 18217, 18219, 18221, 18223, 18225, 18227, 18229, 18231, 18233, 18235, 18237, 18239, 18241, 18243, 18245, 18247, 18249, 18251, 18253, 18255, 18257, 18259, 18261, 18263, 18265, 18267, 18269, 18271, 18273, 18275, 18277, 18279, 18281, 18283, 18285, 18287, 18289, 18291, 18293, 18295, 18297, 18299, 18301, 18303, 18305, 18307, 18309, 18311, 18313, 18315, 18317, 18319, 18321, 18323, 18325, 18327, 18329, 18331, 18333, 18335, 18337, 18339, 18341, 18343, 18345, 18347, 18349, 18351, 18353, 18355, 18357, 18359, 18361, 18363, 18365, 18367, 18369, 18371, 18373, 18375, 18377, 18379, 18381, 18383, 18385, 18387, 18389, 18391, 18393, 18395, 18397, 18399, 18401, 18403, 18405, 18407, 18409, 18411, 18413, 18415, 18417, 18419, 18421, 18423, 18425, 18427, 18429, 18431, 18433, 18435, 18437, 18439, 18441, 18443, 18445, 18447, 18449, 18451, 18453, 18455, 18457, 18459, 18461, 18463, 18465, 18467, 18469, 18471, 18473, 18475, 18477, 18479, 18481, 18483, 
18485, 18487, 18489, 18491, 18493, 18495, 18497, 18499, 18501, 18503, 18505, 18507, 18509, 18511, 18513, 18515, 18517, 18519, 18521, 18523, 18525, 18527, 18529, 18531, 18533, 18535, 18537, 18539, 18541, 18543, 18545, 18547, 18549, 18551, 18553, 18555, 18557, 18559, 18561, 18563, 18565, 18567, 18569, 18571, 18573, 18575, 18577, 18579, 18581, 18583, 18585, 18587, 18589, 18591, 18593, 18595, 18597, 18599, 18601, 18603, 18605, 18607, 18609, 18611, 18613, 18615, 18617, 18619, 18621, 18623, 18625, 18627, 18629, 18631, 18633, 18635, 18637, 18639, 18641, 18643, 18645, 18647, 18649, 18651, 18653, 18655, 18657, 18659, 18661, 18663, 18665, 18667, 18669, 18671, 18673, 18675, 18677, 18679, 18681, 18683, 18685, 18687, 18689, 18691, 18693, 18695, 18697, 18699, 18701, 18703, 18705, 18707, 18709, 18711, 18713, 18715, 18717, 18719, 18721, 18723, 18725, 18727, 18729, 18731, 18733, 18735, 18737, 18739, 18741, 18743, 18745, 18747, 18749, 18751, 18753, 18755, 18757, 18759, 18761, 18763, 18765, 18767, 18769, 18771, 18773, 18775, 18777, 18779, 18781, 18783, 18785, 18787, 18789, 18791, 18793, 18795, 18797, 18799, 18801, 18803, 18805, 18807, 18809, 18811, 18813, 18815, 18817, 18819, 18821, 18823, 18825, 18827, 18829, 18831, 18833, 18835, 18837, 18839, 18841, 18843, 18845, 18847, 18849, 18851, 18853, 18855, 18857, 18859, 18861, 18863, 18865, 18867, 18869, 18871, 18873, 18875, 18877, 18879, 18881, 18883, 18885, 18887, 18889, 18891, 18893, 18895, 18897, 18899, 18901, 18903, 18905, 18907, 18909, 18911, 18913, 18915, 18917, 18919, 18921, 18923, 18925, 18927, 18929, 18931, 18933, 18935, 18937, 18939, 18941, 18943, 18945, 18947, 18949, 18951, 18953, 18955, 18957, 18959, 18961, 18963, 18965, 18967, 18969, 18971, 18973, 18975, 18977, 18979, 18981, 18983, 18985, 18987, 18989, 18991, 18993, 18995, 18997, 18999, 19001, 19003, 19005, 19007, 19009, 19011, 19013, 19015, 19017, 19019, 19021, 19023, 19025, 19027, 19029, 19031, 19033, 19035, 19037, 19039, 19041, 19043, 19045, 19047, 19049, 19051, 19053, 
19055, 19057, 19059, 19061, 19063, 19065, 19067, 19069, 19071, 19073, 19075, 19077, 19079, 19081, 19083, 19085, 19087, 19089, 19091, 19093, 19095, 19097, 19099, 19101, 19103, 19105, 19107, 19109, 19111, 19113, 19115, 19117, 19119, 19121, 19123, 19125, 19127, 19129, 19131, 19133, 19135, 19137, 19139, 19141, 19143, 19145, 19147, 19149, 19151, 19153, 19155, 19157, 19159, 19161, 19163, 19165, 19167, 19169, 19171, 19173, 19175, 19177, 19179, 19181, 19183, 19185, 19187, 19189, 19191, 19193, 19195, 19197, 19199, 19201, 19203, 19205, 19207, 19209, 19211, 19213, 19215, 19217, 19219, 19221, 19223, 19225, 19227, 19229, 19231, 19233, 19235, 19237, 19239, 19241, 19243, 19245, 19247, 19249, 19251, 19253, 19255, 19257, 19259, 19261, 19263, 19265, 19267, 19269, 19271, 19273, 19275, 19277, 19279, 19281, 19283, 19285, 19287, 19289, 19291, 19293, 19295, 19297, 19299, 19301, 19303, 19305, 19307, 19309, 19311, 19313, 19315, 19317, 19319, 19321, 19323, 19325, 19327, 19329, 19331, 19333, 19335, 19337, 19339, 19341, 19343, 19345, 19347, 19349, 19351, 19353, 19355, 19357, 19359, 19361, 19363, 19365, 19367, 19369, 19371, 19373, 19375, 19377, 19379, 19381, 19383, 19385, 19387, 19389, 19391, 19393, 19395, 19397, 19399, 19401, 19403, 19405, 19407, 19409, 19411, 19413, 19415, 19417, 19419, 19421, 19423, 19425, 19427, 19429, 19431, 19433, 19435, 19437, 19439, 19441, 19443, 19445, 19447, 19449, 19451, 19453, 19455, 19457, 19459, 19461, 19463, 19465, 19467, 19469, 19471, 19473, 19475, 19477, 19479, 19481, 19483, 19485, 19487, 19489, 19491, 19493, 19495, 19497, 19499, 19501, 19503, 19505, 19507, 19509, 19511, 19513, 19515, 19517, 19519, 19521, 19523, 19525, 19527, 19529, 19531, 19533, 19535, 19537, 19539, 19541, 19543, 19545, 19547, 19549, 19551, 19553, 19555, 19557, 19559, 19561, 19563, 19565, 19567, 19569, 19571, 19573, 19575, 19577, 19579, 19581, 19583, 19585, 19587, 19589, 19591, 19593, 19595, 19597, 19599, 19601, 19603, 19605, 19607, 19609, 19611, 19613, 19615, 19617, 19619, 19621, 19623, 
19625, 19627, 19629, 19631, 19633, 19635, 19637, 19639, 19641, 19643, 19645, 19647, 19649, 19651, 19653, 19655, 19657, 19659, 19661, 19663, 19665, 19667, 19669, 19671, 19673, 19675, 19677, 19679, 19681, 19683, 19685, 19687, 19689, 19691, 19693, 19695, 19697, 19699, 19701, 19703, 19705, 19707, 19709, 19711, 19713, 19715, 19717, 19719, 19721, 19723, 19725, 19727, 19729, 19731, 19733, 19735, 19737, 19739, 19741, 19743, 19745, 19747, 19749, 19751, 19753, 19755, 19757, 19759, 19761, 19763, 19765, 19767, 19769, 19771, 19773, 19775, 19777, 19779, 19781, 19783, 19785, 19787, 19789, 19791, 19793, 19795, 19797, 19799, 19801, 19803, 19805, 19807, 19809, 19811, 19813, 19815, 19817, 19819, 19821, 19823, 19825, 19827, 19829, 19831, 19833, 19835, 19837, 19839, 19841, 19843, 19845, 19847, 19849, 19851, 19853, 19855, 19857, 19859, 19861, 19863, 19865, 19867, 19869, 19871, 19873, 19875, 19877, 19879, 19881, 19883, 19885, 19887, 19889, 19891, 19893, 19895, 19897, 19899, 19901, 19903, 19905, 19907, 19909, 19911, 19913, 19915, 19917, 19919, 19921, 19923, 19925, 19927, 19929, 19931, 19933, 19935, 19937, 19939, 19941, 19943, 19945, 19947, 19949, 19951, 19953, 19955, 19957, 19959, 19961, 19963, 19965, 19967, 19969, 19971, 19973, 19975, 19977, 19979, 19981, 19983, 19985, 19987, 19989, 19991, 19993, 19995, 19997, 19999, 20001, 20003, 20005, 20007, 20009, 20011, 20013, 20015, 20017, 20019, 20021, 20023, 20025, 20027, 20029, 20031, 20033, 20035, 20037, 20039, 20041, 20043, 20045, 20047, 20049, 20051, 20053, 20055, 20057, 20059, 20061, 20063, 20065, 20067, 20069, 20071, 20073, 20075, 20077, 20079, 20081, 20083, 20085, 20087, 20089, 20091, 20093, 20095, 20097, 20099, 20101, 20103, 20105, 20107, 20109, 20111, 20113, 20115, 20117, 20119, 20121, 20123, 20125, 20127, 20129, 20131, 20133, 20135, 20137, 20139, 20141, 20143, 20145, 20147, 20149, 20151, 20153, 20155, 20157, 20159, 20161, 20163, 20165, 20167, 20169, 20171, 20173, 20175, 20177, 20179, 20181, 20183, 20185, 20187, 20189, 20191, 20193, 
20195, 20197, 20199, 20201, 20203, 20205, 20207, 20209, 20211, 20213, 20215, 20217, 20219, 20221, 20223, 20225, 20227, 20229, 20231, 20233, 20235, 20237, 20239, 20241, 20243, 20245, 20247, 20249, 20251, 20253, 20255, 20257, 20259, 20261, 20263, 20265, 20267, 20269, 20271, 20273, 20275, 20277, 20279, 20281, 20283, 20285, 20287, 20289, 20291, 20293, 20295, 20297, 20299, 20301, 20303, 20305, 20307, 20309, 20311, 20313, 20315, 20317, 20319, 20321, 20323, 20325, 20327, 20329, 20331, 20333, 20335, 20337, 20339, 20341, 20343, 20345, 20347, 20349, 20351, 20353, 20355, 20357, 20359, 20361, 20363, 20365, 20367, 20369, 20371, 20373, 20375, 20377, 20379, 20381, 20383, 20385, 20387, 20389, 20391, 20393, 20395, 20397, 20399, 20401, 20403, 20405, 20407, 20409, 20411, 20413, 20415, 20417, 20419, 20421, 20423, 20425, 20427, 20429, 20431, 20433, 20435, 20437, 20439, 20441, 20443, 20445, 20447, 20449, 20451, 20453, 20455, 20457, 20459, 20461, 20463, 20465, 20467, 20469, 20471, 20473, 20475, 20477, 20479, 20481, 20483, 20485, 20487, 20489, 20491, 20493, 20495, 20497, 20499, 20501, 20503, 20505, 20507, 20509, 20511, 20513, 20515, 20517, 20519, 20521, 20523, 20525, 20527, 20529, 20531, 20533, 20535, 20537, 20539, 20541, 20543, 20545, 20547, 20549, 20551, 20553, 20555, 20557, 20559, 20561, 20563, 20565, 20567, 20569, 20571, 20573, 20575, 20577, 20579, 20581, 20583, 20585, 8006, 8007, 8008, 8009, 8010, 8011, 20593, 20595, 20597, 8026, 8027, 8032, 8033, 8034, 8035, 8036, 8037, 8038, 8039, 8042, 8043, 8046, 8047, 8050, 8051, 8113, 8114, 8121, 8122, 8131, 8132, 8155, 8156, 8159, 8160, 8166, 8167, 20627, 20629, 20631, 20633, 20635, 20637, 8193, 8194, 8195, 8196, 8199, 8200, 8202, 8203, 8226, 8227, 8230, 8231, 8278, 8279, 8282, 8285, 8296, 8297, 8320, 8323, 8336, 8339, 8367, 8368, 8371, 8372, 8376, 8377, 8378, 8379, 8380, 8381, 20671, 20673, 20675, 20677, 8404, 8405, 8408, 8409, 8410, 8411, 8412, 8413, 8427, 8428, 8429, 8430, 8431, 8432, 8433, 8434, 8453, 8454, 8481, 8483, 8579, 8580, 8590, 
8593, 8609, 8610, 8611, 8612, 8613, 8614, 8634, 8637, 8646, 8649, 8727, 8729, 8787, 8788, 8795, 8796, 8799, 8800, 8803, 8804, 8807, 8808, 8825, 8826, 8833, 8834, 8837, 8838, 8841, 8842, 8845, 8846, 8931, 8934, 8943, 8944, 8947, 8948, 8951, 8952, 8955, 8956, 8973, 8986, 9064, 9065, 9068, 9069, 9083, 9084, 9091, 9092, 9096, 9097, 9103, 9104, 9109, 9110, 9113, 9114, 9185, 9186, 9187, 9188, 9189, 9190, 9208, 9209, 9211, 9212, 9219, 9222, 9223, 9224, 9225, 9226, 9227, 9228, 9229, 9230, 9244, 9245, 9248, 9249, 9274, 9275, 9278, 9279, 9317, 9320, 9362, 9365, 9368, 9369, 9401, 9402, 9405, 9406, 9413, 9414, 9425, 9426, 9429, 9430, 9437, 9438, 9451, 9452, 9471, 9472, 9475, 9476, 9479, 9480, 9496, 9497, 9500, 9501, 9504, 9507, 9510, 9511, 9525, 9527, 9531, 9532, 9534, 9535, 9552, 9553, 9556, 9557, 9597, 9600, 9606, 9609, 9618, 9619, 9622, 9623, 9695, 9696, 9697, 9698, 9710, 9711, 9714, 9715, 9724, 9725, 9728, 9729, 9734, 9735, 9738, 9739, 9767, 9768, 9771, 9772, 9787, 9788, 9791, 9792, 9817, 9818, 9834, 9835, 9851, 9853, 9855, 9857, 9859, 9862, 9865, 9866, 9868, 9869, 9879, 9880, 9883, 9884, 9893, 9894, 9897, 9898, 9904, 9905, 9914, 9915, 9918, 9919, 9925, 9926, 9943, 9944, 9950, 9951, 9956, 9957, 9960, 9961, 9983, 9986, 10004, 10005, 10008, 10011, 10014, 10015, 10084, 10085, 10087, 10088, 10104, 10121, 10133, 10134, 10137, 10138, 10145, 10148, 10154, 10155, 10158, 10159, 10162, 10163, 10166, 10167, 10192, 10193, 10196, 10197, 10236, 10237, 10247, 10248, 10251, 10252, 10292, 10293, 10298, 10301, 10307, 10310, 10311, 10314, 10330, 10331, 10334, 10335, 10362, 10364, 10376, 10378, 10393, 10394, 20961, 20963, 20965, 20967, 20969, 20971, 20973, 20975, 20977, 20979, 20981, 20983, 20985, 20987, 20989, 20991, 20993, 20995, 20997, 20999, 21001, 21003, 21005, 21007, 21009, 21011, 21013, 21015, 21017, 21019, 21021, 21023, 21025, 21027, 21029, 21031, 21033, 21035, 21037, 21039, 21041, 21043, 21045, 21047, 21049, 21051, 21053, 21055, 21057, 21059, 21061, 21063, 21065, 21067, 21069, 21071, 
21073, 21075, 21077, 21079, 21081, 21083, 21085, 21087, 21089, 21091, 21093, 21095, 21097, 21099, 21101, 21103, 21105, 21107, 21109, 21111, 21113, 21115, 21117, 21119, 21121, 21123, 21125, 21127, 21129, 21131, 21133, 21135, 21137, 21139, 21141, 21143, 21145, 21147, 21149, 21151, 21153, 21155, 21157, 21159, 21161, 21163, 21165, 21167, 21169, 21171, 21173, 21175, 21177, 21179, 21181, 21183, 21185, 21187, 21189, 21191, 21193, 21195, 21197, 21199, 21201, 21203, 21205, 21207, 21209, 21211, 21213, 21215, 21217, 21219, 21221, 21223, 21225, 21227, 21229, 21231, 21233, 21235, 21237, 21239, 21241, 21243, 21245, 21247, 21249, 21251, 21253, 21255, 21257, 21259, 21261, 21263, 21265, 21267, 21269, 21271, 21273, 21275, 21277, 21279, 21281, 21283, 21285, 21287, 21289, 21291, 21293, 21295, 21297, 21299, 21301, 21303, 21305, 21307, 21309, 21311, 21313, 21315, 21317, 21319, 21321, 21323, 21325, 21327, 21329, 21331, 21333, 21335, 21337, 21339, 21341, 21343, 21345, 21347, 21349, 21351, 21353, 21355, 21357, 21359, 21361, 21363, 21365, 21367, 21369, 21371, 21373, 21375, 21377, 21379, 21381, 21383, 21385, 21387, 21389, 21391, 21393, 21395, 21397, 21399, 21401, 21403, 21405, 21407, 21409, 21411, 21413, 21415, 21417, 21419, 21421, 21423, 21425, 21427, 21429, 21431, 21433, 21435, 21437, 21439, 21441, 21443, 21445, 21447, 21449, 21451, 21453, 21455, 21457, 21459, 21461, 21463, 21465, 21467, 21469, 21471, 21473, 21475, 21477, 21479, 21481, 21483, 21485, 21487, 21489, 21491, 21493, 21495, 21497, 21499, 21501, 11666, 11667, 11684, 11685, 11688, 11689, 11696, 11697, 21511, 21513, 21515, 21517, 21519, 21521, 21523, 21525, 21527, 21529, 21531, 21533, 21535, 21537, 21539, 21541, 21543, 21545, 21547, 21549, 21551, 21553, 21555, 21557, 21559, 21561, 21563, 21565, 21567, 21569, 21571, 21573, 21575, 21577, 11994, 11995, 12002, 12003, 12089, 12090, 12099, 12100, 12101, 12102, 12103, 12104, 12105, 12106, 12107, 12108, 12111, 12112, 12115, 12116, 12117, 12118, 12119, 12120, 12121, 12122, 12123, 12124, 
12127, 12128, 12895, 12898, 12904, 12905, 12910, 12911, 12914, 12915, 12924, 12925, 12928, 12929, 12946, 12947, 12950, 12951, 12956, 12959, 12962, 12963, 12972, 12973, 12977, 12978, 12981, 12982, 12989, 12992, 12993, 12997, 12998, 13001, 13002, 13009, 13017, 13018, 13021, 13022, 13029, 13032, 13046, 13047, 13050, 13051, 13058, 13059, 13076, 13077, 13086, 13087, 13090, 13091, 13094, 13095, 13104, 13105, 13108, 13109, 13121, 13122, 13125, 13126, 13140, 13141, 13155, 13156, 13174, 13177, 13180, 13183, 13186, 13187, 13188, 13189, 13190, 13191, 13192, 13193, 13194, 13195, 13198, 13199, 13202, 13203, 13204, 13205, 13206, 13207, 13208, 13209, 13210, 13211, 13212, 13213, 13220, 13221, 13228, 13229, 13238, 13245, 13252, 13255, 13258, 13259, 13262, 13263, 13266, 13267, 13270, 13271, 13278, 13279, 13286, 13287, 13294, 13297, 21725, 21727, 21729, 21731, 21733, 21735, 21737, 21739, 21741, 21743, 21745, 21747, 21749, 21751, 21753, 21755, 21757, 21759, 21761, 21763, 21765, 21767, 21769, 21771, 21773, 21775, 21777, 21779, 21781, 21783, 21785, 21787, 21789, 21791, 21793, 21795, 21797, 21799, 21801, 21803, 21805, 21807, 21809, 21811, 21813, 21815, 21817, 21819, 21821, 21823, 21825, 21827, 21829, 21831, 21833, 21835, 21837, 21839, 21841, 21843, 21845, 21847, 21849, 21851, 21853, 21855, 21857, 21859, 21861, 21863, 21865, 21867, 21869, 21871, 21873, 21875, 21877, 21879, 21881, 21883, 21885, 21887, 21889, 21891, 21893, 21895, 21897, 21899, 21901, 21903, 21905, 21907, 21909, 21911, 21913, 21915, 21917, 21919, 21921, 21923, 21925, 21927, 21929, 21931, 21933, 21935, 21937, 21939, 21941, 21943, 21945, 21947, 21949, 21951, 21953, 21955, 21957, 21959, 21961, 21963, 21965, 21967, 21969, 21971, 21973, 21975, 21977, 21979, 21981, 21983, 21985, 21987, 21989, 21991, 21993, 21995, 21997, 21999, 22001, 22003, 22005, 22007, 22009, 22011, 22013, 22015, 22017, 22019, 22021, 22023, 22025, 22027, 22029, 22031, 22033, 22035, 22037, 22039, 22041, 22043, 22045, 22047, 22049, 22051, 22053, 14165, 14166, 
14167, 14168, 14179, 14180, 14181, 14182, 14195, 14196, 14200, 14201, 14216, 14217, 22069, 22071, 22073, 22075, 22077, 22079, 22081, 22083, 22085, 22087, 22089, 22091, 22093, 22095, 22097, 22099, 22101, 22103, 22105, 22107, 22109, 22111, 22113, 22115, 22117, 22119, 22121, 22123, 14358, 14359, 22127, 22129, 22131, 22133, 22135, 22137, 22139, 22141, 22143, 22145, 22147, 22149, 22151, 22153, 22155, 22157, 22159, 22161, 22163, 22165, 22167, 22169, 22171, 22173, 22175, 22177, 22179, 22181, 22183, 15110, 15111, 15114, 15115, 15118, 15119, 15122, 15123, 15151, 15154, 15176, 15177, 15180, 15181, 15193, 15194, 15197, 15198, 15213, 15214, 15217, 15218, 22207, 22209, 22211, 22213, 22215, 22217, 22219, 22221, 22223, 22225, 22227, 22229, 22231, 22233, 22235, 22237, 22239, 22241, 22243, 22245, 22247, 22249, 22251, 22253, 22255, 22257, 22259, 22261, 22263, 22265, 22267, 22269, 22271, 22273, 22275, 22277, 22279, 22281, 22283, 22285, 22287, 22289, 22291, 15475, 15476, 15482, 15483, 15486, 15487, 15490, 15491, 15503, 15506, 15511, 15512, 15515, 15516, 15519, 15520, 15537, 15544, 15551, 15552, 15555, 15556, 15559, 15560, 15928, 15929, 15950, 15951, 15967, 15969, 16038, 16039, 16330, 16331, 22327, 22329, 22331, 22333, 22335, 22337, 22339, 22341, 22343, 22345, 22347, 22349, 22351, 22353, 8, 9, 10, 11, 12, 13, 14, 15, 24150, 24152, 24154, 23524, 23523, 22549, 22548, 22551, 22368, 24161, 24163, 24165, 24167, 22370, 22369, 23481, 23480, 24171, 23483, 23482, 24173, 23485, 23484, 4988, 23488, 23487, 5003, 23491, 22371, 23493, 22372, 23494, 23495, 22373, 22374, 22376, 22375, 22378, 22377, 22379, 22381, 22382, 22384, 22386, 22385, 22388, 22387, 22390, 22389, 22392, 22391, 22627, 22626, 22628, 22630, 22629, 22631, 22633, 22632, 22635, 22634, 22637, 22636, 22638, 22393, 22641, 22640, 22642, 22644, 22643, 22394, 22683, 22395, 22662, 22797, 22799, 22687, 22667, 22690, 22689, 22692, 22691, 24175, 22669, 22398, 22400, 22399, 22402, 22401, 24177, 22404, 22403, 22406, 22405, 22407, 22408, 22410, 
22409, 24179, 22759, 22758, 22761, 22760, 22411, 22686, 22413, 22412, 22414, 22416, 22415, 22418, 22417, 22770, 22419, 22841, 22840, 22844, 22846, 22420, 22848, 22421, 24181, 22423, 22422, 24183, 22425, 22424, 22426, 22428, 22427, 24185, 24193, 24195, 22429, 22431, 24197, 22433, 24199, 22434, 22436, 22435, 22437, 22439, 22438, 22440, 22442, 22441, 22443, 22445, 22444, 22446, 22448, 22447, 22450, 22449, 23684, 23683, 23685, 22452, 22451, 24201, 22454, 22453, 24203, 23681, 23621, 22455, 23696, 23622, 23624, 23623, 22457, 22456, 22459, 22458, 22460, 22463, 22462, 22465, 22464, 22466, 22468, 22467, 22470, 22469, 22472, 22471, 22474, 22473, 22476, 22475, 22477, 23626, 23625, 22479, 22478, 23630, 23629, 22481, 22480, 23641, 23640, 23839, 23634, 23841, 23840, 23843, 23642, 23636, 22482, 24205, 22484, 22483, 22486, 22485, 23641, 23640, 23839, 22487, 23841, 23840, 23843, 23642, 23636, 23635, 24209, 23852, 23851, 23639, 22488, 22491, 22490, 22493, 22492, 22495, 22494, 22496, 22499, 22498, 22504, 22503, 22506, 22505, 22500, 22510, 22509, 22531, 22530, 22533, 22501, 22511, 22534, 22502, 22504, 22503, 22506, 22505, 22507, 22510, 22509, 22531, 22530, 22533, 22532, 22511, 22534, 22535, 22513, 22512, 22514, 22517, 22516, 22519, 22518, 22521, 22520, 22523, 22522, 23833, 22524, 23835, 23834, 22526, 22525, 23804, 22527, 22528, 23808, 23807, 22531, 22530, 24215, 22533, 22532, 24217, 22534, 23813, 22535, 24219, 24221, 24223, 24077, 22536, 24076, 22538, 22537, 24229, 22540, 22539, 24231, 24233, 24235, 22542, 22541, 22839, 22838, 22544, 22543, 22546, 22545, 22547, 22549, 22548, 22551, 22550, 24237, 24239, 24241, 24243, 23194, 23193, 23196, 23195, 22552, 23200, 23199, 23156, 23155, 23158, 23157, 23160, 23159, 23161, 23164, 23163, 22554, 22553, 22556, 22555, 22567, 22566, 22569, 22568, 22570, 22572, 22558, 22577, 22559, 22578, 22561, 22560, 22562, 22583, 22563, 22584, 22586, 22585, 22587, 22564, 22567, 22566, 22569, 22568, 22570, 22572, 22574, 22577, 22576, 22578, 22579, 22581, 22583, 
22582, 22584, 22586, 22585, 22587, 22589, 22588, 22591, 22590, 22592, 22594, 22595, 22597, 22599, 22598, 22600, 22602, 22601, 22603, 22605, 22604, 1000, 22608, 22607, 1015, 22611, 22610, 22613, 22612, 22615, 22614, 22617, 22616, 22619, 22618, 22621, 22620, 22623, 22622, 22625, 22624, 22627, 22626, 22628, 22630, 22629, 22631, 22633, 22632, 22635, 22634, 22637, 22636, 22639, 22638, 22641, 22640, 22642, 22644, 22643, 22645, 22646, 22649, 22648, 22651, 22650, 22652, 22653, 22656, 22655, 22658, 22657, 22660, 22659, 22661, 22683, 22791, 22662, 22665, 22664, 22799, 22687, 22667, 22690, 22689, 22668, 22692, 24249, 22669, 22671, 22670, 22673, 22672, 22675, 22674, 22677, 22676, 22679, 22678, 22681, 22680, 22682, 22683, 22791, 22685, 22684, 22797, 22686, 22687, 22796, 22690, 22689, 22692, 22691, 24253, 24256, 22693, 22696, 22695, 22698, 22697, 22699, 22701, 22700, 22703, 22702, 22705, 22704, 22706, 22708, 22707, 22710, 22709, 22712, 22711, 22714, 22713, 22716, 22715, 1496, 22719, 22718, 1511, 22722, 22721, 22724, 22723, 22726, 22725, 22727, 22729, 22728, 22730, 22732, 22731, 22733, 22735, 22734, 22736, 22738, 22737, 22740, 22739, 22742, 22741, 22744, 22743, 22746, 22745, 22748, 22747, 22749, 22751, 22750, 22753, 22752, 22755, 22754, 22757, 22756, 22759, 22758, 22761, 22760, 22762, 22765, 22764, 22766, 22768, 22767, 22769, 22770, 22771, 22773, 22772, 22775, 22774, 22777, 22776, 22778, 22779, 22781, 22783, 22782, 22785, 22784, 22787, 22786, 22789, 22788, 22791, 22790, 22793, 22792, 22795, 22794, 22796, 22797, 22799, 22801, 22800, 22803, 22802, 22804, 22805, 22806, 22809, 22808, 22811, 22810, 22812, 22814, 22813, 22815, 22817, 22816, 22818, 22820, 22819, 22821, 22823, 22822, 22824, 22826, 22825, 22827, 22829, 22828, 22831, 22830, 22833, 22832, 22834, 22836, 22835, 22837, 22839, 22838, 22841, 22840, 23034, 23033, 23035, 22843, 22842, 22844, 22846, 22845, 22848, 22847, 22860, 22859, 22850, 22849, 22864, 22851, 22865, 22867, 22866, 22868, 22869, 22852, 24265, 22872, 22871, 22874, 
22873, 22876, 22875, 24267, 22878, 22877, 24269, 22880, 22879, 24271, 22854, 22853, 24273, 22856, 22855, 22858, 22857, 22860, 22859, 22862, 22861, 22864, 22863, 22865, 22867, 22866, 22868, 22870, 22869, 24275, 22872, 22871, 22874, 22873, 22876, 22875, 24277, 22878, 22877, 24279, 22880, 22879, 24281, 22882, 22881, 24283, 22884, 22883, 22885, 22887, 22886, 22888, 22891, 22890, 22893, 22892, 22895, 22894, 22896, 22898, 22897, 22899, 22902, 22901, 22903, 22913, 22912, 22915, 22904, 22917, 22916, 22905, 22906, 22908, 22909, 22911, 22910, 22913, 22912, 22915, 22914, 22917, 22916, 22918, 22919, 22921, 22922, 22924, 22923, 22925, 22927, 22926, 22928, 22930, 22932, 22935, 22934, 22937, 22936, 22939, 22938, 22940, 22942, 22941, 22943, 22945, 22944, 22947, 22946, 22949, 22948, 22950, 22953, 22952, 22954, 22956, 22955, 22957, 23123, 22958, 23124, 23126, 23125, 23127, 22961, 22960, 22963, 22962, 22965, 22964, 22967, 22966, 22969, 22968, 22970, 23106, 23105, 23107, 22972, 22971, 24287, 22974, 22973, 24289, 22976, 22975, 24291, 22978, 22977, 24293, 22980, 22979, 22982, 22981, 23140, 23142, 22983, 23144, 23147, 22985, 22986, 23134, 23136, 22987, 23138, 22989, 22988, 23143, 23142, 23144, 23147, 23146, 23149, 23148, 23151, 23150, 23153, 22989, 22991, 22990, 22993, 22992, 22994, 22997, 22996, 22999, 22998, 23001, 23000, 23003, 23002, 23004, 23006, 23005, 23007, 23009, 23008, 23011, 23010, 23013, 23012, 23015, 23014, 23017, 23016, 23019, 23018, 23021, 23020, 23022, 23024, 23023, 23025, 23027, 23026, 23029, 23028, 23031, 23030, 23032, 23034, 23033, 23035, 23037, 23036, 23039, 23038, 23041, 23040, 23043, 23042, 23045, 23044, 23047, 23046, 23049, 23048, 23051, 23050, 23053, 23052, 23055, 23054, 23057, 23056, 23059, 23058, 23061, 23060, 23062, 23064, 23063, 23065, 23067, 23066, 24297, 23069, 23068, 24299, 23070, 23073, 23072, 23075, 23074, 23077, 23076, 23079, 23078, 23081, 23080, 23083, 23082, 24301, 23085, 23084, 23087, 23086, 23089, 23088, 24303, 23091, 23090, 23092, 24305, 23094, 
23093, 23096, 23095, 23097, 24307, 23099, 23098, 23101, 23100, 24309, 23103, 23102, 24311, 23117, 23116, 23104, 23120, 23119, 23121, 23106, 23105, 23107, 23109, 23108, 23111, 23110, 23113, 23112, 23114, 23117, 23116, 23118, 23120, 23119, 23121, 23123, 23122, 23124, 23126, 23125, 23127, 23129, 23143, 23142, 23132, 23131, 23147, 23133, 23135, 23134, 23137, 23136, 23152, 23138, 23139, 23140, 23143, 23142, 23144, 23147, 23146, 23149, 23148, 23151, 23150, 23153, 23152, 23154, 23172, 23171, 23156, 23155, 23158, 23157, 23160, 23159, 23161, 23164, 23163, 23186, 23185, 23188, 23187, 24313, 24315, 24317, 23166, 23165, 23168, 23167, 23169, 23172, 23171, 23174, 23173, 23176, 23175, 23177, 23179, 23178, 23180, 23182, 23181, 24319, 23183, 24321, 23186, 23185, 23188, 23187, 23190, 23189, 23192, 23191, 24325, 24327, 24329, 24331, 23194, 23193, 23196, 23195, 23197, 23200, 23199, 23202, 23201, 23204, 23203, 23206, 23205, 24333, 23208, 23207, 24335, 23210, 23209, 23211, 23213, 23212, 23214, 23216, 23215, 23217, 23219, 23218, 23220, 23222, 23221, 23223, 23225, 23224, 23226, 23228, 23227, 23230, 23229, 23232, 23231, 24337, 23234, 23233, 24339, 23236, 23235, 23238, 23237, 23240, 23239, 23242, 23241, 23243, 23245, 23247, 23246, 23248, 23250, 23249, 23252, 23251, 23254, 23253, 23255, 23257, 23256, 23258, 23266, 23265, 23268, 23259, 23269, 23271, 23274, 23273, 23275, 23277, 23276, 23278, 23262, 23261, 23264, 23263, 23266, 23265, 23268, 23267, 23269, 23271, 23274, 23273, 23275, 23277, 23276, 23278, 23280, 23279, 23281, 23283, 23282, 23285, 23284, 23287, 23286, 23288, 23290, 23289, 23291, 23293, 23292, 23295, 23294, 23297, 23296, 23299, 23298, 23301, 23300, 23302, 23304, 23303, 23305, 23307, 23306, 23309, 23308, 23311, 23310, 24345, 23312, 23314, 23316, 23315, 23317, 23319, 23318, 23321, 23320, 23368, 23367, 23370, 23369, 23408, 23407, 23322, 23411, 23410, 23412, 23372, 23371, 23374, 23373, 23324, 23323, 23325, 23327, 23326, 23328, 23378, 23377, 24347, 23380, 23379, 24349, 23382, 23381, 
23384, 23329, 23386, 23330, 24351, 23387, 23388, 23389, 23390, 23331, 23332, 23333, 23334, 23378, 23377, 24353, 23380, 23379, 24355, 23382, 23381, 23384, 23383, 23386, 23385, 24357, 23395, 23396, 23397, 23398, 23401, 23402, 23335, 23336, 23338, 23337, 23340, 23339, 23342, 23341, 4988, 23345, 23344, 5003, 23347, 23348, 23349, 23350, 23352, 23351, 23353, 23356, 23355, 23357, 23359, 23358, 23361, 23360, 24363, 23363, 23362, 24365, 23364, 23365, 23366, 23368, 23367, 23370, 23369, 23372, 23371, 23374, 23373, 23376, 23375, 23378, 23377, 24367, 23380, 23379, 24369, 23382, 23381, 23384, 23383, 23386, 23385, 24373, 23387, 23388, 23389, 23390, 23391, 23392, 23393, 23394, 23395, 23396, 23397, 23398, 23399, 23400, 23401, 23402, 23403, 24377, 23405, 24379, 23408, 23407, 23409, 23411, 23410, 23412, 23414, 23413, 23416, 23415, 23418, 23417, 23420, 23419, 23422, 23421, 24381, 23424, 23423, 24383, 23426, 23425, 23427, 23429, 23428, 23430, 23431, 23433, 23432, 23435, 23434, 23436, 23438, 23437, 23439, 23441, 23440, 23443, 23442, 23445, 23444, 23446, 23449, 23448, 23450, 23452, 23451, 23453, 23455, 23454, 23456, 23458, 23457, 23459, 23461, 23460, 23462, 23464, 23463, 23466, 23465, 23468, 23467, 23469, 23471, 23470, 23473, 23472, 23475, 23474, 23476, 23478, 23477, 23479, 23481, 23480, 24389, 23483, 23482, 24391, 23485, 23484, 4988, 23488, 23487, 5003, 23491, 23490, 23493, 23492, 23494, 23495, 23496, 23497, 23499, 23498, 23500, 23502, 23501, 23503, 23505, 23504, 23507, 23506, 23509, 23508, 23510, 23512, 23511, 23513, 23515, 23514, 23516, 23518, 23517, 23519, 23531, 23521, 23534, 23522, 23524, 23523, 23525, 23541, 23540, 23542, 23544, 23543, 23527, 23526, 23529, 23528, 23530, 23531, 23533, 23534, 23536, 23538, 23537, 23539, 23541, 23540, 23542, 23544, 23543, 23546, 23545, 23547, 23549, 23551, 23550, 24393, 24395, 23552, 23554, 23553, 23556, 23555, 23558, 23557, 23559, 23560, 23562, 23561, 24397, 23564, 23563, 24399, 23566, 23565, 23567, 23569, 23568, 23570, 23572, 23571, 24401, 23574, 
23573, 24403, 23575, 23576, 23579, 23578, 24405, 23581, 23580, 24407, 23582, 23584, 23583, 23586, 23585, 23588, 23587, 23590, 23589, 23591, 23593, 23592, 23595, 23594, 23596, 23598, 23599, 23601, 23603, 23602, 23604, 23605, 23606, 23607, 23608, 23610, 23609, 24409, 23612, 23611, 24411, 23614, 23613, 23615, 23617, 23616, 23618, 23620, 23619, 23685, 23687, 23686, 23688, 23690, 23689, 24413, 23692, 23680, 24415, 23681, 23621, 23682, 23696, 23622, 23624, 23623, 23626, 23625, 23627, 23630, 23629, 23632, 23631, 23837, 23641, 23634, 23633, 23841, 23840, 23843, 23642, 23636, 23635, 24417, 23638, 23637, 23850, 23852, 23643, 23639, 23641, 23640, 23839, 23838, 23841, 23840, 23843, 23642, 23846, 24419, 23849, 23848, 23850, 23852, 23643, 23644, 23645, 23648, 23647, 23650, 23649, 23652, 23651, 23662, 23661, 23653, 23655, 23657, 23659, 23662, 23661, 23663, 23665, 24427, 23667, 24429, 23668, 23670, 23669, 23671, 23673, 23672, 23674, 23676, 23675, 24431, 23678, 23677, 24433, 23684, 23683, 23679, 23687, 23686, 23688, 23690, 23689, 24435, 23692, 23680, 24437, 23681, 23694, 23682, 23697, 23696, 24439, 23684, 23683, 23685, 23687, 23686, 23688, 23690, 23689, 24441, 23692, 23691, 24443, 23694, 23693, 23695, 23697, 23696, 24445, 23699, 23698, 23701, 23700, 23703, 23702, 23705, 23704, 23707, 23706, 23708, 23710, 23709, 23711, 23712, 23714, 24447, 23717, 23716, 23718, 23721, 23720, 24449, 23723, 23722, 23725, 23724, 24451, 23727, 23726, 24453, 23729, 23728, 23730, 23732, 23731, 23733, 23735, 23734, 23736, 23738, 23737, 23739, 23741, 23740, 23742, 23745, 23744, 23747, 23746, 23749, 23748, 23751, 23750, 23753, 23752, 23754, 23756, 23755, 23758, 23757, 23760, 23759, 23762, 23761, 23764, 23763, 23766, 23765, 23768, 23767, 24457, 23770, 23769, 23772, 23771, 23774, 23773, 24461, 23776, 23775, 23778, 23777, 23780, 23779, 23781, 23783, 23782, 23784, 23785, 23788, 23787, 23790, 23789, 23791, 23793, 23792, 6373, 23796, 23795, 6388, 23799, 23798, 23800, 23801, 23804, 23803, 23806, 23805, 23808, 23807, 
23810, 23809, 23812, 23811, 23814, 23813, 23815, 23817, 23816, 23818, 23820, 23819, 23821, 23823, 23822, 23825, 23824, 23827, 23826, 23828, 23830, 23829, 23831, 23833, 23832, 23835, 23834, 23837, 23836, 23839, 23838, 23841, 23840, 23843, 23842, 23844, 24463, 23846, 24465, 23849, 23848, 23850, 23852, 23851, 23853, 23854, 23857, 23856, 23859, 23858, 23872, 23860, 23875, 23874, 23861, 23863, 23879, 23881, 23865, 23864, 23866, 23868, 23867, 23869, 23871, 23870, 23873, 23872, 23875, 23874, 23877, 23876, 23878, 23879, 23881, 23883, 23882, 23885, 23884, 23887, 23886, 24469, 23889, 23888, 24471, 23891, 23890, 23893, 23892, 23895, 23894, 23897, 23896, 23899, 23898, 23900, 23902, 23901, 24475, 23904, 23903, 24477, 23906, 23905, 24479, 23908, 23907, 24481, 23910, 23909, 23911, 23913, 23912, 23914, 23915, 23917, 23918, 23920, 23922, 23921, 23923, 23925, 23924, 23926, 23928, 23927, 23929, 23931, 23930, 23932, 23934, 23933, 24483, 23936, 23935, 24485, 23938, 23937, 23939, 23941, 23940, 23942, 23944, 23943, 23945, 23947, 23946, 23948, 23950, 23949, 23951, 23953, 23952, 23954, 23956, 23955, 23958, 23957, 23960, 23959, 23961, 23963, 23962, 23964, 23966, 23965, 23967, 23969, 23968, 23970, 23972, 23971, 23974, 23973, 24487, 23976, 23975, 23978, 23977, 23979, 23981, 23980, 23983, 23982, 24489, 23985, 23984, 24491, 24066, 23987, 23986, 23989, 23988, 23990, 23993, 23992, 23995, 23994, 23996, 23999, 23998, 24001, 24000, 24003, 24002, 24005, 24004, 24006, 24008, 24010, 24009, 24011, 24014, 24013, 24016, 24015, 24018, 24017, 24020, 24019, 24021, 24022, 24025, 24024, 24026, 24028, 24027, 24030, 24029, 24032, 24031, 24034, 24033, 24036, 24035, 24037, 24040, 24039, 24042, 24041, 24044, 24043, 24046, 24045, 24047, 24049, 24048, 24051, 24050, 24052, 24055, 24054, 24056, 24059, 24058, 24061, 24060, 24501, 24063, 24062, 24503, 24083, 24085, 24065, 24064, 24066, 24068, 24067, 24069, 24071, 24070, 24072, 24073, 24075, 24077, 24076, 24079, 24078, 24080, 24082, 24083, 24085, 24087, 24086, 24089, 
24088, 24090, 24092, 24095, 24094, 24096, 24099, 24098, 24100, 24102, 24101, 24103, 24105, 24104, 24106, 24109, 24108, 24110, 24111, 24113, 24115, 24114, 24116, 24118, 24117, 24120, 24119, 24122, 24121, 24509, 24124, 24123, 24125, 24127, 24126, 24128, 24130, 24129, 24131, 24132, 24134, 24136, 24135, 24138, 24137, 24140, 24139, 24141, 24143, 24142, 24144, 24574, 24567, 24782, 24145, 24574, 24567, 24574, 24576, 24588, 24590, 24592, 24599, 24598, 24600, 24602, 24601, 24146, 24605, 24147, 24784, 24607, 24606, 24786, 24622, 24148, 24624, 24623, 24626, 24610, 24788, 24593, 24575, 24593, 24568, 24573, 24155, 24589, 24591, 24593, 24720, 24719, 24721, 24723, 24722, 24724, 24726, 24725, 24728, 24727, 24730, 24729, 24732, 24731, 24734, 24733, 24156, 24773, 24772, 24774, 24736, 24735, 24780, 24738, 24157, 24740, 24739, 24824, 24187, 24186, 24189, 24188, 24191, 24190, 24826, 24828, 24225, 24224, 24708, 24707, 24710, 24709, 24712, 24226, 24830, 24832, 24834, 24836, 24838, 24714, 24713, 24840, 24716, 24715, 24842, 24844, 24846, 24848, 24850, 24718, 24227, 24852, 24511, 24510, 24513, 24512, 24515, 24514, 24516, 24518, 24517, 24856, 24519, 24521, 24523, 24522, 24858, 24525, 24524, 24860, 24527, 24526, 24528, 24530, 24529, 24531, 24533, 24532, 24862, 24535, 24534, 24864, 24536, 24538, 24539, 24541, 24543, 24542, 24545, 24544, 24547, 24546, 24548, 24550, 24549, 24551, 24553, 24552, 24866, 24555, 24554, 24868, 24556, 24558, 24560, 24559, 24562, 24561, 24583, 24563, 24872, 24585, 24564, 24587, 24572, 24593, 24565, 24567, 24575, 24874, 24579, 24566, 24570, 24876, 24583, 24582, 24878, 24585, 24571, 24587, 24572, 24593, 24574, 24568, 24567, 24881, 24569, 24580, 24570, 24883, 24583, 24582, 24885, 24585, 24571, 24587, 24572, 24574, 24573, 24576, 24575, 24578, 24577, 24579, 24581, 24580, 24888, 24583, 24582, 24890, 24585, 24584, 24587, 24586, 24589, 24588, 24591, 24590, 24593, 24592, 24594, 24595, 24597, 24599, 24598, 24600, 24602, 24601, 24603, 24605, 24604, 24894, 24607, 24606, 24896, 
24622, 24608, 24624, 24623, 24610, 24609, 24898, 24612, 24611, 24613, 24615, 24614, 24616, 24618, 24617, 24620, 24619, 24622, 24621, 24624, 24623, 24626, 24625, 24900, 24628, 24627, 24630, 24629, 24632, 24631, 24634, 24633, 24902, 24636, 24635, 24904, 24638, 24637, 24906, 24640, 24639, 24692, 24691, 24694, 24641, 24643, 24642, 24908, 24645, 24644, 24910, 24647, 24646, 24648, 24650, 24649, 24651, 24653, 24652, 24654, 24656, 24655, 24912, 24658, 24657, 24914, 24659, 24660, 24663, 24662, 24664, 24666, 24665, 24667, 24668, 24670, 24669, 24672, 24671, 24916, 24674, 24673, 24676, 24675, 24678, 24677, 24680, 24679, 24682, 24681, 24684, 24683, 24685, 24918, 24688, 24687, 24689, 24692, 24691, 24694, 24693, 24696, 24695, 24698, 24697, 24700, 24699, 24702, 24701, 24704, 24703, 24706, 24705, 24708, 24707, 24710, 24709, 24712, 24711, 24924, 24926, 24928, 24930, 24932, 24714, 24713, 24934, 24716, 24715, 24936, 24938, 24941, 24944, 24946, 24718, 24717, 24752, 24751, 24754, 24753, 24948, 24756, 24755, 24758, 24757, 24760, 24759, 24950, 24720, 24719, 24721, 24723, 24722, 24724, 24726, 24725, 24728, 24727, 24730, 24729, 24732, 24731, 24734, 24733, 24736, 24735, 24738, 24737, 24740, 24739, 24742, 24741, 24956, 24744, 24743, 24958, 24746, 24745, 24960, 24748, 24747, 24962, 24750, 24749, 24752, 24751, 24754, 24753, 24964, 24756, 24755, 24758, 24757, 24760, 24759, 24966, 24762, 24761, 24764, 24763, 24766, 24765, 24768, 24767, 24770, 24769, 24771, 24773, 24772, 24774, 24776, 24775, 24777, 24779, 24778, 24780, 24996, 24987, 24999, 24998, 25001, 25000, 25002, 25135, 25137, 24994, 24791, 24995, 24997, 24996, 24999, 24998, 25001, 25000, 25002, 25139, 25141, 24994, 24791, 24995, 24996, 24987, 24999, 24998, 25001, 24789, 25002, 25005, 25004, 25143, 24994, 24791, 24792, 24794, 24793, 24796, 24795, 24798, 24797, 24800, 24799, 24802, 24801, 24803, 24805, 24804, 24806, 24808, 24807, 24971, 24974, 24973, 24976, 24975, 24978, 24977, 24980, 24979, 24982, 24810, 24983, 24985, 24984, 24986, 25050, 
24811, 24813, 24812, 24815, 24814, 13718, 25057, 25056, 25177, 24817, 24816, 25062, 24818, 24820, 24819, 24822, 24821, 24969, 24971, 24974, 24973, 24976, 24975, 24978, 24977, 24980, 24979, 24982, 24981, 24983, 24985, 24984, 24986, 24988, 24987, 24999, 24989, 25001, 25000, 25002, 25005, 25004, 24992, 24991, 24994, 24993, 24995, 24997, 24996, 24999, 24998, 25001, 25000, 25002, 25005, 25004, 25007, 25006, 25009, 25008, 25010, 25012, 25011, 25014, 25013, 25016, 25015, 25017, 25019, 25018, 25020, 25022, 25021, 25024, 25023, 25026, 25025, 25028, 25027, 25030, 25029, 25031, 25033, 25032, 25034, 25036, 25035, 25208, 25038, 25037, 25210, 25040, 25039, 25212, 25042, 25041, 25214, 25044, 25043, 25045, 25047, 25046, 25048, 25050, 25049, 25051, 25054, 25053, 13718, 25057, 25056, 13733, 25060, 25059, 25062, 25061, 25064, 25063, 25065, 25067, 25066, 25068, 25070, 25069, 25072, 25071, 25074, 25073, 25075, 25077, 25076, 25078, 25080, 25079, 25081, 25083, 25082, 25084, 25086, 25085, 25087, 25089, 25088, 25091, 25090, 25093, 25092, 25218, 25095, 25094, 25220, 25097, 25096, 25099, 25098, 25101, 25100, 25102, 25103, 25105, 25107, 25106, 25222, 25109, 25108, 25224, 25111, 25110, 25113, 25112, 25115, 25114, 25117, 25116, 25119, 25118, 25121, 25120, 25123, 25122, 25226, 25125, 25124, 25228, 25127, 25126, 25129, 25128, 25131, 25130, 25133, 25132, 25200, 25202, 25159, 25149, 25148, 25273, 25150, 25152, 25151, 25154, 25153, 25275, 25156, 25155, 25277, 25158, 25157, 25279, 25200, 25202, 25159, 25161, 25160, 25163, 25162, 25165, 25164, 25167, 25166, 25169, 25168, 25233, 25252, 25171, 25170, 25283, 25173, 25172, 25285, 25175, 25174, 25287, 25179, 25178, 25181, 25180, 25183, 25182, 25185, 25184, 25187, 25186, 25189, 25188, 25190, 25192, 25191, 25193, 25195, 25194, 25291, 25197, 25196, 25293, 25199, 25198, 25295, 25200, 25202, 25204, 25206, 25236, 25235, 25238, 25229, 25240, 25239, 25230, 25243, 25242, 25245, 25244, 25247, 25246, 25231, 25250, 25249, 25297, 25233, 25232, 25253, 25234, 25236, 
25235, 25238, 25237, 25240, 25239, 25241, 25243, 25242, 25245, 25244, 25247, 25246, 25248, 25250, 25249, 25299, 25252, 25251, 25254, 25253, 25256, 25255, 25258, 25257, 25260, 25259, 25261, 25263, 25262, 25264, 25265, 25267, 25269, 25271, 25303, 25305, 25314, 25310, 25306, 25317, 25312, 25313, 25314, 25310, 25306, 25317, 25312, 25313, 25314, 25307, 25308, 25317, 25312, 25319, 25314, 25310, 25316, 25317, 25309, 25319, 25314, 25310, 25311, 25317, 25312, 25313, 25315, 25314, 25316, 25318, 25317, 25319, 14, 15, 8024, 8025, 8028, 8029, 8030, 8031, 8040, 8041, 8044, 8045, 8048, 8049, 8052, 8053, 8054, 8055, 8056, 8057, 8058, 8059, 8060, 8061, 8062, 8063, 8064, 8065, 8066, 8067, 8068, 8069, 8070, 8071, 8072, 8073, 8074, 8075, 8076, 8077, 8078, 8079, 8080, 8081, 8082, 8083, 8084, 8085, 8086, 8087, 8088, 8089, 8090, 8091, 8092, 8093, 8094, 8095, 8096, 8097, 8098, 8099, 8100, 8101, 8102, 8103, 8104, 8105, 8106, 8107, 8108, 8109, 8110, 8111, 8112, 8115, 8116, 8117, 8118, 8119, 8120, 8123, 8124, 8125, 8126, 8127, 8128, 8129, 8130, 8133, 8134, 8135, 8136, 8137, 8138, 8139, 8140, 8141, 8142, 8143, 8144, 8145, 8146, 8147, 8148, 8149, 8150, 8151, 8152, 8153, 8154, 8157, 8158, 8161, 8162, 8163, 8164, 8165, 8197, 8198, 8201, 8204, 8205, 8206, 8207, 8208, 8209, 8210, 8211, 8212, 8213, 8214, 8215, 8216, 8217, 8218, 8219, 8220, 8221, 8222, 8223, 8224, 8225, 8228, 8229, 8232, 8233, 8234, 8235, 8236, 8237, 8238, 8239, 8240, 8241, 8242, 8243, 8244, 8245, 8246, 8247, 8248, 8249, 8250, 8251, 8252, 8253, 8254, 8255, 8256, 8257, 8258, 8259, 8260, 8261, 8262, 8263, 8264, 8265, 8266, 8267, 8268, 8269, 8270, 8271, 8272, 8273, 8274, 8275, 8276, 8277, 8280, 8281, 8283, 8284, 8286, 8287, 8288, 8289, 8290, 8291, 8292, 8293, 8294, 8295, 8298, 8299, 8300, 8301, 8302, 8303, 8304, 8305, 8306, 8307, 8308, 8309, 8310, 8311, 8312, 8313, 8314, 8315, 8316, 8317, 8318, 8319, 8321, 8322, 8324, 8325, 8326, 8327, 8328, 8329, 8330, 8331, 8332, 8333, 8334, 8335, 8337, 8338, 8340, 8341, 8342, 8343, 8344, 8345, 8346, 
8347, 8348, 8349, 8350, 8351, 8352, 8353, 8354, 8355, 8356, 8357, 8358, 8359, 8360, 8361, 8362, 8363, 8364, 8365, 8366, 8369, 8370, 8373, 8374, 8375, 8382, 8383, 8384, 8402, 8403, 8406, 8407, 8414, 8415, 8416, 8417, 8418, 8419, 8420, 8421, 8422, 8423, 8424, 8425, 8426, 8435, 8436, 8437, 8438, 8439, 8440, 8441, 8442, 8443, 8444, 8445, 8446, 8447, 8448, 8449, 8450, 8451, 8452, 8455, 8456, 8457, 8458, 8459, 8460, 8461, 8462, 8463, 8464, 8465, 8466, 8467, 8468, 8469, 8470, 8471, 8472, 8473, 8474, 8475, 8476, 8477, 8478, 8479, 8480, 8482, 8484, 8485, 8486, 8487, 8488, 8489, 8490, 8491, 8492, 8493, 8494, 8495, 8496, 8497, 8498, 8499, 8500, 8501, 8502, 8503, 8504, 8505, 8506, 8507, 8508, 8509, 8510, 8511, 8512, 8513, 8514, 8515, 8516, 8517, 8518, 8519, 8520, 8521, 8522, 8523, 8524, 8525, 8526, 8527, 8528, 8529, 8530, 8531, 8532, 8533, 8534, 8535, 8536, 8537, 8538, 8539, 8540, 8541, 8542, 8543, 8544, 8545, 8546, 8547, 8548, 8549, 8550, 8551, 8552, 8553, 8554, 8555, 8556, 8557, 8558, 8559, 8560, 8561, 8562, 8563, 8564, 8565, 8566, 8567, 8568, 8569, 8570, 8571, 8572, 8573, 8574, 8575, 8576, 8577, 8578, 8581, 8582, 8583, 8584, 8585, 8586, 8587, 8588, 8589, 8591, 8592, 8594, 8595, 8596, 8597, 8598, 8599, 8600, 8601, 8602, 8603, 8604, 8605, 8606, 8607, 8608, 8615, 8616, 8617, 8618, 8619, 8620, 8621, 8622, 8623, 8624, 8625, 8626, 8627, 8628, 8629, 8630, 8631, 8632, 8633, 8635, 8636, 8638, 8639, 8640, 8641, 8642, 8643, 8644, 8645, 8647, 8648, 8650, 8651, 8652, 8653, 8654, 8655, 8656, 8657, 8658, 8659, 8660, 8661, 8662, 8663, 8664, 8665, 8666, 8667, 8668, 8669, 8670, 8671, 8672, 8673, 8674, 8675, 8676, 8677, 8678, 8679, 8680, 8681, 8682, 8683, 8684, 8685, 8686, 8687, 8688, 8689, 8690, 8691, 8692, 8693, 8694, 8695, 8696, 8697, 8698, 8699, 8700, 8701, 8702, 8703, 8704, 8705, 8706, 8707, 8708, 8709, 8710, 8711, 8712, 8713, 8714, 8715, 8716, 8717, 8718, 8719, 8720, 8721, 8722, 8723, 8724, 8725, 8726, 8728, 8730, 8731, 8732, 8733, 8734, 8735, 8736, 8737, 8738, 8739, 8740, 8741, 8742, 
8743, 8744, 8745, 8746, 8747, 8748, 8749, 8750, 8751, 8752, 8753, 8754, 8755, 8756, 8757, 8758, 8759, 8760, 8761, 8762, 8763, 8764, 8765, 8766, 8767, 8768, 8769, 8770, 8771, 8772, 8773, 8774, 8775, 8776, 8777, 8778, 8779, 8780, 8781, 8782, 8783, 8784, 8785, 8786, 8789, 8790, 8791, 8792, 8793, 8794, 8797, 8798, 8801, 8802, 8805, 8806, 8809, 8810, 8811, 8812, 8813, 8814, 8815, 8816, 8817, 8818, 8819, 8820, 8821, 8822, 8823, 8824, 8827, 8828, 8829, 8830, 8831, 8832, 8835, 8836, 8839, 8840, 8843, 8844, 8847, 8848, 8849, 8850, 8851, 8852, 8853, 8854, 8855, 8856, 8857, 8858, 8859, 8860, 8861, 8862, 8863, 8864, 8865, 8866, 8867, 8868, 8869, 8870, 8871, 8872, 8873, 8874, 8875, 8876, 8877, 8878, 8879, 8880, 8881, 8882, 8883, 8884, 8885, 8886, 8887, 8888, 8889, 8890, 8891, 8892, 8893, 8894, 8895, 8896, 8897, 8898, 8899, 8900, 8901, 8902, 8903, 8904, 8905, 8906, 8907, 8908, 8909, 8910, 8911, 8912, 8913, 8914, 8915, 8916, 8917, 8918, 8919, 8920, 8921, 8922, 8923, 8924, 8925, 8926, 8927, 8928, 8929, 8930, 8932, 8933, 8935, 8936, 8937, 8938, 8939, 8940, 8941, 8942, 8945, 8946, 8949, 8950, 8953, 8954, 8957, 8958, 8959, 8960, 8961, 8962, 8963, 8964, 8965, 8966, 8967, 8968, 8969, 8970, 8971, 8972, 8974, 8975, 8976, 8977, 8978, 8979, 8980, 8981, 8982, 8983, 8984, 8985, 8987, 8988, 8989, 8990, 8991, 8992, 8993, 8994, 8995, 8996, 8997, 8998, 8999, 9000, 9001, 9002, 9003, 9004, 9005, 9006, 9007, 9008, 9009, 9010, 9011, 9012, 9013, 9014, 9015, 9016, 9017, 9018, 9019, 9020, 9021, 9022, 9023, 9024, 9025, 9026, 9027, 9028, 9029, 9030, 9031, 9032, 9033, 9034, 9035, 9036, 9037, 9038, 9039, 9040, 9041, 9042, 9043, 9044, 9045, 9046, 9047, 9048, 9049, 9050, 9051, 9052, 9053, 9054, 9055, 9056, 9057, 9058, 9059, 9060, 9061, 9062, 9063, 9066, 9067, 9070, 9071, 9072, 9073, 9074, 9075, 9076, 9077, 9078, 9079, 9080, 9081, 9082, 9085, 9086, 9087, 9088, 9089, 9090, 9093, 9094, 9095, 9098, 9099, 9100, 9101, 9102, 9105, 9106, 9107, 9108, 9111, 9112, 9115, 9116, 9117, 9118, 9119, 9120, 9121, 9122, 9123, 
9124, 9125, 9126, 9127, 9128, 9129, 9130, 9131, 9132, 9133, 9134, 9135, 9136, 9137, 9138, 9139, 9140, 9141, 9142, 9143, 9144, 9145, 9146, 9147, 9148, 9149, 9150, 9151, 9152, 9153, 9154, 9155, 9156, 9157, 9158, 9159, 9160, 9161, 9162, 9163, 9164, 9165, 9166, 9167, 9168, 9169, 9170, 9171, 9172, 9173, 9174, 9175, 9176, 9177, 9178, 9179, 9180, 9181, 9182, 9183, 9184, 9191, 9192, 9193, 9194, 9195, 9196, 9197, 9198, 9199, 9200, 9201, 9202, 9203, 9204, 9205, 9206, 9207, 9210, 9213, 9214, 9215, 9216, 9217, 9218, 9220, 9221, 9231, 9232, 9233, 9234, 9235, 9236, 9237, 9238, 9239, 9240, 9241, 9242, 9243, 9246, 9247, 9250, 9251, 9252, 9253, 9254, 9255, 9256, 9257, 9258, 9259, 9260, 9261, 9262, 9263, 9264, 9265, 9266, 9267, 9268, 9269, 9270, 9271, 9272, 9273, 9276, 9277, 9280, 9281, 9282, 9283, 9284, 9285, 9286, 9287, 9288, 9289, 9290, 9291, 9292, 9293, 9294, 9295, 9296, 9297, 9298, 9299, 9300, 9301, 9302, 9303, 9304, 9305, 9306, 9307, 9308, 9309, 9310, 9311, 9312, 9313, 9314, 9315, 9316, 9318, 9319, 9321, 9322, 9323, 9324, 9325, 9326, 9327, 9328, 9329, 9330, 9331, 9332, 9333, 9334, 9335, 9336, 9337, 9338, 9339, 9340, 9341, 9342, 9343, 9344, 9345, 9346, 9347, 9348, 9349, 9350, 9351, 9352, 9353, 9354, 9355, 9356, 9357, 9358, 9359, 9360, 9361, 9363, 9364, 9366, 9367, 9370, 9371, 9372, 9373, 9374, 9375, 9376, 9377, 9378, 9379, 9380, 9381, 9382, 9383, 9384, 9385, 9386, 9387, 9388, 9389, 9390, 9391, 9392, 9393, 9394, 9395, 9396, 9397, 9398, 9399, 9400, 9403, 9404, 9407, 9408, 9409, 9410, 9411, 9412, 9415, 9416, 9417, 9418, 9419, 9420, 9421, 9422, 9423, 9424, 9427, 9428, 9431, 9432, 9433, 9434, 9435, 9436, 9439, 9440, 9441, 9442, 9443, 9444, 9445, 9446, 9447, 9448, 9449, 9450, 9453, 9454, 9455, 9456, 9457, 9458, 9459, 9460, 9461, 9462, 9463, 9464, 9465, 9466, 9467, 9468, 9469, 9470, 9473, 9474, 9477, 9478, 9481, 9482, 9483, 9484, 9485, 9486, 9487, 9488, 9489, 9490, 9491, 9492, 9493, 9494, 9495, 9498, 9499, 9502, 9503, 9505, 9506, 9508, 9509, 9512, 9513, 9514, 9515, 9516, 9517, 9518, 
9519, 9520, 9521, 9522, 9523, 9524, 9526, 9528, 9529, 9530, 9533, 9536, 9537, 9538, 9539, 9540, 9541, 9542, 9543, 9544, 9545, 9546, 9547, 9548, 9549, 9550, 9551, 9554, 9555, 9558, 9559, 9560, 9561, 9562, 9563, 9564, 9565, 9566, 9567, 9568, 9569, 9570, 9571, 9572, 9573, 9574, 9575, 9576, 9577, 9578, 9579, 9580, 9581, 9582, 9583, 9584, 9585, 9586, 9587, 9588, 9589, 9590, 9591, 9592, 9593, 9594, 9595, 9596, 9598, 9599, 9601, 9602, 9603, 9604, 9605, 9607, 9608, 9610, 9611, 9612, 9613, 9614, 9615, 9616, 9617, 9620, 9621, 9624, 9625, 9626, 9627, 9628, 9629, 9630, 9631, 9632, 9633, 9634, 9635, 9636, 9637, 9638, 9639, 9640, 9641, 9642, 9643, 9644, 9645, 9646, 9647, 9648, 9649, 9650, 9651, 9652, 9653, 9654, 9655, 9656, 9657, 9658, 9659, 9660, 9661, 9662, 9663, 9664, 9665, 9666, 9667, 9668, 9669, 9670, 9671, 9672, 9673, 9674, 9675, 9676, 9677, 9678, 9679, 9680, 9681, 9682, 9683, 9684, 9685, 9686, 9687, 9688, 9689, 9690, 9691, 9692, 9693, 9694, 9699, 9700, 9701, 9702, 9703, 9704, 9705, 9706, 9707, 9708, 9709, 9712, 9713, 9716, 9717, 9718, 9719, 9720, 9721, 9722, 9723, 9726, 9727, 9730, 9731, 9732, 9733, 9736, 9737, 9740, 9741, 9742, 9743, 9744, 9745, 9746, 9747, 9748, 9749, 9750, 9751, 9752, 9753, 9754, 9755, 9756, 9757, 9758, 9759, 9760, 9761, 9762, 9763, 9764, 9765, 9766, 9769, 9770, 9773, 9774, 9775, 9776, 9777, 9778, 9779, 9780, 9781, 9782, 9783, 9784, 9785, 9786, 9789, 9790, 9793, 9794, 9795, 9796, 9797, 9798, 9799, 9800, 9801, 9802, 9803, 9804, 9805, 9806, 9807, 9808, 9809, 9810, 9811, 9812, 9813, 9814, 9815, 9816, 9819, 9820, 9821, 9822, 9823, 9824, 9825, 9826, 9827, 9828, 9829, 9830, 9831, 9832, 9833, 9836, 9837, 9838, 9839, 9840, 9841, 9842, 9843, 9844, 9845, 9846, 9847, 9848, 9849, 9850, 9852, 9854, 9856, 9858, 9860, 9861, 9863, 9864, 9867, 9870, 9871, 9872, 9873, 9874, 9875, 9876, 9877, 9878, 9881, 9882, 9885, 9886, 9887, 9888, 9889, 9890, 9891, 9892, 9895, 9896, 9899, 9900, 9901, 9902, 9903, 9906, 9907, 9908, 9909, 9910, 9911, 9912, 9913, 9916, 9917, 9920, 9921, 
9922, 9923, 9924, 9927, 9928, 9929, 9930, 9931, 9932, 9933, 9934, 9935, 9936, 9937, 9938, 9939, 9940, 9941, 9942, 9945, 9946, 9947, 9948, 9949, 9952, 9953, 9954, 9955, 9958, 9959, 9962, 9963, 9964, 9965, 9966, 9967, 9968, 9969, 9970, 9971, 9972, 9973, 9974, 9975, 9976, 9977, 9978, 9979, 9980, 9981, 9982, 9984, 9985, 9987, 9988, 9989, 9990, 9991, 9992, 9993, 9994, 9995, 9996, 9997, 9998, 9999, 10000, 10001, 10002, 10003, 10006, 10007, 10009, 10010, 10012, 10013, 10016, 10017, 10018, 10019, 10020, 10021, 10022, 10023, 10024, 10025, 10026, 10027, 10028, 10029, 10030, 10031, 10032, 10033, 10034, 10035, 10036, 10037, 10038, 10039, 10040, 10041, 10042, 10043, 10044, 10045, 10046, 10047, 10048, 10049, 10050, 10051, 10052, 10053, 10054, 10055, 10056, 10057, 10058, 10059, 10060, 10061, 10062, 10063, 10064, 10065, 10066, 10067, 10068, 10069, 10070, 10071, 10072, 10073, 10074, 10075, 10076, 10077, 10078, 10079, 10080, 10081, 10082, 10083, 10086, 10089, 10090, 10091, 10092, 10093, 10094, 10095, 10096, 10097, 10098, 10099, 10100, 10101, 10102, 10103, 10105, 10106, 10107, 10108, 10109, 10110, 10111, 10112, 10113, 10114, 10115, 10116, 10117, 10118, 10119, 10120, 10122, 10123, 10124, 10125, 10126, 10127, 10128, 10129, 10130, 10131, 10132, 10135, 10136, 10139, 10140, 10141, 10142, 10143, 10144, 10146, 10147, 10149, 10150, 10151, 10152, 10153, 10156, 10157, 10160, 10161, 10164, 10165, 10168, 10169, 10170, 10171, 10172, 10173, 10174, 10175, 10176, 10177, 10178, 10179, 10180, 10181, 10182, 10183, 10184, 10185, 10186, 10187, 10188, 10189, 10190, 10191, 10194, 10195, 10198, 10199, 10200, 10201, 10202, 10203, 10204, 10205, 10206, 10207, 10208, 10209, 10210, 10211, 10212, 10213, 10214, 10215, 10216, 10217, 10218, 10219, 10220, 10221, 10222, 10223, 10224, 10225, 10226, 10227, 10228, 10229, 10230, 10231, 10232, 10233, 10234, 10235, 10238, 10239, 10240, 10241, 10242, 10243, 10244, 10245, 10246, 10249, 10250, 10253, 10254, 10255, 10256, 10257, 10258, 10259, 10260, 10261, 10262, 10263, 10264, 
10265, 10266, 10267, 10268, 10269, 10270, 10271, 10272, 10273, 10274, 10275, 10276, 10277, 10278, 10279, 10280, 10281, 10282, 10283, 10284, 10285, 10286, 10287, 10288, 10289, 10290, 10291, 10294, 10295, 10296, 10297, 10299, 10300, 10302, 10303, 10304, 10305, 10306, 10308, 10309, 10312, 10313, 10315, 10316, 10317, 10318, 10319, 10320, 10321, 10322, 10323, 10324, 10325, 10326, 10327, 10328, 10329, 10332, 10333, 10336, 10337, 10338, 10339, 10340, 10341, 10342, 10343, 10344, 10345, 10346, 10347, 10348, 10349, 10350, 10351, 10352, 10353, 10354, 10355, 10356, 10357, 10358, 10359, 10360, 10361, 10363, 10365, 10366, 10367, 10368, 10369, 10370, 10371, 10372, 10373, 10374, 10375, 10377, 10379, 10380, 10381, 10382, 10383, 10384, 10385, 10386, 10387, 10388, 10389, 10390, 10391, 10392, 10395, 10396, 10397, 10398, 10399, 10400, 10401, 10402, 10403, 10404, 10405, 10406, 10407, 10408, 10409, 10410, 10411, 10412, 10413, 10414, 10415, 11664, 11665, 11668, 11669, 11670, 11671, 11672, 11673, 11674, 11675, 11676, 11677, 11678, 11679, 11680, 11681, 11682, 11683, 11686, 11687, 11690, 11691, 11692, 11693, 11694, 11695, 11702, 11703, 11704, 11705, 11706, 11707, 11708, 11709, 11710, 11711, 11712, 11713, 11714, 11715, 11716, 11717, 11718, 11719, 11720, 11721, 11722, 11723, 11724, 11725, 11726, 11727, 11728, 11729, 11730, 11731, 11732, 11733, 11734, 11735, 11736, 11737, 25339, 25337, 11996, 11997, 11998, 11999, 12000, 12001, 25460, 25459, 25627, 25626, 12091, 12092, 12093, 12094, 12095, 12096, 12097, 12098, 12109, 12110, 12113, 12114, 12125, 12126, 25655, 25653, 25825, 25824, 26366, 26364, 26397, 26395, 12893, 12894, 12896, 12897, 12899, 12900, 12901, 12902, 12903, 12906, 12907, 12908, 12909, 12912, 12913, 12916, 12917, 12918, 12919, 12920, 12921, 12922, 12923, 12926, 12927, 12930, 12931, 12932, 12933, 12934, 12935, 12936, 12937, 12938, 12939, 12940, 12941, 12942, 12943, 12944, 12945, 12948, 12949, 12952, 12953, 12954, 12955, 12957, 12958, 12960, 12961, 12964, 12965, 12966, 12967, 12968, 
12969, 12970, 12971, 12974, 12975, 12976, 12979, 12980, 12983, 12984, 12985, 12986, 12987, 12988, 12990, 12991, 12994, 12995, 12996, 12999, 13000, 13003, 13004, 13005, 13006, 13007, 13008, 13010, 13011, 13012, 13013, 13014, 13015, 13016, 13019, 13020, 13023, 13024, 13025, 13026, 13027, 13028, 13030, 13031, 13033, 13034, 13035, 13036, 13037, 13038, 13039, 13040, 13041, 13042, 13043, 13044, 13045, 13048, 13049, 13052, 13053, 13054, 13055, 13056, 13057, 13060, 13061, 13062, 13063, 13064, 13065, 13066, 13067, 13068, 13069, 13070, 13071, 13072, 13073, 13074, 13075, 13078, 13079, 13080, 13081, 13082, 13083, 13084, 13085, 13088, 13089, 13092, 13093, 13096, 13097, 13098, 13099, 13100, 13101, 13102, 13103, 13106, 13107, 13110, 13111, 13112, 13113, 13114, 13115, 13116, 13117, 13118, 13119, 13120, 13123, 13124, 13127, 13128, 13129, 13130, 13131, 13132, 13133, 13134, 13135, 13136, 13137, 13138, 13139, 13142, 13143, 13144, 13145, 13146, 13147, 13148, 13149, 13150, 13151, 13152, 13153, 13154, 13157, 13158, 13159, 13160, 13161, 13162, 13163, 13164, 13165, 13166, 13167, 13168, 13169, 13170, 13171, 13172, 13173, 13175, 13176, 13178, 13179, 13181, 13182, 13184, 13185, 13196, 13197, 13200, 13201, 13214, 13215, 13216, 13217, 13218, 13219, 13222, 13223, 13224, 13225, 13226, 13227, 13230, 13231, 13232, 13233, 13234, 13235, 13236, 13237, 13239, 13240, 13241, 13242, 13243, 13244, 13246, 13247, 13248, 13249, 13250, 13251, 13253, 13254, 13256, 13257, 13260, 13261, 13264, 13265, 13268, 13269, 13272, 13273, 13274, 13275, 13276, 13277, 13280, 13281, 13282, 13283, 13284, 13285, 13288, 13289, 13290, 13291, 13292, 13293, 13295, 13296, 13298, 13299, 13300, 13301, 13302, 13303, 13304, 13305, 13306, 13307, 13308, 13309, 14158, 14159, 14160, 14161, 14162, 14163, 14164, 14169, 14170, 14171, 14172, 14173, 14174, 14175, 14176, 14177, 14178, 14183, 14184, 14185, 14186, 14187, 14188, 14189, 14190, 14191, 14192, 14193, 14194, 14197, 14198, 14199, 14202, 14203, 14204, 14205, 14206, 14207, 14208, 14209, 
14210, 14211, 14212, 14213, 14214, 14215, 14332, 14333, 14334, 14335, 14336, 14337, 14338, 14339, 14340, 14341, 14342, 14343, 14344, 14345, 14346, 14347, 14348, 14349, 14350, 14351, 14352, 14353, 14354, 14355, 14356, 14357, 14360, 14361, 14362, 14363, 14364, 14365, 14366, 14367, 27560, 27558, 27570, 27569, 27568, 27830, 27828, 27840, 27839, 27838, 15040, 15041, 15042, 15043, 15044, 15045, 15046, 15047, 15048, 15049, 15050, 15051, 15052, 15053, 15054, 15055, 15056, 15057, 15058, 15059, 15060, 15061, 15062, 15063, 15064, 15065, 15066, 15067, 15068, 15069, 15070, 15071, 15072, 15073, 15074, 15075, 15076, 15077, 15078, 15079, 15080, 15081, 15082, 15083, 15084, 15085, 15086, 15087, 15088, 15089, 15090, 15091, 15092, 15093, 15094, 15095, 15096, 15097, 15098, 15099, 15100, 15101, 15102, 15103, 15104, 15105, 15106, 15107, 15108, 15109, 15112, 15113, 15116, 15117, 15120, 15121, 15124, 15125, 15126, 15127, 15128, 15129, 15130, 15131, 15132, 15133, 15134, 15135, 15136, 15137, 15138, 15139, 15140, 15141, 15142, 15143, 15144, 15145, 15146, 15147, 15148, 15149, 15150, 15152, 15153, 15155, 15156, 15157, 15158, 15159, 15160, 15161, 15162, 15163, 15164, 15165, 15166, 15167, 15168, 15169, 15170, 15171, 15172, 15173, 15174, 15175, 15178, 15179, 15182, 15183, 15184, 15185, 15186, 15187, 15188, 15189, 15190, 15191, 15192, 15195, 15196, 15199, 15200, 15201, 15202, 15203, 15204, 15205, 15206, 15207, 15208, 15209, 15210, 15211, 15212, 15215, 15216, 15442, 15443, 15444, 15445, 15446, 15447, 15448, 15449, 15450, 15451, 15452, 27931, 27943, 15473, 15474, 15477, 15478, 15479, 15480, 15481, 15484, 15485, 15488, 15489, 15492, 15493, 15494, 15495, 15496, 15497, 15498, 15499, 15500, 15501, 15502, 15504, 15505, 15507, 15508, 15509, 15510, 15513, 15514, 15517, 15518, 15531, 15532, 15533, 15534, 15535, 15536, 15538, 15539, 15540, 15541, 15542, 15543, 15545, 15546, 15547, 15548, 15549, 15550, 15553, 15554, 15557, 15558, 15561, 15562, 15563, 15564, 15912, 15913, 15914, 15915, 15916, 15917, 15918, 
15919, 15920, 15921, 15922, 15923, 15924, 15925, 15926, 15927, 15930, 15931, 15932, 15933, 15934, 15935, 15936, 15937, 15938, 15939, 15940, 15941, 15942, 15943, 15944, 15945, 15946, 15947, 15948, 15949, 15952, 15953, 15954, 15955, 15956, 15957, 15958, 15959, 15960, 15961, 15962, 15963, 15964, 15965, 15966, 15968, 15970, 15971, 16724, 16725, 16726, 16727, 16728, 16729, 16741, 16742, 16743, 16744, 16745, 16746, 16754, 16755, 16756, 16757, 16758, 16759, 16767, 16768, 16769, 16770, 16771, 16772, 16780, 16781, 16782, 16783, 16784, 16785, 16876, 16877, 16878, 16879, 16880, 16881, 13, 14, 15, 28353, 28355, 28357, 28359, 28361, 28363, 28365, 28368, 28371, 28373, 28379, 28381, 28387, 28389, 28391, 28393, 28395, 28398, 28401, 28403, 28405, 28407, 28409, 28412, 28415, 28422, 28424, 28428, 28430, 28432, 28434, 28438, 28440, 28442, 28446, 28449, 28451, 28455, 28458, 28460, 28462, 28464, 28467, 28473, 28476, 28479, 28482, 28485, 28487, 28489, 28492, 28494, 28496, 28499, 28501, 28503, 28505, 28508, 28510, 28513, 28515, 28517, 28519, 28521, 28524, 28526, 28528, 28530, 28532, 28534, 28536, 28538, 28540, 28542, 28544, 28546, 28548, 28550, 28552, 28554, 28556, 28560, 28562, 28564, 28567, 28569, 28571, 28574, 28576, 28578, 28580, 28583, 28585, 28588, 28590, 28592, 28594, 28597, 28600, 28602, 28604, 28606, 28608, 28610, 28612, 28614, 28617, 28619, 28621, 28623, 28629, 28631, 28633, 28635, 28637, 28639, 28642, 28644, 28646, 28648, 28651, 28653, 28655, 28657, 28660, 28662, 28664, 28666, 28668, 28673, 28676, 28679, 28682, 28686, 28688, 28693, 28698, 28701, 28704, 28706, 28712, 28715, 28718, 28721, 28724, 28726, 28728, 28730, 28732, 28734, 28736, 28738, 28740, 28743, 28746, 28748, 28750, 28752, 28754, 28757, 28761, 28763, 28767, 28769, 28771, 28774, 28777, 28782, 28784, 28787, 28789, 28791, 28793, 28795, 28797, 28800, 28802, 28808, 28810, 28813, 28815, 28818, 28820, 28822, 28825, 28827, 28829, 28831, 28833, 28836, 28839, 28841, 28843, 28846, 28849, 28852, 28855, 28857, 28859, 28861, 28863, 
28865, 28868, 28870, 28872, 28874, 28876, 28878, 28881, 28884, 28889, 28891, 28893, 28898, 28900, 28902, 28904, 28906, 28908, 28910, 28915, 28917, 28922, 28924, 28927, 28930, 28933, 28936, 28939, 28942, 28944, 28946, 28949, 28952, 28954, 28956, 28959, 28962, 28964, 28966, 28968, 28970, 28973, 28976, 28978, 28980, 28982, 28984, 28986, 28988, 28990, 28992, 28994, 28996, 28998, 29001, 29004, 29006, 29008, 29010, 29012, 29014, 29016, 29018, 29021, 29024, 29026, 29028, 29031, 29034, 29037, 29039, 29041, 29047, 29049, 29051, 29053, 29059, 29062, 29067, 29069, 29071, 29074, 29077, 29079, 29081, 29084, 29087, 29090, 29093, 29096, 29098, 29100, 29102, 29104, 29107, 29110, 29112, 29114, 29116, 29118, 29120, 29123, 29126, 29128, 29130, 29132, 29135, 29138, 29140, 29142, 29144, 29146, 29148, 29151, 29153, 29155, 29157, 29160, 29163, 29165, 29167, 29169, 29171, 29173, 29175, 29178, 29181, 29183, 29185, 29188, 29191, 29193, 29195, 29197, 29199, 29201, 29203, 29205, 29207, 29209, 29211, 29213, 29215, 29218, 29221, 29223, 29226, 29228, 29230, 29232, 29234, 29236, 29238, 29240, 29242, 29244, 29247, 29249, 29252, 29254, 29256, 29258, 29261, 29264, 29267, 29269, 29271, 29274, 29277, 29280, 29283, 29287, 29289, 29291, 29293, 29295, 29297, 29301, 29304, 29306, 29308, 29310, 29313, 29315, 29317, 29319, 29322, 29324, 29326, 29328, 29330, 29333, 29335, 29337, 29340, 29343, 29346, 29348, 29350, 29352, 29354, 29356, 29359, 29361, 29363, 29365, 29367, 29369, 29372, 29375, 29378, 29381, 29384, 29387, 29389, 29391, 29393, 29395, 29397, 29399, 29401, 29405, 29408, 29410, 29412, 29415, 29418, 29420, 29424, 29427, 29430, 29432, 29434, 29436, 29440, 29443, 29446, 29449, 29451, 29453, 29456, 29459, 29461, 29463, 29465, 29467, 29470, 29473, 29475, 29477, 29481, 29484, 29486, 29488, 29490, 29492, 29495, 29498, 29500, 29502, 29505, 29508, 29510, 29512, 29514, 29516, 29526, 29528, 29530, 29532, 29534, 29544, 29546, 29548, 29551, 29558, 29561, 29564, 29566, 29568, 29573, 29575, 29577, 29579, 29581, 
29583, 29585, 29587, 29589, 29591, 29611, 29614, 29617, 29619, 29621, 29623, 29625, 29627, 29629, 29632, 29636, 29638, 29641, 29644, 29646, 29648, 29651, 29654, 29657, 29660, 29663, 29666, 29668, 29670, 29673, 29675, 29677, 29680, 29683, 29685, 29687, 29690, 29693, 29695, 29701, 29704, 29707, 29709, 29711, 29714, 29717, 29720, 29727, 29730, 29733, 29735, 29737, 29744, 29747, 29750, 29752, 29756, 29759, 29761, 29763, 29767, 29769, 29771, 29774, 29777, 29779, 29783, 29785, 29788, 29790, 29792, 29794, 29797, 29799, 29805, 29812, 29814, 29816, 29819, 29822, 29825, 29828, 29830, 29832, 29835, 29837, 29839, 29842, 29844, 29846, 29848, 29850, 29852, 29854, 29856, 29859, 29862, 29864, 29866, 29868, 29871, 29874, 29878, 29880, 29882, 29884, 29890, 29896, 29899, 29902, 29904, 29906, 29909, 29912, 29914, 29916, 29919, 29921, 29924, 29927, 29929, 29931, 29934, 29936, 29938, 29940, 29942, 29944, 29947, 29952, 29955, 29957, 29959, 29961, 29963, 29966, 29969, 29972, 29975, 29978, 29980, 29982, 29984, 29986, 29989, 29991, 29993, 29995, 29997, 29999, 30001, 30003, 30005, 30007, 30009, 30011, 30013, 30016, 30020, 30022, 30025, 30028, 30031, 30035, 30037, 30039, 30041, 30043, 30045, 30048, 30051, 30054, 30056, 30058, 30061, 30064, 30066, 30068, 30070, 30072, 30074, 30078, 30081, 30085, 30087, 30089, 30091, 30097, 30100, 30103, 30105, 30107, 30109, 30114, 30116, 30118, 30120, 30122, 30124, 30126, 30128, 30130, 30133, 30135, 30137, 30139, 30141, 30144, 30151, 30154, 30157, 30160, 30163, 30165, 30167, 30170, 30173, 30176, 30179, 30182, 30185, 30187, 30189, 30192, 30195, 30198, 30201, 30203, 30205, 30207, 30210, 30212, 30214, 30217, 30219, 30222, 30224, 30227, 30229, 30231, 30233, 30237, 30240, 30242, 30244, 30246, 30250, 30253, 30255, 30257, 30259, 30261, 30264, 30266, 30268, 30270, 30273, 30275, 30278, 30281, 30283, 30285, 30289, 30292, 30295, 30300, 30302, 30308, 30310, 30314, 30317, 30320, 30323, 30326, 30331, 30334, 30336, 30338, 30340, 30343, 30346, 30351, 30353, 30355, 30358, 
30371, 30374, 30377, 30379, 30381, 30383, 30385, 25328, 25328, 25330, 25329, 30396, 30399, 30402, 30404, 30406, 30408, 30410, 30413, 30416, 30419, 30421, 11892, 11893, 29724, 29741, 28376, 28374, 29722, 29739, 28384, 28382, 28419, 28417, 25410, 28443, 28452, 30425, 30427, 30429, 12004, 12005, 28468, 25465, 25463, 29887, 28565, 30111, 29888, 29886, 30111, 12067, 12068, 30297, 30297, 30435, 30437, 30439, 30441, 30443, 30445, 30447, 12178, 12179, 28669, 28670, 28689, 28690, 28695, 28709, 28707, 28765, 28779, 25797, 28895, 28805, 28803, 12244, 12245, 28895, 28879, 28886, 28895, 28912, 28919, 28918, 29043, 29055, 29064, 12426, 12427, 29344, 12438, 12439, 29402, 29421, 29437, 29478, 29519, 29517, 29523, 29521, 29537, 29535, 29541, 29539, 29555, 29553, 29570, 29569, 29594, 29592, 29598, 29596, 29602, 29600, 29606, 29605, 29604, 29609, 29608, 29698, 29696, 29724, 29722, 29741, 29739, 29753, 29780, 29800, 29802, 29765, 29764, 29780, 29800, 29802, 29809, 29807, 29869, 29888, 29887, 29886, 29885, 29891, 26977, 26975, 29949, 30076, 30075, 30094, 30092, 30111, 30148, 30146, 30297, 30303, 30312, 30220, 30234, 30297, 30305, 30312, 30311, 30248, 30324, 30328, 30348, 30248, 30324, 30328, 30262, 30328, 30348, 30286, 30297, 30305, 30303, 30312, 30311, 30324, 30328, 30348, 30457, 30459, 30461, 30464, 30468, 30470, 30472, 30475, 30478, 30480, 30486, 30488, 30490, 30493, 30496, 30498, 30502, 30504, 30506, 30508, 30510, 30512, 30514, 30517, 30519, 30521, 30523, 30525, 30527, 30530, 30532, 30534, 30536, 30538, 30540, 30542, 30545, 30547, 30549, 30551, 30553, 30555, 30557, 30562, 30565, 30568, 30570, 30572, 30574, 30576, 30578, 30581, 30584, 30586, 30588, 30590, 30592, 30594, 30596, 30598, 30600, 30602, 30604, 30606, 30608, 30610, 30612, 30614, 30616, 30619, 30622, 30625, 30627, 30631, 30634, 30638, 30640, 30642, 30644, 30646, 30648, 30650, 30652, 30655, 30658, 30660, 30662, 30664, 30666, 30668, 30670, 30672, 30674, 30676, 30678, 30680, 30682, 30684, 30686, 30688, 30690, 30692, 30694, 
30696, 30699, 30702, 30704, 30706, 30708, 30710, 30712, 30714, 30716, 30718, 30720, 30722, 30724, 30726, 30728, 30730, 30732, 30734, 30736, 30738, 30740, 30742, 30744, 30746, 30749, 30752, 30755, 30361, 30360, 27476, 30364, 30363, 30366, 30365, 30369, 30368, 30367, 30387, 30386, 30389, 30388, 30391, 30390, 30394, 30393, 30392, 30758, 30760, 30762, 30765, 30768, 30770, 30772, 30775, 30778, 30780, 30782, 30785, 30787, 30790, 30792, 30794, 30796, 30798, 30801, 30804, 30807, 30809, 30811, 30813, 30815, 30818, 30821, 30823, 30825, 30828, 30830, 30832, 30834, 30836, 14592, 14593, 14596, 14597, 14598, 30483, 30465, 30483, 30481, 30499, 27642, 27658, 30559, 30628, 30628, 30653, 15004, 15005, 15008, 15009, 15010, 30850, 30852, 30854, 30856, 30858, 30861, 30864, 30866, 30868, 30871, 30873, 30875, 30878, 30880, 30882, 30885, 30887, 30889, 30892, 30894, 30896, 30899, 30902, 30904, 30906, 30908, 30910, 30913, 30916, 30918, 30920, 30922, 30924, 30927, 30930, 30933, 30936, 30939, 30941, 30943, 30946, 30949, 30951, 30953, 30956, 30959, 30962, 30965, 30968, 30970, 30972, 30974, 30976, 30978, 30980, 30985, 30987, 30989, 30991, 30993, 30995, 30997, 30999, 31001, 31003, 31005, 31007, 31009, 31011, 15455, 15459, 31018, 31021, 31023, 31025, 31027, 31032, 31034, 31036, 31038, 31040, 31044, 31046, 31048, 31050, 31052, 31054, 31056, 31058, 31060, 31063, 31066, 31068, 31070, 30847, 30982, 31076, 31078, 31080, 31083, 31085, 31087, 31090, 31092, 31094, 31096, 31098, 31100, 31103, 31105, 31107, 31110, 31112, 31114, 31116, 31118, 31120, 31123, 31014, 31013, 31012, 31030, 31029, 31028, 31073, 31072, 31071, 31127, 31126, 31125, 31130, 31133, 31136, 31139, 31142, 31145, 31148, 31151, 31154, 31157, 31160, 31163, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 11698, 11699, 11700, 11701, 31702, 31171, 31707, 31168, 31170, 31169, 31962, 11894, 31702, 31171, 11897, 31707, 31706, 31173, 31172, 31175, 31174, 31176, 11905, 11906, 11907, 11908, 31178, 11910, 11911, 31180, 31182, 31185, 31184, 31186, 31188, 
31191, 31190, 31192, 11921, 11922, 31193, 11924, 28811, 31197, 31349, 31351, 31352, 31355, 31196, 31357, 31356, 31198, 31359, 31361, 31360, 31362, 31363, 31364, 31366, 28811, 31197, 31349, 31351, 31352, 31355, 31354, 31357, 31356, 31198, 31359, 31361, 31360, 31362, 31363, 31364, 31366, 31200, 31202, 11961, 31203, 11963, 31281, 31497, 31498, 31206, 31420, 31423, 31422, 31424, 31425, 31427, 31429, 31428, 31208, 31863, 31862, 31865, 31209, 31867, 31210, 31869, 31868, 31871, 31870, 31873, 31872, 31874, 31876, 31878, 31877, 31879, 31980, 12006, 12007, 12008, 31212, 31211, 31214, 31213, 31215, 31217, 31219, 31218, 31220, 31221, 31223, 28506, 31226, 31734, 31227, 31230, 31229, 31231, 31232, 31234, 31236, 31238, 31240, 31242, 31241, 31243, 31245, 31247, 31248, 28558, 31250, 12040, 12041, 12042, 31252, 31253, 28572, 31257, 31256, 31258, 31259, 28586, 31263, 31262, 31264, 31265, 31266, 31268, 31270, 12058, 12059, 12060, 31846, 31272, 28615, 31276, 31275, 31277, 31991, 31910, 30225, 31892, 31915, 31914, 31916, 31917, 31883, 12077, 28625, 31910, 30225, 31892, 31915, 31914, 31882, 31917, 31883, 12087, 28627, 28920, 31392, 31391, 31394, 31393, 31278, 31396, 31397, 31400, 31399, 25634, 31494, 31493, 28920, 31392, 31391, 31394, 31393, 31396, 31395, 31397, 31400, 31399, 25637, 31494, 31493, 25638, 25639, 31480, 29149, 31483, 31486, 31485, 31487, 31489, 31491, 31494, 31493, 31281, 31283, 31497, 31499, 31501, 31503, 31505, 31507, 31509, 31285, 31284, 32002, 31286, 28649, 31560, 31290, 28658, 31565, 31568, 31293, 31294, 12189, 31296, 12191, 31297, 31298, 31300, 31299, 28684, 12197, 31302, 12199, 31303, 12201, 31305, 31304, 31306, 12205, 12206, 31309, 31308, 31311, 31310, 31312, 31314, 31316, 31318, 31321, 31320, 31322, 31324, 31327, 31326, 28759, 31378, 12223, 31380, 31330, 31342, 31332, 31333, 12229, 31334, 31335, 12232, 31368, 31380, 12235, 31338, 31342, 31341, 31340, 31343, 12241, 12242, 31345, 32017, 28811, 31348, 31349, 31351, 31352, 31355, 31354, 31357, 31356, 31359, 31358, 
31361, 31360, 31363, 31362, 31364, 31366, 31368, 31380, 12265, 31370, 31372, 31374, 12269, 31387, 31377, 31376, 12273, 31378, 12275, 31380, 31381, 31383, 31385, 12280, 31387, 31388, 12283, 12284, 28920, 31392, 31391, 31394, 31393, 31396, 31395, 31397, 31400, 31399, 31401, 31404, 31403, 31405, 31407, 31410, 31409, 31411, 31412, 31414, 31416, 31415, 31417, 31495, 31498, 31497, 31418, 31420, 31423, 31422, 31424, 31425, 31427, 31429, 31428, 31430, 31512, 31431, 31432, 31433, 31435, 31436, 31437, 31438, 31440, 12330, 29045, 31442, 31444, 12334, 29057, 31512, 31511, 31446, 12339, 31516, 31447, 31449, 31450, 31451, 31453, 31455, 31454, 31456, 31457, 31458, 31462, 31461, 31460, 31463, 31465, 31464, 31467, 31466, 31468, 29121, 29124, 31472, 31474, 29133, 29136, 31477, 31479, 31480, 29149, 31483, 31486, 31485, 31487, 31489, 31491, 31494, 31493, 31495, 31498, 31497, 31499, 31501, 31503, 31505, 31507, 31509, 31512, 31511, 31514, 31513, 29224, 31516, 31518, 31520, 31521, 31523, 31524, 26279, 31526, 26285, 31529, 31528, 31531, 31530, 31532, 31533, 31535, 31537, 31536, 31538, 31539, 29285, 31541, 31543, 31545, 29299, 29302, 31548, 31550, 31551, 31553, 29320, 31556, 31568, 31567, 32029, 31558, 31559, 31560, 31563, 31562, 12433, 31564, 31565, 31568, 31567, 32032, 31569, 29357, 31572, 31575, 31574, 31577, 31576, 31579, 31578, 31581, 31580, 31582, 31585, 31584, 31586, 31588, 31590, 12457, 31591, 31594, 31593, 31595, 12462, 31598, 31597, 31600, 31599, 31601, 12468, 31604, 31603, 31605, 31606, 31609, 31608, 31610, 31612, 31615, 31614, 31618, 31617, 31616, 31619, 12483, 31620, 31622, 31625, 31624, 31626, 31629, 31628, 31631, 31630, 31634, 31633, 31632, 12496, 12497, 12498, 12499, 31636, 31635, 31639, 31638, 31637, 12505, 12506, 12507, 12508, 31640, 31688, 31646, 31643, 31642, 12514, 12515, 31644, 31645, 31688, 31646, 31648, 31647, 12522, 12523, 31649, 31660, 31659, 31651, 31663, 31655, 31654, 31658, 31657, 31656, 12534, 12535, 12536, 12537, 12538, 12539, 12540, 12541, 12542, 12543, 
12544, 31660, 31659, 31661, 31663, 31666, 31665, 31668, 31667, 29634, 31671, 31670, 31672, 31674, 31676, 31675, 31677, 31679, 31678, 31682, 31681, 31680, 31685, 31684, 31683, 31686, 31688, 31687, 31690, 31689, 31691, 12575, 12576, 31694, 31693, 31695, 31698, 31697, 31700, 31699, 12584, 12585, 31702, 31701, 31703, 31705, 12590, 12591, 31707, 31706, 31708, 12595, 31715, 31710, 31717, 31716, 31719, 31718, 12602, 31720, 31721, 29757, 31723, 31725, 31712, 12609, 31728, 12611, 12612, 12613, 31715, 31714, 31717, 31716, 31719, 31718, 12620, 31721, 31720, 29786, 31723, 31725, 31726, 12627, 31728, 12629, 12630, 12631, 31730, 31729, 31732, 31731, 31734, 31733, 31736, 31735, 31737, 31738, 31740, 31741, 31743, 31745, 31747, 31749, 31748, 31750, 31752, 12651, 31755, 31754, 29876, 31757, 31759, 12657, 12658, 12659, 12660, 31760, 12662, 12663, 12664, 31762, 31761, 31764, 31763, 31766, 31765, 31768, 31767, 31769, 31770, 31772, 31771, 31774, 31773, 31775, 31776, 31777, 31779, 31781, 31782, 12685, 27043, 29953, 27049, 31787, 31786, 31789, 31788, 31791, 31790, 31792, 31793, 31797, 31796, 31795, 31798, 31800, 31803, 31802, 31804, 31806, 31805, 31807, 31808, 31811, 31810, 30018, 31813, 31815, 31814, 31816, 30033, 31818, 31820, 31822, 31824, 31823, 31825, 31828, 31827, 31829, 31831, 31833, 12728, 12729, 31836, 31835, 30083, 31838, 31840, 12735, 12736, 31842, 31841, 31843, 31845, 12741, 31846, 31847, 31850, 31849, 31851, 31855, 31854, 31853, 31857, 31856, 31859, 31858, 31861, 31860, 12756, 12757, 31863, 31862, 31865, 31864, 31867, 31866, 31869, 31868, 31871, 31870, 31873, 31872, 31874, 31876, 31878, 31877, 31879, 31910, 30225, 31892, 31915, 31914, 31882, 31917, 31883, 12783, 31919, 31884, 30276, 30279, 31886, 31915, 12790, 31887, 12792, 12793, 31923, 31889, 30225, 31892, 31915, 31894, 12800, 31895, 31918, 12803, 31919, 12805, 31921, 12807, 12808, 31923, 31925, 31905, 12812, 12813, 12814, 31927, 31900, 31929, 31907, 31896, 31897, 31908, 12822, 31902, 31899, 31938, 31925, 31905, 12828, 
12829, 12830, 31927, 31900, 31929, 31907, 31906, 31908, 31933, 31934, 31902, 31938, 31904, 31903, 31925, 31905, 12845, 31926, 12847, 31927, 31928, 31929, 31907, 31906, 31908, 31933, 31909, 12856, 31935, 31938, 31937, 31910, 30276, 30279, 31915, 31914, 31916, 12866, 31918, 31917, 12869, 31919, 12871, 12872, 31921, 12874, 12875, 31923, 31925, 31924, 12879, 31926, 12881, 31927, 31928, 31929, 31931, 31933, 31932, 12888, 31934, 31935, 31938, 31937, 14070, 14071, 14072, 14073, 14074, 14075, 14076, 14077, 14078, 14079, 31940, 31939, 31942, 31941, 31943, 31945, 14106, 14107, 14108, 14109, 14110, 14111, 14112, 14113, 14114, 31951, 31950, 31952, 31953, 31955, 31957, 31956, 31960, 31959, 31958, 27540, 31976, 31978, 27548, 31997, 31996, 31995, 32306, 31999, 31998, 32308, 32000, 32240, 32241, 32243, 32126, 32125, 32128, 32127, 32130, 32122, 32121, 14907, 32123, 32124, 32134, 32133, 32136, 32135, 14914, 32126, 32125, 32128, 32127, 32130, 32129, 14921, 14922, 32131, 32134, 32133, 32136, 32135, 14928, 32139, 32138, 32137, 32140, 32143, 32142, 14935, 32145, 32144, 32146, 32149, 32148, 14941, 32151, 32150, 32152, 32155, 32154, 32156, 32158, 32157, 32159, 32163, 32162, 32161, 14954, 32165, 32164, 32167, 32166, 32168, 32170, 32172, 32171, 32173, 32175, 32177, 32178, 14967, 32195, 32180, 30636, 32181, 32183, 32182, 32200, 32202, 32184, 32204, 32185, 32188, 32187, 32190, 32189, 32191, 32193, 32192, 14986, 32195, 32194, 30636, 32197, 32198, 32200, 32202, 14994, 32204, 32205, 32207, 32209, 32212, 32211, 32215, 32214, 32213, 32322, 32217, 32216, 32324, 32218, 32220, 32221, 32223, 32225, 32224, 32226, 32227, 32229, 32249, 32230, 32233, 32232, 32231, 32235, 32234, 32237, 32236, 32238, 32240, 32241, 32243, 32244, 32247, 32246, 32249, 32248, 32251, 32250, 32271, 32273, 32274, 32275, 32277, 32278, 32279, 32281, 32282, 32283, 32373, 32372, 32284, 32286, 32289, 32288, 32373, 32372, 32290, 32291, 32293, 32296, 32295, 32297, 32300, 32299, 32301, 32303, 15860, 32326, 32328, 32331, 32330, 32332, 
32334, 32335, 32337, 32338, 32340, 32341, 32343, 32344, 32347, 32346, 32348, 32350, 32353, 32352, 32355, 32354, 32357, 32356, 32359, 32358, 32360, 32362, 32361, 32363, 32366, 32365, 32369, 32368, 32367, 32371, 32370, 32373, 32372, 32374, 32377, 32376, 32378, 15903, 32380, 32382, 32381, 32383, 32385, 32387, 32390, 32389, 32391, 32394, 32393, 15993, 15994, 15995, 32397, 31019, 32400, 32399, 32401, 16010, 16011, 16012, 32402, 32403, 32406, 32405, 31041, 31042, 32408, 32407, 32409, 32410, 32412, 32413, 32415, 31061, 31064, 32418, 32417, 32419, 16035, 16036, 16037, 32422, 32424, 32425, 32427, 32428, 32429, 32431, 32433, 32434, 32436, 32437, 32438, 32440, 32443, 32442, 16327, 16328, 16329, 32457, 32456, 32459, 32458, 32461, 32460, 32463, 32462, 32465, 32464, 32467, 32466, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 32481, 32483, 11886, 11887, 11888, 11889, 11890, 11891, 11895, 11896, 11898, 11899, 11900, 11901, 11902, 11903, 11904, 32503, 11909, 32508, 11912, 11913, 11914, 11915, 11916, 11917, 11918, 11919, 11920, 32519, 11923, 11925, 11926, 11927, 11928, 11929, 11930, 11931, 11932, 11933, 11934, 11935, 11936, 11937, 11938, 11939, 11940, 11941, 11942, 11943, 11944, 11945, 11946, 11947, 11948, 11949, 11950, 11951, 11952, 11953, 11954, 11955, 11956, 11957, 11958, 11959, 11960, 11962, 11964, 11965, 11966, 11967, 11968, 11969, 11970, 11971, 11972, 11973, 11974, 11975, 11976, 11977, 11978, 11979, 11980, 11981, 11982, 11983, 11984, 11985, 11986, 11987, 11988, 11989, 11990, 11991, 11992, 11993, 32594, 12009, 12010, 12011, 12012, 12013, 12014, 12015, 12016, 12017, 12018, 12019, 12020, 12021, 12022, 12023, 12024, 12025, 12026, 12027, 12028, 12029, 12030, 12031, 12032, 12033, 12034, 12035, 12036, 12037, 12038, 12039, 32627, 12043, 12044, 12045, 12046, 12047, 12048, 12049, 12050, 12051, 12052, 12053, 12054, 12055, 12056, 12057, 32645, 12061, 12062, 12063, 12064, 12065, 12066, 12069, 12070, 12071, 12072, 12073, 12074, 12075, 12076, 12078, 12079, 12080, 12081, 12082, 12083, 12084, 12085, 
12086, 12088, 12129, 12130, 12131, 12132, 12133, 12134, 12135, 12136, 12137, 12138, 12139, 12140, 12141, 12142, 12143, 12144, 12145, 12146, 12147, 12148, 12149, 12150, 12151, 12152, 12153, 12154, 12155, 12156, 12157, 12158, 12159, 12160, 12161, 12162, 12163, 12164, 12165, 12166, 12167, 12168, 12169, 12170, 12171, 12172, 12173, 12174, 12175, 12176, 12177, 12180, 12181, 12182, 12183, 12184, 12185, 12186, 12187, 12188, 12190, 12192, 12193, 12194, 12195, 12196, 12198, 12200, 12202, 12203, 12204, 32750, 12207, 12208, 12209, 12210, 12211, 12212, 12213, 12214, 12215, 12216, 12217, 12218, 12219, 12220, 12221, 12222, 12224, 12225, 12226, 12227, 12228, 12230, 12231, 12233, 12234, 12236, 12237, 12238, 12239, 12240, 32786, 12243, 12246, 12247, 12248, 12249, 12250, 12251, 12252, 12253, 12254, 12255, 12256, 12257, 12258, 12259, 12260, 12261, 12262, 12263, 12264, 12266, 12267, 12268, 12270, 12271, 12272, 12274, 12276, 12277, 12278, 12279, 12281, 12282, 32827, 12285, 12286, 12287, 12288, 12289, 12290, 12291, 12292, 12293, 12294, 12295, 12296, 12297, 12298, 12299, 12300, 12301, 12302, 12303, 12304, 12305, 12306, 12307, 12308, 12309, 12310, 12311, 12312, 12313, 12314, 12315, 12316, 12317, 12318, 12319, 12320, 12321, 12322, 12323, 12324, 12325, 12326, 12327, 12328, 12329, 12331, 12332, 12333, 12335, 12336, 12337, 12338, 12340, 12341, 12342, 12343, 12344, 12345, 12346, 12347, 12348, 12349, 12350, 12351, 12352, 12353, 12354, 12355, 12356, 12357, 12358, 12359, 12360, 12361, 12362, 12363, 12364, 12365, 12366, 12367, 12368, 12369, 12370, 12371, 12372, 12373, 12374, 12375, 12376, 12377, 12378, 12379, 12380, 12381, 12382, 12383, 12384, 12385, 12386, 12387, 12388, 12389, 12390, 12391, 12392, 12393, 12394, 12395, 12396, 12397, 12398, 12399, 12400, 12401, 12402, 12403, 12404, 12405, 12406, 12407, 12408, 12409, 12410, 12411, 12412, 12413, 12414, 12415, 12416, 12417, 12418, 12419, 12420, 12421, 12422, 12423, 12424, 12425, 12428, 12429, 12430, 12431, 12432, 12434, 12435, 12436, 12437, 12440, 
12441, 12442, 12443, 12444, 12445, 12446, 12447, 12448, 12449, 12450, 12451, 12452, 12453, 12454, 12455, 12456, 12458, 12459, 12460, 12461, 12463, 12464, 12465, 12466, 12467, 12469, 12470, 12471, 12472, 12473, 12474, 12475, 12476, 12477, 12478, 12479, 12480, 12481, 12482, 12484, 12485, 12486, 12487, 12488, 12489, 12490, 12491, 12492, 12493, 12494, 12495, 33038, 33040, 12500, 12501, 12502, 12503, 12504, 33047, 33049, 12509, 12510, 12511, 12512, 12513, 33056, 12516, 12517, 12518, 12519, 12520, 12521, 33064, 12524, 12525, 12526, 12527, 12528, 12529, 12530, 12531, 12532, 12533, 33076, 33078, 33080, 33082, 33085, 12545, 12546, 12547, 12548, 12549, 12550, 12551, 12552, 12553, 12554, 12555, 12556, 12557, 12558, 12559, 12560, 12561, 12562, 12563, 12564, 12565, 12566, 12567, 12568, 12569, 12570, 12571, 12572, 12573, 12574, 33117, 12577, 12578, 12579, 12580, 12581, 12582, 12583, 33126, 12586, 12587, 12588, 12589, 33132, 12592, 12593, 12594, 12596, 12597, 12598, 12599, 12600, 12601, 12603, 12604, 12605, 12606, 12607, 12608, 12610, 33154, 12614, 12615, 12616, 12617, 12618, 12619, 12621, 12622, 12623, 12624, 12625, 12626, 12628, 33172, 12632, 12633, 12634, 12635, 12636, 12637, 12638, 12639, 12640, 12641, 12642, 12643, 12644, 12645, 12646, 12647, 12648, 12649, 12650, 12652, 12653, 12654, 12655, 12656, 33199, 33201, 12661, 33205, 12665, 12666, 12667, 12668, 12669, 12670, 12671, 12672, 12673, 12674, 12675, 12676, 12677, 12678, 12679, 12680, 12681, 12682, 12683, 12684, 12686, 12687, 12688, 12689, 12690, 12691, 12692, 12693, 12694, 12695, 12696, 12697, 12698, 12699, 12700, 12701, 12702, 12703, 12704, 12705, 12706, 12707, 12708, 12709, 12710, 12711, 12712, 12713, 12714, 12715, 12716, 12717, 12718, 12719, 12720, 12721, 12722, 12723, 12724, 12725, 12726, 12727, 33270, 12730, 12731, 12732, 12733, 12734, 33277, 12737, 12738, 12739, 12740, 12742, 12743, 12744, 12745, 12746, 12747, 12748, 12749, 12750, 12751, 12752, 12753, 12754, 12755, 33298, 12758, 12759, 12760, 12761, 12762, 12763, 
12764, 12765, 12766, 12767, 12768, 12769, 12770, 12771, 12772, 12773, 12774, 12775, 12776, 12777, 12778, 12779, 12780, 12781, 12782, 12784, 12785, 12786, 12787, 12788, 12789, 12791, 33334, 12794, 12795, 12796, 12797, 12798, 12799, 12801, 12802, 12804, 12806, 33349, 12809, 12810, 12811, 33354, 12815, 12816, 12817, 12818, 12819, 12820, 12821, 12823, 12824, 12825, 12826, 12827, 33370, 12831, 12832, 12833, 12834, 12835, 12836, 12837, 12838, 12839, 12840, 12841, 12842, 12843, 12844, 12846, 12848, 12849, 12850, 12851, 12852, 12853, 12854, 12855, 12857, 12858, 12859, 12860, 12861, 12862, 12863, 12864, 12865, 12867, 12868, 12870, 33413, 12873, 33416, 12876, 12877, 12878, 12880, 12882, 12883, 12884, 12885, 12886, 12887, 12889, 12890, 12891, 12892, 33435, 33438, 33440, 33442, 14080, 14081, 14082, 14083, 14084, 14085, 33451, 33453, 33455, 33457, 14115, 14116, 14117, 14118, 14119, 14120, 14121, 14122, 14123, 14124, 14536, 14537, 14538, 14588, 14589, 14590, 14591, 14594, 14595, 32309, 14599, 14600, 14601, 14602, 14900, 14901, 14902, 14903, 14904, 14905, 14906, 14908, 14909, 14910, 14911, 14912, 14913, 14915, 14916, 14917, 14918, 14919, 14920, 33506, 14923, 14924, 14925, 14926, 14927, 14929, 14930, 14931, 14932, 14933, 14934, 14936, 14937, 14938, 14939, 14940, 14942, 14943, 14944, 14945, 14946, 14947, 14948, 14949, 14950, 14951, 14952, 14953, 14955, 14956, 14957, 14958, 14959, 14960, 14961, 14962, 14963, 14964, 14965, 14966, 14968, 14969, 14970, 14971, 14972, 14973, 14974, 14975, 14976, 14977, 14978, 14979, 14980, 14981, 14982, 14983, 14984, 14985, 14987, 14988, 14989, 14990, 14991, 14992, 14993, 14995, 14996, 14997, 14998, 14999, 15000, 15001, 15002, 15003, 15006, 15007, 32325, 15011, 15012, 15013, 15014, 15015, 15016, 15017, 15018, 15019, 15020, 15021, 15022, 15023, 15024, 15025, 15026, 15027, 15028, 15029, 15030, 15031, 15032, 15033, 15034, 15035, 15036, 15037, 15038, 15039, 15453, 15454, 15456, 15457, 15458, 15460, 15461, 15462, 15463, 15464, 15465, 15466, 15467, 15468, 
15469, 15470, 15471, 15472, 15521, 15522, 15523, 15524, 15525, 15526, 15527, 15528, 15529, 15530, 15861, 15862, 15863, 15864, 15865, 15866, 15867, 15868, 15869, 15870, 15871, 15872, 15873, 15874, 15875, 15876, 15877, 15878, 15879, 15880, 15881, 15882, 15883, 15884, 15885, 15886, 15887, 15888, 15889, 15890, 15891, 15892, 15893, 15894, 15895, 15896, 15897, 15898, 15899, 15900, 15901, 15902, 15904, 15905, 15906, 15907, 15908, 15909, 15910, 15911, 15990, 15991, 15992, 33705, 16005, 16006, 16007, 16008, 16009, 33713, 16013, 16014, 16015, 16016, 16017, 16018, 16019, 16020, 16021, 16026, 16027, 16028, 16029, 16030, 16031, 16032, 16033, 16034, 33734, 16312, 16313, 16314, 16315, 16316, 16317, 16318, 16319, 16320, 16321, 16322, 16323, 16324, 16325, 16326, 33752, 16912, 16913, 16919, 16920, 16924, 16925, 16929, 16930, 16934, 16935, 16969, 16970, 9, 10, 11, 12, 13, 14, 15, 33779, 33781, 33783, 33785, 33787, 33789, 33791, 33799, 33803, 33813, 33815, 33817, 33819, 33821, 33830, 33832, 33834, 33836, 33838, 32558, 33846, 33850, 33855, 33858, 33860, 33862, 33864, 33866, 33868, 33872, 33876, 33878, 33882, 33891, 33899, 33907, 33911, 33916, 33923, 33927, 33933, 33936, 33942, 33945, 33949, 33951, 33953, 33956, 33959, 33962, 33964, 33966, 33969, 33972, 33979, 33984, 33987, 33995, 34003, 34005, 34009, 34011, 34014, 34018, 34020, 34026, 34030, 34033, 34036, 34038, 32779, 34044, 34055, 34057, 34059, 34061, 34063, 32808, 34071, 34073, 34075, 34079, 34084, 34086, 34088, 34091, 34094, 34098, 34103, 34107, 34111, 34116, 34119, 34132, 34141, 34146, 34150, 34152, 34166, 34171, 34174, 34182, 34184, 34196, 34198, 34203, 34219, 34224, 34225, 34228, 34233, 34235, 34237, 34239, 34242, 32998, 34248, 34251, 34253, 34256, 34260, 34264, 34266, 33024, 34272, 34275, 34277, 34279, 34284, 34286, 34292, 34294, 34299, 34301, 34305, 34309, 34311, 33083, 34319, 34323, 34325, 34328, 34332, 34335, 34337, 34340, 34344, 34346, 34350, 34353, 34355, 34358, 34363, 34366, 34368, 34370, 34372, 33152, 34380, 34382, 
34384, 34386, 33170, 34394, 34396, 34398, 34400, 34409, 34413, 34418, 34422, 34424, 34426, 34428, 34432, 34434, 34445, 34447, 34449, 34453, 34458, 34461, 34465, 34469, 34476, 34479, 34485, 34491, 34494, 34497, 34500, 34503, 34505, 34507, 34510, 34512, 34514, 34516, 34518, 34520, 34524, 34530, 34533, 34539, 34547, 34549, 34555, 34557, 34561, 34563, 34566, 34568, 34570, 34574, 34576, 34580, 34583, 34584, 34585, 34589, 34591, 33397, 34595, 34600, 33407, 34603, 34610, 34611, 34612, 34617, 34618, 34621, 33443, 34627, 34629, 34442, 33203, 34401, 34437, 34466, 34442, 33203, 34401, 34437, 34466, 33458, 34637, 34642, 34644, 33792, 32735, 32743, 34015, 34021, 33796, 33804, 33806, 33809, 33807, 33822, 33826, 33824, 33839, 33843, 33847, 33852, 33869, 34442, 32592, 33883, 33885, 33886, 34442, 33203, 34401, 34437, 34466, 33893, 33895, 33900, 33902, 33904, 34416, 33908, 33913, 33918, 33920, 34416, 33924, 33929, 32662, 33938, 32672, 34651, 34654, 33975, 33980, 33988, 33990, 33992, 33996, 33998, 34000, 32735, 32743, 34015, 34021, 34023, 34051, 34031, 34064, 34039, 34046, 34048, 34051, 34049, 34064, 34068, 34076, 34080, 34095, 34100, 34108, 34113, 34123, 34121, 34125, 32873, 34128, 32877, 32882, 34135, 34137, 34142, 34154, 34156, 34158, 34160, 34162, 34167, 34175, 34177, 34179, 34185, 34189, 34187, 34193, 34191, 34199, 34204, 34206, 34208, 34210, 34212, 34214, 34216, 34220, 34229, 34249, 34254, 34261, 34281, 34288, 34320, 34290, 34306, 34295, 34320, 34329, 34306, 34313, 34320, 34329, 34347, 34359, 34364, 34376, 34374, 34390, 34388, 34401, 34403, 34405, 34410, 34414, 34442, 33203, 34429, 34435, 34437, 34466, 34442, 33226, 34455, 34466, 34471, 34473, 34481, 34486, 34488, 34521, 34526, 33324, 34535, 33331, 34541, 34543, 33344, 33346, 34552, 34596, 33410, 34605, 34607, 34614, 34661, 34663, 34665, 34667, 34670, 34672, 34674, 34676, 34678, 34682, 34684, 34686, 34690, 34692, 34695, 34697, 34700, 34703, 34706, 34709, 34711, 34715, 34721, 34725, 34732, 34734, 34737, 34739, 34750, 34752, 
34755, 34762, 34767, 34769, 34772, 34774, 34781, 34783, 34785, 34622, 34623, 34624, 34630, 34632, 34633, 34634, 34639, 34797, 34801, 34803, 34808, 34811, 34646, 34647, 34649, 34658, 34656, 34712, 34717, 34719, 34726, 34728, 34741, 34743, 33578, 34746, 34759, 34757, 34764, 34777, 34775, 34817, 34828, 34832, 34834, 34836, 34838, 34841, 34844, 34846, 34849, 34851, 34854, 34856, 34858, 34863, 34866, 33706, 34786, 32395, 34789, 32396, 34792, 34794, 34871, 33714, 34877, 34881, 34804, 34812, 34890, 33735, 33649, 34818, 34820, 34822, 34824, 34829, 34859, 34907, 33753, 34868, 34899, 34885, 34883, 34897, 34903, 34885, 34883, 34897, 34901, 34903, 34895, 34893, 34897, 34901, 34899, 34903, 34910, 34912, 34914, 34916, 34918, 34920, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 34045, 34147, 34267, 34280, 34287, 34312, 34338, 34341, 34454, 34501, 34581, 14086, 14087, 35091, 35106, 33776, 14092, 14093, 35111, 14095, 14096, 14097, 35091, 35106, 33777, 14102, 14103, 35111, 14105, 34645, 32491, 32494, 34930, 32491, 32494, 34933, 14495, 32504, 32505, 14498, 33794, 34007, 14501, 34010, 32745, 14504, 34991, 14506, 14507, 34935, 33801, 14510, 14511, 14512, 14513, 34938, 33811, 34940, 14517, 14518, 14519, 34943, 33828, 34945, 14523, 33841, 14525, 33844, 14527, 14528, 34949, 34950, 34951, 34953, 34955, 34957, 14535, 32591, 14540, 14541, 35091, 34959, 33880, 14546, 14547, 14548, 35111, 14550, 14551, 35091, 35106, 33888, 14556, 14557, 35111, 14559, 34961, 14561, 14562, 33897, 14564, 14565, 14566, 14567, 34419, 14569, 34964, 14571, 34965, 14573, 14574, 14575, 34419, 14577, 34967, 32653, 14580, 33931, 33934, 14583, 14584, 33940, 33943, 14587, 34652, 33947, 34973, 33954, 33957, 33960, 34978, 33967, 33970, 35039, 35039, 14613, 33977, 14615, 33982, 33985, 14618, 14619, 14620, 34985, 14622, 14623, 14624, 34986, 14626, 34004, 34007, 14629, 34010, 32745, 14632, 34991, 14634, 14635, 34993, 34028, 14638, 14639, 35001, 34053, 35003, 14643, 34032, 34034, 34037, 14647, 34040, 14650, 14651, 14652, 14653, 35001, 
34053, 35003, 14657, 34066, 14659, 34070, 35007, 34074, 14663, 34078, 14665, 34082, 35011, 34089, 34092, 14670, 14671, 35015, 35016, 34105, 14675, 14676, 35018, 35019, 35020, 14680, 14681, 14682, 14683, 14684, 14685, 35021, 14687, 14688, 14689, 34139, 14691, 34148, 35025, 14695, 14696, 14697, 14698, 14699, 34164, 14701, 34169, 34172, 14704, 14705, 14706, 35029, 14708, 14709, 14710, 14711, 14712, 35031, 14714, 34201, 14716, 14717, 14718, 14719, 14720, 14721, 14722, 35034, 14724, 34222, 35036, 35037, 14728, 34231, 35039, 35041, 35042, 34244, 34246, 14735, 35045, 14737, 35047, 34258, 14740, 35051, 34270, 34273, 14746, 14748, 34317, 14750, 35067, 34326, 14753, 35070, 35058, 35061, 34303, 14760, 34315, 14763, 34317, 14765, 35067, 34326, 14768, 35070, 35060, 35061, 34303, 14775, 34315, 14778, 34317, 14780, 35067, 34326, 14783, 35070, 35074, 14788, 35076, 35077, 34356, 14792, 34361, 14794, 35081, 35084, 35083, 14798, 14799, 35085, 35086, 35089, 35088, 14804, 14805, 35090, 35091, 35093, 14809, 14810, 14811, 34407, 14813, 33192, 14815, 34416, 34419, 14818, 14819, 35104, 35106, 35100, 14824, 35102, 14826, 14827, 35111, 14829, 14830, 14831, 35104, 35106, 14835, 35109, 35108, 34463, 35111, 14840, 14841, 14842, 35112, 35113, 14845, 34483, 14847, 14848, 35115, 34493, 34495, 35119, 35121, 35122, 35124, 35126, 35128, 14859, 14860, 34528, 34531, 14863, 14864, 34537, 14866, 14867, 14868, 34545, 33341, 14871, 14872, 14873, 35135, 35134, 34559, 35137, 34564, 35140, 35139, 34572, 35142, 35146, 35144, 34587, 35148, 34593, 14889, 34598, 35152, 14892, 14893, 14894, 35156, 35154, 14897, 35157, 34619, 34666, 34687, 34707, 34753, 34770, 15421, 15422, 15423, 35160, 35161, 15426, 15435, 15436, 15437, 35173, 15439, 35174, 15594, 15595, 15638, 35222, 15641, 15642, 35343, 34668, 35348, 35349, 35351, 34680, 35353, 35355, 35356, 35357, 35358, 35359, 35360, 35362, 15831, 35364, 15833, 35365, 15835, 34723, 15837, 15838, 34730, 35368, 35370, 35369, 15843, 15844, 15845, 15846, 34748, 35373, 15850, 
15851, 15852, 35374, 35377, 15856, 15857, 34779, 35380, 34847, 15996, 15997, 15998, 15999, 16000, 16001, 35390, 34799, 35392, 16022, 34806, 34809, 16025, 16294, 34815, 16296, 16297, 16298, 16299, 34826, 16301, 35416, 35418, 34839, 34842, 35424, 35425, 35426, 16310, 34861, 34864, 35430, 34875, 16407, 16408, 35437, 35438, 35440, 34875, 16425, 16426, 16427, 16428, 35440, 35443, 16437, 16438, 16439, 16440, 16441, 35443, 35444, 16651, 16652, 16653, 16654, 16655, 16656, 34905, 35453, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 35500, 14088, 14089, 35099, 14091, 14094, 35509, 14098, 14099, 35099, 14101, 14104, 14489, 14490, 14491, 14492, 14493, 14494, 14496, 14497, 14499, 14500, 14502, 14503, 14505, 14508, 14509, 35542, 14514, 14515, 14516, 35548, 14520, 14521, 14522, 14524, 14526, 14529, 14530, 14531, 14532, 14533, 14534, 14539, 35567, 14542, 14543, 35099, 14545, 14549, 35576, 14552, 14553, 35099, 14555, 14558, 14560, 14563, 14568, 14570, 14572, 14576, 14578, 14579, 14581, 14582, 14585, 14586, 14603, 14604, 14605, 14606, 14607, 14608, 14609, 14610, 14611, 14612, 14614, 14616, 14617, 14621, 14625, 14627, 14628, 14630, 14631, 14633, 14636, 14637, 35649, 14640, 14641, 14642, 14644, 14645, 14646, 14648, 34042, 35662, 14654, 14655, 14656, 14658, 14660, 14661, 14662, 14664, 14666, 14667, 14668, 14669, 14672, 14673, 14674, 14677, 14678, 14679, 35690, 14686, 14690, 34144, 14693, 14694, 14700, 14702, 14703, 14707, 35718, 35720, 14713, 14715, 14723, 14725, 14726, 14727, 14729, 14730, 14731, 14732, 14733, 14734, 14736, 14738, 14739, 35049, 14742, 14743, 14744, 35054, 35056, 14749, 14751, 14752, 14754, 35071, 35495, 14757, 14758, 14759, 35063, 14762, 14764, 14766, 14767, 14769, 35071, 35495, 14772, 14773, 14774, 35063, 14777, 14779, 14781, 14782, 14784, 35071, 35495, 14787, 14789, 14790, 14791, 14793, 14795, 14796, 14797, 35796, 14800, 14801, 14802, 14803, 35802, 14806, 14807, 14808, 14812, 14814, 14816, 14817, 35816, 14820, 14821, 35099, 14823, 14825, 14828, 35827, 14832, 
14833, 34451, 14836, 14837, 14838, 14839, 14843, 14844, 14846, 14849, 14850, 14851, 34498, 14853, 14854, 14855, 14856, 14857, 14858, 14861, 14862, 14865, 14869, 14870, 14874, 14875, 14876, 14877, 14878, 14879, 14880, 14881, 14882, 34578, 14884, 14885, 14886, 14887, 14888, 14890, 14891, 14895, 14896, 14898, 14899, 15424, 15425, 15438, 15440, 35175, 35535, 35539, 35725, 35705, 35703, 35727, 35585, 35588, 35590, 35597, 35812, 15639, 35612, 35915, 35693, 35691, 35705, 35703, 35697, 35705, 35703, 35629, 35633, 35644, 35659, 35693, 35691, 35697, 35705, 35703, 35713, 35727, 35725, 35729, 35836, 35836, 35842, 35860, 35866, 35886, 15814, 35893, 15816, 15817, 15818, 15819, 15820, 15821, 35894, 15823, 15824, 15825, 15826, 15827, 15828, 35895, 15830, 15832, 15834, 15836, 15839, 15840, 15841, 15842, 15847, 15848, 35896, 35949, 15853, 35375, 15855, 35954, 15858, 15859, 16002, 16003, 16004, 16023, 16024, 35910, 35912, 35936, 35943, 16295, 16300, 16302, 16303, 16304, 16305, 35957, 16307, 16308, 16309, 16311, 16384, 16385, 35975, 35962, 35960, 35958, 35975, 35973, 16406, 16409, 16410, 16415, 16424, 16429, 16430, 36004, 16442, 16443, 35975, 35973, 36011, 36014, 16657, 16658, 35992, 35992, 36006, 12, 13, 14, 15, 14090, 35507, 14100, 35516, 36052, 36054, 36061, 36065, 36069, 35564, 14544, 36081, 14554, 35583, 36115, 36117, 36124, 14649, 36133, 36144, 36147, 14692, 14741, 14745, 14747, 14755, 14756, 14761, 35765, 14770, 14771, 14776, 35777, 14785, 14786, 36218, 36223, 14822, 35825, 14834, 36244, 35835, 14852, 35853, 36266, 36271, 14883, 36276, 36283, 36032, 36036, 36038, 36042, 36289, 15441, 36045, 36044, 36046, 36048, 36047, 36049, 36051, 36050, 35533, 15577, 36057, 15579, 36062, 36066, 36067, 36155, 15586, 36068, 15589, 15590, 15591, 36071, 36075, 36076, 36080, 36082, 36086, 36239, 35595, 35593, 15611, 35810, 15613, 35586, 36250, 15616, 36232, 36227, 36239, 35595, 35593, 15627, 35810, 35808, 36250, 15631, 35601, 36095, 35604, 36097, 35608, 36099, 15640, 36140, 36142, 36143, 36146, 
36149, 15650, 15651, 36152, 15654, 15655, 36155, 36100, 36102, 36104, 36106, 36143, 36146, 36151, 15666, 36152, 15669, 15670, 36155, 36168, 36168, 35623, 35625, 36112, 15677, 36113, 15679, 36114, 35642, 15684, 36120, 36125, 36126, 36128, 15691, 36134, 36135, 36136, 36138, 36139, 36140, 36142, 36143, 36146, 36149, 15705, 15706, 36151, 15708, 36152, 15711, 15712, 36155, 35707, 35709, 36158, 15717, 36159, 36160, 36163, 36162, 15722, 15723, 15724, 36164, 36165, 36167, 36168, 36170, 36172, 35745, 35743, 36176, 36179, 36183, 36184, 35758, 36189, 36191, 36194, 36195, 35770, 36200, 36202, 36205, 36206, 35782, 36211, 36212, 36215, 36214, 36219, 36224, 36232, 36227, 36239, 15772, 35807, 35810, 35808, 36250, 35812, 36232, 36237, 36236, 36239, 15787, 36247, 35840, 36250, 15791, 36254, 36256, 35854, 36261, 35858, 15799, 35862, 15801, 36264, 36268, 36278, 35882, 15810, 36281, 36284, 15815, 15822, 15829, 35934, 36354, 15849, 36359, 15854, 36345, 36343, 36341, 36287, 36345, 36343, 36341, 36348, 36347, 16130, 16192, 36333, 36335, 36337, 36343, 36341, 36348, 36347, 16284, 36351, 16287, 35945, 36361, 36363, 16306, 35967, 16387, 16388, 16389, 16390, 36375, 36376, 36369, 36382, 35986, 35967, 16398, 16399, 36375, 36376, 36369, 36382, 35986, 35997, 36399, 35967, 36369, 35971, 16643, 16644, 36375, 36376, 36378, 36382, 35986, 16721, 36385, 36394, 16738, 36393, 36394, 36406, 36396, 36406, 16777, 36400, 36401, 36406, 36405, 36407, 12, 13, 14, 15, 15427, 36034, 15429, 35505, 15431, 36040, 15433, 35514, 15566, 15567, 15568, 15569, 15570, 15571, 15572, 15573, 36421, 36420, 15576, 15578, 36059, 15581, 36063, 15583, 15584, 15585, 15587, 36424, 36490, 15592, 36073, 15596, 15597, 36078, 15599, 35572, 15601, 36084, 15603, 35581, 15605, 36241, 35830, 36245, 15609, 15610, 15612, 15614, 15615, 15617, 36234, 15619, 35823, 15621, 36241, 35830, 36245, 15625, 15626, 15628, 15629, 15630, 15632, 15633, 15634, 15635, 15636, 15637, 36523, 15643, 15644, 15645, 36435, 15647, 36436, 15649, 36530, 36437, 15653, 
15656, 36533, 15657, 15658, 15659, 15660, 15661, 36435, 15663, 36436, 15665, 36437, 15668, 15671, 36545, 15672, 15673, 15674, 15675, 15676, 15678, 15680, 36431, 36430, 15683, 15685, 36122, 15687, 15688, 15689, 36129, 36131, 15693, 15694, 15695, 15696, 15697, 15698, 15699, 15700, 36435, 15702, 36436, 15704, 36574, 15707, 36437, 15710, 15713, 36579, 15714, 15715, 15716, 15718, 15719, 15720, 15721, 36590, 15725, 15726, 15727, 15728, 15729, 15730, 15731, 15732, 15733, 36438, 15735, 36440, 36439, 15738, 15739, 36441, 15741, 15742, 15743, 36443, 15745, 15746, 36445, 15748, 15749, 15750, 36447, 15752, 15753, 36449, 15755, 15756, 15757, 15758, 15759, 36216, 15761, 36221, 15763, 15764, 36234, 15766, 35823, 15768, 36241, 35830, 36245, 15773, 15774, 15775, 15776, 15777, 15778, 36234, 15780, 15781, 35823, 15783, 36241, 35830, 36245, 15788, 15789, 15790, 36252, 15793, 15794, 36258, 15796, 15797, 15798, 15800, 15802, 36460, 15804, 36461, 36273, 36463, 15808, 15809, 15811, 36464, 15813, 36659, 15975, 15976, 15977, 36655, 15979, 15984, 15985, 15986, 36655, 36469, 15989, 16091, 36331, 16275, 16276, 16277, 36345, 16279, 16280, 36655, 16282, 16283, 36657, 16286, 36658, 16289, 36660, 16292, 16293, 16386, 36689, 36691, 16391, 16392, 16393, 36686, 16395, 16396, 16397, 36699, 16400, 16401, 16402, 36686, 16404, 16405, 35998, 16435, 16436, 16642, 36711, 16645, 16646, 16647, 36686, 16649, 16650, 16722, 16723, 16739, 16740, 16751, 16753, 16764, 36706, 16778, 16779, 16873, 16874, 16875, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15428, 15430, 15432, 15434, 36745, 36748, 36751, 15574, 15575, 15580, 15582, 15588, 15593, 15598, 15600, 15602, 15604, 15606, 15607, 15608, 36781, 36503, 36506, 15618, 15620, 15622, 15623, 15624, 36794, 36796, 36516, 15646, 15648, 15652, 36815, 15662, 15664, 15667, 36828, 15681, 15682, 15686, 15690, 15692, 15701, 15703, 15709, 36863, 36871, 36880, 15734, 15736, 15737, 15740, 15744, 15747, 15751, 15754, 36907, 15760, 15762, 15765, 15767, 15769, 15770, 15771, 36922, 
36924, 15779, 36928, 15782, 15784, 15785, 15786, 36638, 15792, 15795, 36945, 15803, 15805, 15806, 15807, 36953, 15812, 15978, 36958, 15987, 36963, 15988, 36874, 36591, 36876, 36886, 36893, 36900, 36554, 36874, 36876, 36886, 36893, 36900, 36874, 36591, 36876, 36886, 36893, 36900, 36480, 36843, 36850, 36848, 36852, 36860, 36858, 36832, 36834, 36868, 36557, 36843, 36850, 36848, 36852, 36860, 36858, 36832, 36834, 36868, 36798, 36943, 36941, 36798, 36943, 36802, 36800, 36804, 36805, 36811, 36819, 36817, 36825, 36830, 36831, 36832, 36834, 36554, 36874, 36886, 36893, 36900, 36557, 36843, 36850, 36848, 36852, 36860, 36858, 36865, 36867, 36868, 36874, 36591, 36876, 36886, 36893, 36900, 36624, 36634, 36943, 36941, 16274, 16278, 16281, 36978, 16285, 16288, 36956, 16291, 36971, 36984, 36971, 36984, 36988, 16394, 16403, 36971, 36984, 36971, 36984, 36971, 36984, 16648, 36989, 36993, 36995, 36997, 37001, 37014, 36989, 36993, 36995, 36997, 37001, 37016, 37006, 37008, 37012, 37003, 37006, 37008, 37012, 37003, 16766, 37004, 37008, 37012, 37022, 37006, 37008, 37012, 37025, 12, 13, 14, 15, 37048, 36783, 36814, 36827, 37080, 36862, 37092, 36889, 36896, 36903, 37124, 36736, 36738, 36740, 36742, 37126, 16040, 16041, 16042, 36878, 36881, 16045, 16048, 36891, 16051, 36898, 36905, 37100, 37099, 16057, 16058, 16059, 36878, 36881, 16062, 16065, 36891, 16068, 36898, 36905, 37100, 37099, 16074, 16075, 16076, 36878, 36881, 16079, 16082, 36891, 16085, 36898, 36905, 37100, 37099, 16093, 37081, 16095, 37082, 37050, 37049, 16099, 16100, 16101, 36856, 36854, 16104, 16105, 16107, 16108, 16109, 37088, 16112, 37081, 37082, 16115, 37083, 16117, 16118, 16119, 36856, 36762, 16122, 16123, 16125, 16126, 16127, 37088, 36765, 36768, 36770, 36772, 36774, 36776, 37058, 37060, 36785, 36787, 36789, 37066, 37068, 37069, 36925, 37109, 36930, 37112, 16149, 36935, 37115, 36939, 36944, 16154, 16155, 37121, 37119, 37118, 36952, 37123, 36768, 36770, 36772, 36774, 36776, 37058, 37060, 36785, 36787, 36789, 37066, 37068, 
37069, 36925, 37109, 36930, 37112, 16179, 36935, 37115, 36939, 36944, 16184, 16185, 16186, 37121, 37119, 37118, 36952, 37123, 16193, 16194, 36809, 36807, 16197, 16199, 16200, 36823, 36821, 16203, 16205, 16206, 36891, 36898, 16210, 16211, 16212, 16213, 16214, 16215, 16216, 16218, 37081, 37082, 16221, 37083, 16223, 16224, 16225, 36856, 36854, 16228, 16229, 16231, 16232, 16233, 37088, 16235, 16236, 16237, 36878, 36881, 16240, 16243, 36891, 16246, 36898, 36905, 37100, 37099, 36912, 36914, 36916, 37104, 16256, 37106, 36925, 37109, 36930, 37112, 16262, 36935, 37115, 36939, 36944, 16267, 16268, 37121, 37119, 37118, 36952, 37123, 36974, 37211, 37213, 37214, 16290, 16355, 37209, 16360, 16376, 37209, 16381, 16542, 37209, 16547, 16575, 37209, 16580, 16635, 37209, 16640, 36986, 16714, 36991, 16716, 16717, 16718, 36999, 16720, 36986, 16731, 36991, 16733, 16734, 16735, 36999, 16737, 16747, 16748, 37005, 16750, 16752, 16760, 16761, 37010, 16763, 16765, 16773, 16774, 37005, 16776, 16869, 16870, 37010, 16872, 37236, 37242, 37255, 37259, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37274, 15980, 15981, 15982, 15983, 37279, 37281, 16043, 16044, 36883, 37271, 16049, 37272, 16052, 37273, 16054, 16055, 16056, 37294, 16060, 16061, 36883, 37271, 16066, 37272, 16069, 37273, 16071, 16072, 16073, 37307, 16077, 16078, 36883, 37271, 16083, 37272, 16086, 37273, 16088, 16089, 16090, 37264, 16094, 16096, 16097, 16098, 37326, 16102, 16103, 37331, 37269, 16110, 37268, 16113, 16114, 16116, 37342, 16120, 16121, 37347, 37269, 16128, 16129, 16131, 16132, 16133, 16134, 16135, 16136, 16137, 37265, 16139, 16140, 16141, 16142, 16143, 16144, 16145, 16146, 16147, 16148, 16150, 16151, 16152, 16153, 16156, 16157, 16158, 16159, 16160, 16161, 16162, 16163, 16164, 16165, 16166, 16167, 37265, 16169, 16170, 16171, 16172, 16173, 16174, 16175, 16176, 16177, 16178, 16180, 16181, 16182, 16183, 37406, 16187, 16188, 16189, 16190, 16191, 16195, 16196, 37266, 37418, 16201, 16202, 37267, 36883, 16208, 16209, 37429, 37268, 16219, 
16220, 16222, 37439, 16226, 16227, 37444, 37269, 16234, 37450, 16238, 16239, 36883, 37271, 16244, 37272, 16247, 37273, 16249, 16250, 16251, 16252, 16253, 16254, 16255, 16257, 16258, 16259, 16260, 16261, 16263, 16264, 16265, 16266, 16269, 16270, 16271, 16272, 16273, 37485, 16356, 37487, 37486, 37412, 16377, 37487, 37486, 37488, 37332, 37348, 16543, 37487, 37486, 37488, 16576, 37487, 37486, 37412, 37426, 37426, 37445, 16636, 37487, 37486, 37488, 16713, 16715, 16719, 16730, 16732, 16736, 16749, 37524, 16762, 37529, 16775, 16871, 37508, 16911, 37516, 16918, 37520, 37525, 37530, 16933, 37534, 16968, 10, 11, 12, 13, 14, 15, 16046, 16047, 37563, 16050, 37565, 16053, 37569, 16063, 16064, 37575, 16067, 37577, 16070, 37581, 16080, 16081, 37587, 16084, 37589, 16087, 37593, 16092, 37596, 37598, 37601, 16106, 16111, 37339, 37611, 16124, 16138, 37375, 37639, 16168, 37404, 37667, 37672, 16198, 37676, 16204, 16207, 16217, 37436, 37688, 16230, 16241, 16242, 37697, 16245, 37699, 16248, 37703, 37477, 37718, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37659, 37657, 37399, 37662, 37669, 37723, 37552, 16358, 16359, 16361, 37620, 37618, 37616, 37626, 37555, 37553, 37628, 37632, 37630, 37370, 37635, 37641, 37727, 37557, 16379, 16380, 16382, 37722, 37558, 37559, 37570, 37571, 37582, 37583, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37659, 37657, 37399, 37662, 37669, 37722, 16507, 37334, 37608, 16514, 37350, 37681, 37693, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37662, 37669, 37733, 37722, 16545, 16546, 16548, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37653, 37651, 37655, 37659, 37657, 37399, 37662, 37669, 37737, 37722, 16578, 16579, 16581, 37685, 16589, 37447, 37681, 37693, 37681, 37685, 16606, 37681, 37693, 37685, 16618, 37447, 37692, 37693, 37706, 37704, 37466, 37711, 
37709, 37472, 37714, 37720, 37744, 37722, 16638, 16639, 16641, 37748, 37749, 16909, 37750, 37751, 37752, 16916, 37753, 16921, 37754, 37755, 16926, 37756, 37757, 16931, 37758, 16966, 37759, 37761, 37763, 37767, 37769, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37776, 37783, 37790, 37376, 37640, 37665, 37668, 37821, 37478, 37719, 16332, 16333, 16334, 37622, 16336, 16337, 16338, 16339, 16340, 16341, 16342, 16343, 16345, 16346, 16347, 37649, 16349, 16350, 16351, 16352, 16353, 16357, 37852, 37853, 16362, 16363, 16364, 37622, 16366, 16367, 16368, 16369, 16370, 16371, 16372, 16373, 16374, 16378, 37869, 37870, 16383, 16460, 16461, 37780, 37778, 37567, 16466, 16467, 37787, 37785, 37579, 16472, 16473, 37794, 37792, 37591, 16478, 16479, 16480, 37622, 16482, 16483, 16484, 16485, 16486, 16487, 16488, 16489, 16491, 16492, 16493, 37649, 16495, 16496, 16497, 16498, 16499, 16501, 37797, 37799, 37595, 37327, 37602, 16508, 37802, 16510, 37606, 37343, 37612, 16515, 16516, 16517, 37825, 37823, 37701, 16522, 16523, 16524, 37622, 16526, 16527, 16528, 16529, 16530, 16531, 16532, 16533, 16535, 16536, 16537, 37649, 16539, 16540, 16544, 37924, 37925, 16549, 16550, 16551, 37622, 16553, 16554, 16555, 16556, 16557, 16558, 16559, 16560, 16562, 16563, 16564, 37649, 16566, 16567, 16568, 16569, 16570, 16571, 16572, 16573, 16577, 37951, 37952, 37817, 16583, 37683, 37413, 37416, 37674, 37421, 16590, 16591, 16592, 37680, 37679, 37816, 37701, 16597, 37680, 37679, 37816, 37817, 16602, 37683, 37440, 37689, 16607, 16608, 37432, 37431, 37430, 37701, 37817, 16614, 37683, 37440, 37689, 16619, 16620, 16621, 37825, 37823, 37701, 16626, 16627, 16628, 16629, 16630, 16631, 16632, 16633, 16637, 37979, 37980, 16907, 16908, 16910, 16914, 16915, 16917, 16922, 16923, 16927, 16928, 16932, 16967, 16985, 16988, 16994, 17008, 12, 13, 14, 15, 38027, 16335, 38031, 38034, 38019, 38039, 16348, 38043, 38021, 38051, 16365, 38055, 38059, 38019, 16462, 16463, 38016, 16465, 16468, 16469, 38017, 16471, 16474, 16475, 38018, 16477, 
38083, 16481, 38087, 38090, 38019, 38095, 16494, 38099, 38021, 16502, 16503, 16504, 16505, 16506, 16509, 16511, 16512, 16513, 16518, 16519, 38023, 16521, 38122, 16525, 38126, 38129, 38019, 38134, 16538, 38021, 38143, 16552, 38147, 38150, 38019, 38155, 16565, 38159, 38162, 38021, 16582, 16584, 16585, 16586, 16587, 16588, 16593, 16594, 16595, 16596, 16598, 16599, 16600, 16601, 16603, 16604, 16605, 16609, 16610, 16611, 16612, 16613, 16615, 16616, 16617, 16622, 16623, 38023, 16625, 38210, 38213, 38024, 37849, 38048, 37866, 38064, 37976, 38218, 37898, 37901, 38067, 38072, 38077, 37964, 38204, 37976, 38218, 37898, 37901, 38116, 37964, 38204, 37921, 38140, 37948, 38167, 37954, 38177, 38183, 37960, 38192, 37964, 38204, 37976, 38218, 37983, 38220, 37987, 38223, 37989, 38227, 37992, 38229, 37995, 37997, 13, 14, 15, 38028, 16344, 38040, 16354, 38052, 38056, 16375, 16464, 38255, 16470, 38259, 16476, 38263, 38084, 16490, 38096, 16500, 38277, 38281, 16520, 38285, 38123, 16534, 38135, 16541, 38144, 16561, 38156, 16574, 38307, 38313, 38317, 38320, 38324, 38328, 16624, 38332, 16634, 38243, 38242, 38247, 38303, 16695, 16696, 38252, 16701, 16702, 38269, 38268, 38273, 38303, 16711, 16712, 16787, 38278, 16790, 38282, 16792, 16794, 16796, 16799, 38329, 16801, 38269, 38268, 38273, 38303, 16811, 16812, 16814, 38278, 16817, 38282, 16819, 16822, 38329, 16824, 38291, 38290, 38304, 38303, 16834, 16835, 38299, 38298, 38304, 38303, 16844, 16845, 16847, 38310, 38308, 16850, 16852, 16855, 38321, 16857, 16860, 38329, 16862, 38336, 38335, 16867, 16868, 16983, 16984, 16986, 16987, 16989, 16990, 16991, 16992, 16993, 17007, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38385, 38387, 38390, 38391, 38393, 38395, 38398, 38400, 38403, 38406, 38408, 38410, 38412, 38314, 38318, 38325, 38419, 38421, 16687, 16688, 38384, 16691, 16692, 38386, 16697, 38389, 38388, 16703, 16704, 38397, 16707, 16708, 38399, 38275, 16788, 38280, 16791, 38327, 16800, 16803, 16804, 38397, 16807, 16808, 38399, 38275, 16815, 38280, 
16818, 38327, 16823, 16826, 16827, 38405, 16830, 16831, 38407, 16836, 16837, 38409, 16840, 16841, 38411, 38306, 16848, 16849, 38319, 16856, 38327, 16861, 16864, 16865, 38426, 38429, 38435, 38486, 38451, 38465, 38471, 38486, 38489, 38491, 38496, 38494, 38492, 38497, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16689, 38531, 38036, 16693, 38534, 38045, 16698, 16699, 38061, 16705, 38540, 38092, 16709, 38543, 38101, 16786, 38546, 16789, 38548, 38515, 38516, 38517, 16798, 38550, 38528, 16805, 38552, 38092, 16809, 38555, 38101, 16813, 38558, 16816, 38560, 38520, 16821, 38562, 38528, 16828, 38564, 38131, 16832, 38567, 38137, 16838, 38570, 38152, 16842, 38573, 38164, 16846, 38576, 38525, 38526, 16854, 38579, 38527, 16859, 38581, 38528, 38583, 38215, 16891, 16894, 16902, 16906, 16945, 16953, 16956, 16965, 17012, 17013, 38593, 17015, 38592, 17020, 13, 14, 15, 38608, 16690, 38611, 16694, 38614, 16700, 38617, 16706, 38620, 16710, 16793, 16795, 16797, 16802, 38633, 16806, 38636, 16810, 16820, 16825, 38647, 16829, 38650, 16833, 38653, 16839, 38656, 16843, 38577, 16851, 16853, 16858, 16863, 16866, 38625, 38623, 38630, 38641, 38639, 38644, 38625, 38623, 38630, 38641, 38639, 38644, 38663, 38666, 17014, 17016, 38680, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38615, 16882, 16883, 38443, 38442, 38441, 16887, 38446, 38690, 38688, 38714, 16895, 16896, 38457, 16898, 38460, 38696, 38694, 38659, 38476, 38669, 16936, 16937, 38443, 38442, 38441, 16941, 38446, 38704, 38702, 16946, 16947, 38457, 16949, 38460, 38710, 38708, 38714, 38712, 38659, 38476, 38477, 16960, 38480, 16962, 38483, 38669, 38682, 38754, 16884, 16885, 16886, 16888, 16889, 16890, 16892, 38752, 38764, 16897, 16899, 16900, 16901, 16903, 16904, 16905, 38774, 16938, 16939, 16940, 16942, 16943, 16944, 38783, 16948, 16950, 16951, 16952, 16954, 16955, 16957, 16958, 16959, 16961, 16963, 16964, 38799, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38802, 38806, 16893, 38813, 38819, 38823, 38828, 38830, 38758, 38766, 38809, 38796, 38794, 38794, 38814, 
38816, 38778, 38785, 38824, 38796, 38794, 38794, 38831, 38836, 38737, 9, 10, 11, 12, 13, 14, 15, 38803, 38850, 38820, 16971, 38849, 16975, 16976, 38851, 16978, 16979, 16980, 16981, 16982, 16995, 38853, 16998, 16999, 38855, 38854, 17002, 17003, 17004, 17005, 17006, 8, 9, 10, 11, 12, 13, 14, 15, 38800, 38881, 16974, 38886, 16977, 38889, 38891, 38817, 16997, 38896, 17000, 17001, 38900, 38902, 14, 15, 16972, 16973, 38918, 16996, 38923, 38925, 38915, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38928, 38914, 38931, 38930, 17010, 38933, 38921, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17009, 38944, 17017, 17018, 38946, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17011, 38948, 17019, 38963, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38976, 38978, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38993, 38992, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17021, 17022, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 39025, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
bool h_Op[]= {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 
1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 
1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
// Launch configuration and buffer sizing for the generated circuit kernel below.
// THREADS_PER_BLOCK is reused in the device code as the stride `t` between
// consecutive "slots" of the shared working array R.
#define THREADS_PER_BLOCK 16
// NOTE(review): presumably the host launches a single block (grid of 1) so
// that one block owns the entire shared working set — confirm against the
// (not visible here) kernel launch site.
#define BLOCKS_PER_GRID 1
// Number of input elements: 17024 = 1064 * THREADS_PER_BLOCK, matching the
// 1064 per-thread copy statements (A -> R) at the top of ac().
#define SIZE_OF_IN 17024
// Number of computed elements: 22032 = 1377 * THREADS_PER_BLOCK.
// SIZE_OF_IN + SIZE_OF_AC = 39056 = 2441 * THREADS_PER_BLOCK, which is
// exactly the declared size of the shared array R in ac() — inputs occupy
// the first 1064 slot-rows, computed results the remaining 1377.
#define SIZE_OF_AC 22032
__device__ void
ac(float *A, const int *B, const int *C, const bool *Op, int n_iter) {
int i= blockDim.x * blockIdx.x + threadIdx.x;
__shared__ float R[2441*THREADS_PER_BLOCK];
const int t= THREADS_PER_BLOCK;
__shared__ float final;
final=0;
R[i + 0*t] = A[i + 0*t];
R[i + 1*t] = A[i + 1*t];
R[i + 2*t] = A[i + 2*t];
R[i + 3*t] = A[i + 3*t];
R[i + 4*t] = A[i + 4*t];
R[i + 5*t] = A[i + 5*t];
R[i + 6*t] = A[i + 6*t];
R[i + 7*t] = A[i + 7*t];
R[i + 8*t] = A[i + 8*t];
R[i + 9*t] = A[i + 9*t];
R[i + 10*t] = A[i + 10*t];
R[i + 11*t] = A[i + 11*t];
R[i + 12*t] = A[i + 12*t];
R[i + 13*t] = A[i + 13*t];
R[i + 14*t] = A[i + 14*t];
R[i + 15*t] = A[i + 15*t];
R[i + 16*t] = A[i + 16*t];
R[i + 17*t] = A[i + 17*t];
R[i + 18*t] = A[i + 18*t];
R[i + 19*t] = A[i + 19*t];
R[i + 20*t] = A[i + 20*t];
R[i + 21*t] = A[i + 21*t];
R[i + 22*t] = A[i + 22*t];
R[i + 23*t] = A[i + 23*t];
R[i + 24*t] = A[i + 24*t];
R[i + 25*t] = A[i + 25*t];
R[i + 26*t] = A[i + 26*t];
R[i + 27*t] = A[i + 27*t];
R[i + 28*t] = A[i + 28*t];
R[i + 29*t] = A[i + 29*t];
R[i + 30*t] = A[i + 30*t];
R[i + 31*t] = A[i + 31*t];
R[i + 32*t] = A[i + 32*t];
R[i + 33*t] = A[i + 33*t];
R[i + 34*t] = A[i + 34*t];
R[i + 35*t] = A[i + 35*t];
R[i + 36*t] = A[i + 36*t];
R[i + 37*t] = A[i + 37*t];
R[i + 38*t] = A[i + 38*t];
R[i + 39*t] = A[i + 39*t];
R[i + 40*t] = A[i + 40*t];
R[i + 41*t] = A[i + 41*t];
R[i + 42*t] = A[i + 42*t];
R[i + 43*t] = A[i + 43*t];
R[i + 44*t] = A[i + 44*t];
R[i + 45*t] = A[i + 45*t];
R[i + 46*t] = A[i + 46*t];
R[i + 47*t] = A[i + 47*t];
R[i + 48*t] = A[i + 48*t];
R[i + 49*t] = A[i + 49*t];
R[i + 50*t] = A[i + 50*t];
R[i + 51*t] = A[i + 51*t];
R[i + 52*t] = A[i + 52*t];
R[i + 53*t] = A[i + 53*t];
R[i + 54*t] = A[i + 54*t];
R[i + 55*t] = A[i + 55*t];
R[i + 56*t] = A[i + 56*t];
R[i + 57*t] = A[i + 57*t];
R[i + 58*t] = A[i + 58*t];
R[i + 59*t] = A[i + 59*t];
R[i + 60*t] = A[i + 60*t];
R[i + 61*t] = A[i + 61*t];
R[i + 62*t] = A[i + 62*t];
R[i + 63*t] = A[i + 63*t];
R[i + 64*t] = A[i + 64*t];
R[i + 65*t] = A[i + 65*t];
R[i + 66*t] = A[i + 66*t];
R[i + 67*t] = A[i + 67*t];
R[i + 68*t] = A[i + 68*t];
R[i + 69*t] = A[i + 69*t];
R[i + 70*t] = A[i + 70*t];
R[i + 71*t] = A[i + 71*t];
R[i + 72*t] = A[i + 72*t];
R[i + 73*t] = A[i + 73*t];
R[i + 74*t] = A[i + 74*t];
R[i + 75*t] = A[i + 75*t];
R[i + 76*t] = A[i + 76*t];
R[i + 77*t] = A[i + 77*t];
R[i + 78*t] = A[i + 78*t];
R[i + 79*t] = A[i + 79*t];
R[i + 80*t] = A[i + 80*t];
R[i + 81*t] = A[i + 81*t];
R[i + 82*t] = A[i + 82*t];
R[i + 83*t] = A[i + 83*t];
R[i + 84*t] = A[i + 84*t];
R[i + 85*t] = A[i + 85*t];
R[i + 86*t] = A[i + 86*t];
R[i + 87*t] = A[i + 87*t];
R[i + 88*t] = A[i + 88*t];
R[i + 89*t] = A[i + 89*t];
R[i + 90*t] = A[i + 90*t];
R[i + 91*t] = A[i + 91*t];
R[i + 92*t] = A[i + 92*t];
R[i + 93*t] = A[i + 93*t];
R[i + 94*t] = A[i + 94*t];
R[i + 95*t] = A[i + 95*t];
R[i + 96*t] = A[i + 96*t];
R[i + 97*t] = A[i + 97*t];
R[i + 98*t] = A[i + 98*t];
R[i + 99*t] = A[i + 99*t];
R[i + 100*t] = A[i + 100*t];
R[i + 101*t] = A[i + 101*t];
R[i + 102*t] = A[i + 102*t];
R[i + 103*t] = A[i + 103*t];
R[i + 104*t] = A[i + 104*t];
R[i + 105*t] = A[i + 105*t];
R[i + 106*t] = A[i + 106*t];
R[i + 107*t] = A[i + 107*t];
R[i + 108*t] = A[i + 108*t];
R[i + 109*t] = A[i + 109*t];
R[i + 110*t] = A[i + 110*t];
R[i + 111*t] = A[i + 111*t];
R[i + 112*t] = A[i + 112*t];
R[i + 113*t] = A[i + 113*t];
R[i + 114*t] = A[i + 114*t];
R[i + 115*t] = A[i + 115*t];
R[i + 116*t] = A[i + 116*t];
R[i + 117*t] = A[i + 117*t];
R[i + 118*t] = A[i + 118*t];
R[i + 119*t] = A[i + 119*t];
R[i + 120*t] = A[i + 120*t];
R[i + 121*t] = A[i + 121*t];
R[i + 122*t] = A[i + 122*t];
R[i + 123*t] = A[i + 123*t];
R[i + 124*t] = A[i + 124*t];
R[i + 125*t] = A[i + 125*t];
R[i + 126*t] = A[i + 126*t];
R[i + 127*t] = A[i + 127*t];
R[i + 128*t] = A[i + 128*t];
R[i + 129*t] = A[i + 129*t];
R[i + 130*t] = A[i + 130*t];
R[i + 131*t] = A[i + 131*t];
R[i + 132*t] = A[i + 132*t];
R[i + 133*t] = A[i + 133*t];
R[i + 134*t] = A[i + 134*t];
R[i + 135*t] = A[i + 135*t];
R[i + 136*t] = A[i + 136*t];
R[i + 137*t] = A[i + 137*t];
R[i + 138*t] = A[i + 138*t];
R[i + 139*t] = A[i + 139*t];
R[i + 140*t] = A[i + 140*t];
R[i + 141*t] = A[i + 141*t];
R[i + 142*t] = A[i + 142*t];
R[i + 143*t] = A[i + 143*t];
R[i + 144*t] = A[i + 144*t];
R[i + 145*t] = A[i + 145*t];
R[i + 146*t] = A[i + 146*t];
R[i + 147*t] = A[i + 147*t];
R[i + 148*t] = A[i + 148*t];
R[i + 149*t] = A[i + 149*t];
R[i + 150*t] = A[i + 150*t];
R[i + 151*t] = A[i + 151*t];
R[i + 152*t] = A[i + 152*t];
R[i + 153*t] = A[i + 153*t];
R[i + 154*t] = A[i + 154*t];
R[i + 155*t] = A[i + 155*t];
R[i + 156*t] = A[i + 156*t];
R[i + 157*t] = A[i + 157*t];
R[i + 158*t] = A[i + 158*t];
R[i + 159*t] = A[i + 159*t];
R[i + 160*t] = A[i + 160*t];
R[i + 161*t] = A[i + 161*t];
R[i + 162*t] = A[i + 162*t];
R[i + 163*t] = A[i + 163*t];
R[i + 164*t] = A[i + 164*t];
R[i + 165*t] = A[i + 165*t];
R[i + 166*t] = A[i + 166*t];
R[i + 167*t] = A[i + 167*t];
R[i + 168*t] = A[i + 168*t];
R[i + 169*t] = A[i + 169*t];
R[i + 170*t] = A[i + 170*t];
R[i + 171*t] = A[i + 171*t];
R[i + 172*t] = A[i + 172*t];
R[i + 173*t] = A[i + 173*t];
R[i + 174*t] = A[i + 174*t];
R[i + 175*t] = A[i + 175*t];
R[i + 176*t] = A[i + 176*t];
R[i + 177*t] = A[i + 177*t];
R[i + 178*t] = A[i + 178*t];
R[i + 179*t] = A[i + 179*t];
R[i + 180*t] = A[i + 180*t];
R[i + 181*t] = A[i + 181*t];
R[i + 182*t] = A[i + 182*t];
R[i + 183*t] = A[i + 183*t];
R[i + 184*t] = A[i + 184*t];
R[i + 185*t] = A[i + 185*t];
R[i + 186*t] = A[i + 186*t];
R[i + 187*t] = A[i + 187*t];
R[i + 188*t] = A[i + 188*t];
R[i + 189*t] = A[i + 189*t];
R[i + 190*t] = A[i + 190*t];
R[i + 191*t] = A[i + 191*t];
R[i + 192*t] = A[i + 192*t];
R[i + 193*t] = A[i + 193*t];
R[i + 194*t] = A[i + 194*t];
R[i + 195*t] = A[i + 195*t];
R[i + 196*t] = A[i + 196*t];
R[i + 197*t] = A[i + 197*t];
R[i + 198*t] = A[i + 198*t];
R[i + 199*t] = A[i + 199*t];
R[i + 200*t] = A[i + 200*t];
R[i + 201*t] = A[i + 201*t];
R[i + 202*t] = A[i + 202*t];
R[i + 203*t] = A[i + 203*t];
R[i + 204*t] = A[i + 204*t];
R[i + 205*t] = A[i + 205*t];
R[i + 206*t] = A[i + 206*t];
R[i + 207*t] = A[i + 207*t];
R[i + 208*t] = A[i + 208*t];
R[i + 209*t] = A[i + 209*t];
R[i + 210*t] = A[i + 210*t];
R[i + 211*t] = A[i + 211*t];
R[i + 212*t] = A[i + 212*t];
R[i + 213*t] = A[i + 213*t];
R[i + 214*t] = A[i + 214*t];
R[i + 215*t] = A[i + 215*t];
R[i + 216*t] = A[i + 216*t];
R[i + 217*t] = A[i + 217*t];
R[i + 218*t] = A[i + 218*t];
R[i + 219*t] = A[i + 219*t];
R[i + 220*t] = A[i + 220*t];
R[i + 221*t] = A[i + 221*t];
R[i + 222*t] = A[i + 222*t];
R[i + 223*t] = A[i + 223*t];
R[i + 224*t] = A[i + 224*t];
R[i + 225*t] = A[i + 225*t];
R[i + 226*t] = A[i + 226*t];
R[i + 227*t] = A[i + 227*t];
R[i + 228*t] = A[i + 228*t];
R[i + 229*t] = A[i + 229*t];
R[i + 230*t] = A[i + 230*t];
R[i + 231*t] = A[i + 231*t];
R[i + 232*t] = A[i + 232*t];
R[i + 233*t] = A[i + 233*t];
R[i + 234*t] = A[i + 234*t];
R[i + 235*t] = A[i + 235*t];
R[i + 236*t] = A[i + 236*t];
R[i + 237*t] = A[i + 237*t];
R[i + 238*t] = A[i + 238*t];
R[i + 239*t] = A[i + 239*t];
R[i + 240*t] = A[i + 240*t];
R[i + 241*t] = A[i + 241*t];
R[i + 242*t] = A[i + 242*t];
R[i + 243*t] = A[i + 243*t];
R[i + 244*t] = A[i + 244*t];
R[i + 245*t] = A[i + 245*t];
R[i + 246*t] = A[i + 246*t];
R[i + 247*t] = A[i + 247*t];
R[i + 248*t] = A[i + 248*t];
R[i + 249*t] = A[i + 249*t];
R[i + 250*t] = A[i + 250*t];
R[i + 251*t] = A[i + 251*t];
R[i + 252*t] = A[i + 252*t];
R[i + 253*t] = A[i + 253*t];
R[i + 254*t] = A[i + 254*t];
R[i + 255*t] = A[i + 255*t];
R[i + 256*t] = A[i + 256*t];
R[i + 257*t] = A[i + 257*t];
R[i + 258*t] = A[i + 258*t];
R[i + 259*t] = A[i + 259*t];
R[i + 260*t] = A[i + 260*t];
R[i + 261*t] = A[i + 261*t];
R[i + 262*t] = A[i + 262*t];
R[i + 263*t] = A[i + 263*t];
R[i + 264*t] = A[i + 264*t];
R[i + 265*t] = A[i + 265*t];
R[i + 266*t] = A[i + 266*t];
R[i + 267*t] = A[i + 267*t];
R[i + 268*t] = A[i + 268*t];
R[i + 269*t] = A[i + 269*t];
R[i + 270*t] = A[i + 270*t];
R[i + 271*t] = A[i + 271*t];
R[i + 272*t] = A[i + 272*t];
R[i + 273*t] = A[i + 273*t];
R[i + 274*t] = A[i + 274*t];
R[i + 275*t] = A[i + 275*t];
R[i + 276*t] = A[i + 276*t];
R[i + 277*t] = A[i + 277*t];
R[i + 278*t] = A[i + 278*t];
R[i + 279*t] = A[i + 279*t];
R[i + 280*t] = A[i + 280*t];
R[i + 281*t] = A[i + 281*t];
R[i + 282*t] = A[i + 282*t];
R[i + 283*t] = A[i + 283*t];
R[i + 284*t] = A[i + 284*t];
R[i + 285*t] = A[i + 285*t];
R[i + 286*t] = A[i + 286*t];
R[i + 287*t] = A[i + 287*t];
R[i + 288*t] = A[i + 288*t];
R[i + 289*t] = A[i + 289*t];
R[i + 290*t] = A[i + 290*t];
R[i + 291*t] = A[i + 291*t];
R[i + 292*t] = A[i + 292*t];
R[i + 293*t] = A[i + 293*t];
R[i + 294*t] = A[i + 294*t];
R[i + 295*t] = A[i + 295*t];
R[i + 296*t] = A[i + 296*t];
R[i + 297*t] = A[i + 297*t];
R[i + 298*t] = A[i + 298*t];
R[i + 299*t] = A[i + 299*t];
R[i + 300*t] = A[i + 300*t];
R[i + 301*t] = A[i + 301*t];
R[i + 302*t] = A[i + 302*t];
R[i + 303*t] = A[i + 303*t];
R[i + 304*t] = A[i + 304*t];
R[i + 305*t] = A[i + 305*t];
R[i + 306*t] = A[i + 306*t];
R[i + 307*t] = A[i + 307*t];
R[i + 308*t] = A[i + 308*t];
R[i + 309*t] = A[i + 309*t];
R[i + 310*t] = A[i + 310*t];
R[i + 311*t] = A[i + 311*t];
R[i + 312*t] = A[i + 312*t];
R[i + 313*t] = A[i + 313*t];
R[i + 314*t] = A[i + 314*t];
R[i + 315*t] = A[i + 315*t];
R[i + 316*t] = A[i + 316*t];
R[i + 317*t] = A[i + 317*t];
R[i + 318*t] = A[i + 318*t];
R[i + 319*t] = A[i + 319*t];
R[i + 320*t] = A[i + 320*t];
R[i + 321*t] = A[i + 321*t];
R[i + 322*t] = A[i + 322*t];
R[i + 323*t] = A[i + 323*t];
R[i + 324*t] = A[i + 324*t];
R[i + 325*t] = A[i + 325*t];
R[i + 326*t] = A[i + 326*t];
R[i + 327*t] = A[i + 327*t];
R[i + 328*t] = A[i + 328*t];
R[i + 329*t] = A[i + 329*t];
R[i + 330*t] = A[i + 330*t];
R[i + 331*t] = A[i + 331*t];
R[i + 332*t] = A[i + 332*t];
R[i + 333*t] = A[i + 333*t];
R[i + 334*t] = A[i + 334*t];
R[i + 335*t] = A[i + 335*t];
R[i + 336*t] = A[i + 336*t];
R[i + 337*t] = A[i + 337*t];
R[i + 338*t] = A[i + 338*t];
R[i + 339*t] = A[i + 339*t];
R[i + 340*t] = A[i + 340*t];
R[i + 341*t] = A[i + 341*t];
R[i + 342*t] = A[i + 342*t];
R[i + 343*t] = A[i + 343*t];
R[i + 344*t] = A[i + 344*t];
R[i + 345*t] = A[i + 345*t];
R[i + 346*t] = A[i + 346*t];
R[i + 347*t] = A[i + 347*t];
R[i + 348*t] = A[i + 348*t];
R[i + 349*t] = A[i + 349*t];
R[i + 350*t] = A[i + 350*t];
R[i + 351*t] = A[i + 351*t];
R[i + 352*t] = A[i + 352*t];
R[i + 353*t] = A[i + 353*t];
R[i + 354*t] = A[i + 354*t];
R[i + 355*t] = A[i + 355*t];
R[i + 356*t] = A[i + 356*t];
R[i + 357*t] = A[i + 357*t];
R[i + 358*t] = A[i + 358*t];
R[i + 359*t] = A[i + 359*t];
R[i + 360*t] = A[i + 360*t];
R[i + 361*t] = A[i + 361*t];
R[i + 362*t] = A[i + 362*t];
R[i + 363*t] = A[i + 363*t];
R[i + 364*t] = A[i + 364*t];
R[i + 365*t] = A[i + 365*t];
R[i + 366*t] = A[i + 366*t];
R[i + 367*t] = A[i + 367*t];
R[i + 368*t] = A[i + 368*t];
R[i + 369*t] = A[i + 369*t];
R[i + 370*t] = A[i + 370*t];
R[i + 371*t] = A[i + 371*t];
R[i + 372*t] = A[i + 372*t];
R[i + 373*t] = A[i + 373*t];
R[i + 374*t] = A[i + 374*t];
R[i + 375*t] = A[i + 375*t];
R[i + 376*t] = A[i + 376*t];
R[i + 377*t] = A[i + 377*t];
R[i + 378*t] = A[i + 378*t];
R[i + 379*t] = A[i + 379*t];
R[i + 380*t] = A[i + 380*t];
R[i + 381*t] = A[i + 381*t];
R[i + 382*t] = A[i + 382*t];
R[i + 383*t] = A[i + 383*t];
R[i + 384*t] = A[i + 384*t];
R[i + 385*t] = A[i + 385*t];
R[i + 386*t] = A[i + 386*t];
R[i + 387*t] = A[i + 387*t];
R[i + 388*t] = A[i + 388*t];
R[i + 389*t] = A[i + 389*t];
R[i + 390*t] = A[i + 390*t];
R[i + 391*t] = A[i + 391*t];
R[i + 392*t] = A[i + 392*t];
R[i + 393*t] = A[i + 393*t];
R[i + 394*t] = A[i + 394*t];
R[i + 395*t] = A[i + 395*t];
R[i + 396*t] = A[i + 396*t];
R[i + 397*t] = A[i + 397*t];
R[i + 398*t] = A[i + 398*t];
R[i + 399*t] = A[i + 399*t];
R[i + 400*t] = A[i + 400*t];
R[i + 401*t] = A[i + 401*t];
R[i + 402*t] = A[i + 402*t];
R[i + 403*t] = A[i + 403*t];
R[i + 404*t] = A[i + 404*t];
R[i + 405*t] = A[i + 405*t];
R[i + 406*t] = A[i + 406*t];
R[i + 407*t] = A[i + 407*t];
R[i + 408*t] = A[i + 408*t];
R[i + 409*t] = A[i + 409*t];
R[i + 410*t] = A[i + 410*t];
R[i + 411*t] = A[i + 411*t];
R[i + 412*t] = A[i + 412*t];
R[i + 413*t] = A[i + 413*t];
R[i + 414*t] = A[i + 414*t];
R[i + 415*t] = A[i + 415*t];
R[i + 416*t] = A[i + 416*t];
R[i + 417*t] = A[i + 417*t];
R[i + 418*t] = A[i + 418*t];
R[i + 419*t] = A[i + 419*t];
R[i + 420*t] = A[i + 420*t];
R[i + 421*t] = A[i + 421*t];
R[i + 422*t] = A[i + 422*t];
R[i + 423*t] = A[i + 423*t];
R[i + 424*t] = A[i + 424*t];
R[i + 425*t] = A[i + 425*t];
R[i + 426*t] = A[i + 426*t];
R[i + 427*t] = A[i + 427*t];
R[i + 428*t] = A[i + 428*t];
R[i + 429*t] = A[i + 429*t];
R[i + 430*t] = A[i + 430*t];
R[i + 431*t] = A[i + 431*t];
R[i + 432*t] = A[i + 432*t];
R[i + 433*t] = A[i + 433*t];
R[i + 434*t] = A[i + 434*t];
R[i + 435*t] = A[i + 435*t];
R[i + 436*t] = A[i + 436*t];
R[i + 437*t] = A[i + 437*t];
R[i + 438*t] = A[i + 438*t];
R[i + 439*t] = A[i + 439*t];
R[i + 440*t] = A[i + 440*t];
R[i + 441*t] = A[i + 441*t];
R[i + 442*t] = A[i + 442*t];
R[i + 443*t] = A[i + 443*t];
R[i + 444*t] = A[i + 444*t];
R[i + 445*t] = A[i + 445*t];
R[i + 446*t] = A[i + 446*t];
R[i + 447*t] = A[i + 447*t];
R[i + 448*t] = A[i + 448*t];
R[i + 449*t] = A[i + 449*t];
R[i + 450*t] = A[i + 450*t];
R[i + 451*t] = A[i + 451*t];
R[i + 452*t] = A[i + 452*t];
R[i + 453*t] = A[i + 453*t];
R[i + 454*t] = A[i + 454*t];
R[i + 455*t] = A[i + 455*t];
R[i + 456*t] = A[i + 456*t];
R[i + 457*t] = A[i + 457*t];
R[i + 458*t] = A[i + 458*t];
R[i + 459*t] = A[i + 459*t];
R[i + 460*t] = A[i + 460*t];
R[i + 461*t] = A[i + 461*t];
R[i + 462*t] = A[i + 462*t];
R[i + 463*t] = A[i + 463*t];
R[i + 464*t] = A[i + 464*t];
R[i + 465*t] = A[i + 465*t];
R[i + 466*t] = A[i + 466*t];
R[i + 467*t] = A[i + 467*t];
R[i + 468*t] = A[i + 468*t];
R[i + 469*t] = A[i + 469*t];
R[i + 470*t] = A[i + 470*t];
R[i + 471*t] = A[i + 471*t];
R[i + 472*t] = A[i + 472*t];
R[i + 473*t] = A[i + 473*t];
R[i + 474*t] = A[i + 474*t];
R[i + 475*t] = A[i + 475*t];
R[i + 476*t] = A[i + 476*t];
R[i + 477*t] = A[i + 477*t];
R[i + 478*t] = A[i + 478*t];
R[i + 479*t] = A[i + 479*t];
R[i + 480*t] = A[i + 480*t];
R[i + 481*t] = A[i + 481*t];
R[i + 482*t] = A[i + 482*t];
R[i + 483*t] = A[i + 483*t];
R[i + 484*t] = A[i + 484*t];
R[i + 485*t] = A[i + 485*t];
R[i + 486*t] = A[i + 486*t];
R[i + 487*t] = A[i + 487*t];
R[i + 488*t] = A[i + 488*t];
R[i + 489*t] = A[i + 489*t];
R[i + 490*t] = A[i + 490*t];
R[i + 491*t] = A[i + 491*t];
R[i + 492*t] = A[i + 492*t];
R[i + 493*t] = A[i + 493*t];
R[i + 494*t] = A[i + 494*t];
R[i + 495*t] = A[i + 495*t];
R[i + 496*t] = A[i + 496*t];
R[i + 497*t] = A[i + 497*t];
R[i + 498*t] = A[i + 498*t];
R[i + 499*t] = A[i + 499*t];
R[i + 500*t] = A[i + 500*t];
R[i + 501*t] = A[i + 501*t];
R[i + 502*t] = A[i + 502*t];
R[i + 503*t] = A[i + 503*t];
R[i + 504*t] = A[i + 504*t];
R[i + 505*t] = A[i + 505*t];
R[i + 506*t] = A[i + 506*t];
R[i + 507*t] = A[i + 507*t];
R[i + 508*t] = A[i + 508*t];
R[i + 509*t] = A[i + 509*t];
R[i + 510*t] = A[i + 510*t];
R[i + 511*t] = A[i + 511*t];
R[i + 512*t] = A[i + 512*t];
R[i + 513*t] = A[i + 513*t];
R[i + 514*t] = A[i + 514*t];
R[i + 515*t] = A[i + 515*t];
R[i + 516*t] = A[i + 516*t];
R[i + 517*t] = A[i + 517*t];
R[i + 518*t] = A[i + 518*t];
R[i + 519*t] = A[i + 519*t];
R[i + 520*t] = A[i + 520*t];
R[i + 521*t] = A[i + 521*t];
R[i + 522*t] = A[i + 522*t];
R[i + 523*t] = A[i + 523*t];
R[i + 524*t] = A[i + 524*t];
R[i + 525*t] = A[i + 525*t];
R[i + 526*t] = A[i + 526*t];
R[i + 527*t] = A[i + 527*t];
R[i + 528*t] = A[i + 528*t];
R[i + 529*t] = A[i + 529*t];
R[i + 530*t] = A[i + 530*t];
R[i + 531*t] = A[i + 531*t];
R[i + 532*t] = A[i + 532*t];
R[i + 533*t] = A[i + 533*t];
R[i + 534*t] = A[i + 534*t];
R[i + 535*t] = A[i + 535*t];
R[i + 536*t] = A[i + 536*t];
R[i + 537*t] = A[i + 537*t];
R[i + 538*t] = A[i + 538*t];
R[i + 539*t] = A[i + 539*t];
R[i + 540*t] = A[i + 540*t];
R[i + 541*t] = A[i + 541*t];
R[i + 542*t] = A[i + 542*t];
R[i + 543*t] = A[i + 543*t];
R[i + 544*t] = A[i + 544*t];
R[i + 545*t] = A[i + 545*t];
R[i + 546*t] = A[i + 546*t];
R[i + 547*t] = A[i + 547*t];
R[i + 548*t] = A[i + 548*t];
R[i + 549*t] = A[i + 549*t];
R[i + 550*t] = A[i + 550*t];
R[i + 551*t] = A[i + 551*t];
R[i + 552*t] = A[i + 552*t];
R[i + 553*t] = A[i + 553*t];
R[i + 554*t] = A[i + 554*t];
R[i + 555*t] = A[i + 555*t];
R[i + 556*t] = A[i + 556*t];
R[i + 557*t] = A[i + 557*t];
R[i + 558*t] = A[i + 558*t];
R[i + 559*t] = A[i + 559*t];
R[i + 560*t] = A[i + 560*t];
R[i + 561*t] = A[i + 561*t];
R[i + 562*t] = A[i + 562*t];
R[i + 563*t] = A[i + 563*t];
R[i + 564*t] = A[i + 564*t];
R[i + 565*t] = A[i + 565*t];
R[i + 566*t] = A[i + 566*t];
R[i + 567*t] = A[i + 567*t];
R[i + 568*t] = A[i + 568*t];
R[i + 569*t] = A[i + 569*t];
R[i + 570*t] = A[i + 570*t];
R[i + 571*t] = A[i + 571*t];
R[i + 572*t] = A[i + 572*t];
R[i + 573*t] = A[i + 573*t];
R[i + 574*t] = A[i + 574*t];
R[i + 575*t] = A[i + 575*t];
R[i + 576*t] = A[i + 576*t];
R[i + 577*t] = A[i + 577*t];
R[i + 578*t] = A[i + 578*t];
R[i + 579*t] = A[i + 579*t];
R[i + 580*t] = A[i + 580*t];
R[i + 581*t] = A[i + 581*t];
R[i + 582*t] = A[i + 582*t];
R[i + 583*t] = A[i + 583*t];
R[i + 584*t] = A[i + 584*t];
R[i + 585*t] = A[i + 585*t];
R[i + 586*t] = A[i + 586*t];
R[i + 587*t] = A[i + 587*t];
R[i + 588*t] = A[i + 588*t];
R[i + 589*t] = A[i + 589*t];
R[i + 590*t] = A[i + 590*t];
R[i + 591*t] = A[i + 591*t];
R[i + 592*t] = A[i + 592*t];
R[i + 593*t] = A[i + 593*t];
R[i + 594*t] = A[i + 594*t];
R[i + 595*t] = A[i + 595*t];
R[i + 596*t] = A[i + 596*t];
R[i + 597*t] = A[i + 597*t];
R[i + 598*t] = A[i + 598*t];
R[i + 599*t] = A[i + 599*t];
R[i + 600*t] = A[i + 600*t];
R[i + 601*t] = A[i + 601*t];
R[i + 602*t] = A[i + 602*t];
R[i + 603*t] = A[i + 603*t];
R[i + 604*t] = A[i + 604*t];
R[i + 605*t] = A[i + 605*t];
R[i + 606*t] = A[i + 606*t];
R[i + 607*t] = A[i + 607*t];
R[i + 608*t] = A[i + 608*t];
R[i + 609*t] = A[i + 609*t];
R[i + 610*t] = A[i + 610*t];
R[i + 611*t] = A[i + 611*t];
R[i + 612*t] = A[i + 612*t];
R[i + 613*t] = A[i + 613*t];
R[i + 614*t] = A[i + 614*t];
R[i + 615*t] = A[i + 615*t];
R[i + 616*t] = A[i + 616*t];
R[i + 617*t] = A[i + 617*t];
R[i + 618*t] = A[i + 618*t];
R[i + 619*t] = A[i + 619*t];
R[i + 620*t] = A[i + 620*t];
R[i + 621*t] = A[i + 621*t];
R[i + 622*t] = A[i + 622*t];
R[i + 623*t] = A[i + 623*t];
R[i + 624*t] = A[i + 624*t];
R[i + 625*t] = A[i + 625*t];
R[i + 626*t] = A[i + 626*t];
R[i + 627*t] = A[i + 627*t];
R[i + 628*t] = A[i + 628*t];
R[i + 629*t] = A[i + 629*t];
R[i + 630*t] = A[i + 630*t];
R[i + 631*t] = A[i + 631*t];
R[i + 632*t] = A[i + 632*t];
R[i + 633*t] = A[i + 633*t];
R[i + 634*t] = A[i + 634*t];
R[i + 635*t] = A[i + 635*t];
R[i + 636*t] = A[i + 636*t];
R[i + 637*t] = A[i + 637*t];
R[i + 638*t] = A[i + 638*t];
R[i + 639*t] = A[i + 639*t];
R[i + 640*t] = A[i + 640*t];
R[i + 641*t] = A[i + 641*t];
R[i + 642*t] = A[i + 642*t];
R[i + 643*t] = A[i + 643*t];
R[i + 644*t] = A[i + 644*t];
R[i + 645*t] = A[i + 645*t];
R[i + 646*t] = A[i + 646*t];
R[i + 647*t] = A[i + 647*t];
R[i + 648*t] = A[i + 648*t];
R[i + 649*t] = A[i + 649*t];
R[i + 650*t] = A[i + 650*t];
R[i + 651*t] = A[i + 651*t];
R[i + 652*t] = A[i + 652*t];
R[i + 653*t] = A[i + 653*t];
R[i + 654*t] = A[i + 654*t];
R[i + 655*t] = A[i + 655*t];
R[i + 656*t] = A[i + 656*t];
R[i + 657*t] = A[i + 657*t];
R[i + 658*t] = A[i + 658*t];
R[i + 659*t] = A[i + 659*t];
R[i + 660*t] = A[i + 660*t];
R[i + 661*t] = A[i + 661*t];
R[i + 662*t] = A[i + 662*t];
R[i + 663*t] = A[i + 663*t];
R[i + 664*t] = A[i + 664*t];
R[i + 665*t] = A[i + 665*t];
R[i + 666*t] = A[i + 666*t];
R[i + 667*t] = A[i + 667*t];
R[i + 668*t] = A[i + 668*t];
R[i + 669*t] = A[i + 669*t];
R[i + 670*t] = A[i + 670*t];
R[i + 671*t] = A[i + 671*t];
R[i + 672*t] = A[i + 672*t];
R[i + 673*t] = A[i + 673*t];
R[i + 674*t] = A[i + 674*t];
R[i + 675*t] = A[i + 675*t];
R[i + 676*t] = A[i + 676*t];
R[i + 677*t] = A[i + 677*t];
R[i + 678*t] = A[i + 678*t];
R[i + 679*t] = A[i + 679*t];
R[i + 680*t] = A[i + 680*t];
R[i + 681*t] = A[i + 681*t];
R[i + 682*t] = A[i + 682*t];
R[i + 683*t] = A[i + 683*t];
R[i + 684*t] = A[i + 684*t];
R[i + 685*t] = A[i + 685*t];
R[i + 686*t] = A[i + 686*t];
R[i + 687*t] = A[i + 687*t];
R[i + 688*t] = A[i + 688*t];
R[i + 689*t] = A[i + 689*t];
R[i + 690*t] = A[i + 690*t];
R[i + 691*t] = A[i + 691*t];
R[i + 692*t] = A[i + 692*t];
R[i + 693*t] = A[i + 693*t];
R[i + 694*t] = A[i + 694*t];
R[i + 695*t] = A[i + 695*t];
R[i + 696*t] = A[i + 696*t];
R[i + 697*t] = A[i + 697*t];
R[i + 698*t] = A[i + 698*t];
R[i + 699*t] = A[i + 699*t];
R[i + 700*t] = A[i + 700*t];
R[i + 701*t] = A[i + 701*t];
R[i + 702*t] = A[i + 702*t];
R[i + 703*t] = A[i + 703*t];
R[i + 704*t] = A[i + 704*t];
R[i + 705*t] = A[i + 705*t];
R[i + 706*t] = A[i + 706*t];
R[i + 707*t] = A[i + 707*t];
R[i + 708*t] = A[i + 708*t];
R[i + 709*t] = A[i + 709*t];
R[i + 710*t] = A[i + 710*t];
R[i + 711*t] = A[i + 711*t];
R[i + 712*t] = A[i + 712*t];
R[i + 713*t] = A[i + 713*t];
R[i + 714*t] = A[i + 714*t];
R[i + 715*t] = A[i + 715*t];
R[i + 716*t] = A[i + 716*t];
R[i + 717*t] = A[i + 717*t];
R[i + 718*t] = A[i + 718*t];
R[i + 719*t] = A[i + 719*t];
R[i + 720*t] = A[i + 720*t];
R[i + 721*t] = A[i + 721*t];
R[i + 722*t] = A[i + 722*t];
R[i + 723*t] = A[i + 723*t];
R[i + 724*t] = A[i + 724*t];
R[i + 725*t] = A[i + 725*t];
R[i + 726*t] = A[i + 726*t];
R[i + 727*t] = A[i + 727*t];
R[i + 728*t] = A[i + 728*t];
R[i + 729*t] = A[i + 729*t];
R[i + 730*t] = A[i + 730*t];
R[i + 731*t] = A[i + 731*t];
R[i + 732*t] = A[i + 732*t];
R[i + 733*t] = A[i + 733*t];
R[i + 734*t] = A[i + 734*t];
R[i + 735*t] = A[i + 735*t];
R[i + 736*t] = A[i + 736*t];
R[i + 737*t] = A[i + 737*t];
R[i + 738*t] = A[i + 738*t];
R[i + 739*t] = A[i + 739*t];
R[i + 740*t] = A[i + 740*t];
R[i + 741*t] = A[i + 741*t];
R[i + 742*t] = A[i + 742*t];
R[i + 743*t] = A[i + 743*t];
R[i + 744*t] = A[i + 744*t];
R[i + 745*t] = A[i + 745*t];
R[i + 746*t] = A[i + 746*t];
R[i + 747*t] = A[i + 747*t];
R[i + 748*t] = A[i + 748*t];
R[i + 749*t] = A[i + 749*t];
R[i + 750*t] = A[i + 750*t];
R[i + 751*t] = A[i + 751*t];
R[i + 752*t] = A[i + 752*t];
R[i + 753*t] = A[i + 753*t];
R[i + 754*t] = A[i + 754*t];
R[i + 755*t] = A[i + 755*t];
R[i + 756*t] = A[i + 756*t];
R[i + 757*t] = A[i + 757*t];
R[i + 758*t] = A[i + 758*t];
R[i + 759*t] = A[i + 759*t];
R[i + 760*t] = A[i + 760*t];
R[i + 761*t] = A[i + 761*t];
R[i + 762*t] = A[i + 762*t];
R[i + 763*t] = A[i + 763*t];
R[i + 764*t] = A[i + 764*t];
R[i + 765*t] = A[i + 765*t];
R[i + 766*t] = A[i + 766*t];
R[i + 767*t] = A[i + 767*t];
R[i + 768*t] = A[i + 768*t];
R[i + 769*t] = A[i + 769*t];
R[i + 770*t] = A[i + 770*t];
R[i + 771*t] = A[i + 771*t];
R[i + 772*t] = A[i + 772*t];
R[i + 773*t] = A[i + 773*t];
R[i + 774*t] = A[i + 774*t];
R[i + 775*t] = A[i + 775*t];
R[i + 776*t] = A[i + 776*t];
R[i + 777*t] = A[i + 777*t];
R[i + 778*t] = A[i + 778*t];
R[i + 779*t] = A[i + 779*t];
R[i + 780*t] = A[i + 780*t];
R[i + 781*t] = A[i + 781*t];
R[i + 782*t] = A[i + 782*t];
R[i + 783*t] = A[i + 783*t];
R[i + 784*t] = A[i + 784*t];
R[i + 785*t] = A[i + 785*t];
R[i + 786*t] = A[i + 786*t];
R[i + 787*t] = A[i + 787*t];
R[i + 788*t] = A[i + 788*t];
R[i + 789*t] = A[i + 789*t];
R[i + 790*t] = A[i + 790*t];
R[i + 791*t] = A[i + 791*t];
R[i + 792*t] = A[i + 792*t];
R[i + 793*t] = A[i + 793*t];
R[i + 794*t] = A[i + 794*t];
R[i + 795*t] = A[i + 795*t];
R[i + 796*t] = A[i + 796*t];
R[i + 797*t] = A[i + 797*t];
R[i + 798*t] = A[i + 798*t];
R[i + 799*t] = A[i + 799*t];
R[i + 800*t] = A[i + 800*t];
R[i + 801*t] = A[i + 801*t];
R[i + 802*t] = A[i + 802*t];
R[i + 803*t] = A[i + 803*t];
R[i + 804*t] = A[i + 804*t];
R[i + 805*t] = A[i + 805*t];
R[i + 806*t] = A[i + 806*t];
R[i + 807*t] = A[i + 807*t];
R[i + 808*t] = A[i + 808*t];
R[i + 809*t] = A[i + 809*t];
R[i + 810*t] = A[i + 810*t];
R[i + 811*t] = A[i + 811*t];
R[i + 812*t] = A[i + 812*t];
R[i + 813*t] = A[i + 813*t];
R[i + 814*t] = A[i + 814*t];
R[i + 815*t] = A[i + 815*t];
R[i + 816*t] = A[i + 816*t];
R[i + 817*t] = A[i + 817*t];
R[i + 818*t] = A[i + 818*t];
R[i + 819*t] = A[i + 819*t];
R[i + 820*t] = A[i + 820*t];
R[i + 821*t] = A[i + 821*t];
R[i + 822*t] = A[i + 822*t];
R[i + 823*t] = A[i + 823*t];
R[i + 824*t] = A[i + 824*t];
R[i + 825*t] = A[i + 825*t];
R[i + 826*t] = A[i + 826*t];
R[i + 827*t] = A[i + 827*t];
R[i + 828*t] = A[i + 828*t];
R[i + 829*t] = A[i + 829*t];
R[i + 830*t] = A[i + 830*t];
R[i + 831*t] = A[i + 831*t];
R[i + 832*t] = A[i + 832*t];
R[i + 833*t] = A[i + 833*t];
R[i + 834*t] = A[i + 834*t];
R[i + 835*t] = A[i + 835*t];
R[i + 836*t] = A[i + 836*t];
R[i + 837*t] = A[i + 837*t];
R[i + 838*t] = A[i + 838*t];
R[i + 839*t] = A[i + 839*t];
R[i + 840*t] = A[i + 840*t];
R[i + 841*t] = A[i + 841*t];
R[i + 842*t] = A[i + 842*t];
R[i + 843*t] = A[i + 843*t];
R[i + 844*t] = A[i + 844*t];
R[i + 845*t] = A[i + 845*t];
R[i + 846*t] = A[i + 846*t];
R[i + 847*t] = A[i + 847*t];
R[i + 848*t] = A[i + 848*t];
R[i + 849*t] = A[i + 849*t];
R[i + 850*t] = A[i + 850*t];
R[i + 851*t] = A[i + 851*t];
R[i + 852*t] = A[i + 852*t];
R[i + 853*t] = A[i + 853*t];
R[i + 854*t] = A[i + 854*t];
R[i + 855*t] = A[i + 855*t];
R[i + 856*t] = A[i + 856*t];
R[i + 857*t] = A[i + 857*t];
R[i + 858*t] = A[i + 858*t];
R[i + 859*t] = A[i + 859*t];
R[i + 860*t] = A[i + 860*t];
R[i + 861*t] = A[i + 861*t];
R[i + 862*t] = A[i + 862*t];
R[i + 863*t] = A[i + 863*t];
R[i + 864*t] = A[i + 864*t];
R[i + 865*t] = A[i + 865*t];
R[i + 866*t] = A[i + 866*t];
R[i + 867*t] = A[i + 867*t];
R[i + 868*t] = A[i + 868*t];
R[i + 869*t] = A[i + 869*t];
R[i + 870*t] = A[i + 870*t];
R[i + 871*t] = A[i + 871*t];
R[i + 872*t] = A[i + 872*t];
R[i + 873*t] = A[i + 873*t];
R[i + 874*t] = A[i + 874*t];
R[i + 875*t] = A[i + 875*t];
R[i + 876*t] = A[i + 876*t];
R[i + 877*t] = A[i + 877*t];
R[i + 878*t] = A[i + 878*t];
R[i + 879*t] = A[i + 879*t];
R[i + 880*t] = A[i + 880*t];
R[i + 881*t] = A[i + 881*t];
R[i + 882*t] = A[i + 882*t];
R[i + 883*t] = A[i + 883*t];
R[i + 884*t] = A[i + 884*t];
R[i + 885*t] = A[i + 885*t];
R[i + 886*t] = A[i + 886*t];
R[i + 887*t] = A[i + 887*t];
R[i + 888*t] = A[i + 888*t];
R[i + 889*t] = A[i + 889*t];
R[i + 890*t] = A[i + 890*t];
R[i + 891*t] = A[i + 891*t];
R[i + 892*t] = A[i + 892*t];
R[i + 893*t] = A[i + 893*t];
R[i + 894*t] = A[i + 894*t];
R[i + 895*t] = A[i + 895*t];
R[i + 896*t] = A[i + 896*t];
R[i + 897*t] = A[i + 897*t];
R[i + 898*t] = A[i + 898*t];
R[i + 899*t] = A[i + 899*t];
R[i + 900*t] = A[i + 900*t];
R[i + 901*t] = A[i + 901*t];
R[i + 902*t] = A[i + 902*t];
R[i + 903*t] = A[i + 903*t];
R[i + 904*t] = A[i + 904*t];
R[i + 905*t] = A[i + 905*t];
R[i + 906*t] = A[i + 906*t];
R[i + 907*t] = A[i + 907*t];
R[i + 908*t] = A[i + 908*t];
R[i + 909*t] = A[i + 909*t];
R[i + 910*t] = A[i + 910*t];
R[i + 911*t] = A[i + 911*t];
R[i + 912*t] = A[i + 912*t];
R[i + 913*t] = A[i + 913*t];
R[i + 914*t] = A[i + 914*t];
R[i + 915*t] = A[i + 915*t];
R[i + 916*t] = A[i + 916*t];
R[i + 917*t] = A[i + 917*t];
R[i + 918*t] = A[i + 918*t];
R[i + 919*t] = A[i + 919*t];
R[i + 920*t] = A[i + 920*t];
R[i + 921*t] = A[i + 921*t];
R[i + 922*t] = A[i + 922*t];
R[i + 923*t] = A[i + 923*t];
R[i + 924*t] = A[i + 924*t];
R[i + 925*t] = A[i + 925*t];
R[i + 926*t] = A[i + 926*t];
R[i + 927*t] = A[i + 927*t];
R[i + 928*t] = A[i + 928*t];
R[i + 929*t] = A[i + 929*t];
R[i + 930*t] = A[i + 930*t];
R[i + 931*t] = A[i + 931*t];
R[i + 932*t] = A[i + 932*t];
R[i + 933*t] = A[i + 933*t];
R[i + 934*t] = A[i + 934*t];
R[i + 935*t] = A[i + 935*t];
R[i + 936*t] = A[i + 936*t];
R[i + 937*t] = A[i + 937*t];
R[i + 938*t] = A[i + 938*t];
R[i + 939*t] = A[i + 939*t];
R[i + 940*t] = A[i + 940*t];
R[i + 941*t] = A[i + 941*t];
R[i + 942*t] = A[i + 942*t];
R[i + 943*t] = A[i + 943*t];
R[i + 944*t] = A[i + 944*t];
R[i + 945*t] = A[i + 945*t];
R[i + 946*t] = A[i + 946*t];
R[i + 947*t] = A[i + 947*t];
R[i + 948*t] = A[i + 948*t];
R[i + 949*t] = A[i + 949*t];
R[i + 950*t] = A[i + 950*t];
R[i + 951*t] = A[i + 951*t];
R[i + 952*t] = A[i + 952*t];
R[i + 953*t] = A[i + 953*t];
R[i + 954*t] = A[i + 954*t];
R[i + 955*t] = A[i + 955*t];
R[i + 956*t] = A[i + 956*t];
R[i + 957*t] = A[i + 957*t];
R[i + 958*t] = A[i + 958*t];
R[i + 959*t] = A[i + 959*t];
R[i + 960*t] = A[i + 960*t];
R[i + 961*t] = A[i + 961*t];
R[i + 962*t] = A[i + 962*t];
R[i + 963*t] = A[i + 963*t];
R[i + 964*t] = A[i + 964*t];
R[i + 965*t] = A[i + 965*t];
R[i + 966*t] = A[i + 966*t];
R[i + 967*t] = A[i + 967*t];
R[i + 968*t] = A[i + 968*t];
R[i + 969*t] = A[i + 969*t];
R[i + 970*t] = A[i + 970*t];
R[i + 971*t] = A[i + 971*t];
R[i + 972*t] = A[i + 972*t];
R[i + 973*t] = A[i + 973*t];
R[i + 974*t] = A[i + 974*t];
R[i + 975*t] = A[i + 975*t];
R[i + 976*t] = A[i + 976*t];
R[i + 977*t] = A[i + 977*t];
R[i + 978*t] = A[i + 978*t];
R[i + 979*t] = A[i + 979*t];
R[i + 980*t] = A[i + 980*t];
R[i + 981*t] = A[i + 981*t];
R[i + 982*t] = A[i + 982*t];
R[i + 983*t] = A[i + 983*t];
R[i + 984*t] = A[i + 984*t];
R[i + 985*t] = A[i + 985*t];
R[i + 986*t] = A[i + 986*t];
R[i + 987*t] = A[i + 987*t];
R[i + 988*t] = A[i + 988*t];
R[i + 989*t] = A[i + 989*t];
R[i + 990*t] = A[i + 990*t];
R[i + 991*t] = A[i + 991*t];
R[i + 992*t] = A[i + 992*t];
R[i + 993*t] = A[i + 993*t];
R[i + 994*t] = A[i + 994*t];
R[i + 995*t] = A[i + 995*t];
R[i + 996*t] = A[i + 996*t];
R[i + 997*t] = A[i + 997*t];
R[i + 998*t] = A[i + 998*t];
R[i + 999*t] = A[i + 999*t];
R[i + 1000*t] = A[i + 1000*t];
R[i + 1001*t] = A[i + 1001*t];
R[i + 1002*t] = A[i + 1002*t];
R[i + 1003*t] = A[i + 1003*t];
R[i + 1004*t] = A[i + 1004*t];
R[i + 1005*t] = A[i + 1005*t];
R[i + 1006*t] = A[i + 1006*t];
R[i + 1007*t] = A[i + 1007*t];
R[i + 1008*t] = A[i + 1008*t];
R[i + 1009*t] = A[i + 1009*t];
R[i + 1010*t] = A[i + 1010*t];
R[i + 1011*t] = A[i + 1011*t];
R[i + 1012*t] = A[i + 1012*t];
R[i + 1013*t] = A[i + 1013*t];
R[i + 1014*t] = A[i + 1014*t];
R[i + 1015*t] = A[i + 1015*t];
R[i + 1016*t] = A[i + 1016*t];
R[i + 1017*t] = A[i + 1017*t];
R[i + 1018*t] = A[i + 1018*t];
R[i + 1019*t] = A[i + 1019*t];
R[i + 1020*t] = A[i + 1020*t];
R[i + 1021*t] = A[i + 1021*t];
R[i + 1022*t] = A[i + 1022*t];
R[i + 1023*t] = A[i + 1023*t];
R[i + 1024*t] = A[i + 1024*t];
R[i + 1025*t] = A[i + 1025*t];
R[i + 1026*t] = A[i + 1026*t];
R[i + 1027*t] = A[i + 1027*t];
R[i + 1028*t] = A[i + 1028*t];
R[i + 1029*t] = A[i + 1029*t];
R[i + 1030*t] = A[i + 1030*t];
R[i + 1031*t] = A[i + 1031*t];
R[i + 1032*t] = A[i + 1032*t];
R[i + 1033*t] = A[i + 1033*t];
R[i + 1034*t] = A[i + 1034*t];
R[i + 1035*t] = A[i + 1035*t];
R[i + 1036*t] = A[i + 1036*t];
R[i + 1037*t] = A[i + 1037*t];
R[i + 1038*t] = A[i + 1038*t];
R[i + 1039*t] = A[i + 1039*t];
R[i + 1040*t] = A[i + 1040*t];
R[i + 1041*t] = A[i + 1041*t];
R[i + 1042*t] = A[i + 1042*t];
R[i + 1043*t] = A[i + 1043*t];
R[i + 1044*t] = A[i + 1044*t];
R[i + 1045*t] = A[i + 1045*t];
R[i + 1046*t] = A[i + 1046*t];
R[i + 1047*t] = A[i + 1047*t];
R[i + 1048*t] = A[i + 1048*t];
R[i + 1049*t] = A[i + 1049*t];
R[i + 1050*t] = A[i + 1050*t];
R[i + 1051*t] = A[i + 1051*t];
R[i + 1052*t] = A[i + 1052*t];
R[i + 1053*t] = A[i + 1053*t];
R[i + 1054*t] = A[i + 1054*t];
R[i + 1055*t] = A[i + 1055*t];
R[i + 1056*t] = A[i + 1056*t];
R[i + 1057*t] = A[i + 1057*t];
R[i + 1058*t] = A[i + 1058*t];
R[i + 1059*t] = A[i + 1059*t];
R[i + 1060*t] = A[i + 1060*t];
R[i + 1061*t] = A[i + 1061*t];
R[i + 1062*t] = A[i + 1062*t];
R[i + 1063*t] = A[i + 1063*t];
__syncthreads();
for (int iter=0; iter< n_iter; iter++) {
R[i + 1064*t] = Op[i + 0*t] ? R[B[i + 0*t]] * R[C[i + 0*t]] : R[B[i + 0*t]] + R[C[i + 0*t]];
R[i + 1065*t] = Op[i + 1*t] ? R[B[i + 1*t]] * R[C[i + 1*t]] : R[B[i + 1*t]] + R[C[i + 1*t]];
R[i + 1066*t] = Op[i + 2*t] ? R[B[i + 2*t]] * R[C[i + 2*t]] : R[B[i + 2*t]] + R[C[i + 2*t]];
R[i + 1067*t] = Op[i + 3*t] ? R[B[i + 3*t]] * R[C[i + 3*t]] : R[B[i + 3*t]] + R[C[i + 3*t]];
R[i + 1068*t] = Op[i + 4*t] ? R[B[i + 4*t]] * R[C[i + 4*t]] : R[B[i + 4*t]] + R[C[i + 4*t]];
R[i + 1069*t] = Op[i + 5*t] ? R[B[i + 5*t]] * R[C[i + 5*t]] : R[B[i + 5*t]] + R[C[i + 5*t]];
R[i + 1070*t] = Op[i + 6*t] ? R[B[i + 6*t]] * R[C[i + 6*t]] : R[B[i + 6*t]] + R[C[i + 6*t]];
R[i + 1071*t] = Op[i + 7*t] ? R[B[i + 7*t]] * R[C[i + 7*t]] : R[B[i + 7*t]] + R[C[i + 7*t]];
R[i + 1072*t] = Op[i + 8*t] ? R[B[i + 8*t]] * R[C[i + 8*t]] : R[B[i + 8*t]] + R[C[i + 8*t]];
R[i + 1073*t] = Op[i + 9*t] ? R[B[i + 9*t]] * R[C[i + 9*t]] : R[B[i + 9*t]] + R[C[i + 9*t]];
R[i + 1074*t] = Op[i + 10*t] ? R[B[i + 10*t]] * R[C[i + 10*t]] : R[B[i + 10*t]] + R[C[i + 10*t]];
R[i + 1075*t] = Op[i + 11*t] ? R[B[i + 11*t]] * R[C[i + 11*t]] : R[B[i + 11*t]] + R[C[i + 11*t]];
R[i + 1076*t] = Op[i + 12*t] ? R[B[i + 12*t]] * R[C[i + 12*t]] : R[B[i + 12*t]] + R[C[i + 12*t]];
R[i + 1077*t] = Op[i + 13*t] ? R[B[i + 13*t]] * R[C[i + 13*t]] : R[B[i + 13*t]] + R[C[i + 13*t]];
R[i + 1078*t] = Op[i + 14*t] ? R[B[i + 14*t]] * R[C[i + 14*t]] : R[B[i + 14*t]] + R[C[i + 14*t]];
R[i + 1079*t] = Op[i + 15*t] ? R[B[i + 15*t]] * R[C[i + 15*t]] : R[B[i + 15*t]] + R[C[i + 15*t]];
R[i + 1080*t] = Op[i + 16*t] ? R[B[i + 16*t]] * R[C[i + 16*t]] : R[B[i + 16*t]] + R[C[i + 16*t]];
R[i + 1081*t] = Op[i + 17*t] ? R[B[i + 17*t]] * R[C[i + 17*t]] : R[B[i + 17*t]] + R[C[i + 17*t]];
R[i + 1082*t] = Op[i + 18*t] ? R[B[i + 18*t]] * R[C[i + 18*t]] : R[B[i + 18*t]] + R[C[i + 18*t]];
R[i + 1083*t] = Op[i + 19*t] ? R[B[i + 19*t]] * R[C[i + 19*t]] : R[B[i + 19*t]] + R[C[i + 19*t]];
R[i + 1084*t] = Op[i + 20*t] ? R[B[i + 20*t]] * R[C[i + 20*t]] : R[B[i + 20*t]] + R[C[i + 20*t]];
R[i + 1085*t] = Op[i + 21*t] ? R[B[i + 21*t]] * R[C[i + 21*t]] : R[B[i + 21*t]] + R[C[i + 21*t]];
R[i + 1086*t] = Op[i + 22*t] ? R[B[i + 22*t]] * R[C[i + 22*t]] : R[B[i + 22*t]] + R[C[i + 22*t]];
R[i + 1087*t] = Op[i + 23*t] ? R[B[i + 23*t]] * R[C[i + 23*t]] : R[B[i + 23*t]] + R[C[i + 23*t]];
R[i + 1088*t] = Op[i + 24*t] ? R[B[i + 24*t]] * R[C[i + 24*t]] : R[B[i + 24*t]] + R[C[i + 24*t]];
R[i + 1089*t] = Op[i + 25*t] ? R[B[i + 25*t]] * R[C[i + 25*t]] : R[B[i + 25*t]] + R[C[i + 25*t]];
R[i + 1090*t] = Op[i + 26*t] ? R[B[i + 26*t]] * R[C[i + 26*t]] : R[B[i + 26*t]] + R[C[i + 26*t]];
R[i + 1091*t] = Op[i + 27*t] ? R[B[i + 27*t]] * R[C[i + 27*t]] : R[B[i + 27*t]] + R[C[i + 27*t]];
R[i + 1092*t] = Op[i + 28*t] ? R[B[i + 28*t]] * R[C[i + 28*t]] : R[B[i + 28*t]] + R[C[i + 28*t]];
R[i + 1093*t] = Op[i + 29*t] ? R[B[i + 29*t]] * R[C[i + 29*t]] : R[B[i + 29*t]] + R[C[i + 29*t]];
R[i + 1094*t] = Op[i + 30*t] ? R[B[i + 30*t]] * R[C[i + 30*t]] : R[B[i + 30*t]] + R[C[i + 30*t]];
R[i + 1095*t] = Op[i + 31*t] ? R[B[i + 31*t]] * R[C[i + 31*t]] : R[B[i + 31*t]] + R[C[i + 31*t]];
R[i + 1096*t] = Op[i + 32*t] ? R[B[i + 32*t]] * R[C[i + 32*t]] : R[B[i + 32*t]] + R[C[i + 32*t]];
R[i + 1097*t] = Op[i + 33*t] ? R[B[i + 33*t]] * R[C[i + 33*t]] : R[B[i + 33*t]] + R[C[i + 33*t]];
R[i + 1098*t] = Op[i + 34*t] ? R[B[i + 34*t]] * R[C[i + 34*t]] : R[B[i + 34*t]] + R[C[i + 34*t]];
R[i + 1099*t] = Op[i + 35*t] ? R[B[i + 35*t]] * R[C[i + 35*t]] : R[B[i + 35*t]] + R[C[i + 35*t]];
R[i + 1100*t] = Op[i + 36*t] ? R[B[i + 36*t]] * R[C[i + 36*t]] : R[B[i + 36*t]] + R[C[i + 36*t]];
R[i + 1101*t] = Op[i + 37*t] ? R[B[i + 37*t]] * R[C[i + 37*t]] : R[B[i + 37*t]] + R[C[i + 37*t]];
R[i + 1102*t] = Op[i + 38*t] ? R[B[i + 38*t]] * R[C[i + 38*t]] : R[B[i + 38*t]] + R[C[i + 38*t]];
R[i + 1103*t] = Op[i + 39*t] ? R[B[i + 39*t]] * R[C[i + 39*t]] : R[B[i + 39*t]] + R[C[i + 39*t]];
R[i + 1104*t] = Op[i + 40*t] ? R[B[i + 40*t]] * R[C[i + 40*t]] : R[B[i + 40*t]] + R[C[i + 40*t]];
R[i + 1105*t] = Op[i + 41*t] ? R[B[i + 41*t]] * R[C[i + 41*t]] : R[B[i + 41*t]] + R[C[i + 41*t]];
R[i + 1106*t] = Op[i + 42*t] ? R[B[i + 42*t]] * R[C[i + 42*t]] : R[B[i + 42*t]] + R[C[i + 42*t]];
R[i + 1107*t] = Op[i + 43*t] ? R[B[i + 43*t]] * R[C[i + 43*t]] : R[B[i + 43*t]] + R[C[i + 43*t]];
R[i + 1108*t] = Op[i + 44*t] ? R[B[i + 44*t]] * R[C[i + 44*t]] : R[B[i + 44*t]] + R[C[i + 44*t]];
R[i + 1109*t] = Op[i + 45*t] ? R[B[i + 45*t]] * R[C[i + 45*t]] : R[B[i + 45*t]] + R[C[i + 45*t]];
R[i + 1110*t] = Op[i + 46*t] ? R[B[i + 46*t]] * R[C[i + 46*t]] : R[B[i + 46*t]] + R[C[i + 46*t]];
R[i + 1111*t] = Op[i + 47*t] ? R[B[i + 47*t]] * R[C[i + 47*t]] : R[B[i + 47*t]] + R[C[i + 47*t]];
R[i + 1112*t] = Op[i + 48*t] ? R[B[i + 48*t]] * R[C[i + 48*t]] : R[B[i + 48*t]] + R[C[i + 48*t]];
R[i + 1113*t] = Op[i + 49*t] ? R[B[i + 49*t]] * R[C[i + 49*t]] : R[B[i + 49*t]] + R[C[i + 49*t]];
R[i + 1114*t] = Op[i + 50*t] ? R[B[i + 50*t]] * R[C[i + 50*t]] : R[B[i + 50*t]] + R[C[i + 50*t]];
R[i + 1115*t] = Op[i + 51*t] ? R[B[i + 51*t]] * R[C[i + 51*t]] : R[B[i + 51*t]] + R[C[i + 51*t]];
R[i + 1116*t] = Op[i + 52*t] ? R[B[i + 52*t]] * R[C[i + 52*t]] : R[B[i + 52*t]] + R[C[i + 52*t]];
R[i + 1117*t] = Op[i + 53*t] ? R[B[i + 53*t]] * R[C[i + 53*t]] : R[B[i + 53*t]] + R[C[i + 53*t]];
R[i + 1118*t] = Op[i + 54*t] ? R[B[i + 54*t]] * R[C[i + 54*t]] : R[B[i + 54*t]] + R[C[i + 54*t]];
R[i + 1119*t] = Op[i + 55*t] ? R[B[i + 55*t]] * R[C[i + 55*t]] : R[B[i + 55*t]] + R[C[i + 55*t]];
R[i + 1120*t] = Op[i + 56*t] ? R[B[i + 56*t]] * R[C[i + 56*t]] : R[B[i + 56*t]] + R[C[i + 56*t]];
R[i + 1121*t] = Op[i + 57*t] ? R[B[i + 57*t]] * R[C[i + 57*t]] : R[B[i + 57*t]] + R[C[i + 57*t]];
R[i + 1122*t] = Op[i + 58*t] ? R[B[i + 58*t]] * R[C[i + 58*t]] : R[B[i + 58*t]] + R[C[i + 58*t]];
R[i + 1123*t] = Op[i + 59*t] ? R[B[i + 59*t]] * R[C[i + 59*t]] : R[B[i + 59*t]] + R[C[i + 59*t]];
R[i + 1124*t] = Op[i + 60*t] ? R[B[i + 60*t]] * R[C[i + 60*t]] : R[B[i + 60*t]] + R[C[i + 60*t]];
R[i + 1125*t] = Op[i + 61*t] ? R[B[i + 61*t]] * R[C[i + 61*t]] : R[B[i + 61*t]] + R[C[i + 61*t]];
R[i + 1126*t] = Op[i + 62*t] ? R[B[i + 62*t]] * R[C[i + 62*t]] : R[B[i + 62*t]] + R[C[i + 62*t]];
R[i + 1127*t] = Op[i + 63*t] ? R[B[i + 63*t]] * R[C[i + 63*t]] : R[B[i + 63*t]] + R[C[i + 63*t]];
R[i + 1128*t] = Op[i + 64*t] ? R[B[i + 64*t]] * R[C[i + 64*t]] : R[B[i + 64*t]] + R[C[i + 64*t]];
R[i + 1129*t] = Op[i + 65*t] ? R[B[i + 65*t]] * R[C[i + 65*t]] : R[B[i + 65*t]] + R[C[i + 65*t]];
R[i + 1130*t] = Op[i + 66*t] ? R[B[i + 66*t]] * R[C[i + 66*t]] : R[B[i + 66*t]] + R[C[i + 66*t]];
R[i + 1131*t] = Op[i + 67*t] ? R[B[i + 67*t]] * R[C[i + 67*t]] : R[B[i + 67*t]] + R[C[i + 67*t]];
R[i + 1132*t] = Op[i + 68*t] ? R[B[i + 68*t]] * R[C[i + 68*t]] : R[B[i + 68*t]] + R[C[i + 68*t]];
R[i + 1133*t] = Op[i + 69*t] ? R[B[i + 69*t]] * R[C[i + 69*t]] : R[B[i + 69*t]] + R[C[i + 69*t]];
R[i + 1134*t] = Op[i + 70*t] ? R[B[i + 70*t]] * R[C[i + 70*t]] : R[B[i + 70*t]] + R[C[i + 70*t]];
R[i + 1135*t] = Op[i + 71*t] ? R[B[i + 71*t]] * R[C[i + 71*t]] : R[B[i + 71*t]] + R[C[i + 71*t]];
R[i + 1136*t] = Op[i + 72*t] ? R[B[i + 72*t]] * R[C[i + 72*t]] : R[B[i + 72*t]] + R[C[i + 72*t]];
R[i + 1137*t] = Op[i + 73*t] ? R[B[i + 73*t]] * R[C[i + 73*t]] : R[B[i + 73*t]] + R[C[i + 73*t]];
R[i + 1138*t] = Op[i + 74*t] ? R[B[i + 74*t]] * R[C[i + 74*t]] : R[B[i + 74*t]] + R[C[i + 74*t]];
R[i + 1139*t] = Op[i + 75*t] ? R[B[i + 75*t]] * R[C[i + 75*t]] : R[B[i + 75*t]] + R[C[i + 75*t]];
R[i + 1140*t] = Op[i + 76*t] ? R[B[i + 76*t]] * R[C[i + 76*t]] : R[B[i + 76*t]] + R[C[i + 76*t]];
R[i + 1141*t] = Op[i + 77*t] ? R[B[i + 77*t]] * R[C[i + 77*t]] : R[B[i + 77*t]] + R[C[i + 77*t]];
R[i + 1142*t] = Op[i + 78*t] ? R[B[i + 78*t]] * R[C[i + 78*t]] : R[B[i + 78*t]] + R[C[i + 78*t]];
R[i + 1143*t] = Op[i + 79*t] ? R[B[i + 79*t]] * R[C[i + 79*t]] : R[B[i + 79*t]] + R[C[i + 79*t]];
R[i + 1144*t] = Op[i + 80*t] ? R[B[i + 80*t]] * R[C[i + 80*t]] : R[B[i + 80*t]] + R[C[i + 80*t]];
R[i + 1145*t] = Op[i + 81*t] ? R[B[i + 81*t]] * R[C[i + 81*t]] : R[B[i + 81*t]] + R[C[i + 81*t]];
R[i + 1146*t] = Op[i + 82*t] ? R[B[i + 82*t]] * R[C[i + 82*t]] : R[B[i + 82*t]] + R[C[i + 82*t]];
R[i + 1147*t] = Op[i + 83*t] ? R[B[i + 83*t]] * R[C[i + 83*t]] : R[B[i + 83*t]] + R[C[i + 83*t]];
R[i + 1148*t] = Op[i + 84*t] ? R[B[i + 84*t]] * R[C[i + 84*t]] : R[B[i + 84*t]] + R[C[i + 84*t]];
R[i + 1149*t] = Op[i + 85*t] ? R[B[i + 85*t]] * R[C[i + 85*t]] : R[B[i + 85*t]] + R[C[i + 85*t]];
R[i + 1150*t] = Op[i + 86*t] ? R[B[i + 86*t]] * R[C[i + 86*t]] : R[B[i + 86*t]] + R[C[i + 86*t]];
R[i + 1151*t] = Op[i + 87*t] ? R[B[i + 87*t]] * R[C[i + 87*t]] : R[B[i + 87*t]] + R[C[i + 87*t]];
R[i + 1152*t] = Op[i + 88*t] ? R[B[i + 88*t]] * R[C[i + 88*t]] : R[B[i + 88*t]] + R[C[i + 88*t]];
R[i + 1153*t] = Op[i + 89*t] ? R[B[i + 89*t]] * R[C[i + 89*t]] : R[B[i + 89*t]] + R[C[i + 89*t]];
R[i + 1154*t] = Op[i + 90*t] ? R[B[i + 90*t]] * R[C[i + 90*t]] : R[B[i + 90*t]] + R[C[i + 90*t]];
R[i + 1155*t] = Op[i + 91*t] ? R[B[i + 91*t]] * R[C[i + 91*t]] : R[B[i + 91*t]] + R[C[i + 91*t]];
R[i + 1156*t] = Op[i + 92*t] ? R[B[i + 92*t]] * R[C[i + 92*t]] : R[B[i + 92*t]] + R[C[i + 92*t]];
R[i + 1157*t] = Op[i + 93*t] ? R[B[i + 93*t]] * R[C[i + 93*t]] : R[B[i + 93*t]] + R[C[i + 93*t]];
R[i + 1158*t] = Op[i + 94*t] ? R[B[i + 94*t]] * R[C[i + 94*t]] : R[B[i + 94*t]] + R[C[i + 94*t]];
R[i + 1159*t] = Op[i + 95*t] ? R[B[i + 95*t]] * R[C[i + 95*t]] : R[B[i + 95*t]] + R[C[i + 95*t]];
R[i + 1160*t] = Op[i + 96*t] ? R[B[i + 96*t]] * R[C[i + 96*t]] : R[B[i + 96*t]] + R[C[i + 96*t]];
R[i + 1161*t] = Op[i + 97*t] ? R[B[i + 97*t]] * R[C[i + 97*t]] : R[B[i + 97*t]] + R[C[i + 97*t]];
R[i + 1162*t] = Op[i + 98*t] ? R[B[i + 98*t]] * R[C[i + 98*t]] : R[B[i + 98*t]] + R[C[i + 98*t]];
R[i + 1163*t] = Op[i + 99*t] ? R[B[i + 99*t]] * R[C[i + 99*t]] : R[B[i + 99*t]] + R[C[i + 99*t]];
R[i + 1164*t] = Op[i + 100*t] ? R[B[i + 100*t]] * R[C[i + 100*t]] : R[B[i + 100*t]] + R[C[i + 100*t]];
R[i + 1165*t] = Op[i + 101*t] ? R[B[i + 101*t]] * R[C[i + 101*t]] : R[B[i + 101*t]] + R[C[i + 101*t]];
R[i + 1166*t] = Op[i + 102*t] ? R[B[i + 102*t]] * R[C[i + 102*t]] : R[B[i + 102*t]] + R[C[i + 102*t]];
R[i + 1167*t] = Op[i + 103*t] ? R[B[i + 103*t]] * R[C[i + 103*t]] : R[B[i + 103*t]] + R[C[i + 103*t]];
R[i + 1168*t] = Op[i + 104*t] ? R[B[i + 104*t]] * R[C[i + 104*t]] : R[B[i + 104*t]] + R[C[i + 104*t]];
R[i + 1169*t] = Op[i + 105*t] ? R[B[i + 105*t]] * R[C[i + 105*t]] : R[B[i + 105*t]] + R[C[i + 105*t]];
R[i + 1170*t] = Op[i + 106*t] ? R[B[i + 106*t]] * R[C[i + 106*t]] : R[B[i + 106*t]] + R[C[i + 106*t]];
R[i + 1171*t] = Op[i + 107*t] ? R[B[i + 107*t]] * R[C[i + 107*t]] : R[B[i + 107*t]] + R[C[i + 107*t]];
R[i + 1172*t] = Op[i + 108*t] ? R[B[i + 108*t]] * R[C[i + 108*t]] : R[B[i + 108*t]] + R[C[i + 108*t]];
R[i + 1173*t] = Op[i + 109*t] ? R[B[i + 109*t]] * R[C[i + 109*t]] : R[B[i + 109*t]] + R[C[i + 109*t]];
R[i + 1174*t] = Op[i + 110*t] ? R[B[i + 110*t]] * R[C[i + 110*t]] : R[B[i + 110*t]] + R[C[i + 110*t]];
R[i + 1175*t] = Op[i + 111*t] ? R[B[i + 111*t]] * R[C[i + 111*t]] : R[B[i + 111*t]] + R[C[i + 111*t]];
R[i + 1176*t] = Op[i + 112*t] ? R[B[i + 112*t]] * R[C[i + 112*t]] : R[B[i + 112*t]] + R[C[i + 112*t]];
R[i + 1177*t] = Op[i + 113*t] ? R[B[i + 113*t]] * R[C[i + 113*t]] : R[B[i + 113*t]] + R[C[i + 113*t]];
R[i + 1178*t] = Op[i + 114*t] ? R[B[i + 114*t]] * R[C[i + 114*t]] : R[B[i + 114*t]] + R[C[i + 114*t]];
R[i + 1179*t] = Op[i + 115*t] ? R[B[i + 115*t]] * R[C[i + 115*t]] : R[B[i + 115*t]] + R[C[i + 115*t]];
R[i + 1180*t] = Op[i + 116*t] ? R[B[i + 116*t]] * R[C[i + 116*t]] : R[B[i + 116*t]] + R[C[i + 116*t]];
R[i + 1181*t] = Op[i + 117*t] ? R[B[i + 117*t]] * R[C[i + 117*t]] : R[B[i + 117*t]] + R[C[i + 117*t]];
R[i + 1182*t] = Op[i + 118*t] ? R[B[i + 118*t]] * R[C[i + 118*t]] : R[B[i + 118*t]] + R[C[i + 118*t]];
R[i + 1183*t] = Op[i + 119*t] ? R[B[i + 119*t]] * R[C[i + 119*t]] : R[B[i + 119*t]] + R[C[i + 119*t]];
R[i + 1184*t] = Op[i + 120*t] ? R[B[i + 120*t]] * R[C[i + 120*t]] : R[B[i + 120*t]] + R[C[i + 120*t]];
R[i + 1185*t] = Op[i + 121*t] ? R[B[i + 121*t]] * R[C[i + 121*t]] : R[B[i + 121*t]] + R[C[i + 121*t]];
R[i + 1186*t] = Op[i + 122*t] ? R[B[i + 122*t]] * R[C[i + 122*t]] : R[B[i + 122*t]] + R[C[i + 122*t]];
R[i + 1187*t] = Op[i + 123*t] ? R[B[i + 123*t]] * R[C[i + 123*t]] : R[B[i + 123*t]] + R[C[i + 123*t]];
R[i + 1188*t] = Op[i + 124*t] ? R[B[i + 124*t]] * R[C[i + 124*t]] : R[B[i + 124*t]] + R[C[i + 124*t]];
R[i + 1189*t] = Op[i + 125*t] ? R[B[i + 125*t]] * R[C[i + 125*t]] : R[B[i + 125*t]] + R[C[i + 125*t]];
R[i + 1190*t] = Op[i + 126*t] ? R[B[i + 126*t]] * R[C[i + 126*t]] : R[B[i + 126*t]] + R[C[i + 126*t]];
R[i + 1191*t] = Op[i + 127*t] ? R[B[i + 127*t]] * R[C[i + 127*t]] : R[B[i + 127*t]] + R[C[i + 127*t]];
R[i + 1192*t] = Op[i + 128*t] ? R[B[i + 128*t]] * R[C[i + 128*t]] : R[B[i + 128*t]] + R[C[i + 128*t]];
R[i + 1193*t] = Op[i + 129*t] ? R[B[i + 129*t]] * R[C[i + 129*t]] : R[B[i + 129*t]] + R[C[i + 129*t]];
R[i + 1194*t] = Op[i + 130*t] ? R[B[i + 130*t]] * R[C[i + 130*t]] : R[B[i + 130*t]] + R[C[i + 130*t]];
R[i + 1195*t] = Op[i + 131*t] ? R[B[i + 131*t]] * R[C[i + 131*t]] : R[B[i + 131*t]] + R[C[i + 131*t]];
R[i + 1196*t] = Op[i + 132*t] ? R[B[i + 132*t]] * R[C[i + 132*t]] : R[B[i + 132*t]] + R[C[i + 132*t]];
R[i + 1197*t] = Op[i + 133*t] ? R[B[i + 133*t]] * R[C[i + 133*t]] : R[B[i + 133*t]] + R[C[i + 133*t]];
R[i + 1198*t] = Op[i + 134*t] ? R[B[i + 134*t]] * R[C[i + 134*t]] : R[B[i + 134*t]] + R[C[i + 134*t]];
R[i + 1199*t] = Op[i + 135*t] ? R[B[i + 135*t]] * R[C[i + 135*t]] : R[B[i + 135*t]] + R[C[i + 135*t]];
R[i + 1200*t] = Op[i + 136*t] ? R[B[i + 136*t]] * R[C[i + 136*t]] : R[B[i + 136*t]] + R[C[i + 136*t]];
R[i + 1201*t] = Op[i + 137*t] ? R[B[i + 137*t]] * R[C[i + 137*t]] : R[B[i + 137*t]] + R[C[i + 137*t]];
R[i + 1202*t] = Op[i + 138*t] ? R[B[i + 138*t]] * R[C[i + 138*t]] : R[B[i + 138*t]] + R[C[i + 138*t]];
R[i + 1203*t] = Op[i + 139*t] ? R[B[i + 139*t]] * R[C[i + 139*t]] : R[B[i + 139*t]] + R[C[i + 139*t]];
R[i + 1204*t] = Op[i + 140*t] ? R[B[i + 140*t]] * R[C[i + 140*t]] : R[B[i + 140*t]] + R[C[i + 140*t]];
R[i + 1205*t] = Op[i + 141*t] ? R[B[i + 141*t]] * R[C[i + 141*t]] : R[B[i + 141*t]] + R[C[i + 141*t]];
R[i + 1206*t] = Op[i + 142*t] ? R[B[i + 142*t]] * R[C[i + 142*t]] : R[B[i + 142*t]] + R[C[i + 142*t]];
R[i + 1207*t] = Op[i + 143*t] ? R[B[i + 143*t]] * R[C[i + 143*t]] : R[B[i + 143*t]] + R[C[i + 143*t]];
R[i + 1208*t] = Op[i + 144*t] ? R[B[i + 144*t]] * R[C[i + 144*t]] : R[B[i + 144*t]] + R[C[i + 144*t]];
R[i + 1209*t] = Op[i + 145*t] ? R[B[i + 145*t]] * R[C[i + 145*t]] : R[B[i + 145*t]] + R[C[i + 145*t]];
R[i + 1210*t] = Op[i + 146*t] ? R[B[i + 146*t]] * R[C[i + 146*t]] : R[B[i + 146*t]] + R[C[i + 146*t]];
R[i + 1211*t] = Op[i + 147*t] ? R[B[i + 147*t]] * R[C[i + 147*t]] : R[B[i + 147*t]] + R[C[i + 147*t]];
R[i + 1212*t] = Op[i + 148*t] ? R[B[i + 148*t]] * R[C[i + 148*t]] : R[B[i + 148*t]] + R[C[i + 148*t]];
R[i + 1213*t] = Op[i + 149*t] ? R[B[i + 149*t]] * R[C[i + 149*t]] : R[B[i + 149*t]] + R[C[i + 149*t]];
R[i + 1214*t] = Op[i + 150*t] ? R[B[i + 150*t]] * R[C[i + 150*t]] : R[B[i + 150*t]] + R[C[i + 150*t]];
R[i + 1215*t] = Op[i + 151*t] ? R[B[i + 151*t]] * R[C[i + 151*t]] : R[B[i + 151*t]] + R[C[i + 151*t]];
R[i + 1216*t] = Op[i + 152*t] ? R[B[i + 152*t]] * R[C[i + 152*t]] : R[B[i + 152*t]] + R[C[i + 152*t]];
R[i + 1217*t] = Op[i + 153*t] ? R[B[i + 153*t]] * R[C[i + 153*t]] : R[B[i + 153*t]] + R[C[i + 153*t]];
R[i + 1218*t] = Op[i + 154*t] ? R[B[i + 154*t]] * R[C[i + 154*t]] : R[B[i + 154*t]] + R[C[i + 154*t]];
R[i + 1219*t] = Op[i + 155*t] ? R[B[i + 155*t]] * R[C[i + 155*t]] : R[B[i + 155*t]] + R[C[i + 155*t]];
R[i + 1220*t] = Op[i + 156*t] ? R[B[i + 156*t]] * R[C[i + 156*t]] : R[B[i + 156*t]] + R[C[i + 156*t]];
R[i + 1221*t] = Op[i + 157*t] ? R[B[i + 157*t]] * R[C[i + 157*t]] : R[B[i + 157*t]] + R[C[i + 157*t]];
R[i + 1222*t] = Op[i + 158*t] ? R[B[i + 158*t]] * R[C[i + 158*t]] : R[B[i + 158*t]] + R[C[i + 158*t]];
R[i + 1223*t] = Op[i + 159*t] ? R[B[i + 159*t]] * R[C[i + 159*t]] : R[B[i + 159*t]] + R[C[i + 159*t]];
R[i + 1224*t] = Op[i + 160*t] ? R[B[i + 160*t]] * R[C[i + 160*t]] : R[B[i + 160*t]] + R[C[i + 160*t]];
R[i + 1225*t] = Op[i + 161*t] ? R[B[i + 161*t]] * R[C[i + 161*t]] : R[B[i + 161*t]] + R[C[i + 161*t]];
R[i + 1226*t] = Op[i + 162*t] ? R[B[i + 162*t]] * R[C[i + 162*t]] : R[B[i + 162*t]] + R[C[i + 162*t]];
R[i + 1227*t] = Op[i + 163*t] ? R[B[i + 163*t]] * R[C[i + 163*t]] : R[B[i + 163*t]] + R[C[i + 163*t]];
R[i + 1228*t] = Op[i + 164*t] ? R[B[i + 164*t]] * R[C[i + 164*t]] : R[B[i + 164*t]] + R[C[i + 164*t]];
R[i + 1229*t] = Op[i + 165*t] ? R[B[i + 165*t]] * R[C[i + 165*t]] : R[B[i + 165*t]] + R[C[i + 165*t]];
R[i + 1230*t] = Op[i + 166*t] ? R[B[i + 166*t]] * R[C[i + 166*t]] : R[B[i + 166*t]] + R[C[i + 166*t]];
R[i + 1231*t] = Op[i + 167*t] ? R[B[i + 167*t]] * R[C[i + 167*t]] : R[B[i + 167*t]] + R[C[i + 167*t]];
R[i + 1232*t] = Op[i + 168*t] ? R[B[i + 168*t]] * R[C[i + 168*t]] : R[B[i + 168*t]] + R[C[i + 168*t]];
R[i + 1233*t] = Op[i + 169*t] ? R[B[i + 169*t]] * R[C[i + 169*t]] : R[B[i + 169*t]] + R[C[i + 169*t]];
R[i + 1234*t] = Op[i + 170*t] ? R[B[i + 170*t]] * R[C[i + 170*t]] : R[B[i + 170*t]] + R[C[i + 170*t]];
R[i + 1235*t] = Op[i + 171*t] ? R[B[i + 171*t]] * R[C[i + 171*t]] : R[B[i + 171*t]] + R[C[i + 171*t]];
R[i + 1236*t] = Op[i + 172*t] ? R[B[i + 172*t]] * R[C[i + 172*t]] : R[B[i + 172*t]] + R[C[i + 172*t]];
R[i + 1237*t] = Op[i + 173*t] ? R[B[i + 173*t]] * R[C[i + 173*t]] : R[B[i + 173*t]] + R[C[i + 173*t]];
R[i + 1238*t] = Op[i + 174*t] ? R[B[i + 174*t]] * R[C[i + 174*t]] : R[B[i + 174*t]] + R[C[i + 174*t]];
R[i + 1239*t] = Op[i + 175*t] ? R[B[i + 175*t]] * R[C[i + 175*t]] : R[B[i + 175*t]] + R[C[i + 175*t]];
R[i + 1240*t] = Op[i + 176*t] ? R[B[i + 176*t]] * R[C[i + 176*t]] : R[B[i + 176*t]] + R[C[i + 176*t]];
R[i + 1241*t] = Op[i + 177*t] ? R[B[i + 177*t]] * R[C[i + 177*t]] : R[B[i + 177*t]] + R[C[i + 177*t]];
R[i + 1242*t] = Op[i + 178*t] ? R[B[i + 178*t]] * R[C[i + 178*t]] : R[B[i + 178*t]] + R[C[i + 178*t]];
R[i + 1243*t] = Op[i + 179*t] ? R[B[i + 179*t]] * R[C[i + 179*t]] : R[B[i + 179*t]] + R[C[i + 179*t]];
R[i + 1244*t] = Op[i + 180*t] ? R[B[i + 180*t]] * R[C[i + 180*t]] : R[B[i + 180*t]] + R[C[i + 180*t]];
R[i + 1245*t] = Op[i + 181*t] ? R[B[i + 181*t]] * R[C[i + 181*t]] : R[B[i + 181*t]] + R[C[i + 181*t]];
R[i + 1246*t] = Op[i + 182*t] ? R[B[i + 182*t]] * R[C[i + 182*t]] : R[B[i + 182*t]] + R[C[i + 182*t]];
R[i + 1247*t] = Op[i + 183*t] ? R[B[i + 183*t]] * R[C[i + 183*t]] : R[B[i + 183*t]] + R[C[i + 183*t]];
R[i + 1248*t] = Op[i + 184*t] ? R[B[i + 184*t]] * R[C[i + 184*t]] : R[B[i + 184*t]] + R[C[i + 184*t]];
R[i + 1249*t] = Op[i + 185*t] ? R[B[i + 185*t]] * R[C[i + 185*t]] : R[B[i + 185*t]] + R[C[i + 185*t]];
R[i + 1250*t] = Op[i + 186*t] ? R[B[i + 186*t]] * R[C[i + 186*t]] : R[B[i + 186*t]] + R[C[i + 186*t]];
R[i + 1251*t] = Op[i + 187*t] ? R[B[i + 187*t]] * R[C[i + 187*t]] : R[B[i + 187*t]] + R[C[i + 187*t]];
R[i + 1252*t] = Op[i + 188*t] ? R[B[i + 188*t]] * R[C[i + 188*t]] : R[B[i + 188*t]] + R[C[i + 188*t]];
R[i + 1253*t] = Op[i + 189*t] ? R[B[i + 189*t]] * R[C[i + 189*t]] : R[B[i + 189*t]] + R[C[i + 189*t]];
R[i + 1254*t] = Op[i + 190*t] ? R[B[i + 190*t]] * R[C[i + 190*t]] : R[B[i + 190*t]] + R[C[i + 190*t]];
R[i + 1255*t] = Op[i + 191*t] ? R[B[i + 191*t]] * R[C[i + 191*t]] : R[B[i + 191*t]] + R[C[i + 191*t]];
R[i + 1256*t] = Op[i + 192*t] ? R[B[i + 192*t]] * R[C[i + 192*t]] : R[B[i + 192*t]] + R[C[i + 192*t]];
R[i + 1257*t] = Op[i + 193*t] ? R[B[i + 193*t]] * R[C[i + 193*t]] : R[B[i + 193*t]] + R[C[i + 193*t]];
R[i + 1258*t] = Op[i + 194*t] ? R[B[i + 194*t]] * R[C[i + 194*t]] : R[B[i + 194*t]] + R[C[i + 194*t]];
R[i + 1259*t] = Op[i + 195*t] ? R[B[i + 195*t]] * R[C[i + 195*t]] : R[B[i + 195*t]] + R[C[i + 195*t]];
R[i + 1260*t] = Op[i + 196*t] ? R[B[i + 196*t]] * R[C[i + 196*t]] : R[B[i + 196*t]] + R[C[i + 196*t]];
R[i + 1261*t] = Op[i + 197*t] ? R[B[i + 197*t]] * R[C[i + 197*t]] : R[B[i + 197*t]] + R[C[i + 197*t]];
R[i + 1262*t] = Op[i + 198*t] ? R[B[i + 198*t]] * R[C[i + 198*t]] : R[B[i + 198*t]] + R[C[i + 198*t]];
R[i + 1263*t] = Op[i + 199*t] ? R[B[i + 199*t]] * R[C[i + 199*t]] : R[B[i + 199*t]] + R[C[i + 199*t]];
R[i + 1264*t] = Op[i + 200*t] ? R[B[i + 200*t]] * R[C[i + 200*t]] : R[B[i + 200*t]] + R[C[i + 200*t]];
R[i + 1265*t] = Op[i + 201*t] ? R[B[i + 201*t]] * R[C[i + 201*t]] : R[B[i + 201*t]] + R[C[i + 201*t]];
R[i + 1266*t] = Op[i + 202*t] ? R[B[i + 202*t]] * R[C[i + 202*t]] : R[B[i + 202*t]] + R[C[i + 202*t]];
R[i + 1267*t] = Op[i + 203*t] ? R[B[i + 203*t]] * R[C[i + 203*t]] : R[B[i + 203*t]] + R[C[i + 203*t]];
R[i + 1268*t] = Op[i + 204*t] ? R[B[i + 204*t]] * R[C[i + 204*t]] : R[B[i + 204*t]] + R[C[i + 204*t]];
R[i + 1269*t] = Op[i + 205*t] ? R[B[i + 205*t]] * R[C[i + 205*t]] : R[B[i + 205*t]] + R[C[i + 205*t]];
R[i + 1270*t] = Op[i + 206*t] ? R[B[i + 206*t]] * R[C[i + 206*t]] : R[B[i + 206*t]] + R[C[i + 206*t]];
R[i + 1271*t] = Op[i + 207*t] ? R[B[i + 207*t]] * R[C[i + 207*t]] : R[B[i + 207*t]] + R[C[i + 207*t]];
R[i + 1272*t] = Op[i + 208*t] ? R[B[i + 208*t]] * R[C[i + 208*t]] : R[B[i + 208*t]] + R[C[i + 208*t]];
R[i + 1273*t] = Op[i + 209*t] ? R[B[i + 209*t]] * R[C[i + 209*t]] : R[B[i + 209*t]] + R[C[i + 209*t]];
R[i + 1274*t] = Op[i + 210*t] ? R[B[i + 210*t]] * R[C[i + 210*t]] : R[B[i + 210*t]] + R[C[i + 210*t]];
R[i + 1275*t] = Op[i + 211*t] ? R[B[i + 211*t]] * R[C[i + 211*t]] : R[B[i + 211*t]] + R[C[i + 211*t]];
R[i + 1276*t] = Op[i + 212*t] ? R[B[i + 212*t]] * R[C[i + 212*t]] : R[B[i + 212*t]] + R[C[i + 212*t]];
R[i + 1277*t] = Op[i + 213*t] ? R[B[i + 213*t]] * R[C[i + 213*t]] : R[B[i + 213*t]] + R[C[i + 213*t]];
R[i + 1278*t] = Op[i + 214*t] ? R[B[i + 214*t]] * R[C[i + 214*t]] : R[B[i + 214*t]] + R[C[i + 214*t]];
R[i + 1279*t] = Op[i + 215*t] ? R[B[i + 215*t]] * R[C[i + 215*t]] : R[B[i + 215*t]] + R[C[i + 215*t]];
R[i + 1280*t] = Op[i + 216*t] ? R[B[i + 216*t]] * R[C[i + 216*t]] : R[B[i + 216*t]] + R[C[i + 216*t]];
R[i + 1281*t] = Op[i + 217*t] ? R[B[i + 217*t]] * R[C[i + 217*t]] : R[B[i + 217*t]] + R[C[i + 217*t]];
R[i + 1282*t] = Op[i + 218*t] ? R[B[i + 218*t]] * R[C[i + 218*t]] : R[B[i + 218*t]] + R[C[i + 218*t]];
R[i + 1283*t] = Op[i + 219*t] ? R[B[i + 219*t]] * R[C[i + 219*t]] : R[B[i + 219*t]] + R[C[i + 219*t]];
R[i + 1284*t] = Op[i + 220*t] ? R[B[i + 220*t]] * R[C[i + 220*t]] : R[B[i + 220*t]] + R[C[i + 220*t]];
R[i + 1285*t] = Op[i + 221*t] ? R[B[i + 221*t]] * R[C[i + 221*t]] : R[B[i + 221*t]] + R[C[i + 221*t]];
R[i + 1286*t] = Op[i + 222*t] ? R[B[i + 222*t]] * R[C[i + 222*t]] : R[B[i + 222*t]] + R[C[i + 222*t]];
R[i + 1287*t] = Op[i + 223*t] ? R[B[i + 223*t]] * R[C[i + 223*t]] : R[B[i + 223*t]] + R[C[i + 223*t]];
R[i + 1288*t] = Op[i + 224*t] ? R[B[i + 224*t]] * R[C[i + 224*t]] : R[B[i + 224*t]] + R[C[i + 224*t]];
R[i + 1289*t] = Op[i + 225*t] ? R[B[i + 225*t]] * R[C[i + 225*t]] : R[B[i + 225*t]] + R[C[i + 225*t]];
R[i + 1290*t] = Op[i + 226*t] ? R[B[i + 226*t]] * R[C[i + 226*t]] : R[B[i + 226*t]] + R[C[i + 226*t]];
R[i + 1291*t] = Op[i + 227*t] ? R[B[i + 227*t]] * R[C[i + 227*t]] : R[B[i + 227*t]] + R[C[i + 227*t]];
R[i + 1292*t] = Op[i + 228*t] ? R[B[i + 228*t]] * R[C[i + 228*t]] : R[B[i + 228*t]] + R[C[i + 228*t]];
R[i + 1293*t] = Op[i + 229*t] ? R[B[i + 229*t]] * R[C[i + 229*t]] : R[B[i + 229*t]] + R[C[i + 229*t]];
R[i + 1294*t] = Op[i + 230*t] ? R[B[i + 230*t]] * R[C[i + 230*t]] : R[B[i + 230*t]] + R[C[i + 230*t]];
R[i + 1295*t] = Op[i + 231*t] ? R[B[i + 231*t]] * R[C[i + 231*t]] : R[B[i + 231*t]] + R[C[i + 231*t]];
R[i + 1296*t] = Op[i + 232*t] ? R[B[i + 232*t]] * R[C[i + 232*t]] : R[B[i + 232*t]] + R[C[i + 232*t]];
R[i + 1297*t] = Op[i + 233*t] ? R[B[i + 233*t]] * R[C[i + 233*t]] : R[B[i + 233*t]] + R[C[i + 233*t]];
R[i + 1298*t] = Op[i + 234*t] ? R[B[i + 234*t]] * R[C[i + 234*t]] : R[B[i + 234*t]] + R[C[i + 234*t]];
R[i + 1299*t] = Op[i + 235*t] ? R[B[i + 235*t]] * R[C[i + 235*t]] : R[B[i + 235*t]] + R[C[i + 235*t]];
R[i + 1300*t] = Op[i + 236*t] ? R[B[i + 236*t]] * R[C[i + 236*t]] : R[B[i + 236*t]] + R[C[i + 236*t]];
R[i + 1301*t] = Op[i + 237*t] ? R[B[i + 237*t]] * R[C[i + 237*t]] : R[B[i + 237*t]] + R[C[i + 237*t]];
R[i + 1302*t] = Op[i + 238*t] ? R[B[i + 238*t]] * R[C[i + 238*t]] : R[B[i + 238*t]] + R[C[i + 238*t]];
R[i + 1303*t] = Op[i + 239*t] ? R[B[i + 239*t]] * R[C[i + 239*t]] : R[B[i + 239*t]] + R[C[i + 239*t]];
R[i + 1304*t] = Op[i + 240*t] ? R[B[i + 240*t]] * R[C[i + 240*t]] : R[B[i + 240*t]] + R[C[i + 240*t]];
R[i + 1305*t] = Op[i + 241*t] ? R[B[i + 241*t]] * R[C[i + 241*t]] : R[B[i + 241*t]] + R[C[i + 241*t]];
R[i + 1306*t] = Op[i + 242*t] ? R[B[i + 242*t]] * R[C[i + 242*t]] : R[B[i + 242*t]] + R[C[i + 242*t]];
R[i + 1307*t] = Op[i + 243*t] ? R[B[i + 243*t]] * R[C[i + 243*t]] : R[B[i + 243*t]] + R[C[i + 243*t]];
R[i + 1308*t] = Op[i + 244*t] ? R[B[i + 244*t]] * R[C[i + 244*t]] : R[B[i + 244*t]] + R[C[i + 244*t]];
R[i + 1309*t] = Op[i + 245*t] ? R[B[i + 245*t]] * R[C[i + 245*t]] : R[B[i + 245*t]] + R[C[i + 245*t]];
R[i + 1310*t] = Op[i + 246*t] ? R[B[i + 246*t]] * R[C[i + 246*t]] : R[B[i + 246*t]] + R[C[i + 246*t]];
R[i + 1311*t] = Op[i + 247*t] ? R[B[i + 247*t]] * R[C[i + 247*t]] : R[B[i + 247*t]] + R[C[i + 247*t]];
R[i + 1312*t] = Op[i + 248*t] ? R[B[i + 248*t]] * R[C[i + 248*t]] : R[B[i + 248*t]] + R[C[i + 248*t]];
R[i + 1313*t] = Op[i + 249*t] ? R[B[i + 249*t]] * R[C[i + 249*t]] : R[B[i + 249*t]] + R[C[i + 249*t]];
R[i + 1314*t] = Op[i + 250*t] ? R[B[i + 250*t]] * R[C[i + 250*t]] : R[B[i + 250*t]] + R[C[i + 250*t]];
R[i + 1315*t] = Op[i + 251*t] ? R[B[i + 251*t]] * R[C[i + 251*t]] : R[B[i + 251*t]] + R[C[i + 251*t]];
R[i + 1316*t] = Op[i + 252*t] ? R[B[i + 252*t]] * R[C[i + 252*t]] : R[B[i + 252*t]] + R[C[i + 252*t]];
R[i + 1317*t] = Op[i + 253*t] ? R[B[i + 253*t]] * R[C[i + 253*t]] : R[B[i + 253*t]] + R[C[i + 253*t]];
R[i + 1318*t] = Op[i + 254*t] ? R[B[i + 254*t]] * R[C[i + 254*t]] : R[B[i + 254*t]] + R[C[i + 254*t]];
R[i + 1319*t] = Op[i + 255*t] ? R[B[i + 255*t]] * R[C[i + 255*t]] : R[B[i + 255*t]] + R[C[i + 255*t]];
R[i + 1320*t] = Op[i + 256*t] ? R[B[i + 256*t]] * R[C[i + 256*t]] : R[B[i + 256*t]] + R[C[i + 256*t]];
R[i + 1321*t] = Op[i + 257*t] ? R[B[i + 257*t]] * R[C[i + 257*t]] : R[B[i + 257*t]] + R[C[i + 257*t]];
R[i + 1322*t] = Op[i + 258*t] ? R[B[i + 258*t]] * R[C[i + 258*t]] : R[B[i + 258*t]] + R[C[i + 258*t]];
R[i + 1323*t] = Op[i + 259*t] ? R[B[i + 259*t]] * R[C[i + 259*t]] : R[B[i + 259*t]] + R[C[i + 259*t]];
R[i + 1324*t] = Op[i + 260*t] ? R[B[i + 260*t]] * R[C[i + 260*t]] : R[B[i + 260*t]] + R[C[i + 260*t]];
R[i + 1325*t] = Op[i + 261*t] ? R[B[i + 261*t]] * R[C[i + 261*t]] : R[B[i + 261*t]] + R[C[i + 261*t]];
R[i + 1326*t] = Op[i + 262*t] ? R[B[i + 262*t]] * R[C[i + 262*t]] : R[B[i + 262*t]] + R[C[i + 262*t]];
R[i + 1327*t] = Op[i + 263*t] ? R[B[i + 263*t]] * R[C[i + 263*t]] : R[B[i + 263*t]] + R[C[i + 263*t]];
R[i + 1328*t] = Op[i + 264*t] ? R[B[i + 264*t]] * R[C[i + 264*t]] : R[B[i + 264*t]] + R[C[i + 264*t]];
R[i + 1329*t] = Op[i + 265*t] ? R[B[i + 265*t]] * R[C[i + 265*t]] : R[B[i + 265*t]] + R[C[i + 265*t]];
R[i + 1330*t] = Op[i + 266*t] ? R[B[i + 266*t]] * R[C[i + 266*t]] : R[B[i + 266*t]] + R[C[i + 266*t]];
R[i + 1331*t] = Op[i + 267*t] ? R[B[i + 267*t]] * R[C[i + 267*t]] : R[B[i + 267*t]] + R[C[i + 267*t]];
R[i + 1332*t] = Op[i + 268*t] ? R[B[i + 268*t]] * R[C[i + 268*t]] : R[B[i + 268*t]] + R[C[i + 268*t]];
R[i + 1333*t] = Op[i + 269*t] ? R[B[i + 269*t]] * R[C[i + 269*t]] : R[B[i + 269*t]] + R[C[i + 269*t]];
R[i + 1334*t] = Op[i + 270*t] ? R[B[i + 270*t]] * R[C[i + 270*t]] : R[B[i + 270*t]] + R[C[i + 270*t]];
R[i + 1335*t] = Op[i + 271*t] ? R[B[i + 271*t]] * R[C[i + 271*t]] : R[B[i + 271*t]] + R[C[i + 271*t]];
R[i + 1336*t] = Op[i + 272*t] ? R[B[i + 272*t]] * R[C[i + 272*t]] : R[B[i + 272*t]] + R[C[i + 272*t]];
R[i + 1337*t] = Op[i + 273*t] ? R[B[i + 273*t]] * R[C[i + 273*t]] : R[B[i + 273*t]] + R[C[i + 273*t]];
R[i + 1338*t] = Op[i + 274*t] ? R[B[i + 274*t]] * R[C[i + 274*t]] : R[B[i + 274*t]] + R[C[i + 274*t]];
R[i + 1339*t] = Op[i + 275*t] ? R[B[i + 275*t]] * R[C[i + 275*t]] : R[B[i + 275*t]] + R[C[i + 275*t]];
R[i + 1340*t] = Op[i + 276*t] ? R[B[i + 276*t]] * R[C[i + 276*t]] : R[B[i + 276*t]] + R[C[i + 276*t]];
R[i + 1341*t] = Op[i + 277*t] ? R[B[i + 277*t]] * R[C[i + 277*t]] : R[B[i + 277*t]] + R[C[i + 277*t]];
R[i + 1342*t] = Op[i + 278*t] ? R[B[i + 278*t]] * R[C[i + 278*t]] : R[B[i + 278*t]] + R[C[i + 278*t]];
R[i + 1343*t] = Op[i + 279*t] ? R[B[i + 279*t]] * R[C[i + 279*t]] : R[B[i + 279*t]] + R[C[i + 279*t]];
R[i + 1344*t] = Op[i + 280*t] ? R[B[i + 280*t]] * R[C[i + 280*t]] : R[B[i + 280*t]] + R[C[i + 280*t]];
R[i + 1345*t] = Op[i + 281*t] ? R[B[i + 281*t]] * R[C[i + 281*t]] : R[B[i + 281*t]] + R[C[i + 281*t]];
R[i + 1346*t] = Op[i + 282*t] ? R[B[i + 282*t]] * R[C[i + 282*t]] : R[B[i + 282*t]] + R[C[i + 282*t]];
R[i + 1347*t] = Op[i + 283*t] ? R[B[i + 283*t]] * R[C[i + 283*t]] : R[B[i + 283*t]] + R[C[i + 283*t]];
R[i + 1348*t] = Op[i + 284*t] ? R[B[i + 284*t]] * R[C[i + 284*t]] : R[B[i + 284*t]] + R[C[i + 284*t]];
R[i + 1349*t] = Op[i + 285*t] ? R[B[i + 285*t]] * R[C[i + 285*t]] : R[B[i + 285*t]] + R[C[i + 285*t]];
R[i + 1350*t] = Op[i + 286*t] ? R[B[i + 286*t]] * R[C[i + 286*t]] : R[B[i + 286*t]] + R[C[i + 286*t]];
R[i + 1351*t] = Op[i + 287*t] ? R[B[i + 287*t]] * R[C[i + 287*t]] : R[B[i + 287*t]] + R[C[i + 287*t]];
R[i + 1352*t] = Op[i + 288*t] ? R[B[i + 288*t]] * R[C[i + 288*t]] : R[B[i + 288*t]] + R[C[i + 288*t]];
R[i + 1353*t] = Op[i + 289*t] ? R[B[i + 289*t]] * R[C[i + 289*t]] : R[B[i + 289*t]] + R[C[i + 289*t]];
R[i + 1354*t] = Op[i + 290*t] ? R[B[i + 290*t]] * R[C[i + 290*t]] : R[B[i + 290*t]] + R[C[i + 290*t]];
R[i + 1355*t] = Op[i + 291*t] ? R[B[i + 291*t]] * R[C[i + 291*t]] : R[B[i + 291*t]] + R[C[i + 291*t]];
R[i + 1356*t] = Op[i + 292*t] ? R[B[i + 292*t]] * R[C[i + 292*t]] : R[B[i + 292*t]] + R[C[i + 292*t]];
R[i + 1357*t] = Op[i + 293*t] ? R[B[i + 293*t]] * R[C[i + 293*t]] : R[B[i + 293*t]] + R[C[i + 293*t]];
R[i + 1358*t] = Op[i + 294*t] ? R[B[i + 294*t]] * R[C[i + 294*t]] : R[B[i + 294*t]] + R[C[i + 294*t]];
R[i + 1359*t] = Op[i + 295*t] ? R[B[i + 295*t]] * R[C[i + 295*t]] : R[B[i + 295*t]] + R[C[i + 295*t]];
R[i + 1360*t] = Op[i + 296*t] ? R[B[i + 296*t]] * R[C[i + 296*t]] : R[B[i + 296*t]] + R[C[i + 296*t]];
R[i + 1361*t] = Op[i + 297*t] ? R[B[i + 297*t]] * R[C[i + 297*t]] : R[B[i + 297*t]] + R[C[i + 297*t]];
R[i + 1362*t] = Op[i + 298*t] ? R[B[i + 298*t]] * R[C[i + 298*t]] : R[B[i + 298*t]] + R[C[i + 298*t]];
R[i + 1363*t] = Op[i + 299*t] ? R[B[i + 299*t]] * R[C[i + 299*t]] : R[B[i + 299*t]] + R[C[i + 299*t]];
R[i + 1364*t] = Op[i + 300*t] ? R[B[i + 300*t]] * R[C[i + 300*t]] : R[B[i + 300*t]] + R[C[i + 300*t]];
R[i + 1365*t] = Op[i + 301*t] ? R[B[i + 301*t]] * R[C[i + 301*t]] : R[B[i + 301*t]] + R[C[i + 301*t]];
R[i + 1366*t] = Op[i + 302*t] ? R[B[i + 302*t]] * R[C[i + 302*t]] : R[B[i + 302*t]] + R[C[i + 302*t]];
R[i + 1367*t] = Op[i + 303*t] ? R[B[i + 303*t]] * R[C[i + 303*t]] : R[B[i + 303*t]] + R[C[i + 303*t]];
R[i + 1368*t] = Op[i + 304*t] ? R[B[i + 304*t]] * R[C[i + 304*t]] : R[B[i + 304*t]] + R[C[i + 304*t]];
// Evaluation level for operand slots k = 305..333: each result slot
// R[i + (k + 1064)*t] combines two previously computed values,
// R[B[i + k*t]] and R[C[i + k*t]], using Op[i + k*t] as the selector
// (nonzero -> multiply, zero -> add). The destination offset is always
// the operand offset plus 1064, which is what lets this unrolled run
// collapse into a single loop.
// NOTE(review): assumes B/C entries for this level only reference slots
// finalized before the preceding barrier — confirm against the generator.
#pragma unroll
for (int k = 305; k <= 333; ++k) {
    R[i + (k + 1064)*t] = Op[i + k*t] ? R[B[i + k*t]] * R[C[i + k*t]]
                                      : R[B[i + k*t]] + R[C[i + k*t]];
}
// Block-wide barrier: results 1369..1397 must be visible to all threads
// before the next level reads them. Kept outside any divergent branch.
__syncthreads();
// Evaluation level for operand slots k = 334..518: result slot
// R[i + (k + 1064)*t] is R[B[i + k*t]] * R[C[i + k*t]] when Op[i + k*t]
// is nonzero, otherwise the sum of the same two operands. The constant
// 1064 offset between operand index and destination index holds for
// every statement in the original unrolled run, so the loop below is a
// statement-for-statement equivalent of it.
// NOTE(review): assumes B/C entries for this level only reference slots
// finalized before the preceding barrier — confirm against the generator.
#pragma unroll
for (int k = 334; k <= 518; ++k) {
    R[i + (k + 1064)*t] = Op[i + k*t] ? R[B[i + k*t]] * R[C[i + k*t]]
                                      : R[B[i + k*t]] + R[C[i + k*t]];
}
// Block-wide barrier: results 1398..1582 must be visible to all threads
// before the next level reads them. Kept outside any divergent branch.
__syncthreads();
// Evaluation level for operand slots k = 519..655: result slot
// R[i + (k + 1064)*t] applies Op[i + k*t] (nonzero -> multiply,
// zero -> add) to R[B[i + k*t]] and R[C[i + k*t]]. Same shape as the
// earlier levels; only the index range differs. No trailing
// __syncthreads() here — the original span does not end at a barrier
// (it continues past this chunk), so none is introduced.
// NOTE(review): assumes B/C entries for this level only reference slots
// finalized before the preceding barrier — confirm against the generator.
#pragma unroll
for (int k = 519; k <= 655; ++k) {
    R[i + (k + 1064)*t] = Op[i + k*t] ? R[B[i + k*t]] * R[C[i + k*t]]
                                      : R[B[i + k*t]] + R[C[i + k*t]];
}
R[i + 1720*t] = Op[i + 656*t] ? R[B[i + 656*t]] * R[C[i + 656*t]] : R[B[i + 656*t]] + R[C[i + 656*t]];
R[i + 1721*t] = Op[i + 657*t] ? R[B[i + 657*t]] * R[C[i + 657*t]] : R[B[i + 657*t]] + R[C[i + 657*t]];
R[i + 1722*t] = Op[i + 658*t] ? R[B[i + 658*t]] * R[C[i + 658*t]] : R[B[i + 658*t]] + R[C[i + 658*t]];
R[i + 1723*t] = Op[i + 659*t] ? R[B[i + 659*t]] * R[C[i + 659*t]] : R[B[i + 659*t]] + R[C[i + 659*t]];
R[i + 1724*t] = Op[i + 660*t] ? R[B[i + 660*t]] * R[C[i + 660*t]] : R[B[i + 660*t]] + R[C[i + 660*t]];
R[i + 1725*t] = Op[i + 661*t] ? R[B[i + 661*t]] * R[C[i + 661*t]] : R[B[i + 661*t]] + R[C[i + 661*t]];
R[i + 1726*t] = Op[i + 662*t] ? R[B[i + 662*t]] * R[C[i + 662*t]] : R[B[i + 662*t]] + R[C[i + 662*t]];
R[i + 1727*t] = Op[i + 663*t] ? R[B[i + 663*t]] * R[C[i + 663*t]] : R[B[i + 663*t]] + R[C[i + 663*t]];
R[i + 1728*t] = Op[i + 664*t] ? R[B[i + 664*t]] * R[C[i + 664*t]] : R[B[i + 664*t]] + R[C[i + 664*t]];
R[i + 1729*t] = Op[i + 665*t] ? R[B[i + 665*t]] * R[C[i + 665*t]] : R[B[i + 665*t]] + R[C[i + 665*t]];
R[i + 1730*t] = Op[i + 666*t] ? R[B[i + 666*t]] * R[C[i + 666*t]] : R[B[i + 666*t]] + R[C[i + 666*t]];
R[i + 1731*t] = Op[i + 667*t] ? R[B[i + 667*t]] * R[C[i + 667*t]] : R[B[i + 667*t]] + R[C[i + 667*t]];
R[i + 1732*t] = Op[i + 668*t] ? R[B[i + 668*t]] * R[C[i + 668*t]] : R[B[i + 668*t]] + R[C[i + 668*t]];
R[i + 1733*t] = Op[i + 669*t] ? R[B[i + 669*t]] * R[C[i + 669*t]] : R[B[i + 669*t]] + R[C[i + 669*t]];
R[i + 1734*t] = Op[i + 670*t] ? R[B[i + 670*t]] * R[C[i + 670*t]] : R[B[i + 670*t]] + R[C[i + 670*t]];
R[i + 1735*t] = Op[i + 671*t] ? R[B[i + 671*t]] * R[C[i + 671*t]] : R[B[i + 671*t]] + R[C[i + 671*t]];
R[i + 1736*t] = Op[i + 672*t] ? R[B[i + 672*t]] * R[C[i + 672*t]] : R[B[i + 672*t]] + R[C[i + 672*t]];
R[i + 1737*t] = Op[i + 673*t] ? R[B[i + 673*t]] * R[C[i + 673*t]] : R[B[i + 673*t]] + R[C[i + 673*t]];
R[i + 1738*t] = Op[i + 674*t] ? R[B[i + 674*t]] * R[C[i + 674*t]] : R[B[i + 674*t]] + R[C[i + 674*t]];
R[i + 1739*t] = Op[i + 675*t] ? R[B[i + 675*t]] * R[C[i + 675*t]] : R[B[i + 675*t]] + R[C[i + 675*t]];
R[i + 1740*t] = Op[i + 676*t] ? R[B[i + 676*t]] * R[C[i + 676*t]] : R[B[i + 676*t]] + R[C[i + 676*t]];
R[i + 1741*t] = Op[i + 677*t] ? R[B[i + 677*t]] * R[C[i + 677*t]] : R[B[i + 677*t]] + R[C[i + 677*t]];
R[i + 1742*t] = Op[i + 678*t] ? R[B[i + 678*t]] * R[C[i + 678*t]] : R[B[i + 678*t]] + R[C[i + 678*t]];
R[i + 1743*t] = Op[i + 679*t] ? R[B[i + 679*t]] * R[C[i + 679*t]] : R[B[i + 679*t]] + R[C[i + 679*t]];
R[i + 1744*t] = Op[i + 680*t] ? R[B[i + 680*t]] * R[C[i + 680*t]] : R[B[i + 680*t]] + R[C[i + 680*t]];
R[i + 1745*t] = Op[i + 681*t] ? R[B[i + 681*t]] * R[C[i + 681*t]] : R[B[i + 681*t]] + R[C[i + 681*t]];
R[i + 1746*t] = Op[i + 682*t] ? R[B[i + 682*t]] * R[C[i + 682*t]] : R[B[i + 682*t]] + R[C[i + 682*t]];
R[i + 1747*t] = Op[i + 683*t] ? R[B[i + 683*t]] * R[C[i + 683*t]] : R[B[i + 683*t]] + R[C[i + 683*t]];
R[i + 1748*t] = Op[i + 684*t] ? R[B[i + 684*t]] * R[C[i + 684*t]] : R[B[i + 684*t]] + R[C[i + 684*t]];
R[i + 1749*t] = Op[i + 685*t] ? R[B[i + 685*t]] * R[C[i + 685*t]] : R[B[i + 685*t]] + R[C[i + 685*t]];
R[i + 1750*t] = Op[i + 686*t] ? R[B[i + 686*t]] * R[C[i + 686*t]] : R[B[i + 686*t]] + R[C[i + 686*t]];
R[i + 1751*t] = Op[i + 687*t] ? R[B[i + 687*t]] * R[C[i + 687*t]] : R[B[i + 687*t]] + R[C[i + 687*t]];
R[i + 1752*t] = Op[i + 688*t] ? R[B[i + 688*t]] * R[C[i + 688*t]] : R[B[i + 688*t]] + R[C[i + 688*t]];
R[i + 1753*t] = Op[i + 689*t] ? R[B[i + 689*t]] * R[C[i + 689*t]] : R[B[i + 689*t]] + R[C[i + 689*t]];
R[i + 1754*t] = Op[i + 690*t] ? R[B[i + 690*t]] * R[C[i + 690*t]] : R[B[i + 690*t]] + R[C[i + 690*t]];
R[i + 1755*t] = Op[i + 691*t] ? R[B[i + 691*t]] * R[C[i + 691*t]] : R[B[i + 691*t]] + R[C[i + 691*t]];
R[i + 1756*t] = Op[i + 692*t] ? R[B[i + 692*t]] * R[C[i + 692*t]] : R[B[i + 692*t]] + R[C[i + 692*t]];
R[i + 1757*t] = Op[i + 693*t] ? R[B[i + 693*t]] * R[C[i + 693*t]] : R[B[i + 693*t]] + R[C[i + 693*t]];
R[i + 1758*t] = Op[i + 694*t] ? R[B[i + 694*t]] * R[C[i + 694*t]] : R[B[i + 694*t]] + R[C[i + 694*t]];
R[i + 1759*t] = Op[i + 695*t] ? R[B[i + 695*t]] * R[C[i + 695*t]] : R[B[i + 695*t]] + R[C[i + 695*t]];
R[i + 1760*t] = Op[i + 696*t] ? R[B[i + 696*t]] * R[C[i + 696*t]] : R[B[i + 696*t]] + R[C[i + 696*t]];
R[i + 1761*t] = Op[i + 697*t] ? R[B[i + 697*t]] * R[C[i + 697*t]] : R[B[i + 697*t]] + R[C[i + 697*t]];
R[i + 1762*t] = Op[i + 698*t] ? R[B[i + 698*t]] * R[C[i + 698*t]] : R[B[i + 698*t]] + R[C[i + 698*t]];
R[i + 1763*t] = Op[i + 699*t] ? R[B[i + 699*t]] * R[C[i + 699*t]] : R[B[i + 699*t]] + R[C[i + 699*t]];
R[i + 1764*t] = Op[i + 700*t] ? R[B[i + 700*t]] * R[C[i + 700*t]] : R[B[i + 700*t]] + R[C[i + 700*t]];
R[i + 1765*t] = Op[i + 701*t] ? R[B[i + 701*t]] * R[C[i + 701*t]] : R[B[i + 701*t]] + R[C[i + 701*t]];
R[i + 1766*t] = Op[i + 702*t] ? R[B[i + 702*t]] * R[C[i + 702*t]] : R[B[i + 702*t]] + R[C[i + 702*t]];
R[i + 1767*t] = Op[i + 703*t] ? R[B[i + 703*t]] * R[C[i + 703*t]] : R[B[i + 703*t]] + R[C[i + 703*t]];
R[i + 1768*t] = Op[i + 704*t] ? R[B[i + 704*t]] * R[C[i + 704*t]] : R[B[i + 704*t]] + R[C[i + 704*t]];
R[i + 1769*t] = Op[i + 705*t] ? R[B[i + 705*t]] * R[C[i + 705*t]] : R[B[i + 705*t]] + R[C[i + 705*t]];
R[i + 1770*t] = Op[i + 706*t] ? R[B[i + 706*t]] * R[C[i + 706*t]] : R[B[i + 706*t]] + R[C[i + 706*t]];
R[i + 1771*t] = Op[i + 707*t] ? R[B[i + 707*t]] * R[C[i + 707*t]] : R[B[i + 707*t]] + R[C[i + 707*t]];
// Block-wide barrier: makes all results written above (through R[i + 1771*t])
// visible to every thread in the block before the statements below read them
// via R[B[..]]/R[C[..]].
// NOTE(review): assumes this straight-line region is executed by ALL threads
// of the block (no divergent guard upstream) — __syncthreads() inside a
// divergent branch is undefined; confirm at the kernel entry (not visible here).
__syncthreads();
R[i + 1772*t] = Op[i + 708*t] ? R[B[i + 708*t]] * R[C[i + 708*t]] : R[B[i + 708*t]] + R[C[i + 708*t]];
R[i + 1773*t] = Op[i + 709*t] ? R[B[i + 709*t]] * R[C[i + 709*t]] : R[B[i + 709*t]] + R[C[i + 709*t]];
R[i + 1774*t] = Op[i + 710*t] ? R[B[i + 710*t]] * R[C[i + 710*t]] : R[B[i + 710*t]] + R[C[i + 710*t]];
R[i + 1775*t] = Op[i + 711*t] ? R[B[i + 711*t]] * R[C[i + 711*t]] : R[B[i + 711*t]] + R[C[i + 711*t]];
R[i + 1776*t] = Op[i + 712*t] ? R[B[i + 712*t]] * R[C[i + 712*t]] : R[B[i + 712*t]] + R[C[i + 712*t]];
R[i + 1777*t] = Op[i + 713*t] ? R[B[i + 713*t]] * R[C[i + 713*t]] : R[B[i + 713*t]] + R[C[i + 713*t]];
R[i + 1778*t] = Op[i + 714*t] ? R[B[i + 714*t]] * R[C[i + 714*t]] : R[B[i + 714*t]] + R[C[i + 714*t]];
R[i + 1779*t] = Op[i + 715*t] ? R[B[i + 715*t]] * R[C[i + 715*t]] : R[B[i + 715*t]] + R[C[i + 715*t]];
R[i + 1780*t] = Op[i + 716*t] ? R[B[i + 716*t]] * R[C[i + 716*t]] : R[B[i + 716*t]] + R[C[i + 716*t]];
R[i + 1781*t] = Op[i + 717*t] ? R[B[i + 717*t]] * R[C[i + 717*t]] : R[B[i + 717*t]] + R[C[i + 717*t]];
R[i + 1782*t] = Op[i + 718*t] ? R[B[i + 718*t]] * R[C[i + 718*t]] : R[B[i + 718*t]] + R[C[i + 718*t]];
R[i + 1783*t] = Op[i + 719*t] ? R[B[i + 719*t]] * R[C[i + 719*t]] : R[B[i + 719*t]] + R[C[i + 719*t]];
R[i + 1784*t] = Op[i + 720*t] ? R[B[i + 720*t]] * R[C[i + 720*t]] : R[B[i + 720*t]] + R[C[i + 720*t]];
R[i + 1785*t] = Op[i + 721*t] ? R[B[i + 721*t]] * R[C[i + 721*t]] : R[B[i + 721*t]] + R[C[i + 721*t]];
R[i + 1786*t] = Op[i + 722*t] ? R[B[i + 722*t]] * R[C[i + 722*t]] : R[B[i + 722*t]] + R[C[i + 722*t]];
R[i + 1787*t] = Op[i + 723*t] ? R[B[i + 723*t]] * R[C[i + 723*t]] : R[B[i + 723*t]] + R[C[i + 723*t]];
R[i + 1788*t] = Op[i + 724*t] ? R[B[i + 724*t]] * R[C[i + 724*t]] : R[B[i + 724*t]] + R[C[i + 724*t]];
R[i + 1789*t] = Op[i + 725*t] ? R[B[i + 725*t]] * R[C[i + 725*t]] : R[B[i + 725*t]] + R[C[i + 725*t]];
R[i + 1790*t] = Op[i + 726*t] ? R[B[i + 726*t]] * R[C[i + 726*t]] : R[B[i + 726*t]] + R[C[i + 726*t]];
R[i + 1791*t] = Op[i + 727*t] ? R[B[i + 727*t]] * R[C[i + 727*t]] : R[B[i + 727*t]] + R[C[i + 727*t]];
R[i + 1792*t] = Op[i + 728*t] ? R[B[i + 728*t]] * R[C[i + 728*t]] : R[B[i + 728*t]] + R[C[i + 728*t]];
R[i + 1793*t] = Op[i + 729*t] ? R[B[i + 729*t]] * R[C[i + 729*t]] : R[B[i + 729*t]] + R[C[i + 729*t]];
R[i + 1794*t] = Op[i + 730*t] ? R[B[i + 730*t]] * R[C[i + 730*t]] : R[B[i + 730*t]] + R[C[i + 730*t]];
R[i + 1795*t] = Op[i + 731*t] ? R[B[i + 731*t]] * R[C[i + 731*t]] : R[B[i + 731*t]] + R[C[i + 731*t]];
R[i + 1796*t] = Op[i + 732*t] ? R[B[i + 732*t]] * R[C[i + 732*t]] : R[B[i + 732*t]] + R[C[i + 732*t]];
R[i + 1797*t] = Op[i + 733*t] ? R[B[i + 733*t]] * R[C[i + 733*t]] : R[B[i + 733*t]] + R[C[i + 733*t]];
R[i + 1798*t] = Op[i + 734*t] ? R[B[i + 734*t]] * R[C[i + 734*t]] : R[B[i + 734*t]] + R[C[i + 734*t]];
R[i + 1799*t] = Op[i + 735*t] ? R[B[i + 735*t]] * R[C[i + 735*t]] : R[B[i + 735*t]] + R[C[i + 735*t]];
R[i + 1800*t] = Op[i + 736*t] ? R[B[i + 736*t]] * R[C[i + 736*t]] : R[B[i + 736*t]] + R[C[i + 736*t]];
R[i + 1801*t] = Op[i + 737*t] ? R[B[i + 737*t]] * R[C[i + 737*t]] : R[B[i + 737*t]] + R[C[i + 737*t]];
R[i + 1802*t] = Op[i + 738*t] ? R[B[i + 738*t]] * R[C[i + 738*t]] : R[B[i + 738*t]] + R[C[i + 738*t]];
R[i + 1803*t] = Op[i + 739*t] ? R[B[i + 739*t]] * R[C[i + 739*t]] : R[B[i + 739*t]] + R[C[i + 739*t]];
R[i + 1804*t] = Op[i + 740*t] ? R[B[i + 740*t]] * R[C[i + 740*t]] : R[B[i + 740*t]] + R[C[i + 740*t]];
R[i + 1805*t] = Op[i + 741*t] ? R[B[i + 741*t]] * R[C[i + 741*t]] : R[B[i + 741*t]] + R[C[i + 741*t]];
R[i + 1806*t] = Op[i + 742*t] ? R[B[i + 742*t]] * R[C[i + 742*t]] : R[B[i + 742*t]] + R[C[i + 742*t]];
R[i + 1807*t] = Op[i + 743*t] ? R[B[i + 743*t]] * R[C[i + 743*t]] : R[B[i + 743*t]] + R[C[i + 743*t]];
R[i + 1808*t] = Op[i + 744*t] ? R[B[i + 744*t]] * R[C[i + 744*t]] : R[B[i + 744*t]] + R[C[i + 744*t]];
R[i + 1809*t] = Op[i + 745*t] ? R[B[i + 745*t]] * R[C[i + 745*t]] : R[B[i + 745*t]] + R[C[i + 745*t]];
R[i + 1810*t] = Op[i + 746*t] ? R[B[i + 746*t]] * R[C[i + 746*t]] : R[B[i + 746*t]] + R[C[i + 746*t]];
R[i + 1811*t] = Op[i + 747*t] ? R[B[i + 747*t]] * R[C[i + 747*t]] : R[B[i + 747*t]] + R[C[i + 747*t]];
R[i + 1812*t] = Op[i + 748*t] ? R[B[i + 748*t]] * R[C[i + 748*t]] : R[B[i + 748*t]] + R[C[i + 748*t]];
R[i + 1813*t] = Op[i + 749*t] ? R[B[i + 749*t]] * R[C[i + 749*t]] : R[B[i + 749*t]] + R[C[i + 749*t]];
R[i + 1814*t] = Op[i + 750*t] ? R[B[i + 750*t]] * R[C[i + 750*t]] : R[B[i + 750*t]] + R[C[i + 750*t]];
R[i + 1815*t] = Op[i + 751*t] ? R[B[i + 751*t]] * R[C[i + 751*t]] : R[B[i + 751*t]] + R[C[i + 751*t]];
R[i + 1816*t] = Op[i + 752*t] ? R[B[i + 752*t]] * R[C[i + 752*t]] : R[B[i + 752*t]] + R[C[i + 752*t]];
R[i + 1817*t] = Op[i + 753*t] ? R[B[i + 753*t]] * R[C[i + 753*t]] : R[B[i + 753*t]] + R[C[i + 753*t]];
R[i + 1818*t] = Op[i + 754*t] ? R[B[i + 754*t]] * R[C[i + 754*t]] : R[B[i + 754*t]] + R[C[i + 754*t]];
R[i + 1819*t] = Op[i + 755*t] ? R[B[i + 755*t]] * R[C[i + 755*t]] : R[B[i + 755*t]] + R[C[i + 755*t]];
R[i + 1820*t] = Op[i + 756*t] ? R[B[i + 756*t]] * R[C[i + 756*t]] : R[B[i + 756*t]] + R[C[i + 756*t]];
R[i + 1821*t] = Op[i + 757*t] ? R[B[i + 757*t]] * R[C[i + 757*t]] : R[B[i + 757*t]] + R[C[i + 757*t]];
R[i + 1822*t] = Op[i + 758*t] ? R[B[i + 758*t]] * R[C[i + 758*t]] : R[B[i + 758*t]] + R[C[i + 758*t]];
R[i + 1823*t] = Op[i + 759*t] ? R[B[i + 759*t]] * R[C[i + 759*t]] : R[B[i + 759*t]] + R[C[i + 759*t]];
R[i + 1824*t] = Op[i + 760*t] ? R[B[i + 760*t]] * R[C[i + 760*t]] : R[B[i + 760*t]] + R[C[i + 760*t]];
R[i + 1825*t] = Op[i + 761*t] ? R[B[i + 761*t]] * R[C[i + 761*t]] : R[B[i + 761*t]] + R[C[i + 761*t]];
R[i + 1826*t] = Op[i + 762*t] ? R[B[i + 762*t]] * R[C[i + 762*t]] : R[B[i + 762*t]] + R[C[i + 762*t]];
R[i + 1827*t] = Op[i + 763*t] ? R[B[i + 763*t]] * R[C[i + 763*t]] : R[B[i + 763*t]] + R[C[i + 763*t]];
R[i + 1828*t] = Op[i + 764*t] ? R[B[i + 764*t]] * R[C[i + 764*t]] : R[B[i + 764*t]] + R[C[i + 764*t]];
R[i + 1829*t] = Op[i + 765*t] ? R[B[i + 765*t]] * R[C[i + 765*t]] : R[B[i + 765*t]] + R[C[i + 765*t]];
R[i + 1830*t] = Op[i + 766*t] ? R[B[i + 766*t]] * R[C[i + 766*t]] : R[B[i + 766*t]] + R[C[i + 766*t]];
R[i + 1831*t] = Op[i + 767*t] ? R[B[i + 767*t]] * R[C[i + 767*t]] : R[B[i + 767*t]] + R[C[i + 767*t]];
R[i + 1832*t] = Op[i + 768*t] ? R[B[i + 768*t]] * R[C[i + 768*t]] : R[B[i + 768*t]] + R[C[i + 768*t]];
R[i + 1833*t] = Op[i + 769*t] ? R[B[i + 769*t]] * R[C[i + 769*t]] : R[B[i + 769*t]] + R[C[i + 769*t]];
R[i + 1834*t] = Op[i + 770*t] ? R[B[i + 770*t]] * R[C[i + 770*t]] : R[B[i + 770*t]] + R[C[i + 770*t]];
R[i + 1835*t] = Op[i + 771*t] ? R[B[i + 771*t]] * R[C[i + 771*t]] : R[B[i + 771*t]] + R[C[i + 771*t]];
R[i + 1836*t] = Op[i + 772*t] ? R[B[i + 772*t]] * R[C[i + 772*t]] : R[B[i + 772*t]] + R[C[i + 772*t]];
R[i + 1837*t] = Op[i + 773*t] ? R[B[i + 773*t]] * R[C[i + 773*t]] : R[B[i + 773*t]] + R[C[i + 773*t]];
R[i + 1838*t] = Op[i + 774*t] ? R[B[i + 774*t]] * R[C[i + 774*t]] : R[B[i + 774*t]] + R[C[i + 774*t]];
R[i + 1839*t] = Op[i + 775*t] ? R[B[i + 775*t]] * R[C[i + 775*t]] : R[B[i + 775*t]] + R[C[i + 775*t]];
R[i + 1840*t] = Op[i + 776*t] ? R[B[i + 776*t]] * R[C[i + 776*t]] : R[B[i + 776*t]] + R[C[i + 776*t]];
R[i + 1841*t] = Op[i + 777*t] ? R[B[i + 777*t]] * R[C[i + 777*t]] : R[B[i + 777*t]] + R[C[i + 777*t]];
R[i + 1842*t] = Op[i + 778*t] ? R[B[i + 778*t]] * R[C[i + 778*t]] : R[B[i + 778*t]] + R[C[i + 778*t]];
R[i + 1843*t] = Op[i + 779*t] ? R[B[i + 779*t]] * R[C[i + 779*t]] : R[B[i + 779*t]] + R[C[i + 779*t]];
R[i + 1844*t] = Op[i + 780*t] ? R[B[i + 780*t]] * R[C[i + 780*t]] : R[B[i + 780*t]] + R[C[i + 780*t]];
R[i + 1845*t] = Op[i + 781*t] ? R[B[i + 781*t]] * R[C[i + 781*t]] : R[B[i + 781*t]] + R[C[i + 781*t]];
R[i + 1846*t] = Op[i + 782*t] ? R[B[i + 782*t]] * R[C[i + 782*t]] : R[B[i + 782*t]] + R[C[i + 782*t]];
R[i + 1847*t] = Op[i + 783*t] ? R[B[i + 783*t]] * R[C[i + 783*t]] : R[B[i + 783*t]] + R[C[i + 783*t]];
R[i + 1848*t] = Op[i + 784*t] ? R[B[i + 784*t]] * R[C[i + 784*t]] : R[B[i + 784*t]] + R[C[i + 784*t]];
R[i + 1849*t] = Op[i + 785*t] ? R[B[i + 785*t]] * R[C[i + 785*t]] : R[B[i + 785*t]] + R[C[i + 785*t]];
R[i + 1850*t] = Op[i + 786*t] ? R[B[i + 786*t]] * R[C[i + 786*t]] : R[B[i + 786*t]] + R[C[i + 786*t]];
R[i + 1851*t] = Op[i + 787*t] ? R[B[i + 787*t]] * R[C[i + 787*t]] : R[B[i + 787*t]] + R[C[i + 787*t]];
R[i + 1852*t] = Op[i + 788*t] ? R[B[i + 788*t]] * R[C[i + 788*t]] : R[B[i + 788*t]] + R[C[i + 788*t]];
R[i + 1853*t] = Op[i + 789*t] ? R[B[i + 789*t]] * R[C[i + 789*t]] : R[B[i + 789*t]] + R[C[i + 789*t]];
R[i + 1854*t] = Op[i + 790*t] ? R[B[i + 790*t]] * R[C[i + 790*t]] : R[B[i + 790*t]] + R[C[i + 790*t]];
R[i + 1855*t] = Op[i + 791*t] ? R[B[i + 791*t]] * R[C[i + 791*t]] : R[B[i + 791*t]] + R[C[i + 791*t]];
R[i + 1856*t] = Op[i + 792*t] ? R[B[i + 792*t]] * R[C[i + 792*t]] : R[B[i + 792*t]] + R[C[i + 792*t]];
R[i + 1857*t] = Op[i + 793*t] ? R[B[i + 793*t]] * R[C[i + 793*t]] : R[B[i + 793*t]] + R[C[i + 793*t]];
R[i + 1858*t] = Op[i + 794*t] ? R[B[i + 794*t]] * R[C[i + 794*t]] : R[B[i + 794*t]] + R[C[i + 794*t]];
R[i + 1859*t] = Op[i + 795*t] ? R[B[i + 795*t]] * R[C[i + 795*t]] : R[B[i + 795*t]] + R[C[i + 795*t]];
R[i + 1860*t] = Op[i + 796*t] ? R[B[i + 796*t]] * R[C[i + 796*t]] : R[B[i + 796*t]] + R[C[i + 796*t]];
R[i + 1861*t] = Op[i + 797*t] ? R[B[i + 797*t]] * R[C[i + 797*t]] : R[B[i + 797*t]] + R[C[i + 797*t]];
R[i + 1862*t] = Op[i + 798*t] ? R[B[i + 798*t]] * R[C[i + 798*t]] : R[B[i + 798*t]] + R[C[i + 798*t]];
R[i + 1863*t] = Op[i + 799*t] ? R[B[i + 799*t]] * R[C[i + 799*t]] : R[B[i + 799*t]] + R[C[i + 799*t]];
R[i + 1864*t] = Op[i + 800*t] ? R[B[i + 800*t]] * R[C[i + 800*t]] : R[B[i + 800*t]] + R[C[i + 800*t]];
R[i + 1865*t] = Op[i + 801*t] ? R[B[i + 801*t]] * R[C[i + 801*t]] : R[B[i + 801*t]] + R[C[i + 801*t]];
R[i + 1866*t] = Op[i + 802*t] ? R[B[i + 802*t]] * R[C[i + 802*t]] : R[B[i + 802*t]] + R[C[i + 802*t]];
R[i + 1867*t] = Op[i + 803*t] ? R[B[i + 803*t]] * R[C[i + 803*t]] : R[B[i + 803*t]] + R[C[i + 803*t]];
R[i + 1868*t] = Op[i + 804*t] ? R[B[i + 804*t]] * R[C[i + 804*t]] : R[B[i + 804*t]] + R[C[i + 804*t]];
R[i + 1869*t] = Op[i + 805*t] ? R[B[i + 805*t]] * R[C[i + 805*t]] : R[B[i + 805*t]] + R[C[i + 805*t]];
R[i + 1870*t] = Op[i + 806*t] ? R[B[i + 806*t]] * R[C[i + 806*t]] : R[B[i + 806*t]] + R[C[i + 806*t]];
R[i + 1871*t] = Op[i + 807*t] ? R[B[i + 807*t]] * R[C[i + 807*t]] : R[B[i + 807*t]] + R[C[i + 807*t]];
R[i + 1872*t] = Op[i + 808*t] ? R[B[i + 808*t]] * R[C[i + 808*t]] : R[B[i + 808*t]] + R[C[i + 808*t]];
R[i + 1873*t] = Op[i + 809*t] ? R[B[i + 809*t]] * R[C[i + 809*t]] : R[B[i + 809*t]] + R[C[i + 809*t]];
R[i + 1874*t] = Op[i + 810*t] ? R[B[i + 810*t]] * R[C[i + 810*t]] : R[B[i + 810*t]] + R[C[i + 810*t]];
R[i + 1875*t] = Op[i + 811*t] ? R[B[i + 811*t]] * R[C[i + 811*t]] : R[B[i + 811*t]] + R[C[i + 811*t]];
R[i + 1876*t] = Op[i + 812*t] ? R[B[i + 812*t]] * R[C[i + 812*t]] : R[B[i + 812*t]] + R[C[i + 812*t]];
R[i + 1877*t] = Op[i + 813*t] ? R[B[i + 813*t]] * R[C[i + 813*t]] : R[B[i + 813*t]] + R[C[i + 813*t]];
R[i + 1878*t] = Op[i + 814*t] ? R[B[i + 814*t]] * R[C[i + 814*t]] : R[B[i + 814*t]] + R[C[i + 814*t]];
R[i + 1879*t] = Op[i + 815*t] ? R[B[i + 815*t]] * R[C[i + 815*t]] : R[B[i + 815*t]] + R[C[i + 815*t]];
R[i + 1880*t] = Op[i + 816*t] ? R[B[i + 816*t]] * R[C[i + 816*t]] : R[B[i + 816*t]] + R[C[i + 816*t]];
R[i + 1881*t] = Op[i + 817*t] ? R[B[i + 817*t]] * R[C[i + 817*t]] : R[B[i + 817*t]] + R[C[i + 817*t]];
R[i + 1882*t] = Op[i + 818*t] ? R[B[i + 818*t]] * R[C[i + 818*t]] : R[B[i + 818*t]] + R[C[i + 818*t]];
R[i + 1883*t] = Op[i + 819*t] ? R[B[i + 819*t]] * R[C[i + 819*t]] : R[B[i + 819*t]] + R[C[i + 819*t]];
R[i + 1884*t] = Op[i + 820*t] ? R[B[i + 820*t]] * R[C[i + 820*t]] : R[B[i + 820*t]] + R[C[i + 820*t]];
R[i + 1885*t] = Op[i + 821*t] ? R[B[i + 821*t]] * R[C[i + 821*t]] : R[B[i + 821*t]] + R[C[i + 821*t]];
R[i + 1886*t] = Op[i + 822*t] ? R[B[i + 822*t]] * R[C[i + 822*t]] : R[B[i + 822*t]] + R[C[i + 822*t]];
R[i + 1887*t] = Op[i + 823*t] ? R[B[i + 823*t]] * R[C[i + 823*t]] : R[B[i + 823*t]] + R[C[i + 823*t]];
R[i + 1888*t] = Op[i + 824*t] ? R[B[i + 824*t]] * R[C[i + 824*t]] : R[B[i + 824*t]] + R[C[i + 824*t]];
R[i + 1889*t] = Op[i + 825*t] ? R[B[i + 825*t]] * R[C[i + 825*t]] : R[B[i + 825*t]] + R[C[i + 825*t]];
R[i + 1890*t] = Op[i + 826*t] ? R[B[i + 826*t]] * R[C[i + 826*t]] : R[B[i + 826*t]] + R[C[i + 826*t]];
R[i + 1891*t] = Op[i + 827*t] ? R[B[i + 827*t]] * R[C[i + 827*t]] : R[B[i + 827*t]] + R[C[i + 827*t]];
R[i + 1892*t] = Op[i + 828*t] ? R[B[i + 828*t]] * R[C[i + 828*t]] : R[B[i + 828*t]] + R[C[i + 828*t]];
R[i + 1893*t] = Op[i + 829*t] ? R[B[i + 829*t]] * R[C[i + 829*t]] : R[B[i + 829*t]] + R[C[i + 829*t]];
R[i + 1894*t] = Op[i + 830*t] ? R[B[i + 830*t]] * R[C[i + 830*t]] : R[B[i + 830*t]] + R[C[i + 830*t]];
R[i + 1895*t] = Op[i + 831*t] ? R[B[i + 831*t]] * R[C[i + 831*t]] : R[B[i + 831*t]] + R[C[i + 831*t]];
R[i + 1896*t] = Op[i + 832*t] ? R[B[i + 832*t]] * R[C[i + 832*t]] : R[B[i + 832*t]] + R[C[i + 832*t]];
R[i + 1897*t] = Op[i + 833*t] ? R[B[i + 833*t]] * R[C[i + 833*t]] : R[B[i + 833*t]] + R[C[i + 833*t]];
R[i + 1898*t] = Op[i + 834*t] ? R[B[i + 834*t]] * R[C[i + 834*t]] : R[B[i + 834*t]] + R[C[i + 834*t]];
R[i + 1899*t] = Op[i + 835*t] ? R[B[i + 835*t]] * R[C[i + 835*t]] : R[B[i + 835*t]] + R[C[i + 835*t]];
R[i + 1900*t] = Op[i + 836*t] ? R[B[i + 836*t]] * R[C[i + 836*t]] : R[B[i + 836*t]] + R[C[i + 836*t]];
R[i + 1901*t] = Op[i + 837*t] ? R[B[i + 837*t]] * R[C[i + 837*t]] : R[B[i + 837*t]] + R[C[i + 837*t]];
R[i + 1902*t] = Op[i + 838*t] ? R[B[i + 838*t]] * R[C[i + 838*t]] : R[B[i + 838*t]] + R[C[i + 838*t]];
R[i + 1903*t] = Op[i + 839*t] ? R[B[i + 839*t]] * R[C[i + 839*t]] : R[B[i + 839*t]] + R[C[i + 839*t]];
R[i + 1904*t] = Op[i + 840*t] ? R[B[i + 840*t]] * R[C[i + 840*t]] : R[B[i + 840*t]] + R[C[i + 840*t]];
R[i + 1905*t] = Op[i + 841*t] ? R[B[i + 841*t]] * R[C[i + 841*t]] : R[B[i + 841*t]] + R[C[i + 841*t]];
R[i + 1906*t] = Op[i + 842*t] ? R[B[i + 842*t]] * R[C[i + 842*t]] : R[B[i + 842*t]] + R[C[i + 842*t]];
R[i + 1907*t] = Op[i + 843*t] ? R[B[i + 843*t]] * R[C[i + 843*t]] : R[B[i + 843*t]] + R[C[i + 843*t]];
R[i + 1908*t] = Op[i + 844*t] ? R[B[i + 844*t]] * R[C[i + 844*t]] : R[B[i + 844*t]] + R[C[i + 844*t]];
R[i + 1909*t] = Op[i + 845*t] ? R[B[i + 845*t]] * R[C[i + 845*t]] : R[B[i + 845*t]] + R[C[i + 845*t]];
R[i + 1910*t] = Op[i + 846*t] ? R[B[i + 846*t]] * R[C[i + 846*t]] : R[B[i + 846*t]] + R[C[i + 846*t]];
R[i + 1911*t] = Op[i + 847*t] ? R[B[i + 847*t]] * R[C[i + 847*t]] : R[B[i + 847*t]] + R[C[i + 847*t]];
R[i + 1912*t] = Op[i + 848*t] ? R[B[i + 848*t]] * R[C[i + 848*t]] : R[B[i + 848*t]] + R[C[i + 848*t]];
R[i + 1913*t] = Op[i + 849*t] ? R[B[i + 849*t]] * R[C[i + 849*t]] : R[B[i + 849*t]] + R[C[i + 849*t]];
R[i + 1914*t] = Op[i + 850*t] ? R[B[i + 850*t]] * R[C[i + 850*t]] : R[B[i + 850*t]] + R[C[i + 850*t]];
R[i + 1915*t] = Op[i + 851*t] ? R[B[i + 851*t]] * R[C[i + 851*t]] : R[B[i + 851*t]] + R[C[i + 851*t]];
R[i + 1916*t] = Op[i + 852*t] ? R[B[i + 852*t]] * R[C[i + 852*t]] : R[B[i + 852*t]] + R[C[i + 852*t]];
R[i + 1917*t] = Op[i + 853*t] ? R[B[i + 853*t]] * R[C[i + 853*t]] : R[B[i + 853*t]] + R[C[i + 853*t]];
R[i + 1918*t] = Op[i + 854*t] ? R[B[i + 854*t]] * R[C[i + 854*t]] : R[B[i + 854*t]] + R[C[i + 854*t]];
R[i + 1919*t] = Op[i + 855*t] ? R[B[i + 855*t]] * R[C[i + 855*t]] : R[B[i + 855*t]] + R[C[i + 855*t]];
R[i + 1920*t] = Op[i + 856*t] ? R[B[i + 856*t]] * R[C[i + 856*t]] : R[B[i + 856*t]] + R[C[i + 856*t]];
R[i + 1921*t] = Op[i + 857*t] ? R[B[i + 857*t]] * R[C[i + 857*t]] : R[B[i + 857*t]] + R[C[i + 857*t]];
R[i + 1922*t] = Op[i + 858*t] ? R[B[i + 858*t]] * R[C[i + 858*t]] : R[B[i + 858*t]] + R[C[i + 858*t]];
R[i + 1923*t] = Op[i + 859*t] ? R[B[i + 859*t]] * R[C[i + 859*t]] : R[B[i + 859*t]] + R[C[i + 859*t]];
R[i + 1924*t] = Op[i + 860*t] ? R[B[i + 860*t]] * R[C[i + 860*t]] : R[B[i + 860*t]] + R[C[i + 860*t]];
R[i + 1925*t] = Op[i + 861*t] ? R[B[i + 861*t]] * R[C[i + 861*t]] : R[B[i + 861*t]] + R[C[i + 861*t]];
R[i + 1926*t] = Op[i + 862*t] ? R[B[i + 862*t]] * R[C[i + 862*t]] : R[B[i + 862*t]] + R[C[i + 862*t]];
R[i + 1927*t] = Op[i + 863*t] ? R[B[i + 863*t]] * R[C[i + 863*t]] : R[B[i + 863*t]] + R[C[i + 863*t]];
R[i + 1928*t] = Op[i + 864*t] ? R[B[i + 864*t]] * R[C[i + 864*t]] : R[B[i + 864*t]] + R[C[i + 864*t]];
R[i + 1929*t] = Op[i + 865*t] ? R[B[i + 865*t]] * R[C[i + 865*t]] : R[B[i + 865*t]] + R[C[i + 865*t]];
R[i + 1930*t] = Op[i + 866*t] ? R[B[i + 866*t]] * R[C[i + 866*t]] : R[B[i + 866*t]] + R[C[i + 866*t]];
R[i + 1931*t] = Op[i + 867*t] ? R[B[i + 867*t]] * R[C[i + 867*t]] : R[B[i + 867*t]] + R[C[i + 867*t]];
R[i + 1932*t] = Op[i + 868*t] ? R[B[i + 868*t]] * R[C[i + 868*t]] : R[B[i + 868*t]] + R[C[i + 868*t]];
R[i + 1933*t] = Op[i + 869*t] ? R[B[i + 869*t]] * R[C[i + 869*t]] : R[B[i + 869*t]] + R[C[i + 869*t]];
R[i + 1934*t] = Op[i + 870*t] ? R[B[i + 870*t]] * R[C[i + 870*t]] : R[B[i + 870*t]] + R[C[i + 870*t]];
R[i + 1935*t] = Op[i + 871*t] ? R[B[i + 871*t]] * R[C[i + 871*t]] : R[B[i + 871*t]] + R[C[i + 871*t]];
R[i + 1936*t] = Op[i + 872*t] ? R[B[i + 872*t]] * R[C[i + 872*t]] : R[B[i + 872*t]] + R[C[i + 872*t]];
R[i + 1937*t] = Op[i + 873*t] ? R[B[i + 873*t]] * R[C[i + 873*t]] : R[B[i + 873*t]] + R[C[i + 873*t]];
R[i + 1938*t] = Op[i + 874*t] ? R[B[i + 874*t]] * R[C[i + 874*t]] : R[B[i + 874*t]] + R[C[i + 874*t]];
R[i + 1939*t] = Op[i + 875*t] ? R[B[i + 875*t]] * R[C[i + 875*t]] : R[B[i + 875*t]] + R[C[i + 875*t]];
R[i + 1940*t] = Op[i + 876*t] ? R[B[i + 876*t]] * R[C[i + 876*t]] : R[B[i + 876*t]] + R[C[i + 876*t]];
R[i + 1941*t] = Op[i + 877*t] ? R[B[i + 877*t]] * R[C[i + 877*t]] : R[B[i + 877*t]] + R[C[i + 877*t]];
R[i + 1942*t] = Op[i + 878*t] ? R[B[i + 878*t]] * R[C[i + 878*t]] : R[B[i + 878*t]] + R[C[i + 878*t]];
R[i + 1943*t] = Op[i + 879*t] ? R[B[i + 879*t]] * R[C[i + 879*t]] : R[B[i + 879*t]] + R[C[i + 879*t]];
R[i + 1944*t] = Op[i + 880*t] ? R[B[i + 880*t]] * R[C[i + 880*t]] : R[B[i + 880*t]] + R[C[i + 880*t]];
R[i + 1945*t] = Op[i + 881*t] ? R[B[i + 881*t]] * R[C[i + 881*t]] : R[B[i + 881*t]] + R[C[i + 881*t]];
R[i + 1946*t] = Op[i + 882*t] ? R[B[i + 882*t]] * R[C[i + 882*t]] : R[B[i + 882*t]] + R[C[i + 882*t]];
R[i + 1947*t] = Op[i + 883*t] ? R[B[i + 883*t]] * R[C[i + 883*t]] : R[B[i + 883*t]] + R[C[i + 883*t]];
// Block-wide barrier: results up to R[i + 1947*t] written above must be
// visible to all threads in the block before the next level of
// R[B[..]]/R[C[..]] reads below.
// NOTE(review): must be reached by every thread of the block — confirm no
// divergent control flow encloses this region (kernel entry not visible here).
__syncthreads();
R[i + 1948*t] = Op[i + 884*t] ? R[B[i + 884*t]] * R[C[i + 884*t]] : R[B[i + 884*t]] + R[C[i + 884*t]];
R[i + 1949*t] = Op[i + 885*t] ? R[B[i + 885*t]] * R[C[i + 885*t]] : R[B[i + 885*t]] + R[C[i + 885*t]];
R[i + 1950*t] = Op[i + 886*t] ? R[B[i + 886*t]] * R[C[i + 886*t]] : R[B[i + 886*t]] + R[C[i + 886*t]];
R[i + 1951*t] = Op[i + 887*t] ? R[B[i + 887*t]] * R[C[i + 887*t]] : R[B[i + 887*t]] + R[C[i + 887*t]];
R[i + 1952*t] = Op[i + 888*t] ? R[B[i + 888*t]] * R[C[i + 888*t]] : R[B[i + 888*t]] + R[C[i + 888*t]];
R[i + 1953*t] = Op[i + 889*t] ? R[B[i + 889*t]] * R[C[i + 889*t]] : R[B[i + 889*t]] + R[C[i + 889*t]];
R[i + 1954*t] = Op[i + 890*t] ? R[B[i + 890*t]] * R[C[i + 890*t]] : R[B[i + 890*t]] + R[C[i + 890*t]];
R[i + 1955*t] = Op[i + 891*t] ? R[B[i + 891*t]] * R[C[i + 891*t]] : R[B[i + 891*t]] + R[C[i + 891*t]];
R[i + 1956*t] = Op[i + 892*t] ? R[B[i + 892*t]] * R[C[i + 892*t]] : R[B[i + 892*t]] + R[C[i + 892*t]];
R[i + 1957*t] = Op[i + 893*t] ? R[B[i + 893*t]] * R[C[i + 893*t]] : R[B[i + 893*t]] + R[C[i + 893*t]];
R[i + 1958*t] = Op[i + 894*t] ? R[B[i + 894*t]] * R[C[i + 894*t]] : R[B[i + 894*t]] + R[C[i + 894*t]];
R[i + 1959*t] = Op[i + 895*t] ? R[B[i + 895*t]] * R[C[i + 895*t]] : R[B[i + 895*t]] + R[C[i + 895*t]];
R[i + 1960*t] = Op[i + 896*t] ? R[B[i + 896*t]] * R[C[i + 896*t]] : R[B[i + 896*t]] + R[C[i + 896*t]];
R[i + 1961*t] = Op[i + 897*t] ? R[B[i + 897*t]] * R[C[i + 897*t]] : R[B[i + 897*t]] + R[C[i + 897*t]];
R[i + 1962*t] = Op[i + 898*t] ? R[B[i + 898*t]] * R[C[i + 898*t]] : R[B[i + 898*t]] + R[C[i + 898*t]];
R[i + 1963*t] = Op[i + 899*t] ? R[B[i + 899*t]] * R[C[i + 899*t]] : R[B[i + 899*t]] + R[C[i + 899*t]];
R[i + 1964*t] = Op[i + 900*t] ? R[B[i + 900*t]] * R[C[i + 900*t]] : R[B[i + 900*t]] + R[C[i + 900*t]];
R[i + 1965*t] = Op[i + 901*t] ? R[B[i + 901*t]] * R[C[i + 901*t]] : R[B[i + 901*t]] + R[C[i + 901*t]];
R[i + 1966*t] = Op[i + 902*t] ? R[B[i + 902*t]] * R[C[i + 902*t]] : R[B[i + 902*t]] + R[C[i + 902*t]];
R[i + 1967*t] = Op[i + 903*t] ? R[B[i + 903*t]] * R[C[i + 903*t]] : R[B[i + 903*t]] + R[C[i + 903*t]];
R[i + 1968*t] = Op[i + 904*t] ? R[B[i + 904*t]] * R[C[i + 904*t]] : R[B[i + 904*t]] + R[C[i + 904*t]];
R[i + 1969*t] = Op[i + 905*t] ? R[B[i + 905*t]] * R[C[i + 905*t]] : R[B[i + 905*t]] + R[C[i + 905*t]];
R[i + 1970*t] = Op[i + 906*t] ? R[B[i + 906*t]] * R[C[i + 906*t]] : R[B[i + 906*t]] + R[C[i + 906*t]];
R[i + 1971*t] = Op[i + 907*t] ? R[B[i + 907*t]] * R[C[i + 907*t]] : R[B[i + 907*t]] + R[C[i + 907*t]];
R[i + 1972*t] = Op[i + 908*t] ? R[B[i + 908*t]] * R[C[i + 908*t]] : R[B[i + 908*t]] + R[C[i + 908*t]];
R[i + 1973*t] = Op[i + 909*t] ? R[B[i + 909*t]] * R[C[i + 909*t]] : R[B[i + 909*t]] + R[C[i + 909*t]];
R[i + 1974*t] = Op[i + 910*t] ? R[B[i + 910*t]] * R[C[i + 910*t]] : R[B[i + 910*t]] + R[C[i + 910*t]];
R[i + 1975*t] = Op[i + 911*t] ? R[B[i + 911*t]] * R[C[i + 911*t]] : R[B[i + 911*t]] + R[C[i + 911*t]];
R[i + 1976*t] = Op[i + 912*t] ? R[B[i + 912*t]] * R[C[i + 912*t]] : R[B[i + 912*t]] + R[C[i + 912*t]];
R[i + 1977*t] = Op[i + 913*t] ? R[B[i + 913*t]] * R[C[i + 913*t]] : R[B[i + 913*t]] + R[C[i + 913*t]];
R[i + 1978*t] = Op[i + 914*t] ? R[B[i + 914*t]] * R[C[i + 914*t]] : R[B[i + 914*t]] + R[C[i + 914*t]];
R[i + 1979*t] = Op[i + 915*t] ? R[B[i + 915*t]] * R[C[i + 915*t]] : R[B[i + 915*t]] + R[C[i + 915*t]];
R[i + 1980*t] = Op[i + 916*t] ? R[B[i + 916*t]] * R[C[i + 916*t]] : R[B[i + 916*t]] + R[C[i + 916*t]];
R[i + 1981*t] = Op[i + 917*t] ? R[B[i + 917*t]] * R[C[i + 917*t]] : R[B[i + 917*t]] + R[C[i + 917*t]];
R[i + 1982*t] = Op[i + 918*t] ? R[B[i + 918*t]] * R[C[i + 918*t]] : R[B[i + 918*t]] + R[C[i + 918*t]];
R[i + 1983*t] = Op[i + 919*t] ? R[B[i + 919*t]] * R[C[i + 919*t]] : R[B[i + 919*t]] + R[C[i + 919*t]];
R[i + 1984*t] = Op[i + 920*t] ? R[B[i + 920*t]] * R[C[i + 920*t]] : R[B[i + 920*t]] + R[C[i + 920*t]];
R[i + 1985*t] = Op[i + 921*t] ? R[B[i + 921*t]] * R[C[i + 921*t]] : R[B[i + 921*t]] + R[C[i + 921*t]];
R[i + 1986*t] = Op[i + 922*t] ? R[B[i + 922*t]] * R[C[i + 922*t]] : R[B[i + 922*t]] + R[C[i + 922*t]];
R[i + 1987*t] = Op[i + 923*t] ? R[B[i + 923*t]] * R[C[i + 923*t]] : R[B[i + 923*t]] + R[C[i + 923*t]];
R[i + 1988*t] = Op[i + 924*t] ? R[B[i + 924*t]] * R[C[i + 924*t]] : R[B[i + 924*t]] + R[C[i + 924*t]];
R[i + 1989*t] = Op[i + 925*t] ? R[B[i + 925*t]] * R[C[i + 925*t]] : R[B[i + 925*t]] + R[C[i + 925*t]];
R[i + 1990*t] = Op[i + 926*t] ? R[B[i + 926*t]] * R[C[i + 926*t]] : R[B[i + 926*t]] + R[C[i + 926*t]];
R[i + 1991*t] = Op[i + 927*t] ? R[B[i + 927*t]] * R[C[i + 927*t]] : R[B[i + 927*t]] + R[C[i + 927*t]];
R[i + 1992*t] = Op[i + 928*t] ? R[B[i + 928*t]] * R[C[i + 928*t]] : R[B[i + 928*t]] + R[C[i + 928*t]];
R[i + 1993*t] = Op[i + 929*t] ? R[B[i + 929*t]] * R[C[i + 929*t]] : R[B[i + 929*t]] + R[C[i + 929*t]];
R[i + 1994*t] = Op[i + 930*t] ? R[B[i + 930*t]] * R[C[i + 930*t]] : R[B[i + 930*t]] + R[C[i + 930*t]];
R[i + 1995*t] = Op[i + 931*t] ? R[B[i + 931*t]] * R[C[i + 931*t]] : R[B[i + 931*t]] + R[C[i + 931*t]];
R[i + 1996*t] = Op[i + 932*t] ? R[B[i + 932*t]] * R[C[i + 932*t]] : R[B[i + 932*t]] + R[C[i + 932*t]];
R[i + 1997*t] = Op[i + 933*t] ? R[B[i + 933*t]] * R[C[i + 933*t]] : R[B[i + 933*t]] + R[C[i + 933*t]];
R[i + 1998*t] = Op[i + 934*t] ? R[B[i + 934*t]] * R[C[i + 934*t]] : R[B[i + 934*t]] + R[C[i + 934*t]];
R[i + 1999*t] = Op[i + 935*t] ? R[B[i + 935*t]] * R[C[i + 935*t]] : R[B[i + 935*t]] + R[C[i + 935*t]];
R[i + 2000*t] = Op[i + 936*t] ? R[B[i + 936*t]] * R[C[i + 936*t]] : R[B[i + 936*t]] + R[C[i + 936*t]];
R[i + 2001*t] = Op[i + 937*t] ? R[B[i + 937*t]] * R[C[i + 937*t]] : R[B[i + 937*t]] + R[C[i + 937*t]];
R[i + 2002*t] = Op[i + 938*t] ? R[B[i + 938*t]] * R[C[i + 938*t]] : R[B[i + 938*t]] + R[C[i + 938*t]];
R[i + 2003*t] = Op[i + 939*t] ? R[B[i + 939*t]] * R[C[i + 939*t]] : R[B[i + 939*t]] + R[C[i + 939*t]];
R[i + 2004*t] = Op[i + 940*t] ? R[B[i + 940*t]] * R[C[i + 940*t]] : R[B[i + 940*t]] + R[C[i + 940*t]];
R[i + 2005*t] = Op[i + 941*t] ? R[B[i + 941*t]] * R[C[i + 941*t]] : R[B[i + 941*t]] + R[C[i + 941*t]];
R[i + 2006*t] = Op[i + 942*t] ? R[B[i + 942*t]] * R[C[i + 942*t]] : R[B[i + 942*t]] + R[C[i + 942*t]];
R[i + 2007*t] = Op[i + 943*t] ? R[B[i + 943*t]] * R[C[i + 943*t]] : R[B[i + 943*t]] + R[C[i + 943*t]];
R[i + 2008*t] = Op[i + 944*t] ? R[B[i + 944*t]] * R[C[i + 944*t]] : R[B[i + 944*t]] + R[C[i + 944*t]];
R[i + 2009*t] = Op[i + 945*t] ? R[B[i + 945*t]] * R[C[i + 945*t]] : R[B[i + 945*t]] + R[C[i + 945*t]];
R[i + 2010*t] = Op[i + 946*t] ? R[B[i + 946*t]] * R[C[i + 946*t]] : R[B[i + 946*t]] + R[C[i + 946*t]];
R[i + 2011*t] = Op[i + 947*t] ? R[B[i + 947*t]] * R[C[i + 947*t]] : R[B[i + 947*t]] + R[C[i + 947*t]];
R[i + 2012*t] = Op[i + 948*t] ? R[B[i + 948*t]] * R[C[i + 948*t]] : R[B[i + 948*t]] + R[C[i + 948*t]];
R[i + 2013*t] = Op[i + 949*t] ? R[B[i + 949*t]] * R[C[i + 949*t]] : R[B[i + 949*t]] + R[C[i + 949*t]];
R[i + 2014*t] = Op[i + 950*t] ? R[B[i + 950*t]] * R[C[i + 950*t]] : R[B[i + 950*t]] + R[C[i + 950*t]];
R[i + 2015*t] = Op[i + 951*t] ? R[B[i + 951*t]] * R[C[i + 951*t]] : R[B[i + 951*t]] + R[C[i + 951*t]];
R[i + 2016*t] = Op[i + 952*t] ? R[B[i + 952*t]] * R[C[i + 952*t]] : R[B[i + 952*t]] + R[C[i + 952*t]];
R[i + 2017*t] = Op[i + 953*t] ? R[B[i + 953*t]] * R[C[i + 953*t]] : R[B[i + 953*t]] + R[C[i + 953*t]];
R[i + 2018*t] = Op[i + 954*t] ? R[B[i + 954*t]] * R[C[i + 954*t]] : R[B[i + 954*t]] + R[C[i + 954*t]];
R[i + 2019*t] = Op[i + 955*t] ? R[B[i + 955*t]] * R[C[i + 955*t]] : R[B[i + 955*t]] + R[C[i + 955*t]];
R[i + 2020*t] = Op[i + 956*t] ? R[B[i + 956*t]] * R[C[i + 956*t]] : R[B[i + 956*t]] + R[C[i + 956*t]];
R[i + 2021*t] = Op[i + 957*t] ? R[B[i + 957*t]] * R[C[i + 957*t]] : R[B[i + 957*t]] + R[C[i + 957*t]];
R[i + 2022*t] = Op[i + 958*t] ? R[B[i + 958*t]] * R[C[i + 958*t]] : R[B[i + 958*t]] + R[C[i + 958*t]];
R[i + 2023*t] = Op[i + 959*t] ? R[B[i + 959*t]] * R[C[i + 959*t]] : R[B[i + 959*t]] + R[C[i + 959*t]];
R[i + 2024*t] = Op[i + 960*t] ? R[B[i + 960*t]] * R[C[i + 960*t]] : R[B[i + 960*t]] + R[C[i + 960*t]];
R[i + 2025*t] = Op[i + 961*t] ? R[B[i + 961*t]] * R[C[i + 961*t]] : R[B[i + 961*t]] + R[C[i + 961*t]];
R[i + 2026*t] = Op[i + 962*t] ? R[B[i + 962*t]] * R[C[i + 962*t]] : R[B[i + 962*t]] + R[C[i + 962*t]];
R[i + 2027*t] = Op[i + 963*t] ? R[B[i + 963*t]] * R[C[i + 963*t]] : R[B[i + 963*t]] + R[C[i + 963*t]];
R[i + 2028*t] = Op[i + 964*t] ? R[B[i + 964*t]] * R[C[i + 964*t]] : R[B[i + 964*t]] + R[C[i + 964*t]];
R[i + 2029*t] = Op[i + 965*t] ? R[B[i + 965*t]] * R[C[i + 965*t]] : R[B[i + 965*t]] + R[C[i + 965*t]];
__syncthreads();
R[i + 2030*t] = Op[i + 966*t] ? R[B[i + 966*t]] * R[C[i + 966*t]] : R[B[i + 966*t]] + R[C[i + 966*t]];
R[i + 2031*t] = Op[i + 967*t] ? R[B[i + 967*t]] * R[C[i + 967*t]] : R[B[i + 967*t]] + R[C[i + 967*t]];
R[i + 2032*t] = Op[i + 968*t] ? R[B[i + 968*t]] * R[C[i + 968*t]] : R[B[i + 968*t]] + R[C[i + 968*t]];
R[i + 2033*t] = Op[i + 969*t] ? R[B[i + 969*t]] * R[C[i + 969*t]] : R[B[i + 969*t]] + R[C[i + 969*t]];
R[i + 2034*t] = Op[i + 970*t] ? R[B[i + 970*t]] * R[C[i + 970*t]] : R[B[i + 970*t]] + R[C[i + 970*t]];
R[i + 2035*t] = Op[i + 971*t] ? R[B[i + 971*t]] * R[C[i + 971*t]] : R[B[i + 971*t]] + R[C[i + 971*t]];
R[i + 2036*t] = Op[i + 972*t] ? R[B[i + 972*t]] * R[C[i + 972*t]] : R[B[i + 972*t]] + R[C[i + 972*t]];
R[i + 2037*t] = Op[i + 973*t] ? R[B[i + 973*t]] * R[C[i + 973*t]] : R[B[i + 973*t]] + R[C[i + 973*t]];
R[i + 2038*t] = Op[i + 974*t] ? R[B[i + 974*t]] * R[C[i + 974*t]] : R[B[i + 974*t]] + R[C[i + 974*t]];
R[i + 2039*t] = Op[i + 975*t] ? R[B[i + 975*t]] * R[C[i + 975*t]] : R[B[i + 975*t]] + R[C[i + 975*t]];
R[i + 2040*t] = Op[i + 976*t] ? R[B[i + 976*t]] * R[C[i + 976*t]] : R[B[i + 976*t]] + R[C[i + 976*t]];
R[i + 2041*t] = Op[i + 977*t] ? R[B[i + 977*t]] * R[C[i + 977*t]] : R[B[i + 977*t]] + R[C[i + 977*t]];
R[i + 2042*t] = Op[i + 978*t] ? R[B[i + 978*t]] * R[C[i + 978*t]] : R[B[i + 978*t]] + R[C[i + 978*t]];
R[i + 2043*t] = Op[i + 979*t] ? R[B[i + 979*t]] * R[C[i + 979*t]] : R[B[i + 979*t]] + R[C[i + 979*t]];
R[i + 2044*t] = Op[i + 980*t] ? R[B[i + 980*t]] * R[C[i + 980*t]] : R[B[i + 980*t]] + R[C[i + 980*t]];
R[i + 2045*t] = Op[i + 981*t] ? R[B[i + 981*t]] * R[C[i + 981*t]] : R[B[i + 981*t]] + R[C[i + 981*t]];
R[i + 2046*t] = Op[i + 982*t] ? R[B[i + 982*t]] * R[C[i + 982*t]] : R[B[i + 982*t]] + R[C[i + 982*t]];
R[i + 2047*t] = Op[i + 983*t] ? R[B[i + 983*t]] * R[C[i + 983*t]] : R[B[i + 983*t]] + R[C[i + 983*t]];
R[i + 2048*t] = Op[i + 984*t] ? R[B[i + 984*t]] * R[C[i + 984*t]] : R[B[i + 984*t]] + R[C[i + 984*t]];
R[i + 2049*t] = Op[i + 985*t] ? R[B[i + 985*t]] * R[C[i + 985*t]] : R[B[i + 985*t]] + R[C[i + 985*t]];
R[i + 2050*t] = Op[i + 986*t] ? R[B[i + 986*t]] * R[C[i + 986*t]] : R[B[i + 986*t]] + R[C[i + 986*t]];
R[i + 2051*t] = Op[i + 987*t] ? R[B[i + 987*t]] * R[C[i + 987*t]] : R[B[i + 987*t]] + R[C[i + 987*t]];
R[i + 2052*t] = Op[i + 988*t] ? R[B[i + 988*t]] * R[C[i + 988*t]] : R[B[i + 988*t]] + R[C[i + 988*t]];
R[i + 2053*t] = Op[i + 989*t] ? R[B[i + 989*t]] * R[C[i + 989*t]] : R[B[i + 989*t]] + R[C[i + 989*t]];
R[i + 2054*t] = Op[i + 990*t] ? R[B[i + 990*t]] * R[C[i + 990*t]] : R[B[i + 990*t]] + R[C[i + 990*t]];
R[i + 2055*t] = Op[i + 991*t] ? R[B[i + 991*t]] * R[C[i + 991*t]] : R[B[i + 991*t]] + R[C[i + 991*t]];
R[i + 2056*t] = Op[i + 992*t] ? R[B[i + 992*t]] * R[C[i + 992*t]] : R[B[i + 992*t]] + R[C[i + 992*t]];
R[i + 2057*t] = Op[i + 993*t] ? R[B[i + 993*t]] * R[C[i + 993*t]] : R[B[i + 993*t]] + R[C[i + 993*t]];
R[i + 2058*t] = Op[i + 994*t] ? R[B[i + 994*t]] * R[C[i + 994*t]] : R[B[i + 994*t]] + R[C[i + 994*t]];
R[i + 2059*t] = Op[i + 995*t] ? R[B[i + 995*t]] * R[C[i + 995*t]] : R[B[i + 995*t]] + R[C[i + 995*t]];
R[i + 2060*t] = Op[i + 996*t] ? R[B[i + 996*t]] * R[C[i + 996*t]] : R[B[i + 996*t]] + R[C[i + 996*t]];
R[i + 2061*t] = Op[i + 997*t] ? R[B[i + 997*t]] * R[C[i + 997*t]] : R[B[i + 997*t]] + R[C[i + 997*t]];
R[i + 2062*t] = Op[i + 998*t] ? R[B[i + 998*t]] * R[C[i + 998*t]] : R[B[i + 998*t]] + R[C[i + 998*t]];
R[i + 2063*t] = Op[i + 999*t] ? R[B[i + 999*t]] * R[C[i + 999*t]] : R[B[i + 999*t]] + R[C[i + 999*t]];
R[i + 2064*t] = Op[i + 1000*t] ? R[B[i + 1000*t]] * R[C[i + 1000*t]] : R[B[i + 1000*t]] + R[C[i + 1000*t]];
R[i + 2065*t] = Op[i + 1001*t] ? R[B[i + 1001*t]] * R[C[i + 1001*t]] : R[B[i + 1001*t]] + R[C[i + 1001*t]];
R[i + 2066*t] = Op[i + 1002*t] ? R[B[i + 1002*t]] * R[C[i + 1002*t]] : R[B[i + 1002*t]] + R[C[i + 1002*t]];
R[i + 2067*t] = Op[i + 1003*t] ? R[B[i + 1003*t]] * R[C[i + 1003*t]] : R[B[i + 1003*t]] + R[C[i + 1003*t]];
R[i + 2068*t] = Op[i + 1004*t] ? R[B[i + 1004*t]] * R[C[i + 1004*t]] : R[B[i + 1004*t]] + R[C[i + 1004*t]];
R[i + 2069*t] = Op[i + 1005*t] ? R[B[i + 1005*t]] * R[C[i + 1005*t]] : R[B[i + 1005*t]] + R[C[i + 1005*t]];
R[i + 2070*t] = Op[i + 1006*t] ? R[B[i + 1006*t]] * R[C[i + 1006*t]] : R[B[i + 1006*t]] + R[C[i + 1006*t]];
R[i + 2071*t] = Op[i + 1007*t] ? R[B[i + 1007*t]] * R[C[i + 1007*t]] : R[B[i + 1007*t]] + R[C[i + 1007*t]];
R[i + 2072*t] = Op[i + 1008*t] ? R[B[i + 1008*t]] * R[C[i + 1008*t]] : R[B[i + 1008*t]] + R[C[i + 1008*t]];
R[i + 2073*t] = Op[i + 1009*t] ? R[B[i + 1009*t]] * R[C[i + 1009*t]] : R[B[i + 1009*t]] + R[C[i + 1009*t]];
R[i + 2074*t] = Op[i + 1010*t] ? R[B[i + 1010*t]] * R[C[i + 1010*t]] : R[B[i + 1010*t]] + R[C[i + 1010*t]];
R[i + 2075*t] = Op[i + 1011*t] ? R[B[i + 1011*t]] * R[C[i + 1011*t]] : R[B[i + 1011*t]] + R[C[i + 1011*t]];
R[i + 2076*t] = Op[i + 1012*t] ? R[B[i + 1012*t]] * R[C[i + 1012*t]] : R[B[i + 1012*t]] + R[C[i + 1012*t]];
R[i + 2077*t] = Op[i + 1013*t] ? R[B[i + 1013*t]] * R[C[i + 1013*t]] : R[B[i + 1013*t]] + R[C[i + 1013*t]];
R[i + 2078*t] = Op[i + 1014*t] ? R[B[i + 1014*t]] * R[C[i + 1014*t]] : R[B[i + 1014*t]] + R[C[i + 1014*t]];
R[i + 2079*t] = Op[i + 1015*t] ? R[B[i + 1015*t]] * R[C[i + 1015*t]] : R[B[i + 1015*t]] + R[C[i + 1015*t]];
R[i + 2080*t] = Op[i + 1016*t] ? R[B[i + 1016*t]] * R[C[i + 1016*t]] : R[B[i + 1016*t]] + R[C[i + 1016*t]];
R[i + 2081*t] = Op[i + 1017*t] ? R[B[i + 1017*t]] * R[C[i + 1017*t]] : R[B[i + 1017*t]] + R[C[i + 1017*t]];
R[i + 2082*t] = Op[i + 1018*t] ? R[B[i + 1018*t]] * R[C[i + 1018*t]] : R[B[i + 1018*t]] + R[C[i + 1018*t]];
R[i + 2083*t] = Op[i + 1019*t] ? R[B[i + 1019*t]] * R[C[i + 1019*t]] : R[B[i + 1019*t]] + R[C[i + 1019*t]];
R[i + 2084*t] = Op[i + 1020*t] ? R[B[i + 1020*t]] * R[C[i + 1020*t]] : R[B[i + 1020*t]] + R[C[i + 1020*t]];
R[i + 2085*t] = Op[i + 1021*t] ? R[B[i + 1021*t]] * R[C[i + 1021*t]] : R[B[i + 1021*t]] + R[C[i + 1021*t]];
R[i + 2086*t] = Op[i + 1022*t] ? R[B[i + 1022*t]] * R[C[i + 1022*t]] : R[B[i + 1022*t]] + R[C[i + 1022*t]];
R[i + 2087*t] = Op[i + 1023*t] ? R[B[i + 1023*t]] * R[C[i + 1023*t]] : R[B[i + 1023*t]] + R[C[i + 1023*t]];
R[i + 2088*t] = Op[i + 1024*t] ? R[B[i + 1024*t]] * R[C[i + 1024*t]] : R[B[i + 1024*t]] + R[C[i + 1024*t]];
R[i + 2089*t] = Op[i + 1025*t] ? R[B[i + 1025*t]] * R[C[i + 1025*t]] : R[B[i + 1025*t]] + R[C[i + 1025*t]];
R[i + 2090*t] = Op[i + 1026*t] ? R[B[i + 1026*t]] * R[C[i + 1026*t]] : R[B[i + 1026*t]] + R[C[i + 1026*t]];
R[i + 2091*t] = Op[i + 1027*t] ? R[B[i + 1027*t]] * R[C[i + 1027*t]] : R[B[i + 1027*t]] + R[C[i + 1027*t]];
R[i + 2092*t] = Op[i + 1028*t] ? R[B[i + 1028*t]] * R[C[i + 1028*t]] : R[B[i + 1028*t]] + R[C[i + 1028*t]];
R[i + 2093*t] = Op[i + 1029*t] ? R[B[i + 1029*t]] * R[C[i + 1029*t]] : R[B[i + 1029*t]] + R[C[i + 1029*t]];
R[i + 2094*t] = Op[i + 1030*t] ? R[B[i + 1030*t]] * R[C[i + 1030*t]] : R[B[i + 1030*t]] + R[C[i + 1030*t]];
R[i + 2095*t] = Op[i + 1031*t] ? R[B[i + 1031*t]] * R[C[i + 1031*t]] : R[B[i + 1031*t]] + R[C[i + 1031*t]];
R[i + 2096*t] = Op[i + 1032*t] ? R[B[i + 1032*t]] * R[C[i + 1032*t]] : R[B[i + 1032*t]] + R[C[i + 1032*t]];
R[i + 2097*t] = Op[i + 1033*t] ? R[B[i + 1033*t]] * R[C[i + 1033*t]] : R[B[i + 1033*t]] + R[C[i + 1033*t]];
R[i + 2098*t] = Op[i + 1034*t] ? R[B[i + 1034*t]] * R[C[i + 1034*t]] : R[B[i + 1034*t]] + R[C[i + 1034*t]];
R[i + 2099*t] = Op[i + 1035*t] ? R[B[i + 1035*t]] * R[C[i + 1035*t]] : R[B[i + 1035*t]] + R[C[i + 1035*t]];
R[i + 2100*t] = Op[i + 1036*t] ? R[B[i + 1036*t]] * R[C[i + 1036*t]] : R[B[i + 1036*t]] + R[C[i + 1036*t]];
R[i + 2101*t] = Op[i + 1037*t] ? R[B[i + 1037*t]] * R[C[i + 1037*t]] : R[B[i + 1037*t]] + R[C[i + 1037*t]];
R[i + 2102*t] = Op[i + 1038*t] ? R[B[i + 1038*t]] * R[C[i + 1038*t]] : R[B[i + 1038*t]] + R[C[i + 1038*t]];
R[i + 2103*t] = Op[i + 1039*t] ? R[B[i + 1039*t]] * R[C[i + 1039*t]] : R[B[i + 1039*t]] + R[C[i + 1039*t]];
R[i + 2104*t] = Op[i + 1040*t] ? R[B[i + 1040*t]] * R[C[i + 1040*t]] : R[B[i + 1040*t]] + R[C[i + 1040*t]];
R[i + 2105*t] = Op[i + 1041*t] ? R[B[i + 1041*t]] * R[C[i + 1041*t]] : R[B[i + 1041*t]] + R[C[i + 1041*t]];
R[i + 2106*t] = Op[i + 1042*t] ? R[B[i + 1042*t]] * R[C[i + 1042*t]] : R[B[i + 1042*t]] + R[C[i + 1042*t]];
R[i + 2107*t] = Op[i + 1043*t] ? R[B[i + 1043*t]] * R[C[i + 1043*t]] : R[B[i + 1043*t]] + R[C[i + 1043*t]];
R[i + 2108*t] = Op[i + 1044*t] ? R[B[i + 1044*t]] * R[C[i + 1044*t]] : R[B[i + 1044*t]] + R[C[i + 1044*t]];
R[i + 2109*t] = Op[i + 1045*t] ? R[B[i + 1045*t]] * R[C[i + 1045*t]] : R[B[i + 1045*t]] + R[C[i + 1045*t]];
R[i + 2110*t] = Op[i + 1046*t] ? R[B[i + 1046*t]] * R[C[i + 1046*t]] : R[B[i + 1046*t]] + R[C[i + 1046*t]];
__syncthreads();
R[i + 2111*t] = Op[i + 1047*t] ? R[B[i + 1047*t]] * R[C[i + 1047*t]] : R[B[i + 1047*t]] + R[C[i + 1047*t]];
R[i + 2112*t] = Op[i + 1048*t] ? R[B[i + 1048*t]] * R[C[i + 1048*t]] : R[B[i + 1048*t]] + R[C[i + 1048*t]];
R[i + 2113*t] = Op[i + 1049*t] ? R[B[i + 1049*t]] * R[C[i + 1049*t]] : R[B[i + 1049*t]] + R[C[i + 1049*t]];
R[i + 2114*t] = Op[i + 1050*t] ? R[B[i + 1050*t]] * R[C[i + 1050*t]] : R[B[i + 1050*t]] + R[C[i + 1050*t]];
R[i + 2115*t] = Op[i + 1051*t] ? R[B[i + 1051*t]] * R[C[i + 1051*t]] : R[B[i + 1051*t]] + R[C[i + 1051*t]];
R[i + 2116*t] = Op[i + 1052*t] ? R[B[i + 1052*t]] * R[C[i + 1052*t]] : R[B[i + 1052*t]] + R[C[i + 1052*t]];
R[i + 2117*t] = Op[i + 1053*t] ? R[B[i + 1053*t]] * R[C[i + 1053*t]] : R[B[i + 1053*t]] + R[C[i + 1053*t]];
R[i + 2118*t] = Op[i + 1054*t] ? R[B[i + 1054*t]] * R[C[i + 1054*t]] : R[B[i + 1054*t]] + R[C[i + 1054*t]];
R[i + 2119*t] = Op[i + 1055*t] ? R[B[i + 1055*t]] * R[C[i + 1055*t]] : R[B[i + 1055*t]] + R[C[i + 1055*t]];
R[i + 2120*t] = Op[i + 1056*t] ? R[B[i + 1056*t]] * R[C[i + 1056*t]] : R[B[i + 1056*t]] + R[C[i + 1056*t]];
R[i + 2121*t] = Op[i + 1057*t] ? R[B[i + 1057*t]] * R[C[i + 1057*t]] : R[B[i + 1057*t]] + R[C[i + 1057*t]];
R[i + 2122*t] = Op[i + 1058*t] ? R[B[i + 1058*t]] * R[C[i + 1058*t]] : R[B[i + 1058*t]] + R[C[i + 1058*t]];
R[i + 2123*t] = Op[i + 1059*t] ? R[B[i + 1059*t]] * R[C[i + 1059*t]] : R[B[i + 1059*t]] + R[C[i + 1059*t]];
R[i + 2124*t] = Op[i + 1060*t] ? R[B[i + 1060*t]] * R[C[i + 1060*t]] : R[B[i + 1060*t]] + R[C[i + 1060*t]];
R[i + 2125*t] = Op[i + 1061*t] ? R[B[i + 1061*t]] * R[C[i + 1061*t]] : R[B[i + 1061*t]] + R[C[i + 1061*t]];
R[i + 2126*t] = Op[i + 1062*t] ? R[B[i + 1062*t]] * R[C[i + 1062*t]] : R[B[i + 1062*t]] + R[C[i + 1062*t]];
R[i + 2127*t] = Op[i + 1063*t] ? R[B[i + 1063*t]] * R[C[i + 1063*t]] : R[B[i + 1063*t]] + R[C[i + 1063*t]];
R[i + 2128*t] = Op[i + 1064*t] ? R[B[i + 1064*t]] * R[C[i + 1064*t]] : R[B[i + 1064*t]] + R[C[i + 1064*t]];
R[i + 2129*t] = Op[i + 1065*t] ? R[B[i + 1065*t]] * R[C[i + 1065*t]] : R[B[i + 1065*t]] + R[C[i + 1065*t]];
R[i + 2130*t] = Op[i + 1066*t] ? R[B[i + 1066*t]] * R[C[i + 1066*t]] : R[B[i + 1066*t]] + R[C[i + 1066*t]];
R[i + 2131*t] = Op[i + 1067*t] ? R[B[i + 1067*t]] * R[C[i + 1067*t]] : R[B[i + 1067*t]] + R[C[i + 1067*t]];
R[i + 2132*t] = Op[i + 1068*t] ? R[B[i + 1068*t]] * R[C[i + 1068*t]] : R[B[i + 1068*t]] + R[C[i + 1068*t]];
R[i + 2133*t] = Op[i + 1069*t] ? R[B[i + 1069*t]] * R[C[i + 1069*t]] : R[B[i + 1069*t]] + R[C[i + 1069*t]];
R[i + 2134*t] = Op[i + 1070*t] ? R[B[i + 1070*t]] * R[C[i + 1070*t]] : R[B[i + 1070*t]] + R[C[i + 1070*t]];
R[i + 2135*t] = Op[i + 1071*t] ? R[B[i + 1071*t]] * R[C[i + 1071*t]] : R[B[i + 1071*t]] + R[C[i + 1071*t]];
R[i + 2136*t] = Op[i + 1072*t] ? R[B[i + 1072*t]] * R[C[i + 1072*t]] : R[B[i + 1072*t]] + R[C[i + 1072*t]];
R[i + 2137*t] = Op[i + 1073*t] ? R[B[i + 1073*t]] * R[C[i + 1073*t]] : R[B[i + 1073*t]] + R[C[i + 1073*t]];
R[i + 2138*t] = Op[i + 1074*t] ? R[B[i + 1074*t]] * R[C[i + 1074*t]] : R[B[i + 1074*t]] + R[C[i + 1074*t]];
R[i + 2139*t] = Op[i + 1075*t] ? R[B[i + 1075*t]] * R[C[i + 1075*t]] : R[B[i + 1075*t]] + R[C[i + 1075*t]];
R[i + 2140*t] = Op[i + 1076*t] ? R[B[i + 1076*t]] * R[C[i + 1076*t]] : R[B[i + 1076*t]] + R[C[i + 1076*t]];
R[i + 2141*t] = Op[i + 1077*t] ? R[B[i + 1077*t]] * R[C[i + 1077*t]] : R[B[i + 1077*t]] + R[C[i + 1077*t]];
R[i + 2142*t] = Op[i + 1078*t] ? R[B[i + 1078*t]] * R[C[i + 1078*t]] : R[B[i + 1078*t]] + R[C[i + 1078*t]];
R[i + 2143*t] = Op[i + 1079*t] ? R[B[i + 1079*t]] * R[C[i + 1079*t]] : R[B[i + 1079*t]] + R[C[i + 1079*t]];
R[i + 2144*t] = Op[i + 1080*t] ? R[B[i + 1080*t]] * R[C[i + 1080*t]] : R[B[i + 1080*t]] + R[C[i + 1080*t]];
R[i + 2145*t] = Op[i + 1081*t] ? R[B[i + 1081*t]] * R[C[i + 1081*t]] : R[B[i + 1081*t]] + R[C[i + 1081*t]];
R[i + 2146*t] = Op[i + 1082*t] ? R[B[i + 1082*t]] * R[C[i + 1082*t]] : R[B[i + 1082*t]] + R[C[i + 1082*t]];
R[i + 2147*t] = Op[i + 1083*t] ? R[B[i + 1083*t]] * R[C[i + 1083*t]] : R[B[i + 1083*t]] + R[C[i + 1083*t]];
R[i + 2148*t] = Op[i + 1084*t] ? R[B[i + 1084*t]] * R[C[i + 1084*t]] : R[B[i + 1084*t]] + R[C[i + 1084*t]];
R[i + 2149*t] = Op[i + 1085*t] ? R[B[i + 1085*t]] * R[C[i + 1085*t]] : R[B[i + 1085*t]] + R[C[i + 1085*t]];
R[i + 2150*t] = Op[i + 1086*t] ? R[B[i + 1086*t]] * R[C[i + 1086*t]] : R[B[i + 1086*t]] + R[C[i + 1086*t]];
R[i + 2151*t] = Op[i + 1087*t] ? R[B[i + 1087*t]] * R[C[i + 1087*t]] : R[B[i + 1087*t]] + R[C[i + 1087*t]];
R[i + 2152*t] = Op[i + 1088*t] ? R[B[i + 1088*t]] * R[C[i + 1088*t]] : R[B[i + 1088*t]] + R[C[i + 1088*t]];
R[i + 2153*t] = Op[i + 1089*t] ? R[B[i + 1089*t]] * R[C[i + 1089*t]] : R[B[i + 1089*t]] + R[C[i + 1089*t]];
R[i + 2154*t] = Op[i + 1090*t] ? R[B[i + 1090*t]] * R[C[i + 1090*t]] : R[B[i + 1090*t]] + R[C[i + 1090*t]];
R[i + 2155*t] = Op[i + 1091*t] ? R[B[i + 1091*t]] * R[C[i + 1091*t]] : R[B[i + 1091*t]] + R[C[i + 1091*t]];
R[i + 2156*t] = Op[i + 1092*t] ? R[B[i + 1092*t]] * R[C[i + 1092*t]] : R[B[i + 1092*t]] + R[C[i + 1092*t]];
R[i + 2157*t] = Op[i + 1093*t] ? R[B[i + 1093*t]] * R[C[i + 1093*t]] : R[B[i + 1093*t]] + R[C[i + 1093*t]];
R[i + 2158*t] = Op[i + 1094*t] ? R[B[i + 1094*t]] * R[C[i + 1094*t]] : R[B[i + 1094*t]] + R[C[i + 1094*t]];
R[i + 2159*t] = Op[i + 1095*t] ? R[B[i + 1095*t]] * R[C[i + 1095*t]] : R[B[i + 1095*t]] + R[C[i + 1095*t]];
R[i + 2160*t] = Op[i + 1096*t] ? R[B[i + 1096*t]] * R[C[i + 1096*t]] : R[B[i + 1096*t]] + R[C[i + 1096*t]];
R[i + 2161*t] = Op[i + 1097*t] ? R[B[i + 1097*t]] * R[C[i + 1097*t]] : R[B[i + 1097*t]] + R[C[i + 1097*t]];
R[i + 2162*t] = Op[i + 1098*t] ? R[B[i + 1098*t]] * R[C[i + 1098*t]] : R[B[i + 1098*t]] + R[C[i + 1098*t]];
R[i + 2163*t] = Op[i + 1099*t] ? R[B[i + 1099*t]] * R[C[i + 1099*t]] : R[B[i + 1099*t]] + R[C[i + 1099*t]];
R[i + 2164*t] = Op[i + 1100*t] ? R[B[i + 1100*t]] * R[C[i + 1100*t]] : R[B[i + 1100*t]] + R[C[i + 1100*t]];
R[i + 2165*t] = Op[i + 1101*t] ? R[B[i + 1101*t]] * R[C[i + 1101*t]] : R[B[i + 1101*t]] + R[C[i + 1101*t]];
R[i + 2166*t] = Op[i + 1102*t] ? R[B[i + 1102*t]] * R[C[i + 1102*t]] : R[B[i + 1102*t]] + R[C[i + 1102*t]];
R[i + 2167*t] = Op[i + 1103*t] ? R[B[i + 1103*t]] * R[C[i + 1103*t]] : R[B[i + 1103*t]] + R[C[i + 1103*t]];
R[i + 2168*t] = Op[i + 1104*t] ? R[B[i + 1104*t]] * R[C[i + 1104*t]] : R[B[i + 1104*t]] + R[C[i + 1104*t]];
R[i + 2169*t] = Op[i + 1105*t] ? R[B[i + 1105*t]] * R[C[i + 1105*t]] : R[B[i + 1105*t]] + R[C[i + 1105*t]];
R[i + 2170*t] = Op[i + 1106*t] ? R[B[i + 1106*t]] * R[C[i + 1106*t]] : R[B[i + 1106*t]] + R[C[i + 1106*t]];
R[i + 2171*t] = Op[i + 1107*t] ? R[B[i + 1107*t]] * R[C[i + 1107*t]] : R[B[i + 1107*t]] + R[C[i + 1107*t]];
R[i + 2172*t] = Op[i + 1108*t] ? R[B[i + 1108*t]] * R[C[i + 1108*t]] : R[B[i + 1108*t]] + R[C[i + 1108*t]];
R[i + 2173*t] = Op[i + 1109*t] ? R[B[i + 1109*t]] * R[C[i + 1109*t]] : R[B[i + 1109*t]] + R[C[i + 1109*t]];
R[i + 2174*t] = Op[i + 1110*t] ? R[B[i + 1110*t]] * R[C[i + 1110*t]] : R[B[i + 1110*t]] + R[C[i + 1110*t]];
R[i + 2175*t] = Op[i + 1111*t] ? R[B[i + 1111*t]] * R[C[i + 1111*t]] : R[B[i + 1111*t]] + R[C[i + 1111*t]];
R[i + 2176*t] = Op[i + 1112*t] ? R[B[i + 1112*t]] * R[C[i + 1112*t]] : R[B[i + 1112*t]] + R[C[i + 1112*t]];
R[i + 2177*t] = Op[i + 1113*t] ? R[B[i + 1113*t]] * R[C[i + 1113*t]] : R[B[i + 1113*t]] + R[C[i + 1113*t]];
R[i + 2178*t] = Op[i + 1114*t] ? R[B[i + 1114*t]] * R[C[i + 1114*t]] : R[B[i + 1114*t]] + R[C[i + 1114*t]];
R[i + 2179*t] = Op[i + 1115*t] ? R[B[i + 1115*t]] * R[C[i + 1115*t]] : R[B[i + 1115*t]] + R[C[i + 1115*t]];
R[i + 2180*t] = Op[i + 1116*t] ? R[B[i + 1116*t]] * R[C[i + 1116*t]] : R[B[i + 1116*t]] + R[C[i + 1116*t]];
R[i + 2181*t] = Op[i + 1117*t] ? R[B[i + 1117*t]] * R[C[i + 1117*t]] : R[B[i + 1117*t]] + R[C[i + 1117*t]];
R[i + 2182*t] = Op[i + 1118*t] ? R[B[i + 1118*t]] * R[C[i + 1118*t]] : R[B[i + 1118*t]] + R[C[i + 1118*t]];
__syncthreads();
R[i + 2183*t] = Op[i + 1119*t] ? R[B[i + 1119*t]] * R[C[i + 1119*t]] : R[B[i + 1119*t]] + R[C[i + 1119*t]];
R[i + 2184*t] = Op[i + 1120*t] ? R[B[i + 1120*t]] * R[C[i + 1120*t]] : R[B[i + 1120*t]] + R[C[i + 1120*t]];
R[i + 2185*t] = Op[i + 1121*t] ? R[B[i + 1121*t]] * R[C[i + 1121*t]] : R[B[i + 1121*t]] + R[C[i + 1121*t]];
R[i + 2186*t] = Op[i + 1122*t] ? R[B[i + 1122*t]] * R[C[i + 1122*t]] : R[B[i + 1122*t]] + R[C[i + 1122*t]];
R[i + 2187*t] = Op[i + 1123*t] ? R[B[i + 1123*t]] * R[C[i + 1123*t]] : R[B[i + 1123*t]] + R[C[i + 1123*t]];
R[i + 2188*t] = Op[i + 1124*t] ? R[B[i + 1124*t]] * R[C[i + 1124*t]] : R[B[i + 1124*t]] + R[C[i + 1124*t]];
R[i + 2189*t] = Op[i + 1125*t] ? R[B[i + 1125*t]] * R[C[i + 1125*t]] : R[B[i + 1125*t]] + R[C[i + 1125*t]];
R[i + 2190*t] = Op[i + 1126*t] ? R[B[i + 1126*t]] * R[C[i + 1126*t]] : R[B[i + 1126*t]] + R[C[i + 1126*t]];
R[i + 2191*t] = Op[i + 1127*t] ? R[B[i + 1127*t]] * R[C[i + 1127*t]] : R[B[i + 1127*t]] + R[C[i + 1127*t]];
R[i + 2192*t] = Op[i + 1128*t] ? R[B[i + 1128*t]] * R[C[i + 1128*t]] : R[B[i + 1128*t]] + R[C[i + 1128*t]];
R[i + 2193*t] = Op[i + 1129*t] ? R[B[i + 1129*t]] * R[C[i + 1129*t]] : R[B[i + 1129*t]] + R[C[i + 1129*t]];
R[i + 2194*t] = Op[i + 1130*t] ? R[B[i + 1130*t]] * R[C[i + 1130*t]] : R[B[i + 1130*t]] + R[C[i + 1130*t]];
R[i + 2195*t] = Op[i + 1131*t] ? R[B[i + 1131*t]] * R[C[i + 1131*t]] : R[B[i + 1131*t]] + R[C[i + 1131*t]];
R[i + 2196*t] = Op[i + 1132*t] ? R[B[i + 1132*t]] * R[C[i + 1132*t]] : R[B[i + 1132*t]] + R[C[i + 1132*t]];
R[i + 2197*t] = Op[i + 1133*t] ? R[B[i + 1133*t]] * R[C[i + 1133*t]] : R[B[i + 1133*t]] + R[C[i + 1133*t]];
R[i + 2198*t] = Op[i + 1134*t] ? R[B[i + 1134*t]] * R[C[i + 1134*t]] : R[B[i + 1134*t]] + R[C[i + 1134*t]];
R[i + 2199*t] = Op[i + 1135*t] ? R[B[i + 1135*t]] * R[C[i + 1135*t]] : R[B[i + 1135*t]] + R[C[i + 1135*t]];
R[i + 2200*t] = Op[i + 1136*t] ? R[B[i + 1136*t]] * R[C[i + 1136*t]] : R[B[i + 1136*t]] + R[C[i + 1136*t]];
R[i + 2201*t] = Op[i + 1137*t] ? R[B[i + 1137*t]] * R[C[i + 1137*t]] : R[B[i + 1137*t]] + R[C[i + 1137*t]];
R[i + 2202*t] = Op[i + 1138*t] ? R[B[i + 1138*t]] * R[C[i + 1138*t]] : R[B[i + 1138*t]] + R[C[i + 1138*t]];
R[i + 2203*t] = Op[i + 1139*t] ? R[B[i + 1139*t]] * R[C[i + 1139*t]] : R[B[i + 1139*t]] + R[C[i + 1139*t]];
R[i + 2204*t] = Op[i + 1140*t] ? R[B[i + 1140*t]] * R[C[i + 1140*t]] : R[B[i + 1140*t]] + R[C[i + 1140*t]];
R[i + 2205*t] = Op[i + 1141*t] ? R[B[i + 1141*t]] * R[C[i + 1141*t]] : R[B[i + 1141*t]] + R[C[i + 1141*t]];
R[i + 2206*t] = Op[i + 1142*t] ? R[B[i + 1142*t]] * R[C[i + 1142*t]] : R[B[i + 1142*t]] + R[C[i + 1142*t]];
R[i + 2207*t] = Op[i + 1143*t] ? R[B[i + 1143*t]] * R[C[i + 1143*t]] : R[B[i + 1143*t]] + R[C[i + 1143*t]];
R[i + 2208*t] = Op[i + 1144*t] ? R[B[i + 1144*t]] * R[C[i + 1144*t]] : R[B[i + 1144*t]] + R[C[i + 1144*t]];
R[i + 2209*t] = Op[i + 1145*t] ? R[B[i + 1145*t]] * R[C[i + 1145*t]] : R[B[i + 1145*t]] + R[C[i + 1145*t]];
R[i + 2210*t] = Op[i + 1146*t] ? R[B[i + 1146*t]] * R[C[i + 1146*t]] : R[B[i + 1146*t]] + R[C[i + 1146*t]];
R[i + 2211*t] = Op[i + 1147*t] ? R[B[i + 1147*t]] * R[C[i + 1147*t]] : R[B[i + 1147*t]] + R[C[i + 1147*t]];
R[i + 2212*t] = Op[i + 1148*t] ? R[B[i + 1148*t]] * R[C[i + 1148*t]] : R[B[i + 1148*t]] + R[C[i + 1148*t]];
R[i + 2213*t] = Op[i + 1149*t] ? R[B[i + 1149*t]] * R[C[i + 1149*t]] : R[B[i + 1149*t]] + R[C[i + 1149*t]];
R[i + 2214*t] = Op[i + 1150*t] ? R[B[i + 1150*t]] * R[C[i + 1150*t]] : R[B[i + 1150*t]] + R[C[i + 1150*t]];
R[i + 2215*t] = Op[i + 1151*t] ? R[B[i + 1151*t]] * R[C[i + 1151*t]] : R[B[i + 1151*t]] + R[C[i + 1151*t]];
R[i + 2216*t] = Op[i + 1152*t] ? R[B[i + 1152*t]] * R[C[i + 1152*t]] : R[B[i + 1152*t]] + R[C[i + 1152*t]];
R[i + 2217*t] = Op[i + 1153*t] ? R[B[i + 1153*t]] * R[C[i + 1153*t]] : R[B[i + 1153*t]] + R[C[i + 1153*t]];
__syncthreads();
R[i + 2218*t] = Op[i + 1154*t] ? R[B[i + 1154*t]] * R[C[i + 1154*t]] : R[B[i + 1154*t]] + R[C[i + 1154*t]];
R[i + 2219*t] = Op[i + 1155*t] ? R[B[i + 1155*t]] * R[C[i + 1155*t]] : R[B[i + 1155*t]] + R[C[i + 1155*t]];
R[i + 2220*t] = Op[i + 1156*t] ? R[B[i + 1156*t]] * R[C[i + 1156*t]] : R[B[i + 1156*t]] + R[C[i + 1156*t]];
R[i + 2221*t] = Op[i + 1157*t] ? R[B[i + 1157*t]] * R[C[i + 1157*t]] : R[B[i + 1157*t]] + R[C[i + 1157*t]];
R[i + 2222*t] = Op[i + 1158*t] ? R[B[i + 1158*t]] * R[C[i + 1158*t]] : R[B[i + 1158*t]] + R[C[i + 1158*t]];
R[i + 2223*t] = Op[i + 1159*t] ? R[B[i + 1159*t]] * R[C[i + 1159*t]] : R[B[i + 1159*t]] + R[C[i + 1159*t]];
R[i + 2224*t] = Op[i + 1160*t] ? R[B[i + 1160*t]] * R[C[i + 1160*t]] : R[B[i + 1160*t]] + R[C[i + 1160*t]];
R[i + 2225*t] = Op[i + 1161*t] ? R[B[i + 1161*t]] * R[C[i + 1161*t]] : R[B[i + 1161*t]] + R[C[i + 1161*t]];
R[i + 2226*t] = Op[i + 1162*t] ? R[B[i + 1162*t]] * R[C[i + 1162*t]] : R[B[i + 1162*t]] + R[C[i + 1162*t]];
R[i + 2227*t] = Op[i + 1163*t] ? R[B[i + 1163*t]] * R[C[i + 1163*t]] : R[B[i + 1163*t]] + R[C[i + 1163*t]];
R[i + 2228*t] = Op[i + 1164*t] ? R[B[i + 1164*t]] * R[C[i + 1164*t]] : R[B[i + 1164*t]] + R[C[i + 1164*t]];
R[i + 2229*t] = Op[i + 1165*t] ? R[B[i + 1165*t]] * R[C[i + 1165*t]] : R[B[i + 1165*t]] + R[C[i + 1165*t]];
R[i + 2230*t] = Op[i + 1166*t] ? R[B[i + 1166*t]] * R[C[i + 1166*t]] : R[B[i + 1166*t]] + R[C[i + 1166*t]];
R[i + 2231*t] = Op[i + 1167*t] ? R[B[i + 1167*t]] * R[C[i + 1167*t]] : R[B[i + 1167*t]] + R[C[i + 1167*t]];
R[i + 2232*t] = Op[i + 1168*t] ? R[B[i + 1168*t]] * R[C[i + 1168*t]] : R[B[i + 1168*t]] + R[C[i + 1168*t]];
R[i + 2233*t] = Op[i + 1169*t] ? R[B[i + 1169*t]] * R[C[i + 1169*t]] : R[B[i + 1169*t]] + R[C[i + 1169*t]];
R[i + 2234*t] = Op[i + 1170*t] ? R[B[i + 1170*t]] * R[C[i + 1170*t]] : R[B[i + 1170*t]] + R[C[i + 1170*t]];
R[i + 2235*t] = Op[i + 1171*t] ? R[B[i + 1171*t]] * R[C[i + 1171*t]] : R[B[i + 1171*t]] + R[C[i + 1171*t]];
R[i + 2236*t] = Op[i + 1172*t] ? R[B[i + 1172*t]] * R[C[i + 1172*t]] : R[B[i + 1172*t]] + R[C[i + 1172*t]];
R[i + 2237*t] = Op[i + 1173*t] ? R[B[i + 1173*t]] * R[C[i + 1173*t]] : R[B[i + 1173*t]] + R[C[i + 1173*t]];
R[i + 2238*t] = Op[i + 1174*t] ? R[B[i + 1174*t]] * R[C[i + 1174*t]] : R[B[i + 1174*t]] + R[C[i + 1174*t]];
R[i + 2239*t] = Op[i + 1175*t] ? R[B[i + 1175*t]] * R[C[i + 1175*t]] : R[B[i + 1175*t]] + R[C[i + 1175*t]];
R[i + 2240*t] = Op[i + 1176*t] ? R[B[i + 1176*t]] * R[C[i + 1176*t]] : R[B[i + 1176*t]] + R[C[i + 1176*t]];
R[i + 2241*t] = Op[i + 1177*t] ? R[B[i + 1177*t]] * R[C[i + 1177*t]] : R[B[i + 1177*t]] + R[C[i + 1177*t]];
R[i + 2242*t] = Op[i + 1178*t] ? R[B[i + 1178*t]] * R[C[i + 1178*t]] : R[B[i + 1178*t]] + R[C[i + 1178*t]];
R[i + 2243*t] = Op[i + 1179*t] ? R[B[i + 1179*t]] * R[C[i + 1179*t]] : R[B[i + 1179*t]] + R[C[i + 1179*t]];
R[i + 2244*t] = Op[i + 1180*t] ? R[B[i + 1180*t]] * R[C[i + 1180*t]] : R[B[i + 1180*t]] + R[C[i + 1180*t]];
R[i + 2245*t] = Op[i + 1181*t] ? R[B[i + 1181*t]] * R[C[i + 1181*t]] : R[B[i + 1181*t]] + R[C[i + 1181*t]];
R[i + 2246*t] = Op[i + 1182*t] ? R[B[i + 1182*t]] * R[C[i + 1182*t]] : R[B[i + 1182*t]] + R[C[i + 1182*t]];
R[i + 2247*t] = Op[i + 1183*t] ? R[B[i + 1183*t]] * R[C[i + 1183*t]] : R[B[i + 1183*t]] + R[C[i + 1183*t]];
R[i + 2248*t] = Op[i + 1184*t] ? R[B[i + 1184*t]] * R[C[i + 1184*t]] : R[B[i + 1184*t]] + R[C[i + 1184*t]];
R[i + 2249*t] = Op[i + 1185*t] ? R[B[i + 1185*t]] * R[C[i + 1185*t]] : R[B[i + 1185*t]] + R[C[i + 1185*t]];
R[i + 2250*t] = Op[i + 1186*t] ? R[B[i + 1186*t]] * R[C[i + 1186*t]] : R[B[i + 1186*t]] + R[C[i + 1186*t]];
R[i + 2251*t] = Op[i + 1187*t] ? R[B[i + 1187*t]] * R[C[i + 1187*t]] : R[B[i + 1187*t]] + R[C[i + 1187*t]];
__syncthreads();
R[i + 2252*t] = Op[i + 1188*t] ? R[B[i + 1188*t]] * R[C[i + 1188*t]] : R[B[i + 1188*t]] + R[C[i + 1188*t]];
R[i + 2253*t] = Op[i + 1189*t] ? R[B[i + 1189*t]] * R[C[i + 1189*t]] : R[B[i + 1189*t]] + R[C[i + 1189*t]];
R[i + 2254*t] = Op[i + 1190*t] ? R[B[i + 1190*t]] * R[C[i + 1190*t]] : R[B[i + 1190*t]] + R[C[i + 1190*t]];
R[i + 2255*t] = Op[i + 1191*t] ? R[B[i + 1191*t]] * R[C[i + 1191*t]] : R[B[i + 1191*t]] + R[C[i + 1191*t]];
R[i + 2256*t] = Op[i + 1192*t] ? R[B[i + 1192*t]] * R[C[i + 1192*t]] : R[B[i + 1192*t]] + R[C[i + 1192*t]];
R[i + 2257*t] = Op[i + 1193*t] ? R[B[i + 1193*t]] * R[C[i + 1193*t]] : R[B[i + 1193*t]] + R[C[i + 1193*t]];
R[i + 2258*t] = Op[i + 1194*t] ? R[B[i + 1194*t]] * R[C[i + 1194*t]] : R[B[i + 1194*t]] + R[C[i + 1194*t]];
R[i + 2259*t] = Op[i + 1195*t] ? R[B[i + 1195*t]] * R[C[i + 1195*t]] : R[B[i + 1195*t]] + R[C[i + 1195*t]];
R[i + 2260*t] = Op[i + 1196*t] ? R[B[i + 1196*t]] * R[C[i + 1196*t]] : R[B[i + 1196*t]] + R[C[i + 1196*t]];
R[i + 2261*t] = Op[i + 1197*t] ? R[B[i + 1197*t]] * R[C[i + 1197*t]] : R[B[i + 1197*t]] + R[C[i + 1197*t]];
R[i + 2262*t] = Op[i + 1198*t] ? R[B[i + 1198*t]] * R[C[i + 1198*t]] : R[B[i + 1198*t]] + R[C[i + 1198*t]];
R[i + 2263*t] = Op[i + 1199*t] ? R[B[i + 1199*t]] * R[C[i + 1199*t]] : R[B[i + 1199*t]] + R[C[i + 1199*t]];
R[i + 2264*t] = Op[i + 1200*t] ? R[B[i + 1200*t]] * R[C[i + 1200*t]] : R[B[i + 1200*t]] + R[C[i + 1200*t]];
R[i + 2265*t] = Op[i + 1201*t] ? R[B[i + 1201*t]] * R[C[i + 1201*t]] : R[B[i + 1201*t]] + R[C[i + 1201*t]];
R[i + 2266*t] = Op[i + 1202*t] ? R[B[i + 1202*t]] * R[C[i + 1202*t]] : R[B[i + 1202*t]] + R[C[i + 1202*t]];
R[i + 2267*t] = Op[i + 1203*t] ? R[B[i + 1203*t]] * R[C[i + 1203*t]] : R[B[i + 1203*t]] + R[C[i + 1203*t]];
R[i + 2268*t] = Op[i + 1204*t] ? R[B[i + 1204*t]] * R[C[i + 1204*t]] : R[B[i + 1204*t]] + R[C[i + 1204*t]];
R[i + 2269*t] = Op[i + 1205*t] ? R[B[i + 1205*t]] * R[C[i + 1205*t]] : R[B[i + 1205*t]] + R[C[i + 1205*t]];
R[i + 2270*t] = Op[i + 1206*t] ? R[B[i + 1206*t]] * R[C[i + 1206*t]] : R[B[i + 1206*t]] + R[C[i + 1206*t]];
R[i + 2271*t] = Op[i + 1207*t] ? R[B[i + 1207*t]] * R[C[i + 1207*t]] : R[B[i + 1207*t]] + R[C[i + 1207*t]];
R[i + 2272*t] = Op[i + 1208*t] ? R[B[i + 1208*t]] * R[C[i + 1208*t]] : R[B[i + 1208*t]] + R[C[i + 1208*t]];
R[i + 2273*t] = Op[i + 1209*t] ? R[B[i + 1209*t]] * R[C[i + 1209*t]] : R[B[i + 1209*t]] + R[C[i + 1209*t]];
R[i + 2274*t] = Op[i + 1210*t] ? R[B[i + 1210*t]] * R[C[i + 1210*t]] : R[B[i + 1210*t]] + R[C[i + 1210*t]];
R[i + 2275*t] = Op[i + 1211*t] ? R[B[i + 1211*t]] * R[C[i + 1211*t]] : R[B[i + 1211*t]] + R[C[i + 1211*t]];
__syncthreads();
R[i + 2276*t] = Op[i + 1212*t] ? R[B[i + 1212*t]] * R[C[i + 1212*t]] : R[B[i + 1212*t]] + R[C[i + 1212*t]];
R[i + 2277*t] = Op[i + 1213*t] ? R[B[i + 1213*t]] * R[C[i + 1213*t]] : R[B[i + 1213*t]] + R[C[i + 1213*t]];
R[i + 2278*t] = Op[i + 1214*t] ? R[B[i + 1214*t]] * R[C[i + 1214*t]] : R[B[i + 1214*t]] + R[C[i + 1214*t]];
R[i + 2279*t] = Op[i + 1215*t] ? R[B[i + 1215*t]] * R[C[i + 1215*t]] : R[B[i + 1215*t]] + R[C[i + 1215*t]];
R[i + 2280*t] = Op[i + 1216*t] ? R[B[i + 1216*t]] * R[C[i + 1216*t]] : R[B[i + 1216*t]] + R[C[i + 1216*t]];
R[i + 2281*t] = Op[i + 1217*t] ? R[B[i + 1217*t]] * R[C[i + 1217*t]] : R[B[i + 1217*t]] + R[C[i + 1217*t]];
R[i + 2282*t] = Op[i + 1218*t] ? R[B[i + 1218*t]] * R[C[i + 1218*t]] : R[B[i + 1218*t]] + R[C[i + 1218*t]];
R[i + 2283*t] = Op[i + 1219*t] ? R[B[i + 1219*t]] * R[C[i + 1219*t]] : R[B[i + 1219*t]] + R[C[i + 1219*t]];
R[i + 2284*t] = Op[i + 1220*t] ? R[B[i + 1220*t]] * R[C[i + 1220*t]] : R[B[i + 1220*t]] + R[C[i + 1220*t]];
R[i + 2285*t] = Op[i + 1221*t] ? R[B[i + 1221*t]] * R[C[i + 1221*t]] : R[B[i + 1221*t]] + R[C[i + 1221*t]];
R[i + 2286*t] = Op[i + 1222*t] ? R[B[i + 1222*t]] * R[C[i + 1222*t]] : R[B[i + 1222*t]] + R[C[i + 1222*t]];
R[i + 2287*t] = Op[i + 1223*t] ? R[B[i + 1223*t]] * R[C[i + 1223*t]] : R[B[i + 1223*t]] + R[C[i + 1223*t]];
R[i + 2288*t] = Op[i + 1224*t] ? R[B[i + 1224*t]] * R[C[i + 1224*t]] : R[B[i + 1224*t]] + R[C[i + 1224*t]];
R[i + 2289*t] = Op[i + 1225*t] ? R[B[i + 1225*t]] * R[C[i + 1225*t]] : R[B[i + 1225*t]] + R[C[i + 1225*t]];
R[i + 2290*t] = Op[i + 1226*t] ? R[B[i + 1226*t]] * R[C[i + 1226*t]] : R[B[i + 1226*t]] + R[C[i + 1226*t]];
R[i + 2291*t] = Op[i + 1227*t] ? R[B[i + 1227*t]] * R[C[i + 1227*t]] : R[B[i + 1227*t]] + R[C[i + 1227*t]];
R[i + 2292*t] = Op[i + 1228*t] ? R[B[i + 1228*t]] * R[C[i + 1228*t]] : R[B[i + 1228*t]] + R[C[i + 1228*t]];
R[i + 2293*t] = Op[i + 1229*t] ? R[B[i + 1229*t]] * R[C[i + 1229*t]] : R[B[i + 1229*t]] + R[C[i + 1229*t]];
R[i + 2294*t] = Op[i + 1230*t] ? R[B[i + 1230*t]] * R[C[i + 1230*t]] : R[B[i + 1230*t]] + R[C[i + 1230*t]];
R[i + 2295*t] = Op[i + 1231*t] ? R[B[i + 1231*t]] * R[C[i + 1231*t]] : R[B[i + 1231*t]] + R[C[i + 1231*t]];
__syncthreads();
R[i + 2296*t] = Op[i + 1232*t] ? R[B[i + 1232*t]] * R[C[i + 1232*t]] : R[B[i + 1232*t]] + R[C[i + 1232*t]];
R[i + 2297*t] = Op[i + 1233*t] ? R[B[i + 1233*t]] * R[C[i + 1233*t]] : R[B[i + 1233*t]] + R[C[i + 1233*t]];
R[i + 2298*t] = Op[i + 1234*t] ? R[B[i + 1234*t]] * R[C[i + 1234*t]] : R[B[i + 1234*t]] + R[C[i + 1234*t]];
R[i + 2299*t] = Op[i + 1235*t] ? R[B[i + 1235*t]] * R[C[i + 1235*t]] : R[B[i + 1235*t]] + R[C[i + 1235*t]];
R[i + 2300*t] = Op[i + 1236*t] ? R[B[i + 1236*t]] * R[C[i + 1236*t]] : R[B[i + 1236*t]] + R[C[i + 1236*t]];
R[i + 2301*t] = Op[i + 1237*t] ? R[B[i + 1237*t]] * R[C[i + 1237*t]] : R[B[i + 1237*t]] + R[C[i + 1237*t]];
R[i + 2302*t] = Op[i + 1238*t] ? R[B[i + 1238*t]] * R[C[i + 1238*t]] : R[B[i + 1238*t]] + R[C[i + 1238*t]];
R[i + 2303*t] = Op[i + 1239*t] ? R[B[i + 1239*t]] * R[C[i + 1239*t]] : R[B[i + 1239*t]] + R[C[i + 1239*t]];
R[i + 2304*t] = Op[i + 1240*t] ? R[B[i + 1240*t]] * R[C[i + 1240*t]] : R[B[i + 1240*t]] + R[C[i + 1240*t]];
R[i + 2305*t] = Op[i + 1241*t] ? R[B[i + 1241*t]] * R[C[i + 1241*t]] : R[B[i + 1241*t]] + R[C[i + 1241*t]];
R[i + 2306*t] = Op[i + 1242*t] ? R[B[i + 1242*t]] * R[C[i + 1242*t]] : R[B[i + 1242*t]] + R[C[i + 1242*t]];
R[i + 2307*t] = Op[i + 1243*t] ? R[B[i + 1243*t]] * R[C[i + 1243*t]] : R[B[i + 1243*t]] + R[C[i + 1243*t]];
R[i + 2308*t] = Op[i + 1244*t] ? R[B[i + 1244*t]] * R[C[i + 1244*t]] : R[B[i + 1244*t]] + R[C[i + 1244*t]];
R[i + 2309*t] = Op[i + 1245*t] ? R[B[i + 1245*t]] * R[C[i + 1245*t]] : R[B[i + 1245*t]] + R[C[i + 1245*t]];
R[i + 2310*t] = Op[i + 1246*t] ? R[B[i + 1246*t]] * R[C[i + 1246*t]] : R[B[i + 1246*t]] + R[C[i + 1246*t]];
R[i + 2311*t] = Op[i + 1247*t] ? R[B[i + 1247*t]] * R[C[i + 1247*t]] : R[B[i + 1247*t]] + R[C[i + 1247*t]];
R[i + 2312*t] = Op[i + 1248*t] ? R[B[i + 1248*t]] * R[C[i + 1248*t]] : R[B[i + 1248*t]] + R[C[i + 1248*t]];
R[i + 2313*t] = Op[i + 1249*t] ? R[B[i + 1249*t]] * R[C[i + 1249*t]] : R[B[i + 1249*t]] + R[C[i + 1249*t]];
R[i + 2314*t] = Op[i + 1250*t] ? R[B[i + 1250*t]] * R[C[i + 1250*t]] : R[B[i + 1250*t]] + R[C[i + 1250*t]];
__syncthreads();
R[i + 2315*t] = Op[i + 1251*t] ? R[B[i + 1251*t]] * R[C[i + 1251*t]] : R[B[i + 1251*t]] + R[C[i + 1251*t]];
R[i + 2316*t] = Op[i + 1252*t] ? R[B[i + 1252*t]] * R[C[i + 1252*t]] : R[B[i + 1252*t]] + R[C[i + 1252*t]];
R[i + 2317*t] = Op[i + 1253*t] ? R[B[i + 1253*t]] * R[C[i + 1253*t]] : R[B[i + 1253*t]] + R[C[i + 1253*t]];
R[i + 2318*t] = Op[i + 1254*t] ? R[B[i + 1254*t]] * R[C[i + 1254*t]] : R[B[i + 1254*t]] + R[C[i + 1254*t]];
R[i + 2319*t] = Op[i + 1255*t] ? R[B[i + 1255*t]] * R[C[i + 1255*t]] : R[B[i + 1255*t]] + R[C[i + 1255*t]];
R[i + 2320*t] = Op[i + 1256*t] ? R[B[i + 1256*t]] * R[C[i + 1256*t]] : R[B[i + 1256*t]] + R[C[i + 1256*t]];
R[i + 2321*t] = Op[i + 1257*t] ? R[B[i + 1257*t]] * R[C[i + 1257*t]] : R[B[i + 1257*t]] + R[C[i + 1257*t]];
R[i + 2322*t] = Op[i + 1258*t] ? R[B[i + 1258*t]] * R[C[i + 1258*t]] : R[B[i + 1258*t]] + R[C[i + 1258*t]];
R[i + 2323*t] = Op[i + 1259*t] ? R[B[i + 1259*t]] * R[C[i + 1259*t]] : R[B[i + 1259*t]] + R[C[i + 1259*t]];
R[i + 2324*t] = Op[i + 1260*t] ? R[B[i + 1260*t]] * R[C[i + 1260*t]] : R[B[i + 1260*t]] + R[C[i + 1260*t]];
R[i + 2325*t] = Op[i + 1261*t] ? R[B[i + 1261*t]] * R[C[i + 1261*t]] : R[B[i + 1261*t]] + R[C[i + 1261*t]];
R[i + 2326*t] = Op[i + 1262*t] ? R[B[i + 1262*t]] * R[C[i + 1262*t]] : R[B[i + 1262*t]] + R[C[i + 1262*t]];
R[i + 2327*t] = Op[i + 1263*t] ? R[B[i + 1263*t]] * R[C[i + 1263*t]] : R[B[i + 1263*t]] + R[C[i + 1263*t]];
R[i + 2328*t] = Op[i + 1264*t] ? R[B[i + 1264*t]] * R[C[i + 1264*t]] : R[B[i + 1264*t]] + R[C[i + 1264*t]];
__syncthreads();
R[i + 2329*t] = Op[i + 1265*t] ? R[B[i + 1265*t]] * R[C[i + 1265*t]] : R[B[i + 1265*t]] + R[C[i + 1265*t]];
R[i + 2330*t] = Op[i + 1266*t] ? R[B[i + 1266*t]] * R[C[i + 1266*t]] : R[B[i + 1266*t]] + R[C[i + 1266*t]];
R[i + 2331*t] = Op[i + 1267*t] ? R[B[i + 1267*t]] * R[C[i + 1267*t]] : R[B[i + 1267*t]] + R[C[i + 1267*t]];
R[i + 2332*t] = Op[i + 1268*t] ? R[B[i + 1268*t]] * R[C[i + 1268*t]] : R[B[i + 1268*t]] + R[C[i + 1268*t]];
R[i + 2333*t] = Op[i + 1269*t] ? R[B[i + 1269*t]] * R[C[i + 1269*t]] : R[B[i + 1269*t]] + R[C[i + 1269*t]];
R[i + 2334*t] = Op[i + 1270*t] ? R[B[i + 1270*t]] * R[C[i + 1270*t]] : R[B[i + 1270*t]] + R[C[i + 1270*t]];
R[i + 2335*t] = Op[i + 1271*t] ? R[B[i + 1271*t]] * R[C[i + 1271*t]] : R[B[i + 1271*t]] + R[C[i + 1271*t]];
R[i + 2336*t] = Op[i + 1272*t] ? R[B[i + 1272*t]] * R[C[i + 1272*t]] : R[B[i + 1272*t]] + R[C[i + 1272*t]];
R[i + 2337*t] = Op[i + 1273*t] ? R[B[i + 1273*t]] * R[C[i + 1273*t]] : R[B[i + 1273*t]] + R[C[i + 1273*t]];
R[i + 2338*t] = Op[i + 1274*t] ? R[B[i + 1274*t]] * R[C[i + 1274*t]] : R[B[i + 1274*t]] + R[C[i + 1274*t]];
R[i + 2339*t] = Op[i + 1275*t] ? R[B[i + 1275*t]] * R[C[i + 1275*t]] : R[B[i + 1275*t]] + R[C[i + 1275*t]];
R[i + 2340*t] = Op[i + 1276*t] ? R[B[i + 1276*t]] * R[C[i + 1276*t]] : R[B[i + 1276*t]] + R[C[i + 1276*t]];
R[i + 2341*t] = Op[i + 1277*t] ? R[B[i + 1277*t]] * R[C[i + 1277*t]] : R[B[i + 1277*t]] + R[C[i + 1277*t]];
R[i + 2342*t] = Op[i + 1278*t] ? R[B[i + 1278*t]] * R[C[i + 1278*t]] : R[B[i + 1278*t]] + R[C[i + 1278*t]];
R[i + 2343*t] = Op[i + 1279*t] ? R[B[i + 1279*t]] * R[C[i + 1279*t]] : R[B[i + 1279*t]] + R[C[i + 1279*t]];
R[i + 2344*t] = Op[i + 1280*t] ? R[B[i + 1280*t]] * R[C[i + 1280*t]] : R[B[i + 1280*t]] + R[C[i + 1280*t]];
R[i + 2345*t] = Op[i + 1281*t] ? R[B[i + 1281*t]] * R[C[i + 1281*t]] : R[B[i + 1281*t]] + R[C[i + 1281*t]];
R[i + 2346*t] = Op[i + 1282*t] ? R[B[i + 1282*t]] * R[C[i + 1282*t]] : R[B[i + 1282*t]] + R[C[i + 1282*t]];
__syncthreads();
R[i + 2347*t] = Op[i + 1283*t] ? R[B[i + 1283*t]] * R[C[i + 1283*t]] : R[B[i + 1283*t]] + R[C[i + 1283*t]];
R[i + 2348*t] = Op[i + 1284*t] ? R[B[i + 1284*t]] * R[C[i + 1284*t]] : R[B[i + 1284*t]] + R[C[i + 1284*t]];
R[i + 2349*t] = Op[i + 1285*t] ? R[B[i + 1285*t]] * R[C[i + 1285*t]] : R[B[i + 1285*t]] + R[C[i + 1285*t]];
R[i + 2350*t] = Op[i + 1286*t] ? R[B[i + 1286*t]] * R[C[i + 1286*t]] : R[B[i + 1286*t]] + R[C[i + 1286*t]];
R[i + 2351*t] = Op[i + 1287*t] ? R[B[i + 1287*t]] * R[C[i + 1287*t]] : R[B[i + 1287*t]] + R[C[i + 1287*t]];
R[i + 2352*t] = Op[i + 1288*t] ? R[B[i + 1288*t]] * R[C[i + 1288*t]] : R[B[i + 1288*t]] + R[C[i + 1288*t]];
R[i + 2353*t] = Op[i + 1289*t] ? R[B[i + 1289*t]] * R[C[i + 1289*t]] : R[B[i + 1289*t]] + R[C[i + 1289*t]];
R[i + 2354*t] = Op[i + 1290*t] ? R[B[i + 1290*t]] * R[C[i + 1290*t]] : R[B[i + 1290*t]] + R[C[i + 1290*t]];
R[i + 2355*t] = Op[i + 1291*t] ? R[B[i + 1291*t]] * R[C[i + 1291*t]] : R[B[i + 1291*t]] + R[C[i + 1291*t]];
R[i + 2356*t] = Op[i + 1292*t] ? R[B[i + 1292*t]] * R[C[i + 1292*t]] : R[B[i + 1292*t]] + R[C[i + 1292*t]];
R[i + 2357*t] = Op[i + 1293*t] ? R[B[i + 1293*t]] * R[C[i + 1293*t]] : R[B[i + 1293*t]] + R[C[i + 1293*t]];
R[i + 2358*t] = Op[i + 1294*t] ? R[B[i + 1294*t]] * R[C[i + 1294*t]] : R[B[i + 1294*t]] + R[C[i + 1294*t]];
R[i + 2359*t] = Op[i + 1295*t] ? R[B[i + 1295*t]] * R[C[i + 1295*t]] : R[B[i + 1295*t]] + R[C[i + 1295*t]];
R[i + 2360*t] = Op[i + 1296*t] ? R[B[i + 1296*t]] * R[C[i + 1296*t]] : R[B[i + 1296*t]] + R[C[i + 1296*t]];
__syncthreads();
R[i + 2361*t] = Op[i + 1297*t] ? R[B[i + 1297*t]] * R[C[i + 1297*t]] : R[B[i + 1297*t]] + R[C[i + 1297*t]];
R[i + 2362*t] = Op[i + 1298*t] ? R[B[i + 1298*t]] * R[C[i + 1298*t]] : R[B[i + 1298*t]] + R[C[i + 1298*t]];
R[i + 2363*t] = Op[i + 1299*t] ? R[B[i + 1299*t]] * R[C[i + 1299*t]] : R[B[i + 1299*t]] + R[C[i + 1299*t]];
R[i + 2364*t] = Op[i + 1300*t] ? R[B[i + 1300*t]] * R[C[i + 1300*t]] : R[B[i + 1300*t]] + R[C[i + 1300*t]];
R[i + 2365*t] = Op[i + 1301*t] ? R[B[i + 1301*t]] * R[C[i + 1301*t]] : R[B[i + 1301*t]] + R[C[i + 1301*t]];
R[i + 2366*t] = Op[i + 1302*t] ? R[B[i + 1302*t]] * R[C[i + 1302*t]] : R[B[i + 1302*t]] + R[C[i + 1302*t]];
R[i + 2367*t] = Op[i + 1303*t] ? R[B[i + 1303*t]] * R[C[i + 1303*t]] : R[B[i + 1303*t]] + R[C[i + 1303*t]];
R[i + 2368*t] = Op[i + 1304*t] ? R[B[i + 1304*t]] * R[C[i + 1304*t]] : R[B[i + 1304*t]] + R[C[i + 1304*t]];
R[i + 2369*t] = Op[i + 1305*t] ? R[B[i + 1305*t]] * R[C[i + 1305*t]] : R[B[i + 1305*t]] + R[C[i + 1305*t]];
R[i + 2370*t] = Op[i + 1306*t] ? R[B[i + 1306*t]] * R[C[i + 1306*t]] : R[B[i + 1306*t]] + R[C[i + 1306*t]];
R[i + 2371*t] = Op[i + 1307*t] ? R[B[i + 1307*t]] * R[C[i + 1307*t]] : R[B[i + 1307*t]] + R[C[i + 1307*t]];
R[i + 2372*t] = Op[i + 1308*t] ? R[B[i + 1308*t]] * R[C[i + 1308*t]] : R[B[i + 1308*t]] + R[C[i + 1308*t]];
R[i + 2373*t] = Op[i + 1309*t] ? R[B[i + 1309*t]] * R[C[i + 1309*t]] : R[B[i + 1309*t]] + R[C[i + 1309*t]];
R[i + 2374*t] = Op[i + 1310*t] ? R[B[i + 1310*t]] * R[C[i + 1310*t]] : R[B[i + 1310*t]] + R[C[i + 1310*t]];
R[i + 2375*t] = Op[i + 1311*t] ? R[B[i + 1311*t]] * R[C[i + 1311*t]] : R[B[i + 1311*t]] + R[C[i + 1311*t]];
__syncthreads();
R[i + 2376*t] = Op[i + 1312*t] ? R[B[i + 1312*t]] * R[C[i + 1312*t]] : R[B[i + 1312*t]] + R[C[i + 1312*t]];
R[i + 2377*t] = Op[i + 1313*t] ? R[B[i + 1313*t]] * R[C[i + 1313*t]] : R[B[i + 1313*t]] + R[C[i + 1313*t]];
R[i + 2378*t] = Op[i + 1314*t] ? R[B[i + 1314*t]] * R[C[i + 1314*t]] : R[B[i + 1314*t]] + R[C[i + 1314*t]];
R[i + 2379*t] = Op[i + 1315*t] ? R[B[i + 1315*t]] * R[C[i + 1315*t]] : R[B[i + 1315*t]] + R[C[i + 1315*t]];
R[i + 2380*t] = Op[i + 1316*t] ? R[B[i + 1316*t]] * R[C[i + 1316*t]] : R[B[i + 1316*t]] + R[C[i + 1316*t]];
R[i + 2381*t] = Op[i + 1317*t] ? R[B[i + 1317*t]] * R[C[i + 1317*t]] : R[B[i + 1317*t]] + R[C[i + 1317*t]];
R[i + 2382*t] = Op[i + 1318*t] ? R[B[i + 1318*t]] * R[C[i + 1318*t]] : R[B[i + 1318*t]] + R[C[i + 1318*t]];
R[i + 2383*t] = Op[i + 1319*t] ? R[B[i + 1319*t]] * R[C[i + 1319*t]] : R[B[i + 1319*t]] + R[C[i + 1319*t]];
R[i + 2384*t] = Op[i + 1320*t] ? R[B[i + 1320*t]] * R[C[i + 1320*t]] : R[B[i + 1320*t]] + R[C[i + 1320*t]];
R[i + 2385*t] = Op[i + 1321*t] ? R[B[i + 1321*t]] * R[C[i + 1321*t]] : R[B[i + 1321*t]] + R[C[i + 1321*t]];
R[i + 2386*t] = Op[i + 1322*t] ? R[B[i + 1322*t]] * R[C[i + 1322*t]] : R[B[i + 1322*t]] + R[C[i + 1322*t]];
R[i + 2387*t] = Op[i + 1323*t] ? R[B[i + 1323*t]] * R[C[i + 1323*t]] : R[B[i + 1323*t]] + R[C[i + 1323*t]];
R[i + 2388*t] = Op[i + 1324*t] ? R[B[i + 1324*t]] * R[C[i + 1324*t]] : R[B[i + 1324*t]] + R[C[i + 1324*t]];
R[i + 2389*t] = Op[i + 1325*t] ? R[B[i + 1325*t]] * R[C[i + 1325*t]] : R[B[i + 1325*t]] + R[C[i + 1325*t]];
__syncthreads();
R[i + 2390*t] = Op[i + 1326*t] ? R[B[i + 1326*t]] * R[C[i + 1326*t]] : R[B[i + 1326*t]] + R[C[i + 1326*t]];
R[i + 2391*t] = Op[i + 1327*t] ? R[B[i + 1327*t]] * R[C[i + 1327*t]] : R[B[i + 1327*t]] + R[C[i + 1327*t]];
R[i + 2392*t] = Op[i + 1328*t] ? R[B[i + 1328*t]] * R[C[i + 1328*t]] : R[B[i + 1328*t]] + R[C[i + 1328*t]];
R[i + 2393*t] = Op[i + 1329*t] ? R[B[i + 1329*t]] * R[C[i + 1329*t]] : R[B[i + 1329*t]] + R[C[i + 1329*t]];
R[i + 2394*t] = Op[i + 1330*t] ? R[B[i + 1330*t]] * R[C[i + 1330*t]] : R[B[i + 1330*t]] + R[C[i + 1330*t]];
R[i + 2395*t] = Op[i + 1331*t] ? R[B[i + 1331*t]] * R[C[i + 1331*t]] : R[B[i + 1331*t]] + R[C[i + 1331*t]];
R[i + 2396*t] = Op[i + 1332*t] ? R[B[i + 1332*t]] * R[C[i + 1332*t]] : R[B[i + 1332*t]] + R[C[i + 1332*t]];
R[i + 2397*t] = Op[i + 1333*t] ? R[B[i + 1333*t]] * R[C[i + 1333*t]] : R[B[i + 1333*t]] + R[C[i + 1333*t]];
R[i + 2398*t] = Op[i + 1334*t] ? R[B[i + 1334*t]] * R[C[i + 1334*t]] : R[B[i + 1334*t]] + R[C[i + 1334*t]];
__syncthreads();
R[i + 2399*t] = Op[i + 1335*t] ? R[B[i + 1335*t]] * R[C[i + 1335*t]] : R[B[i + 1335*t]] + R[C[i + 1335*t]];
R[i + 2400*t] = Op[i + 1336*t] ? R[B[i + 1336*t]] * R[C[i + 1336*t]] : R[B[i + 1336*t]] + R[C[i + 1336*t]];
R[i + 2401*t] = Op[i + 1337*t] ? R[B[i + 1337*t]] * R[C[i + 1337*t]] : R[B[i + 1337*t]] + R[C[i + 1337*t]];
R[i + 2402*t] = Op[i + 1338*t] ? R[B[i + 1338*t]] * R[C[i + 1338*t]] : R[B[i + 1338*t]] + R[C[i + 1338*t]];
R[i + 2403*t] = Op[i + 1339*t] ? R[B[i + 1339*t]] * R[C[i + 1339*t]] : R[B[i + 1339*t]] + R[C[i + 1339*t]];
R[i + 2404*t] = Op[i + 1340*t] ? R[B[i + 1340*t]] * R[C[i + 1340*t]] : R[B[i + 1340*t]] + R[C[i + 1340*t]];
R[i + 2405*t] = Op[i + 1341*t] ? R[B[i + 1341*t]] * R[C[i + 1341*t]] : R[B[i + 1341*t]] + R[C[i + 1341*t]];
R[i + 2406*t] = Op[i + 1342*t] ? R[B[i + 1342*t]] * R[C[i + 1342*t]] : R[B[i + 1342*t]] + R[C[i + 1342*t]];
__syncthreads();
R[i + 2407*t] = Op[i + 1343*t] ? R[B[i + 1343*t]] * R[C[i + 1343*t]] : R[B[i + 1343*t]] + R[C[i + 1343*t]];
R[i + 2408*t] = Op[i + 1344*t] ? R[B[i + 1344*t]] * R[C[i + 1344*t]] : R[B[i + 1344*t]] + R[C[i + 1344*t]];
R[i + 2409*t] = Op[i + 1345*t] ? R[B[i + 1345*t]] * R[C[i + 1345*t]] : R[B[i + 1345*t]] + R[C[i + 1345*t]];
R[i + 2410*t] = Op[i + 1346*t] ? R[B[i + 1346*t]] * R[C[i + 1346*t]] : R[B[i + 1346*t]] + R[C[i + 1346*t]];
R[i + 2411*t] = Op[i + 1347*t] ? R[B[i + 1347*t]] * R[C[i + 1347*t]] : R[B[i + 1347*t]] + R[C[i + 1347*t]];
R[i + 2412*t] = Op[i + 1348*t] ? R[B[i + 1348*t]] * R[C[i + 1348*t]] : R[B[i + 1348*t]] + R[C[i + 1348*t]];
__syncthreads();
R[i + 2413*t] = Op[i + 1349*t] ? R[B[i + 1349*t]] * R[C[i + 1349*t]] : R[B[i + 1349*t]] + R[C[i + 1349*t]];
R[i + 2414*t] = Op[i + 1350*t] ? R[B[i + 1350*t]] * R[C[i + 1350*t]] : R[B[i + 1350*t]] + R[C[i + 1350*t]];
R[i + 2415*t] = Op[i + 1351*t] ? R[B[i + 1351*t]] * R[C[i + 1351*t]] : R[B[i + 1351*t]] + R[C[i + 1351*t]];
R[i + 2416*t] = Op[i + 1352*t] ? R[B[i + 1352*t]] * R[C[i + 1352*t]] : R[B[i + 1352*t]] + R[C[i + 1352*t]];
R[i + 2417*t] = Op[i + 1353*t] ? R[B[i + 1353*t]] * R[C[i + 1353*t]] : R[B[i + 1353*t]] + R[C[i + 1353*t]];
__syncthreads();
R[i + 2418*t] = Op[i + 1354*t] ? R[B[i + 1354*t]] * R[C[i + 1354*t]] : R[B[i + 1354*t]] + R[C[i + 1354*t]];
R[i + 2419*t] = Op[i + 1355*t] ? R[B[i + 1355*t]] * R[C[i + 1355*t]] : R[B[i + 1355*t]] + R[C[i + 1355*t]];
R[i + 2420*t] = Op[i + 1356*t] ? R[B[i + 1356*t]] * R[C[i + 1356*t]] : R[B[i + 1356*t]] + R[C[i + 1356*t]];
R[i + 2421*t] = Op[i + 1357*t] ? R[B[i + 1357*t]] * R[C[i + 1357*t]] : R[B[i + 1357*t]] + R[C[i + 1357*t]];
__syncthreads();
R[i + 2422*t] = Op[i + 1358*t] ? R[B[i + 1358*t]] * R[C[i + 1358*t]] : R[B[i + 1358*t]] + R[C[i + 1358*t]];
R[i + 2423*t] = Op[i + 1359*t] ? R[B[i + 1359*t]] * R[C[i + 1359*t]] : R[B[i + 1359*t]] + R[C[i + 1359*t]];
R[i + 2424*t] = Op[i + 1360*t] ? R[B[i + 1360*t]] * R[C[i + 1360*t]] : R[B[i + 1360*t]] + R[C[i + 1360*t]];
__syncthreads();
R[i + 2425*t] = Op[i + 1361*t] ? R[B[i + 1361*t]] * R[C[i + 1361*t]] : R[B[i + 1361*t]] + R[C[i + 1361*t]];
R[i + 2426*t] = Op[i + 1362*t] ? R[B[i + 1362*t]] * R[C[i + 1362*t]] : R[B[i + 1362*t]] + R[C[i + 1362*t]];
R[i + 2427*t] = Op[i + 1363*t] ? R[B[i + 1363*t]] * R[C[i + 1363*t]] : R[B[i + 1363*t]] + R[C[i + 1363*t]];
__syncthreads();
R[i + 2428*t] = Op[i + 1364*t] ? R[B[i + 1364*t]] * R[C[i + 1364*t]] : R[B[i + 1364*t]] + R[C[i + 1364*t]];
R[i + 2429*t] = Op[i + 1365*t] ? R[B[i + 1365*t]] * R[C[i + 1365*t]] : R[B[i + 1365*t]] + R[C[i + 1365*t]];
__syncthreads();
R[i + 2430*t] = Op[i + 1366*t] ? R[B[i + 1366*t]] * R[C[i + 1366*t]] : R[B[i + 1366*t]] + R[C[i + 1366*t]];
R[i + 2431*t] = Op[i + 1367*t] ? R[B[i + 1367*t]] * R[C[i + 1367*t]] : R[B[i + 1367*t]] + R[C[i + 1367*t]];
__syncthreads();
R[i + 2432*t] = Op[i + 1368*t] ? R[B[i + 1368*t]] * R[C[i + 1368*t]] : R[B[i + 1368*t]] + R[C[i + 1368*t]];
__syncthreads();
R[i + 2433*t] = Op[i + 1369*t] ? R[B[i + 1369*t]] * R[C[i + 1369*t]] : R[B[i + 1369*t]] + R[C[i + 1369*t]];
__syncthreads();
R[i + 2434*t] = Op[i + 1370*t] ? R[B[i + 1370*t]] * R[C[i + 1370*t]] : R[B[i + 1370*t]] + R[C[i + 1370*t]];
__syncthreads();
R[i + 2435*t] = Op[i + 1371*t] ? R[B[i + 1371*t]] * R[C[i + 1371*t]] : R[B[i + 1371*t]] + R[C[i + 1371*t]];
__syncthreads();
R[i + 2436*t] = Op[i + 1372*t] ? R[B[i + 1372*t]] * R[C[i + 1372*t]] : R[B[i + 1372*t]] + R[C[i + 1372*t]];
__syncthreads();
R[i + 2437*t] = Op[i + 1373*t] ? R[B[i + 1373*t]] * R[C[i + 1373*t]] : R[B[i + 1373*t]] + R[C[i + 1373*t]];
__syncthreads();
R[i + 2438*t] = Op[i + 1374*t] ? R[B[i + 1374*t]] * R[C[i + 1374*t]] : R[B[i + 1374*t]] + R[C[i + 1374*t]];
__syncthreads();
R[i + 2439*t] = Op[i + 1375*t] ? R[B[i + 1375*t]] * R[C[i + 1375*t]] : R[B[i + 1375*t]] + R[C[i + 1375*t]];
__syncthreads();
R[i + 2440*t] = Op[i + 1376*t] ? R[B[i + 1376*t]] * R[C[i + 1376*t]] : R[B[i + 1376*t]] + R[C[i + 1376*t]];
if (i==0) { final += R[2440*t]; }
__syncthreads();
}
if (i==0) { A[0]= final;}
}
| 63d1abef7a278671f27cf8ede2963c0be0e91ea6.cu | float h_A[]= {
0.9873887992938584, 0.7518116992518353, 0.8598940369927895, 0.7155885551816444, 0.8797425552930407, 0.587458783832594, 0.6267841600435446, 0.8217790907969758, 0.818894981367343, 0.7872291426722382, 0.6357493323386308, 0.5550871670133021, 0.8461501536000029, 0.7977451395282227, 0.7555389775190972, 0.5292387580096789, 0.5408085398597656, 0.8163708105099419, 0.5610129281465945, 0.5963355184694463, 0.6615023855648287, 0.8243805863473359, 0.5513643866625441, 0.6167492041543463, 0.6084409256700565, 0.9325979278891088, 0.7816587716738894, 0.6195649477892154, 0.8220997730263204, 0.5107161504409763, 0.7523904022882593, 0.5058736893901825, 0.606176072120739, 0.6434569290861027, 0.7960049633446116, 0.8391603079006115, 0.9028060550539083, 0.7012005336764833, 0.7196394099649538, 0.814032573839834, 0.9332142819092752, 0.7643479164281066, 0.8572039249712273, 0.6581116332456617, 0.8831502252329125, 0.735246302770505, 0.9052967234705973, 0.9386105386049532, 0.6445925301281801, 0.5809045836844399, 0.8756480705242975, 0.9547324386468568, 0.7634880643823876, 0.63017750870097, 0.9706364636854989, 0.684193584527232, 0.7099757252235765, 0.8856831456976266, 0.7217729723184566, 0.882024111590653, 0.7581537817900296, 0.6433412540275687, 0.6035784185185884, 0.915318950046528, 0.977512981758409, 0.5839927084681016, 0.9784115088385195, 0.6514832437172302, 0.8549174047743417, 0.5563793142977376, 0.605903117136309, 0.5537608356289365, 0.6169820307841094, 0.5519704501953835, 0.8571574272739233, 0.6131752749151639, 0.6292018912171328, 0.7613884907224823, 0.6483208000700782, 0.9676032497501634, 0.7211267524828632, 0.54161244110958, 0.5907961081074111, 0.6392989307993807, 0.5312485506823461, 0.6426775737775133, 0.6287493190025022, 0.6853275513104178, 0.801715250665508, 0.9806268492785526, 0.8335524143211881, 0.9523226273681229, 0.7979322321917615, 0.5761906857975251, 0.7609614590249472, 0.6962710813985764, 0.82329046975484, 0.6188244405105823, 0.6709106285130378, 0.747187208460914, 
0.518298561325272, 0.9366258706162892, 0.7308534791679668, 0.5523018767708051, 0.911051594106451, 0.673686766883247, 0.9271637293025594, 0.5400298883718841, 0.5075619683495305, 0.5631642672841077, 0.9856701528710157, 0.5029537179843656, 0.5873715239577169, 0.906177537898226, 0.807870656294317, 0.7406076258108997, 0.5309122479310391, 0.9634807267301542, 0.8388565572157327, 0.6516681227367411, 0.8040573439689465, 0.8426067325019722, 0.6862521693092227, 0.770866606504957, 0.966250695695041, 0.9914794295963107, 0.9940888515692078, 0.5943596829204034, 0.7675196897973086, 0.834066484777552, 0.6259818894895984, 0.7678372482110907, 0.7481588138043995, 0.9829697035966287, 0.5703672330089176, 0.6816343339468353, 0.7491289779751693, 0.6092983201326765, 0.9558026008937839, 0.8598379123250666, 0.5832153473618932, 0.9107107140854593, 0.8656108605804345, 0.5240922235248828, 0.7547578744669943, 0.7879631371868944, 0.9720893781370343, 0.706654435937283, 0.8687811722736392, 0.6128092834034508, 0.9832943506687434, 0.8960315888786147, 0.6574120018114272, 0.9184966104333021, 0.8322931005886973, 0.6598222681366639, 0.6905279159038262, 0.9429690741166823, 0.5738758425583128, 0.6074647566406836, 0.9379260564008778, 0.5144041845914757, 0.6805774821279678, 0.6560911555350712, 0.8029975623197818, 0.8386971428278434, 0.641583085490713, 0.6694589742843875, 0.9465007652728994, 0.677386529587717, 0.8581123594398272, 0.985885033034536, 0.5683837089240931, 0.6095121950923683, 0.5741860297389088, 0.925926847289021, 0.6160676124582172, 0.8874308495922196, 0.6979003989459664, 0.5069924947902891, 0.8551803770570955, 0.6942747048544508, 0.683759955437177, 0.9038374300290415, 0.6352176769559298, 0.7649224960110357, 0.8150810778809752, 0.622761078825717, 0.895941646999187, 0.8684371931043464, 0.5909344102941986, 0.7794499197136305, 0.5591928397823457, 0.9780197375505989, 0.855985506811566, 0.7469876487257581, 0.548399917437048, 0.5419548829095413, 0.85090366024281, 0.5982106334019789, 0.7363967412117727, 
0.5166940647358599, 0.7486316654216305, 0.9695241405093227, 0.702621234054025, 0.968417262184461, 0.5511685340822354, 0.7269181633451169, 0.7027686640332665, 0.5770704674256446, 0.610897596933887, 0.8086493414676644, 0.8716198458166471, 0.680927438041169, 0.7730517340807052, 0.8438397062201423, 0.7240004605973127, 0.7955193000746689, 0.9105209074583045, 0.5353457297015245, 0.5783003174045958, 0.8932656906865397, 0.7920268837944431, 0.6081406453190547, 0.5437838995685991, 0.5040520258200972, 0.7563145910619486, 0.9565505805524205, 0.5768297704393688, 0.8337801665797757, 0.692709644387637, 0.7037022751242846, 0.8250775049288817, 0.6982573030691286, 0.6385703054173311, 0.8901686884653965, 0.8691044027920404, 0.6434982985686291, 0.650907917102282, 0.7574465848489202, 0.9187222359931069, 0.9537230130118233, 0.7429676147773174, 0.5075474870857042, 0.5276533839836797, 0.8482832398730646, 0.6043779883537685, 0.9994343938372521, 0.7139737092667813, 0.852306786155933, 0.550973208912384, 0.9321179130951442, 0.935564718328989, 0.7608331778895917, 0.7526719187714597, 0.8644567394899946, 0.7627254656885976, 0.6729053192651407, 0.7559562853360888, 0.5382941977731497, 0.6826721482220992, 0.5915794348180616, 0.5154105459823537, 0.8728169991529307, 0.7247913050475217, 0.664857645536003, 0.7321998924143291, 0.6676480472824828, 0.6869264578933358, 0.8575117947360786, 0.6850691221716878, 0.891030255489857, 0.9763827707377857, 0.5810755825490929, 0.87481602764215, 0.6346417988604247, 0.9073063572906421, 0.6589032436094528, 0.6263181749160205, 0.5228315250585129, 0.8789869213331138, 0.8831761226579544, 0.5650108778015349, 0.9963376737661727, 0.5180604480066486, 0.5282625883289407, 0.5901106196379324, 0.5938981093934128, 0.7559324097538445, 0.5999583093892109, 0.8339015810263406, 0.9689040705777066, 0.6195002410591459, 0.6843522088798009, 0.9732095143116724, 0.7679268475703316, 0.8867255160700478, 0.9800235551340614, 0.7906231062110758, 0.5661002926346704, 0.6669240200603289, 
0.8533419253191146, 0.9281929293234508, 0.5703898433997759, 0.680833082738628, 0.7251064795681637, 0.7483648850281328, 0.9650960534927178, 0.7687464854531383, 0.6616253139648867, 0.7667357246285597, 0.781715650929798, 0.7131457212317911, 0.6512625095571962, 0.5551442985421178, 0.9645370054033653, 0.6370885504176269, 0.6512580260238058, 0.6857103114748357, 0.6980972466314304, 0.9613502135331046, 0.5168897955571674, 0.6454097143519825, 0.8658894491827238, 0.8039386964878721, 0.722655488761203, 0.804844537343654, 0.6512327522177668, 0.5117040480739877, 0.9543060465550064, 0.6857821765108075, 0.7100018880865389, 0.6976186251145577, 0.6964120095750104, 0.9415602339202684, 0.9831023561342398, 0.78095440772624, 0.55041336112988, 0.6065601574304764, 0.9429543218969325, 0.7674690881802468, 0.6161719984917517, 0.5803077366063498, 0.8760417817529479, 0.9494647471127876, 0.9099851304402415, 0.5497163863626613, 0.6305383524623622, 0.5437045612509942, 0.9136293525084882, 0.7318611584057553, 0.6371043868516049, 0.5333667745975267, 0.8379749091271791, 0.664820248911362, 0.5664260606918152, 0.7544060232997121, 0.9297524068181686, 0.9180898522091927, 0.6325339497089348, 0.8640845531533917, 0.5395553530054222, 0.6665549185681676, 0.6559016159838257, 0.5943869131177766, 0.9020683950098671, 0.706885505153577, 0.8533562559583731, 0.5680467947171077, 0.6379703254505011, 0.9275364488655524, 0.63427794235475, 0.5204359614285576, 0.5837202743611076, 0.5197531303207465, 0.733735555252905, 0.8635854087798438, 0.7308675941733167, 0.7496803428457608, 0.5326748369111625, 0.9735000921134471, 0.5840481121132972, 0.8251425137150197, 0.9862139226131214, 0.536967259258061, 0.9815426367547764, 0.5510447279690006, 0.93040031985411, 0.9978628113712606, 0.868372806575608, 0.5156173366795616, 0.8006590357455698, 0.5328998958678239, 0.6090429205693715, 0.5748196293438881, 0.6797380309528254, 0.5061406811129958, 0.7890955168021314, 0.7468284312386848, 0.7995851422972794, 0.9575932002007685, 
0.7574117631629482, 0.8990912995948614, 0.6706076462287267, 0.9256481849545741, 0.758708431923616, 0.8063524504521644, 0.6172003152794898, 0.7501484424264333, 0.7938735957844767, 0.7512640315127024, 0.9156461950750487, 0.6558783781435606, 0.9430122611716341, 0.7971646983654626, 0.5521716250093912, 0.7735692927966907, 0.5230714067203319, 0.7825264805439227, 0.6418739550138816, 0.7563683922854213, 0.9665634886212088, 0.7977156326453572, 0.5741836069248272, 0.8509994904088166, 0.949515272567327, 0.686998792130099, 0.6477396082624807, 0.9122659688563781, 0.7249348640361367, 0.9020419788588603, 0.8106359558438798, 0.8315111270709667, 0.64951842745161, 0.5134593054909165, 0.5566124339940084, 0.6701801055379537, 0.5196038579969711, 0.9748934454742821, 0.5651590840589593, 0.6714294366924799, 0.7863828727018196, 0.913858802503279, 0.809917271789527, 0.9081696787816929, 0.974176842501167, 0.8409665727274855, 0.8933133068985109, 0.5650237361290951, 0.8374208076823695, 0.9725178653476566, 0.691128992251266, 0.8751440072557719, 0.7053925890136412, 0.699350021715855, 0.8450247389883085, 0.8367710863473194, 0.9546152445128686, 0.6942381701549085, 0.6085674036186964, 0.6818954287371239, 0.8153217721476993, 0.5203522161638577, 0.6242281812220885, 0.7220591234127187, 0.8492281398417656, 0.9675597045379287, 0.6767304446653533, 0.7720794407118511, 0.8895473067326978, 0.6226386897238801, 0.7065902138335216, 0.7461493115792395, 0.5284147748971431, 0.7064138718899026, 0.8486717316806678, 0.7452348837799767, 0.9169223242439806, 0.7195800967525802, 0.7106975873561505, 0.7613378577150366, 0.7047971283007108, 0.5877299576665542, 0.8819016979608973, 0.6343976723670517, 0.9699223817830688, 0.734668422333335, 0.8380547270590863, 0.5682157108082594, 0.7005338944801736, 0.9889027074021162, 0.7697621933273877, 0.9157689647258015, 0.7030957793947004, 0.6488721900392699, 0.7921238459341038, 0.7081563009603621, 0.8529680145174764, 0.5501213819692206, 0.9806322253603919, 0.8203514994249395, 
0.6824905613670414, 0.5834639748665214, 0.9692467494754631, 0.5717500870275207, 0.8430790705155815, 0.7686240284288777, 0.6465376621086197, 0.5358453593209238, 0.6844386351683291, 0.6712332005145685, 0.871889730385432, 0.9544128184399707, 0.6730161393706668, 0.8206495391467982, 0.8440463809721327, 0.6228005335797644, 0.6862210880707785, 0.8101489856494852, 0.5115898755007777, 0.6861528142909818, 0.7000407491424825, 0.7835174551525574, 0.987838307216956, 0.8505623915916025, 0.7212200618116493, 0.6246698307070163, 0.8339384793662157, 0.6887862865213743, 0.7724905123233807, 0.9192473628267951, 0.7825012627754433, 0.6152579630310967, 0.7162371023580205, 0.7096045299412352, 0.553240285676697, 0.604787426542956, 0.5180470981007429, 0.9726713449986202, 0.7314275065565115, 0.5365298208676914, 0.7386709733166377, 0.5514026978702083, 0.784500668097285, 0.9567590029181687, 0.9819910783250801, 0.7440072840868148, 0.6125839365798385, 0.8905042172122765, 0.8817844435863067, 0.8512991451263401, 0.6917757960395037, 0.5211507582210355, 0.8320884658811438, 0.7275526589445434, 0.8362428074299335, 0.5861152847063916, 0.9515809256753556, 0.6765576461772067, 0.7415408152368544, 0.5135566288360056, 0.5662900491802862, 0.8877930986622692, 0.8559453596822503, 0.8690239842826766, 0.5932881374103105, 0.8477238865051469, 0.9935402879515457, 0.603950296096841, 0.8899416617718054, 0.6684252492290257, 0.6867161129076065, 0.5481489015637779, 0.8516605024137276, 0.6697455535196144, 0.5886181152760277, 0.7933374801185402, 0.5863378661993146, 0.5276553169576783, 0.6721302967261746, 0.6631416062977276, 0.9356530040897495, 0.5095006178486912, 0.7113885164699163, 0.7006150163125408, 0.6073329420764753, 0.9954589453604181, 0.9003499084389798, 0.7481295369947254, 0.7922804499615458, 0.85585361407768, 0.5142092130234189, 0.8477153580977111, 0.946324445939795, 0.7719735908963603, 0.91582336619714, 0.5002952381620078, 0.580959961104103, 0.6507166288630877, 0.8261153893589215, 0.9993180793249838, 
0.8974172499600944, 0.8719998871686087, 0.7947200176923674, 0.9608470893505232, 0.8110587203260455, 0.7725943496633663, 0.8659060687224153, 0.51702002479546, 0.7356929706162326, 0.7336955671347789, 0.6999823334612811, 0.7174106321234596, 0.9487653711955819, 0.810831626796905, 0.6180236171489433, 0.9669904726084952, 0.6852119630422148, 0.9063943898496873, 0.5849740987201735, 0.7002424241445055, 0.5038415122374568, 0.6015461030116267, 0.9748354167605575, 0.5110795731189022, 0.8100067744229016, 0.5733595197795776, 0.9534187536207501, 0.9515415950941613, 0.7830184920198511, 0.8753342069000705, 0.5255641041271396, 0.8532316620591187, 0.6953592663450643, 0.6210954292784914, 0.5074243607984454, 0.7206432346215552, 0.7448164616196414, 0.6006678973748265, 0.6819166572477112, 0.5132365595825821, 0.7587411675444842, 0.5233120096555277, 0.7910678385424161, 0.9218974386334835, 0.5718246717588121, 0.9554952723221313, 0.9048385902780773, 0.9376730838075168, 0.6574279541691859, 0.5854076250286624, 0.5008399456675952, 0.9749598658468381, 0.7221839977680268, 0.5431379037657579, 0.6334022485525458, 0.6417610270318581, 0.5910564287623148, 0.8653772929128911, 0.5350040710089436, 0.8769541228832378, 0.7356779217289777, 0.9052991423135377, 0.567185081334656, 0.982558035917428, 0.6630008789438311, 0.9043078404818499, 0.837863405090878, 0.7633462228976969, 0.6387379192710623, 0.5948447091272709, 0.7287043894535463, 0.5545002467465363, 0.925247311899189, 0.7896684213657962, 0.6584713403813098, 0.7709999832081317, 0.9954056096333697, 0.7374027827412089, 0.6621722413970081, 0.7785857447486094, 0.8445328743839065, 0.9158012277091989, 0.9575327105859605, 0.5257103009135398, 0.65428663952549, 0.5750848742950461, 0.9327447694891209, 0.9402892844192234, 0.6815840099317352, 0.7885855380336404, 0.9627699035684346, 0.9638362725045371, 0.9262143778696929, 0.7179475324764755, 0.6084850562812917, 0.7371555099760889, 0.9441719795879823, 0.7641697757574483, 0.9629044485771991, 0.648312345677337, 
0.5122006504170717, 0.7986999450352197, 0.5093200892525207, 0.9026511386735185, 0.6268841435153525, 0.863297627716402, 0.9906210977403493, 0.8910432016176726, 0.8665265113678873, 0.637797555206671, 0.5759838522378825, 0.8448416396683712, 0.5110223206151283, 0.5129018975342297, 0.6707369568705517, 0.6207191068862427, 0.5921760374852525, 0.7211631949014823, 0.942437645013692, 0.8841359460244997, 0.7205626915780556, 0.5666770681212792, 0.7601032560593831, 0.6862951945787716, 0.5300083435330237, 0.765042936737357, 0.8090517675044298, 0.6991627162332097, 0.8078110685697564, 0.6769017606894595, 0.6538504580064564, 0.7450865723003313, 0.5897220674899502, 0.6148351110857895, 0.5241240501300686, 0.8578694494753915, 0.5627688046821863, 0.6958871547432977, 0.7220639220692409, 0.9115616081554974, 0.7795972686919863, 0.9455834376852315, 0.9079674760205528, 0.6775286420205712, 0.887392658618295, 0.8432436205204912, 0.9478502109623947, 0.7848358642626148, 0.7764240901328686, 0.5299158340502093, 0.5623006443730525, 0.6618054156426989, 0.6382948266729133, 0.905648000248072, 0.600076481479098, 0.788846984017006, 0.942510863620593, 0.6154150975381208, 0.5383863593501123, 0.9386379702156725, 0.9279887422808115, 0.6547366327923393, 0.6298822932153483, 0.6139763723339837, 0.5638563626122246, 0.9542081160479825, 0.5596408079633629, 0.5820056998779202, 0.661611834221896, 0.9448067739513049, 0.7208292070602556, 0.71969207982869, 0.8691233167990817, 0.8209674235860165, 0.9403547329285875, 0.7001431321880657, 0.60332770388996, 0.8270608081778772, 0.904358089147654, 0.715491892015604, 0.5276054565505, 0.6280174877560531, 0.5778040088877303, 0.5013989150761256, 0.8343070266116003, 0.7247326370520998, 0.5583815164624097, 0.9492705617969692, 0.6471579441946751, 0.6795564462217385, 0.7897745095284089, 0.9770224498198543, 0.8957805235481333, 0.5423929899055222, 0.5497638242492726, 0.5584578950774022, 0.8124314765497315, 0.5187743037316634, 0.5015059790416996, 0.5382073041037504, 0.634683323472422, 
0.5883023522478059, 0.5439940807562798, 0.9215328505321165, 0.8758680035920889, 0.7998349225419288, 0.5417486685909989, 0.8227370868417367, 0.9652421499190844, 0.6649222967184754, 0.8357966322019649, 0.927708498219958, 0.7186819032636205, 0.9480469398913703, 0.6121704378660365, 0.7354519369469874, 0.852351017755436, 0.666415973006079, 0.5107955385229805, 0.8114386565673553, 0.6160931628157491, 0.7034629087580055, 0.7712681650618738, 0.931522156025228, 0.8290796674897871, 0.9710505246424211, 0.6728331737059129, 0.6383415298490998, 0.9000891844514854, 0.8539881255474087, 0.6841302548968518, 0.8492232503888326, 0.5222486896014752, 0.5297349586244403, 0.7773631870650417, 0.7222942504283876, 0.95172361271578, 0.5736861323320599, 0.6522021820809887, 0.584399582296829, 0.7080634820293137, 0.6761027116957701, 0.5513831189550765, 0.8402726003151456, 0.9355836214646376, 0.5248596232347256, 0.9289032888962825, 0.816142131142414, 0.5520877758136933, 0.9579246047894387, 0.710273173978247, 0.6833942905681154, 0.8047344403925312, 0.6454781832725036, 0.8791790016000758, 0.7323902841736332, 0.753223312358141, 0.8951747442789569, 0.7433258843796176, 0.5921862113661965, 0.5971416587757398, 0.7135692180643698, 0.9017003787263047, 0.6688884302707774, 0.9721573390545875, 0.8578080151024081, 0.583217888247113, 0.5899661197964297, 0.7019863197452507, 0.9293149287848175, 0.8383217634692846, 0.5107063141767907, 0.6541870061559776, 0.8413751038539888, 0.5667954800176009, 0.5324670287250333, 0.9101636713406163, 0.7977153682242727, 0.8287572352803227, 0.7714548830204764, 0.6507439191749751, 0.5017548276236616, 0.8656335932763315, 0.5043366900889235, 0.9722299516823705, 0.9936647365173927, 0.6679678268244218, 0.7017888788504192, 0.5083322355494563, 0.8187244421301461, 0.7797979423916968, 0.6742662486615709, 0.5188834711495063, 0.7617738961071028, 0.5721371094120598, 0.6097260059716954, 0.7370722634086724, 0.6711299176724804, 0.6044538735398881, 0.9738817575724046, 0.7070246538659721, 
0.852297793197975, 0.5426739017077423, 0.9245946431788739, 0.57930020099923, 0.7105064553769993, 0.8965556219427626, 0.5822520240985188, 0.6352382787724777, 0.8607344359514209, 0.8059193413998875, 0.8636140961150588, 0.9310910307548772, 0.8638246265062702, 0.6634730132744152, 0.8875100228321278, 0.5532720081256295, 0.835366110661536, 0.5589804561927798, 0.6738953026831025, 0.5069330772630096, 0.9811773255535292, 0.7523468533347598, 0.5038254299925531, 0.5163153834652874, 0.821636672741925, 0.6044713752347893, 0.8646379166772372, 0.7022417080010763, 0.9372837290910088, 0.5494301802419779, 0.8369631995981521, 0.6821936979926791, 0.8358279394274424, 0.6169579538443483, 0.5828107807220058, 0.6270741816036912, 0.5169275700352334, 0.7479553143958468, 0.8503664805186155, 0.8541561838996096, 0.5104887735620496, 0.5346812325414421, 0.8602484999771941, 0.5591968377104881, 0.6608154356032825, 0.6011866047781195, 0.6376354075151747, 0.6709696650959318, 0.626053819105727, 0.5770500237242404, 0.6202117450755023, 0.8519658297920467, 0.5126088506853511, 0.6766487314230607, 0.7734899634550203, 0.6138884991238456, 0.9707053155758603, 0.6895502145808594, 0.5739856656280006, 0.6197547692578926, 0.5562273578418818, 0.9085701668046209, 0.704456648007084, 0.5864907701542276, 0.814653597937131, 0.794305619244001, 0.7328913252282012, 0.7987260251520019, 0.6309803979959722, 0.7676347771937209, 0.8658128466490442, 0.6295921998966577, 0.5066593457071293, 0.5761235474438036, 0.8920850836573373, 0.6221973098796418, 0.8624486845421405, 0.6780002427886087, 0.5194145385071753, 0.5217080624827046, 0.6881501051777025, 0.7565100533172109, 0.8011067900756234, 0.6141950796415435, 0.6730379840341634, 0.6503075536685277, 0.7493405950077608, 0.6352471867547964, 0.5779925369493126, 0.7547809147142139, 0.6361599203148367, 0.5680089891160098, 0.6947286902696099, 0.9759290622446543, 0.9090167866327912, 0.9895913070291293, 0.6178474895219674, 0.5708131686829274, 0.9814425902984489, 0.5499475469832209, 
0.5714350423534797, 0.7985130117015344, 0.8035555938190182, 0.922333304625689, 0.7834668654589376, 0.8531368339810023, 0.6634081877142862, 0.6079291117718983, 0.6676135603114819, 0.6442202689869819, 0.6734866799555255, 0.5183209309242458, 0.6723718631467628, 0.9749627478098409, 0.906680697855986, 0.5355104193178007, 0.9258718694374322, 0.7631564009320196, 0.514835645587129, 0.8626967014573681, 0.5129921417563168, 0.9090552720718426, 0.7672500748697505, 0.9805477520082009, 0.6860282291368236, 0.9499832918674227, 0.6877318048511916, 0.7603607710030336, 0.8611811539995864, 0.7629044537491323, 0.8110572474405566, 0.7514310381470863, 0.998253118494552, 0.6681528143034332, 0.619219832575227, 0.7715641134585902, 0.9222377849483172, 0.906240691908414, 0.549208480742204, 0.9010636943269463, 0.8995010002391182, 0.6051045802206454, 0.9226902468713644, 0.7931084776437555, 0.6585914793534584, 0.5601240147318856, 0.5866716413600057, 0.7848700172561363, 0.680707714544049, 0.8395406788652708, 0.7823887507457589, 0.8496382135859313, 0.9493243949842407, 0.7889211875178391, 0.9117305097667605, 0.760447796377159, 0.9835484994521995, 0.9960579843210042, 0.7272420968306597, 0.5747042652366237, 0.5482793293596866, 0.8103912184221156, 0.8201310789702989, 0.9575389545849866, 0.5758837896142366, 0.6444761200517921, 0.8348750153091231, 0.6739935345157226, 0.9643880799292265, 0.5261763314806887, 0.7495078549856391, 0.7169058575719638, 0.6888556207133727, 0.9219216284054566, 0.9318075158718464, 0.946899745257167, 0.6812970971581149, 0.6922192571750592, 0.6275433755179708, 0.6358797576555584, 0.7022988517700277, 0.8284505040984136, 0.6976715755343152, 0.5160586248813319, 0.9548739335999195, 0.9004962034764263, 0.5831684961413828, 0.9140182838042485, 0.6342052028775893, 0.8432964674632332, 0.533659434576921, 0.5607394505251804, 0.6400592009336713, 0.8573548320401831, 0.5938438320515836, 0.8471809549116193, 0.8234543192143268, 0.5393429056477121, 0.9199756126062356, 0.6864680251157875, 
0.5524130510612235, 0.7676331589271698, 0.7655116025427776, 0.9953025921779459, 0.5570419938227202, 0.9375172692938039, 0.8853303360086092, 0.6175030299463964, 0.9610472210633798, 0.9793248335277396, 0.5191287014376579, 0.7753567788242528, 0.7955941019744179, 0.6177725343482777, 0.7633507462052056, 0.5284562292793976, 0.9701896681063125, 0.8072347072472058, 0.733189353883057, 0.5366567094934325, 0.7043852277373038, 0.5890813236043562, 0.5128749592266433, 0.662925715314866, 0.634118704998855, 0.6073286126450305, 0.6339794898714393, 0.9768593614997104, 0.9403310291891498, 0.6613222526244944, 0.8688283540651561, 0.6530872785635146, 0.6823556177341259, 0.8965312739261171, 0.9580323639255706, 0.7094752316152784, 0.918482500424982, 0.7982409299403675, 0.6484321851363647, 0.574382527592835, 0.756294658808444, 0.5565721021824525, 0.5645684831616065, 0.8739784566942836, 0.9873306332583123, 0.6252512468057462, 0.5613924201032876, 0.7389377280453467, 0.6643104747289368, 0.5304822919651617, 0.7979964493627698, 0.6923930751660302, 0.8323096466661393, 0.9735570617704836, 0.5891404986546582, 0.593401389404634, 0.8192316372367301, 0.9237068295025916, 0.6310494756440153, 0.5581849391728807, 0.8510849728043521, 0.5613400282745914, 0.9432726497815518, 0.7141715839921408, 0.9057275545421295, 0.5662998730997464, 0.6568582494975623, 0.6852846273228743, 0.8042002699574438, 0.5105769787671085, 0.7233736503660408, 0.5154833872497582, 0.568486672565546, 0.594443472026247, 0.7948245274034946, 0.5793937766879824, 0.8600956771572617, 0.5320358034617989, 0.9618695984661347, 0.6386498162402656, 0.7838922524187857, 0.8365156140047558, 0.7214695265549724, 0.5814916389385755, 0.7337945470416782, 0.9149735523963454, 0.7069045149880879, 0.5937918340524155, 0.5870837698037336, 0.7760239321042872, 0.5566682240873941, 0.8701858058033329, 0.559957905083224, 0.6868775103852829, 0.6049275948089552, 0.5078142933745745, 0.6561567686059698, 0.9215191193948356, 0.9987068896756399, 0.6503803107297433, 
0.9789303619630705, 0.8244886878099575, 0.533970627566881, 0.7823262533935302, 0.8337203859266198, 0.9186001666762071, 0.8612054541402141, 0.6980752332923608, 0.546064862925015, 0.5342341323827181, 0.5841958591588285, 0.7068364820395158, 0.7431607457789626, 0.9352208459670728, 0.8896984989878948, 0.665090196088792, 0.867544278291807, 0.8047371189358428, 0.7172106943146808, 0.5700442417707028, 0.6901429390349088, 0.5596744895598463, 0.7914726333497988, 0.7444565892434192, 0.681840507109756, 0.6227365016031077, 0.5698373434516414, 0.9585100158021241, 0.8330946677632858, 0.9784114892442951, 0.6981278505348296, 0.7700433741729185, 0.6792122274191101, 0.6366476335755539, 0.6942501550318357, 0.9253623727608254, 0.8743869264714181, 0.9846218259186119, 0.7381229735146361, 0.7868145252359038, 0.9022077208149559, 0.6412850102254031, 0.8843707959441309, 0.525813193409796, 0.9010322176428593, 0.8529121450082326, 0.8159254051643297, 0.8657496703058141, 0.7310938215212875, 0.6693534264381795, 0.6081908730780452, 0.5078327869929541, 0.6675349357039286, 0.8329594148439462, 0.6386257269545503, 0.9591838494332057, 0.5171191514607427, 0.8250080292591149, 0.8021918525827711, 0.6240963078146204, 0.6457766937961278, 0.5749080073475301, 0.6092564011025896, 0.7789152677146471, 0.6606618315608496, 0.7061804433153324, 0.5990220416748253, 0.7167414490738575, 0.9484018025959075, 0.9776373065899113, 0.5453540066057403, 0.6557048771442111, 0.9770257825095994, 0.8208287243995628, 0.8557261688043865, 0.9365278118615676, 0.838600723388286, 0.7691412774605069, 0.541459049890217, 0.7180585242481053, 0.5236421046496819, 0.6510866362800691, 0.7312937130588637, 0.6910996167770598, 0.6896662579893474, 0.6545135355239611, 0.9865893564246024, 0.7219395811393453, 0.767728835627317, 0.9742175933968511, 0.6423920162844072, 0.5030002267625844, 0.7805394429434502, 0.583170405113753, 0.8183732553789314, 0.8148633703880361, 0.8085945968220128, 0.9165476813354043, 0.7553756952814482, 0.5137261597678339, 
0.721347047682237, 0.6946931478569904, 0.9417279372625851, 0.7728628051376001, 0.7968472591646721, 0.9662044017616189, 0.7877198818958219, 0.7743699516371786, 0.7480903177961968, 0.6683205963221507, 0.5619766221505266, 0.8735973865084579, 0.7234470993613408, 0.6603994612739229, 0.8426060209887806, 0.6740302623469663, 0.9035155963656075, 0.6129785355306381, 0.6625335366056964, 0.6361148890307096, 0.7908923689539582, 0.8452976523762015, 0.5168369797047809, 0.9201647478523824, 0.8320107971030108, 0.7976045095905966, 0.8553390032399679, 0.9990966041766876, 0.5888320460009936, 0.8463714940095114, 0.6383322593457366, 0.5187035016222965, 0.8202002830813369, 0.793093255552435, 0.9691209268577288, 0.6216722337652812, 0.7984991700991679, 0.8407511368146738, 0.5638720447278417, 0.515413076038455, 0.5531994457273571, 0.9299593296115144, 0.5668408990302968, 0.5521774659220027, 0.9326922060082296, 0.9259433492611779, 0.6662838328584184, 0.799808380770201, 0.7374960491378271, 0.5505121481395759, 0.5208967641601245, 0.5700765120263961, 0.9625008398125984, 0.8181170003865718, 0.8207561789153588, 0.9245508365004179, 0.5261435565486274, 0.6247926339144809, 0.7522210450172375, 0.5772851722520327, 0.8917841795495183, 0.5555253768364711, 0.5781479272063148, 0.9487085818671304, 0.5041632945328296, 0.5081146910859649, 0.5791108143367838, 0.7580900802673299, 0.5678307011374661, 0.7354991004488236, 0.7259114791949398, 0.5515278298721178, 0.6865217434019175, 0.790982422313669, 0.6793471171481189, 0.8923010653841341, 0.674624407214061, 0.6385002046554912, 0.6736653988105925, 0.8023843444579186, 0.5955901079078598, 0.9279549580419815, 0.8089958775688229, 0.6201689744414038, 0.5257882065642188, 0.65454071137272, 0.8426701061394113, 0.8601234660163113, 0.7390534707305672, 0.8415096372410609, 0.6998523333068822, 0.5303867615610325, 0.5117662875829172, 0.6021839461879711, 0.6142739759881944, 0.6265751054421258, 0.8751803454950071, 0.6075217130472317, 0.8611795599872722, 0.7726667389659722, 
0.866747480914604, 0.7283244235173543, 0.5572137318096201, 0.5373894018489307, 0.9893486339143355, 0.9333162859252834, 0.6864114985291957, 0.6237082155546392, 0.7275215851983154, 0.919660250113697, 0.8908150520192266, 0.8911797988021533, 0.8224829578563083, 0.6712394924189643, 0.8249961661091952, 0.9727623967072361, 0.8884312685511355, 0.6922801866008519, 0.6031860144026449, 0.6527665466923591, 0.5753437286895564, 0.6197467156461114, 0.751226533722879, 0.7960781043721632, 0.8856852588373404, 0.6732690011045864, 0.6505248066373865, 0.7584456067966638, 0.5172164748801227, 0.5766599110641248, 0.7105342033937381, 0.9841448695215047, 0.6067888239328546, 0.5303088375088798, 0.6682100191795148, 0.7530130795877281, 0.7621049427010862, 0.7409890955884882, 0.9880016603229849, 0.9027377669984347, 0.7200918113259207, 0.9816907189261395, 0.8313937744851118, 0.8821471789743529, 0.8428474963856065, 0.686459563587485, 0.7236633950603435, 0.6279475578641325, 0.5530378153345542, 0.8280342069696734, 0.5831296815498765, 0.7309537297075459, 0.5497286650665941, 0.9496104973452879, 0.8913824938394426, 0.7708583858924642, 0.6775361148146136, 0.7627736681038344, 0.7341813047807181, 0.7802086396651471, 0.6111277939896712, 0.5490286952643797, 0.7562168283023454, 0.8647510556594236, 0.8784151847902903, 0.8426734605180034, 0.6829016492887707, 0.5518496793808838, 0.7512867947667048, 0.7097589102559632, 0.7200545520717985, 0.8969926214560151, 0.7166897444881704, 0.9477809956669567, 0.8696349258860455, 0.8836696136852488, 0.8751350632409691, 0.6147781634235743, 0.5841730785261305, 0.6889989172246067, 0.656643224060296, 0.9099243260043262, 0.7822208441723049, 0.6280269525255031, 0.9987036626435553, 0.5570499161369904, 0.7908236317372199, 0.8637018753327514, 0.5907701824553611, 0.5776935567960886, 0.8290296438327065, 0.7435613246129783, 0.6015286749755311, 0.5995235826720258, 0.9375453753476061, 0.7012002371260095, 0.7749991778621644, 0.9253704730883306, 0.6622140668868548, 0.8751943564013189, 
0.7848423373736819, 0.8198278792021207, 0.8355433698405764, 0.8469988242147546, 0.7106440610493622, 0.7717760644577925, 0.8482549381045632, 0.7034315994835767, 0.5270767228313629, 0.556492097409392, 0.5250211901533995, 0.8918169642699785, 0.5139485363551743, 0.6954633749790347, 0.987187423155844, 0.5315473511832489, 0.7285022880698455, 0.8661422408107078, 0.6516973230662462, 0.6354979750416763, 0.9830992736962674, 0.8691798460050983, 0.7686794233892067, 0.6525221134454748, 0.9123585394561009, 0.9303674491084057, 0.9794572793236938, 0.5550963053004957, 0.729361192447174, 0.8459382293093394, 0.6879934976664148, 0.9989854797851149, 0.9780797416837166, 0.7613314083550312, 0.9394825964922469, 0.8705502950910118, 0.8511169886138406, 0.9222937230627088, 0.6850968484654274, 0.9835874948069996, 0.9926888396330584, 0.8934012011583182, 0.8097093053680482, 0.9743344885060357, 0.748710060235414, 0.9123053268037992, 0.9856391728506535, 0.7783410740734211, 0.5082660591033406, 0.5874971284407258, 0.9889645244945021, 0.5733249199915006, 0.873712288378057, 0.7326146031690979, 0.842795099683517, 0.5744157743142706, 0.7014663162806251, 0.883831631303523, 0.5849015137361229, 0.9140526361301875, 0.8873271514852801, 0.5494967478417436, 0.874968099053878, 0.7421143213011119, 0.9011196518276421, 0.8632163794597882, 0.8969297013072101, 0.5624541337333624, 0.6778708609603378, 0.924162224627148, 0.5154289743993294, 0.7458758719331515, 0.5469403653409253, 0.9215395156228849, 0.7799415997385775, 0.7862252530612792, 0.9354661780047933, 0.605963339186317, 0.5527759922349667, 0.9182524165405077, 0.7943670702990648, 0.795223026566696, 0.8602001542136881, 0.922973558181093, 0.9179628291958914, 0.580328894939382, 0.9000257481154881, 0.718314169790058, 0.7041269434422357, 0.6900607086063142, 0.8922006735454748, 0.5505014482824976, 0.9037199616632444, 0.6410981719693336, 0.8973433098098043, 0.6861078292486007, 0.8827252899079499, 0.5892580771943755, 0.8016799510270737, 0.8323327364140867, 
0.7299127830481688, 0.7835154822704542, 0.6733228975055118, 0.6493436880123682, 0.9059927803158938, 0.7323349679516069, 0.5980729852183468, 0.6863097255040689, 0.6645852343485337, 0.620372040956285, 0.5458973103848912, 0.8657363881097485, 0.7905836114211817, 0.5036995450338966, 0.9195203447568345, 0.6544717617295861, 0.5396829356282755, 0.910378312688753, 0.9492269913122429, 0.8769485873961267, 0.7762565508713445, 0.6520835391671049, 0.5075913506076314, 0.7719496271787065, 0.544706117135028, 0.8568393309344577, 0.8830751740816742, 0.6504940690472749, 0.848284111674153, 0.6821736299621233, 0.9031641413921191, 0.9234596745505517, 0.5735460530083105, 0.8068274671523968, 0.9158344449291893, 0.9632170156195525, 0.5263199366027761, 0.5575760550518836, 0.6103518588063305, 0.8594017070491671, 0.6218180181133764, 0.9487769327859035, 0.8512721338475786, 0.5694329526333961, 0.5084531620731285, 0.7745145186520663, 0.8817808889809999, 0.8925934561560604, 0.5986434715766739, 0.9985606733713617, 0.5367805328811999, 0.826009252291035, 0.7103474443087405, 0.7462800449580925, 0.508985381797112, 0.9120763768249291, 0.9125693158409376, 0.5388215081457304, 0.8092770093092625, 0.6761982074455362, 0.6058736009585359, 0.7814833100226954, 0.8910713075053154, 0.8141887348167696, 0.5480861174366163, 0.6921464246606681, 0.7179643028257499, 0.5061217952170456, 0.968407520029003, 0.922271381525906, 0.7496451988390506, 0.6898703084799815, 0.6696405819330484, 0.9916512508767323, 0.5810620283619279, 0.944418680873119, 0.7048581369198572, 0.8859638380570314, 0.5203577185488292, 0.7776281930212128, 0.8545235567376395, 0.9914514761334134, 0.8169061415794385, 0.5714422667163903, 0.8732905547043115, 0.8270889941149437, 0.5547793537132076, 0.9439222152737667, 0.8142495853668739, 0.9749937837262095, 0.8759747666176125, 0.9555406770052443, 0.6380421063914359, 0.5359711291443892, 0.7155269673448915, 0.7155849643790209, 0.7816730574627351, 0.6798348300810456, 0.6055651759460232, 0.9727261953886397, 
0.7912306142888441, 0.8691910097617725, 0.6608557641727806, 0.9626008637892334, 0.6878650911076891, 0.7817239228547587, 0.6481065410657477, 0.9011145559825826, 0.7214803631424349, 0.8620379004461355, 0.933556015313342, 0.8574403831026188, 0.7509415129156659, 0.9211861455236182, 0.6168369960024921, 0.6633820162745845, 0.9616777603445013, 0.8788803736885484, 0.8726030255738653, 0.7217398658580272, 0.6894334283729915, 0.629486895990415, 0.8212938316801428, 0.8573172724235018, 0.6615680851391283, 0.8144552650380986, 0.6082748353307735, 0.8515528129677247, 0.5922861060491702, 0.5837782031173523, 0.8163436323267779, 0.8807466248349433, 0.8191767320897085, 0.6871803714128963, 0.7168929180390207, 0.6235046668715849, 0.8145340226638138, 0.5159909405886565, 0.5044284561596328, 0.89945438240737, 0.5321286412908908, 0.8122698694343966, 0.8228389391244573, 0.758352213812126, 0.8699261645657556, 0.5399257278312445, 0.9526232397688114, 0.6534382808519251, 0.8521659411140631, 0.7888843094987481, 0.9416064310050551, 0.9438150707471553, 0.5216865642431225, 0.7309841642152322, 0.9633209055166374, 0.8194455588639351, 0.73234483506185, 0.7616215726802608, 0.5623033400508781, 0.6825482091927366, 0.8145742683161044, 0.9419404708041773, 0.9961303437964112, 0.7479376090144425, 0.9449365544081527, 0.6154405378779453, 0.5129964237539779, 0.7413598000798663, 0.7343551074670996, 0.7580709330388471, 0.975871992369296, 0.9419762804546217, 0.6398191174701254, 0.7531338332217604, 0.5317905945879506, 0.7310179092115932, 0.9643180739180471, 0.796044811328371, 0.6237793050130007, 0.9597411477600923, 0.8878414142285018, 0.8493077875226656, 0.9336290387397532, 0.5179426739916106, 0.5723008243590332, 0.7757566233445711, 0.6840609486852346, 0.652234926049212, 0.5777603852263218, 0.9870331284699823, 0.7512833845741292, 0.9165848533158021, 0.6072714125597752, 0.6431100929146586, 0.9360107032015679, 0.5064702313656091, 0.6252754061352503, 0.5560803323923713, 0.7341948279094492, 0.6959076329434137, 
0.8771624849660196, 0.9681348668289143, 0.9419561971893968, 0.7389396976980893, 0.9176850633936793, 0.7154956216644301, 0.8514518164863573, 0.503210979401889, 0.8138140078539334, 0.7273733408912169, 0.5800344193772096, 0.8678376269051095, 0.8423802884234892, 0.582679274608986, 0.8455068386021287, 0.6359802052655221, 0.9615519937100085, 0.7529528123269568, 0.670191555622708, 0.8481756375788627, 0.8925147501438412, 0.6079072803712378, 0.8553564178531987, 0.9214619013634349, 0.9363278461829714, 0.6613962279357846, 0.9575202896793558, 0.8963647593658532, 0.7214945417027565, 0.6797724487084598, 0.5343306066833253, 0.7840111796104805, 0.5225340993323446, 0.6097697554134376, 0.6027957654053502, 0.5884168595428014, 0.7671935491829984, 0.7625906789963944, 0.5140993703895778, 0.5822805871870742, 0.522130975899081, 0.9314354495336391, 0.5786884035873763, 0.8803766455783981, 0.7274456169467368, 0.788104429418368, 0.7820909474796163, 0.9896442771561808, 0.6317233890195373, 0.789502051221912, 0.6273568669531393, 0.9888161679487676, 0.8705875644915162, 0.7766434403710174, 0.8439368725855813, 0.6838823760520569, 0.8398730379424205, 0.691944593241763, 0.9408130209777104, 0.6240421375854819, 0.9438181567003753, 0.9518497199840132, 0.8839368131749776, 0.7281228207058632, 0.7906076700014837, 0.5751293473149006, 0.6952627781175456, 0.5885996207662241, 0.848828798722356, 0.9778850911050707, 0.8049656021478138, 0.7443301670159348, 0.7943131275225426, 0.6351036904237062, 0.9454439445031071, 0.9535622655787213, 0.8171571753376112, 0.6710498720689988, 0.5057838386909665, 0.5964742308695565, 0.8239156898350537, 0.8458682681662731, 0.602397677496227, 0.9237023225436845, 0.6583706520485939, 0.9058575650144642, 0.9865083929946867, 0.9678433267188755, 0.9359345616332035, 0.6342738515744634, 0.9753849516125628, 0.521345505914977, 0.6832022715895418, 0.7544518346551683, 0.9427488674910933, 0.92749588426484, 0.6233320299507704, 0.7349059203336324, 0.7989041657227978, 0.6038435541096787, 
0.6098111990866749, 0.641435549487916, 0.9612985796044152, 0.6733649141176147, 0.5242265959657836, 0.7312717635440174, 0.517813635558183, 0.8180419968007417, 0.8956382248508811, 0.5257390895024322, 0.8234993774964166, 0.8659327428399957, 0.8242762576017832, 0.7680683896904879, 0.7420765338699009, 0.8789098051393052, 0.8107464849248789, 0.8397857153366257, 0.7175672759329513, 0.5499357062097117, 0.7630122507976369, 0.9171029061767833, 0.6933572549595917, 0.993779967646145, 0.9675645744734835, 0.9438460026203901, 0.750077048991092, 0.9932433435426168, 0.6535144138056252, 0.5532606664463415, 0.5807604913910591, 0.6591067629364077, 0.9321426923515623, 0.8602804591276665, 0.7117586120095738, 0.9595108399846513, 0.5546578676665417, 0.7429285429218706, 0.907752797107241, 0.7616888792605699, 0.6533834527948483, 0.9219994275762515, 0.7854194649690828, 0.6659721332603528, 0.5381204717143762, 0.9797029499871286, 0.9369598561407934, 0.5354225013328487, 0.9264853861676166, 0.5827959130681847, 0.5533849823812369, 0.7820105978407299, 0.8116206239684358, 0.7406275945655897, 0.942377702107255, 0.870008326657516, 0.8908852849517801, 0.730095712049561, 0.6223105937207494, 0.7895568415501113, 0.5081894632347548, 0.6265271274961686, 0.6559911014679852, 0.7959568063441748, 0.9583354324014506, 0.7807825635319703, 0.7536583161084436, 0.6196542995883767, 0.7880217834613394, 0.557755390107764, 0.6662984321334995, 0.7418885337537433, 0.5129307496427511, 0.6889549303561692, 0.5849011953042409, 0.5033954697417411, 0.5663993916435768, 0.7035009271348664, 0.5276575347789338, 0.8208531942019319, 0.7092756009221833, 0.8427099711730852, 0.7203772988012176, 0.8650949999317956, 0.9573511121067915, 0.6776408214328489, 0.8716436122448941, 0.707200546821056, 0.9921370941398087, 0.9976850049219468, 0.6047869372433134, 0.5389728309831665, 0.5038706549236316, 0.8054783547721096, 0.683087497253656, 0.5816598075836192, 0.798991842333269, 0.9899004632500551, 0.7815063164579186, 0.784621277222917, 
0.5441754750626858, 0.7064582379138946, 0.7849148238528161, 0.8168819218887211, 0.6824168704106084, 0.9498599252451023, 0.6636293529455216, 0.5036305750753175, 0.5431934333070235, 0.8336329816899479, 0.8523386752466892, 0.7301394707570665, 0.5068898679806332, 0.8911485295390987, 0.9284473803097646, 0.777456798602117, 0.5288240123707417, 0.6501697524785257, 0.6840813639452228, 0.6254913508337028, 0.5779870499922055, 0.8342319106343629, 0.9194128733156128, 0.8461565183313522, 0.5765929020399347, 0.5452626722558229, 0.5471054099973672, 0.6728169799539369, 0.9990512692562148, 0.7512401020183725, 0.8163683150572695, 0.5661950187869003, 0.8054136787514152, 0.8275800998064031, 0.6070113230208019, 0.8013315850783045, 0.753029455466661, 0.8822727253933713, 0.9386488438060794, 0.8592869051568598, 0.8224140990742601, 0.5584376686081944, 0.874287512487942, 0.6034046996054155, 0.6293232914353744, 0.9156059707741431, 0.9264787779312122, 0.5934619724323433, 0.5405231096162741, 0.6861139425595335, 0.9976821589013453, 0.6182426734586393, 0.5163627872904224, 0.5582709611032788, 0.8047765331939429, 0.8347931343470598, 0.8018693924217887, 0.6426330522892574, 0.709601899822681, 0.9885430699289512, 0.9531324494912703, 0.7969670086871179, 0.7473191231013494, 0.9017349576914258, 0.919481371596202, 0.8727721677276652, 0.6524529898821583, 0.7938274190855628, 0.9384058628828111, 0.9754218617812969, 0.8745035861515238, 0.9196014951134299, 0.5436080890871006, 0.6930196847209786, 0.8052396500248113, 0.9907365631214111, 0.6265410812501806, 0.5227252898131762, 0.6782288040296625, 0.8540090886627774, 0.720640513312178, 0.7150287267879748, 0.9932442594872414, 0.5789948348619187, 0.5694184185198192, 0.9304966140234037, 0.8553367547422092, 0.7623887177378854, 0.945390662648234, 0.9283148465319575, 0.762487296228556, 0.8988355354195373, 0.9323642639865879, 0.7561109958249421, 0.5802024526535317, 0.667554115530384, 0.8912211149647045, 0.8073465802320483, 0.9062287873294421, 0.6696503320728115, 
0.6152429455039171, 0.7037691180542587, 0.9868284579732365, 0.7522094391479366, 0.6088441028490653, 0.6718567186927763, 0.610334209396717, 0.6704597338427467, 0.8576108172349556, 0.960874099765807, 0.6232888497068088, 0.7214825142489882, 0.5787726406083071, 0.6245644752102442, 0.7807269310157239, 0.6226585470598576, 0.8668373989446609, 0.7966042342663935, 0.9529649152868666, 0.5246874246912082, 0.5516869759513185, 0.56649308863381, 0.9789745524951146, 0.9496852871950414, 0.5386779122261065, 0.6445260480802912, 0.9449786083521714, 0.9857149569129251, 0.7136782475089309, 0.8491391179059555, 0.5735115245229583, 0.8764115766898435, 0.8304735633239748, 0.9047207840723044, 0.8236401867685037, 0.8197004187345822, 0.8760787419677281, 0.7839040204620608, 0.8232783380771773, 0.771492062172041, 0.5485215445288206, 0.7101414964546033, 0.7468179716783991, 0.943076129629608, 0.6485582181810982, 0.7776612904434018, 0.6805493872505524, 0.5745717796597926, 0.5866427337649565, 0.724135611361682, 0.7967476937546305, 0.670410213867908, 0.8467577638374563, 0.6019924202801903, 0.7072093950235276, 0.7226107812259568, 0.7483741855845515, 0.7661283903603473, 0.7588226293506459, 0.7585694252451073, 0.6324189699058531, 0.7541006656692513, 0.7893782045464108, 0.9004883931112262, 0.7151894910235427, 0.7386513183293135, 0.5735710217806608, 0.9970499424762891, 0.5744690490242359, 0.5734831096704305, 0.7648374544830877, 0.6084138536456936, 0.8993814916528686, 0.6219328725619941, 0.6647038361940363, 0.9983769845124535, 0.6970367309588639, 0.748881763883876, 0.9302312016221947, 0.8559614670939176, 0.9264529736367809, 0.7842314534586547, 0.6144915054048794, 0.6659744791493221, 0.8607510057940433, 0.6216279200160648, 0.8465737307707903, 0.6192154893752491, 0.761410445626374, 0.6845537501589128, 0.5822004966775038, 0.7585151938962206, 0.5705334080810196, 0.8653205503388014, 0.5735437619255116, 0.957789518523954, 0.5611956880823774, 0.6267432190919702, 0.5730916287650138, 0.9013782578771603, 
0.7202175263172577, 0.8730267590171372, 0.606909376491054, 0.7681647833447562, 0.7639656043782035, 0.9704782350051895, 0.9867910550885812, 0.8764019921967688, 0.8946155800168443, 0.8241605835661041, 0.546719957710035, 0.613029757844382, 0.5198881398069312, 0.7041841178827698, 0.5120162595472617, 0.5483044819923304, 0.8641396867321545, 0.8045763034109598, 0.7857834902097532, 0.8945929109204546, 0.8437244013114102, 0.6390155933712549, 0.6328844768875551, 0.6540550854728389, 0.558521050438116, 0.8454703135827077, 0.8738389324751357, 0.9627675330358019, 0.6839433141547349, 0.5446194743290744, 0.5545464021712998, 0.8148212096320018, 0.5472656547976422, 0.6534928121579728, 0.5979760805615549, 0.9338403486903116, 0.8809093097970554, 0.7060206320105125, 0.7332839426558271, 0.9948041698338481, 0.6809615475371826, 0.8578776942938123, 0.5121570500552264, 0.7829225743418553, 0.767110894655811, 0.9345002673850284, 0.5238438401972527, 0.8912824324294575, 0.9839099479804796, 0.9045493868395353, 0.7757209228740327, 0.5422621902409396, 0.6791352597138329, 0.830002906583806, 0.5858233219129294, 0.6173472076730889, 0.8857523456453903, 0.7904803505381142, 0.8200041110072525, 0.550519872584246, 0.8576540285687243, 0.645541987795931, 0.7490207729244723, 0.9539237543461236, 0.7618302060158133, 0.9303987241789051, 0.9853066887221333, 0.5741385400540355, 0.6081142744971055, 0.6583938767262343, 0.7972013829293325, 0.7283149907299662, 0.6269339388294053, 0.7472870402742721, 0.6827034003943063, 0.7190514821296369, 0.6530402891678248, 0.9511876020023078, 0.6208507243431116, 0.9858895260262135, 0.9783557292526361, 0.8675352318529366, 0.7901701040229372, 0.7384341075225125, 0.5367854529855496, 0.9514713536731769, 0.9400296214134887, 0.8286294373372826, 0.5592204709973336, 0.6594761628946928, 0.5950606025035435, 0.7221371633435741, 0.6845300187404055, 0.6170660387797317, 0.9225705849189845, 0.6724326045766347, 0.8798433508040773, 0.603900263976634, 0.987502949288333, 0.79103953492612, 
0.6954819090243163, 0.7348708771383707, 0.5707428243098561, 0.8151629932987586, 0.8537667540209408, 0.9377805402154631, 0.8488724945849958, 0.9254917636933604, 0.8854222906172013, 0.7165710663371802, 0.8190160338491574, 0.51638476256942, 0.8811196729855673, 0.5568135002707819, 0.7995426431692553, 0.7566712913162694, 0.5680931671643223, 0.9118411314527897, 0.6056239997061084, 0.7096224146707301, 0.6259768433927364, 0.528086819144072, 0.7316810155778761, 0.9504330410336328, 0.8089135413573729, 0.7704511230428248, 0.5888265572735774, 0.5466739300639514, 0.520011037146751, 0.5535324403975586, 0.7758344818530785, 0.5295013213964835, 0.918319031907036, 0.9254405386441498, 0.9544414844085083, 0.9361466292500608, 0.8095249490891725, 0.9215225305006296, 0.8134777299738183, 0.8728984166239229, 0.6930275532601171, 0.5495887276646917, 0.7332665742716535, 0.5455524857629621, 0.7112191020785851, 0.5082324652641281, 0.8178900535899796, 0.9483264870112225, 0.7059249496675781, 0.9579831924249977, 0.8654557394176491, 0.8962471667054129, 0.7638444577085208, 0.5170223044997904, 0.7124446331320667, 0.8184624136060115, 0.9216749522627476, 0.9346204338082811, 0.5774703631673095, 0.6571942688367508, 0.6476085611240648, 0.6795376787309438, 0.8051189227716655, 0.7847324839482273, 0.5498890859956667, 0.503382356466693, 0.6278923728886376, 0.7405147383469359, 0.6272653080099597, 0.6044025302051528, 0.5075107291637935, 0.5769388842565275, 0.7439351079636309, 0.7375842935492922, 0.588433412698567, 0.897694604274381, 0.6538586549906362, 0.9322155651240338, 0.6812081653246791, 0.5159835576250117, 0.9159540670852794, 0.6801040336152755, 0.9508617737179149, 0.7081380758496996, 0.5657910644518125, 0.8948598847494887, 0.8435651138211895, 0.5835786599321598, 0.8157691373010741, 0.9366961607360489, 0.7718556728430339, 0.8904545469552713, 0.9330147427939863, 0.9459868363002704, 0.98472007324742, 0.580530349160258, 0.6230481862897828, 0.6664815067145218, 0.6405256504734773, 0.5092678856911496, 
0.8137112415688681, 0.5919497721138686, 0.7188548862401936, 0.990427901683935, 0.8031162773295735, 0.99443994036148, 0.8724913905480718, 0.9527807305153082, 0.9387583662421254, 0.9473135528521122, 0.7648110400555747, 0.9680545657918271, 0.8925418101594249, 0.6560798735095521, 0.8829964931560974, 0.6812947723753826, 0.7745557698657626, 0.7345052370297118, 0.7729711447297521, 0.8224376907160023, 0.53685459509166, 0.7560634240511975, 0.6230169375302815, 0.82351843136012, 0.7846261481430665, 0.8909830675097586, 0.9535249756865936, 0.8545122143642875, 0.5692994966809904, 0.7310055037602801, 0.561088421506988, 0.7041690300602963, 0.5598430231383706, 0.5138019489570651, 0.8082507801265302, 0.721553519821631, 0.9220932270276918, 0.6974537309754427, 0.6318665980954209, 0.5462624098524729, 0.9376459492711666, 0.8622290862984161, 0.5439099244805919, 0.7119091940786684, 0.9261366617182203, 0.9923356827322658, 0.9932512344698807, 0.8622930107350846, 0.8481662127645284, 0.9784631626259015, 0.7161237607738125, 0.625366362010542, 0.5356446207953458, 0.6819523171304591, 0.7858580899105843, 0.7567774179839761, 0.8375915010799317, 0.6225138573417882, 0.5020076615024974, 0.7787257491624826, 0.5993850142330275, 0.7128345282394499, 0.9644998399095244, 0.5250787290636614, 0.8804221289471544, 0.6697639348392562, 0.6067943250832789, 0.7731582741537109, 0.717586580765594, 0.765957704995815, 0.5048937198652932, 0.5293877833133667, 0.8950017844356397, 0.8649754752455019, 0.8635761481304147, 0.8033042375981699, 0.6573923683609801, 0.9933513565991562, 0.745556782748591, 0.5832627876709022, 0.804792784159251, 0.6703537391978178, 0.5345892589002121, 0.5266145758328914, 0.6936744273242503, 0.953977738807283, 0.5095025488472833, 0.6877211694946471, 0.7026991928767232, 0.7877850570334866, 0.9067944826279861, 0.9919713770217854, 0.7341272534713232, 0.5881305820830549, 0.874634971204568, 0.9408279265678616, 0.7129791441430033, 0.6135002411564466, 0.6093460970667846, 0.5512633305945001, 
0.5003194623163676, 0.6218836793251322, 0.9057959060801947, 0.8096462299484817, 0.5865201090301336, 0.6644266052697914, 0.5821255321620544, 0.6737694077609252, 0.9408490711169053, 0.9803262018634382, 0.5675815908419777, 0.8890497163427588, 0.6432480709358777, 0.7036995187158785, 0.6029989485540668, 0.9792933358020077, 0.6008615703088787, 0.7935856636868277, 0.691311055231286, 0.8487556202036233, 0.9088997172692466, 0.7937914728242491, 0.8724869684041543, 0.6461789687105394, 0.8500450208706863, 0.895341255800582, 0.8530365043071434, 0.7010778267959137, 0.9497554733714075, 0.853479067160362, 0.7229990805730497, 0.6880442344189868, 0.7093967323305108, 0.5991225040833598, 0.715637385684554, 0.6808893001243806, 0.6499857435970602, 0.8056052340525278, 0.8640777063972114, 0.7019517302757448, 0.9799679402584431, 0.5048635301426572, 0.9744777299413592, 0.8508266792134296, 0.7638146579387308, 0.927635423868534, 0.9060283786231293, 0.5074630254298911, 0.8038686031748743, 0.9435488355577161, 0.8892298673344221, 0.9703979283815609, 0.6690544470344505, 0.538499388506591, 0.8080789769711325, 0.7573409753967475, 0.7927860153966007, 0.8704800261549808, 0.8647935957063753, 0.5093314174529826, 0.7289534948285683, 0.9014352629665201, 0.5388101607801208, 0.84971139903142, 0.6435907324050032, 0.879654735626995, 0.9748421651815369, 0.8827517698501927, 0.5359806415048227, 0.722714900129343, 0.705895104741256, 0.8340399462483716, 0.5466525677397807, 0.7187620764664665, 0.9564777380911362, 0.6317675191514984, 0.8970731394229103, 0.7944013445000829, 0.5605021530201616, 0.5157465075844236, 0.6453092721562225, 0.6687950587314933, 0.5728515756978991, 0.6445214875803211, 0.5899554103739714, 0.8038675552722574, 0.6884094010600414, 0.5434490271303971, 0.8199926487364024, 0.6282235424064164, 0.6309820906617841, 0.8216629861268814, 0.7727832397671578, 0.6651715819874398, 0.9950743304160083, 0.5561086926716348, 0.5088603347640912, 0.9490916394488289, 0.5034562148516721, 0.9753852693248466, 
0.872522814463401, 0.8835354783777347, 0.5143753446374932, 0.8014353265499434, 0.9047694414763487, 0.7845335585820752, 0.83613890565, 0.8750747013552952, 0.6274905064089187, 0.7553997760967045, 0.7255365222169716, 0.6903866376396998, 0.6906804974947551, 0.7246132558682306, 0.6610932056299488, 0.9527014173679889, 0.6832438395059051, 0.9239922976151507, 0.605445939472842, 0.6518395962919392, 0.7825615221043225, 0.8783035633764338, 0.9296747215183203, 0.7650948085791414, 0.6254990259605775, 0.832310611542805, 0.9680099101321902, 0.8834640582615376, 0.5508787430665087, 0.6606917431830048, 0.8479342152676099, 0.5415722007583548, 0.9982898323655339, 0.741767822364219, 0.7734270060414674, 0.5730858189549403, 0.7027092762266414, 0.9840775867170788, 0.7185121421467697, 0.6166732540027338, 0.6386450396881844, 0.6903803994904731, 0.6689520016328263, 0.8724129291014617, 0.8072048182807294, 0.6111730665967825, 0.9979844678728165, 0.65856750010966, 0.6887128889669858, 0.8961592671158936, 0.5017454064829068, 0.5833744352227952, 0.5741302312893259, 0.9048704197522223, 0.811073774219659, 0.7758912868142562, 0.5101485581936804, 0.5804669577242665, 0.9154621796094586, 0.8568878660752544, 0.7044663650025234, 0.8473808302591502, 0.7587301963042391, 0.7597068898790127, 0.6639598059580916, 0.9526009445573413, 0.795388150139859, 0.8941119529143653, 0.509101138193803, 0.9823318255808773, 0.5200357228559707, 0.6148604760584369, 0.9512861625797759, 0.6232341214214554, 0.7817244359143392, 0.5243591197745496, 0.5668473521186234, 0.7429922625282936, 0.7527823755005327, 0.7635484901167766, 0.5033478891560843, 0.9207386027580222, 0.6904885512784903, 0.9358931820662346, 0.6714183271676126, 0.589922425619642, 0.9482513261264185, 0.8503872051326704, 0.6024235544886933, 0.828989988744226, 0.8983536980636105, 0.6315021035035568, 0.8691552181486967, 0.7961208723761972, 0.6222632627249577, 0.8091354571791829, 0.9372354789016715, 0.6682274433388158, 0.5783005493856882, 0.7854347539170581, 
0.7081432741705104, 0.9941566639963837, 0.8070382872828568, 0.91359297659777, 0.6458091453374808, 0.7407792288880943, 0.6811437685466257, 0.8238521043850613, 0.7066192949702286, 0.5384035105036746, 0.5942042981170953, 0.6303108367541523, 0.6475269568479717, 0.6236200485774424, 0.6926206627977627, 0.590816538989604, 0.8081660703290244, 0.565123856166814, 0.6947146863654641, 0.8515357889500433, 0.6744710401808556, 0.7292938711062995, 0.9287527117456098, 0.6679845943075561, 0.9353795594610438, 0.7784720876931496, 0.6023619109365209, 0.7344405096150775, 0.7899482896693228, 0.5671708415145532, 0.6381703927846225, 0.5442615677218919, 0.6978801896659872, 0.7479735202683223, 0.9176836717194179, 0.5647710141548132, 0.7798971223410167, 0.8874224358468299, 0.9664822822693326, 0.6812543184143471, 0.6085678255858078, 0.9121963336873223, 0.5320677368042718, 0.535500508987919, 0.5913043525007493, 0.5521535183666735, 0.9705626082504943, 0.9403353998353091, 0.5307004965537641, 0.5217895340905863, 0.6979431684326581, 0.5313635226767695, 0.9296276795583276, 0.8381995208014998, 0.9556625014720754, 0.8906374235043664, 0.774235300490324, 0.7270158592817408, 0.6950464683701295, 0.6825199334185159, 0.8397957120199837, 0.8196128412931585, 0.710882973122642, 0.9194178835384372, 0.6868705969740254, 0.541094981973014, 0.9961196978923738, 0.7267210232537534, 0.800807924763078, 0.5957861188663831, 0.9889453840301158, 0.5936898831957134, 0.8509805684447965, 0.9607123481746482, 0.8854528472342187, 0.8091253575590249, 0.5739854362795154, 0.8345139572745803, 0.5250513699550002, 0.6708977131352494, 0.9403296539095766, 0.6833304472476232, 0.6222121981004657, 0.8755863929456987, 0.7303905317732482, 0.8642803189800249, 0.5511735106365158, 0.9494937312798262, 0.5699385654872359, 0.505132359402374, 0.9974140881157458, 0.687856756480863, 0.9937739630920002, 0.8926179235492215, 0.774088375264701, 0.5080825657079377, 0.887694852523325, 0.8163659198470818, 0.5590779645969867, 0.967342533058992, 
0.9898446166097465, 0.8016808697598183, 0.5132767991828122, 0.5821496965526777, 0.5517834967537014, 0.5301365553268005, 0.9811128023354831, 0.881459202190904, 0.7298948089209265, 0.5220359131495351, 0.8178078336012076, 0.8764070986636849, 0.9783086929366673, 0.6630876095188032, 0.7958893916634266, 0.7122869250646933, 0.6228905218729481, 0.7150019795187803, 0.6552499669669585, 0.9181743638779631, 0.5844732798149832, 0.7233144383104835, 0.8749866356445268, 0.6901284928762337, 0.6309827687724614, 0.9953946089600276, 0.5081709869260094, 0.5740407500832568, 0.769492294268926, 0.8830697184307874, 0.8877228599025576, 0.5462369073378359, 0.8361847857578624, 0.9367314784929375, 0.9370194105664932, 0.9795646186224982, 0.9959463341341063, 0.7053447389234516, 0.7519259224933236, 0.5138788635651836, 0.7155195762567934, 0.5130675678446565, 0.7297787406186664, 0.9661081120616193, 0.6330577690524221, 0.6169934634479759, 0.7324269190974457, 0.5944887271653896, 0.5408755055512475, 0.753600871869109, 0.6399316269948929, 0.5858174527666515, 0.61192649746056, 0.8351784594897522, 0.9505497025658094, 0.5106552338023869, 0.8758524002188368, 0.6955561079322564, 0.8010202010159175, 0.9796121589169562, 0.6458954400105581, 0.9091065660710216, 0.6389045132510471, 0.9930467556160967, 0.5120225341159279, 0.8878910767677634, 0.753686480914693, 0.7966223279321847, 0.5784254322799792, 0.6538080542463702, 0.5532628417462575, 0.9039439619337051, 0.5650857715605462, 0.9378189813999607, 0.9119753328028852, 0.945944898264203, 0.9898316111491767, 0.6581655644324289, 0.8449689542771204, 0.9849227361736561, 0.5164754514770347, 0.584445991065059, 0.9427527517520122, 0.8432581927837204, 0.529691591666932, 0.6168893688066365, 0.9217468687817689, 0.7998550697736913, 0.9780229685111574, 0.5941080425682029, 0.7812007454618738, 0.8397371062631311, 0.7013568841279345, 0.6099666366165226, 0.8438033541861623, 0.7802228172174895, 0.9222522481432609, 0.6586713620024369, 0.9180105851375675, 0.502047413892468, 
0.7001174084775443, 0.9957326601147267, 0.6034505456124322, 0.571074101024529, 0.6594263196716281, 0.8913072633591923, 0.9240784146972987, 0.7722936180708144, 0.6853785758632799, 0.8044621933480027, 0.5044198594066838, 0.8307248894830055, 0.8217089837963765, 0.5029040070765708, 0.8563889877996372, 0.8082029740856036, 0.5331311627290833, 0.8442655577860678, 0.8618934485915544, 0.6116191451476536, 0.9557684393320957, 0.5230671632186423, 0.7078087242371024, 0.5935970854150886, 0.8614325649563856, 0.6915762812608897, 0.6784777026134367, 0.994905475867571, 0.8661050976729261, 0.5279002414935307, 0.8043799611248024, 0.8034805982474351, 0.7290757900378797, 0.714877210427596, 0.7286126007007971, 0.9694819105887185, 0.8027329320739969, 0.8878448867784556, 0.6083931445595379, 0.7138016945354584, 0.9496240821824656, 0.9984327258715082, 0.9137748205073943, 0.6513977950980288, 0.8623932012221595, 0.7329526065547971, 0.6510107026916572, 0.8587310223521458, 0.5582768717767246, 0.725646065204651, 0.6631143970643891, 0.5906931736913301, 0.7847578360249383, 0.5275823311859287, 0.8544204121858342, 0.6668779121414652, 0.9610960715200905, 0.5587516556825447, 0.7593879272392511, 0.7041264990162612, 0.5272732176654671, 0.9335916784983256, 0.9631760036211098, 0.7133572960360777, 0.5712201436194634, 0.6756100583359628, 0.8798201496038324, 0.8095126230976866, 0.6193487407771692, 0.6605388985717656, 0.9526269294707277, 0.80231213593335, 0.9159154135059735, 0.9551812372368164, 0.7390875429315035, 0.9542402778813668, 0.9873914508271509, 0.60451460707057, 0.6453633694161993, 0.93032336403016, 0.5641585112906244, 0.9763784304980054, 0.8061889789761565, 0.6039636083332457, 0.5106710337524447, 0.8900957257516561, 0.5819154221669627, 0.5694166984262967, 0.8016652750071396, 0.5994384475386708, 0.8016378333475619, 0.7180818188149096, 0.6145685094193055, 0.6421303269867241, 0.9707059499390657, 0.7804254022377699, 0.5956962652843056, 0.6784907179608899, 0.751788111183415, 0.7245531774166134, 
0.631322938747352, 0.728740473874387, 0.9435387380383469, 0.966485481358325, 0.7678934612663753, 0.991267847190441, 0.6171168878821345, 0.8835708123011248, 0.8508364595025586, 0.864771267513893, 0.9797203825898155, 0.6639151255188387, 0.5336277665926532, 0.5339478752858859, 0.8455502581869532, 0.8951670332433208, 0.8364871478790152, 0.8364029603007681, 0.5186007194607534, 0.5897407854431802, 0.5232170927452919, 0.7832615190670027, 0.7413716915181425, 0.502268796481854, 0.7907725396689351, 0.7565070338235693, 0.7572011664057978, 0.7476216507272591, 0.749813998820911, 0.9541729694335828, 0.9259174876889282, 0.9921611400319046, 0.923707361154409, 0.8801094367164899, 0.8425973089834287, 0.8394427414717618, 0.7189425653393354, 0.7631559725288615, 0.6131412652566017, 0.913635509417872, 0.8460890909620353, 0.8284503197879329, 0.5719388623401898, 0.5379751707192268, 0.8265144292528965, 0.8583950899967636, 0.9601851658421463, 0.6865991736158725, 0.6378873113055666, 0.8441352440677364, 0.9999664398492836, 0.7646791165007896, 0.8956953844667412, 0.850863880051047, 0.5136381082022257, 0.6872175973555201, 0.6014158816869614, 0.5162464305732939, 0.6032626997710836, 0.6706253258399999, 0.6734469788602336, 0.9393701419611216, 0.7597872541116862, 0.6621101054624523, 0.8267239205962977, 0.9410623824928368, 0.7849663401586624, 0.7573626273090079, 0.6662855067696019, 0.6264420345953654, 0.6589915148652108, 0.7613976436999736, 0.7220022991777333, 0.6461094857226959, 0.9616410017457053, 0.6897841993097822, 0.7582177648405184, 0.7691765806915324, 0.8504365879970939, 0.9946033113172452, 0.6206282227063742, 0.6839644074885809, 0.7936280396072264, 0.5928720052256209, 0.9323288856660945, 0.6253899677474146, 0.9609045692487485, 0.6888134670350534, 0.747600661212018, 0.9339787232858101, 0.9984996761358478, 0.816885273494616, 0.651919558526515, 0.6247067566035547, 0.7982063540215856, 0.9137225084747947, 0.997168048986594, 0.8617462982443338, 0.7116635662702646, 0.7901736499834725, 
0.8131802154142262, 0.6865400477039785, 0.7802096094709356, 0.9026817126288131, 0.7563332744489142, 0.7526718616384803, 0.8571568055183006, 0.543750714715775, 0.5047827533804088, 0.8946132980199477, 0.6462919164988017, 0.8939099855149525, 0.7244233365177866, 0.7735783615589767, 0.8186219112107798, 0.7077596119645232, 0.7168345548654707, 0.9293164998053256, 0.8464152039089698, 0.9239726717009955, 0.6075207654375128, 0.816042920925863, 0.6796370205641077, 0.5668961610309629, 0.8276424820299084, 0.8358431742205092, 0.896890834185003, 0.5706047639541183, 0.949714976662783, 0.5226564650683241, 0.8402915915779365, 0.7196399412697037, 0.6019976925438817, 0.8363591741624666, 0.7176290193729993, 0.7846155772010196, 0.9690550551243937, 0.9300534401074247, 0.564331591283334, 0.7426733651419322, 0.800576855920658, 0.8892646825801751, 0.9525076489223661, 0.5166703613825114, 0.5376437684413037, 0.5017491110322156, 0.8362386783232794, 0.598522970976798, 0.8587725635186281, 0.9269816524937021, 0.9344895357407395, 0.685186561978284, 0.9077348982460062, 0.9766783025627479, 0.8808598939587557, 0.5932891962206606, 0.8688068636697768, 0.5871983217647038, 0.8818282955561658, 0.9357222259910203, 0.7129055455094031, 0.7683912779676699, 0.812482452851095, 0.8898016535108264, 0.6696676159839067, 0.7995266925232933, 0.6423646471368172, 0.5746927980371244, 0.6296475793370377, 0.6015402201040969, 0.8805193801026857, 0.8432936733106147, 0.808878125738762, 0.8605739581202787, 0.5820395783532379, 0.8004253728290545, 0.7380041884090668, 0.5384235670434399, 0.6281403576917557, 0.5490106353983426, 0.6726721145054811, 0.9545713446915581, 0.806922246601143, 0.6947492827077411, 0.5132757721620862, 0.9657658990421145, 0.8794605316613481, 0.6525717246349103, 0.8889235111771776, 0.6355662007345888, 0.5311900689739271, 0.8057106465315735, 0.9357800227585769, 0.7610442743626524, 0.7707948831746624, 0.83071203409536, 0.7802100380603928, 0.8752737798305381, 0.51690326774977, 0.8008790417405324, 
0.8335526924701131, 0.9091206139088157, 0.59377831427522, 0.559093697068838, 0.5417818039078859, 0.7406629377617774, 0.9378470634303053, 0.9722800512669875, 0.9780862547677114, 0.5222013283677556, 0.6548313513352043, 0.7654896380991474, 0.6622934850186539, 0.6050244117786259, 0.8206844327907622, 0.6486181394841415, 0.8304687876400233, 0.6403731723877808, 0.7405349129365112, 0.6953008946844526, 0.6643987158980633, 0.6210511935508894, 0.5953400661964444, 0.6142412785330393, 0.6659325644050663, 0.74799320668601, 0.6088797834632385, 0.5466288526170267, 0.8687217404893687, 0.9906221944909617, 0.995529278549032, 0.9555917729732528, 0.9400720539879961, 0.850554898650826, 0.825194986962542, 0.7681056828227169, 0.7496478948123333, 0.5756829349435337, 0.8902988991371126, 0.6962430692694471, 0.8247111662738302, 0.5025695662532086, 0.5739773541680367, 0.6935579663815139, 0.9896756233931134, 0.6172456300180056, 0.7150037855218608, 0.8745150350172421, 0.5026827504518516, 0.8911789623812153, 0.7552286442965628, 0.928112836335794, 0.7051736454917445, 0.6576341391620677, 0.647018018815497, 0.5914414991023151, 0.9662074356918231, 0.9867625969400309, 0.9708899395082802, 0.9927808701731684, 0.7014982505908629, 0.7035744346067165, 0.8774459092884344, 0.6165577499879795, 0.9965598328840649, 0.8529241664031699, 0.9258074576763804, 0.9023455452816482, 0.9891836133398053, 0.8981532652294457, 0.7032072045301385, 0.8219453207695039, 0.9256157260867561, 0.6933853809236845, 0.7657723202736783, 0.9311053126834312, 0.5608888778936302, 0.8407405047831945, 0.5698989915547235, 0.7962075190607951, 0.928238224637056, 0.988614397025535, 0.7469438054411988, 0.760002599710114, 0.9571397820774552, 0.7641310244932458, 0.5158996353965655, 0.5833449593391113, 0.6463674186734731, 0.6993349779600868, 0.7807478069607097, 0.9665024479164243, 0.7580946274071965, 0.5477977122641207, 0.6632770427815335, 0.6509354836519371, 0.650348491687402, 0.9928258096771858, 0.961006882949901, 0.604637011758868, 
0.8165126229303641, 0.8220578062528626, 0.7243960205877988, 0.8349886717687633, 0.6350318759790843, 0.9354211970078474, 0.8302834776867861, 0.8141942424030251, 0.5195417322899896, 0.870549895325842, 0.7167632030736089, 0.9917397835598657, 0.509073445460271, 0.8434384875269924, 0.5981916862136367, 0.9011079733701952, 0.9734547515619536, 0.7475884068644976, 0.8614263131006505, 0.6308014421111223, 0.9882149729155085, 0.531323487184579, 0.9467724579699688, 0.8126174130199384, 0.5992557160656096, 0.9180439789774294, 0.8835287192782819, 0.8136405625592557, 0.6779135707638064, 0.6528048448425445, 0.7283135585791052, 0.5943559621292048, 0.5947035995831786, 0.8548258373464619, 0.8817026329879902, 0.8359965258416922, 0.930841845892859, 0.6990109655330008, 0.7820014058273201, 0.8279631510339156, 0.5632145627286731, 0.8699646273275121, 0.7326351173172747, 0.865244445469588, 0.8186825732811602, 0.8812283005158243, 0.8592274058104723, 0.6936435029049834, 0.5828773902632715, 0.5756134961686682, 0.7771178971898892, 0.5141994981656365, 0.7948968616832983, 0.936241202945161, 0.7777227925379387, 0.7504686688980853, 0.8841724635038831, 0.7416360032545463, 0.691200833393369, 0.760427965854024, 0.6634004261161615, 0.639707449340936, 0.5816598267394147, 0.7781577435825764, 0.6199100587925591, 0.6148036208870278, 0.8152035556413824, 0.8231155322019577, 0.8498771709174935, 0.8593959760506718, 0.8326517313258281, 0.5790650757896592, 0.7219902876935269, 0.962389546707777, 0.663232576905002, 0.6160446690684831, 0.7958280212958273, 0.8057654292630921, 0.8358309988916616, 0.8702603391787984, 0.5114295707165559, 0.6386463664768136, 0.9796995168734561, 0.9627509449241292, 0.7959860160884582, 0.6967844969464321, 0.9233058536605924, 0.9043227197889021, 0.8342288495244623, 0.6566798297080205, 0.6624586586493044, 0.9108223758089109, 0.5439188310220375, 0.5563382035191877, 0.8167401855856682, 0.7623761866826675, 0.6682570780739743, 0.9430044099956643, 0.9794465905731107, 0.9049306879263085, 
0.8456399480372239, 0.8383949389477243, 0.5157818769591855, 0.7719973913535443, 0.5017589002047858, 0.8194300825740211, 0.9664992599727042, 0.7714130204919518, 0.8239114879147778, 0.8336022260470644, 0.5407881768109732, 0.548888255554431, 0.7334923391124962, 0.9993260037325824, 0.5831055377139209, 0.7757473301537889, 0.5750233341657687, 0.9717280815510547, 0.9777089315525089, 0.6360150031038376, 0.7824605942486256, 0.7161245252172561, 0.5743866846774113, 0.5390030783270743, 0.9587063636244466, 0.5569046290519752, 0.6515396925701102, 0.6441144884710852, 0.8526825815972603, 0.7887903087846326, 0.7069091370171728, 0.9319984624678956, 0.8467985790722174, 0.9551739683545195, 0.9837581790619293, 0.6295353497591937, 0.9328025565315761, 0.9743276108400862, 0.747366562724519, 0.8954174799785074, 0.9159682828392663, 0.8347267932712678, 0.6183268642947315, 0.9701725202586081, 0.7658081185525788, 0.7785405276976569, 0.6318474264261338, 0.8974474199375724, 0.9169471755121856, 0.984889934202178, 0.7475771804450195, 0.7698561681380888, 0.9251931728478542, 0.5593314058598247, 0.7843782159726695, 0.8009835075367655, 0.7203101932749918, 0.5277322297595733, 0.9669937475426131, 0.9367155457509946, 0.5577690531788697, 0.9110369569644681, 0.7457074915286481, 0.7304186618651969, 0.8428894833509974, 0.5108224012630123, 0.9676943546082724, 0.8036713958888873, 0.885267727559069, 0.8144193069706074, 0.9596694569027019, 0.6311844659712574, 0.5751165823814123, 0.9022218778050708, 0.8036341059651337, 0.7571526285446812, 0.7606153966512067, 0.7542962261457582, 0.5417601321443573, 0.8702360202561994, 0.7792121247253588, 0.6257131771902875, 0.8211757746396372, 0.7401616375862154, 0.6119868894367452, 0.9280218504261188, 0.7830710375257124, 0.6786682161150337, 0.9572653019915751, 0.9259041021742431, 0.902591370866863, 0.7575094023921543, 0.7471903053588917, 0.5119248721276597, 0.7158750447258047, 0.7141502385957013, 0.773787381507377, 0.9733255733855358, 0.8449819195116596, 0.888260243831964, 
0.5309333435165842, 0.7865482264084442, 0.7138982672649223, 0.9035890053085944, 0.5899565807044973, 0.9711261039255104, 0.6959331899043497, 0.5791744334264666, 0.963352961113969, 0.6722253667855222, 0.9033882787120593, 0.6357881834191634, 0.7345409183945768, 0.6628193673467773, 0.7304911433631534, 0.7836367169637122, 0.7070814915091888, 0.8291139568137527, 0.8949011596512841, 0.9595996766716808, 0.7397001469162521, 0.6369420129137708, 0.5288878348055468, 0.718926175951268, 0.6337691299244369, 0.7204681941196361, 0.7477403492789927, 0.9293879847652688, 0.9141085264138719, 0.8976409115215362, 0.8960588192965895, 0.715086694005169, 0.7272749534911531, 0.7898210166072788, 0.7064541242765282, 0.614227686463447, 0.800727923179881, 0.554200733465494, 0.8620737405025363, 0.564742953574842, 0.8980283054021114, 0.6155927097545331, 0.7863255106630963, 0.9198563351920834, 0.7175342904663985, 0.8422087624225225, 0.7687567945114056, 0.6318111954066559, 0.5271827186404288, 0.9031241072332947, 0.7357033808424032, 0.7765302129661655, 0.8242812806622946, 0.5431929879854183, 0.7568232880765904, 0.7604351457091321, 0.8134446419821397, 0.520547685845391, 0.9454374490137045, 0.6428776902226172, 0.6680214832097082, 0.6113945147384074, 0.9185669482439611, 0.8364792311579381, 0.8128784206342594, 0.5565439552310651, 0.5723337717035396, 0.7801911127926036, 0.5114788880227492, 0.8866273280196026, 0.7321802959873767, 0.6854431400579164, 0.8343956065991975, 0.9947782458956891, 0.9267697476607493, 0.8987592220899997, 0.5276322248699591, 0.6087219628412242, 0.908070349821515, 0.8192029549057065, 0.5984122413533788, 0.656550184585949, 0.8899014219434909, 0.8992474236637527, 0.7940172799927752, 0.8578046286309946, 0.5512024884846598, 0.8829382311428956, 0.6764322408456509, 0.6910450470733505, 0.7506957104580123, 0.9652667801984417, 0.5576636065847722, 0.5001336034678397, 0.8612560427781049, 0.5455197392722368, 0.8964506943883528, 0.7124278434479127, 0.7230313338102714, 0.8542426301607602, 
0.9083460064094822, 0.7901843266987116, 0.766400088737837, 0.8027042026112892, 0.6313822814959918, 0.5110683664531606, 0.902451992815773, 0.5942052523011239, 0.5629642006125313, 0.8712873368077135, 0.7181788070395319, 0.5971766013482163, 0.5611294967883369, 0.734784944566613, 0.9972781791842404, 0.9245596742948867, 0.927086893235036, 0.987840376549887, 0.7488846440988799, 0.55683334981731, 0.7629506250688594, 0.7091880822559782, 0.7970036038741555, 0.6562893234373567, 0.7406886505645176, 0.7236960936065611, 0.6740988800906067, 0.6787385162263175, 0.5455168871936502, 0.7340961599637108, 0.8427801936021888, 0.6238685890576177, 0.5426474468161566, 0.668647060046739, 0.6412730153606091, 0.5598887810692226, 0.5826220027712274, 0.9929694423367394, 0.7436629279342584, 0.7719988383771945, 0.5580829607833628, 0.9086699564604771, 0.9560213463714858, 0.6561908394486992, 0.8432573404597397, 0.7202234558601825, 0.5605393491624695, 0.7044434978955626, 0.5109059377366789, 0.8854812170969821, 0.7709678389146355, 0.58041858625948, 0.5785701309250384, 0.7896047608525882, 0.9759049624063353, 0.683902704743939, 0.5995070074390483, 0.9510228035501807, 0.9271405020648498, 0.8222320013498718, 0.5246692530938826, 0.637397230951358, 0.6447789982434328, 0.7282379046222353, 0.7824910711013253, 0.803657055911326, 0.9242568313403299, 0.6650894760365497, 0.6725910335767853, 0.6839069197962451, 0.5913553298531732, 0.8575160972504131, 0.5373323425874945, 0.5803507889863313, 0.5351716353746337, 0.8960187466867161, 0.6498872023069457, 0.6242756340110243, 0.8975307052545897, 0.7732563722943215, 0.6445655583011023, 0.5185108714151397, 0.9388222228610057, 0.5459650272388292, 0.9661738406875342, 0.8388304695842718, 0.658000958165494, 0.620423605777592, 0.8078180511643807, 0.8594702252327011, 0.8936639603712191, 0.7333627444443636, 0.666846264842812, 0.7082045589817741, 0.7718997419321766, 0.7364174604361448, 0.5608298455695833, 0.7252935184103513, 0.9682841520044289, 0.6019851937384872, 
0.5284922203930709, 0.5716859243003886, 0.597105920340462, 0.8694275970439955, 0.6039315901874155, 0.6752766057743053, 0.6844526421146271, 0.5502475615540927, 0.6307611860874345, 0.6439314921889947, 0.8214214023626731, 0.9164985120291872, 0.8406706700486368, 0.7595000381625292, 0.9415600248718055, 0.8957190354285132, 0.6945613136005576, 0.6258690089418391, 0.5560166735039058, 0.9192160102423296, 0.7455708275092636, 0.6375403434920256, 0.8731102490459601, 0.7280785826867896, 0.7356306556657992, 0.7797719103919338, 0.6434667088810702, 0.5940722328901712, 0.6029958229329007, 0.8719703234718883, 0.8776645255695563, 0.9143051759775978, 0.903360424580822, 0.7684003049244632, 0.6745698158391327, 0.7693117249899923, 0.7538611810165996, 0.5713791502102805, 0.696036054288617, 0.9711223433220706, 0.5654051505104836, 0.6779858458828086, 0.7909977323319507, 0.7908020863177767, 0.9736634109850401, 0.6454849306664883, 0.9665809629458143, 0.6436451033434956, 0.8812244828225564, 0.8788460311224423, 0.8742197100709442, 0.9475290362464279, 0.604277102733094, 0.7082608729812053, 0.787594462632144, 0.8272940000969469, 0.7550623062487953, 0.6262436709712238, 0.8259613777315852, 0.9392351463052253, 0.6048080598993854, 0.5109544702056829, 0.7700297444645009, 0.6252727110667393, 0.8849156591927494, 0.6813893659398719, 0.6857736961523735, 0.7514313446813234, 0.8054492656879684, 0.914375539378816, 0.8589821586551027, 0.9027181738947685, 0.6655947526026252, 0.9800410231591039, 0.6147969671094635, 0.8256598947951299, 0.9994876614739396, 0.905034755321875, 0.5307929828562579, 0.9663827647166294, 0.6375519063456715, 0.8277776606388141, 0.9346118266718322, 0.5462172008323236, 0.9233637099718977, 0.8769175792312149, 0.7363687150535523, 0.5937258264543381, 0.9408963090779094, 0.9698390031170969, 0.9291674640675067, 0.5673521522839533, 0.886867439138874, 0.6842669631526668, 0.6347008199222254, 0.9992557652118594, 0.6379428139083138, 0.9389416582540386, 0.9170653066577314, 0.9683445369944117, 
0.5356595293973281, 0.6846048244343576, 0.8685290035737733, 0.8183282315370733, 0.9899951621425016, 0.7106655968227205, 0.5131770279100264, 0.5035138957654663, 0.8313219029892803, 0.8955043680461069, 0.7948879367279194, 0.7669779491508629, 0.6805258853921543, 0.9705489568177557, 0.6494872962960987, 0.5430616596794555, 0.9162288187009686, 0.7163861728835987, 0.7816786642304907, 0.9608059668829074, 0.9755238560145933, 0.808051405803614, 0.5769117012274392, 0.8861168818547742, 0.6671737147837389, 0.7282794186174393, 0.8183073989249189, 0.8104202366438874, 0.7531955636675762, 0.9104062364955002, 0.9321703352628311, 0.5273829163584178, 0.863262471747652, 0.7861153795472978, 0.9697074986285359, 0.9426129467281634, 0.8968024600652929, 0.8260756279191765, 0.6544961699506369, 0.8299875408538939, 0.8109471942706923, 0.7051534131979237, 0.5097263822075413, 0.558426099538821, 0.7009907972038316, 0.602602945613981, 0.9026092990669834, 0.6643551255929487, 0.7880131084583677, 0.84617505344902, 0.8427140711705565, 0.5947040343515664, 0.8804933257015441, 0.9765766081531451, 0.8077733111099449, 0.7552823816793716, 0.5713676539054826, 0.523391126209486, 0.7311165367890352, 0.6431962944604187, 0.7982595027939094, 0.9852662501245025, 0.9367025980892598, 0.8954180995012139, 0.5478590429900279, 0.6095153220462287, 0.8748812699870285, 0.6171960836669927, 0.9567820056547649, 0.815367008817353, 0.9041538851169574, 0.957423039532159, 0.8789806019296218, 0.575527709238939, 0.7926854974932998, 0.7603511143653392, 0.5067900767329327, 0.7751174958620849, 0.9239931441110503, 0.5397469274307606, 0.6307781118863305, 0.9842764700556035, 0.5875971004895171, 0.6518280553275178, 0.5687540505240836, 0.6309634561227406, 0.5506661201805493, 0.8683936583365313, 0.9704710744653563, 0.6248275123035869, 0.6334560228566761, 0.5251546938484566, 0.743404289620226, 0.5443608511072165, 0.5985666407364683, 0.5201726036946988, 0.8804871740518267, 0.5515474882186432, 0.8329696526869317, 0.621898989984784, 
0.8106953648623096, 0.6603213565579528, 0.5476280281015058, 0.5630939697958766, 0.7150414427188716, 0.6584786328325312, 0.8224563978784134, 0.5027059980528774, 0.6786464568761472, 0.8066290101341054, 0.7752156577857452, 0.7994321090188186, 0.7921193632882303, 0.9832298653439377, 0.9727390682015862, 0.9085632872837022, 0.7225949678132322, 0.8621331757503197, 0.5547413557217691, 0.7305126385021052, 0.684645767237667, 0.6770599053660074, 0.6486799970218013, 0.9897412059185828, 0.9084754527180561, 0.8295186327326727, 0.683972314863232, 0.7985078849229126, 0.5827918188443317, 0.7259038343843542, 0.9357877429749217, 0.7373146008171165, 0.9084252681858657, 0.6634762984884992, 0.6301098608340822, 0.611598102832037, 0.7076332035798896, 0.7033110561242844, 0.5489422849091968, 0.8300384159668306, 0.847033189254546, 0.6050733693486547, 0.5438123352227251, 0.9870310497778397, 0.6693467715262073, 0.86784415250135, 0.8888280190727151, 0.5751386529008136, 0.9364771006987794, 0.9903105879287664, 0.5128287749875002, 0.9612766087444218, 0.676452193440726, 0.8355552632590293, 0.8249289307670286, 0.7662646868096359, 0.829401600602881, 0.6031700945954391, 0.9570000441738263, 0.6480712325337591, 0.6641655582060739, 0.6175134884969018, 0.9344718965935604, 0.7585978651072163, 0.758530726994734, 0.5553511787610881, 0.9153133181108966, 0.7692729918581285, 0.9085699710502311, 0.7697226107049859, 0.6922865653472797, 0.9715937102591742, 0.6912871167429427, 0.7378484991576852, 0.5473902220201774, 0.7571352654375378, 0.8199565722287203, 0.8649376300160734, 0.6975412880863627, 0.667803697945502, 0.7131294929774812, 0.7437780692065276, 0.9115934737985216, 0.833570688514876, 0.9618480544452224, 0.9488596016581533, 0.5457111752704153, 0.5911318535529964, 0.6587139175566996, 0.529488824778573, 0.5217431752793664, 0.7630002613221223, 0.6256637223385811, 0.7182629318844993, 0.9848445514671771, 0.5108458772401279, 0.6873047980656173, 0.6729941849480114, 0.9830903371507416, 0.9244507193545356, 
0.5571174869989765, 0.8274671869886201, 0.7212867514664072, 0.5066716337347985, 0.6073642766071774, 0.5759583510910682, 0.6392699913141688, 0.8821419287471213, 0.7627986438514108, 0.9368771868264514, 0.5364579600933459, 0.8334061208433301, 0.872374307359634, 0.5412341658875819, 0.9630747848957222, 0.9004577751116024, 0.7970348309586386, 0.9390997088931234, 0.8588545494482696, 0.6688610320542108, 0.6337846139637908, 0.7015371150634714, 0.542555322343935, 0.5736183885424244, 0.7820830880078845, 0.8896579470173357, 0.6553068303070203, 0.7857488418417224, 0.8678468638236312, 0.538900480977592, 0.8042116031333993, 0.8421650320120525, 0.903673724335393, 0.8977917882433799, 0.8521333087649696, 0.905494854529006, 0.7958656090338774, 0.9389951584997205, 0.8211036204977693, 0.8961815657287313, 0.8741248936469235, 0.844058471817963, 0.9192326794207668, 0.9489503807752173, 0.5277962005196142, 0.7717221577141159, 0.8192811423415947, 0.6980922385642219, 0.8511163900074494, 0.9867113318044372, 0.598384413304989, 0.8489669064615251, 0.6546631678113445, 0.6926494398001981, 0.5768237212760474, 0.7578008831004295, 0.8657771764492432, 0.8742504096776054, 0.9558601835862719, 0.9788198246738857, 0.9271460742569815, 0.5616688905516399, 0.9351778776581972, 0.5273910301864844, 0.8897451714855891, 0.6595886269518045, 0.746190014200573, 0.6287770881094774, 0.8830018759892078, 0.570546846205167, 0.6718541160917998, 0.6525615345601636, 0.5073108797182075, 0.8942285659343384, 0.8959660614915126, 0.8391330777092527, 0.7087404643333085, 0.6279686950063307, 0.5544870399733262, 0.8375364043685447, 0.7248615624181125, 0.6030244962871678, 0.6891095335741182, 0.95597167840707, 0.9965407275970292, 0.5962327088385707, 0.9698858788541271, 0.8886426413395376, 0.6563022708108133, 0.5077242904534092, 0.7702622803156807, 0.8052120034506155, 0.9794115783759336, 0.7180879553854074, 0.7715566648033443, 0.9501201921467592, 0.9147434585386939, 0.8170467091528383, 0.7975684057140691, 0.8000332292605468, 
0.6992493764210201, 0.813848226117116, 0.7189673170562068, 0.580896885382431, 0.9758983852979767, 0.860145301925225, 0.5187466938011907, 0.9256934051185923, 0.681202711966026, 0.5129843708244533, 0.7787862876661878, 0.9005694406508705, 0.6760075707146902, 0.5599756293238809, 0.9542774790346125, 0.9539505648462129, 0.948673740566192, 0.9475567872853535, 0.5410278163886764, 0.6885953569841428, 0.5947732352007191, 0.5822112979985499, 0.880700556854966, 0.5226009808695022, 0.9130461206042946, 0.8708589378486544, 0.6400871395096277, 0.7538517292488903, 0.8731824133293549, 0.8940087179026057, 0.5038005316200165, 0.6819057659913471, 0.6084383301628409, 0.8253972319253389, 0.8022156291376722, 0.949646276940258, 0.6147257105017567, 0.768958505229071, 0.8194892977333649, 0.9717034457497509, 0.9349274046588762, 0.7922906651062107, 0.5133717202558533, 0.8646443852872374, 0.8895708573775845, 0.7958749948262864, 0.88834175898304, 0.7485483747891819, 0.6450008809454224, 0.9830978463183739, 0.7035677984402493, 0.904937252189091, 0.9327698952427328, 0.5415424666718529, 0.5340888441373757, 0.8966961943608454, 0.5997002859756368, 0.7582436416870799, 0.7419071283317895, 0.9970877300535752, 0.7110131560261155, 0.5056410741197108, 0.5396599519712753, 0.8980237206600448, 0.8676883248714351, 0.5508724988145814, 0.9751799776766691, 0.7243352714948192, 0.8273690690401767, 0.8131095766792205, 0.5753134015595363, 0.6145527978948102, 0.8044004257086064, 0.7019887573514607, 0.9944493470600588, 0.5411648157193809, 0.7383370419048048, 0.7436611051168809, 0.9639615878246787, 0.7393764123389583, 0.5988350037165315, 0.7586062331941319, 0.5686701865012188, 0.5719649010134908, 0.7691801273520934, 0.9822250889162654, 0.9756609526193615, 0.982363647753953, 0.5951363827966321, 0.7599154396525774, 0.8513924386235832, 0.877420945164737, 0.6124433299397685, 0.9583082628853181, 0.5871413941156409, 0.8997877500697009, 0.8529751033486126, 0.879615290426673, 0.861602587577484, 0.9619991691239644, 
0.6273065159107201, 0.8483070099273461, 0.806448841825495, 0.8063020366307576, 0.8698465418371276, 0.5909834338031927, 0.8937028156575402, 0.7052995625577643, 0.7412832953174442, 0.9976246129597981, 0.697163746836896, 0.9974830338854203, 0.5981707556793968, 0.9492901186610515, 0.9857570287308917, 0.8334562843343944, 0.7839655825523958, 0.8218662266548107, 0.5078246841944598, 0.5462843342921223, 0.8023973294418056, 0.7051423788344798, 0.9201600872259676, 0.7996118395282348, 0.6769380932617316, 0.5198249948465449, 0.7917545544425337, 0.562394489013309, 0.9409420845324046, 0.5711421848526925, 0.9978635044356181, 0.5967466647850618, 0.9588410433043884, 0.7805939382888769, 0.9661235971190982, 0.8363594045432248, 0.8194385247188662, 0.7702647321941387, 0.7908745454218515, 0.7727358129963176, 0.6772096121828708, 0.8017695839349496, 0.6051454917110829, 0.6928756225357249, 0.9795043661147482, 0.6004890832569472, 0.5817885540663513, 0.6511120496620849, 0.975784012359415, 0.6417566623007267, 0.9803740182729255, 0.572758781289773, 0.8711886006166697, 0.9085688136950738, 0.889801142533724, 0.7157565325244062, 0.8770220994366902, 0.7267977507951808, 0.5320327327560139, 0.6790819315574073, 0.7277861503805555, 0.6074138960847969, 0.8712484057998082, 0.5623476644806062, 0.767047595951271, 0.5331057144931326, 0.953760944995629, 0.727624724292464, 0.6816501922038616, 0.9446689797254766, 0.5221832796491161, 0.6215673673685829, 0.6911634672392547, 0.6993818463626181, 0.6199733710813371, 0.8163383776609259, 0.9719710079204597, 0.7823407312046144, 0.591475500590094, 0.7892306118865056, 0.5681091129567502, 0.9445409477824799, 0.5437843058359163, 0.844901177148836, 0.8506263992651755, 0.5494094538542607, 0.9511278838856514, 0.7825395703283897, 0.8526179990023592, 0.7448922818534519, 0.9182013309910455, 0.6466952907739364, 0.6155130835979976, 0.842278840114387, 0.9542197460646129, 0.8306374851616845, 0.6540356031032082, 0.9358511403720955, 0.986015716430377, 0.7159051642344116, 
0.777851937872202, 0.8533273583308871, 0.6150645895834719, 0.5586859480476404, 0.8467991930629166, 0.7040514806213705, 0.771163087921058, 0.7461307857566088, 0.5555797140652374, 0.9520281760907703, 0.9398792576860725, 0.7357913320730666, 0.7725509043351712, 0.7525952809414207, 0.5245976633653591, 0.8358848408121968, 0.6665231323754834, 0.898811378576037, 0.7157550111838775, 0.6310681153672546, 0.6336289771062926, 0.5870409790260966, 0.8300110302831794, 0.8990203985303973, 0.732286779065446, 0.5161258595955858, 0.8906158677372108, 0.881719748570164, 0.7425171104395281, 0.5153223098028785, 0.6403454916071163, 0.5010969559706748, 0.9624900656013632, 0.5296207748460224, 0.643972928250884, 0.789311771980802, 0.9832446466948478, 0.5618209450352603, 0.9463197822376137, 0.7122047804787885, 0.9409921727296384, 0.5861815657084631, 0.9406432286537747, 0.7518297261613988, 0.5221058348123369, 0.6368412161129671, 0.9718762964754651, 0.5756261685129601, 0.5938248830677837, 0.631437927302481, 0.7543444536857766, 0.6013261549460671, 0.5485774150802245, 0.9663505547170346, 0.9472535081891276, 0.74336956591351, 0.8969152982478908, 0.9071297612854089, 0.9988553291504345, 0.9083008694768588, 0.9832445348075345, 0.8376971582588302, 0.574074065019787, 0.6271352777137036, 0.87420158201666, 0.6860661876464433, 0.5422531973808841, 0.7050466743437009, 0.628215501217007, 0.9512912740426593, 0.5277154570828894, 0.7027990305507696, 0.6703282557749937, 0.9063591159209086, 0.6447658817282526, 0.825592247392927, 0.7481974072674842, 0.6783459811518568, 0.9531836823385675, 0.6854922632433431, 0.5068996441544156, 0.8422565307493477, 0.5387456696346975, 0.7944114485686848, 0.985216122403962, 0.843171988913989, 0.8486095062050252, 0.6877029306468021, 0.9153442827344144, 0.6878005127721023, 0.9784441275300263, 0.8831981046341608, 0.5153381492011975, 0.8418557019858006, 0.9437923127273671, 0.5185889912521449, 0.6750791070164642, 0.9873287617889113, 0.746292346056705, 0.6620372604551864, 
0.6773944451300689, 0.5339192198251799, 0.5432333707129327, 0.8645206274583248, 0.798025320653431, 0.9985642786403853, 0.6778369977093459, 0.5930656509902539, 0.9967674657195609, 0.9467266812818258, 0.9710583114574536, 0.8580410907496788, 0.8708937657757163, 0.9586556066642, 0.5142102606738312, 0.833296543696247, 0.5296117212747624, 0.6353451884055382, 0.8297507818874944, 0.5432639790172981, 0.5912603836667414, 0.7542631287488275, 0.7299020792331655, 0.9581032085542213, 0.8916247969034785, 0.9199354529977577, 0.7150075343984952, 0.9670571561701933, 0.6840448471091817, 0.8547273429827227, 0.9507954557305958, 0.9506283298099676, 0.8466999421378987, 0.7794367073923187, 0.6089496836937078, 0.9863852454970479, 0.7348000898661053, 0.8568530571352062, 0.6899602705477119, 0.8178726545426631, 0.7228297313949346, 0.9495790220083671, 0.8416231787353086, 0.9818617646609685, 0.5804898659610493, 0.7032024096453653, 0.5253914104485315, 0.6001327677368943, 0.9431256746535912, 0.6672752007710029, 0.7742166706547223, 0.6575360386427056, 0.7543611373539297, 0.6360277951629019, 0.5639969389740781, 0.6999010933802963, 0.6720793512072085, 0.567943432957245, 0.78856594607749, 0.7218201867665259, 0.9979858454372079, 0.9627634435305905, 0.5930223518544863, 0.7419302902669207, 0.8019829978350488, 0.7603713750842496, 0.7853626567062293, 0.7366560708386092, 0.9699941472063116, 0.9073919035643001, 0.7713984271180845, 0.8579319776798314, 0.567654637916118, 0.8394817318018596, 0.8349947849443335, 0.8239607711368281, 0.5109436438088244, 0.8793590559685529, 0.790379510048979, 0.8255556376714851, 0.9618511684676747, 0.9935470025827519, 0.5381697039823615, 0.7205556481519007, 0.5645666400895701, 0.5955219615647735, 0.982187812976567, 0.9943309256735963, 0.815779608299485, 0.539011956980312, 0.8860776145549837, 0.5646027702365493, 0.6583234203799238, 0.9478114094580077, 0.6898280721207322, 0.6816957739619807, 0.9956648421868209, 0.586579973411156, 0.6660185947348678, 0.6107984708520174, 
0.6245415828215732, 0.7129082160238543, 0.9814318270243487, 0.8262055196207452, 0.7960985912287948, 0.793866576128978, 0.6770261298823774, 0.677145387074513, 0.5857277235730532, 0.5165239593228295, 0.5627864036061102, 0.8138806065942239, 0.8592510598991027, 0.5671355236037516, 0.7079219595064885, 0.626124579692505, 0.5690197168668669, 0.6904603286760356, 0.5423468249186547, 0.7826949184796776, 0.676154018991832, 0.7239454743057088, 0.7687337251796165, 0.8910025136539439, 0.7196902167164678, 0.9136748672852456, 0.6857718584589886, 0.5221395193535193, 0.5250847105733526, 0.5858820991591019, 0.5556958843185388, 0.9797830429935083, 0.9362609537120896, 0.5415861955196428, 0.7929525460492699, 0.8727453243619607, 0.7573991149961978, 0.8297532361577785, 0.6926629163242397, 0.876970125753489, 0.862463789777703, 0.7159600833819445, 0.7739556445663951, 0.5076500922214575, 0.5649878721276727, 0.5255838482914401, 0.6905422313786471, 0.6720541710770958, 0.8558364334957782, 0.8896866308490958, 0.5579288460940572, 0.5998164198510771, 0.7098607353216231, 0.8027775326641822, 0.94100384904476, 0.9551931419726285, 0.9561168652241498, 0.5505916630761435, 0.6739312543889427, 0.6734845710533881, 0.8065948572052405, 0.5435247709716231, 0.5365534015038121, 0.9181202133545633, 0.7825286618404055, 0.8203385102599055, 0.8056116341645434, 0.575473372705372, 0.8105700737188023, 0.5114384470364404, 0.7018932212465012, 0.7723512992241972, 0.9034427371256772, 0.6331950374058064, 0.8026279163744874, 0.809307799847798, 0.7674336079314487, 0.8106761787794912, 0.9608923424974276, 0.8021229948616726, 0.876071980454499, 0.9310461233691565, 0.7898894314687062, 0.8128628771985961, 0.7596032944319986, 0.7344172038185359, 0.9323359569369912, 0.791109563871758, 0.6598891100003068, 0.5723780351604557, 0.9868685873049707, 0.7552130450385539, 0.8136781684878651, 0.9797097982070505, 0.9367749492899892, 0.9084403999674996, 0.8979813965319596, 0.9557207233761542, 0.5561547546652043, 0.8056218476172641, 
0.8116056273975298, 0.9370416017775722, 0.8050238137321206, 0.7449834606967906, 0.8168074189001426, 0.793908209518051, 0.6753613811995536, 0.9563288052568868, 0.5538285292535674, 0.5290602173550524, 0.7027379946478465, 0.5089662270512351, 0.9395402648216058, 0.8538859289082626, 0.5046232123504315, 0.9122267296289969, 0.5872586390559771, 0.8772278980247449, 0.7519129836110487, 0.5338820817101135, 0.9959622646109378, 0.6837972631824765, 0.9634434544549246, 0.8978022689885989, 0.8676496991767161, 0.6978899967295747, 0.9983728223248416, 0.8156244842744406, 0.5861666569985702, 0.8080121473132869, 0.6734355807336943, 0.8976377669359636, 0.9233654662914008, 0.5365773925976363, 0.5347693528222686, 0.8369425688051725, 0.795377073513881, 0.8581320702684482, 0.7897843754326357, 0.9228527435837008, 0.839680640417894, 0.5693032191994274, 0.8597316507059979, 0.9733688976178037, 0.9285256773984257, 0.7888520355746378, 0.7742655025231746, 0.9995030599051248, 0.7804059936703094, 0.8406892894848075, 0.8759535583985304, 0.9722484646241145, 0.7285979883664637, 0.7910441738699515, 0.6671077617178438, 0.5893432179457612, 0.8066849345504137, 0.6508954117588077, 0.8258402037135792, 0.8476868468280825, 0.9673376762668603, 0.9588189608714468, 0.6630021897501521, 0.6165019944995626, 0.5364916391181712, 0.9545039360639747, 0.8337734493303537, 0.796324485479156, 0.8695250403035228, 0.9972363916628966, 0.6282951222479424, 0.8517778279971411, 0.9095292341525492, 0.5452068841744167, 0.5173369306097473, 0.7646504151585249, 0.7321719376599234, 0.9106746829268809, 0.7642417315425629, 0.9328256105465984, 0.794583893795509, 0.6261639489244435, 0.9163279449570931, 0.9935363908275694, 0.6299322942715584, 0.6376016110758801, 0.9703696610842076, 0.9099836435414679, 0.9929542459629765, 0.5151871800788789, 0.7940459426464317, 0.8991581291733493, 0.5717482198395478, 0.7814930649761669, 0.6290843615820356, 0.7719328279491089, 0.8846583167738545, 0.7549149861322626, 0.9605081385471741, 0.8950715072900877, 
0.9489919038932879, 0.6457781633969703, 0.5211326993057397, 0.5856620178111913, 0.6176383060932618, 0.707855325787361, 0.6896463940370787, 0.8273214515887419, 0.8115939921174724, 0.8956159902499549, 0.9005496130248664, 0.8884410008021397, 0.5494633089602083, 0.775083446726425, 0.6835761604352686, 0.7967729427815374, 0.6035555291145629, 0.6408428275684868, 0.752001929862215, 0.603584761339244, 0.633750943044061, 0.8541867961901726, 0.5400259342968416, 0.7479868779694071, 0.5207718241464703, 0.8931222244568144, 0.5212996458939372, 0.6641410994630046, 0.6648588188350685, 0.7173141291178511, 0.8548215508517898, 0.618934045915059, 0.5653643022230057, 0.9161589530333558, 0.6359982362723728, 0.860201039158693, 0.7581130277193617, 0.5036551279003408, 0.5747324836560131, 0.7917287735780105, 0.910937950572779, 0.874650361705939, 0.9855282922839264, 0.5623972370391024, 0.813383438868597, 0.7575992639390989, 0.6732007875185035, 0.6255791031476721, 0.9469490297827741, 0.7345076277192495, 0.5410624991358772, 0.7045753818255991, 0.7288720628002912, 0.7037611638335839, 0.9710070997330117, 0.8022728542451314, 0.6310398238604729, 0.952034574936194, 0.6182836788806969, 0.66641903325216, 0.7694779980804999, 0.9891983845312803, 0.7347075046579122, 0.713850471015705, 0.6755054144140276, 0.5841446822598649, 0.6645289879729257, 0.7990245084587575, 0.9860326491135885, 0.8665715896946498, 0.885137235474478, 0.6695013770772626, 0.5343888769190845, 0.5525726082196901, 0.7653860836824844, 0.675556749997873, 0.5686548254835562, 0.7894511688748038, 0.9171691834213881, 0.6853958062611103, 0.8427754453212237, 0.857063911007617, 0.9404438402226549, 0.8126191258093736, 0.6809918265922235, 0.6470757787239868, 0.5876452279036182, 0.829879576410377, 0.7162384852645303, 0.6260549161649139, 0.710444479676583, 0.5451052704211538, 0.6002762323931472, 0.7446691178260008, 0.876145471605769, 0.9273397734492796, 0.7135532023146662, 0.5735515811287766, 0.6707526032524721, 0.9231366175113177, 0.5299503310883396, 
0.6102037625063448, 0.8548620397036211, 0.9662775008794792, 0.7047486309824791, 0.6598282550796781, 0.8186452549456976, 0.7307936711893506, 0.5796963669927085, 0.7153487965850837, 0.6976484306034063, 0.5538058932457308, 0.5157461955796203, 0.8389740243879036, 0.5185213921067179, 0.830047862624796, 0.7260511079100844, 0.8083399761137046, 0.8353305146130443, 0.9397350134339253, 0.922721361261817, 0.6451504647300298, 0.8319343974797508, 0.8883514234760221, 0.9920005374997083, 0.9161627620844868, 0.913915834872233, 0.8375277152774465, 0.8425135079037128, 0.7971326094375357, 0.6973283877213294, 0.8600447842159022, 0.524816287125067, 0.6741986244051593, 0.9770696957677317, 0.6651074225092114, 0.9865308917306403, 0.6913878168569552, 0.6265605065508788, 0.6885832044628699, 0.8855394512455851, 0.8355385169279774, 0.5600393846737515, 0.561788938636468, 0.5734494906387395, 0.6777384512300886, 0.8740514127925297, 0.5331328009856291, 0.6795788612787532, 0.7502081589669968, 0.9742505964718364, 0.7688371856213465, 0.9307962860161374, 0.6990847073319579, 0.9318477071933249, 0.8411117603170926, 0.669683430506105, 0.9134297771359725, 0.9393936374807517, 0.6439682109164526, 0.8995319404719655, 0.7246949126930551, 0.7375625972260613, 0.5483824042802935, 0.7168530932749351, 0.7869219312770118, 0.8191916059227775, 0.6520389803625781, 0.9615790344326561, 0.8981546868997321, 0.5763503228540976, 0.8508323271806302, 0.7246303941207156, 0.5733035755100346, 0.807452920605709, 0.7816183693779772, 0.9043788739695517, 0.931917051800496, 0.853885227208835, 0.9908708049116044, 0.9004444941868063, 0.7229825981616098, 0.5853520759486186, 0.6614469992984804, 0.640833008481379, 0.638935027042622, 0.7204602367782975, 0.8223946169594003, 0.7523393149576953, 0.5827255716149891, 0.8195806248721509, 0.5635362619083791, 0.6684091281615487, 0.7223512116138041, 0.8019469806231896, 0.699152723669537, 0.6647449280008157, 0.7968719046631176, 0.7175236141100166, 0.9009212248094265, 0.6275057220181938, 
0.9608758763865051, 0.8245378739210953, 0.8102616284597515, 0.52986843709753, 0.9899661060895855, 0.9959652401519554, 0.8649556855947742, 0.8422907114174498, 0.892369446895227, 0.7465148028670313, 0.6290369676114235, 0.5732713140423558, 0.876396297322728, 0.8378358417430698, 0.7886391937697059, 0.579914917674193, 0.7525939474208412, 0.9479939518109528, 0.8408257500511414, 0.7480701642385175, 0.6440105798001348, 0.921356437424961, 0.8662492727533518, 0.9723069972928817, 0.5464801602347928, 0.5463556785669488, 0.6468105979276256, 0.9245174044700407, 0.5713486922013681, 0.8970366321997925, 0.8513592958696983, 0.7128646488317867, 0.6997105955267222, 0.8100115737229295, 0.7373809163650213, 0.6470165817848742, 0.6963174758510762, 0.5274010888926779, 0.8975889365665966, 0.8077482533425522, 0.8625951116645407, 0.8559497391443734, 0.800421468791622, 0.742291701107636, 0.8249224897322418, 0.7310529854366228, 0.6556702748808696, 0.5329453351700871, 0.6569613862932132, 0.6614806726116169, 0.5125645474092451, 0.8774323763565144, 0.7142010433248891, 0.5157886257404565, 0.71172074677021, 0.5901800666292902, 0.7632796783891271, 0.7564712933310731, 0.6275467755995017, 0.555714163707107, 0.5232984329484609, 0.5858164114855389, 0.6691295382517765, 0.9879307116315642, 0.8316411522117396, 0.9644566556249771, 0.6239150064370961, 0.8740054518657987, 0.7141144091609906, 0.5174238310094156, 0.5824495372617196, 0.8458167674336057, 0.9699665137004154, 0.936831132380469, 0.5596464468297595, 0.7526481178861877, 0.8919111124733012, 0.745887583955118, 0.6382708786274751, 0.8551014958280978, 0.7599463457389295, 0.513422064782439, 0.7552045947933057, 0.7177106429926867, 0.8394835548470536, 0.5159875435852168, 0.5428751897014781, 0.5170081532880351, 0.5777588700692113, 0.5278637062743623, 0.8615262750068757, 0.6223905186234522, 0.5138209986195428, 0.8275406328102839, 0.83250636627959, 0.5638709770563579, 0.6956838067761415, 0.6315801351063457, 0.5304721356039349, 0.8321053649117072, 
0.5583566188287628, 0.8105660904874803, 0.8188693252317127, 0.7278980815718052, 0.7776468116842283, 0.8701592795034652, 0.6514998206636178, 0.6291536707351446, 0.7989005457556451, 0.7473357857139687, 0.8898318548377242, 0.9719293399206964, 0.9956308162218706, 0.651661991643838, 0.6902224302727491, 0.6957873646681374, 0.9365263275030906, 0.5835098903605566, 0.9432560033218156, 0.5848738745120371, 0.6294504239936859, 0.9833601846815168, 0.7711198296801145, 0.8132883447352448, 0.667566509089977, 0.5132598109364381, 0.5938004880928874, 0.9754214637536276, 0.7391286659600977, 0.5672932128794796, 0.8745533569539641, 0.9782993233592302, 0.7156249658037258, 0.7543155028580695, 0.9493102827378608, 0.8937006071472917, 0.7911447932580549, 0.9855381683923405, 0.8586052817521845, 0.7011472712991157, 0.8258136235440261, 0.5642270330473658, 0.9945697183459226, 0.8287928757168879, 0.7429517808377253, 0.9963970534253623, 0.8263644154100245, 0.592981974941447, 0.5399275922481565, 0.942384025710859, 0.961978736420062, 0.8631964717064504, 0.5410013157283347, 0.9561008555994224, 0.8212318894280478, 0.7646049689999906, 0.5728929527415028, 0.9691647555497627, 0.7459129202603499, 0.7441968696867587, 0.587967600124082, 0.5981387848245956, 0.7302061702625109, 0.7670092116832441, 0.8282232802583078, 0.9260691649173141, 0.5704608470512363, 0.8828981671100102, 0.8285353894086851, 0.6752371141436279, 0.7069452263381776, 0.6538969911592452, 0.963741242029586, 0.6266097238908028, 0.7724327193337337, 0.6332091642642583, 0.5393818200303799, 0.8204175713339327, 0.6561739521310066, 0.9127391379226872, 0.859230444508692, 0.7510902091607764, 0.9572991665360684, 0.989044560917625, 0.7307395551279501, 0.7796097869080746, 0.5053404824476303, 0.785468082580528, 0.8965354276764349, 0.5785230460792329, 0.5924894209610301, 0.9935439783676838, 0.702950313477561, 0.7492802775916603, 0.6782285856328405, 0.6636992593684625, 0.6413071793249896, 0.7127315056477913, 0.6717149807885753, 0.6800651661422467, 
0.5314709521717758, 0.619031794911634, 0.9220908209323071, 0.7795408405600301, 0.6663889490307411, 0.9367949677592188, 0.5125105652729556, 0.5791191769993037, 0.8987802213652216, 0.7518977878940583, 0.6477597194408394, 0.8349440847509481, 0.7588366672542189, 0.5216838889210402, 0.677521291477891, 0.7729767891939664, 0.8394243558966216, 0.5495148741254388, 0.7217335015167576, 0.8703058371134405, 0.6813693417295164, 0.5216629732123264, 0.6461322147729914, 0.8274025593586677, 0.5143245952205897, 0.6050128448806283, 0.7182118925448941, 0.5459193914765588, 0.5560811780944153, 0.5858193742696332, 0.8516208374866294, 0.8277106093948581, 0.5600161590818933, 0.9472122648505632, 0.7987881032103992, 0.7233940662856884, 0.6822130324651339, 0.5268912120708252, 0.5905966997695185, 0.5802535316619912, 0.7191134556488648, 0.9080316771932189, 0.6329606744414182, 0.8641359791728109, 0.9765125252438982, 0.9685931065462753, 0.6699639215823138, 0.8802026543538926, 0.9771938891565776, 0.7366587434536338, 0.8276317785122175, 0.9161736512830435, 0.7744885568500546, 0.505867798141662, 0.6242417196997347, 0.9528817689868889, 0.7798561263585949, 0.8258365514215982, 0.6733069965736135, 0.9978511727270735, 0.8706103113360352, 0.8396192707496899, 0.5304695688570411, 0.6516695200733587, 0.6871161616416605, 0.7913124779639764, 0.9201542187611528, 0.504636726120587, 0.6032929816378145, 0.584941294261315, 0.5078536898025123, 0.7422833547583874, 0.8078195495055125, 0.7438986163305175, 0.7596122002548324, 0.8952863826102007, 0.6656976827056534, 0.7882499060363626, 0.5396210312617837, 0.9343864188981543, 0.7729317294476681, 0.7192004960320767, 0.8243590827179699, 0.7809904810472734, 0.6541557734332233, 0.5708882931023527, 0.5845614828656698, 0.8649416085569746, 0.5717825690266664, 0.5192678536132578, 0.9867976049493608, 0.6581012196609466, 0.856269384798682, 0.5097993415440876, 0.558771252628327, 0.6715163805268409, 0.7475823337119814, 0.795761369824602, 0.8078417880140336, 0.6667115273132811, 
0.8847908372233617, 0.6147671282944352, 0.6702903608147446, 0.5633657914382212, 0.5833104277872495, 0.7493863894298907, 0.5473427957445709, 0.7079197058298325, 0.5094684360890881, 0.6494030138418128, 0.8971081867616094, 0.6387942034652503, 0.6102840522606132, 0.5606148759589, 0.8127051742020828, 0.9174846337979778, 0.8952880071161806, 0.8564621003514851, 0.5601157440121107, 0.9611408541605955, 0.810115390186791, 0.7154596055479665, 0.8266540448579287, 0.7105073411854586, 0.9310212882967593, 0.9486382713183564, 0.7200467194737756, 0.8742759464801076, 0.6754115828272063, 0.5031724758972638, 0.7312701989390804, 0.9201451571155081, 0.6840343237932598, 0.8252796833825833, 0.7540479629989906, 0.8060463352061322, 0.5483948252288107, 0.5552825813405071, 0.7616396609117158, 0.845448221417699, 0.5801568365595007, 0.553384195438791, 0.6129424847858133, 0.9658211048910856, 0.7811506286820906, 0.8359402118206284, 0.8453601594405038, 0.5920941360686369, 0.7877184151949626, 0.8740884330391974, 0.5264885217437107, 0.8506129335091841, 0.576270016265632, 0.5369416809265022, 0.9889506820988547, 0.995905910638684, 0.9228971364899929, 0.6089610303845692, 0.8881934480171148, 0.9605867689342578, 0.5445916802734022, 0.8004925249144923, 0.7736821428517642, 0.9674284444555421, 0.6000681164995152, 0.9805666885371555, 0.5045905913433131, 0.5941443579539708, 0.990891295373342, 0.5995310268036143, 0.8245494399670497, 0.9067837765132312, 0.5409548755572682, 0.6793534535902725, 0.9085698705164229, 0.7759448231163337, 0.6089709083079877, 0.671173175070489, 0.9116523919725262, 0.6735922107479115, 0.7522502946362316, 0.8489750575352031, 0.5558785729060405, 0.9638108610710946, 0.6344430516045432, 0.6021271447198198, 0.5616421882812073, 0.6263138867571627, 0.5650168896746738, 0.9548912666099727, 0.9224179432159643, 0.9982396978101751, 0.6624925167818947, 0.7275826857344327, 0.7428468916621833, 0.5113522621249005, 0.7099543077353638, 0.6136789895185205, 0.566176330932205, 0.9044058351706661, 
0.5222989942468068, 0.8691028364899727, 0.6406047504764982, 0.9999638962621761, 0.6086567419249724, 0.8299007577927128, 0.5008941885524376, 0.8264708394235982, 0.9164923073679949, 0.8883675052925453, 0.5726312285056617, 0.5258659090192845, 0.7280041464980949, 0.5419894535729457, 0.5854717612887758, 0.8660783618571757, 0.7660949996614441, 0.5905696803849455, 0.7024906457891481, 0.6382984275581485, 0.6563416428452286, 0.742698328968306, 0.8625016273383006, 0.5790231255312375, 0.7906981315030838, 0.8118233139047113, 0.6069129640598616, 0.7420972399092189, 0.6625821551425948, 0.5511387334031239, 0.5855885566868024, 0.5780197950994483, 0.930914970615489, 0.7769776764434414, 0.5195847460702825, 0.7560905098082302, 0.7810325930949806, 0.6248442056574811, 0.6893606450825218, 0.5400554979142027, 0.6084203764375882, 0.8737176334888201, 0.8388925472795412, 0.6326642722622087, 0.7234750053874823, 0.8669547704578986, 0.6852302002341, 0.8329989070144678, 0.585355950897354, 0.8213552400524653, 0.665553898316559, 0.6742760264666516, 0.5591688271276194, 0.6744201062132077, 0.5296093772485649, 0.6966389245532854, 0.7039997069381282, 0.8672442484056482, 0.8996482073362243, 0.7651940969700124, 0.7566574807660927, 0.8937708738139133, 0.6158672616840297, 0.5858255626715134, 0.5587518788639706, 0.7432256350582742, 0.6566931160184731, 0.5807551700899926, 0.6999807217149822, 0.5560024055517158, 0.5207620491613583, 0.8334856495345682, 0.7039588290850237, 0.5159760498945896, 0.6846647354871318, 0.94121436958637, 0.9566980518608961, 0.8598782459755683, 0.8507336242793094, 0.6420893454957929, 0.6361787704845162, 0.8477081929338466, 0.7197486425216039, 0.7155412770799895, 0.8809080334767825, 0.6305700582155724, 0.6128641525363621, 0.7035717590499397, 0.5759097620503701, 0.6109640740085936, 0.8680808816651573, 0.566432792309644, 0.910678914389057, 0.953958826473933, 0.7417122645877672, 0.7309871729388473, 0.7295136026427798, 0.9572752978335426, 0.9034699550730387, 0.7425787393804987, 
0.8790753768769771, 0.6255213094713524, 0.6817685641750417, 0.7138655137680499, 0.5144059864252973, 0.7648557350508789, 0.6768563836001743, 0.9555211660760419, 0.6402147656786543, 0.5549251876053078, 0.6073427721317193, 0.9469104751506763, 0.6608595284673631, 0.5431021971671928, 0.6589210176599933, 0.6433846933529379, 0.8502145585826172, 0.8358936901340516, 0.7489534329572841, 0.5433855677859045, 0.7990010821736313, 0.5202276717513219, 0.9216190816300708, 0.8213768464550064, 0.7645287316950993, 0.5966791134188189, 0.6139969902412653, 0.5335402734917094, 0.9834212167825132, 0.9790826283187544, 0.7423810552969724, 0.7685113381345016, 0.9422489410360663, 0.5568646064115031, 0.7910744665576259, 0.675715320202859, 0.5701441794778337, 0.7674049344275067, 0.8705887506615175, 0.9812709599077807, 0.7350018111579065, 0.7489180081434544, 0.6661191357571838, 0.5274016744220933, 0.9230994102388583, 0.774936010172619, 0.97987361670938, 0.7941351971148187, 0.8486760534445272, 0.6592755516430379, 0.8702930806498126, 0.662050495436882, 0.9669699203734436, 0.6971918123913624, 0.7816517532797163, 0.9962085818011526, 0.5439704358627122, 0.770441558227003, 0.8264843061876778, 0.6172410539719976, 0.7010154892747635, 0.5098522915300356, 0.8144951935127346, 0.7337270485967694, 0.553769739630537, 0.578387735557482, 0.74284345413514, 0.9036528026950206, 0.6231425574652864, 0.7677806412705706, 0.9140413471298745, 0.6980782128135046, 0.5747797523430984, 0.954184329798105, 0.6068396582204484, 0.7000764185471255, 0.7950976222335535, 0.8118936712473293, 0.6389225837564353, 0.9921694816218594, 0.9195860430833087, 0.9925726127071341, 0.6982078652849104, 0.8562201387620285, 0.7318217380990515, 0.7763606954594824, 0.6635644026145875, 0.5560275348368974, 0.8144072354650138, 0.5784904222150111, 0.6654202646069458, 0.8202756119980634, 0.8699006808518717, 0.8697572924647476, 0.8727584157032271, 0.7851207223127985, 0.767380522025157, 0.6778942317730191, 0.561742018649688, 0.7540547196419923, 
0.756306435656464, 0.9950489579304028, 0.598133180766286, 0.732140931496772, 0.755755642407592, 0.697597231772253, 0.9595690429885345, 0.721455320588205, 0.5886687952111814, 0.8591031598023079, 0.9343196558589785, 0.7723867451669088, 0.8718298429948594, 0.5487819312756759, 0.5175197282688156, 0.540429857258115, 0.7927245190234234, 0.8705746659653346, 0.7169622680223355, 0.6288778840907572, 0.8318545536081161, 0.9271328813380593, 0.9872669826008607, 0.9737561201385017, 0.6979219251730944, 0.548504992360259, 0.8987346768625405, 0.8504145996482984, 0.5886553273204204, 0.5886200087363622, 0.9658561342517473, 0.7817463148723814, 0.9216709710441051, 0.7418147192564939, 0.9797001928220264, 0.7892298719724157, 0.7266085060063657, 0.5512429161408603, 0.5453045684756637, 0.7794074963577964, 0.6788844750806828, 0.9444348302786665, 0.5799743775641748, 0.663395590011838, 0.7125616622227094, 0.7208713529932012, 0.7921407972349217, 0.7420850755745465, 0.9244302415944134, 0.5981866349562603, 0.9054037627758984, 0.6136734538772737, 0.9413777764555471, 0.544687491269382, 0.7390921959339645, 0.9327392704692592, 0.9944081958126352, 0.775968875168214, 0.9929348796991537, 0.5526803986717477, 0.8729718117629128, 0.8725684548472563, 0.5721658575590217, 0.9904184151131059, 0.7973360362696649, 0.5825997213783789, 0.840874856048067, 0.9718692161197253, 0.5634165458866285, 0.8507776256767259, 0.8183647247531189, 0.8935026905039205, 0.9297431626197505, 0.9997518655161526, 0.7566368785712132, 0.9966969547574145, 0.6217697464734366, 0.5988200976620154, 0.9854282967395169, 0.6531219755145137, 0.5254541300711761, 0.684929534722341, 0.5699171760454422, 0.7859511948030944, 0.7229088661180803, 0.9788086012552427, 0.5542711832201961, 0.7518786456318685, 0.7301249909350578, 0.5286314755222858, 0.5328995152103695, 0.9156860921511794, 0.6359632949544634, 0.8979400247721387, 0.6240484275668206, 0.8280349550547123, 0.7897695780637375, 0.9297950879127492, 0.94831461632572, 0.7890474354032722, 
0.9059930776892484, 0.6185128656775127, 0.7427826987921111, 0.6520258967731818, 0.5547015752189834, 0.5107305287443895, 0.5123826471894593, 0.7067358783265882, 0.5160790060356811, 0.9871997095794232, 0.5873747771651854, 0.8715889593028792, 0.8003421831672074, 0.5389060907198611, 0.6223982884236646, 0.6049328918119998, 0.8868953726413653, 0.6935289655004521, 0.5788769553502042, 0.7383768832331861, 0.8914646444669371, 0.5692454464866552, 0.5162187524641388, 0.7489696781199597, 0.8283414383839155, 0.7569821912526336, 0.5859686446538119, 0.5576487325882667, 0.9344223529403728, 0.7059371391258551, 0.7709710537597363, 0.8137673648063715, 0.9502318502201439, 0.56604559022925, 0.7008954382865661, 0.9868361046023131, 0.7977961353525549, 0.582644130574272, 0.7874431347279431, 0.9980584918995759, 0.789310122468776, 0.8407931245525555, 0.7450589217104148, 0.856496841303287, 0.7154242016166859, 0.8319566115246562, 0.6088978777159222, 0.969336674384272, 0.9705392789350298, 0.7169551697467809, 0.8906731667843457, 0.7900737130035047, 0.9315741678111638, 0.5532902419322685, 0.7776793325964888, 0.9544199942698819, 0.9175698103583669, 0.6829769405891046, 0.9753427249884719, 0.877455181497548, 0.945436155873872, 0.8862130736158472, 0.5239998382418403, 0.9951807845553589, 0.9373920663100584, 0.953155583754027, 0.5500753214220481, 0.5429302188383733, 0.6748398075333697, 0.5276516213693832, 0.6267422447447204, 0.9290637028379942, 0.9410184661520131, 0.8974467023840378, 0.5046667412064542, 0.9281950986915797, 0.7839086088974605, 0.757312408193934, 0.5227265463028353, 0.5085361175458687, 0.9552642910403837, 0.6400072724204298, 0.8718148032963335, 0.688454235978393, 0.8201961548826123, 0.5956856484262832, 0.6088169702183419, 0.6230693851788118, 0.7372471386999582, 0.7344219216435369, 0.677404468419645, 0.9436183621479366, 0.8131826761248, 0.5966499538889148, 0.8293835708996893, 0.5528687756498625, 0.8181638766320596, 0.5491017943632179, 0.7162943159925165, 0.9571709326418261, 
0.5566281390871448, 0.7634029415072986, 0.7267322764447188, 0.6800937485751964, 0.6459942134065051, 0.641956936445277, 0.8663002042380833, 0.9903672008160498, 0.6350961008943754, 0.5771002468792548, 0.6857680019781651, 0.671582504366157, 0.7495717879438077, 0.5052361627574906, 0.8918282934798505, 0.6932259436280963, 0.7733762853240962, 0.9464617862801776, 0.6317394871307492, 0.9915741686154645, 0.8173798291149876, 0.930846276830288, 0.7409715603793355, 0.863141241340979, 0.7083574061187335, 0.5870550567798098, 0.9064655419389714, 0.8702100081327866, 0.6178158020568172, 0.6133416189033155, 0.8629402381745959, 0.8371627610519305, 0.8240448342017411, 0.5217667100189181, 0.6967444994467542, 0.9309395731799461, 0.7771708240289832, 0.6386817886871949, 0.8060414636004307, 0.6709024534920768, 0.9478630112861427, 0.9416252122550615, 0.5893388949480625, 0.9297881120288859, 0.8705002102219389, 0.5936359457836178, 0.9827648363114123, 0.7058623931400307, 0.5004674246380886, 0.8364468316590703, 0.6785062952862357, 0.659331355168805, 0.8459222114876253, 0.5391456408951694, 0.9263318159482641, 0.6855885061874156, 0.6999265778882334, 0.732879303348767, 0.9021468979612064, 0.523417617574548, 0.6642788547252736, 0.5460133256460644, 0.9058939735781094, 0.8653454111195856, 0.9136304702867994, 0.7656502854182594, 0.8132355529653167, 0.750881964239325, 0.8112980055421954, 0.9846128486295681, 0.7200793790845998, 0.612076020682681, 0.7695077816139116, 0.5461866185288305, 0.6505660198907106, 0.6057007426539242, 0.6008796717637691, 0.5087036892615666, 0.6482333433922169, 0.7453839337875594, 0.67216967599966, 0.9120431541574102, 0.9819188731504498, 0.592712640552282, 0.8867869318909145, 0.5898619442530972, 0.6559715575536251, 0.9790330635490946, 0.7318194395851083, 0.666148786702437, 0.6154312001316915, 0.6121343161041068, 0.7731716845642279, 0.8139244302365114, 0.845788324764464, 0.7727181251506487, 0.9434019127452962, 0.932116124906694, 0.6544944053974786, 0.8106542559567544, 
0.9940561159106874, 0.7913202799917609, 0.6051945598054909, 0.6915049742501767, 0.6707422160445976, 0.5119733837228657, 0.7023173961484945, 0.9966167081108428, 0.9084701456403417, 0.7301848527519954, 0.8027385143354412, 0.964179001364555, 0.8449856004345195, 0.7435518846247348, 0.5985957605776905, 0.5427311964343167, 0.5213208581446283, 0.7718283886362429, 0.5107101676330407, 0.5863682606623853, 0.5262775105037584, 0.5099264212001844, 0.8096387894912906, 0.6506463553130811, 0.530695167132094, 0.9025801156754409, 0.5085543553220395, 0.7875605140241649, 0.5440091222206505, 0.5343367951947462, 0.5458568979265516, 0.5825812026814492, 0.9762262231096582, 0.8127175572038849, 0.9572465316496404, 0.8018377434260411, 0.8465443692218235, 0.9912223258934127, 0.5898740340722035, 0.8618218402289439, 0.8054568178386758, 0.8754167006687721, 0.8191801984394935, 0.5673753112483875, 0.6636869166295306, 0.7864600787546964, 0.7585968544529738, 0.9496592610818622, 0.6710023010285042, 0.8622178497899734, 0.8451582122111929, 0.7122899693458771, 0.5872263903515376, 0.8128140191149438, 0.8293619152538592, 0.8505607509397198, 0.7555420773413113, 0.8741703336610389, 0.6606455137158631, 0.9047547062376087, 0.8502786808846343, 0.7023652039309038, 0.9538283860765986, 0.7361363907537689, 0.647365722689624, 0.5626962753836757, 0.7404669455141434, 0.8899010177851683, 0.950879768067799, 0.5881523053138948, 0.5286524071446892, 0.9494193275337024, 0.8497543345740302, 0.7162081193825836, 0.8308799725808909, 0.6264084648854031, 0.783720667576471, 0.6239651014903616, 0.5266778154844676, 0.742103934001898, 0.5783037675746934, 0.580377811199722, 0.8959497909036691, 0.9574207270018958, 0.5062743974852837, 0.7064356673830092, 0.9890743335219413, 0.8339115515489721, 0.9077608286623969, 0.5378171611049112, 0.8085896526404055, 0.9155065699104976, 0.8287540477296389, 0.6084204239008288, 0.8430910329060356, 0.7288838954086436, 0.6964494335194742, 0.9684199255276797, 0.974263049281621, 0.5032965652572909, 
0.6796532169493623, 0.8818441667890418, 0.7193361213892565, 0.6580867656990395, 0.7421031429673486, 0.5161400844639802, 0.9807587314573347, 0.9228369259308642, 0.7858428705875412, 0.5793121228065565, 0.6686745445379274, 0.8661662908177137, 0.5042139946494313, 0.7308598244774647, 0.7481727077230486, 0.5700829456503385, 0.7662578480420535, 0.5062432358007942, 0.6612044185661856, 0.9480190161089823, 0.5248777006966632, 0.7564038751398339, 0.7180009449504492, 0.5280339636320365, 0.5049154033553802, 0.5223762471127515, 0.6235389026860633, 0.8074839406227879, 0.5236206338070156, 0.9501033244655243, 0.9166974341246319, 0.9979968536922361, 0.9694511497989351, 0.6577338154527081, 0.733420221320051, 0.911085808167339, 0.6088511329524893, 0.8436063570571029, 0.9860340378088093, 0.8000916384857386, 0.6879469704146447, 0.5518638155769762, 0.6095189135435156, 0.8540756356361728, 0.8012124845236758, 0.5412647071555323, 0.5332288548844843, 0.6964807946539167, 0.7663527391045285, 0.6791806267927301, 0.6899720609541509, 0.8258158324228292, 0.889705109165716, 0.9448683445401949, 0.5330246544356809, 0.523187471928688, 0.9467227892838266, 0.5063658801382236, 0.5618377947696565, 0.7435665126240874, 0.9171033798220989, 0.8668681377000924, 0.6639898529783348, 0.9232576676999282, 0.9451908822507905, 0.8647795478786077, 0.7010414952521387, 0.8522496652523712, 0.7893324603774525, 0.8631777683523909, 0.5274708332407461, 0.6308207413075009, 0.6785751453115113, 0.8737268910725705, 0.7031225075182687, 0.5178435669291306, 0.7586967265524305, 0.6681432455666952, 0.6001424321189532, 0.7900758444544713, 0.772579810303018, 0.8583167698124778, 0.9991204729144947, 0.9058311484857399, 0.785585857100012, 0.9596350202453292, 0.829207093965848, 0.7606933286933126, 0.6607744796560944, 0.7780366704654114, 0.7243791016268555, 0.6630329189362447, 0.797447856337036, 0.9562101626597415, 0.9950306254429521, 0.6909187229153623, 0.5305102919737905, 0.9062634995514816, 0.9684297571941274, 0.542456202767541, 
0.5013956374522406, 0.7027813165721821, 0.6610149756565437, 0.7844324333053139, 0.5068948757124958, 0.6764318047834862, 0.509124210715443, 0.6060818631912813, 0.7727987785279576, 0.9005322653805814, 0.6054511375920903, 0.7183600486928043, 0.7532639336766846, 0.8288972832608685, 0.7991053114934424, 0.6460717606878981, 0.5954982703956494, 0.797482150922149, 0.7893324881424203, 0.7199704332415435, 0.7947732035273407, 0.7756141867538251, 0.9374403123180506, 0.7836935664320515, 0.6892599595168971, 0.7935027876326375, 0.8912409642419916, 0.7467791117905522, 0.513447069671293, 0.7400167510964011, 0.7430355999340136, 0.6648681552944034, 0.7365008461322445, 0.929408732786793, 0.743102143589544, 0.5752932739633276, 0.5387026239642294, 0.5529986732193248, 0.9348943544076387, 0.5847856149105057, 0.764596202364437, 0.813281192874856, 0.6764366402738369, 0.7080147916267207, 0.6555647046469313, 0.5094971669252669, 0.8303947011435187, 0.5926718409002953, 0.8634538438306065, 0.631700375560344, 0.7415738418351976, 0.527448151036773, 0.6138124172609558, 0.7580249037370894, 0.5903859151219251, 0.7785907022599959, 0.7025817415475515, 0.5356134066577787, 0.7405400382174052, 0.8237882469811204, 0.9135196699866063, 0.9227127058441235, 0.5423186587874402, 0.938931275686331, 0.578302718124619, 0.8431050863678207, 0.7201973626378665, 0.7475947469607485, 0.9617197559006339, 0.935124773870109, 0.7046462727699845, 0.9905345410006821, 0.6020677338317568, 0.8325787188024278, 0.8660579856618855, 0.9251235908759732, 0.8140973897014125, 0.7821630707985789, 0.6256109694288028, 0.7194304674208734, 0.6420295204005506, 0.9950369669182579, 0.5730960234000713, 0.6752199087057882, 0.722434355438895, 0.8911607512422028, 0.6986476350055311, 0.526934641908096, 0.915622923572968, 0.5684923625200983, 0.739299422941662, 0.8839456957576175, 0.8232188452740963, 0.8809147786438902, 0.6551312228694844, 0.8346235007872929, 0.8678293108082842, 0.8811041908747075, 0.8097269246901037, 0.556952585209645, 
0.8225162713772683, 0.7808090781481491, 0.8307705835260529, 0.6727531117038972, 0.7136426214775007, 0.8238007199672936, 0.9321718347837872, 0.6076289277863675, 0.978173526452227, 0.5563655590669047, 0.6185687512213348, 0.9482684602009483, 0.976946958477734, 0.9108070873652383, 0.8352500155012024, 0.5130623982267974, 0.7801763582614247, 0.8003629033045574, 0.900234744844689, 0.7172305463566753, 0.9504561327587432, 0.8477789841252792, 0.7425500559595378, 0.5884451641671771, 0.859439384173212, 0.7274063071173862, 0.7079372579598433, 0.6208253240880307, 0.9219616763690401, 0.6851062563075763, 0.625778245593802, 0.6485176300622154, 0.5415634427638609, 0.8629856474985257, 0.9579187636198818, 0.6323474108385296, 0.9422036159564192, 0.9846337988670013, 0.8422040947091338, 0.7906133775318077, 0.7812985155992334, 0.5382677096942758, 0.7231520517909776, 0.927005825827101, 0.698165975632093, 0.9046519680817373, 0.6329640171384997, 0.7131771791372659, 0.5516687292976387, 0.6847723123069245, 0.8234327851151919, 0.926581893072377, 0.5279033994705327, 0.6847709104631574, 0.8914019456308394, 0.5023099877482639, 0.8964212098043833, 0.5445247880677921, 0.8813898307385049, 0.6279758630293667, 0.687399411999598, 0.9306631948432249, 0.7904630380760012, 0.6826491386510247, 0.9999452625957035, 0.6906171622721939, 0.7483332397285016, 0.6992663306674821, 0.9182271782890892, 0.6441658720253727, 0.6197349607321895, 0.6089405638533535, 0.5590959269391933, 0.6840233811555751, 0.8634167567396069, 0.5775629640345031, 0.7832431093882445, 0.5888850341725682, 0.7693280769535162, 0.545053649471704, 0.8155279253647312, 0.9908338478481195, 0.6824989642736443, 0.6099943513287431, 0.937030873324584, 0.5464096331053752, 0.6319861177127181, 0.6624695330652187, 0.6438209305903408, 0.5418411273570553, 0.6219289587508994, 0.8046536436815259, 0.85411806313106, 0.717884497140869, 0.7437030394199814, 0.905181481025213, 0.6376883293260783, 0.7317658689468882, 0.6699899593772414, 0.8110331652886894, 
0.6814058858897345, 0.8762460439884021, 0.9421250190019623, 0.6972391391240665, 0.8684692059141462, 0.8638722556653218, 0.8506112121885805, 0.6161897349007277, 0.5795208853240952, 0.8023234584346195, 0.682422489975136, 0.6987459912900349, 0.6316194084868895, 0.8008922729835422, 0.6391378875410442, 0.9456891243094805, 0.5844106492173343, 0.8081300833115906, 0.5912437738590028, 0.7826574887761839, 0.7083896203948024, 0.812361735537992, 0.9887329664184998, 0.709415691921031, 0.7931497480421082, 0.8432810052694459, 0.6974489175871607, 0.5700171933135946, 0.9051480537682468, 0.9815972390407963, 0.6935902894754422, 0.5936835907093121, 0.9621790943693485, 0.6736258072567722, 0.7460498717768458, 0.6984386200637751, 0.6244499272283864, 0.7234784881714736, 0.5449531565443306, 0.6462030627644535, 0.6436807804631188, 0.5592410093826248, 0.5534538427590218, 0.9741294800015223, 0.7154577709278317, 0.865233347248472, 0.6424675347992923, 0.8867138917108346, 0.9428887283649017, 0.6593080419962039, 0.9128707164056122, 0.7021874324556355, 0.7829099038206966, 0.6817970080679874, 0.5627182237922043, 0.8199655279747192, 0.6188031706586765, 0.8928157924265615, 0.7524285568030584, 0.5539400843751782, 0.5712323780526594, 0.7913035726014457, 0.7885494732366991, 0.9164421023794269, 0.6849948239348143, 0.6618981709898856, 0.8959951319968094, 0.5595001683659165, 0.850735943697977, 0.6419537977935379, 0.6486461563669346, 0.8847729026894345, 0.6360373974915556, 0.5661089609418488, 0.8292362834521492, 0.8595127769795088, 0.6498514451950568, 0.9066375496376333, 0.8274548795808813, 0.6027558984523318, 0.870966731648153, 0.8904695694395122, 0.8822081024860686, 0.5582985053174698, 0.903827000582362, 0.7570766537270583, 0.820025036590202, 0.8304134558081862, 0.8255318451476263, 0.8984194608262897, 0.6641892908296316, 0.5624115157989662, 0.5974086130357328, 0.7640844886735927, 0.763855406209373, 0.911871671578806, 0.719420178387592, 0.8780643770723058, 0.9831180760367884, 0.8095292005502779, 
0.9631776148806275, 0.5823854690724775, 0.6898506416526051, 0.667087313049116, 0.9474236869045132, 0.6767248580087356, 0.7842580315618826, 0.6110896308181384, 0.8246583776078205, 0.790694062171817, 0.5906757804387653, 0.5191590542354969, 0.8570511438432631, 0.7292086362771015, 0.5388660722543801, 0.5532835321194154, 0.5602260754073451, 0.7566407389528407, 0.9285366879983619, 0.7625209243809374, 0.870315144187709, 0.6006457241110744, 0.6683367799371698, 0.929115465269101, 0.7822982060756175, 0.8677464137073032, 0.7041909078850701, 0.9082406470593876, 0.6733816437664077, 0.7498250546917955, 0.5031211808751004, 0.8126748208066557, 0.575146851253114, 0.6378892037926742, 0.8861764646260932, 0.7368066267909135, 0.8653774327366743, 0.6449583607764507, 0.7833639354767062, 0.607798957680818, 0.9296321868373932, 0.5534689845542275, 0.5947332549153499, 0.9544291974562324, 0.8827581139441881, 0.5110324973269478, 0.8563563095583379, 0.9071393060542947, 0.983205871879089, 0.7808475031386465, 0.541823862880555, 0.787692142065263, 0.928024542859823, 0.6833637660084826, 0.9051045100934837, 0.8269257746323089, 0.9632361155278159, 0.8391572686300126, 0.8478620002343342, 0.5667229895515555, 0.9235672688253626, 0.5660228267083829, 0.8867440076246689, 0.9604958656318834, 0.720086291457054, 0.9163903859783578, 0.7184250351067835, 0.9452881033299697, 0.7930253179240495, 0.9919601765903948, 0.5635455558797542, 0.6833558507393956, 0.8939870533281535, 0.5559351872496681, 0.5901124892001779, 0.7894310502623622, 0.9954883077999146, 0.5419732573401446, 0.8812578547668756, 0.8869936540390354, 0.7601919090076369, 0.6167998803307115, 0.5473335592065931, 0.9584042255017253, 0.6641805095930514, 0.7918454873563452, 0.7097258280302767, 0.5950250411932116, 0.6963540648198678, 0.8214077421639374, 0.9268588289769741, 0.9674591384037634, 0.9089830885157808, 0.5635471606154849, 0.8323009467657474, 0.9111360551456011, 0.9633567638280351, 0.9504260577762278, 0.9971707231648652, 0.6892221744348523, 
0.8749622900411709, 0.7401975824251112, 0.8828778110066671, 0.5330689240468907, 0.9813974896135457, 0.937800272642374, 0.9550396499890136, 0.989887845466565, 0.6602682556364838, 0.5793352438753606, 0.9612596306843622, 0.5941039802528087, 0.5347565173017679, 0.7339232698468336, 0.6034719775995567, 0.9678002054464576, 0.6094155725938877, 0.8757728180211145, 0.8986644075215011, 0.9964237465715663, 0.8834850831326431, 0.6989393102912317, 0.9369691930108796, 0.8803795197232258, 0.6890791922609545, 0.8304238825141343, 0.8283871862073534, 0.6069074368475251, 0.5042718933732288, 0.874881564770181, 0.9930980836484282, 0.9396903652041853, 0.9976834144049032, 0.9916621966692621, 0.6824177017424251, 0.6185273236873059, 0.9492536996869148, 0.8090580427960965, 0.5882582629867509, 0.6662635232318606, 0.8123573940983425, 0.7211661790067311, 0.8033471268847697, 0.9347699961858271, 0.6141039594532915, 0.6690006230374781, 0.9168976110713629, 0.909917603760563, 0.9566498312523335, 0.7814995805480387, 0.6185788579489839, 0.7810700274710775, 0.9658290720274084, 0.6389062067736371, 0.6311493094485516, 0.9085436597719905, 0.8317191343674057, 0.5827164587402915, 0.7281115436684225, 0.5578029582254362, 0.7481954538411857, 0.5603283241339125, 0.7773146235591035, 0.89764552491498, 0.7177524277839147, 0.5232959109067884, 0.8785580302108122, 0.7487271856984389, 0.5551598735970031, 0.6880256474770938, 0.7226131462915846, 0.9748642061896929, 0.5308553938320528, 0.6278322927922046, 0.5889962486365423, 0.9489454836864955, 0.9052400645025782, 0.9177007175000185, 0.5336608745160745, 0.8926068340628924, 0.5264125722019598, 0.7643559529420353, 0.8948331964145936, 0.6189399982998951, 0.9803992428720437, 0.794764322343286, 0.8389209684733205, 0.7831581005968328, 0.9137141932240451, 0.5733079982374567, 0.764369685092105, 0.922427304908964, 0.9386782167301486, 0.56166289482561, 0.6741284010512363, 0.8516557299539492, 0.7413985936693184, 0.8941546529541589, 0.9760914582797859, 0.9913554156798294, 
0.9790321350528746, 0.7760494058050345, 0.9138021201738428, 0.5714595081206544, 0.9432359568344238, 0.9894719348834349, 0.7689746058166502, 0.7075574178416477, 0.7986718886660286, 0.6210559012116539, 0.5530190270019291, 0.7191343297811448, 0.8778433153774376, 0.5505485201549929, 0.7121988224592851, 0.5869273135070863, 0.8883141214293889, 0.9082703975123476, 0.8008183588689325, 0.8233184446584468, 0.8887252250485669, 0.5182019836488704, 0.6759597361815535, 0.7025404322933956, 0.5244607378255848, 0.7482285869170124, 0.7561332987177299, 0.6279703657083944, 0.9346770127160512, 0.6769013180533838, 0.5708513313947627, 0.8170190948240923, 0.7550830184478037, 0.6575681025635767, 0.839100656607624, 0.5973482297686862, 0.52490154811497, 0.5285118866285883, 0.9720034683804072, 0.7990417705826873, 0.5538223444203417, 0.8920554201470106, 0.8579497966641154, 0.6227632032627877, 0.6832892395134481, 0.9303614757244842, 0.7174538143055389, 0.7377284419245265, 0.5673916465554394, 0.7787800390013122, 0.5470647686277865, 0.5252973508210435, 0.7753804469798946, 0.803789463373563, 0.6480238580827269, 0.598473302228038, 0.738323501024617, 0.975202630017882, 0.7897916806398986, 0.6198515241353462, 0.9727023448681946, 0.5703324018924077, 0.6857607721693992, 0.705881827670054, 0.9114699846855032, 0.8203671026920278, 0.5962748531539066, 0.9350379905528302, 0.9443253701722936, 0.8122504917989792, 0.8183819516386279, 0.6298577062038122, 0.5568061870376015, 0.716461074498496, 0.5942876825396248, 0.53524554296635, 0.6274444373994248, 0.9375013930480551, 0.8243056006001276, 0.7494323214029268, 0.795052679387855, 0.8156265863355154, 0.5169420185555875, 0.8217750192397972, 0.9562363351393381, 0.7848110047254611, 0.6542061882173233, 0.8827829026756696, 0.9119916779439168, 0.940833509201703, 0.9761467713418084, 0.510276406365012, 0.5482997574546107, 0.819595248720465, 0.537018586781913, 0.7424126040948926, 0.9456767339686969, 0.5208277674743182, 0.5759212463088996, 0.9487802880023768, 
0.7039771278795187, 0.7229271552739895, 0.6334351421790536, 0.7612789773836135, 0.8393420230377456, 0.9519929703840919, 0.9119554094107654, 0.8741039011176726, 0.7346091985662366, 0.5546946548046046, 0.6694811499848641, 0.5548986862940739, 0.8070612313548778, 0.512038167556698, 0.6600174786057402, 0.9825434116902625, 0.748298441167299, 0.8843825547535398, 0.6136370639122226, 0.6272363102239724, 0.9808262612731685, 0.7261770194371997, 0.5129965125928413, 0.6522245127657433, 0.5332918229325918, 0.9250878292373019, 0.6737457976630284, 0.916349171291963, 0.5147544622634134, 0.7148286988195396, 0.5581486973956054, 0.6418883668506163, 0.8265318853083734, 0.604630506814558, 0.7490923311679105, 0.9323537909204127, 0.6639948177223299, 0.6385521840069479, 0.9710310858709015, 0.9351154623248409, 0.8648765717588821, 0.6465843991116359, 0.8608376556903408, 0.5489151576490092, 0.8828378853992773, 0.8499751625902094, 0.9196551206463954, 0.769966193341926, 0.9595166787920661, 0.7947530410036192, 0.5983740796415209, 0.995334413222436, 0.6247248030794774, 0.7679869496576972, 0.865528432177616, 0.6005541099028064, 0.998508272512023, 0.5985832374825104, 0.9884254221305551, 0.667570841685164, 0.9919014484941333, 0.7325973624197768, 0.7569059080880322, 0.9374108075712257, 0.8153577304685886, 0.6392366147549873, 0.9444765954401427, 0.6533708650881205, 0.9590909669316695, 0.8071336089654098, 0.5094451049486063, 0.8223383802547928, 0.5177110832814797, 0.7239497684358487, 0.9175950650149987, 0.9434025102449579, 0.8903905823468993, 0.6928333171577858, 0.9742734360694918, 0.9867521706827327, 0.6866235540972349, 0.7175797640767081, 0.5862610975533384, 0.6589536637296737, 0.962635761015789, 0.6706657041454611, 0.5591576999620039, 0.8195600231565325, 0.5066777643899201, 0.7370754893931711, 0.7729647030339319, 0.9861254342481719, 0.6773342809372254, 0.7869420844718428, 0.9767155889788706, 0.7468562222139745, 0.8720736997605765, 0.7114238682896715, 0.5867952643898958, 0.7479901545525189, 
0.745610061153996, 0.7591423527976298, 0.9980116331970934, 0.5863766666403107, 0.9032327165024643, 0.6704642203323161, 0.6135556514009155, 0.5575774418674295, 0.7926687865674387, 0.7547550575404812, 0.8037251512217296, 0.9843673726279532, 0.9716320865701586, 0.582717592709689, 0.5971303401883402, 0.8007563950481854, 0.7888516775276819, 0.9481422613366952, 0.9953238047752986, 0.9832623961737361, 0.7219799132434106, 0.8455872580620389, 0.5396499360459502, 0.830864177559174, 0.6908138819882927, 0.8987493684192644, 0.7076421621743866, 0.9661528469500238, 0.9312041828549229, 0.6413671002154083, 0.9724654144202969, 0.6240667456004841, 0.5689660047379268, 0.6049377353384531, 0.7807198997897379, 0.6489079078057904, 0.5851044683046951, 0.6885824750156613, 0.6039487440639157, 0.5581824861987503, 0.6456180501855515, 0.6158196774302943, 0.6274916215732927, 0.8348924287391706, 0.5441106572020864, 0.894715309392571, 0.8305970287354342, 0.9637392399961159, 0.9190942844438741, 0.5016427454144248, 0.5144831030216919, 0.8131114005593121, 0.9235623808057387, 0.7433570510214069, 0.6788725104334902, 0.7897516022310271, 0.5591477460779268, 0.6374299059386337, 0.9851708171764078, 0.6958858733753279, 0.9506119775748911, 0.513408319021233, 0.9600792348777095, 0.9156421620139505, 0.7704056651597084, 0.7976727497407938, 0.5605227718811078, 0.85460033139371, 0.8046598014039189, 0.6275195764182605, 0.8347207519213757, 0.7822643274074341, 0.5494853556067538, 0.8738579123535186, 0.9695555828488676, 0.8177476126986842, 0.5708728885530661, 0.9094723042388146, 0.5054529223538989, 0.5850958733737972, 0.9838063305246768, 0.5719445053050195, 0.6994726015708418, 0.8502696469341248, 0.7428894259622743, 0.6826896677414226, 0.6027978338710778, 0.7800765005289056, 0.9765381668566029, 0.564854911797127, 0.8752874163439239, 0.6905284723514119, 0.6052652823632128, 0.6848586050324097, 0.9888460476207576, 0.6230893611974677, 0.912382456290699, 0.5712182566088918, 0.5849434254286188, 0.7414472625346852, 
0.7669127394857238, 0.5044320275556798, 0.8962048770923021, 0.5993120061370527, 0.8614293534385289, 0.6658074171788023, 0.6940460442988545, 0.880751151565772, 0.7038726153451635, 0.8268573820726914, 0.9053628299807599, 0.8070574841760016, 0.8626528716928084, 0.5503410673404576, 0.8863535048138043, 0.8477611325650576, 0.772860133356615, 0.8136988274848229, 0.592896415600819, 0.5123098637893996, 0.963401058773848, 0.9319427506727758, 0.6956187042889097, 0.8434103881842847, 0.5220350562770247, 0.8009722151446066, 0.7662007735098311, 0.9262252944295426, 0.5278996911115839, 0.8407894762476906, 0.8844670228090393, 0.8136158807439549, 0.9807800358126109, 0.837110953261153, 0.5154474480920758, 0.952406893292888, 0.6698458291384572, 0.5408854699303349, 0.890081430179045, 0.9440424281254276, 0.5962738492181906, 0.8973035159181093, 0.9062553101845335, 0.9768469608130562, 0.9460822883473345, 0.6212213051448545, 0.8984890894148366, 0.5388684212609081, 0.87959633774549, 0.981890170002143, 0.6104089615748671, 0.6454584248994808, 0.8690959700722654, 0.8957998519622132, 0.8099108175749712, 0.8762051317716515, 0.7758419832206142, 0.7197542243049246, 0.9535408443157516, 0.7417841174855897, 0.8777900794909659, 0.9898081085644618, 0.5938629243032616, 0.7092309892503118, 0.6199333908313642, 0.6861806076875078, 0.6749749804826768, 0.70155085626105, 0.6874591588766809, 0.722193783005282, 0.5932780631854639, 0.7793315557161268, 0.5261053995243774, 0.527170872978121, 0.946964708980004, 0.6535994526831264, 0.9252854423144676, 0.7035979482773671, 0.6571328418390535, 0.820014118258533, 0.5204791927533091, 0.7708648981333386, 0.8674658206695594, 0.6772478243950472, 0.8456156424242888, 0.6415760694812144, 0.6202574071341787, 0.5866037938667894, 0.6313115618145384, 0.5076518548903715, 0.8114506757453084, 0.7949656356491281, 0.6547089947386857, 0.8299196838884373, 0.8733323945989682, 0.7673443877445063, 0.825239658492004, 0.5383727546109098, 0.8050521998876325, 0.693871784135162, 
0.790150321638138, 0.9671155474041429, 0.8345385625881119, 0.6390527760803983, 0.7196780196104722, 0.7297108404327401, 0.8223117239992879, 0.528106986167767, 0.6446182757243938, 0.5165324551297897, 0.6509791326293405, 0.7853669098369815, 0.9679597920857614, 0.9824172410721108, 0.7796542501574901, 0.7941097843890841, 0.5156948763900502, 0.7702891912684908, 0.5224639325317206, 0.5192254517083215, 0.9025875657577043, 0.7054778435741672, 0.6625312882702389, 0.8683453614331207, 0.6615061889572023, 0.6603044004518763, 0.7670858612413115, 0.741160477429066, 0.684320526340791, 0.5575127319239829, 0.8133989676585165, 0.5574323321901469, 0.7663061563581854, 0.9409797529459657, 0.7872306302857395, 0.799802740421403, 0.7858312375531022, 0.7444353176363804, 0.6703003238770929, 0.8606905197151649, 0.8440574790646648, 0.724360366762067, 0.7394832122322267, 0.7090461163394524, 0.7496717730216617, 0.6316884383157633, 0.5039833107046263, 0.9450922349533897, 0.610062832784547, 0.5387940337092456, 0.6111969582693622, 0.5055428578878255, 0.9027872268804821, 0.9214188233498157, 0.5309052871369206, 0.5844678153836728, 0.7830885206614093, 0.5974423205505255, 0.5948451493659399, 0.662791421425807, 0.7968092477987587, 0.5936390549630179, 0.8515458100812232, 0.8680827661127255, 0.7932305245109221, 0.5856393833207925, 0.7500465687373512, 0.5881770580874911, 0.6044073553604223, 0.9523191833734457, 0.6225366060908297, 0.8678223764538484, 0.7871588926823976, 0.7999148363228792, 0.9749887493097211, 0.7053317194838641, 0.6344310414983578, 0.7324744771609336, 0.5659629337798708, 0.698431396109491, 0.9033963509075388, 0.5278950159249328, 0.7473089235199109, 0.8490527089023755, 0.6944203887232383, 0.5882472123511306, 0.6860800718524298, 0.6387911133466985, 0.6012298887741834, 0.7805024508582848, 0.9978915332501517, 0.7861963094081712, 0.9812474806306246, 0.9809376323388178, 0.6559770188200376, 0.6092191913769209, 0.5561220239969688, 0.9005971759804936, 0.6114506497803747, 0.7985914537919179, 
0.6923451013806736, 0.7563653708065893, 0.7839784542222588, 0.8237486115443838, 0.648486271491687, 0.6596716651516512, 0.7148940242227969, 0.6453674331741157, 0.9784583716750441, 0.5980227288069259, 0.7485293376807676, 0.6761415092914707, 0.5514351340546952, 0.6209765916795988, 0.8441069373866432, 0.6901091822620243, 0.9024910870746738, 0.945710555481172, 0.9466459401095727, 0.6083913592362316, 0.6840524310856135, 0.5762033746123907, 0.9298961081823907, 0.7750168073162189, 0.5756845534710777, 0.6588004476343264, 0.9793058676608556, 0.76393660283477, 0.9079309409983044, 0.8914774648177928, 0.9664402213809136, 0.6051447739145932, 0.578133363834606, 0.877686817079703, 0.8312341706754509, 0.7094768456189575, 0.6653451110187316, 0.9251910038271268, 0.6233691979301765, 0.7563792024437674, 0.965869686612099, 0.5443736128093062, 0.5073109066723114, 0.9347044690784695, 0.6675689619510556, 0.9857494920491037, 0.7775083776016746, 0.9027519503551656, 0.5207379002086867, 0.725747874354862, 0.8905987738766159, 0.810107477528313, 0.6470813311433794, 0.5194327260229452, 0.689619761961218, 0.9126258159053682, 0.7949076969983372, 0.5608959792275199, 0.841981076932728, 0.8420994560898086, 0.7837619434980692, 0.8699305866547173, 0.505405542651282, 0.6930945016652467, 0.5700536975150028, 0.9041260540477529, 0.751923230439177, 0.7068263068532775, 0.9680622911788768, 0.7372092668996582, 0.9015323672981829, 0.5704829268942331, 0.6107583548361912, 0.6980067747702041, 0.9456155735311171, 0.6432826493933339, 0.6976459653375731, 0.7228300289984193, 0.8414555395059073, 0.7371788405055727, 0.8356300843687037, 0.8765181394543748, 0.8465709494438027, 0.9954526266145027, 0.6220581190266767, 0.8589713961590356, 0.6455874216149693, 0.5564142502284442, 0.6107029226710115, 0.8257671418626729, 0.7315280819186978, 0.8625522121012725, 0.9859049534471638, 0.7557558532587713, 0.9339832860855786, 0.6692369724383893, 0.6683005046703899, 0.6850307485405394, 0.7813189494349133, 0.9867910425719009, 
0.5503952321686082, 0.9155230999850068, 0.6371287507546585, 0.5567882464697098, 0.5447148068061822, 0.8190305552911765, 0.7901332718434291, 0.6046373499308118, 0.8746206550771225, 0.6865182564623735, 0.6534072187009643, 0.5629690352926577, 0.5958539693978888, 0.6326708898415155, 0.8584662772902594, 0.9214811686256287, 0.8773049870380747, 0.5333108821600224, 0.5852629299998672, 0.6434319310601955, 0.5960821763681785, 0.8988886386335222, 0.8043395009744014, 0.8631315233484782, 0.7465082133311616, 0.8129631282159757, 0.5133565595095768, 0.6957591266384959, 0.9492992812542964, 0.939578520472076, 0.9529090539091443, 0.8177521142278743, 0.960662890952337, 0.6149329331434714, 0.5459826016112364, 0.5748584816317996, 0.7401767543056044, 0.5534610521102783, 0.7527271959780862, 0.6343273019643734, 0.743192050402436, 0.9883986542463791, 0.9524490624902718, 0.7176272799238748, 0.5060353333580465, 0.9449673807786894, 0.6647259599629014, 0.5783045856991826, 0.9624497466786768, 0.6342406088266015, 0.5969861420756546, 0.5488953715633098, 0.5087529534564489, 0.8990078634803487, 0.8379652660457757, 0.6720411029069739, 0.9187910035306173, 0.5090907563571299, 0.7674626224131831, 0.7050732586677767, 0.8395259102101702, 0.9910482646192444, 0.9047715349277596, 0.8171338862220897, 0.5407596600225018, 0.966304393923187, 0.8189184859280818, 0.6062372609276672, 0.891150620190476, 0.754091002044613, 0.6874115245376372, 0.9680866679407073, 0.7665275467626154, 0.6468906272999635, 0.9889685878460008, 0.7499377992818319, 0.9265723468982825, 0.595851589249215, 0.7263375291416361, 0.6596460537107505, 0.8018437430853087, 0.7823258324919822, 0.5078050448397449, 0.919694537194782, 0.6280772372178982, 0.9109915352571805, 0.7028920427397833, 0.8820795170352813, 0.859285861920684, 0.5062074000009835, 0.7232103971626966, 0.704222482344265, 0.8366451991400203, 0.7043883739235066, 0.538535716405364, 0.6169643918635852, 0.9075756131442001, 0.5549909892886931, 0.7522799564357905, 0.7679477398748977, 
0.9550222736609467, 0.8892477913444338, 0.9177966755250088, 0.5421447152691174, 0.9588276568743854, 0.9394600047005173, 0.8220867582731144, 0.6191264059849204, 0.60320019826186, 0.6246685229652142, 0.826824720560491, 0.5477420383053443, 0.6689459502312625, 0.700489796052675, 0.6830944411377629, 0.9886090829230592, 0.5722621879882939, 0.9694060888294513, 0.822433124185574, 0.7044246774840524, 0.687058994957638, 0.884197721970515, 0.5397844859331613, 0.525890324136894, 0.9550868515559202, 0.6818209639415758, 0.6419109075739459, 0.6202969610347184, 0.5404418232394006, 0.8685547847904198, 0.7163211062228048, 0.5657448453305479, 0.8283191691877987, 0.6752995567803959, 0.8321080227589708, 0.7912151234169886, 0.6746075610247682, 0.9901614696191849, 0.5836490231459364, 0.8165756605338481, 0.7177047781602113, 0.5874066587787605, 0.6659407742997443, 0.6286495321059241, 0.9230631231337845, 0.5088931090052156, 0.9288270882228518, 0.9700570401689099, 0.8932195260379787, 0.6636138614676956, 0.6792991176362925, 0.7654488577122858, 0.9113309421179525, 0.8278994894280929, 0.7044883580506986, 0.9847648944944184, 0.8917782208587608, 0.5028125258216587, 0.7587315971908744, 0.8876301876278297, 0.720518973597809, 0.5894305544420884, 0.8047216382297624, 0.964229498291366, 0.8250342935691733, 0.5645338145239858, 0.690242817222331, 0.5888718802804452, 0.6222650676863535, 0.8861178404662651, 0.7768411057009335, 0.8466020298983852, 0.9281431413727381, 0.7473384369264363, 0.9346413557994653, 0.6564049782377739, 0.7488503344295097, 0.8167050417131545, 0.5602432133171374, 0.8304660625129281, 0.9474056475920469, 0.7568689786018029, 0.5657150837011993, 0.6500278000705799, 0.8873175474622383, 0.9170185649316052, 0.9669067249579808, 0.9968378934960915, 0.685132828639246, 0.889085078340348, 0.8391861798305398, 0.9557766901194606, 0.8900462605227575, 0.9144852619271934, 0.8520716828711437, 0.7464227931912865, 0.7001100396355224, 0.760791622080246, 0.966247984787129, 0.8591376273719322, 
0.6403613295624015, 0.8244465063744686, 0.8308469432587156, 0.7085095061990113, 0.6728417563309455, 0.7221674085679752, 0.6952198041687603, 0.9648841715979524, 0.9195476876194434, 0.549647355002929, 0.5029583248258827, 0.6604706090332493, 0.9986506976671685, 0.6364681283888003, 0.8371382248640302, 0.5618474772994698, 0.5718789929252271, 0.8951088048394519, 0.9237520506152026, 0.9242688171356082, 0.5777086668664515, 0.5163837926565245, 0.7738016043220457, 0.5301177689913029, 0.6270277866460948, 0.6732041806609235, 0.964470260249753, 0.8633151023273118, 0.976689414024152, 0.9380279321253322, 0.9687023728992579, 0.9981999327882047, 0.8313803185990843, 0.5435430752836788, 0.7699424750117423, 0.5337737693097313, 0.8776075524742454, 0.801248640365464, 0.7882263540221566, 0.7976888313431373, 0.6283284815457992, 0.7981696481150418, 0.8548808879007092, 0.7189474574758452, 0.6091056398973735, 0.8301564913658288, 0.8302103944385775, 0.7923165104875916, 0.6434724838772745, 0.9791388180212339, 0.886048166609346, 0.7494783665395961, 0.5372357218894965, 0.5604638430973171, 0.8875468166574859, 0.522274417212603, 0.6524781791128219, 0.8494438981649473, 0.9563794081295218, 0.7793080776751732, 0.6226376279800832, 0.6499647188975537, 0.9622780815638269, 0.6739950316794051, 0.5830503278051167, 0.9909384163906116, 0.9229574655000586, 0.8054957117964279, 0.8608790360648926, 0.7991250297747969, 0.6165930561587145, 0.9888462481714551, 0.7945958692961449, 0.5104918444845319, 0.752170056003338, 0.6451403172331823, 0.8016384397500439, 0.6427839459525895, 0.9329770798718509, 0.7115456583146487, 0.9799311201200058, 0.84294547185146, 0.9569046384035856, 0.8195571870282377, 0.8438428997638391, 0.7294674631855594, 0.9923149576991636, 0.6449055342365111, 0.6359822546487766, 0.6565602281089213, 0.6936070516014479, 0.5254073296032947, 0.6236960346923517, 0.6455597587203782, 0.9426439708622492, 0.5995835796678165, 0.628208360078927, 0.6056562856413128, 0.8902026014541716, 0.8120766384123927, 
0.8082944388928102, 0.576483395195688, 0.5141598664888682, 0.5621912204823203, 0.8290283168950268, 0.9680359912995997, 0.991184060321119, 0.5265172969824787, 0.6702792579120442, 0.9130995904382044, 0.701465872419702, 0.7571831440192661, 0.9767025191689747, 0.8872698257927935, 0.5185427778463663, 0.9991919792361301, 0.8683155876516152, 0.6816748755261264, 0.6614242600269429, 0.7723616011517824, 0.6650401037748832, 0.8417333992182424, 0.581271660855109, 0.7651535508109453, 0.7955689930378556, 0.6102312206498288, 0.7165647253545397, 0.9710788648696189, 0.8554902423650752, 0.8165469767199002, 0.8423265034780636, 0.9174888316756652, 0.6015218586806234, 0.5258050288293181, 0.5979659292935091, 0.907996363153721, 0.7651569215015597, 0.5148805041864857, 0.5665569938731914, 0.6737584960648161, 0.5539573285527013, 0.5046239722235524, 0.9044014056390547, 0.7457475786008743, 0.9350892429081559, 0.8646500134683862, 0.768025806359659, 0.7602333798765681, 0.8521832804839891, 0.617921448233006, 0.8505155006997371, 0.8948143232155232, 0.8155875301421517, 0.5959330292360399, 0.6479649193153777, 0.5287258525840279, 0.9130561459899074, 0.5080109231467653, 0.5038102361438119, 0.6159425276468844, 0.7245418772092773, 0.8292154941991707, 0.7217036683891297, 0.5381433680932408, 0.9589708784327151, 0.5842106793756365, 0.5583215838596737, 0.6406115912514614, 0.5158419601729264, 0.8571715095653699, 0.8624059021159243, 0.5982134751913961, 0.5640917335041823, 0.6069884150868534, 0.6129514130700521, 0.9711141369196864, 0.7694321035587373, 0.8947775848923183, 0.5364833541733806, 0.537254875088343, 0.9907842238761584, 0.9154885586586246, 0.9917301569376393, 0.6818228130335979, 0.5573134868249625, 0.9637548355837025, 0.8729569637477222, 0.6969828467279838, 0.9336814347500525, 0.6731479269003973, 0.726335197550745, 0.8741822198333687, 0.6851197566820009, 0.7708640276377672, 0.9969089771547412, 0.8356900641859437, 0.8131941569517843, 0.6209586182782544, 0.9350638798713068, 0.8414879088722093, 
0.5231827369733766, 0.815637438677355, 0.8825787831534658, 0.6401049832413874, 0.7724156119083736, 0.7860780943497805, 0.8460261978985318, 0.5405579110159194, 0.8692922848585638, 0.9388772240327381, 0.7673956466329798, 0.7215387696546396, 0.5897402663974423, 0.7122117621569706, 0.524711780761324, 0.6430890362748214, 0.8376461655296241, 0.8725517560553371, 0.7355345131211943, 0.9729313380546838, 0.6155183318390304, 0.7570880386036924, 0.559207334647746, 0.5493733042318032, 0.9913070756371591, 0.5038017353118773, 0.6975377462593072, 0.5204144279134195, 0.6839792423957542, 0.8171959775934675, 0.6846512193540126, 0.8245415404846141, 0.9253710460151601, 0.859710563450203, 0.8606908259471201, 0.7958713072934969, 0.5398786452591486, 0.7967053259427042, 0.8043306155312099, 0.9088934683221475, 0.6189399100761199, 0.9829670500769315, 0.9241275640442868, 0.766027761758076, 0.9006136559854094, 0.856298762439698, 0.9031670571522328, 0.7445183255546143, 0.9854305555636853, 0.7436526892680521, 0.8109720776395017, 0.5463933833455554, 0.7294192189564201, 0.6217811982606777, 0.5033030699422887, 0.7516109905640331, 0.7789017243226075, 0.9979510131191651, 0.9047157112297082, 0.6812041551992241, 0.7941495908911008, 0.8896530683246585, 0.9556079266097075, 0.9954991521442542, 0.8608608953316448, 0.9821841886452369, 0.7667348601668507, 0.9284655167162871, 0.6654524112179078, 0.7589625028688864, 0.7770424902283924, 0.9080740218051744, 0.7678943810352526, 0.9251704062728019, 0.8821007636331423, 0.829571629069423, 0.5928199267691109, 0.5536543864988481, 0.764053673424593, 0.7175594469065913, 0.9854492691348501, 0.695766851561757, 0.6235547249393035, 0.5937355292260879, 0.6039554377100558, 0.8227801431071882, 0.5034894618961586, 0.5023728374789327, 0.6410547607942583, 0.759198272808225, 0.8178520079093508, 0.9770707233032832, 0.863119221220069, 0.8186526142130977, 0.8119811083664223, 0.7240033945974819, 0.5811145513655792, 0.811087137046126, 0.7285625410676941, 0.5700142675844041, 
0.8843508282746113, 0.7763237708898778, 0.7036109846902916, 0.6212409754043421, 0.5919318229590185, 0.9746389515723032, 0.9041945788543153, 0.7867375857300155, 0.7191929286488987, 0.9057908119488471, 0.5439121453205171, 0.9360918762348331, 0.9444838032513845, 0.5414439745374948, 0.6250955079505139, 0.7708779291139141, 0.9329249065935243, 0.526826429772737, 0.9625811417429744, 0.9105125958926505, 0.5745946970360315, 0.8105766529958028, 0.9034233742176413, 0.6512340820266764, 0.8289118714816629, 0.5132026114167766, 0.9839861417164342, 0.7610915085388112, 0.542481156514804, 0.7428611198438744, 0.751977993605593, 0.7141652857329663, 0.7386521097501781, 0.9122343698149528, 0.7313173869657337, 0.5824336699151791, 0.9154733597240479, 0.7036875979875906, 0.6797892475719953, 0.809856701387482, 0.8025130700126737, 0.8583428352531792, 0.9368368999045835, 0.6370331903485782, 0.7409663257299841, 0.8432487777178637, 0.7854154686148724, 0.5075542991042616, 0.5010461678112081, 0.6915997746431641, 0.7618124977463983, 0.9213778067003572, 0.5859307143318153, 0.9485312348362845, 0.7259196400956403, 0.9654278201838141, 0.9198337047612068, 0.798315144297858, 0.5886921460127704, 0.8394727602606584, 0.7900529350233687, 0.709349395221804, 0.612208334094265, 0.5052554892156764, 0.8246831716423764, 0.5431941204086466, 0.8292489615149973, 0.5041272099190738, 0.9712516081000173, 0.7383550071328957, 0.7231548757211612, 0.8249380937465876, 0.748829833661411, 0.6607357852110103, 0.5133354028257245, 0.9724050116558659, 0.9372947724518432, 0.7741576273673098, 0.6320637175510366, 0.5559259639658614, 0.8052421844606892, 0.6408672422542894, 0.563854694177484, 0.5987712661927812, 0.9671748741372802, 0.5130852509683612, 0.7821728459084527, 0.5379121246846426, 0.5210759459057516, 0.6296531703872283, 0.8849822344122216, 0.9016138310836042, 0.9363979320588766, 0.7300698485274305, 0.644769662114963, 0.9115113001611215, 0.927634260405632, 0.9067985599416852, 0.7743693578895099, 0.94118442379976, 
0.7488240518751487, 0.6806669027847829, 0.8007390491267787, 0.9837697877112902, 0.5871701092692849, 0.9053625464710147, 0.5996722661548233, 0.5311924428664454, 0.8383195957001361, 0.7601458735311374, 0.5194601556931985, 0.6159926046992705, 0.5643928693116387, 0.717622153181485, 0.9414565788070259, 0.509359299428601, 0.9159594871361636, 0.6709034841769879, 0.5487947893898455, 0.5220571543626504, 0.9721026867046936, 0.7018731985849558, 0.6531016339701496, 0.5228161090222498, 0.669885708907058, 0.6598635135581082, 0.6660720425030038, 0.9439281786500697, 0.8349024624359194, 0.9190180190832482, 0.9453543481896801, 0.8352266132167678, 0.9113429016800408, 0.5410307381671615, 0.801591858514731, 0.8514521653564139, 0.6273742241486819, 0.8609824681824154, 0.9089594017784479, 0.8971486432425554, 0.6581217379463642, 0.6880773703380934, 0.970443047406129, 0.6895933218937774, 0.6119874335330988, 0.7640217471232325, 0.937300443758768, 0.9063328919357494, 0.8814883411574135, 0.7174517567289626, 0.819164200538619, 0.8408079581980816, 0.9330340139358866, 0.9832869183837234, 0.7526390982630653, 0.5904506711739095, 0.8466190615513784, 0.6070621452287174, 0.8408255371093087, 0.870080767776898, 0.7746675247456771, 0.660583892433952, 0.562371963012364, 0.9651501414182981, 0.5232250561084555, 0.696521931704013, 0.8382710969555247, 0.8578399468897053, 0.8537090521847699, 0.5077031586270213, 0.9733732233859858, 0.788453745673855, 0.6552080867822279, 0.8101841556809133, 0.7800259995699201, 0.7354640255688746, 0.9244218479662858, 0.5055706605379062, 0.5745734504532283, 0.8217384543127607, 0.8858133521361724, 0.5786153777481564, 0.8390407327914662, 0.8124136820068582, 0.6014878236681401, 0.9073006855339891, 0.8260264734406813, 0.9116143145074063, 0.9944155352534565, 0.6060509761791864, 0.949353687876273, 0.813174384740174, 0.7115992301326288, 0.9101580897416535, 0.5781935563050543, 0.806907870014486, 0.8863669517666597, 0.8030994178462638, 0.6034376068759153, 0.7459593643480318, 
0.5844654045731918, 0.7710178301853505, 0.7997821270849541, 0.7589225378363269, 0.6289754623045469, 0.795538758525274, 0.8795840825431552, 0.6414127148686513, 0.6013013192558978, 0.7334564682537627, 0.564010078837013, 0.8974218357793249, 0.8036524108981242, 0.5256703972266336, 0.6640677742982168, 0.6700648562783202, 0.8198626772341806, 0.9000099005246718, 0.6192191830462286, 0.6717935169183257, 0.7827192104956935, 0.7459959292006348, 0.5874794914315411, 0.7421103797588181, 0.901929901784125, 0.857052197731057, 0.7316367585324375, 0.627095119079192, 0.9646978882501422, 0.9881305235384918, 0.7064103522205463, 0.9222315980739525, 0.6273783184243349, 0.8076928929934373, 0.7029977796657974, 0.6923466982410972, 0.7600609311128235, 0.8330129855621684, 0.6745185299297737, 0.7007612052467347, 0.6329333684492683, 0.520824302249917, 0.702594720021837, 0.7848701967604381, 0.9797661382350698, 0.8225963324972114, 0.640428604240754, 0.9141139263872254, 0.9283180501247527, 0.8769244574041609, 0.7101227255249379, 0.7587246217371414, 0.583581225328399, 0.5103938765949867, 0.8522076157459395, 0.9762090651662043, 0.6013846707217283, 0.5550529066764261, 0.7474179068509649, 0.8174264210371761, 0.9493064790527297, 0.5706880729610262, 0.7110419447244343, 0.6183398517233838, 0.714109877826836, 0.674578299304873, 0.9568921332706481, 0.7444988406481031, 0.9113321153609969, 0.801760312996618, 0.6123331937366758, 0.8758125388352682, 0.855017139480351, 0.9672618635939918, 0.8449304824914279, 0.6242475639700553, 0.6348213813651111, 0.8500303273399108, 0.5049660988472583, 0.8927551792195951, 0.7314869448626017, 0.5849221317987516, 0.6981250576549837, 0.7898751704916, 0.6787804721355284, 0.9804132089032516, 0.8874934924614609, 0.5390152694238435, 0.6307348691395767, 0.5520221909557053, 0.52659984573034, 0.990182459159531, 0.723284714751628, 0.5814335072284014, 0.6459641098993985, 0.572524086060613, 0.9468867924375874, 0.5408346463141627, 0.9709439268347991, 0.8392383154629879, 0.5279227396157655, 
0.7791028158046647, 0.7973495864549182, 0.7903998628043261, 0.7370517818779858, 0.5546478287742559, 0.6308313415302487, 0.6826958350517203, 0.8392978305280332, 0.8152574418096666, 0.9502693266790441, 0.502269790246592, 0.9384095734427873, 0.8391041595052795, 0.9692379293552325, 0.9226674223307066, 0.6299964832733436, 0.7778062606416607, 0.8084726123194284, 0.5443077570605985, 0.6818440215431351, 0.947470069633493, 0.9906195364029584, 0.9920882020889541, 0.9617413244434452, 0.8979915032572936, 0.8027581505622108, 0.8714468410442227, 0.5235766089016123, 0.8759622121309891, 0.9092169696195753, 0.8940534140510451, 0.6014641815109796, 0.5723428612874331, 0.5868891428586795, 0.5910844800628822, 0.8369256797141947, 0.9937198251844269, 0.7633675628127174, 0.7672740505538562, 0.6550433225761527, 0.6861970984643666, 0.9782745076416961, 0.8050609929537902, 0.5945829767383457, 0.8445921972317367, 0.5658122573367235, 0.9358146780606683, 0.5744067068228575, 0.8149488097327207, 0.5411194085591801, 0.9264054894221538, 0.947557919112639, 0.74813289434302, 0.8108426399662989, 0.5013982310597229, 0.9819814521666967, 0.7632167367132521, 0.9706380371920609, 0.70807526277163, 0.9749228734160302, 0.5150517941293817, 0.59037097202703, 0.898515946236347, 0.5282876737136945, 0.506257090213091, 0.5697715566593443, 0.9207745249987829, 0.9735131906599973, 0.9257087206081238, 0.7171022273670649, 0.6806250479473042, 0.6937394267087875, 0.8902592401696097, 0.5758590902595675, 0.60933861137171, 0.9018533081764231, 0.8624351727877884, 0.6623986565534126, 0.6709932428200328, 0.5356910647504283, 0.8251995681036985, 0.8369875733741111, 0.9493221151027442, 0.7466150483693323, 0.6346165157237302, 0.6099922396780868, 0.5047434948484081, 0.5362454418531324, 0.8418825708185952, 0.9772767519476092, 0.8302883328812403, 0.9809363668886009, 0.7611394604804222, 0.5194735447676424, 0.9501614088877341, 0.8711708378402658, 0.8819613816301721, 0.5043482723680525, 0.5548184643249052, 0.6902503498095636, 
0.8850850820380134, 0.7297370472483857, 0.9644850756526453, 0.7527727155277386, 0.6994509359726416, 0.6903035029879929, 0.8896123863796819, 0.6888424451502102, 0.9443271516720726, 0.7783181416177724, 0.7894766097785287, 0.8373453397652817, 0.6839567644066487, 0.9198121670699277, 0.997454479884944, 0.6995201015412367, 0.7680223677668914, 0.5291837322478512, 0.780919315276073, 0.5489078118900181, 0.8214300161910846, 0.6578488576350723, 0.89725370088685, 0.581003597585297, 0.7463887728308907, 0.6927991059847226, 0.9024447570882199, 0.8646781013810896, 0.8131985468652656, 0.6937959759725554, 0.5066788938190259, 0.6017769285846237, 0.5005856192441145, 0.5104798751306023, 0.6372855932477461, 0.8708037261626824, 0.9185016626825628, 0.8326209961613668, 0.5692929377898216, 0.5511185204858656, 0.7241946683499132, 0.6646736933880473, 0.7386696653969449, 0.6585530961988567, 0.8753120919141952, 0.938136725570386, 0.7105736844753514, 0.5035667221353759, 0.5144153858336054, 0.8709646789844672, 0.6309245636573587, 0.8468275003473489, 0.88722909460463, 0.5419597858796016, 0.8698313812737006, 0.9498320101232165, 0.7750244888165683, 0.9404248076209409, 0.643072936885076, 0.8638458035138918, 0.8446221267921228, 0.9846269103692156, 0.9867147788784079, 0.8222060943782699, 0.6417663031822454, 0.51560599788167, 0.6683332029704998, 0.7507409091314823, 0.585565989815634, 0.543393938380803, 0.8852402826898624, 0.5673424732438195, 0.854237481500755, 0.718628743287321, 0.7157639809546689, 0.847429265774853, 0.6882511162940617, 0.790435868398663, 0.6369012872838766, 0.6051922978602646, 0.7845941490154438, 0.7760737350967675, 0.5163625175981419, 0.5652529234023986, 0.9269271384335279, 0.8736613736629031, 0.8768500069542229, 0.6436811193702837, 0.7540803469813451, 0.5339777358447885, 0.8336461306876831, 0.9446716222767333, 0.7052035935578115, 0.697793368018768, 0.5374339753160251, 0.9943225064073526, 0.8425171133662249, 0.5726878497609498, 0.7551602429527802, 0.8511633848822353, 
0.5555054278687432, 0.7094050807001984, 0.6293623409172266, 0.5559783508507372, 0.5263709899306948, 0.7264380632971423, 0.8771998930109449, 0.9720484998070197, 0.5132632097812933, 0.7912191469923227, 0.9045764800378457, 0.7013811574746447, 0.995393976067035, 0.9475881191943981, 0.9700166339320866, 0.7569919389153064, 0.9186299133739342, 0.6177328469445718, 0.6784283785361009, 0.7109504514187086, 0.5687196412037916, 0.5298049325968432, 0.6188795917028926, 0.9521483237796204, 0.775400741826022, 0.9630626088162003, 0.69139591676764, 0.6276158225274666, 0.5753253204322495, 0.8102903241493039, 0.5562320473952866, 0.881523863635707, 0.8512472358942289, 0.7532255333096325, 0.6477994579106783, 0.5391944356596201, 0.5051747741692787, 0.6237332671399846, 0.9921768930194709, 0.9469111174424818, 0.8661253963659128, 0.9630510308378062, 0.8862154916876455, 0.8600486946276191, 0.6070001724884632, 0.6508562737407875, 0.8588820218866781, 0.9057966742207499, 0.7080510777182774, 0.7562125853889038, 0.8798273362527562, 0.9470538156227091, 0.5506040395543239, 0.6258351370262554, 0.6862291754620427, 0.9335478266782042, 0.7309076691738292, 0.9328899626306342, 0.9600281187961766, 0.8618145719028919, 0.9100132335084499, 0.5845356609260903, 0.5826590351876035, 0.8770188362734007, 0.9806201443937809, 0.5058835571413053, 0.9816034964429605, 0.5009575651054831, 0.677901985946012, 0.5556655177318093, 0.9823312276081881, 0.6351630266631905, 0.7978814997198018, 0.8374226042373868, 0.9512451417331937, 0.9098585524977139, 0.6222855525882649, 0.8708253849371301, 0.7822693228013153, 0.608757840741897, 0.7471413614312605, 0.9946918612442477, 0.5820088997566983, 0.6880895050807136, 0.5376494208298923, 0.6867604851025064, 0.8357026266401634, 0.8820940583628254, 0.9806440497530631, 0.5550278431584902, 0.8115186445533049, 0.7658881135396349, 0.7695835736977696, 0.7537005825406946, 0.8909609582469415, 0.8847778524201197, 0.5453463385910424, 0.6923757984359737, 0.5749514852019149, 0.580566554132673, 
0.5446706216017035, 0.5822358783939323, 0.6639878088505022, 0.6984369685048839, 0.5954195722133007, 0.9895981211018994, 0.926323377703829, 0.5240050976769072, 0.763324136901913, 0.5233419361841511, 0.6447798783883433, 0.5609348513790751, 0.8503154182523299, 0.8060218548050088, 0.9268769671993392, 0.5625097750266526, 0.8108150707439894, 0.8389616739536372, 0.9688007323838552, 0.5760831755523644, 0.6268990859613558, 0.8223163355011647, 0.6586363546223969, 0.7821860995812859, 0.8894305059573833, 0.5887216989522674, 0.5033057547696105, 0.9368487348714156, 0.7249068959341529, 0.9040239849152019, 0.9413043565305232, 0.6884542903303477, 0.8302629017751373, 0.7806076379429538, 0.9917738244667949, 0.5714852810356259, 0.7973214350950706, 0.7068677572150744, 0.9596695650313554, 0.7451459647315075, 0.7259093125650953, 0.8863104244468041, 0.5758377288977246, 0.5260161183495193, 0.9897247652952286, 0.6634080137468426, 0.908996942339845, 0.6848513580578356, 0.9636109774034507, 0.5324637295556977, 0.5347195917791763, 0.8920373034065497, 0.6741144812617941, 0.7017638833980222, 0.8905845371150697, 0.8647698301517392, 0.7675478233608957, 0.8138616631189186, 0.690379788069674, 0.802042650480522, 0.5148090622851723, 0.7607772130729575, 0.553397955274721, 0.7439448828898523, 0.9648788951042672, 0.8616134285440674, 0.8564807260712901, 0.9033796941955646, 0.8544161874343952, 0.5175544756856811, 0.8603246634739357, 0.9774904578562648, 0.8669888270931063, 0.9101701408239417, 0.6200307611816107, 0.9425085367495392, 0.8704864264373716, 0.629615156593744, 0.5191587940616259, 0.6695325444448256, 0.7787219958589346, 0.7360909330696467, 0.9221676740215257, 0.9592698194750131, 0.893523088181046, 0.7708520889033046, 0.546723489919956, 0.6785838475195127, 0.5769816047744045, 0.6095308869187239, 0.6218049399520028, 0.826367226020052, 0.7806735194237294, 0.5074521833814633, 0.6302818756370565, 0.9740665789741263, 0.5695420254648662, 0.6980025778896471, 0.83849355414753, 0.5070416849412501, 
0.8634398754253203, 0.5940267007460027, 0.811167356145722, 0.5737921513998532, 0.6805676799324332, 0.7658354067409919, 0.9862088432259773, 0.5603403895417788, 0.971067145255522, 0.7695544381080934, 0.7024562363006956, 0.5521792493465754, 0.5765032228534068, 0.5227511224794537, 0.5187376691875838, 0.9564890066967324, 0.9143104141090457, 0.5459890442617137, 0.5105876383962179, 0.6607000805081411, 0.8017262944544206, 0.7914319916174193, 0.6984601016933913, 0.5267839051026739, 0.9895487818562132, 0.6967583846499653, 0.8456237305422473, 0.8505068250130096, 0.8404236892164845, 0.5223277026856246, 0.7350632751221642, 0.7478513360256124, 0.5709140645602554, 0.6497437589720589, 0.9315821874307785, 0.5830745417316923, 0.8794462532707762, 0.5825286993613203, 0.9132742966523724, 0.7589471073685652, 0.5664428295090553, 0.8680784165427746, 0.6124331557528492, 0.9282005527193711, 0.6135325695206864, 0.5162735805355139, 0.8720972558354385, 0.8300184555201418, 0.5714878412289728, 0.6516474216750903, 0.9618067653348494, 0.5576427532522531, 0.8766484852658711, 0.7928212551513687, 0.520549602515307, 0.962961542076622, 0.5546793878130727, 0.9165985272178452, 0.5813403921942605, 0.7089531175400872, 0.7982511001989829, 0.7921480304544117, 0.917203113878774, 0.7238324952912244, 0.5702157162017121, 0.7511408103579901, 0.9014957475225556, 0.6530536215092761, 0.7701605744145147, 0.9607636034513183, 0.9235082035057229, 0.7841238155583026, 0.65865027901259, 0.5662098593859496, 0.6583252341804349, 0.6353661893711172, 0.8928199638365821, 0.941021272046916, 0.678524064855992, 0.7251045786947012, 0.8185629356764466, 0.7311685099853377, 0.7684432953033302, 0.5559750395431002, 0.6312420769784525, 0.7486944285320045, 0.8314426954681502, 0.9243389193486644, 0.9780969008408669, 0.9191703830782073, 0.8861832173919919, 0.8719118375411996, 0.917653551103559, 0.5830659197456387, 0.5835380992036987, 0.6597280974405607, 0.7764654427676423, 0.8976094263416179, 0.7211088907643723, 0.5283497518915263, 
0.7505947314970762, 0.9191830529720662, 0.5665973386381853, 0.5756201404449801, 0.6557904841305604, 0.5501736268465386, 0.5446684972172702, 0.8866827592522293, 0.9684828917198345, 0.6045099392675662, 0.584753859657151, 0.6578694238181307, 0.9703409506083487, 0.8639807435571696, 0.53234876438837, 0.6069166748999125, 0.5575635313961063, 0.5742925775396357, 0.55127117982953, 0.6201177307466719, 0.5864774599804408, 0.8488886977481279, 0.8576015180619811, 0.9455106967284541, 0.5756384831752883, 0.9209915504205162, 0.7218364353276964, 0.6366873630590932, 0.6001966352881118, 0.6918575067526808, 0.9679114302575542, 0.5081625872474553, 0.9257480202189127, 0.8626307025112363, 0.6059244801838208, 0.6923157665478492, 0.8927648029816333, 0.9733456436185381, 0.888410548652653, 0.5584385641682417, 0.6990689813689233, 0.5854579806455369, 0.5105886776842032, 0.5361390169118683, 0.5127891704636875, 0.5792116786300805, 0.8544885792563428, 0.8634334301815674, 0.5850030941067896, 0.759491444578912, 0.6599862614345653, 0.5598895453156034, 0.9003522951318041, 0.7963644273769439, 0.731864624610999, 0.8110330488839842, 0.8277870006035701, 0.9972417001170639, 0.5368612411040885, 0.7875904919953299, 0.5864501480307132, 0.880158325027953, 0.8379646300149355, 0.7384864122799346, 0.7871775221191359, 0.7430808642457745, 0.7790317999194045, 0.7640648119403626, 0.7782988955335091, 0.6658993617441673, 0.6749291650040998, 0.853594993701126, 0.803271036160479, 0.7019067219602951, 0.8492424919171472, 0.6137011210521294, 0.5979878093590896, 0.737603861505636, 0.843536282794367, 0.7701264856420829, 0.5960804176057448, 0.5163149846209865, 0.5765355386008262, 0.5188159907748922, 0.7341663504279734, 0.8226079405751994, 0.5485749110023204, 0.6863450910995328, 0.6509804636675041, 0.828093545213138, 0.921899607624594, 0.7958666942505822, 0.561957391393225, 0.7510228672527333, 0.6842162401701943, 0.7608048179337645, 0.9922479259390893, 0.7543326179366345, 0.9505778859508318, 0.9094369410213934, 
0.607224035275532, 0.9489918794936751, 0.5617163001973768, 0.8674833419807976, 0.768760385169875, 0.711502305795564, 0.7932824692361335, 0.7737370915013346, 0.5368581527974092, 0.5954246579264835, 0.9394155400893004, 0.8498567471676559, 0.7351735667588897, 0.6831426660266782, 0.618604820047765, 0.8654878054426827, 0.9550472643205778, 0.8094100401416611, 0.7113990146698721, 0.6709142992827282, 0.8344426062757473, 0.7966392336847029, 0.9816070996329664, 0.7841384630992015, 0.7463061171453678, 0.5999864591071608, 0.7510655233338428, 0.6060525112870543, 0.5573144449990364, 0.7593518041548635, 0.541153608649809, 0.8808319537698909, 0.8320877963760502, 0.7912685181704213, 0.5187161150448036, 0.7332785324917369, 0.8096976758317269, 0.6995694025763073, 0.8790146909680786, 0.8112255878379765, 0.7250710155051223, 0.7229636208266996, 0.5831596308696787, 0.7286548575549461, 0.6214406924091763, 0.9861081312162732, 0.6056006355323809, 0.803670149484488, 0.6736238354921865, 0.9579653703946036, 0.6274745902060499, 0.5520993635372305, 0.7453950576084044, 0.8234273832681387, 0.9377494624070337, 0.9601218220312074, 0.7770846609051625, 0.912975084956219, 0.9943298495788071, 0.9420209906944768, 0.8793647342947826, 0.7256940311751703, 0.5328656408759596, 0.8036278294266349, 0.7170309169994542, 0.7948157145994988, 0.7467407442877284, 0.8485125491402866, 0.7223255982403344, 0.8546061668400584, 0.9703577271334014, 0.5846451310537677, 0.8380359528436829, 0.5569846212418139, 0.9222121075752167, 0.577411759438068, 0.9501948640649414, 0.7128070525407676, 0.5475740269186674, 0.62615909200518, 0.9563727555962954, 0.6060052570615294, 0.981075793445038, 0.684992675914549, 0.9958927372988307, 0.7100562696906836, 0.8696034029452102, 0.6939042486263106, 0.6650776326484488, 0.9351735080347285, 0.5389109092447024, 0.8945482674749194, 0.6151863563590173, 0.8259633226852874, 0.839392493265354, 0.6510055668812258, 0.5447884953122667, 0.9418795681982146, 0.6497553070995642, 0.5996133064477036, 
0.8755838854174027, 0.558426330621109, 0.8983461628132783, 0.592322998189703, 0.8561221676169963, 0.7186347220305191, 0.8659765667164572, 0.767356435537712, 0.6522320498156087, 0.5444765518303644, 0.8786344222540474, 0.9356774403172237, 0.5818521922754842, 0.5613806935040536, 0.524386149902654, 0.8567970538110911, 0.9006639475491149, 0.561743235387137, 0.5086518356756287, 0.7899999643890387, 0.8925980586985871, 0.5038780381836234, 0.6891285916020498, 0.5818171587540529, 0.7126604168352824, 0.9869625305282133, 0.6051505064483057, 0.9776972710431142, 0.8775549171471487, 0.927504766160183, 0.8388414490329836, 0.5304651792592603, 0.9808180561063358, 0.6569423729449981, 0.7050023644679321, 0.737966389521864, 0.804373032315107, 0.8309589922923386, 0.7664885336933998, 0.9041710894146179, 0.503990413027713, 0.9462585530688026, 0.858280044361649, 0.843983545922374, 0.7780605048866656, 0.8801263597521005, 0.5621556946043766, 0.8980832966930808, 0.6351894769263873, 0.9018634859798551, 0.695111102017991, 0.7530369068563314, 0.8798596530447123, 0.666698515036386, 0.6597151105457771, 0.5827450085655166, 0.766944762305096, 0.6567469681268033, 0.5956917404419437, 0.6461195489208349, 0.8773131714490252, 0.9138591400816174, 0.8212073525549424, 0.756470621560927, 0.5775336699859517, 0.8514145853481498, 0.9629592556981399, 0.508849476418975, 0.7094206948288075, 0.6715141363045688, 0.612511601744186, 0.523605489740357, 0.6843204152183133, 0.6688811259074918, 0.7565855763228813, 0.5709666473096875, 0.5146148579600189, 0.6056977772366801, 0.9822381996523488, 0.7967086769697316, 0.6781726640243266, 0.8586264117829994, 0.8531903046909504, 0.56748785610925, 0.5375868176887173, 0.6384547887588237, 0.896463706258016, 0.8769334222000204, 0.9035503871847661, 0.6136433968348743, 0.9909291446288357, 0.6057033990633245, 0.8835491815064885, 0.57350476116494, 0.994307789493899, 0.9937275380642681, 0.6178596871510693, 0.9936817040259434, 0.5140534279675837, 0.7418379009854936, 0.855722205510232, 
0.9380321691875562, 0.6536517574355186, 0.7724499877588942, 0.9988838202904361, 0.9417863757568519, 0.7854264992316816, 0.627145493182595, 0.9594990223074542, 0.5391001488960877, 0.888343644417205, 0.9599261291952079, 0.6871360836262242, 0.5338816597179432, 0.5885765471443426, 0.6667540779829335, 0.7381633567958801, 0.8069746514903227, 0.7346142218205988, 0.7131402767554853, 0.6322686323300708, 0.6629548592672683, 0.7849047392411077, 0.919135786747539, 0.6331086200906106, 0.9648504082639688, 0.8337344366070093, 0.7326703745313872, 0.5419813505820243, 0.5390875633958496, 0.631650027215827, 0.9239779227856287, 0.5102534989949137, 0.9078231262888208, 0.6562436137618972, 0.9955842376001659, 0.5255723411215891, 0.819435236319136, 0.8642021478469613, 0.8049753797489945, 0.5497313433564113, 0.7096950726510884, 0.8390427427537246, 0.6836548326691811, 0.7535401954756371, 0.7321320862515761, 0.7747691679075739, 0.7963985142779137, 0.8054598210776285, 0.5821569442990133, 0.6865349056101409, 0.8679973659659792, 0.5281573939819264, 0.5852040853290617, 0.7324912265194881, 0.7635768932497408, 0.6496014646786845, 0.9549479448459536, 0.7602004229246957, 0.500395049448696, 0.8996797820203148, 0.6657051368948376, 0.9862623189531949, 0.6849229777764538, 0.6652448372695348, 0.5660331456462786, 0.8459390167098861, 0.7104257299216901, 0.5593113753140054, 0.9007362196605504, 0.5620827236712584, 0.6463948960475381, 0.8387126451174769, 0.5690553724138053, 0.5476938043121762, 0.9144802007916777, 0.6224735873435555, 0.9989196701334264, 0.6942799385740233, 0.9020573539497969, 0.6555386598008095, 0.858810475180299, 0.6090489489703639, 0.8326878670896096, 0.9417396031978391, 0.6209598797241621, 0.6483004342500661, 0.7229518908669575, 0.64758822208357, 0.7812866514239434, 0.5800575554299143, 0.9443525207073085, 0.942318512567349, 0.5641141182648646, 0.5070077290761492, 0.7590327413460957, 0.929012598421424, 0.9369894808956706, 0.5505103070128217, 0.8909321876407844, 0.9879308863418634, 
0.6464894895456865, 0.6870308773686602, 0.7297543937542257, 0.832376462265212, 0.7132234569590689, 0.6100024375338885, 0.8965570543703556, 0.711065841582644, 0.8008049032024553, 0.7069141329859182, 0.895176741091624, 0.7676656953590153, 0.8867930732642746, 0.6082221824373839, 0.5267691987950277, 0.959261500170939, 0.663823471941531, 0.9373039098298896, 0.9869877152551745, 0.8963565427781626, 0.5059279981261998, 0.6327687168473952, 0.5545833019804041, 0.5439842710692638, 0.9842901216637396, 0.8234578050305009, 0.6899382605017323, 0.861703988754922, 0.7697300614363312, 0.8837628207876691, 0.9745364469822664, 0.9149159281710457, 0.6118783526162811, 0.8713456396670551, 0.7400816838737172, 0.7377646618780658, 0.8655925882569805, 0.5336051259766532, 0.56927581599836, 0.7911158030206095, 0.9912339059387534, 0.8022791030593989, 0.6211134027667142, 0.7717519996423194, 0.5300115753641752, 0.779632612167825, 0.8888742421924215, 0.5699443412887741, 0.503785702990222, 0.5191233779223856, 0.6994658881659587, 0.945190129137752, 0.7682806468359054, 0.7203260379319404, 0.7042049521215059, 0.7378476931319551, 0.6128992879476205, 0.8441954678091379, 0.6284531109168126, 0.9424044584476325, 0.6482349350277793, 0.9152618267101418, 0.8563623114412995, 0.5059719937217716, 0.8655926582313207, 0.6740974226185807, 0.6376229364967978, 0.8826911217142752, 0.5423777264499061, 0.5092548848879721, 0.6873553655508358, 0.8413593711568081, 0.5160712461149936, 0.6960860986365209, 0.9080811240415311, 0.7353122765388844, 0.5037718216272287, 0.6977459701292392, 0.6458291010752011, 0.605768878460019, 0.801056823584438, 0.6602765173860039, 0.6150786723854754, 0.7285470505164326, 0.6236320317353334, 0.5038179951078893, 0.5143867255267245, 0.9120739909624298, 0.7704674292866456, 0.561109825310252, 0.7047690588873117, 0.6154653667295957, 0.5073918122161962, 0.6115824617192982, 0.8674366980456347, 0.9408546137495409, 0.9321982240340728, 0.8203028934298107, 0.5341676456114237, 0.5135739056537687, 
0.5702991767551058, 0.683704823309798, 0.8645655955287876, 0.5910313862279928, 0.7877073786686619, 0.6191660343587433, 0.7829717815671775, 0.9799556571312161, 0.6669537355778862, 0.7750548799675708, 0.845985192280434, 0.8675210542542673, 0.8425389752312478, 0.5407897358070866, 0.8464502878616322, 0.6760734200565848, 0.8540805703350978, 0.9746630136757349, 0.7003470853245761, 0.8982772511260162, 0.6327407121246358, 0.8520162530433896, 0.5023400267060261, 0.6948488514185361, 0.9605133178519987, 0.6935154947341411, 0.8075575018453995, 0.5858481100033688, 0.5160135467125697, 0.5881137848332003, 0.6075278393757271, 0.8923972737644669, 0.5680926945545068, 0.7228773351963609, 0.8227704083070382, 0.8608003440474061, 0.5542175934097551, 0.7426867364440661, 0.51773277544231, 0.9120097824037312, 0.8765216502348128, 0.9522094595170274, 0.9613422330412948, 0.9181077188071041, 0.5397542685273189, 0.7821264501084136, 0.906847433727098, 0.6776056335699471, 0.84556557154876, 0.6754280556747422, 0.8199499285736269, 0.7933652079853039, 0.8315935725489223, 0.6254791887081658, 0.5720895849104424, 0.7832483487332824, 0.9705055276472447, 0.8403802497641408, 0.9941859450163093, 0.6111510254841188, 0.5223795199696954, 0.613413900013507, 0.7044580806115237, 0.7626567428613238, 0.802294112264313, 0.7530915127578439, 0.9322252511058462, 0.6537450941498497, 0.8660863882027461, 0.6629550750157506, 0.9822872271675317, 0.8866635186314539, 0.626043780982634, 0.7140722571585144, 0.9664189464158224, 0.8850435035521609, 0.9370492607064744, 0.9036959694611908, 0.9113669984364565, 0.9588678484403164, 0.6858296010667827, 0.789158918547276, 0.9882688907981019, 0.9483655352310835, 0.7393455018759894, 0.6738114363365343, 0.7767697903341031, 0.5551908859950679, 0.6972733487991571, 0.8452132485055824, 0.6106008585553313, 0.5709406620088153, 0.7252637744685182, 0.560009816083147, 0.5002988550559482, 0.8704899046125909, 0.8898463352384522, 0.7149982387206864, 0.5018858846784225, 0.6604486401092599, 
0.6108890400220219, 0.8811459568869418, 0.5465964940121235, 0.6354006264473788, 0.5296076271100947, 0.7772281447947261, 0.6165074275922287, 0.784220715782949, 0.5924299051425843, 0.5943364595493955, 0.5953408307381827, 0.7242740262046539, 0.677270151159592, 0.9201002327822019, 0.8685795568996109, 0.5949329645964908, 0.890892824669268, 0.805594254625939, 0.9331776387804307, 0.6414108911345016, 0.9820204855247692, 0.5549807260497168, 0.5726572312111382, 0.977092796988629, 0.7640591292778665, 0.8369025546948299, 0.8400303098881634, 0.629085443147372, 0.9743141833184309, 0.902747818284402, 0.5864039021211811, 0.6820535543777666, 0.8655866059113106, 0.9190777208482948, 0.6032466847219249, 0.7042527559997301, 0.8152396842323918, 0.6194464735708992, 0.5690592121408298, 0.6438227706289663, 0.8664312016184608, 0.5556793209301569, 0.8309529659375076, 0.8911603507982938, 0.6144164130683977, 0.9525897959242267, 0.5364463343230963, 0.8850441505828731, 0.8765076451207181, 0.9864735882045688, 0.6901146214054632, 0.5748246745145973, 0.9710504830367865, 0.8374559001496396, 0.8934725334254625, 0.6598920710706344, 0.8732089173929947, 0.5236722490862877, 0.6297275161368325, 0.9239762993291861, 0.585985953640172, 0.6428814969910739, 0.6865338613057339, 0.6794361199121405, 0.8573393139600005, 0.5195761587577319, 0.7765765105846756, 0.9642362060998738, 0.6454259148849095, 0.7193525836980681, 0.935437795555702, 0.5643001227379798, 0.7058222217048769, 0.988381634645902, 0.9545644122129379, 0.6553072881207077, 0.5265025986159857, 0.9984263691535213, 0.7806322169742492, 0.6674569312908449, 0.76516682716171, 0.6152425166453147, 0.6614430092691165, 0.5133697097132088, 0.9690349936271655, 0.8241940819575995, 0.7756016640516044, 0.6542392888413807, 0.5191487173856915, 0.9575228446255082, 0.6399981189890893, 0.6396077077706922, 0.7839140807518601, 0.8518331971197182, 0.9047955326428121, 0.9557036888524012, 0.6095748009888551, 0.7122881718906808, 0.6284968025543377, 0.5293802512893496, 
0.837687907039296, 0.8837726861409626, 0.7702730512176547, 0.7264153114996599, 0.7751311884869037, 0.6777656447418796, 0.5864615584024897, 0.597914300338257, 0.8796619210502794, 0.7163996363340591, 0.7458478741075064, 0.6707500675046836, 0.9678311057607853, 0.7051913171020374, 0.686124988542046, 0.6755085831540208, 0.8764003191960028, 0.7283610536878586, 0.8171317595020486, 0.7820399529547777, 0.8839033272537422, 0.9771774825960581, 0.8974280380975556, 0.5074115482952058, 0.5019211829945955, 0.9842334684762297, 0.9312705816486929, 0.8191294842461307, 0.5833723271043336, 0.9387150985111108, 0.8699975280400999, 0.5314302566756691, 0.5979940301055009, 0.6550873772170127, 0.6929661565898313, 0.8229216237366537, 0.8108636310481525, 0.6644635481144322, 0.8392929075307962, 0.6407149539144317, 0.5726695235795791, 0.8993258699190132, 0.617342415319417, 0.6173418072202581, 0.515949082478866, 0.7090577960441311, 0.701026910771775, 0.9758715523811565, 0.8224820272088452, 0.7847474270646511, 0.9320740132989348, 0.5809000074923425, 0.5441530564320736, 0.9301185504923262, 0.6721632073071573, 0.6239179741075087, 0.7922591070159724, 0.7713550874296944, 0.5431218096188757, 0.8351805809734039, 0.7983995997438005, 0.7360127681583297, 0.8120567256027345, 0.5383739112952283, 0.7970098446681161, 0.9461897439290297, 0.9795238814213973, 0.569971499065586, 0.7385968490165837, 0.9190632734078718, 0.983548946570505, 0.8552767320610067, 0.9245794731686738, 0.961645052803171, 0.6302086314551978, 0.9353674083904224, 0.5050418056895051, 0.6740025222187341, 0.8781654171625348, 0.8214454400196911, 0.7955020126058627, 0.75354366875817, 0.5231094927557202, 0.8730890771082633, 0.9947237691071347, 0.7117672401736501, 0.6751937074002503, 0.590072754782101, 0.6270894333735986, 0.8334622212327929, 0.6224529681483855, 0.6675881102518211, 0.6634454980528178, 0.6660975952031778, 0.7245171550859157, 0.5525347498321138, 0.9042775469658066, 0.7400723262228298, 0.9102205279139466, 0.7950624205595939, 
0.9470786809151643, 0.7286657119239952, 0.6086190580030413, 0.7002662108507376, 0.6339777605525609, 0.8953563920113836, 0.9442885888498399, 0.6641528143533735, 0.610152589049225, 0.7965103163124092, 0.8328336033195444, 0.7209430526224523, 0.9313917357845523, 0.9220372534160894, 0.5766834556052081, 0.6735912987245654, 0.9933369922223746, 0.6878815387500259, 0.5148709207998274, 0.9864745145456524, 0.9439602749589174, 0.8291674315062441, 0.9209096857623886, 0.9558402123873996, 0.9965099822521073, 0.9729876217822564, 0.5269081296437694, 0.7054546084939644, 0.7216637067330869, 0.8497349739565643, 0.6361786110888317, 0.7082656420529838, 0.5958059144419983, 0.7028561297700717, 0.8529664624973132, 0.6941372799048084, 0.8191939066606607, 0.8528305733676064, 0.5950937594617739, 0.9183594060375802, 0.8652379822113463, 0.913913931181286, 0.636157657887295, 0.9236209555134879, 0.8762023901841629, 0.5607393605058226, 0.6985582732114606, 0.6698366434207803, 0.6926477932512792, 0.8164371360139109, 0.8334002156119587, 0.8509843003387069, 0.5859545781450622, 0.9989106652507256, 0.5110825773714067, 0.7459507642084229, 0.8393679543861883, 0.7325597376433444, 0.8057925236868242, 0.9092572639964291, 0.8439470914650388, 0.8619185944242742, 0.6904872520889789, 0.7624444688315648, 0.849368119652341, 0.6573260238081765, 0.5786654552225146, 0.5720621615637556, 0.6322902275756419, 0.9717601844848069, 0.8743060150894112, 0.7809695985273781, 0.6958949058552815, 0.6776371448953288, 0.7813142452256873, 0.9524531609435704, 0.725417665595256, 0.628460822283234, 0.5144794809600131, 0.5075884890107529, 0.6510661087464835, 0.7755729063041169, 0.5273866438902078, 0.813594565078769, 0.9971686487538629, 0.9919169974140273, 0.6366567171756303, 0.9734999895404741, 0.8217934376608156, 0.8204864488101721, 0.9658312850406073, 0.5704603997159555, 0.81042942636474, 0.5224254281515808, 0.6240117112241167, 0.7006885904946953, 0.8676683617140664, 0.6848349965315454, 0.7851637204493828, 0.7042832955616833, 
0.8755139181804547, 0.987498591858453, 0.6241431838155631, 0.5746869942822617, 0.6379326266254486, 0.9950217442098481, 0.978940555100137, 0.6955938843987927, 0.7608925343217726, 0.6584757880537826, 0.6984056962156597, 0.8427813234571482, 0.978332559069222, 0.5188099397580892, 0.8019403052342151, 0.7147314031765011, 0.6818388379273022, 0.9607968701780054, 0.5522395240418694, 0.5477598249999037, 0.9495617607611586, 0.7934610112010256, 0.7189206030077868, 0.5424291596640514, 0.5117574031944878, 0.5483109057442089, 0.8642079447118715, 0.8254989706863742, 0.5865017075417158, 0.6467861199107032, 0.9481762300321965, 0.9522059849357611, 0.8192833491160638, 0.9175512592862551, 0.8457176738615239, 0.5587660546957667, 0.9020762634349468, 0.7498828416678095, 0.7621805288060839, 0.9132761277404602, 0.9244953025296085, 0.8297122621452933, 0.6644057761836575, 0.9426109272885232, 0.5071080250915068, 0.7988814866468554, 0.6158601244965634, 0.5483542489927651, 0.7544121622308155, 0.8011445844712405, 0.5267646209540433, 0.6325253879955648, 0.7210591876718127, 0.9447594869329872, 0.9887642369033545, 0.9823794788575655, 0.5995385470416126, 0.8614065671202878, 0.7465992266263953, 0.9285062696605683, 0.8398291690324572, 0.5127583891069295, 0.8857279825392406, 0.972977266590476, 0.7266717114448826, 0.8767740521607913, 0.9231618528525449, 0.9982364931314549, 0.9089856233942144, 0.7815615026471971, 0.7896595792768591, 0.5391321369011051, 0.774514034308962, 0.6325178325004266, 0.7522075363487735, 0.8579309458387521, 0.8857402693121141, 0.7438343008072901, 0.6057096502562739, 0.6289181629501175, 0.7777223010762986, 0.8505654243704575, 0.5066284441455398, 0.5170448203623286, 0.9726851743795579, 0.9508244925939939, 0.8768249297968697, 0.9380115875876727, 0.5178704441656308, 0.5253882470272493, 0.6260100775140045, 0.7513045042621982, 0.7819720352829094, 0.598005679575413, 0.7475276675699433, 0.9187521787335293, 0.8526545283136443, 0.5754048983393352, 0.9939015951990439, 0.6580168902129289, 
0.7819336616794952, 0.8717613294358033, 0.5094145413117634, 0.6803193578525999, 0.7272879559184384, 0.8295110416347915, 0.6028452394562757, 0.8526848855133113, 0.8970867463590959, 0.7810826014552731, 0.6455383764447334, 0.9076203420532147, 0.873517386738601, 0.942079572614273, 0.7585286167762516, 0.9697952540988184, 0.8641540950522425, 0.6960649484547496, 0.7918112686464687, 0.9135843072274434, 0.6151975497109392, 0.8672640158391486, 0.5779901645429907, 0.7430642965134457, 0.9787511164562028, 0.603988895858584, 0.9996881710381167, 0.6126243149276871, 0.6867229576233547, 0.5421378473808007, 0.8776635510397859, 0.9838753452584613, 0.8764657926280812, 0.6632160332271819, 0.7055271061037074, 0.8190420634383195, 0.5557937241838191, 0.7165827121388781, 0.9059393145899601, 0.9766084330570284, 0.6452216215444734, 0.8294425645989654, 0.7132486485935875, 0.785231434853445, 0.964904440713888, 0.9689890911472253, 0.5013222698968688, 0.7175793094677214, 0.6597600832940733, 0.7354100571551818, 0.5550730013142477, 0.9414425807175185, 0.8627324868945545, 0.9165654106369125, 0.84031318001093, 0.8217818774169887, 0.8755846396954176, 0.5888311036316136, 0.7985703522467688, 0.6891447764212351, 0.6297657746645835, 0.9211548374840011, 0.6389686451879681, 0.5791937171109812, 0.9488101840968483, 0.7997395996519749, 0.9991111010784487, 0.7107879138209948, 0.8697761967016673, 0.5011700697347992, 0.9955348788150797, 0.5847984930300185, 0.7925806891538721, 0.7134757448165714, 0.7945181829403748, 0.8070744961430065, 0.5048264965547702, 0.6362012337605427, 0.7830004913926295, 0.5625232436163183, 0.6891708636124461, 0.7772893107236363, 0.6690322186514596, 0.8129134091270663, 0.8556188927023859, 0.8345115022560367, 0.5635901936121988, 0.8765063913469471, 0.9573527788964689, 0.512015793413092, 0.7270065136298236, 0.9562019301136602, 0.9022310281813988, 0.7766298297758149, 0.6753310528758958, 0.8231955094454356, 0.7131497317713209, 0.8530205818344243, 0.9105442426646809, 0.9810123127597673, 
0.6206949447198191, 0.6498225747118067, 0.6243024818246932, 0.5959791564283246, 0.970794798905716, 0.5557725560764883, 0.7828660975643242, 0.5308393632283452, 0.615965235725796, 0.8315628272547874, 0.7172700668971504, 0.6943090736330719, 0.8572292013240121, 0.5398695992646744, 0.683698380224502, 0.6843211697742744, 0.6549681400806954, 0.628959113523625, 0.5720256771060206, 0.6067826214578179, 0.8081856241736809, 0.8910831009583976, 0.7660776481250977, 0.7287020717162749, 0.7711156898060247, 0.6022037361476555, 0.9307140831446984, 0.6190715579739503, 0.7175999816137207, 0.8165411348861529, 0.9516666883137002, 0.6873772473734228, 0.553395449238179, 0.9456043543919821, 0.596320053315911, 0.649141586676399, 0.737769495335018, 0.8065596397699167, 0.536277716217153, 0.6287917393162998, 0.8779492652763594, 0.7700242908383386, 0.5875793209466698, 0.804662329407458, 0.551394852755247, 0.9815328291373504, 0.6741533939125959, 0.6283364515358165, 0.6618574641268199, 0.8057777543526028, 0.695318476997167, 0.5759573625279029, 0.9389103614055501, 0.867996275475041, 0.9797727320329215, 0.9090202830774534, 0.8393463128283016, 0.9734424191343594, 0.9319395494299196, 0.9227105658025434, 0.5411865075337616, 0.8752472312450763, 0.8368475954234591, 0.6142431328981649, 0.7971589861145814, 0.5041630392769703, 0.6431124676838564, 0.7792898543280424, 0.6786126090024368, 0.614739835330925, 0.9370247100400129, 0.6802331787977383, 0.7745289907689384, 0.6247579304762141, 0.5094015538876908, 0.8623151996181369, 0.8323931854737929, 0.9621605588140931, 0.5564304662868038, 0.517123420959425, 0.8009113838638573, 0.8493109052295116, 0.650586746604862, 0.5156514704911584, 0.8973549287724435, 0.5897540776044471, 0.6229225841157188, 0.9722430247720271, 0.9556913379722397, 0.9125755108106639, 0.5926783795052869, 0.9297358603277658, 0.9569949221832248, 0.5689227581936549, 0.8177184321698518, 0.7916235764552148, 0.8554449927726228, 0.6314937227754823, 0.854302391442378, 0.7960716326156374, 
0.5166873082209469, 0.9232709930148624, 0.5971578462531368, 0.8611598892532397, 0.7864746138581098, 0.8178758824777492, 0.7696070423864945, 0.6706313734474336, 0.9588578775827514, 0.7978048174126441, 0.7108897796200999, 0.7705481285163588, 0.7674964698320862, 0.5059864840817575, 0.9694255383429801, 0.5480076147430462, 0.7387150789009342, 0.6754220177232635, 0.6201968245276501, 0.7490439716546873, 0.6457737033289088, 0.6203282877581834, 0.9936189944312053, 0.7821989393966555, 0.9709351945000992, 0.7036268345499361, 0.5279925112196816, 0.9167151986771864, 0.7278843752926718, 0.9766425692581138, 0.5110534439031481, 0.6015653759001813, 0.5756695460363366, 0.7625784847161581, 0.9524619279684949, 0.7540713711708649, 0.8914779334304, 0.9393171059802479, 0.5792805865730131, 0.9390064788124938, 0.8301885691408923, 0.5748836655329059, 0.9030576860960116, 0.7511096433705231, 0.806090732131312, 0.5855172680882345, 0.8736304190322837, 0.6559456738789438, 0.7103627810253832, 0.6661765199972453, 0.6461561618432691, 0.6652850840594213, 0.6419334317713793, 0.6893909569689854, 0.6162560524579399, 0.6598083180062573, 0.8876264248059333, 0.9079713937963764, 0.8646676610956108, 0.8510730702083682, 0.6897260110123409, 0.7385801075239578, 0.7731964611567061, 0.5301442181259393, 0.8134738047599213, 0.6504274304296309, 0.9466062323944449, 0.8485444189873848, 0.9761468231771553, 0.9074086788317322, 0.9466220732786312, 0.5236905341401845, 0.5730450871861954, 0.9243966308777234, 0.7990645269658774, 0.9830398812201169, 0.9660017121312767, 0.7276251315329868, 0.5349878716784594, 0.9925050334173526, 0.7348009694959676, 0.9410244956154374, 0.6412388494405387, 0.5666298679799027, 0.9375519054649977, 0.7505890207428767, 0.9776968545732554, 0.5955197301577464, 0.6259900795235716, 0.6782165694194369, 0.8470207147815978, 0.8493861522788533, 0.6522001035693636, 0.9511299787024978, 0.9699948639189766, 0.8244032155080091, 0.6520158436537509, 0.5168609015974183, 0.6502240411868032, 0.9525060325800916, 
0.8036783643586138, 0.7212276225966066, 0.9382508619700638, 0.5240020272306504, 0.7324350332006805, 0.9813140554204951, 0.9003543747141063, 0.5217231825657669, 0.5629508561265046, 0.6730368925810974, 0.9002282447908012, 0.6932536320819342, 0.8214080989264888, 0.8182238268303644, 0.8806726425549761, 0.8079679178094026, 0.5905927211495435, 0.8384596031831448, 0.9753014930977266, 0.5734144978222617, 0.8642382904074424, 0.7612210024476658, 0.6514620693497826, 0.597691820159839, 0.5499229756927886, 0.7986686146804258, 0.6954871859840197, 0.9193411917471384, 0.8899874867888005, 0.9852701658726961, 0.8555979497429976, 0.7605502312951475, 0.6774715887529084, 0.6439955870703195, 0.9449611701243414, 0.5264656886055306, 0.8193429705924711, 0.9329900565747629, 0.7636474379334581, 0.8370690074495729, 0.9394292757380118, 0.5340531533166586, 0.7345235265602639, 0.8548486586099333, 0.80961144180312, 0.8107153386098287, 0.889160550882558, 0.964209679556586, 0.6300519824221895, 0.5548242564204072, 0.9884296221493493, 0.8254433210606698, 0.8676024953147272, 0.8715123645074461, 0.7719301059950726, 0.9712579991953556, 0.6804708085362001, 0.9754761968101602, 0.820885725481511, 0.6127843337725647, 0.8233539299450792, 0.5108409679087875, 0.7290496460752197, 0.7590101109629589, 0.5583043750615384, 0.6028686080010386, 0.7494023425044154, 0.6210001804477117, 0.7089274927525921, 0.691574805276506, 0.5077979992317956, 0.9666443962623694, 0.897180517207296, 0.7935904212500038, 0.6542605624882754, 0.5205301090821796, 0.8653271283882731, 0.5286915069287332, 0.6464970177205023, 0.5857184849849802, 0.8992145470545718, 0.8503595759645951, 0.7177798968865436, 0.5873346299301578, 0.6379206920889113, 0.845154068650513, 0.7373645877118691, 0.8911432025473851, 0.8675431698808873, 0.5001460936005271, 0.7969907406791372, 0.8918128481734946, 0.6282226380154267, 0.9173477049008709, 0.526730497432457, 0.5398924336730382, 0.6681141364980683, 0.7078701740033191, 0.8448694884597503, 0.6049012198247122, 
0.9861776912214084, 0.6541116777027736, 0.6304212261825586, 0.5515770182208763, 0.5321556075229037, 0.5401952214463209, 0.6043018574871679, 0.9003968059601892, 0.8462492965050116, 0.6013268652880979, 0.8184270139839711, 0.9676929960982437, 0.5055220982615558, 0.549215174698464, 0.8964524117738853, 0.9022826547112742, 0.6177586796415726, 0.975283305502959, 0.6310891891759545, 0.5355093049720674, 0.7077897191524722, 0.5385846240236489, 0.8996162091190021, 0.7172590050269201, 0.8609997805411294, 0.8336103080494718, 0.8979316856457391, 0.9532308948104422, 0.5616974372500776, 0.8324772197672143, 0.7875870933842117, 0.7988504664119159, 0.5910589867028837, 0.9754230401914654, 0.760315185194161, 0.9998350745664946, 0.6697806358997829, 0.9691397125418715, 0.9919844842681314, 0.6701608368853383, 0.7380251914738399, 0.9079351151018011, 0.5537526839849789, 0.6271792119989523, 0.9549194003295025, 0.9029372823164187, 0.9727378928511281, 0.9272216345158699, 0.5655870439087404, 0.7872086516402128, 0.864484262442613, 0.8333880920137295, 0.8543236755807355, 0.5592885631133291, 0.9020790514668173, 0.7134953484316429, 0.6896455435998105, 0.8751536022682502, 0.9222078911294758, 0.971213149433584, 0.9269503949863162, 0.7659379675100805, 0.8493327714021233, 0.5112741987556582, 0.7636835836760265, 0.7881221938356416, 0.8398116712053876, 0.8684673801356255, 0.7858545698984838, 0.9011352922594992, 0.798426160164165, 0.9449812523444132, 0.5226148151791528, 0.5051944783238653, 0.5305452162111997, 0.9262830852737979, 0.5654969901238958, 0.5568639250753671, 0.9922587017380136, 0.5354733943901862, 0.8784484276949802, 0.9577825407232277, 0.9919969364586445, 0.848183347286922, 0.6765902214746805, 0.6790046526927083, 0.9169393935042762, 0.7907574458281208, 0.5419709928394273, 0.9791057236285325, 0.7453025771893238, 0.8352097485268257, 0.9261690665750111, 0.5092293215918513, 0.9859252016385147, 0.8300976682071375, 0.6858329332326445, 0.744759185816721, 0.6880187997140393, 0.8537153572942211, 
0.6917884399777294, 0.602654515897437, 0.9334368417429058, 0.5064514350767819, 0.6045753471771238, 0.8349455720424548, 0.745337788246808, 0.6723708291687415, 0.5431533077235751, 0.5931939596681384, 0.7463086357333053, 0.6331859262901023, 0.8798093653768164, 0.5684342489849379, 0.9513321405001768, 0.749753290215939, 0.9613572968577907, 0.8834910930957067, 0.5072662501595313, 0.8117600969048402, 0.6109590284654163, 0.6912363269371777, 0.6375477096101214, 0.7079754167450323, 0.9369777259729277, 0.5622086821138723, 0.5159428517894962, 0.5817858955683479, 0.936821564564889, 0.8829846313974147, 0.9214030480840772, 0.6519987782510848, 0.9422236931386736, 0.8382241303464693, 0.5638546710392349, 0.8157152982477439, 0.7438548338290241, 0.5822825479551803, 0.9757027886690821, 0.5696758352387656, 0.7684211788796, 0.8083443525595682, 0.5074533633153278, 0.5470684592493198, 0.5326389437989647, 0.892955331289295, 0.6120859811054101, 0.9042218778905419, 0.9373439648174409, 0.581716312210542, 0.8586402521255003, 0.5700851550004192, 0.9483850031572421, 0.5937785748654218, 0.8040849218487118, 0.5043514760228551, 0.8400254972757859, 0.596545508057013, 0.77776253385561, 0.878176387766555, 0.5385257351899336, 0.6788175738773825, 0.5276128201114227, 0.5253679530381776, 0.6566769924140404, 0.60635185367678, 0.6851159674653068, 0.9164401024509308, 0.5223530098235355, 0.5549770879917308, 0.8465542161985331, 0.5136230239759456, 0.5849255106275656, 0.8155412395475465, 0.8345176581193059, 0.6911763898352143, 0.9617341598838409, 0.7819001845261471, 0.9006620026789871, 0.7651235690695919, 0.5197982344315135, 0.8483610312411956, 0.807341918064338, 0.6446718621650069, 0.9464246217379262, 0.8731851649856529, 0.5852841633062593, 0.6226395757915923, 0.6062611664330723, 0.5031855118402778, 0.9852360873086231, 0.8453183874407348, 0.8036361070735273, 0.5969251423324935, 0.6256004196124241, 0.6694649593146178, 0.7250137671141796, 0.7182892670234547, 0.9344837861594111, 0.6501380230719386, 
0.702983345149558, 0.611693325921556, 0.5004674472877298, 0.6428753497662416, 0.9300330698037356, 0.5152879237617565, 0.8357881891489785, 0.6990191184047834, 0.7175368851399005, 0.5677628691790322, 0.9505914833420015, 0.7340871366795099, 0.921838285321497, 0.9101118714923464, 0.5426171251359844, 0.7945743831814782, 0.7345204606001551, 0.5071783760921966, 0.6902858242374234, 0.7865495502233271, 0.5018919913363901, 0.8147897177342058, 0.7527122531501751, 0.9694578647865159, 0.5747134548490442, 0.7930951095268917, 0.9026205186647425, 0.5526137353203876, 0.5735210420053765, 0.5353705851772004, 0.8770517873714587, 0.7751899348822371, 0.6948003781755565, 0.9504451652818802, 0.8349810962504094, 0.91039571855509, 0.7504648437312662, 0.9490445181594824, 0.5213242046408415, 0.6628765755399345, 0.7267215279617082, 0.819826838585604, 0.6237456045060839, 0.9982581947215254, 0.8627516951983798, 0.6712402967615231, 0.5583893386979701, 0.9010668397425341, 0.7890484991306601, 0.994161026456837, 0.5370054249172651, 0.6518544954010561, 0.7720461438514131, 0.8309698510130772, 0.9579206856274818, 0.9080350973365012, 0.6024040410460441, 0.7074435010253978, 0.6730413259251485, 0.7682746128786826, 0.8743352550771215, 0.9891378387210339, 0.8050775975206753, 0.724481307896681, 0.8139969069144883, 0.5299467393077928, 0.5479655829594857, 0.6600763615667558, 0.9254610651507811, 0.728017584384295, 0.6284811774969581, 0.8498206767329421, 0.5474916459016066, 0.8174573288137029, 0.7107010835633948, 0.9697952465084088, 0.922272293993702, 0.6676841393165858, 0.864122012364168, 0.6488831136614128, 0.6770731124732514, 0.6368362435078179, 0.6120298189900517, 0.9053210552119809, 0.7550587431291624, 0.9876562522155796, 0.53366620710658, 0.9319716203995057, 0.9961956515176476, 0.6826118234711609, 0.8815711933206754, 0.7065924539184366, 0.6577548062719158, 0.9033539014938669, 0.7319630943293384, 0.8359529477823275, 0.7175084118480706, 0.6386183640286505, 0.63831412383118, 0.6995685153064359, 
0.8840350283723093, 0.5603488719253833, 0.9662939307733498, 0.9597815937975918, 0.7731595044662712, 0.9413418178479915, 0.6349527709438507, 0.8035567623334465, 0.7222331559388301, 0.8750488509204001, 0.9278630800051124, 0.9902034789016922, 0.5062585147608087, 0.7748662689237409, 0.6052763157793787, 0.6538953820050042, 0.7474914117567013, 0.9288701985378154, 0.6655717314052103, 0.9363363540852343, 0.6875242922533249, 0.7587020748920936, 0.9123563513142707, 0.8491267499434676, 0.712876617533011, 0.7552804229323695, 0.9932914995348009, 0.7421041573286726, 0.9123711359391061, 0.5055172216981609, 0.8452159347097528, 0.8955154300013867, 0.7257877361176658, 0.8134652117310373, 0.5312284185390443, 0.8196363717813846, 0.6983387962929233, 0.9621077992699463, 0.6150562813191556, 0.8542436674379374, 0.5460203821315996, 0.8102493166632381, 0.8740240398346449, 0.7712112996206919, 0.5645932313245864, 0.6834288065448748, 0.8960764199136921, 0.7117873453955389, 0.8309099228675942, 0.5108280503025081, 0.8778934828712273, 0.5982680992611251, 0.7107553531598518, 0.933982467840536, 0.9891740652551382, 0.7061081914676502, 0.7346114197251592, 0.6623374121833983, 0.7093952005012708, 0.6363937521544679, 0.7255601875001841, 0.5971621085952324, 0.8823553783988984, 0.5107777083803589, 0.5644322639479459, 0.6431445673003727, 0.961134555411868, 0.5141350620067456, 0.748898990232646, 0.7514112969358766, 0.7684984417768546, 0.6007186459033209, 0.5167826513940413, 0.9542280545932242, 0.8923863866288283, 0.5759362141981992, 0.5643550498985692, 0.5684523643097347, 0.907999598825479, 0.7193195133566304, 0.5062823709453728, 0.6941903780676397, 0.6237885723758938, 0.6357684610949771, 0.8819052454295125, 0.6940710536634377, 0.8890733301457245, 0.8003635422396245, 0.8165966732084837, 0.7884534392389726, 0.5182212517702763, 0.9820730688728021, 0.7568702307856986, 0.8224781418807964, 0.766453665281009, 0.5383596113370459, 0.8271811186820868, 0.867430069306931, 0.5967930978867135, 0.9995263971455526, 
0.5422220399123409, 0.7611478434371299, 0.9133945505787316, 0.9823582880555863, 0.9681897128841668, 0.7076195590291467, 0.5594058209470308, 0.6000249937725215, 0.505695420344726, 0.7255622355185358, 0.9359124342277532, 0.9773544962834675, 0.8986352982766777, 0.5120598865689605, 0.6024991466518446, 0.5496814444534126, 0.7447043393857902, 0.991023251891007, 0.8319489514817973, 0.7528421371694575, 0.7864669161589726, 0.9553362215399512, 0.7460551928186692, 0.8960072978768919, 0.7541377501035389, 0.5198628947827011, 0.8817208452476633, 0.736490461373305, 0.7360243339197754, 0.7180342194694209, 0.578490114815507, 0.9945819909563369, 0.7168995437959638, 0.8769727935417968, 0.730781828089259, 0.5373412309987464, 0.9745426889567794, 0.5198991036642928, 0.9415485810264792, 0.5740101721441443, 0.7146122215552424, 0.614207438434992, 0.5545838396416696, 0.6958861485979186, 0.78285509590588, 0.5016887850060157, 0.7228575781606108, 0.5265533298215199, 0.5215703156497278, 0.6999544603352175, 0.7956116535355177, 0.546287925290027, 0.597678599232042, 0.8846788502847893, 0.6322299515066403, 0.5323812172610142, 0.9806411915666119, 0.9425477995280649, 0.7511579592477102, 0.7781673186791609, 0.8126995102356214, 0.6944184534384473, 0.8600113238895368, 0.9829246573262533, 0.9181058211408535, 0.806179863141874, 0.8521185347856611, 0.523732867222278, 0.6831174223016991, 0.6349145709840605, 0.8070347136377105, 0.5933690773079802, 0.798303543259895, 0.8035861007387308, 0.5194767279182548, 0.5876714377575138, 0.5998816338312308, 0.6607617634873977, 0.9010023928602761, 0.7092970857426185, 0.967909613871015, 0.8957784546479876, 0.7001376385947347, 0.716419033363163, 0.7488562930625045, 0.8343616454534679, 0.6859846694646831, 0.8873132739402707, 0.5629998000864833, 0.9767309246001922, 0.6945127764667778, 0.9925008762378507, 0.5958221235704579, 0.5979791579688558, 0.638662599256742, 0.8580347798733042, 0.6739333211726846, 0.9609229949859771, 0.5846027171920167, 0.6579486632547594, 
0.7619292962544139, 0.5763202385243843, 0.9844722007725412, 0.9980995167290445, 0.7494356215684506, 0.8497670584288747, 0.8829867575131294, 0.7096548562696231, 0.8440917303029388, 0.6155092530836765, 0.6554747623138648, 0.855157623416734, 0.6308051439829223, 0.7108404623499075, 0.7042803938687132, 0.939333446650733, 0.5439410543876008, 0.8080773954833966, 0.6127560106255089, 0.8169942695195915, 0.9048621075386143, 0.7660767695840212, 0.6989847461780637, 0.6025744931595831, 0.6070196809224504, 0.6365165758438276, 0.5356671205719408, 0.8492860465196099, 0.5635409563238629, 0.6544523324639846, 0.911319509034725, 0.7497824805263384, 0.7314086643263354, 0.7410596356701805, 0.8378051393799768, 0.6502220860661816, 0.9353879076364846, 0.9068042843150717, 0.5010678825227859, 0.6407876866015325, 0.9022135591051752, 0.7570097648658431, 0.963123057420161, 0.500911358234569, 0.856717645915491, 0.8133002577933919, 0.5797631878844306, 0.7682112187829461, 0.702388248713368, 0.9246435341463157, 0.7532147260775108, 0.8002778865652918, 0.9402261193912891, 0.8517303753255494, 0.988266708068056, 0.7372367051948614, 0.9127933513983603, 0.6332244638614524, 0.6499099577522616, 0.582220778065937, 0.7727680039948476, 0.5689207376572375, 0.5545257401910513, 0.9801626229279778, 0.8036267026287641, 0.8668959686743523, 0.5677030284947591, 0.6190271603416966, 0.7704101919086577, 0.6567339109712098, 0.6352322629774196, 0.530518344075291, 0.5946522340769984, 0.8501115377025843, 0.7522424972918569, 0.699565236545256, 0.9883312934057551, 0.6632918016375641, 0.5184331740426261, 0.9888908825070708, 0.9533793862313886, 0.8439796611103045, 0.9344601006241408, 0.9649121102405273, 0.7722240294318, 0.8314039121240637, 0.6403125375925239, 0.7399986366328792, 0.551223153058713, 0.7413107483297836, 0.6451059709809166, 0.7665045938715036, 0.8276853325459788, 0.6476705592754148, 0.7474529773643124, 0.7378589746483949, 0.657425389525355, 0.8228052537427442, 0.9876207344526475, 0.6041881131395088, 
0.5105916905329314, 0.7452243249644972, 0.8362996446518783, 0.7107187713025589, 0.5474450500274677, 0.6138268532346454, 0.8759143184797624, 0.6423634493168872, 0.9071394730996003, 0.9227820919681688, 0.6709462199064875, 0.6406632650558759, 0.8670267191527515, 0.5577388873257569, 0.6564524287498101, 0.8862075694741285, 0.5485911122421028, 0.5160424904314438, 0.5490746451951429, 0.7757978511804728, 0.7964567403440965, 0.6360291421378614, 0.552100819623339, 0.5102404529290275, 0.61903233721742, 0.5602929743958001, 0.8104205539486736, 0.7516434093902152, 0.9491892020570627, 0.809582716661314, 0.5350526724479039, 0.9376522829181213, 0.7980187292314701, 0.6711732289362182, 0.9638331767733775, 0.5192768598633815, 0.7054062145770722, 0.5707046159290405, 0.6793888214923139, 0.6705722637229512, 0.5178543902410787, 0.56613792191652, 0.9852291172354863, 0.8826279790482111, 0.6266176898966145, 0.9699913177143805, 0.9421534678837393, 0.7498272714650827, 0.7637785220835558, 0.6820202947098637, 0.5957392743204204, 0.9897783270959086, 0.6653188588285394, 0.9882916608638423, 0.54617069145447, 0.7171587665549799, 0.6386993847923874, 0.8542401233367106, 0.5667989338000348, 0.8387148798685321, 0.5190498645865427, 0.6773491237092091, 0.8613346702426312, 0.8310089593902775, 0.5657292643837617, 0.9162098982032155, 0.7380182327870012, 0.5839135312326849, 0.8553419592650713, 0.7708508010959578, 0.981282445063419, 0.6762198071084993, 0.8871989180992139, 0.9733942659197891, 0.9622744285013725, 0.6498734399943721, 0.8945204184723956, 0.5968594948879855, 0.559559520463623, 0.588627819049609, 0.90221550495004, 0.6318270062160596, 0.8993914744784094, 0.8264071437776128, 0.8980538183040774, 0.8872333656035583, 0.6103706292332663, 0.7410504302653795, 0.8690798678157714, 0.6564132971086496, 0.8537545102618045, 0.549186428548408, 0.666222277451994, 0.5315921388355423, 0.5762409462423292, 0.5414719734542626, 0.9725343985296608, 0.8997993607089361, 0.552074088246099, 0.8444614536413744, 
0.7547795083375085, 0.703146368904158, 0.5181690081524495, 0.5350265807506751, 0.6199824085477443, 0.9413794924122612, 0.7431466911973204, 0.7701478568876412, 0.5842518035020553, 0.8745823613522268, 0.6412291621273786, 0.5241891163897179, 0.5931074724394041, 0.5227881398468499, 0.581582110028511, 0.635505485684076, 0.5340600957566659, 0.8220731076026621, 0.5634944471936612, 0.7665433961514938, 0.568525961832018, 0.9104398166114798, 0.7726578057811215, 0.5900618745276619, 0.6965216439126583, 0.7534633765507077, 0.7486021324677488, 0.5657636926813837, 0.8230262442621338, 0.9650388658693315, 0.624822434328154, 0.5371458150275245, 0.545937459362923, 0.5020652961546233, 0.6813568014482896, 0.5733347662639132, 0.5288078758681949, 0.7279501780702446, 0.5005853349224327, 0.9039350718461755, 0.8284952117161213, 0.883604718388063, 0.7538039699723771, 0.6970753991439227, 0.6688356120228607, 0.950784238217671, 0.6109070492996412, 0.8387026554989451, 0.7159971230127539, 0.5684811993246566, 0.6848801781882324, 0.7573659895938796, 0.9080609841323737, 0.8087733262552482, 0.526167251740054, 0.5843020390057706, 0.7950546564769834, 0.9564387020285701, 0.7399191818310287, 0.5167695911834485, 0.7043441134480821, 0.8647735606518205, 0.805543831401889, 0.6788229826846823, 0.936484900849745, 0.8499027583145838, 0.9925723502611259, 0.7801493515709397, 0.6223313055850015, 0.6891695802604186, 0.6419504869469161, 0.9067593963031155, 0.8133798020027384, 0.699861969856153, 0.9132783178443987, 0.9635786810044835, 0.9837630737232537, 0.7190984113686374, 0.5068832790063472, 0.8025485790756941, 0.7234934676121572, 0.7204767250495104, 0.7596203032249237, 0.8909057350688042, 0.8217747340729487, 0.5379858197498273, 0.6082232041739084, 0.6180165193871625, 0.5176958720187006, 0.7678780001622663, 0.9833318632481636, 0.8698167608877534, 0.7787377179460828, 0.790181659533197, 0.9862973988944725, 0.8573298224771931, 0.7312516370354838, 0.6778916203227394, 0.9652980266235556, 0.762724957136334, 
0.534287650974718, 0.6483225024922047, 0.8031871909309451, 0.8365184246187242, 0.6660836055303921, 0.5422858087785087, 0.950918191841827, 0.5904758923102564, 0.6787030210507159, 0.6451030277442892, 0.6897884497630882, 0.6072669093896331, 0.8558937489505363, 0.9552923531987132, 0.9400529273118444, 0.729605780438537, 0.5572855538926249, 0.7166380035904023, 0.9505559785056462, 0.9974022073397573, 0.8139105910043838, 0.8182211528343335, 0.7922163673787705, 0.9884145045906232, 0.8768501518539151, 0.8909740209209359, 0.6196389999392471, 0.8562501669922118, 0.773438183655137, 0.8467135012302487, 0.7477396471776081, 0.8431902867028358, 0.6476058808518185, 0.7336659290734897, 0.875540950447818, 0.9316655004981974, 0.56353786855283, 0.7017737382792312, 0.5941221417244399, 0.7728823259477106, 0.6560157186267701, 0.7943106750320005, 0.6245381550656842, 0.5094326349098621, 0.87475632334551, 0.6332620227875889, 0.7292341742165769, 0.6305193401301257, 0.6434851662741208, 0.9219806964628621, 0.7819335306525359, 0.6509965526844219, 0.6621523112656733, 0.9614283703121764, 0.7459636281902615, 0.9533599481344296, 0.5743080481506482, 0.6609326613643105, 0.7708502837938821, 0.5355146526250714, 0.6758259519719573, 0.8061492971616336, 0.5524281563913104, 0.6485666194138588, 0.8652251095915611, 0.6799424058595207, 0.7208081144824017, 0.5955226110500577, 0.6899025855908881, 0.7354379136526205, 0.5523911306789493, 0.5157209813365808, 0.8702283032900038, 0.8440715372634212, 0.7296896589408493, 0.5987234600198093, 0.5505750377742955, 0.5965676211135829, 0.7478898715775867, 0.9592439801162957, 0.6119651303447076, 0.9209972899993574, 0.7560772334305053, 0.8694916687224481, 0.98274767814108, 0.9711723511918628, 0.705035703162124, 0.8213980480412714, 0.8396502166576072, 0.8623825060476598, 0.613176758033498, 0.8383250336550057, 0.6089469304264609, 0.6926288615863125, 0.9231773944455417, 0.9510483637419882, 0.8422814786729875, 0.7336203899188185, 0.9268287525498597, 0.5722259043115312, 
0.6122160143250082, 0.7115881668181033, 0.5258207459273239, 0.6897173642674074, 0.7545696555290331, 0.7028748142956739, 0.5920560915181436, 0.6305721983191825, 0.5599615295183049, 0.8094993557939454, 0.5006631246916906, 0.5676861935248256, 0.7817928392270017, 0.9858979070736338, 0.8597833198922227, 0.7359937308509391, 0.8771961512048072, 0.873736687278975, 0.7997728069750321, 0.954480632516743, 0.9809589473391367, 0.5871128382755648, 0.5090838873099327, 0.8544182629313449, 0.8431824269388658, 0.7859613795955045, 0.584125176959931, 0.6748094706785174, 0.7505898152815256, 0.9246019331824926, 0.9797787529411071, 0.620770710647897, 0.5034011056942463, 0.6191125811707758, 0.6177235736290275, 0.9018743359703183, 0.7638766460273363, 0.726816842025744, 0.6841568769404631, 0.9251892906049757, 0.7720537341909475, 0.7223988885963942, 0.523424011972349, 0.756104892050007, 0.9897174740508972, 0.5462348603286602, 0.9858704147792273, 0.9665027470532734, 0.5406307552997291, 0.9334026941396187, 0.6688979649485198, 0.6622858263985272, 0.7559290886007554, 0.8075955364936863, 0.683099701275909, 0.7683694217964472, 0.7943301999742723, 0.8626176470172711, 0.5321148288549546, 0.8621353411013707, 0.9769561732767487, 0.9832958338368774, 0.5067766003846665, 0.7162648220834398, 0.5690371111557195, 0.8071957948162862, 0.9148968808393183, 0.6632628030419498, 0.7288010747913035, 0.6368589185134463, 0.984915954570455, 0.8610395997768026, 0.6442803894691813, 0.5983510255292106, 0.6187861950259554, 0.7464328475873279, 0.9676985570860626, 0.5601891056914603, 0.5676105091840788, 0.7161435430830188, 0.8488994504446536, 0.8802194042081778, 0.8815484860546328, 0.7396252960298021, 0.8900264184413991, 0.8196929159159341, 0.6338341021796803, 0.9491573997291023, 0.7250017618605061, 0.7393374878977477, 0.6634888068239003, 0.5002133324294751, 0.9482445657140255, 0.7321922235191611, 0.6449813985713104, 0.5611403124824423, 0.5139518021534695, 0.8364938648387541, 0.5949106838029408, 0.7642236290606463, 
0.9867852199180102, 0.9001953236666607, 0.8338358292387249, 0.7326727835129385, 0.9289604184127332, 0.5818820924292092, 0.9196106633049921, 0.8247986169638404, 0.6428879717983158, 0.847921797555904, 0.8301431409551835, 0.8420152654644588, 0.6395464897143612, 0.6943363643691344, 0.800283702657308, 0.5699748525361599, 0.646212421871912, 0.7378543424870895, 0.9945879346759688, 0.73641866094961, 0.899063263656485, 0.9403173995126992, 0.6023969867161139, 0.8174013990877507, 0.687010325544798, 0.5996434278840846, 0.9568585624536974, 0.858380921698598, 0.5288403909812671, 0.8230958365898113, 0.8317200906444993, 0.6380012965323294, 0.5883999449684438, 0.6118507162998352, 0.9194031714440449, 0.6985664617199094, 0.9802188297451748, 0.9576292987914593, 0.8828809873970835, 0.6454046684597188, 0.9002598940951037, 0.549332828232475, 0.5313302308039773, 0.9328232913375565, 0.6171745165465009, 0.5567268356853643, 0.6913179438221718, 0.6622905942884645, 0.5783261660919164, 0.8367354070861979, 0.793393253453877, 0.9483358748606541, 0.8140213438190338, 0.9741312875029186, 0.974764012449906, 0.6311466110791517, 0.6070168020222835, 0.6569021372702208, 0.9853512292734277, 0.9912670149687963, 0.7082063712508785, 0.7504484053579168, 0.5493057237810139, 0.6443324643983244, 0.5247777864590919, 0.806932107477116, 0.6575990804008911, 0.5399787223761692, 0.5194173091983443, 0.9421427804611395, 0.8251284613489177, 0.671838299780877, 0.8383318199807208, 0.5181156918541963, 0.5688398331629057, 0.5106950037717746, 0.9831253641825289, 0.5753753129138872, 0.9174733590618147, 0.5006334003339404, 0.6169283116271267, 0.9548910664256063, 0.9686920569579851, 0.7313303493272103, 0.9064580188421869, 0.8750948612466151, 0.8795672043547182, 0.9585077995155626, 0.5430588317245382, 0.843282799883136, 0.7367198714104896, 0.7407154324533831, 0.5788869485115818, 0.9271611410340839, 0.5033095368122066, 0.839645517900971, 0.6989412337447727, 0.5512482404264752, 0.7462909391963041, 0.6747481899021995, 
0.6382162386636141, 0.5016095749233314, 0.9244356380050298, 0.525517429730898, 0.7892848065650835, 0.6744725835103031, 0.5877816201989499, 0.5351293655452608, 0.9428421579954995, 0.8588284456543451, 0.8205350277851527, 0.6579487357952919, 0.9683844525229272, 0.8294277571062018, 0.6261574420335612, 0.8085714240376758, 0.5413004877655526, 0.5248652583610904, 0.6651887201006249, 0.8569633275243704, 0.8132016019378598, 0.6670023477555598, 0.7416578177232844, 0.8479421430479994, 0.8135966441335819, 0.7944675102941006, 0.7954125901854512, 0.7478495619026826, 0.9206067487081031, 0.7791802899051199, 0.6046166525180403, 0.5442214097773505, 0.8339544320105079, 0.7638572678266764, 0.5604810675737664, 0.651780729823253, 0.7774095921213326, 0.8038750252945328, 0.8847082113799034, 0.9839361799717974, 0.806928653787206, 0.5568865556666117, 0.9333882111333471, 0.5689921886241716, 0.9179758682763774, 0.7036322751740154, 0.9517882613754789, 0.9184539956148371, 0.6156499710830381, 0.9002948435359714, 0.5437491215398236, 0.5087118492436296, 0.7581124451989631, 0.8046216917643716, 0.5888551001785721, 0.8428864139630373, 0.9819691231229266, 0.8874029530438055, 0.9481160404919116, 0.7301778269451996, 0.8007210774211346, 0.9875857287079328, 0.6477440832709934, 0.8451952040792712, 0.713045043334034, 0.5575542775096058, 0.6062838180531767, 0.8177267328320132, 0.809469284447822, 0.8950358838556532, 0.5297537635508214, 0.9376804481630359, 0.8538648007422315, 0.7065942505020391, 0.5665228449355488, 0.5228223772071805, 0.5423031980166386, 0.607217849048903, 0.6533138747894829, 0.8231053059145119, 0.6781137108080908, 0.8199231897345745, 0.6232125817139806, 0.8870960992574561, 0.8865291445470856, 0.6708518075661645, 0.8772691317811334, 0.5554318222652628, 0.757163529639906, 0.603094294880153, 0.8270636252902517, 0.5756403152426864, 0.5591352214168444, 0.5347459090121057, 0.5594076819820335, 0.9583150281758726, 0.7366972374247882, 0.9031893024012025, 0.5040667828246848, 0.9963425744905081, 
0.5961566310561349, 0.9325727773120126, 0.7055607659966745, 0.5465757470595469, 0.834254897407683, 0.8426215002199884, 0.6902307723602101, 0.5385999706277449, 0.5891312612050861, 0.7314259389858543, 0.7750079583840197, 0.6922920637022764, 0.6042398649550216, 0.8311768527492058, 0.7255041097152661, 0.7091958217048322, 0.8965424355676423, 0.8874828429178564, 0.6346367688262227, 0.6771334767672541, 0.5030978709083329, 0.7932085535832518, 0.8090073536554385, 0.9276470697917962, 0.863492440403969, 0.9540487636052711, 0.9979044116261868, 0.874426262087324, 0.6209845122023903, 0.5198117267825215, 0.7596010991114961, 0.6068816050869283, 0.8574044181754588, 0.9260263969922107, 0.794729410308298, 0.9535337464467168, 0.6720529206895146, 0.8428869858405797, 0.6451747583832961, 0.8200086578777286, 0.8860360661879199, 0.8196967914636301, 0.578151495039846, 0.8935200466826546, 0.6446373538422607, 0.5052022938424945, 0.9956835877991598, 0.826139232594911, 0.7380467608985413, 0.9855521218632064, 0.7474812575129022, 0.7466608262822564, 0.9195414640626944, 0.752632406412755, 0.5621881940322164, 0.5704602961659846, 0.9514318356153415, 0.9772552616344494, 0.5302083863710287, 0.9407377601962742, 0.5888039834375854, 0.8594303058828564, 0.7064955173583567, 0.7082545186828353, 0.7312958081479153, 0.6489791590127216, 0.8579933269475633, 0.7272272113144742, 0.9330648484649944, 0.9952361179392788, 0.7378593913006237, 0.8912814795088153, 0.9682964060587385, 0.729313041964013, 0.8009444550565927, 0.7264340552044897, 0.8331305369425474, 0.8664551450346678, 0.7657398067593949, 0.5371753005003992, 0.7299137430168663, 0.9694946736075795, 0.55331776265035, 0.6445032801597834, 0.7340397196310149, 0.9489200607523061, 0.8050946527795853, 0.8590708055062088, 0.9079232102868682, 0.5648513297739572, 0.774600402861403, 0.532759765345661, 0.7385416640467504, 0.9244484483442308, 0.977721626522797, 0.8429151846753038, 0.7684286975489876, 0.5103172615544174, 0.7371665578204386, 0.8274208183909645, 
0.892980306351551, 0.6230659759562733, 0.5042029029205659, 0.6268966537828067, 0.6838890701015994, 0.6175414431778754, 0.8790002098902838, 0.5323216596531026, 0.9575957369202353, 0.9740918443914875, 0.8257288272254149, 0.8863112910667941, 0.9497354724880929, 0.9480834226983833, 0.6551481238154622, 0.7207744037538291, 0.690800023806602, 0.9779940687140459, 0.6559961849988476, 0.8041515215359362, 0.6953377892793442, 0.5628741689431128, 0.8543536026494406, 0.8670254924639698, 0.8851426942397442, 0.7980586814110158, 0.7162483417183623, 0.7954094731104109, 0.575692941040346, 0.8879975034786929, 0.8031850733093131, 0.8814121587619183, 0.7766552066972554, 0.6418224446295058, 0.6780076155815886, 0.594016394795644, 0.8382025447968245, 0.9240069640185535, 0.7725024393805898, 0.7071985091391314, 0.6175965120889639, 0.7564592116702091, 0.5915212984007185, 0.9901054185845639, 0.9298939723531907, 0.8040734157222291, 0.6400329474296991, 0.6766570142406114, 0.6990236075272397, 0.9573790116003464, 0.8443822793201834, 0.6347101122929197, 0.6634743358176058, 0.9653887406313275, 0.6649421803486476, 0.9040612313704776, 0.7642695199737348, 0.6113464446186639, 0.56513964405314, 0.7825180171618015, 0.6215556025334783, 0.906198726943308, 0.9062670303755653, 0.7058078621837556, 0.7563876244904701, 0.7173995522357026, 0.8992003148344568, 0.9891158109416034, 0.7081827651516076, 0.8552852114925678, 0.5010960531584929, 0.5966687071289287, 0.9383065848803498, 0.8699244646449283, 0.8151704944137104, 0.8757804350274532, 0.8191342283799983, 0.9628724632221091, 0.8604748821000365, 0.7711735179263042, 0.6172476794434352, 0.7325772066916841, 0.5651464746469759, 0.797623009846303, 0.9082364121582304, 0.501988037716937, 0.7688904962756095, 0.6781373818568983, 0.8811996846393295, 0.9107032373253574, 0.6499384426659945, 0.9358528937119429, 0.5884720029639867, 0.6742114807619507, 0.6808918155961305, 0.9626827814426796, 0.7947058720637844, 0.7207659778363282, 0.709694547476996, 0.8101367507347847, 
0.5187460462465433, 0.5457232908806744, 0.7080378543458337, 0.9876310882410557, 0.9530365341392568, 0.53275739336059, 0.9893943544032042, 0.8657433510066055, 0.5134422190027759, 0.5476263797984994, 0.9097252245205186, 0.7818469079161963, 0.7905011853070476, 0.7810908569595391, 0.6611346697691549, 0.8270056053170407, 0.8356362594460801, 0.8051210204282475, 0.6619503863611347, 0.7932138517161758, 0.7377249981298051, 0.9240722087257494, 0.5705018424916228, 0.5529281207608724, 0.6551230125875507, 0.9116132412079875, 0.9973071087977619, 0.7794050585425514, 0.8870287301650124, 0.6240574331703597, 0.5342837662229488, 0.6037246590045198, 0.9721117990855233, 0.5198391670771836, 0.5421981918441617, 0.7624535652001616, 0.7207617779447755, 0.9136276233777787, 0.8307801444177153, 0.7991840759456349, 0.8822559958758872, 0.9460652434687777, 0.6364615402034793, 0.7027421205989768, 0.9572896406868436, 0.6176575787253783, 0.9900934191831146, 0.7951286453303787, 0.5609102478491942, 0.7518646080227525, 0.7622018487015555, 0.8759834603195136, 0.6207099771356002, 0.6924116404991272, 0.5623463539096542, 0.5841855360191542, 0.6198045896934621, 0.6939445783893425, 0.9861374881171533, 0.9994766180546557, 0.7339357728132625, 0.7574721583446796, 0.8749928256510482, 0.6725428933796713, 0.7206490286090763, 0.9040920787925146, 0.946221925256375, 0.901153608934848, 0.8862529054719965, 0.5044261431851884, 0.9584112416018697, 0.9116168938533191, 0.9632948049413328, 0.5174303963787432, 0.539675030293061, 0.9501058661192042, 0.6007326223193812, 0.8773217696568403, 0.976876064623956, 0.7885629368148013, 0.5529885019688923, 0.6255036662331821, 0.8086128938298847, 0.9094270473462549, 0.5690175025703094, 0.6977901523185961, 0.8002117603529333, 0.9705866465972526, 0.7810357970139113, 0.5860453273490902, 0.9007923600230889, 0.8381993241797996, 0.5168071169330894, 0.8336859640407193, 0.7201311645088593, 0.7648320114831477, 0.9281847529575721, 0.8307109345418286, 0.909105000364339, 0.859290784325224, 
0.6574331473100168, 0.5143455092953935, 0.7109392020262444, 0.7822099380146375, 0.8018694786308795, 0.6504231088397695, 0.5503077085057297, 0.6146658710313486, 0.7248217726390247, 0.7113754212583717, 0.7333285168470127, 0.9296420533798888, 0.7092036146875796, 0.8548623427110593, 0.9884957594757189, 0.66017040265349, 0.5538939617599619, 0.9078240110213871, 0.6332554843292403, 0.8373139522846597, 0.6913389415298188, 0.739076883819859, 0.5161520756452378, 0.5529293688853709, 0.5813048141728102, 0.7316773906633746, 0.8546816183933157, 0.9886688027162323, 0.8548141278880632, 0.7025340312492581, 0.7501370402401862, 0.824399994525453, 0.7110161686653536, 0.9173218457983414, 0.8940861386002706, 0.8947941581836978, 0.5008144257324039, 0.9139545002036649, 0.5755914971902065, 0.661454930951926, 0.5566891685675657, 0.92128613578492, 0.6402927574690132, 0.8393019720417653, 0.6909735101584056, 0.6445263680457776, 0.5898725619266544, 0.8020347549907376, 0.8497660681478771, 0.764601338894326, 0.6691609459951591, 0.6758844350960023, 0.7106424646488643, 0.5643459291407574, 0.592490580950918, 0.6279683368207046, 0.5993305373596496, 0.6446225691773166, 0.6633497983417052, 0.7894722966329022, 0.9917286018920588, 0.8165535148386154, 0.573811617506149, 0.524920614538786, 0.8740742105589654, 0.8413864545903355, 0.5144495512189307, 0.5601261042958363, 0.5319972051780772, 0.9053001686091575, 0.6596962583257554, 0.8702729754905397, 0.6259449827351603, 0.8223985195014694, 0.7675126109373529, 0.6820555596419806, 0.6531211648038937, 0.8726318041872132, 0.6952617147546319, 0.5823533386183327, 0.9411899101825318, 0.7936127848794488, 0.8221282782835981, 0.8364577181950286, 0.8937235169435944, 0.9665555333129181, 0.532427032331159, 0.6674442082773014, 0.5519704340346911, 0.7856921675927673, 0.7731863246622179, 0.5840456712095214, 0.8977931554101175, 0.5612619915722874, 0.7576019679903843, 0.5395595889894131, 0.8309058176314418, 0.871095201603334, 0.9204840177064066, 0.5138189828708883, 
0.9551121459311971, 0.8049195685235655, 0.959559003870059, 0.7178824932463901, 0.8654772794462269, 0.942203469502217, 0.6098932444736604, 0.6170410027969834, 0.9633509913952504, 0.9036000390585597, 0.842458129659925, 0.5053479228430853, 0.8924557396970325, 0.9568472491787171, 0.7047834641132019, 0.80039234773462, 0.6352214683892842, 0.9775300619308847, 0.8689286541698134, 0.5438233636327985, 0.8586079486729855, 0.7646132844102016, 0.6780864140497006, 0.9935077189591468, 0.7573439535135984, 0.7621635170723553, 0.9481130350194634, 0.8605045873653961, 0.596093083348427, 0.6081310242217013, 0.9054355958059104, 0.588475654245137, 0.9976674881827121, 0.7638845326230261, 0.612958358409772, 0.6984926298118808, 0.8073261917557664, 0.8014430836897595, 0.996654862374653, 0.9853637908699728, 0.8075019367660864, 0.739513641372262, 0.593340669525357, 0.5252851815274876, 0.8538878982000262, 0.6460051451946003, 0.8045268225349425, 0.7487099455936291, 0.8950382234044014, 0.6253568524548552, 0.9649213361980873, 0.7586779906896218, 0.8819149442644025, 0.9267701531277033, 0.5683557920909426, 0.5486194826600297, 0.7680245771044192, 0.5710978791049075, 0.8351226037004109, 0.6176041466561417, 0.7401150499754716, 0.8035680759272954, 0.9333327619522963, 0.529268901912302, 0.8564920798805422, 0.6744871101832443, 0.7154243865864798, 0.5025404265868024, 0.8658558459816417, 0.5402953125914545, 0.9904045796427889, 0.5698405369397582, 0.8423790849781099, 0.6686561898255599, 0.9729889920954644, 0.6510311301284666, 0.5357195866370923, 0.8879950121102802, 0.963604757056892, 0.8029629046757016, 0.9526649145918784, 0.9635183539878931, 0.5825389269103081, 0.7791498741173992, 0.5860768976907487, 0.6480645597837306, 0.5489627842686593, 0.847667571579112, 0.6303597949098513, 0.8454704076903414, 0.5192265297702889, 0.6790175711190853, 0.5002859364845635, 0.9010596430396682, 0.9629351892642266, 0.6012565167803172, 0.7094113014524047, 0.8095900063873727, 0.5225638360939665, 0.5864812378529753, 
0.7019414687987109, 0.900919974469822, 0.9528487081433082, 0.7421431967466539, 0.7952198262255108, 0.5200798136364118, 0.7327141529814345, 0.6578720476213162, 0.5654435593303385, 0.8207314516422188, 0.9512945665180402, 0.5554862832381562, 0.5424151503579611, 0.6411303855857391, 0.8708378962615992, 0.9170765640502352, 0.7592023340145106, 0.7255877006682352, 0.7103543066917855, 0.9671032409531548, 0.9489780505687015, 0.6680889075308347, 0.7911414431315255, 0.5735849626853744, 0.9301721021906866, 0.6703121698058587, 0.87213000974803, 0.7884531767048069, 0.9373454480232152, 0.6803417672436087, 0.5178927622622829, 0.7327124321806318, 0.734825981250777, 0.6552248337317764, 0.945667156698831, 0.8946624680176238, 0.9132626624031277, 0.5300554957250388, 0.9342996495585694, 0.9354313940490537, 0.6073816071379791, 0.7392373120031241, 0.9014442968453957, 0.8542413928149912, 0.5446946791795899, 0.5480014101053915, 0.6784767342108114, 0.6307110646618381, 0.5525438444088975, 0.8040743438735165, 0.6655107175361498, 0.6518946650403372, 0.9226639783217867, 0.9375516007752335, 0.5256075892426182, 0.5758158728895216, 0.5215882190246933, 0.9937380978632739, 0.7953061406913093, 0.9735685121243791, 0.9344831035560708, 0.5913456235056349, 0.6532297091634316, 0.8540641844979103, 0.7536029903966908, 0.9733436247284957, 0.7208370017625696, 0.8283998504588451, 0.5847006855802246, 0.879234136133531, 0.7322187668157178, 0.8457006555501608, 0.7179056270390978, 0.8488457644007341, 0.8854590198028798, 0.7545269027744843, 0.6477787204116452, 0.673780881464239, 0.5642603183693908, 0.8674624356669987, 0.6699202753697991, 0.5026167475466667, 0.8793850480471641, 0.6930136858935904, 0.9788298555177557, 0.8415310331291482, 0.6859326103944168, 0.9943334645780448, 0.5639575461063693, 0.6098772150852901, 0.512072714826915, 0.872188756029891, 0.7108120585350551, 0.6820685850221546, 0.931862498212394, 0.8001149017706455, 0.5325040102975783, 0.7289460903890271, 0.992106810501705, 0.7855460287987073, 
0.7954882074027406, 0.9724207550553996, 0.7543280371717662, 0.6094438674649733, 0.9916057288569733, 0.5044935829714525, 0.7378288049225992, 0.5715680343551812, 0.5956324399435835, 0.8110289634967489, 0.7890034698706654, 0.8830610983301034, 0.5383439184925249, 0.6149245541806085, 0.6570312093547024, 0.8324364382889153, 0.560270807339283, 0.7009567666184946, 0.7685298597703141, 0.6380494856021371, 0.9757000817414397, 0.8742027063095026, 0.8581115858347776, 0.7327524467847046, 0.9682627723710371, 0.9421171787284253, 0.9498812545997373, 0.8121433205756099, 0.7402943891750922, 0.6518519250732016, 0.7414994134640374, 0.5860220948699515, 0.6042256620089361, 0.9980803011851321, 0.5376666584265652, 0.8774200146155485, 0.5572822722365318, 0.9659129798942958, 0.8530742354777403, 0.9440257005103037, 0.8146702119070843, 0.5425807563712093, 0.7525964829217188, 0.5067266496823191, 0.8982738548729531, 0.8640593623038115, 0.6024755839420011, 0.5472539562413374, 0.7310357613127889, 0.7345061141530311, 0.5514432738839602, 0.8222571543952855, 0.5985267806223528, 0.6732918763930583, 0.5126243328989664, 0.602796282627811, 0.9038545848411776, 0.5562992497109132, 0.7283594768493142, 0.9670759303445331, 0.5216873204510885, 0.6241677935340629, 0.7871071485298972, 0.841720312119644, 0.9058795299614906, 0.810518375040237, 0.6297791485875859, 0.9013005167493369, 0.723277442337037, 0.6425209146068098, 0.7604141137233287, 0.5304294712548273, 0.75856439304786, 0.6524766883604416, 0.9639691500910417, 0.8618385452938757, 0.5567898430475899, 0.8381824102683619, 0.7792243049176761, 0.8408723605500423, 0.9424684770208771, 0.807956516881617, 0.5740012003734094, 0.8074108870587978, 0.7889621965497904, 0.7301711435985905, 0.6271688760019598, 0.7378048545617066, 0.7088228581417728, 0.742920402008657, 0.7036521840415506, 0.8274060410637236, 0.8086861857039643, 0.5332580927481602, 0.74115365870658, 0.8559384111588787, 0.899952057459892, 0.7140958216052637, 0.833086475541988, 0.9516759901812106, 
0.9911054820308663, 0.5291949776766393, 0.960874555869114, 0.6935351299678651, 0.7328514708920983, 0.8860194775565537, 0.8895181134238533, 0.7413263223134978, 0.6499565797012212, 0.6731080883878361, 0.5815320299416548, 0.571400315997216, 0.867849094038181, 0.9669198492749282, 0.5325756053658685, 0.9168638011193779, 0.7401761492162675, 0.7471763649367035, 0.7091181549585397, 0.8471896309903535, 0.9560409000387446, 0.8833557822366912, 0.8828767520365803, 0.6314127309325901, 0.7911161202614427, 0.9730997540607103, 0.8889054760589943, 0.5369510319562425, 0.8317484651900302, 0.6007777792604667, 0.5039725482444919, 0.8991921461070849, 0.6793840184287465, 0.7752310242605054, 0.6145210124418097, 0.8168572468844643, 0.9119425150454119, 0.6879030535411663, 0.9310346081871232, 0.595779677193236, 0.5192613422104018, 0.7110934661573849, 0.5709080375000151, 0.9364641540684039, 0.6718330224892378, 0.8303723184388033, 0.7975777299941886, 0.7982935607573688, 0.8743955628722373, 0.8732118688931778, 0.8150494660786098, 0.9624585917440953, 0.7577797651035538, 0.6882277779996213, 0.699923189667233, 0.5190616044159753, 0.9652602928562288, 0.8900749256426256, 0.9494196024779727, 0.6627513407472663, 0.7876589221303525, 0.8484076340520995, 0.9174413327401914, 0.9485058556233936, 0.7814688772509077, 0.663044185019569, 0.824597324495491, 0.5269046526923491, 0.5568788682220607, 0.9641361239791033, 0.641622181307105, 0.6829348630338041, 0.5444225408310093, 0.6033644874796169, 0.8007338825810714, 0.5014431396129797, 0.701479849527594, 0.8476462596511152, 0.7810154834519312, 0.7897013803579702, 0.7998700275421238, 0.8682668149727999, 0.8355594308095591, 0.6262694162723943, 0.8421018490111611, 0.5176977486784391, 0.7106495203472245, 0.7171781612217167, 0.6043502417887314, 0.7256638988405997, 0.7405708612090589, 0.7898958374508326, 0.9834379865857401, 0.8241143937749215, 0.994286703234658, 0.6464920801319386, 0.7090127885331459, 0.8275568888542515, 0.9934175355776296, 0.5853115211382919, 
0.5079279897718372, 0.6144374343827109, 0.6923463424204155, 0.5839217276823967, 0.6901761505227715, 0.8016219066543114, 0.799418360025058, 0.7226090406901493, 0.7585231414246715, 0.9196491312109709, 0.9934552489724622, 0.6597077112160836, 0.964582477964262, 0.7394979322535291, 0.9020581194005117, 0.6650645644693916, 0.6198910086549245, 0.82693982034643, 0.7583803147756455, 0.8763641663105386, 0.7963197256219849, 0.5302979939165374, 0.8504726857907318, 0.9102717707533177, 0.5409788550064714, 0.8296611349255703, 0.9729423159372723, 0.9580459476622217, 0.8026624512823642, 0.9700083453669249, 0.558263776602713, 0.7835469912854751, 0.704240761274481, 0.5855543032210517, 0.6267552285098794, 0.8630917050223377, 0.5132867092349506, 0.9405822827724857, 0.5876536361925431, 0.6785655031621978, 0.7227235303076278, 0.8145186024755074, 0.8937518042037041, 0.8614231843870808, 0.5774819718136622, 0.5254661531451869, 0.6990178624384116, 0.7362328496487699, 0.8570423329145194, 0.6227801206455277, 0.5255950338377362, 0.5129393346497606, 0.5388260432232255, 0.8062493209840043, 0.7538526491672672, 0.9707362910007535, 0.6866577551241418, 0.9314098030297059, 0.5758154344981989, 0.7438560549154873, 0.8934312573080347, 0.9178478088008833, 0.6548190683302479, 0.9974534093096936, 0.7433525208825447, 0.816165042268614, 0.9242934494018713, 0.5146979468157928, 0.5181073064698134, 0.8652069764597382, 0.8062466737145421, 0.8495706986520164, 0.7641522134703127, 0.9792274857782173, 0.8597714432378919, 0.8943467163999336, 0.6130565074562011, 0.9003697851507304, 0.8191735864928249, 0.9168564641409518, 0.9656020557939216, 0.7106585865255974, 0.6169676716588577, 0.6643804287966677, 0.9690239069420783, 0.8487796705520214, 0.7303914647686736, 0.9770095750155487, 0.9184769086434745, 0.5599792975718205, 0.9703232676458453, 0.7824688847837878, 0.6067095276962122, 0.6329120386895555, 0.5251385283920103, 0.7920832308802662, 0.8586985439510537, 0.7073158822122031, 0.5999828918311103, 0.7103185722663545, 
0.5624902184071257, 0.6030405694404699, 0.9878505145947867, 0.7567111183987024, 0.7080670858605848, 0.8787488560983544, 0.7730528953338005, 0.5126478023475725, 0.913669870344701, 0.999918708188424, 0.6439796753824638, 0.9629493997388605, 0.8530886846037006, 0.9765071949527935, 0.7438723268594354, 0.5535275894329275, 0.744998710162694, 0.7554275980743763, 0.5974271074174917, 0.7623752602963731, 0.7356251412423941, 0.9254977635687465, 0.5994495913987649, 0.794882397012932, 0.5400976597785363, 0.7197586142413799, 0.7029026605779529, 0.6997736864662375, 0.6540198322863952, 0.9207143663382353, 0.7126159795133034, 0.8072817314429824, 0.5789717832710446, 0.7468691489238302, 0.9597486578385206, 0.8309388441119931, 0.9943375615290231, 0.7400398995543178, 0.8739745050970784, 0.5018169505980701, 0.8499321425213002, 0.5381469573712689, 0.6119259760101172, 0.9394372818513517, 0.9664829940288728, 0.756428816248109, 0.62229996172531, 0.6467299627265314, 0.6184692529255922, 0.5724447248958674, 0.9504113319493129, 0.6560636707934493, 0.9798337408257287, 0.5925485343149435, 0.9210214847352864, 0.9290637083112595, 0.5185794209023, 0.6595389889503143, 0.7839143397548436, 0.9344650931573959, 0.8107941537447956, 0.8977047906415296, 0.8061546159615478, 0.6422334729338721, 0.8000986368846914, 0.837570266653199, 0.9479697466126247, 0.9769436369121411, 0.7591514659860515, 0.5217794934901705, 0.6631037906089404, 0.7014453278559694, 0.7056375901029361, 0.7038344343752501, 0.7229378357223195, 0.8985101740364086, 0.6093419157157274, 0.7599888985998874, 0.8139368753100757, 0.8843076396783626, 0.9711546153449142, 0.5131260354819585, 0.6553766156309444, 0.7333930133407842, 0.9719615967033552, 0.698593789946456, 0.5234581055395304, 0.619225168708301, 0.6445926153622051, 0.6470665078131208, 0.5579855498360009, 0.9190774067944413, 0.7277594278451477, 0.8544711446206477, 0.9712278434262664, 0.545939071755383, 0.9333308592218919, 0.7867782716922255, 0.7301251346348249, 0.952514657228656, 
0.9523613611783056, 0.6452672002485345, 0.8044399353937917, 0.6138550762125795, 0.5665285575287085, 0.8469881596276206, 0.6954085305129096, 0.5222198081759082, 0.7408868776476987, 0.9576262365238187, 0.8063626506541061, 0.9253587463590214, 0.9543898361869803, 0.8267814969360034, 0.7466903787156184, 0.6337929180048658, 0.8688805024935999, 0.8394161251036802, 0.5282105074497467, 0.9324429290650151, 0.6141618400789814, 0.974476253327296, 0.5906030290210037, 0.644461727507643, 0.884180528648969, 0.9210048887406175, 0.5195156907723093, 0.6996159790368073, 0.5062985126463901, 0.565542536970985, 0.7525659120727259, 0.8870428752357264, 0.7274871212181603, 0.7150558729994017, 0.7115355524511637, 0.8425320417187991, 0.8707635400745819, 0.8421704677831912, 0.7450197373758904, 0.5475605571254497, 0.9772406408403227, 0.6792523721778114, 0.5149403218385082, 0.5095467469544753, 0.65398220543415, 0.5514566508631242, 0.9697800682253603, 0.6708792526830798, 0.5189015299298219, 0.7246172123907746, 0.8273118586226627, 0.8397315710387439, 0.8581979726569777, 0.5642585957281734, 0.8930215548344725, 0.806961656340923, 0.7646812317260139, 0.6988759290851154, 0.6492313752057677, 0.5686666386545727, 0.7717383051237483, 0.8815174827061891, 0.6385456097120372, 0.9933262472779425, 0.9190152714489946, 0.5459438717254639, 0.6557845379795828, 0.5457781804710642, 0.8835665071244001, 0.8201825646156689, 0.5165740572898071, 0.7526957870601729, 0.7921442376453898, 0.9553669155828027, 0.7679951135530914, 0.5207401602988775, 0.8719544026746546, 0.9405527028112612, 0.6405986472272339, 0.8808614231280492, 0.9697745002832792, 0.5686493881866914, 0.9858924954763013, 0.7858382148589275, 0.750461473755045, 0.8192813683801623, 0.5238130356469359, 0.548474461158597, 0.8446346533047451, 0.7856658179813648, 0.9785386461022298, 0.6358673587576635, 0.6483837289148124, 0.5685160830054268, 0.7567524739180483, 0.8462027346105097, 0.6525058748540521, 0.6032819198111774, 0.8578762917208491, 0.8561329419377326, 
0.5375403953962847, 0.8490108251080078, 0.5291565593754215, 0.584176862653752, 0.6915874037293241, 0.9801527323460046, 0.7227781774341981, 0.8068401329768774, 0.6705277351440266, 0.7266884861567869, 0.6416769271588623, 0.6742031294754965, 0.5359402445715485, 0.5050377736672225, 0.9813349413268445, 0.5868868391463163, 0.6033336940883192, 0.8662263817808378, 0.54496082752472, 0.7027708564229318, 0.730546256161734, 0.7502766849234022, 0.9699206394269683, 0.9456798110696089, 0.9980807935744795, 0.9015077791088165, 0.7854239811815253, 0.9977055997713169, 0.5275427567627384, 0.7872186478894312, 0.594035843970514, 0.9136259285599106, 0.8480678598518452, 0.6980773241928999, 0.7531282996536157, 0.8431805824709304, 0.8795131348649945, 0.5111480769628107, 0.5862298930620226, 0.7024153513731018, 0.9931076827101044, 0.5749410092915832, 0.7520412423449729, 0.6327851774984369, 0.6173204609709366, 0.5642466971865178, 0.8826667585530081, 0.5016799794916267, 0.8288560095894906, 0.7291524893613474, 0.9467608512608573, 0.9013399573314715, 0.9100336611907787, 0.8525980534566415, 0.5162044291014531, 0.7739587829533754, 0.5256048106616635, 0.9287200417214254, 0.7873635630755309, 0.7496198946936583, 0.6999027824410239, 0.7514597799148746, 0.9938029958394521, 0.6470667100656019, 0.5742818942872387, 0.9522484916180212, 0.9277595228354928, 0.5718186338384794, 0.9015461275137444, 0.5545338135268492, 0.9735821263214064, 0.8540634871454303, 0.6018289422923582, 0.5694399684473765, 0.92775795376036, 0.989784237393845, 0.756177743223176, 0.9619846840514625, 0.7950335875164432, 0.5330454345564335, 0.8297251483458634, 0.5955972671049539, 0.530669659722911, 0.8020476129309966, 0.7346568838483789, 0.7311313118468726, 0.9186772851202187, 0.5643540708016965, 0.5231991993684042, 0.5976666010080475, 0.7376144986282676, 0.9588868176879566, 0.6348555199309298, 0.7072103733941362, 0.5679864906033341, 0.5810957413246497, 0.8685830219198101, 0.7333995531808541, 0.6730107650444994, 0.6370864243325524, 
0.5149755411418743, 0.7865191768810916, 0.5565926324861752, 0.9675705288778039, 0.6531013790726727, 0.9021643409721294, 0.5886573398868753, 0.872254839227059, 0.7765472519313995, 0.6189227217589996, 0.6515166675126096, 0.8023179559385887, 0.7713287876049438, 0.9346392307857893, 0.9148259647917611, 0.925684485038944, 0.6073439871916215, 0.6042807642994896, 0.5087079954356757, 0.5763032402179011, 0.740440850517291, 0.6463142449062094, 0.9674180823922037, 0.7477562803607278, 0.829830385851762, 0.6495336532713432, 0.9906676368887066, 0.6892883720666934, 0.8060183682298594, 0.9809144203707811, 0.8949563458064839, 0.615416589219284, 0.9741570677861509, 0.9414763236222583, 0.6208922531639671, 0.7861770142501945, 0.5464447616059116, 0.8961359522161199, 0.7580976420038936, 0.6663955323223925, 0.9777511091469184, 0.519838818355672, 0.8873207826885912, 0.8593502222317541, 0.8056522280709026, 0.7626029777193443, 0.9847503577622619, 0.6568733520137819, 0.6762807058369137, 0.776802742806078, 0.8405290029909902, 0.636024734820595, 0.8676205621366939, 0.6395419769905688, 0.5831339968468363, 0.8617030625943154, 0.7491400731488127, 0.7894596433812662, 0.6449057394342517, 0.7340708898177761, 0.8301306680685023, 0.699571299257205, 0.8433843841134683, 0.8585711222866501, 0.836909622960043, 0.6563471542857119, 0.6825844790134186, 0.5322180234359055, 0.7174614075307884, 0.6280622689946939, 0.7687271936412682, 0.6228701067166494, 0.711502168700284, 0.7654664880746473, 0.9230703164240847, 0.8799599320028477, 0.9845245618708536, 0.6765474183979379, 0.7423062471732199, 0.5327672155543122, 0.5502633838541371, 0.873628189614569, 0.6707539466892094, 0.8624279960373109, 0.7305898875841086, 0.6269046293225824, 0.7131627891837911, 0.7031686542506326, 0.5538022124742048, 0.5273086261528364, 0.8002385766659954, 0.9678263545537451, 0.9601132550728375, 0.7378865982042333, 0.6753627413897032, 0.6115348897402084, 0.99353711118999, 0.8903567706080096, 0.5660648694192894, 0.6392911820679104, 
0.5519514865028583, 0.5032544209417813, 0.5058080767985631, 0.7492712811112723, 0.6204104527384638, 0.5772441852196428, 0.9168657740507214, 0.6711472817538271, 0.5569968034672016, 0.9914832604507591, 0.7664916980893017, 0.6983003846134761, 0.6220336529938084, 0.7163522370467093, 0.9086071288872859, 0.9206007621124379, 0.9815561696567198, 0.649987701735794, 0.8486644504371357, 0.5091022712514166, 0.5170113866586117, 0.8125974002529215, 0.9103372995765238, 0.5840513529453529, 0.974663130034603, 0.5447148528664671, 0.7850234799470661, 0.8000090974103926, 0.6186770676963365, 0.9685388826645669, 0.5427125961573154, 0.8108143305295429, 0.6868065820670468, 0.9201464367126359, 0.7805295790088425, 0.7981295656252361, 0.7825198489864529, 0.77136167823369, 0.5411351185982114, 0.7490848069026367, 0.9109960710191649, 0.6954221061482209, 0.876132313964646, 0.7839188535479105, 0.5959065916127222, 0.5899831055866356, 0.6216391538050359, 0.8164872475100551, 0.5276525080441619, 0.6039237663923909, 0.862575503358074, 0.6559233160488537, 0.8958656123095456, 0.8182065388499276, 0.7357542640222553, 0.8269994064343824, 0.5962880899775413, 0.9457610659174791, 0.6935933741215725, 0.5055466827680736, 0.6586174511031547, 0.8425559153201212, 0.8546397564450383, 0.9406903556570017, 0.7455708325410179, 0.5903310171084577, 0.7192930814583582, 0.8240498696426326, 0.7111323193013765, 0.8405592345761767, 0.7933556706308176, 0.9641798353772905, 0.5651267541300569, 0.573692854175055, 0.6180623280652198, 0.7935515255468684, 0.8176465163657425, 0.6275453984807688, 0.80490807851976, 0.8578686849524206, 0.5986439572121993, 0.55931038684341, 0.6075778825653244, 0.8457178426778365, 0.7234331634575818, 0.9508054903140561, 0.9498119019715562, 0.8425917777828119, 0.7742461050967493, 0.8365273689748934, 0.7463650485870141, 0.9834727292610299, 0.6106010796959975, 0.9678572380873057, 0.6058223853710492, 0.8705510293687646, 0.6714905710294655, 0.7953661400520085, 0.8089756433564692, 0.8943386722407773, 
0.6424397651169985, 0.8520094605458445, 0.6334585115398452, 0.6343299703752228, 0.7396958429020348, 0.7668931344019784, 0.7632806778951264, 0.7501548878383469, 0.6337698827979461, 0.5252165285680201, 0.7058042357843524, 0.6810396789742044, 0.9721825906151087, 0.9626393867262901, 0.889827485610013, 0.7072260034489506, 0.9780115765918089, 0.587366632876523, 0.6825849011112375, 0.6433272096689883, 0.9113476885846181, 0.7056442033130179, 0.9630690619813721, 0.756938784827289, 0.9874013432499165, 0.7454150849254997, 0.974164390821676, 0.7263047774834615, 0.7877879239879233, 0.6805929555515541, 0.947796401346259, 0.5726513858244506, 0.6363891927087915, 0.9404317843981893, 0.715321152804641, 0.7650750391907679, 0.9559957688000648, 0.8167712126317009, 0.5874156961558132, 0.9085578930414407, 0.584923259899486, 0.6235291252501154, 0.9133829149687095, 0.7349480498783013, 0.7971030211269032, 0.6702063918959227, 0.9265274896363288, 0.9717341245036597, 0.6129753829400857, 0.6071019604306955, 0.8357802588453267, 0.9986910845148199, 0.5630112332614445, 0.6274575925889969, 0.6906982006150431, 0.808235042870699, 0.8261967873814995, 0.5036665713233555, 0.6536172150670374, 0.55115671064338, 0.8990436548854673, 0.5256096317163559, 0.832644728188767, 0.903187648561397, 0.6993563282549742, 0.7157118350063527, 0.925752246243872, 0.5069545573839858, 0.5353574902846634, 0.9206161558639334, 0.9755952005110591, 0.5678410624660037, 0.7415779544399272, 0.5947700049894925, 0.9665958552000725, 0.54366100801582, 0.8579532738516268, 0.58367786997006, 0.8337782685753433, 0.7009971554626981, 0.6792505371014149, 0.6177472542333593, 0.8911329366141083, 0.5850689718843878, 0.6085805938835258, 0.7071765634168191, 0.6473786028793748, 0.6314677345996935, 0.6408777537568077, 0.7244459489363309, 0.5911796738744892, 0.7985259579515935, 0.8718930640089426, 0.8201030951177775, 0.6902256912509769, 0.5282534318121187, 0.9225735495617371, 0.833829548484009, 0.9455471753495501, 0.9252898750103586, 
0.5984139856460557, 0.6155605137966652, 0.7052337003470504, 0.8098645151335981, 0.94320301620167, 0.6355610223931125, 0.6404968336345396, 0.5045659630434205, 0.5420438670118931, 0.9959378006175407, 0.6411545782540233, 0.8408145431170332, 0.7691090547086514, 0.855767000234607, 0.9248903507333931, 0.8925761018610521, 0.5940014271569838, 0.8462231891302229, 0.5883799583157838, 0.7325672483503616, 0.5777006130191813, 0.9487874731857067, 0.5416918080479723, 0.9803377317391954, 0.5932186772864233, 0.9911022899910384, 0.5225095642917901, 0.8748355786306568, 0.9347664112468328, 0.7671170768480544, 0.7511230630359951, 0.7926115328668601, 0.8011434755831824, 0.7467222965439742, 0.6426497493832706, 0.6129566829781676, 0.8787372384880384, 0.5768515028922611, 0.9667655444049905, 0.9123178521240429, 0.5680971847574203, 0.7475333115241547, 0.6414305613973571, 0.8948625503204752, 0.9556017568043409, 0.9301942242041991, 0.5475397491397354, 0.6304767514373419, 0.988121133768731, 0.5665144162486427, 0.8833129806580517, 0.594975288247557, 0.6333362194315971, 0.6140528177155711, 0.8875606535144782, 0.5517504461306182, 0.8545663093950058, 0.695873110489079, 0.9516770547087208, 0.9222799392232177, 0.6602375459435779, 0.8690730779586152, 0.7117527856601786, 0.7851518045040519, 0.9130485543631865, 0.6905647902572492, 0.7965035515360728, 0.5141507983349579, 0.5590169103592779, 0.5049195777044297, 0.8630657490034381, 0.7387210079032935, 0.6715606781435074, 0.858423738855004, 0.6384357014068033, 0.5907157761998877, 0.7445476104946116, 0.8532177664939484, 0.7184615722019274, 0.6853257044334231, 0.7345146737422521, 0.5172140360683788, 0.7033465125950249, 0.5007533818957697, 0.6130814298362306, 0.5387190009105154, 0.5421705292439344, 0.6317835652559851, 0.9877640471838478, 0.5870847910475099, 0.6516696734399904, 0.8297474617092623, 0.8882217848519547, 0.7883723213597544, 0.9021543930452809, 0.7877273139220583, 0.673863101581449, 0.9555531556416397, 0.6065748000706792, 0.6755250214595314, 
0.5918257499357787, 0.8115770535397091, 0.9007669235883902, 0.6903202485308408, 0.9506956866027716, 0.918307234163708, 0.8036804452827575, 0.7400459582910954, 0.5588660057289621, 0.7604899449093107, 0.8062702729823931, 0.6830616815283569, 0.6286794899869983, 0.6933959832869562, 0.8330725694837178, 0.5693679021573969, 0.9502972631054072, 0.89914944049091, 0.8434275498115091, 0.6121178735892528, 0.7255298827801085, 0.8343301955644362, 0.9721438709705632, 0.8706972822431123, 0.6163216988608904, 0.655399875644882, 0.7982833080564797, 0.7937541281420173, 0.749688167362051, 0.8821104700944893, 0.9564456980717951, 0.8585891585166625, 0.5573059636407333, 0.519034503858953, 0.7729522759547981, 0.8560497054910678, 0.8673816914892809, 0.8725992071746302, 0.7670855905010547, 0.8893910129784266, 0.9777219155279919, 0.6964699498248106, 0.7043095417782133, 0.5727946803172594, 0.9459731046295441, 0.7440043325494348, 0.9345777057148157, 0.8534085723845773, 0.7015423460088273, 0.7983207949432753, 0.6640041993223922, 0.9764924802923738, 0.8818664702177808, 0.8730142113937751, 0.9861947514151108, 0.5725962158723181, 0.7796298220005412, 0.6639611901761944, 0.8855697096499042, 0.6704378660455509, 0.9878727329331081, 0.977527002293068, 0.9714034830362712, 0.8991829189966053, 0.9736859724417996, 0.688642527335108, 0.9979614364915447, 0.5706440319954017, 0.8614862123550622, 0.8446957295876409, 0.9471830934482874, 0.6483903000061115, 0.7106941512375664, 0.8836144114597386, 0.7880472109107439, 0.9531622041226668, 0.6764464531052417, 0.5213211535905957, 0.9621362947442855, 0.732048831438278, 0.884660047907015, 0.5202631187718056, 0.6824875339779084, 0.8484004531828524, 0.8361601205007829, 0.7282176865170877, 0.7424773913834777, 0.938213245883879, 0.9869894750065208, 0.5347434378431777, 0.6591200173878077, 0.5109397572974913, 0.6106578115914241, 0.5027925312909345, 0.777064505761124, 0.6327574956355382, 0.941746806747922, 0.8656732050945184, 0.8141801367698507, 0.8118076520924107, 
0.6187681906507045, 0.8425873979275722, 0.9513368012081322, 0.5242310569379307, 0.9898108642689323, 0.909979319227292, 0.8508640245062338, 0.5133700311369802, 0.719332821717892, 0.6550689103409484, 0.8396774597806604, 0.7510881479373304, 0.8753172900414826, 0.9437719347063167, 0.7278744706003106, 0.6213384169307443, 0.6844171199082414, 0.5743665208829508, 0.6383078943236511, 0.8478110306071993, 0.826162396745944, 0.5101984768386849, 0.6126308070339861, 0.9261591917767587, 0.7283855217463024, 0.8323663837688424, 0.8463730221656547, 0.8175356547290589, 0.9230610460209312, 0.5046086434482555, 0.7186177376590411, 0.5890844163136248, 0.8439280523933059, 0.6921449251158873, 0.6400413411383321, 0.6972802741636357, 0.7611148220709487, 0.8228524555649714, 0.9544484688742333, 0.6309709475332279, 0.8839302388871653, 0.7902581189190847, 0.9583248207119079, 0.6397738619566, 0.7824907837686412, 0.8873083795368126, 0.6461890165433894, 0.8221563477767504, 0.7408830610085715, 0.5530215243206251, 0.9376623748174655, 0.9657797287740992, 0.8088349219191759, 0.7314938308252014, 0.6211945331216485, 0.7437624188995072, 0.6714226259076879, 0.7253198534831313, 0.5877802014054729, 0.757750060782773, 0.6502912169377244, 0.6750404250503503, 0.6322983518465561, 0.6674427647608943, 0.7346185122743434, 0.6513341048292389, 0.7412201151715954, 0.9594290766679617, 0.6676546556562747, 0.8045082652492705, 0.9922498975070639, 0.7807297813926581, 0.7424227134732531, 0.934667164873048, 0.8443354408428665, 0.8396120342141151, 0.5404113473170972, 0.5105534628133782, 0.7515251180779455, 0.7382693045243429, 0.9344050225909937, 0.7392266624997746, 0.9449629279567924, 0.519879736465028, 0.515809303814043, 0.6694155816177072, 0.7067335194593036, 0.5424437996805058, 0.6952919653225159, 0.7151547246339938, 0.8041545797485579, 0.7567808535150029, 0.8296603804725975, 0.5685382123526341, 0.7658350815573991, 0.8992268355126907, 0.6022349266955578, 0.9546857611367081, 0.9880862634889409, 0.6734058523870653, 
0.9696333243761182, 0.7713909983050764, 0.5661739335449703, 0.6295565921494818, 0.6771722793723769, 0.825533874799297, 0.7833985134069192, 0.587957099405501, 0.791968326202732, 0.945893284508747, 0.7880829147377657, 0.9255891479090314, 0.861502205970888, 0.9618754848457224, 0.5122788590783685, 0.5272463850474715, 0.7635671928783161, 0.7911052790554383, 0.9095953153517233, 0.9932590100192707, 0.6417056571022876, 0.933609685603735, 0.669330907762275, 0.9299488789131969, 0.8276591531394367, 0.7674309095811542, 0.9149630752460179, 0.9675729330022993, 0.6893783470378614, 0.5272059937202579, 0.7978420633898413, 0.7165306632553137, 0.5968797987021945, 0.5553764561490638, 0.5549516357146942, 0.8394045763511495, 0.7946291863129015, 0.5468355256947136, 0.9372172228655438, 0.7951317274198018, 0.8357140291290894, 0.7676027820701444, 0.7679126143898248, 0.7740061425992015, 0.5967962825800966, 0.9949641988170355, 0.9906662085135637, 0.7377911358064834, 0.6271134827750835, 0.6515599709519271, 0.9344978654000964, 0.9784029535883694, 0.7867583196632044, 0.9549592340892026, 0.7621428691571058, 0.849051833119072, 0.7354306659477683, 0.5263750900132045, 0.654879674725963, 0.665367877913772, 0.8149724137056145, 0.9497042691702391, 0.6737130488682326, 0.9844954381954247, 0.5948859703312691, 0.959602703503232, 0.7076818677776335, 0.7496246590412597, 0.9890241921714451, 0.9236520988240682, 0.6052923634511564, 0.5660739228853422, 0.5956032072035256, 0.7649903834483839, 0.9833139766861053, 0.9402580861134466, 0.7997461103260314, 0.7874360725818331, 0.5976630238869478, 0.5816224999546779, 0.5323414072417287, 0.9942026079552231, 0.8869467553748134, 0.6956286633464144, 0.9571858650701957, 0.7729678002037065, 0.6860701852983284, 0.620842413209661, 0.8493481981436828, 0.6623161658478056, 0.549378824890326, 0.9983781900991624, 0.6548200907178018, 0.8713222649024717, 0.7252468931022844, 0.6601145935643616, 0.689361729020435, 0.9651338512235167, 0.7416007466501786, 0.5125232397258115, 
0.97396494697536, 0.6689601692909601, 0.8416732299601938, 0.8787259628063264, 0.7662842215643924, 0.5279813205771746, 0.8588195130146132, 0.9313300499070123, 0.9853355008959279, 0.6822596728874775, 0.8531924301928814, 0.6547752596245604, 0.8305920670927194, 0.5966074925559552, 0.6140109846716453, 0.9492112062964276, 0.6531848822349442, 0.8473438125454132, 0.9082972014599877, 0.8712970128702077, 0.9896324667140708, 0.9685873039978985, 0.5116755846412043, 0.6410447949789866, 0.8833401550793627, 0.9287563684739071, 0.9805092681153245, 0.5389071349940572, 0.7118957418964539, 0.7180361814322411, 0.9493926981513543, 0.6499707979713432, 0.6775976654799096, 0.5173281609636753, 0.7359929573990659, 0.6652353172726306, 0.8777113113291319, 0.5409182374802826, 0.8549808176641516, 0.9447268735723326, 0.5133931635804281, 0.9958655033538839, 0.8244566710763147, 0.6543750967073062, 0.5718112141275475, 0.7452079326973338, 0.5317386013589689, 0.7903162820039236, 0.5710392972756247, 0.7631125903477863, 0.6072812488054216, 0.7848818911421602, 0.59505707959764, 0.5159547213665323, 0.820240443378272, 0.9175641583701688, 0.8173616413271905, 0.8255880046964772, 0.8653740892847746, 0.6531100623950427, 0.9751227222463836, 0.9913003903202153, 0.9256064871563654, 0.6509488932636903, 0.8554145467994025, 0.9804641683897758, 0.748250910594633, 0.6582623468530945, 0.8189544042310368, 0.568160673226402, 0.7920505348753366, 0.9148073663373868, 0.9716243878883549, 0.6145598110821899, 0.8541957877269507, 0.6209906334889206, 0.5947268092386847, 0.5694237828855945, 0.8175712006272982, 0.6044164008318396, 0.5569086873810285, 0.9967777130570172, 0.5360505231014863, 0.62128780258382, 0.827435774557901, 0.7383696755786276, 0.8826855912267114, 0.5884562264985531, 0.9462698959045904, 0.5091499977759634, 0.7179351713544457, 0.97116029838411, 0.8677287022722455, 0.7451957583325988, 0.7262711898239054, 0.70244243356845, 0.8509472462440444, 0.7551387606282918, 0.5878745906995309, 0.8628163622323318, 
0.7572732324965594, 0.9382515045039481, 0.5955325447448585, 0.7358004569910537, 0.5020647938897007, 0.7471379268961114, 0.7361210931264659, 0.6902582215022155, 0.7252929647017177, 0.5170629597634017, 0.6176413546407968, 0.9248001044184418, 0.9133398616745327, 0.8453781317900185, 0.5130317693134496, 0.918991901634383, 0.7219709374245697, 0.5405014957349894, 0.7918010123019348, 0.9268402641940823, 0.5998488710512144, 0.5334922078372708, 0.7735504446863721, 0.551138719200235, 0.750310209398845, 0.5835908300973157, 0.9012222203049042, 0.8283390305348182, 0.5015791882039342, 0.8927408129802443, 0.9602211577082627, 0.8524012806776068, 0.7317319097049662, 0.6889779344147263, 0.8118467207352766, 0.9550263458909671, 0.5791904134853487, 0.5288897123145302, 0.9714958322590528, 0.6989029722953104, 0.7648692537111037, 0.9763697200630952, 0.7423837349901403, 0.9832807524885242, 0.9844248077284387, 0.7852738390530735, 0.6707821544011834, 0.5066512142812076, 0.5757673632207636, 0.5698578893014934, 0.7062010907758454, 0.9577541588394863, 0.9050390561508159, 0.5689261491906998, 0.5674755072792318, 0.6407001216547903, 0.6163518364114516, 0.9388744494914778, 0.77899262419674, 0.6052485930601657, 0.856569917286215, 0.5202592526992593, 0.5247302440013556, 0.8809486896490608, 0.5108436959399503, 0.777821057875828, 0.5615231844339984, 0.917479011090887, 0.7122310416154309, 0.7056416668389531, 0.543767119421809, 0.8189983377976505, 0.886994469755521, 0.860377112589684, 0.707350517105379, 0.6871723268042726, 0.6149660689145493, 0.5946349330067251, 0.967554276633743, 0.9480643129855213, 0.694388467466232, 0.8284521679003217, 0.6957864703316914, 0.7620883810911093, 0.7468694837304405, 0.7074403968097327, 0.7723816867849707, 0.9936890237595074, 0.9483269081541742, 0.5553606656578731, 0.9799152230548833, 0.6421944683520071, 0.7962004662133597, 0.7444748545925658, 0.5984196283005223, 0.9576949034916397, 0.6606861684106478, 0.8281609121158005, 0.7639135368370864, 0.7292639297938417, 
0.9361434482485281, 0.9230142144446354, 0.7110174200860992, 0.5187287338128823, 0.88376103704641, 0.6845284824622359, 0.585838199372753, 0.5569849561487312, 0.6660857359767516, 0.9224489222107146, 0.5315897448505826, 0.82949068046183, 0.6517346426280031, 0.887224018334924, 0.5184960987723074, 0.5349395060807042, 0.8070930466460358, 0.7423716753725516, 0.7664352166336168, 0.7060901681494651, 0.5494041113697843, 0.9104274954920842, 0.9363391217110448, 0.9938692397128828, 0.5873776376696898, 0.7343904397778451, 0.6783512258596318, 0.7401055103691121, 0.8432102542895126, 0.835216636761799, 0.5488272183337337, 0.5689042066207364, 0.9302467102245138, 0.8406238271973785, 0.8141122974328535, 0.5047185811609332, 0.6667191293044692, 0.7676961537597873, 0.5814293779285339, 0.5211762367026731, 0.8978739921060555, 0.7637964716462842, 0.9996701327656257, 0.7482864646481605, 0.5814683505953564, 0.8944524740163835, 0.9300122581340386, 0.951320892850327, 0.8184795651222116, 0.9331320066280017, 0.9159136969254686, 0.8550669044299256, 0.6431590597532315, 0.8624897733404665, 0.9970078150638182, 0.8196346249540585, 0.5824997362045099, 0.9335978848099897, 0.6699327909661604, 0.7078601115363486, 0.811331032587868, 0.5561992451066126, 0.5542368954468722, 0.852006915542344, 0.9211534381580031, 0.8730347848429313, 0.6029130518711818, 0.7406826056483083, 0.6956951898299826, 0.6760711803829247, 0.7827058833455223, 0.9728093957086771, 0.7258719033003886, 0.6810507960245664, 0.5106475308992868, 0.9749915053383338, 0.6562716411969776, 0.7837939011895791, 0.8616524841227671, 0.6736024891172883, 0.7394923361147343, 0.6328523688925389, 0.5056689748943327, 0.8515571055308842, 0.983697365074383, 0.8922111335267743, 0.7354543157717671, 0.6192889468381435, 0.5234239890139356, 0.6937459604898086, 0.5041453420520396, 0.6245753004713895, 0.6850735630074253, 0.7924072657326906, 0.5266638591727487, 0.9582449239143274, 0.8433764457247972, 0.7190457560839258, 0.6088239266810928, 0.6387592442630036, 
0.5403362275292576, 0.7172531232122431, 0.6806253020763153, 0.9883402493551129, 0.6537004234947659, 0.8487236502987262, 0.6927825731795145, 0.7302991282612378, 0.9955947551998894, 0.9703661242712394, 0.813988612653624, 0.8380023735867745, 0.678369692000466, 0.8250674043878861, 0.8784189575139485, 0.8652757291026196, 0.7022565026183005, 0.8957314911383156, 0.9048599581500196, 0.5060270849526625, 0.9785893242472247, 0.976335601183387, 0.7846698195025158, 0.8599352980203638, 0.9232583554273079, 0.5270932983289931, 0.7997702234326293, 0.6614308200662173, 0.9080244380539391, 0.9429442284971115, 0.978725186098746, 0.6247490867804166, 0.8473492472266633, 0.7000774499806137, 0.8796686904041409, 0.965403850878565, 0.5879353962341326, 0.7439381156529752, 0.86339305460274, 0.7076339561073769, 0.9089309734426156, 0.8241164734866248, 0.6381555070268727, 0.7267710368079152, 0.7283253526658762, 0.699124418181267, 0.5405862022686985, 0.942962568737819, 0.9051677164180509, 0.6283708128907433, 0.8556253866897645, 0.7891931890745979, 0.7206734403506707, 0.9979761466037349, 0.6094900270989043, 0.8923271217322917, 0.5327539399855896, 0.7213453068832871, 0.9664109926907024, 0.8907413278731279, 0.7550223511231609, 0.6334851671198913, 0.8460593670628929, 0.9374805673458662, 0.9252330892332654, 0.8542382786278837, 0.8951489507313468, 0.6557178137639451, 0.5987742542731741, 0.599428268289369, 0.753951712884225, 0.7004396668058719, 0.8041160273862841, 0.5444133211405577, 0.5324703687203403, 0.7625054979440904, 0.5498988057858383, 0.9217830144425803, 0.5726731398538574, 0.8606196426693193, 0.6386925983858358, 0.881840092227669, 0.5031699611289897, 0.7569810995365548, 0.6700155229245, 0.5983524504231575, 0.7170145888328379, 0.8634520995764516, 0.8069277854417909, 0.7910626996432562, 0.6955286866866383, 0.7774285729510972, 0.6700820998002754, 0.7502142679179526, 0.9406159663404732, 0.9674200647567774, 0.7456629314064694, 0.5352200062993078, 0.5439495030685587, 0.9892348644916379, 
0.5323619923165288, 0.6231826391690451, 0.9671374204424792, 0.8969476845558555, 0.9034267823457647, 0.817016738160679, 0.9759719232206601, 0.5504127148328135, 0.8824012296220627, 0.902491073820954, 0.7029902054300493, 0.9032935171795118, 0.9882215014531425, 0.52505065364266, 0.6150368919154874, 0.8136200531261819, 0.5182639444388634, 0.6927569962336538, 0.9379596251775844, 0.6109069605085446, 0.9345341337549098, 0.6841260724987529, 0.5450160078126683, 0.6964851488517937, 0.9383401258411519, 0.6566097758529471, 0.572267988122926, 0.5708111676277625, 0.8550278609554709, 0.7305087302429221, 0.650333866592846, 0.5221629666947061, 0.8564558228790162, 0.901624689082497, 0.6310768748570621, 0.7920871350479193, 0.5728474414413178, 0.5687979538322416, 0.5496253710654464, 0.7425658509129134, 0.8409147347385486, 0.7227516995004013, 0.5303900417986132, 0.747416254452677, 0.503250322109901, 0.9680119960286357, 0.7354168339608061, 0.7029176967599489, 0.9039132008783681, 0.6432669843691858, 0.9494016972238097, 0.7382668629337623, 0.939885464640732, 0.647300630511157, 0.5538788228194519, 0.9479736160685451, 0.5115954458105253, 0.7394559517975281, 0.8079275477970141, 0.5861958307002769, 0.7429002891680235, 0.5902555070661993, 0.795438675026202, 0.5500592522732718, 0.5726678441277792, 0.5421212615187225, 0.8007524270250612, 0.9633149630900928, 0.9575486439424676, 0.5858831922858345, 0.6727110873400768, 0.6663892501046171, 0.7458316222870764, 0.567256312527662, 0.9276514691912878, 0.8954786222112894, 0.8591746730318648, 0.989740256564736, 0.8716817742298673, 0.5632235933210413, 0.9394291131531951, 0.6300297232751093, 0.9633238872715182, 0.9516675355344046, 0.8948913731033377, 0.5359843809649707, 0.5823324501542979, 0.5519874272131329, 0.754278059040619, 0.5106009510928, 0.5054828285626866, 0.7908634189821557, 0.9470354365503235, 0.9870013718243247, 0.6752089233562393, 0.5494596156166001, 0.524945925873846, 0.6875043974736387, 0.8397533483546602, 0.9318323260046483, 
0.5829502064205077, 0.5249156587171724, 0.5472976817106434, 0.9051928973902466, 0.5200880265660839, 0.7674714890415983, 0.7674533816582105, 0.8147759309261648, 0.8592429575607672, 0.7720702704386437, 0.7109997525162242, 0.8453784282610025, 0.5354320705628635, 0.8771477455831613, 0.6503582926368646, 0.7904556484418994, 0.8360845231083132, 0.9936911614929915, 0.9191394782053255, 0.9997108060138429, 0.756868604891024, 0.9458715190721072, 0.881895001951479, 0.629697880472107, 0.8085785729433281, 0.7463239615469885, 0.5449903530654256, 0.9202825068508769, 0.6581250701263357, 0.7277985798751843, 0.7430779720184768, 0.7721715307006058, 0.9041148867585407, 0.5694660309091276, 0.6599195525744342, 0.5230125987399639, 0.9935798184822237, 0.7845426736140133, 0.7238660181306922, 0.5162510882435711, 0.9414620273772247, 0.9493361031013223, 0.9250219575715827, 0.9436543623329385, 0.5195229337348333, 0.8120281821951856, 0.7504694145595917, 0.6989362404057503, 0.9505113958626975, 0.5546269805628171, 0.8072608878750538, 0.6703806542094584, 0.7835672254497643, 0.7774682633671551, 0.8856391589461003, 0.7640826398544762, 0.8612832564369503, 0.8042416783038377, 0.8095924950373594, 0.7225648176160842, 0.7538459521539426, 0.5022517753226947, 0.959549341236969, 0.8128477425968362, 0.979697206169701, 0.6536401333742646, 0.9903391741142655, 0.8944390257065558, 0.6837219377366518, 0.8843673934207564, 0.6422303997899761, 0.8334514158591377, 0.6819220639256356, 0.8324659880525933, 0.8042237396684129, 0.876749618524544, 0.9003409455761971, 0.7679053722821358, 0.8457169230648305, 0.6771304825563456, 0.7243923354735384, 0.6753598355193806, 0.7962208022316213, 0.96021749277897, 0.6009109057109878, 0.5089732398588172, 0.6437203193458254, 0.8156927456480185, 0.6222738173349209, 0.8001474435140241, 0.9103284013609165, 0.7132094048320691, 0.9740468819783081, 0.5975715574662117, 0.5951739785476684, 0.6900207050133464, 0.5194415234826876, 0.8745277651003185, 0.8697264207665802, 0.9616591568278985, 
0.5913561757247847, 0.6662644256744552, 0.8871277200081713, 0.8430380145715685, 0.5614994789801107, 0.6438031299862801, 0.932614364318129, 0.9103378844632585, 0.7586637165700286, 0.9496535709850195, 0.7077590230197268, 0.9327096622197601, 0.5090205479547105, 0.6875248727770196, 0.9629801886648701, 0.5291042111011951, 0.7318625456163282, 0.8922485579726929, 0.5328924305393162, 0.6360806076585432, 0.9507053115483265, 0.6723443401453597, 0.8748931232675252, 0.5110484330999676, 0.891259033183217, 0.8251635759600849, 0.9356969944058524, 0.683647573122731, 0.710401323380361, 0.8031918462587397, 0.8036664288959889, 0.6791361826848318, 0.6443588166544139, 0.6042577822251713, 0.7001688612213066, 0.846107530337044, 0.8839699105813503, 0.9314405412877006, 0.6027760362771782, 0.8693561131843666, 0.6221043116730598, 0.8607020773643287, 0.8770269726932126, 0.5065318074232659, 0.8135415353568549, 0.8581408861655095, 0.7173179215706429, 0.8872756965301565, 0.7405505953434839, 0.917903331949478, 0.5360663558644425, 0.6330553983367058, 0.8076804910392079, 0.9282424228768693, 0.5066105825253024, 0.8865432689071195, 0.7553905884064944, 0.5334416000264531, 0.7348566051935188, 0.5693801077733233, 0.8686582452547993, 0.926458006729881, 0.6463567151329663, 0.8235507171975225, 0.5081398137517226, 0.9490627675166219, 0.8149748314073859, 0.9308525570697097, 0.5343970082038276, 0.8432990032030294, 0.6388188751930824, 0.819568593366333, 0.9180346520257816, 0.6535278526059163, 0.744274859612418, 0.5792907610153912, 0.9003594191924336, 0.8498826340995325, 0.54260202680788, 0.7239058165455026, 0.56280072630326, 0.8733953835547152, 0.6009920513378229, 0.838210158945921, 0.7732883309903154, 0.534338558764933, 0.9239397985663762, 0.6274148288885035, 0.7436578637732165, 0.6120979928978032, 0.8395997621065037, 0.5727888812840303, 0.8981148825027938, 0.7216909233994175, 0.5440526272691553, 0.6490480383199015, 0.7371733839110077, 0.5309837336193385, 0.911205805941631, 0.6135455319716627, 
0.9455103112462493, 0.5455529895568935, 0.8292019939086602, 0.9542844725654218, 0.823137308187675, 0.917119006373954, 0.554583233979806, 0.7496857961111092, 0.9047608097044433, 0.7707432867722575, 0.8430211752031473, 0.871510791352035, 0.8715546235803515, 0.5878581638227152, 0.5279208791132977, 0.5495564040103198, 0.8633508137324646, 0.7311212644035529, 0.6731746379546557, 0.8303585425564641, 0.7091247009659816, 0.8534716132039443, 0.6952213265734877, 0.9233900162244564, 0.8852047641656889, 0.8995041579582304, 0.5535313943560587, 0.7125398716606071, 0.9519781224001678, 0.6886712490948241, 0.6307673358398552, 0.9676660569353659, 0.9286773414775522, 0.5771912584870778, 0.6303764592870227, 0.6224732135628139, 0.587874989493494, 0.7981572702780241, 0.9856312963958009, 0.8568638245955562, 0.8148285026255537, 0.5972151953725953, 0.6313441506208026, 0.5916099494055579, 0.8987532751178772, 0.8485269265717199, 0.8385657015689096, 0.8238179638702743, 0.5252586534743733, 0.5595108609223918, 0.9533761863424886, 0.6173674713425912, 0.9254518730360977, 0.6002916609180879, 0.9479327355364423, 0.8070911519280003, 0.6573725109932331, 0.5427684368608436, 0.7356729619856339, 0.7925365666487685, 0.9541802262571117, 0.745197422818148, 0.8489192737261049, 0.9822240528906638, 0.5551818165676696, 0.548713142629028, 0.9223964439377499, 0.9947184892930079, 0.8482257974351359, 0.5919697830621584, 0.8414521461193776, 0.7570321568000173, 0.7401319624261391, 0.7317191568406143, 0.5010765895301859, 0.9661902290693248, 0.9090594649974777, 0.7320991234985228, 0.8880670759368288, 0.9854461823680787, 0.6492683545303297, 0.9680572996125916, 0.7088908867021231, 0.7292015374421914, 0.8195211263519313, 0.9819059762380196, 0.8547140668291493, 0.5001919910460377, 0.8452299703514194, 0.5237187655124695, 0.7332753907260237, 0.5002675843333758, 0.5570987886269929, 0.7116748266910675, 0.7119993382038367, 0.7078866364881864, 0.9784338485490467, 0.8890589829520655, 0.9596437182918276, 0.8099399797199245, 
0.7659058440541853, 0.6038234808070502, 0.9633838373117354, 0.6584121825425486, 0.6461498003148242, 0.6318085250864389, 0.523564213400133, 0.8185310890056452, 0.8518225077140272, 0.5858543096502866, 0.6981161915838443, 0.5554301663179835, 0.5596312336792709, 0.936146533399872, 0.9010945669419965, 0.8184317983242588, 0.8417269427129945, 0.8572793073141916, 0.6609640497807869, 0.9019040158131688, 0.6170872003883656, 0.7663917477815194, 0.6083766605082885, 0.9197142761487394, 0.5758475701272423, 0.8359036441751355, 0.8395703136023571, 0.8703754303392663, 0.5715560054464053, 0.9776592118169302, 0.8653008439765006, 0.6074887686014806, 0.9071417673686484, 0.9099396008409713, 0.7435389527548693, 0.8388390287467222, 0.990031987607209, 0.5743361482836808, 0.7483465665941489, 0.8797660631187392, 0.9600809242212449, 0.6736343672556557, 0.6179150874457315, 0.6666461804372749, 0.9596041822273376, 0.726544596023039, 0.7250255641737688, 0.6529358159241512, 0.5778601008987923, 0.677667276408966, 0.967142877290657, 0.8709451119749212, 0.8617239131194382, 0.7076750477644922, 0.8437228728990607, 0.8622839921386125, 0.7899890494940633, 0.9658545976778634, 0.5907622624307489, 0.9379239398926424, 0.9134402701275139, 0.5457025662390356, 0.6930627058900751, 0.5963501773117819, 0.8967859509761852, 0.8915027881314701, 0.7130093115915805, 0.9417614486989895, 0.6705809728758956, 0.9865368667569798, 0.9041934505558886, 0.7570340736124279, 0.9016433049380661, 0.6305229999760478, 0.7646393191990671, 0.7690685766984223, 0.5897278789155327, 0.8189621864513721, 0.8947039595985113, 0.8208988626539566, 0.7659181934539182, 0.8343983186708968, 0.7139124505609165, 0.6294388883843887, 0.6371334959416384, 0.7402056227004399, 0.57811373917259, 0.8716726111366291, 0.6707811007468705, 0.8420063172405982, 0.9379240350197012, 0.6124072295331157, 0.9918505211743132, 0.6013956125116559, 0.5695019869935365, 0.9397094811157003, 0.6923189606202846, 0.5965823734197184, 0.603864922542172, 0.8317230758377578, 
0.6079119544053078, 0.8098986352058803, 0.7728726903226556, 0.856609885576662, 0.534934908524715, 0.7182362017475978, 0.6884504985863846, 0.9770317587493619, 0.9238796965355298, 0.6257545554645731, 0.8595133625692812, 0.8515141736343026, 0.8530681671089508, 0.9883471930158194, 0.8232878990181589, 0.6843627676706499, 0.8256571254008812, 0.9212904528151343, 0.5598344596013302, 0.8271230098228421, 0.697449298733564, 0.7198789199531745, 0.6999913193035551, 0.6950389834955947, 0.7210971679056206, 0.8725332782875135, 0.9906341306886367, 0.8911899609459182, 0.7238772810196441, 0.6713927472585235, 0.7880650925650681, 0.8149563001484751, 0.6367379029286075, 0.536320457790124, 0.5188652559279731, 0.75717380220809, 0.6694084884949354, 0.8500650807365921, 0.7599519735631817, 0.7914465392624968, 0.8339160402305674, 0.8149237764786896, 0.7246270642416208, 0.6557225812971468, 0.6498280658711797, 0.6905656542527914, 0.7274145501910039, 0.9204391112667425, 0.8797999579459693, 0.9809436082942427, 0.739623327697916, 0.9868881761673968, 0.7035869774997601, 0.5994445047263126, 0.9842868391167456, 0.6507109472715845, 0.5233470121967255, 0.9632312610166427, 0.5773762529922528, 0.9685819032554913, 0.9305151070337487, 0.5377372979106767, 0.7523785326563084, 0.7593582163525627, 0.6644237572778873, 0.8105593841521526, 0.5354647455750365, 0.5248822368860631, 0.6344227402918634, 0.9550789140894109, 0.5759588599809551, 0.695955734138771, 0.7013591622277824, 0.9193770380112353, 0.7019502365594561, 0.5800719083178398, 0.8774296876449357, 0.8228356776594488, 0.7963922149784137, 0.7997571250016482, 0.6975569061300322, 0.8631800430230436, 0.7600392104729214, 0.5961224335402444, 0.599174308996147, 0.8201217143510203, 0.6098363947154055, 0.8856755480312839, 0.6257494406434567, 0.6793374340968334, 0.734527271826901, 0.7010263415792775, 0.5417472915192215, 0.722559815817592, 0.6192879843862154, 0.8167111170543475, 0.6178319613801877, 0.9975258513307164, 0.6150837341629469, 0.712294454576075, 
0.6265602956096077, 0.763583415501647, 0.8834434923479741, 0.5481883871129645, 0.566130583405451, 0.9668398207078497, 0.845443014650263, 0.8226685685594096, 0.8297573626045827, 0.8975194412552796, 0.7980579343939219, 0.6724568246734264, 0.5509015035729953, 0.9259492683981364, 0.5038483354414469, 0.689455952880528, 0.9838310738362588, 0.8625707852837226, 0.5972986930690213, 0.8133681491545282, 0.5850368975726663, 0.9874051593473326, 0.7611168199051446, 0.9612134227354303, 0.5457735955221172, 0.6529018990330377, 0.793810485200132, 0.7846638461803441, 0.9753187806079724, 0.8813954915791266, 0.6258602198875896, 0.5957488706179525, 0.5596895336321174, 0.560334031272805, 0.6524024492449543, 0.571469723163627, 0.7362202763680905, 0.6314938641448926, 0.8762976384233954, 0.6801937129218505, 0.572290291748957, 0.9821016854385454, 0.6870307064800496, 0.6040373860657068, 0.9938160490954875, 0.9228351587732821, 0.8777693483837028, 0.5801430368357818, 0.5670360968736152, 0.5018190289595287, 0.9176946286727767, 0.6294299026189129, 0.7877868368703617, 0.5686510507364229, 0.7100717138858371, 0.8284136826976698, 0.7760015046622153, 0.5284264672953611, 0.7094346465899929, 0.851179056088719, 0.6090869900326652, 0.8255506210271901, 0.6254086370234067, 0.9225118343753056, 0.6542977626068898, 0.8621253451299491, 0.7322243386536684, 0.89314459655352, 0.99186685540296, 0.8885671634262813, 0.882636922191278, 0.9211751623317582, 0.7703784786369213, 0.9213989548950225, 0.7506007355406015, 0.5652979165865422, 0.9054538254334973, 0.8280509130053384, 0.6534997565822991, 0.7035844351074101, 0.8786213223919563, 0.5002622680298601, 0.9196491062734166, 0.621969475111839, 0.5900073223367661, 0.9257157991526146, 0.8466652695511823, 0.8057217296607049, 0.9700339202189824, 0.9755305502547167, 0.8907764072380082, 0.829322081019049, 0.9243767822042652, 0.6386501678250907, 0.8083394739827247, 0.8292693507447831, 0.6152639715332868, 0.7834564797830301, 0.6351548398556361, 0.7709067246991628, 
0.5063990288418727, 0.7195109599250681, 0.8692945607383379, 0.8026113136122033, 0.8525507636313043, 0.7191981863880199, 0.698954370781712, 0.7778545903882806, 0.9050291324002481, 0.8252380726004185, 0.6697069078069675, 0.510732185698751, 0.7285912960341605, 0.6686070239780529, 0.9477352596626458, 0.5270396596225668, 0.5523404466387184, 0.9213768701055932, 0.6463435528342942, 0.9896291980595978, 0.754611423847984, 0.784692589326482, 0.676917978305079, 0.9395716412706485, 0.5697769986643388, 0.7358062810072697, 0.84407465721453, 0.5733472974333713, 0.6882053138366622, 0.8092771455275645, 0.5054626226372774, 0.7555974596403034, 0.7871300872319758, 0.922095354717079, 0.7200022579477379, 0.8108543889973014, 0.6208019879947237, 0.8128531951442257, 0.8864422962749832, 0.9949727379650514, 0.8140669641469285, 0.6040181583813033, 0.7654129400932397, 0.9469859647325307, 0.7283047325598728, 0.8663279538201877, 0.914251981126085, 0.7415014292210734, 0.5551218306420135, 0.8499395990316685, 0.8199980574328417, 0.8604539980793853, 0.697632410788736, 0.5717748744174089, 0.7351373766310042, 0.9063292743138935, 0.7906732863299288, 0.8591303276611542, 0.8720484978052556, 0.878330219499994, 0.8444238198743392, 0.9688140891171336, 0.6464594649056853, 0.6353426915757203, 0.6596794785450979, 0.7486480762810894, 0.7278415550134789, 0.7830463210065692, 0.8869415123279181, 0.8989054491843984, 0.9464270367445593, 0.624551981132227, 0.5092169110246041, 0.6147418084226464, 0.548995468313392, 0.8817579427073924, 0.9553657655093459, 0.6209159926895798, 0.8728398620000358, 0.8057402786730579, 0.8707806891925278, 0.6693458319922526, 0.5290444428569943, 0.6404376986117952, 0.5063439385573243, 0.5880530699113931, 0.7854470339620684, 0.6938528624160932, 0.5884418261451894, 0.879470381476402, 0.8380641863691269, 0.8601040444363444, 0.5406621431704167, 0.8235231522669397, 0.6584342598149804, 0.9053349932336556, 0.9010295856056476, 0.9328266491017219, 0.8699294307672004, 0.6654825124083519, 
0.8929244040118703, 0.8987472243757435, 0.7652559388894355, 0.6082240249568855, 0.8679090358491541, 0.8918391365756224, 0.925650798877639, 0.9394088519679353, 0.7980263417392327, 0.9020988617944756, 0.814312922745758, 0.5496699253328989, 0.579034471196649, 0.7066901691702194, 0.5032697112759446, 0.7463659026335441, 0.7366594835952226, 0.7386064455549383, 0.8754143516412421, 0.9300365724980599, 0.9487341281269664, 0.5720187313182905, 0.6898738043153317, 0.8108884538033547, 0.532553766021189, 0.9546739445772359, 0.737660989879929, 0.941231646347479, 0.7838573942002796, 0.6416676294725769, 0.6266124277749099, 0.891447556939266, 0.9318735158964565, 0.8487166086098987, 0.6332776217174128, 0.5619968553228676, 0.6788368342524369, 0.913557050304491, 0.5663368698107717, 0.5684391091538139, 0.7484133393441603, 0.9181868781072946, 0.9963104165935384, 0.5790323949318685, 0.8630475268475988, 0.7101504506514861, 0.5557608305832116, 0.9344406320497918, 0.6994968428747592, 0.7691000741103, 0.932170295897798, 0.6469728258645577, 0.5358167768865234, 0.6774118547408581, 0.8658416341015767, 0.9931766534503468, 0.6342085207833337, 0.9294110152463781, 0.6812170070929462, 0.9445358478877848, 0.77980492911586, 0.6936986391566373, 0.5231642704972512, 0.5242102342332418, 0.6825645426605559, 0.6380560169610061, 0.5203077124396525, 0.7335474693976765, 0.7042575337662922, 0.939075706872667, 0.6532267705779444, 0.5145936861549685, 0.6544793252692663, 0.9848015525744973, 0.9383662967327824, 0.6225797149672913, 0.8314485790966457, 0.6037461637489013, 0.9810135248279988, 0.5883961699458089, 0.85984655069748, 0.967923293847639, 0.7138506274367453, 0.935592575389645, 0.95859102496783, 0.6004434369117359, 0.6414630967858177, 0.8731364850057799, 0.8059444079495045, 0.5071774609788526, 0.8412650657844363, 0.8273731857784714, 0.9179942174203419, 0.5313591028084552, 0.5366937837452341, 0.9759065397930753, 0.8184876097630014, 0.7483668765325533, 0.8129246134924277, 0.8454562265116412, 0.8571586228631605, 
0.5934572513346849, 0.5958944369277269, 0.7902251805166294, 0.946322511730954, 0.9060146358055648, 0.6974267279089563, 0.9373356627270346, 0.6554319032664524, 0.9290127518918248, 0.6720098615894343, 0.5773513255023872, 0.7294858716268897, 0.5699362756507433, 0.6982829460262744, 0.7741045639055804, 0.9686850052346936, 0.8377695022292413, 0.5085833705276828, 0.8718130320811324, 0.8146788281692112, 0.9089570503586755, 0.6933395555435059, 0.7214909942478068, 0.8712887962743158, 0.9328608606660285, 0.7454036404942891, 0.819088161523567, 0.6936381501507735, 0.5755181526256923, 0.5762463298886263, 0.9322943028213184, 0.8921372498613127, 0.9151074050481909, 0.9132004408894685, 0.8829948290583975, 0.6881621065137493, 0.5602301988050824, 0.8098164994251436, 0.5801714810205715, 0.9919085820753104, 0.7304073952146082, 0.8879258507196011, 0.9517345204383761, 0.8712577545588546, 0.7942291626964373, 0.6936814186816014, 0.6252377411277974, 0.8804326579098559, 0.6857656474374987, 0.5773709121372876, 0.9700685409749432, 0.7905126122984611, 0.934451617240993, 0.7299079251844932, 0.9813702470437848, 0.755754068561012, 0.5363775224287579, 0.8899482434738628, 0.8514660030958761, 0.8937644692165438, 0.8332932153552433, 0.936481144309697, 0.907365140412276, 0.6944667069507959, 0.8470491509946576, 0.5196965102959252, 0.6232798602626008, 0.9079543414539137, 0.9792285682626908, 0.8899424940030569, 0.6062049219393977, 0.5116308883022502, 0.6358133334873683, 0.7462572040780625, 0.5779117785976842, 0.6961681482879001, 0.6679779105667063, 0.7459418411874663, 0.8500858059109204, 0.7836777656961976, 0.6412676257181826, 0.7002818854515703, 0.8875136432569686, 0.870743045055081, 0.8862172562087383, 0.7544110182110199, 0.7769079537914624, 0.8744392940331157, 0.6995542575493949, 0.96756872887658, 0.8659980179743741, 0.6813504946259897, 0.945984983409238, 0.884123471742815, 0.8892331933418398, 0.6154926707064852, 0.5314440321261372, 0.7908072917231381, 0.6787708598476818, 0.6479974659125484, 
0.8282887038264117, 0.9385841840092874, 0.9890142643710187, 0.569926860366947, 0.9822368160120056, 0.7532410907150756, 0.7751516957872714, 0.7165484831966095, 0.644602823735265, 0.9965141262161649, 0.5536435091704918, 0.9758477336536578, 0.8748721792816425, 0.6546945913187501, 0.8394746391876223, 0.6406052986749842, 0.8689401936390726, 0.86702731020933, 0.8680804686357925, 0.5936932933808515, 0.6525446993877912, 0.5525202801240094, 0.5665065206217348, 0.9235933385078359, 0.6353642341492539, 0.7722257994536738, 0.8425837195718471, 0.9294472116262056, 0.6240230364478249, 0.7998762873872352, 0.802023000656183, 0.9503572511888485, 0.6377817952503599, 0.5755981242898598, 0.7654824444524457, 0.8458176880843108, 0.7247248836289101, 0.5480671466666179, 0.6765792771850871, 0.6276647332968965, 0.9054208746796488, 0.899505801829026, 0.631874122055393, 0.7137305293474405, 0.9652677798132492, 0.9422810075666137, 0.7196776283796811, 0.7276060045309921, 0.7761078956020362, 0.846040290949595, 0.8180133389317968, 0.6372344973175124, 0.7208088063389941, 0.6620583378619909, 0.8314206206671365, 0.7206236302251262, 0.9808155300743373, 0.6423145425655168, 0.6917933146829285, 0.6088829428473109, 0.628495869596509, 0.9177827352178074, 0.5448016418787337, 0.8789005779416436, 0.5895210020082906, 0.7527498029289019, 0.6502138323528538, 0.5755506030119268, 0.657845784970061, 0.8014372311309008, 0.8605404752192274, 0.8885206651939175, 0.504697436862318, 0.6985036604563393, 0.5741777087842082, 0.9693259697537402, 0.8579287034168623, 0.7230017728679223, 0.7160935055268138, 0.9016806570796924, 0.9301590751985429, 0.986694454978214, 0.6994623798084253, 0.963089373539157, 0.8264828968068575, 0.8159564237150068, 0.8441843742303872, 0.7651718254053662, 0.5316950240730565, 0.8911100979876814, 0.5502682715007543, 0.6221600069223253, 0.6194959919179438, 0.8874207950364286, 0.642469130784818, 0.9529627393760566, 0.6601893549886589, 0.7667154200234481, 0.7528848890321487, 0.9518619371724941, 
0.5053605280044817, 0.8402712045476693, 0.913646153991235, 0.5366872275872266, 0.7838638680690067, 0.5268660350552707, 0.7485119378438744, 0.7046631466380575, 0.6060979829801671, 0.9337944246214678, 0.71972630057489, 0.6225778805609192, 0.6382212725799861, 0.5782568077688623, 0.8945069924760021, 0.8154081587055575, 0.7097990349860837, 0.5173025302805005, 0.8483005918918864, 0.5009217295488378, 0.5523240059916473, 0.6119293628645588, 0.623241408035738, 0.9871597820431952, 0.921854814192075, 0.742867953050623, 0.522306125408798, 0.9754751622018948, 0.5693391689548466, 0.7237274769889732, 0.8071007634484024, 0.7927208174527345, 0.9461433005537063, 0.5829505704623301, 0.7120305454924842, 0.8434731962086107, 0.8805935910995606, 0.9816074894135689, 0.6867184357513225, 0.8772947335997604, 0.6045964678026401, 0.9772138180979945, 0.5316850962814412, 0.8561055872279952, 0.8606083537024988, 0.8981934707172539, 0.73226797816047, 0.8296181697188578, 0.7192468935417173, 0.9303660344985301, 0.7788780115192184, 0.5145129763134144, 0.8372166506589418, 0.8332496199582448, 0.9113834905454494, 0.663849958706458, 0.598461054574396, 0.7144852298345242, 0.5655853882349011, 0.9585249396304119, 0.6702358440534328, 0.8825355389073131, 0.6064727546507727, 0.5090048127601292, 0.9101697489598053, 0.5237493053855733, 0.8913547224897025, 0.9077501651760949, 0.6607417240867757, 0.6525855701321379, 0.6036174959688273, 0.5156862770511335, 0.5670935724315327, 0.9123829051465445, 0.9391932815009898, 0.5600845066419246, 0.6262427230195646, 0.8049067463943487, 0.6310683391134254, 0.5500576025120758, 0.7987117572532403, 0.8777358203958683, 0.9799708356965583, 0.8945173424852917, 0.971553034417135, 0.5448171076528603, 0.7162747898900904, 0.7381164289078417, 0.8182175972028088, 0.8086698850672467, 0.8446317273980307, 0.8954885251363656, 0.5043713661090332, 0.7320774540817886, 0.6934448556664115, 0.6364279029565802, 0.6261720855253188, 0.7313784804398242, 0.6886900815263787, 0.9428258107700518, 
0.6650569184409043, 0.5008197649449992, 0.5285263038841622, 0.713203885084617, 0.924072371213154, 0.5921661459018377, 0.6963285680564366, 0.8163754688343632, 0.8507722533165127, 0.6026483515167427, 0.7270539451007435, 0.924401060030186, 0.6777622573793854, 0.7685052924336775, 0.8823807727853561, 0.5710420829061305, 0.6907618072608626, 0.5367303276060931, 0.8115298892531734, 0.6673427750675556, 0.6039079731669543, 0.9811396285897305, 0.8960056655884889, 0.5419095461419736, 0.986992781154586, 0.5956884472882465, 0.8307485281914686, 0.9573097754580868, 0.8881319261712346, 0.6367708671526264, 0.6426245553546756, 0.6745105360554788, 0.9227928866145358, 0.7392969037252086, 0.934810440852729, 0.5232600928271176, 0.9837136144115068, 0.7104567306829652, 0.827287005890277, 0.8679184720742357, 0.6823820536546974, 0.5514283526922968, 0.8484382157382288, 0.6995511535246917, 0.6046461926121218, 0.898684405568067, 0.8006954809658918, 0.9645056906718927, 0.7061137506150457, 0.7854159045338056, 0.5199596079698164, 0.510783239891246, 0.820738433408156, 0.8046840269813915, 0.6955492316255776, 0.8006886956106973, 0.8226686588252492, 0.5066287535076102, 0.6720479155096375, 0.7687810480328479, 0.758756419383932, 0.5962616296102722, 0.7215994169805456, 0.8299313045843191, 0.697828582659427, 0.747690712707612, 0.803434129796558, 0.9392789809800473, 0.8903635103591012, 0.5688283378366967, 0.6430600706528449, 0.5992143512166934, 0.8173821079978172, 0.8171177692944582, 0.9281412377720009, 0.9453048895672894, 0.5985809964443454, 0.5904804599578916, 0.8070482727496618, 0.9547868535799804, 0.9601070149313242, 0.8073472975369453, 0.8811182115760151, 0.7557946975078904, 0.8932696429963148, 0.5979002771980465, 0.6143306838016174, 0.7003491543831273, 0.9706558677545312, 0.8345052827152688, 0.6043590663509073, 0.7534357295169616, 0.825693150701581, 0.9368469569740768, 0.6765046561859618, 0.8275597962982628, 0.7052759828617714, 0.5113812452602686, 0.859846943001501, 0.7248171550945293, 
0.577943080990916, 0.6944239854758485, 0.803694566802411, 0.8927325479013575, 0.7269005811171001, 0.6372315616993336, 0.7919942279331202, 0.5374270793208369, 0.9336385087021728, 0.8258264662033288, 0.9089269234481692, 0.8444052506270987, 0.5095547344422691, 0.9138323035981581, 0.781552825068054, 0.739357717869308, 0.95617864326298, 0.6661452546442292, 0.8624675424929229, 0.9198550507911829, 0.5060347596378691, 0.9351827253675272, 0.7817429918939869, 0.6669631270383325, 0.5654394251714668, 0.7201706438713744, 0.7342471135866857, 0.8777257827267055, 0.5218291958506822, 0.6370804743572227, 0.7349898108582831, 0.9837625596754564, 0.8087118287710444, 0.729797390589807, 0.7470146648309424, 0.5977955030359361, 0.9282967248061811, 0.5668448210208202, 0.5857799722161341, 0.6446618105702766, 0.8497010908938512, 0.8236345294929699, 0.7393477624538376, 0.6007446657759112, 0.7850265486750115, 0.544238587761736, 0.5538856160067437, 0.8111966912298059, 0.6806086653214072, 0.7226450580728447, 0.6565964028089295, 0.8657338377767876, 0.7957583217992465, 0.6584339148871559, 0.8706894013964432, 0.5110608469390159, 0.7889622931684609, 0.7002080432580475, 0.8828410868409673, 0.9443012350042692, 0.6826085506813105, 0.8116906307663019, 0.7068564468887504, 0.6929013493324293, 0.9215950366493268, 0.9831463025133085, 0.578244612076138, 0.929945016238592, 0.9287771125414088, 0.6104813722053676, 0.7149426366911438, 0.9500669020348942, 0.5225889473156284, 0.7361802623858815, 0.9182563325242139, 0.8012710050239102, 0.9666120786648764, 0.6843271753346931, 0.6997884024488636, 0.7246412705368658, 0.8918468203389381, 0.8378188362632849, 0.829358043551502, 0.9575368660181574, 0.7652349358427792, 0.589630299838575, 0.6497442204229729, 0.6332364052280572, 0.9808047168181049, 0.6181527180242847, 0.6937639596474915, 0.5398481678352309, 0.5408391321937773, 0.707552119444441, 0.5186960243475969, 0.7633731316739147, 0.585850117877341, 0.9291123194591197, 0.5543474421366852, 0.5010102939000097, 
0.7291358740131062, 0.6111487804316194, 0.6350312470594712, 0.7094644103530963, 0.5824088123695201, 0.5058475341383775, 0.954371859054518, 0.8387003981409975, 0.679834269101285, 0.904760706527558, 0.65912678876593, 0.5737417536138796, 0.5494570433747031, 0.7252616281414295, 0.601071313123927, 0.9853960503910091, 0.9159316699022979, 0.5586162143132106, 0.906515682810115, 0.9629405140391729, 0.5671523302725427, 0.5373915646382306, 0.9925161740677075, 0.6136157512625664, 0.6755398340203194, 0.9382692915998012, 0.6459507370951898, 0.9212619631693029, 0.9214167711821708, 0.6625079296306995, 0.8216577849492726, 0.6400175030835098, 0.7255841950639178, 0.8457287431648677, 0.9212334363004152, 0.8619838352955513, 0.9965670496981753, 0.937074305364326, 0.9792565059150025, 0.9296871314849058, 0.7815232490586057, 0.8270093147375583, 0.680413613653737, 0.8541457990119107, 0.6019844643046897, 0.8129059907032314, 0.6766881558627598, 0.6453907131265963, 0.9036867389568202, 0.8220751157803439, 0.8588217446284641, 0.800687727689506, 0.8667867306114545, 0.7992686645536142, 0.7119206037546809, 0.928184834933865, 0.7017662761754369, 0.5855293400783592, 0.5103348385656353, 0.8104774258914332, 0.8097155441155661, 0.960534579534743, 0.5745136849480499, 0.9872047518490732, 0.8526273116330593, 0.9872155789852602, 0.704377764957298, 0.700261001933158, 0.8149935489753289, 0.9223244800802561, 0.9199153517889798, 0.6536025134809145, 0.9607487595775027, 0.9180800343692364, 0.5500480234477544, 0.5690598531524578, 0.5235329768143471, 0.8847420902859047, 0.8459851813539597, 0.5554714738227784, 0.7345974771877974, 0.9470722722451532, 0.8454641187746459, 0.5861008508456326, 0.9165776476617067, 0.9707510112573625, 0.6818010541418731, 0.732862613296853, 0.9352892717007105, 0.7586378464292858, 0.9328317031753112, 0.6494307251278756, 0.8850860627482279, 0.5291896498891226, 0.7980067161813527, 0.5674013723620981, 0.501804614541191, 0.8039564808990349, 0.8396240421298976, 0.9340453060005437, 
0.9613499929453269, 0.887873112924209, 0.580286958536088, 0.8924571136863934, 0.5525422189320257, 0.5817844757018871, 0.6433498755343848, 0.6384144907437633, 0.9788282708483573, 0.5888932108410001, 0.5063216243243223, 0.691714999710124, 0.6203549592478863, 0.5443010214947974, 0.8168494740453971, 0.8173398258167437, 0.5648446658618881, 0.6501550431692101, 0.5598246901600388, 0.8117952376991919, 0.8710087813366333, 0.7744836749878097, 0.9339784955659863, 0.9322028787590706, 0.7602687389282472, 0.5221340374501722, 0.5849142333824029, 0.6018237954597535, 0.6044301351142533, 0.7100916037231741, 0.7278721173575009, 0.5616504723384774, 0.9194888137065569, 0.9980917760244059, 0.5918343242821408, 0.518055477413587, 0.8610131875593439, 0.9391613177219105, 0.7551303376901097, 0.6394010537241595, 0.841719102840123, 0.8815818829703372, 0.8454979778684353, 0.7652497352627352, 0.5432407907533959, 0.9413402260485355, 0.626018320496792, 0.514296291409247, 0.9824020169689043, 0.5826054759919537, 0.5821591959357435, 0.5384641534199771, 0.7150673338962302, 0.7227180658808705, 0.7423114229861261, 0.5079485733651261, 0.7200793361176809, 0.5240874924517691, 0.8827730584609181, 0.7611517670005998, 0.7261639658494563, 0.8244267329582455, 0.7699812734424862, 0.7672603916387513, 0.7502390778799506, 0.5565813434634632, 0.9174250163811881, 0.9361934932764155, 0.8993255505064174, 0.5748655265450534, 0.7850924266128367, 0.8208493139787343, 0.5653320956170416, 0.7778864268189238, 0.5522409994980007, 0.8479900454517142, 0.7869067614543279, 0.9069007903502726, 0.5609002059278448, 0.9372075885568765, 0.9654380121949285, 0.7620013008717111, 0.5344566057633835, 0.6694980491632667, 0.6537052045077048, 0.8648769521135958, 0.818724691125107, 0.7054145079056782, 0.5187392273003997, 0.7171697897883642, 0.7675361719746989, 0.7807295216698074, 0.6416790042496008, 0.5814354436379123, 0.9770344446389401, 0.6910725901174375, 0.7279765206476698, 0.9552200558068759, 0.5448138165474341, 0.5354216316851583, 
0.7433782309602714, 0.8953129151683852, 0.805726686411463, 0.8662358479689999, 0.8264434169329369, 0.5672128268602199, 0.6703545155849401, 0.8492678827042711, 0.6200230947551246, 0.9501488143093397, 0.8049162932356457, 0.5956506436643882, 0.9187919722019803, 0.9490427016389626, 0.5094101145317544, 0.9433309394955347, 0.71901091520548, 0.8670586130389584, 0.5307944818017456, 0.9266713906366744, 0.8172707634060714, 0.6912013252853286, 0.8296209782455062, 0.7851428778045789, 0.6726275490977538, 0.6131090906183514, 0.549138349627065, 0.9298524401993814, 0.9086542909860191, 0.516076700729678, 0.7626594182816999, 0.7682710345909604, 0.5892828817905763, 0.7562344897044883, 0.6705042001383732, 0.8195554939204085, 0.5785726167620542, 0.6587536995889962, 0.824068641568459, 0.6106726739205528, 0.9385686090808221, 0.5381230612053538, 0.7728556332135625, 0.5293773714263048, 0.5473732048007478, 0.7572327410791762, 0.6233348472678646, 0.9544789368187834, 0.7797762208440862, 0.5691995916647099, 0.5560260925886724, 0.8181710165097844, 0.5866468754971805, 0.6231898109570044, 0.8195607193087007, 0.8994095957061641, 0.6138943091178989, 0.5387187209812139, 0.8947273968148315, 0.6779697154105471, 0.5495519725054899, 0.623217732924805, 0.8860108608549999, 0.7336650834838911, 0.7336045806686481, 0.7069769316619007, 0.9031293717768276, 0.7488142795354035, 0.9171159460288453, 0.5719574832635872, 0.6121947492641258, 0.7165662971740419, 0.7540067848815454, 0.9925388388229216, 0.5058925009309874, 0.6744290621363491, 0.8993875655725099, 0.6577543893048403, 0.6994512912452544, 0.8135240546092297, 0.9749484501465884, 0.7125987780451927, 0.5744350566868435, 0.5797320929000025, 0.8054643287562131, 0.6284259267383185, 0.7660724135978039, 0.9702103920515848, 0.6927669858744101, 0.8516292246109416, 0.591387785010644, 0.8169041585967426, 0.6392328229888922, 0.8361528164095303, 0.8924693740316749, 0.7307491960782482, 0.9236214122628688, 0.8303091463382949, 0.7535288381237668, 0.9189938400428435, 
0.7476437508149341, 0.7939391841438821, 0.7080080382380015, 0.6696750432252534, 0.5531033379396197, 0.7922135375873472, 0.8063902601089512, 0.9511481389816467, 0.9603077475090473, 0.8156494660183116, 0.6762565949755422, 0.5635744445363029, 0.6859553166812113, 0.5986727167701833, 0.8622839333166493, 0.6540539557722994, 0.5931668376571586, 0.6614597019296573, 0.5204832650572606, 0.9708100950596977, 0.8934987364141608, 0.9262666211305699, 0.8733315216809402, 0.6263421449552287, 0.547815591473194, 0.7836166452266253, 0.9778183266632787, 0.5015223255795536, 0.8158401343390014, 0.9573364308546022, 0.9944381720328365, 0.9333030115301115, 0.8673581015538037, 0.7399447933339833, 0.5380895222676113, 0.9165368112798763, 0.5238585917867291, 0.6792657837727658, 0.8900301648727886, 0.6807635976443359, 0.9807423966194158, 0.9916443249659614, 0.8171149566717312, 0.9819294189248793, 0.5536478141060576, 0.8784249407223808, 0.7298227927643457, 0.6917012063996453, 0.5194448387194714, 0.9879785222614108, 0.7818221564247472, 0.5215628058996142, 0.7851164614972422, 0.7353513977669848, 0.6725453040247473, 0.9056912418357372, 0.6431844281928047, 0.6094376135232016, 0.5552696913870567, 0.7933005987960309, 0.7809579361551753, 0.6068959551739526, 0.7057622985968406, 0.7909927734528304, 0.8981386018170148, 0.9039473477154409, 0.9080808903322466, 0.5589510948028189, 0.9442747743723179, 0.6317949107355636, 0.7501694509382109, 0.905769893359669, 0.5596322057954084, 0.5997193838304051, 0.9918021220034815, 0.8702740116841037, 0.5982550356754845, 0.8257397982589274, 0.9832839376604717, 0.6553288006595597, 0.7968838633288003, 0.6806822604466776, 0.6180568270504645, 0.6006866713046872, 0.8373563029406477, 0.8723082403793632, 0.8992433624745335, 0.8506934561302821, 0.6346706769547703, 0.5381690588306538, 0.8019060810215302, 0.70756561476592, 0.7406549217162186, 0.9106456021134568, 0.9315555213042366, 0.5838573044509356, 0.5921076772935208, 0.9830229441049899, 0.6091802687981522, 0.8808193115544318, 
0.5121738865240186, 0.7382054941197458, 0.8225807999454189, 0.9662821345788324, 0.783328742184834, 0.6100425583040143, 0.8083319474499165, 0.9478149377467171, 0.5887962422495199, 0.662776658129458, 0.9119949498325581, 0.5646511921686301, 0.6105476026868892, 0.9137849226290309, 0.6676848691492069, 0.9916546904669725, 0.7071153331775715, 0.880356472360275, 0.670473895195578, 0.5874350023164503, 0.6691283857569259, 0.8676692429157332, 0.8355742790634211, 0.790429290674606, 0.5179519296419617, 0.706143486022673, 0.6613124243594701, 0.8513807888709114, 0.9642632994452718, 0.672812093727439, 0.6003552493416822, 0.5193018025130905, 0.9391806250604582, 0.9128595068893728, 0.5635989822132025, 0.601900685011939, 0.642937980191853, 0.9464067424484208, 0.6357986212359624, 0.9605281203088403, 0.6265624236762644, 0.6841186373615582, 0.5706833200137444, 0.5117014391946626, 0.9807465238329962, 0.6925888782807452, 0.8948987793034926, 0.7760488365945022, 0.8006271821533772, 0.7777935685879073, 0.5834647912596274, 0.6390857320007233, 0.6059441691717313, 0.5254953474980226, 0.8695298453180756, 0.9445792774767746, 0.8495389133296328, 0.8430772508504265, 0.8024386019104166, 0.8859539089682905, 0.8617765059585353, 0.9556095227662997, 0.8386130936909042, 0.992854096089629, 0.9781431382421417, 0.9740621158192262, 0.9795839424576225, 0.7316123943316077, 0.637500956534508, 0.5744873077544087, 0.8900003115075965, 0.7588619773158123, 0.7971313305850782, 0.7655277419619404, 0.8080211815930484, 0.879829058569263, 0.7678219209760455, 0.8007457513515248, 0.8440218053999587, 0.9358740157233272, 0.8954157149651811, 0.8898239228759193, 0.62084171348521, 0.8058211257288226, 0.5986780264665617, 0.5316494294950777, 0.6143144776559449, 0.8982369786109139, 0.6412085901179733, 0.9557438387907651, 0.6288504538194435, 0.5238027621059875, 0.9828250774301893, 0.5930770661323328, 0.7869844993027729, 0.9028095140015056, 0.9322755068299916, 0.5645258596307741, 0.671451861400594, 0.6179949192762324, 
0.9378382935172491, 0.5170692087009898, 0.9272370458809852, 0.8532495243050184, 0.7404546548595912, 0.8648470202018697, 0.741351991600657, 0.7099183435720557, 0.8214263220945159, 0.6640872514628752, 0.9295238794285094, 0.8447926357584428, 0.8868957721121788, 0.5581567659353635, 0.6030084887203805, 0.73816140428827, 0.9738786609641676, 0.63667766310822, 0.7633682910604083, 0.6230613315997334, 0.6725817681949531, 0.5365515195277178, 0.6729438769902798, 0.7770421530280182, 0.5692562242277834, 0.7362597722488464, 0.9967598105974301, 0.5867994391298361, 0.7546119077158426, 0.9079048335745924, 0.716839554982337, 0.6932236269609025, 0.6533014925315919, 0.8205611415485256, 0.8270058316272428, 0.873385801767504, 0.9531440376231459, 0.5106003626690268, 0.5756267517745504, 0.9863427966037137, 0.5506769356085484, 0.7254828442509949, 0.5116212727258098, 0.6186230391817611, 0.8586570534517697, 0.9532610130363958, 0.9180573295406202, 0.9946379045695151, 0.6392890641730955, 0.5844113979621328, 0.7566016077021036, 0.9562296618599735, 0.5440891072962203, 0.5892149746313978, 0.8810600488448812, 0.7602006291655237, 0.9373782649150524, 0.7114035591603116, 0.9753169116365457, 0.7280777360098114, 0.6821595569586887, 0.7051750063771887, 0.8853001434547524, 0.7979726242602916, 0.8261955215855878, 0.5330031946551241, 0.7389997000484372, 0.7350256101282162, 0.5098200865449231, 0.7493262376281986, 0.6996428674210832, 0.9749463634553068, 0.6071408647178612, 0.6040630766354176, 0.8691430784397196, 0.954261239809522, 0.9087684233431155, 0.7968022013253276, 0.8863948513046718, 0.5952017814082411, 0.5891800982791624, 0.8279367299686065, 0.649047221115377, 0.8330134767147563, 0.7483006446927826, 0.6622247039878666, 0.5104615312118881, 0.6961062797354989, 0.6044977490851379, 0.5200566651402432, 0.5336001171363012, 0.6481540373072839, 0.7824494454613985, 0.890568614945058, 0.887483563559037, 0.6284119500276946, 0.8264907597872868, 0.6280917370870286, 0.8798332971796903, 0.5531821013795257, 
0.667983380472517, 0.8884915839201823, 0.7686466788408275, 0.5108638052232215, 0.6111279498327642, 0.7558804615949186, 0.6001590743321701, 0.9649507385850997, 0.6998249737671545, 0.9660039881618034, 0.7972753125126381, 0.8822113493199573, 0.7224076703047217, 0.8063506486031418, 0.8295362880836274, 0.8766525688304758, 0.6104269873048054, 0.8780467320312644, 0.8836927263075003, 0.8433099878185824, 0.5859276428038894, 0.898090845145721, 0.9514058657028159, 0.5790627442895877, 0.6988804585023938, 0.5963067805874516, 0.7944495642003595, 0.9765649915349922, 0.7357959887057839, 0.9195920725798357, 0.7951247927944047, 0.8277599535340755, 0.9499986384953341, 0.9007908851012366, 0.9617700965817559, 0.7342141439569473, 0.9032385335052477, 0.9298877256105538, 0.5263179008338112, 0.8206208178736214, 0.6984123326374202, 0.7373826526646536, 0.8253583819214596, 0.7145041105298012, 0.8314144274808882, 0.7641099637675486, 0.555850294728095, 0.5987921045080615, 0.5877123834599385, 0.6102891502306875, 0.6262030053748175, 0.9190591603543772, 0.6753552388618456, 0.6941668201180827, 0.8424638534984955, 0.5087523831702235, 0.9554338504895508, 0.8493980657650739, 0.6878567846076095, 0.6673045300696474, 0.5657361875847682, 0.8798863022320624, 0.9929071065839855, 0.7828741905838716, 0.5314326531118494, 0.9808085897404591, 0.8169039932855212, 0.7931398260097962, 0.513967869949689, 0.9006486510302826, 0.5368508478593494, 0.8671488524528435, 0.7409100122368453, 0.7090615477367499, 0.7423345045994588, 0.6132251820892181, 0.6280419804159559, 0.5842311790950155, 0.7766670776772406, 0.9497395396032123, 0.609587451543607, 0.7854174674020289, 0.9162471203411267, 0.970203869079852, 0.5001077913234084, 0.7471308725317996, 0.5355209510624983, 0.7705109864096444, 0.5689056823756423, 0.9988781564675742, 0.5670518536920683, 0.6741522033778033, 0.7945686264065945, 0.7672732170001308, 0.7257390072134765, 0.92608195988571, 0.5027112617648226, 0.6018089581581929, 0.8219393504341785, 0.6208764652367729, 
0.9612497586270571, 0.9883161088566469, 0.5232401721275393, 0.5104225499023938, 0.5077575588931924, 0.8335767840380756, 0.7959511503091753, 0.5352222836242408, 0.6511147333649907, 0.9567803674046308, 0.9866157959660984, 0.6100309816983829, 0.6421645973394098, 0.5806784490837507, 0.9636815728916079, 0.8480289763547508, 0.7460411464819453, 0.656942029016816, 0.9795514871391415, 0.6080935950465294, 0.6971731105769332, 0.609014603973723, 0.9896267864314611, 0.5667960195388214, 0.6100503426980004, 0.6033996274766369, 0.7884592023409424, 0.9926674677955019, 0.530820588347308, 0.5149821070000781, 0.7461596422300791, 0.8398438100243715, 0.615181371677159, 0.6070164864344438, 0.8876056890276789, 0.6485849621866591, 0.5829167661639467, 0.7017394128365209, 0.7638445597933172, 0.6476979742022393, 0.6484755000637785, 0.8553840599280442, 0.6370734356072674, 0.8953573884461131, 0.5739232892197037, 0.9757461416855726, 0.9893236256035285, 0.7950013080046234, 0.816888932609179, 0.912134601491003, 0.5864819849851688, 0.9220468892364075, 0.8891371909682344, 0.5549494298831095, 0.922101690130134, 0.6774924866530108, 0.6578190083806826, 0.8114093397387124, 0.8632462207017746, 0.5196112444627099, 0.8139056378110145, 0.932823465145063, 0.9506175294112928, 0.7729018655284537, 0.7663535411865742, 0.6013872580811671, 0.6407279222680933, 0.9075664258356486, 0.9936085968064035, 0.9058038459554446, 0.6105426331756547, 0.5852071045610002, 0.6147934566583941, 0.90040133913682, 0.9350872785860924, 0.67397024907991, 0.6261533628476448, 0.9086845641383878, 0.8297941668665715, 0.8338863368028818, 0.9789117758368311, 0.5067277273110224, 0.7025364732797106, 0.5799260062883949, 0.526676196187154, 0.5336837565134911, 0.9400003946973048, 0.7584676783138947, 0.8585385525255365, 0.5809574595682784, 0.573418269998746, 0.603520602801052, 0.8754864643072038, 0.9025398432611202, 0.7034825596645171, 0.7880578166151615, 0.549926115135047, 0.9775060349157777, 0.9926911838378207, 0.8512498196772168, 
0.949504186615385, 0.6220922650463347, 0.671450867340028, 0.904569232306978, 0.5056095909222976, 0.7671893065942637, 0.5894131599014745, 0.952292674336187, 0.9634137038434161, 0.7388973376391076, 0.8770693932369702, 0.6024595147502296, 0.9583324581372875, 0.5862566939963755, 0.9904601759109344, 0.8138276493846204, 0.754440443410336, 0.7264152528503482, 0.6698674721080063, 0.8884350006207464, 0.7777244289964946, 0.5843924819383784, 0.7087505081398215, 0.5116956063344552, 0.8308285303262801, 0.647771147639093, 0.970177336793457, 0.6800351322249889, 0.9391932068703879, 0.5150880777823545, 0.909086875804257, 0.5282664500058354, 0.8652836257436988, 0.7549252173857661, 0.8586864542140499, 0.7532460063496969, 0.7363669565359923, 0.6732446722288692, 0.6033693904537701, 0.935920939748808, 0.5443711548646133, 0.508167632015564, 0.9895305515994335, 0.5367064059373066, 0.8893495992005211, 0.8636900988532821, 0.9366349319339944, 0.8409800324377763, 0.5982949955976071, 0.9260852817503642, 0.7637855207715174, 0.5525908627461599, 0.5747974067981491, 0.7457594718137939, 0.768964114326153, 0.7274350393428974, 0.6533388379848588, 0.9031107399173428, 0.5842183690293414, 0.8065167303017156, 0.5275518394247172, 0.8809785951517772, 0.9265902428374679, 0.8553838026551426, 0.8505861083483168, 0.9322917869847243, 0.8019246652654337, 0.8199613036198672, 0.9157168053995585, 0.8496867045987623, 0.7870528970165103, 0.5649310020383024, 0.7375437008278796, 0.984777520983459, 0.6440158066098562, 0.6333047874595591, 0.9525244084209348, 0.9614248019318086, 0.6859398152404146, 0.862200669060869, 0.9645026869825488, 0.6849663999160116, 0.5591452271800644, 0.7159192248862378, 0.6617733790802944, 0.62650921232497, 0.5522922665821568, 0.8689029307521416, 0.5569186993858304, 0.5005153163285683, 0.7930489776167897, 0.6555441799787574, 0.6881506969232936, 0.6999104660651791, 0.6520685478277732, 0.8846334854754535, 0.8331891851920623, 0.826085227806438, 0.6594839405039785, 0.6457383420610981, 
0.7802823728728494, 0.6429101352403399, 0.9937098138673386, 0.7496120789724958, 0.5027326542547299, 0.7397422628674488, 0.9569007304711585, 0.6442846343125499, 0.6561915422481485, 0.9203871241487018, 0.9732227969571839, 0.9766960716612716, 0.7497333684130689, 0.9188664050539851, 0.9812466588605451, 0.9116764847538489, 0.7198374065496713, 0.5986146934981549, 0.6638660580483118, 0.6656548392213165, 0.7000846088543592, 0.6790366220319749, 0.887441087183533, 0.8428774807347288, 0.9924065066244201, 0.9964653283593661, 0.9431135093499841, 0.938375267740317, 0.9610242740305264, 0.9622654101060852, 0.766800011018764, 0.6651595015140515, 0.5248700255702683, 0.8932086365563614, 0.7418141415529029, 0.5723537930876963, 0.5916266174070797, 0.511249812484809, 0.8183847097213011, 0.9847909364091945, 0.5072155697041554, 0.9312710444664858, 0.5630675614149057, 0.9923650730826388, 0.6187504651290718, 0.7230566802275064, 0.6539261624655841, 0.5768514287571231, 0.5084405243391019, 0.6908400040824, 0.5126799776188905, 0.5254756999848434, 0.9323363457472048, 0.8695482010937409, 0.8530410775881112, 0.8282913844381703, 0.9266284721903882, 0.5756639230307617, 0.9849052855470803, 0.8757777580149644, 0.8058671101565849, 0.646489174518726, 0.8603010664942259, 0.5650932317353499, 0.6063807062972439, 0.7010414217151526, 0.7419738942147474, 0.509973692901851, 0.5909383679432383, 0.5207576531842327, 0.7384856283789779, 0.8232025903388719, 0.7740666236765349, 0.9849587442556228, 0.8154500850054734, 0.6860720564432513, 0.7313584105348412, 0.985356169950417, 0.5014922106636961, 0.5751347595966856, 0.8714996229912217, 0.7231544528416598, 0.7784171920787816, 0.5147248086199119, 0.5237420037568199, 0.7744833206631607, 0.5407634280638618, 0.8805983230739471, 0.779184789534105, 0.9976023267817777, 0.6010858426128272, 0.8766635459822778, 0.6912205329917829, 0.9625043142824259, 0.9325279418902659, 0.8036640610433555, 0.9266994939974034, 0.6986485187560085, 0.5870602119740085, 0.729016920965841, 
0.5681766518140237, 0.6895428983187974, 0.6953653821301782, 0.5800925105987058, 0.7090850594925923, 0.5965642657996979, 0.6866002043306021, 0.9879227377471946, 0.8829015593403515, 0.8897886051328652, 0.988940967703487, 0.682227259118608, 0.6439080578262575, 0.7258804400829034, 0.9483540713972664, 0.9013657005108984, 0.5538720057050319, 0.7240750722452398, 0.5947016621300524, 0.5859054896422802, 0.5028927366474962, 0.8624378725612251, 0.862109904675565, 0.8408581889356344, 0.542347264471424, 0.6384974906243308, 0.9109356791867668, 0.9721697431727869, 0.6946997762198643, 0.6771874706321697, 0.8629534706077234, 0.587157770836181, 0.6278416483055564, 0.755252479607486, 0.8001940216490813, 0.6897838837204391, 0.7682367901360949, 0.7374969714897545, 0.6692761161734813, 0.8307896128994712, 0.5987759068152645, 0.5267400948648391, 0.8209915729944635, 0.9354457588993365, 0.7346336391154463, 0.5208950879936357, 0.5152458580787012, 0.5162693958365707, 0.6007480855526313, 0.7567277918588743, 0.7286123020428749, 0.9722739314877813, 0.9910387210797802, 0.6373178536198866, 0.8795360558094407, 0.5073743383155735, 0.7762255855657092, 0.9200367130626725, 0.5414025048605998, 0.6910788069317417, 0.9764425689056941, 0.9290925303767923, 0.8665151933665369, 0.8713800884969309, 0.5228937941741174, 0.8374958541493844, 0.5209200719489682, 0.8031673156887926, 0.5699565669446751, 0.8173689270038537, 0.9628011941070238, 0.6007234688053097, 0.8194579043497145, 0.6308715393632731, 0.9133753382608989, 0.8841176180925738, 0.7589274497138447, 0.8842235760926966, 0.9245602199466619, 0.844932764728551, 0.5918149474602655, 0.8162803504609809, 0.8502697951364524, 0.5577625103123148, 0.6338482882930698, 0.8475893775011948, 0.8294700438679016, 0.590612940440558, 0.7220519584807517, 0.720998561279346, 0.5364785869288524, 0.6324350402468186, 0.6797461328098662, 0.7150357493549208, 0.8126797759105548, 0.9615183399957616, 0.5664315932841029, 0.7386383132793328, 0.6923160555925448, 0.5812429078200578, 
0.5240781620501637, 0.7158931531471068, 0.9800609309095221, 0.9381751653844411, 0.9569425440815218, 0.6989147390869284, 0.7138616730857947, 0.8609427234090132, 0.6037951748198764, 0.8617018041584844, 0.6877064071662848, 0.5194064315381073, 0.8921922457470632, 0.9251025171684291, 0.8866837775202792, 0.7391840048065557, 0.8507733292699444, 0.9576579724524135, 0.9501159491930236, 0.7006578159450552, 0.731548987778654, 0.5318013466540317, 0.6391027272936769, 0.5484466300337825, 0.8544119762441061, 0.8920871048873813, 0.8691866100528673, 0.8713115509885052, 0.9982734939667268, 0.846050307501981, 0.5900835353013174, 0.6741664986943938, 0.8316978468278676, 0.9958713919192317, 0.7773946242859386, 0.9519630475564447, 0.5743788676940488, 0.6834036029343544, 0.5575384054929318, 0.5916462696953225, 0.6608656674932359, 0.6171211459272401, 0.5633585013839235, 0.546244789847371, 0.5879765854497712, 0.7888252502327275, 0.7361865076391902, 0.6626996949708714, 0.8012277618212593, 0.813295360947399, 0.8986058834569033, 0.629694384057869, 0.533937252184638, 0.9030684133166259, 0.5580448874988113, 0.917382683038561, 0.5393857888605292, 0.6806711320823102, 0.6097303920924761, 0.87543072927103, 0.8208807980936934, 0.5627574429127506, 0.7220667168997277, 0.9788826630470595, 0.8495284977248745, 0.8870089294912713, 0.744670564972224, 0.6670068334186505, 0.9763674995123103, 0.9426513136863462, 0.7982047975703189, 0.9282844262200249, 0.8946757211242724, 0.5545024043520332, 0.854504306560756, 0.7321718083400874, 0.9504929068439297, 0.6704042203903242, 0.8781150453812689, 0.6296619507920407, 0.5963254058286369, 0.7752050906098102, 0.7579872033525521, 0.7897689779029856, 0.8735232646087325, 0.9403768014370739, 0.619792934156036, 0.9958765621100101, 0.7245435609407784, 0.8975078595785051, 0.6181007980109336, 0.7936566693294603, 0.6142982571440926, 0.986306048823961, 0.6887424946038161, 0.6014109440196064, 0.8104625498229934, 0.6332024099396935, 0.6265170945139777, 0.7804634971340563, 
0.9725628249164371, 0.6432146886890275, 0.9327880969422266, 0.614716360791742, 0.6099905889416517, 0.9008243080962429, 0.812755956249082, 0.5982938326050673, 0.6008988971418594, 0.8611805764140765, 0.8958317383333332, 0.7644763648594979, 0.5596466500746113, 0.6154910223781517, 0.7509078644714049, 0.9773966140940897, 0.556922465627413, 0.7496379636539334, 0.8248489646429209, 0.6106481049018282, 0.8857120205077571, 0.5980205793783464, 0.8486535854013247, 0.5967336785361519, 0.7092803751746117, 0.8484195016629228, 0.9554495197630495, 0.7808571637871959, 0.6854793199132342, 0.5023760141103455, 0.8319359636596395, 0.5988864629055742, 0.9070641318963879, 0.760161468803567, 0.8132713645843868, 0.725732999677553, 0.6877025387734634, 0.8756165331397308, 0.7325536154166636, 0.5330136445478102, 0.5494694698726035, 0.9896389987655843, 0.9261230546730739, 0.8270971074029774, 0.9229345532867236, 0.7947193174924847, 0.6051723619499199, 0.6573239977355712, 0.918437165155914, 0.7612218976209681, 0.8769713639800476, 0.9942218936993565, 0.8599864603057725, 0.6678161051554549, 0.96566581374995, 0.797718152170851, 0.9630315615545855, 0.5220830862013065, 0.5436483739239337, 0.9238800349980816, 0.6322734364995453, 0.8422508052110824, 0.8288857980193218, 0.7378726766983306, 0.793627599753062, 0.5818603234883037, 0.5543089596469717, 0.8648697157606338, 0.7172351299406766, 0.7335318218890651, 0.84289214703081, 0.6347663243335435, 0.7354594471651297, 0.6447097530043265, 0.5083084172616474, 0.6715298370993449, 0.8829288233864638, 0.6088077196041822, 0.9407348034672658, 0.5401318951551903, 0.6719096960411514, 0.8914979974267347, 0.6379766584243111, 0.8332111546572223, 0.7086482463168271, 0.894024677054385, 0.861830705584811, 0.6080674737186377, 0.8864450656447257, 0.9926100951267988, 0.9365776074610255, 0.8658521475169587, 0.9951544080181769, 0.9111630295300589, 0.9317136542910582, 0.8629326585124106, 0.6277889543812196, 0.9536307516073739, 0.6781690109766044, 0.6528278873897136, 
0.9659092436861738, 0.7450753670137623, 0.8236393384677134, 0.9766317282408963, 0.8905256976157563, 0.5771623025525665, 0.7305687237360632, 0.8271459845336397, 0.5046171107124436, 0.5084636485886664, 0.9230318624667173, 0.7314006846703984, 0.7508341822131658, 0.8635801423772156, 0.5505696670599269, 0.5198872566059642, 0.6357803225021852, 0.6655032684220468, 0.575315668856279, 0.6130667502376471, 0.6048784281293965, 0.6735812888024371, 0.5393215053363495, 0.6901395072182567, 0.8107703026304062, 0.5780360353444264, 0.8750599482359597, 0.969836968942128, 0.9949978991148865, 0.7394184538190778, 0.876923506923901, 0.8749724271209025, 0.9959833886897901, 0.5946749628935264, 0.8630066853079273, 0.6045541479242412, 0.6093056354171189, 0.5407858991381196, 0.6630883231548675, 0.9150805966745916, 0.6356531336063166, 0.7707351115882846, 0.8451548330576568, 0.5792300907579675, 0.8037397626964997, 0.8452987786236947, 0.5001049371347607, 0.9653707575202087, 0.5104426566089337, 0.9338694798354858, 0.7088954955401505, 0.7306917234830347, 0.583628795912216, 0.8703434394173266, 0.7122168826429262, 0.9351436879204507, 0.5579797421393533, 0.9264755915770924, 0.994895075775918, 0.7867571067920697, 0.5527260568482943, 0.8121109650052776, 0.6506275040672831, 0.7597614788948351, 0.5829524254778413, 0.989638150510799, 0.8446367892717019, 0.9114227781967834, 0.7783937023247071, 0.8996414643191961, 0.7534455517932817, 0.9722682787539343, 0.7898404575168421, 0.7871846652034293, 0.720258413054604, 0.9783681918037124, 0.6219661066835587, 0.7939522309419589, 0.8674315647498054, 0.6401888502423487, 0.7024200806826043, 0.6280884838502879, 0.6137891191618068, 0.9290269162550782, 0.5210218802581632, 0.7505164107381102, 0.8819968754397312, 0.5199607952551983, 0.9455055147855291, 0.5998223144237302, 0.5750758248440155, 0.5277525472348243, 0.5766644971891486, 0.9153661113072205, 0.878172199917149, 0.8680300704264579, 0.8083304085629923, 0.7036312693323872, 0.836247665348488, 0.8387633126278156, 
0.9638961276598534, 0.9610135723484353, 0.6450094218223643, 0.527896925636012, 0.854628976324487, 0.9493328084349651, 0.9836230150693288, 0.7900358522985655, 0.7221307853188477, 0.8533235205128917, 0.6155346354720634, 0.6681443421464842, 0.9499841986282038, 0.9560910189100138, 0.6340850774713702, 0.6138743697618192, 0.5424015123998764, 0.9694548734220323, 0.9718870054300206, 0.923862870024328, 0.9832774911735007, 0.5286528363973282, 0.6441473646805325, 0.8869586791099198, 0.9456319666221495, 0.95748434547416, 0.8625109066402763, 0.9769327243493382, 0.7629591726896536, 0.7127677853954424, 0.6871252648473529, 0.6717682379594729, 0.6962149505060752, 0.9394914095102198, 0.5134293390075373, 0.9793994810637391, 0.6614010008876348, 0.7542254720211434, 0.774661702063538, 0.6693573744214153, 0.7665268167913708, 0.9769479763836038, 0.7313121136708929, 0.6280581244863137, 0.8902778262426377, 0.9347267343131227, 0.7262244579506973, 0.663096997615913, 0.8112436064148648, 0.7264525121580883, 0.7730931913394221, 0.9699925263949603, 0.5342511204209792, 0.7421698609873034, 0.9437572744222591, 0.529309523549663, 0.9459754810765224, 0.944272188130256, 0.8731883214923744, 0.567791578844798, 0.7361467548949141, 0.501119838901559, 0.6819846480815484, 0.8490872370389784, 0.8020605422790728, 0.5475532167819923, 0.6687061017653687, 0.6934772369658809, 0.619454566668237, 0.8231477927531579, 0.7970690204414381, 0.5403531740071976, 0.5731136306224034, 0.8992533821828224, 0.5320860576029478, 0.6092350893606375, 0.5740938046472922, 0.9337773423819651, 0.7428036575780705, 0.8012309380410311, 0.562811675067627, 0.881949390418958, 0.604865011031052, 0.7268149514700009, 0.6743165696057445, 0.5913894176663095, 0.5352581437741399, 0.6021175410394202, 0.5381205350080014, 0.9647204120640933, 0.6035908425853351, 0.6487166851709993, 0.7524578859060108, 0.9583618140653417, 0.8362828452281004, 0.564616377801481, 0.5655328441731231, 0.7547750399834279, 0.88877003072104, 0.5706587765257691, 
0.8881339021443573, 0.9660456482819386, 0.7832747569150654, 0.8374040792445226, 0.9914699660271556, 0.8114178855493205, 0.5639005318460607, 0.8916940058121576, 0.6757067353162709, 0.6877664033238768, 0.5761010193240725, 0.6674993679070078, 0.9395421156103043, 0.5798457837911739, 0.9983987830051435, 0.7071884944746065, 0.8256760383666366, 0.6460438793937959, 0.6714470067679281, 0.9906639738832455, 0.8025248712180177, 0.863141745559328, 0.559325988243583, 0.5245995831358898, 0.9073766792674052, 0.7008705186644293, 0.6078353505644609, 0.5336652533698281, 0.9617224453939192, 0.5986546989815029, 0.6883011375024359, 0.9203714323448899, 0.8390065157324615, 0.6591435171678895, 0.9392665534268031, 0.782791937890579, 0.9135709855629803, 0.9908261210944602, 0.6841900912176888, 0.7762256007857546, 0.9901665332947485, 0.9794746556813303, 0.9464693086495546, 0.5588584023377541, 0.657363113366515, 0.8492017751845609, 0.997548594078151, 0.7605701687248002, 0.9636452815015037, 0.5291694367081701, 0.6628540692264258, 0.8571326467852118, 0.6077733612897798, 0.889001965714332, 0.6966414828652824, 0.8359198462528628, 0.8385069740041668, 0.5290524635290386, 0.9633891947834591, 0.9213116274325535, 0.9430503077300825, 0.6824618787744492, 0.5329359452245918, 0.6832073429066335, 0.6709374551701558, 0.830343045254301, 0.6126331472800396, 0.5158651450428671, 0.6953098232241226, 0.5837688743307526, 0.53873305721889, 0.7120053560526478, 0.5883817199951319, 0.6427011723670903, 0.8410989220683847, 0.8841825761003963, 0.7411728644047801, 0.9110514286462196, 0.574511189946177, 0.6402034734943416, 0.9328854632090081, 0.6219657213353631, 0.9248066161740306, 0.9956689442662165, 0.5031993992698266, 0.5540837064884941, 0.5713108808453706, 0.5409148072497503, 0.7581135798922998, 0.9706703124196017, 0.5946962335463815, 0.9431103666163755, 0.9382339646500049, 0.7667904006618882, 0.9472520225366493, 0.6049130081848644, 0.8643161464721132, 0.7752475076540872, 0.9235330641037861, 0.9602990648508699, 
0.9391513290425617, 0.6107586474146771, 0.959102072029905, 0.8077274973626447, 0.9420182971098144, 0.5846194596402036, 0.9224922141737522, 0.6468536189141743, 0.5107251392834443, 0.9875954866390352, 0.988886464113754, 0.999128524199684, 0.7511508615546936, 0.5827027203069446, 0.7300474276953147, 0.9702254708682891, 0.9938889250266276, 0.9353939616250462, 0.6670280325662341, 0.5693441060847221, 0.6657122775260471, 0.9160901393944223, 0.5015158751130294, 0.8564273779763915, 0.6269930956038059, 0.9788042676660584, 0.8788628288536022, 0.6837860965953013, 0.7268199388075041, 0.9121921636206214, 0.5318593298785799, 0.9478997464160536, 0.8496146602594765, 0.5306632796262026, 0.6529091636619895, 0.754332048121442, 0.7540832590198174, 0.913130736456073, 0.9777680415486281, 0.7933144448035119, 0.5698877479784985, 0.7049560524985807, 0.9660310970129298, 0.8730871405647098, 0.5022359668290797, 0.8361452193742491, 0.8789347828745644, 0.8803637683880439, 0.8303946443774581, 0.9199073667156497, 0.5592118904531931, 0.7788191617926258, 0.7181952124561519, 0.5394584538946618, 0.8792124915731894, 0.5705889666759183, 0.9028454886140406, 0.5269217709162939, 0.7835534820554899, 0.7973912893059065, 0.8350301333734604, 0.6173946642243562, 0.912225947750313, 0.7613676359484377, 0.8666410708070279, 0.7433306322151733, 0.7986153816707307, 0.9418625418560324, 0.7781039617280509, 0.8988599939381468, 0.9609747217738431, 0.6109910351883645, 0.6380297784290965, 0.6055740346750813, 0.925880326310319, 0.7814199412256441, 0.6661429249660642, 0.8646617878694394, 0.7937580890021599, 0.805820435600122, 0.5548969409168725, 0.7646065979143399, 0.635066039687797, 0.5158411710457242, 0.8398644995184579, 0.9122442698412118, 0.5779472593159217, 0.5324577850535944, 0.7387657363609752, 0.9173982058528497, 0.7587292703150919, 0.9204360593112825, 0.7858183093225538, 0.8930991285567282, 0.7344354509736029, 0.8523650883294467, 0.6718650951354465, 0.7040880994441097, 0.6985732923356214, 0.7595406220891134, 
0.7117473924071003, 0.5377095572519146, 0.508496937913319, 0.6088301119738412, 0.5440897735065158, 0.9925308627141868, 0.5727636927083487, 0.6568539541242226, 0.9159509206177514, 0.6709768961943048, 0.886917772275823, 0.5805273399549014, 0.9102522936513038, 0.6457088394408899, 0.7341910322727467, 0.8440365566470693, 0.6712389259342945, 0.6588564310627341, 0.6699261202530095, 0.6356987456437819, 0.5767476484141116, 0.9023056671480393, 0.908809231105673, 0.5771672065969221, 0.6904679210475118, 0.9257998075781795, 0.939693820710982, 0.6255934519740256, 0.8593446193206129, 0.9331695121308919, 0.6158449787900016, 0.7121422437811906, 0.5905986152459961, 0.9235831723050871, 0.8086297800373792, 0.7763046137517042, 0.6144977591436239, 0.9066660342169339, 0.8686774346667927, 0.8020727598929596, 0.8661968485039668, 0.5317893910382823, 0.5055058015989973, 0.8974581431385158, 0.9229003052302918, 0.545230014805189, 0.6862339349920704, 0.8087335196194918, 0.8568632308299993, 0.6446490479223607, 0.9019208330441122, 0.9977441779202174, 0.8053722584889235, 0.8157482842550148, 0.8110724125215207, 0.7775227737896604, 0.7577211504052248, 0.6267527925010816, 0.8024796643678551, 0.5069584559467297, 0.9743648577206413, 0.8235144685342601, 0.5444160634233013, 0.5331905565177204, 0.5627994194325795, 0.6876184328805781, 0.7376105192728734, 0.740598599085917, 0.8780639088287092, 0.5500131431766575, 0.6716226086555825, 0.5360703717640731, 0.657707038427384, 0.9916963783227621, 0.5737691718231144, 0.7161124713950369, 0.6389137018642257, 0.8174411238515575, 0.9127808825045742, 0.8653678758256087, 0.8954960243979542, 0.6419279792815611, 0.6584797238009628, 0.7833864847848482, 0.9760066930766084, 0.7485300347455834, 0.9884731242844362, 0.5568357118993823, 0.868969306200195, 0.7221886406505018, 0.9536402688060611, 0.6701037821191091, 0.8085265052250152, 0.9931941293304539, 0.8910317113453939, 0.7255748710244365, 0.5768869825977552, 0.7622853200999105, 0.7990271843703116, 0.6874753075877574, 
0.7169638039864927, 0.7201170302090745, 0.8768716895308791, 0.7139387670575805, 0.7262156990102899, 0.5078539847443538, 0.6825698770392494, 0.6328570933720068, 0.6388821902675493, 0.6599520626821146, 0.5625116802608923, 0.7324032161449936, 0.5205007486569978, 0.7701403984515194, 0.9570710502878808, 0.8898502784233658, 0.5512356082019727, 0.5064446264287277, 0.8990731552776915, 0.8684046945320483, 0.6975130089875201, 0.7043659318813005, 0.6932812804185962, 0.7998014063846279, 0.7875362087956033, 0.872680931590418, 0.9339676524628638, 0.518986742332526, 0.9586933108860793, 0.8292282264396341, 0.9267108451075718, 0.827000882228903, 0.6829822497336355, 0.8056138192454605, 0.6847452386737695, 0.8857001358518253, 0.9836479557331272, 0.5438371485763621, 0.8041982216052506, 0.7127156741600928, 0.5690128036709612, 0.9335964037478017, 0.7478652929745271, 0.5203927672240416, 0.6190372154980266, 0.7590017578009732, 0.9224158988010482, 0.6434574186058744, 0.7653545440212672, 0.8524415986643952, 0.5176542190821016, 0.7404148670637212, 0.5842084915765599, 0.5180537157111598, 0.572017013718523, 0.5588002379999379, 0.7791839484613869, 0.8165640844276504, 0.5829001142676878, 0.8699364084940925, 0.7204962714522614, 0.6490824760133743, 0.8832645766562235, 0.5271665320660437, 0.9224603687469346, 0.9780094908866144, 0.6595515234645046, 0.9945798089582172, 0.5119227881274448, 0.5714722668282363, 0.8662525262371915, 0.8097715794551488, 0.6784397510998249, 0.9713071265382353, 0.5942636694031491, 0.9425047594712187, 0.8830466277214701, 0.6096415217753139, 0.8655782286754452, 0.8370143694675711, 0.7619027294010448, 0.8531256257351945, 0.989984182723862, 0.6977224847501801, 0.5265197574276371, 0.505271628780527, 0.9827645143159802, 0.8487614797462973, 0.5262107904373793, 0.8208110831262713, 0.6321990718210003, 0.9174513624271482, 0.5443595928032439, 0.9914127768864583, 0.6007701077868521, 0.698501911781718, 0.6055264279968526, 0.8342303285865502, 0.6260236959678184, 0.765857265694642, 
0.8543767624820197, 0.5595244234877692, 0.7652825444586732, 0.5244524766072682, 0.5824947831552241, 0.7505017093481471, 0.6439090949950373, 0.6034146339062469, 0.8911796351334358, 0.5857851191242187, 0.9786854470522823, 0.6734047803245933, 0.9228186593478598, 0.9404570161335738, 0.9926509934032259, 0.5147906734751659, 0.59753913343529, 0.5431690125819801, 0.9755529615905001, 0.6368605278117863, 0.5599955002538435, 0.6071570546465792, 0.7847002472794575, 0.6019007644434755, 0.5197872399342747, 0.7206496847215327, 0.6269358055996912, 0.6628330707632195, 0.8367824452566546, 0.5163093868390709, 0.9482899366411253, 0.6317798931446121, 0.7814465637261034, 0.7739164216653627, 0.9745182140339803, 0.6479981184115878, 0.9290668947397578, 0.7635842499924838, 0.7283369045211436, 0.6563301684366629, 0.5351995038848518, 0.9418424463820771, 0.6158203245620251, 0.7664412687103574, 0.5922880098063183, 0.683827936211759, 0.6407773877496161, 0.7239313410309494, 0.5950984928603514, 0.6706669146256363, 0.7646240572491172, 0.521728944452631, 0.9274743354191357, 0.9024016856985297, 0.8652510688901921, 0.5078873880391597, 0.6706622681211505, 0.615496485853271, 0.605162721166922, 0.8705877234123856, 0.9850222573934676, 0.7259545483547576, 0.9377920066269823, 0.9104869155875125, 0.8451911743868028, 0.9304170304935692, 0.7606084947117299, 0.9994851268251944, 0.9280530436793701, 0.5756704969683611, 0.7302228679140619, 0.7577897191649279, 0.7494328641586734, 0.9058187378099181, 0.5704378096146449, 0.8680861734015508, 0.5902829276173948, 0.7215348436667526, 0.6188155574536989, 0.5093198203881067, 0.9054565941566697, 0.6798526575382959, 0.9687139023337543, 0.719902718911374, 0.5775124308951177, 0.6135097016023643, 0.752184869970272, 0.8482653892565428, 0.9682814846144081, 0.5263322003947282, 0.5513579906173498, 0.7756323731354249, 0.5604320288315894, 0.8990027678603424, 0.576241852014543, 0.8532421452891362, 0.9374811943298395, 0.8722849163158974, 0.8730305618312975, 0.5500099247444153, 
0.5142533537895289, 0.987583733032257, 0.7485904231041761, 0.5304211198968328, 0.7849624838766822, 0.9323613802231593, 0.9724150826861191, 0.5797430343754679, 0.955176170850734, 0.9913995348045034, 0.8508290744198354, 0.7917833854005378, 0.9248189037294872, 0.714665056349017, 0.5487685033567022, 0.8393070770052148, 0.9199879376291988, 0.8323490438680774, 0.933238216289273, 0.9744679686738166, 0.7862234948180713, 0.9560845258293993, 0.528524416656819, 0.6948280747192335, 0.8405835547663963, 0.8048494655611536, 0.8287718261961008, 0.8375139487401841, 0.6469251488020429, 0.5926722291140718, 0.5581991650501407, 0.68225332403746, 0.9580744749468568, 0.8421622600829135, 0.5194900901821802, 0.8336270136780056, 0.9276114160803508, 0.6626584540575065, 0.5403859771942585, 0.8531329187325178, 0.5637544320904068, 0.6987768269898261, 0.9527788123798944, 0.6433991932708388, 0.6877846818183168, 0.9374437405259599, 0.6151533681615675, 0.5704518871297708, 0.8920486473918239, 0.8676105400032761, 0.7786818210984977, 0.9821569418816641, 0.6722396915756141, 0.62153080536385, 0.7445885773533947, 0.6797743754861881, 0.8807381328153923, 0.505461725081236, 0.6159742298304187, 0.8249045783455491, 0.9092092347635592, 0.7140878739212886, 0.9621689041776074, 0.922583959871037, 0.5189620041940193, 0.7183000283616761, 0.5501550381409948, 0.8270557267479248, 0.6362123414580534, 0.9724321774813887, 0.5976866929868196, 0.8317399851682332, 0.5929961986270168, 0.5833087692466963, 0.5969001978750569, 0.8020899188828122, 0.9562329822399562, 0.9083005255596819, 0.604751910509324, 0.6256851481834282, 0.656459338064894, 0.9897222634989712, 0.5019374428947282, 0.7873395634768123, 0.629859576210666, 0.9855009764501927, 0.9512019539168605, 0.8676071009663353, 0.9999774250795208, 0.760640987266507, 0.7195442797195608, 0.9960228838797002, 0.6159353726518506, 0.6553897052772657, 0.9293659934909615, 0.7876343995315683, 0.8163301539375496, 0.5451602089683572, 0.9193306140949336, 0.775662775302384, 
0.6177981117456984, 0.711232714129105, 0.5406245907328964, 0.9850619886430564, 0.7688628114133249, 0.9851013729939717, 0.898890666045394, 0.6698892347587144, 0.7695315039704123, 0.8014326886619136, 0.8026705272480148, 0.9554683387248024, 0.7927906739122454, 0.9513252545616433, 0.5583748304712671, 0.7858327157191018, 0.6796911975357001, 0.6858848137132365, 0.8075843826588807, 0.6644092707123079, 0.6471210466628707, 0.9181738467248485, 0.6406944454003772, 0.9394965532151454, 0.5639762626623858, 0.5118348800817215, 0.7225877951199275, 0.947360420430391, 0.9606037751934948, 0.6444631210385297, 0.8984729784720482, 0.5109984391527393, 0.7978697012501457, 0.5444475161752657, 0.7763952918669244, 0.6345612036325404, 0.8327451238108659, 0.8924912978199484, 0.5039206000282631, 0.6870252181414209, 0.6633430962701836, 0.7217054293661156, 0.5013272760731053, 0.6635517297393985, 0.6272472784505518, 0.7823628975301169, 0.7945818640966602, 0.7433663624833196, 0.8681189185377732, 0.821032913392513, 0.7556619480437063, 0.7423392621394114, 0.7913239269134973, 0.9814804972226786, 0.9907591871892574, 0.6580371999369794, 0.6684985119931101, 0.8318633391470769, 0.9130541422678072, 0.9636641247720316, 0.5594218978815797, 0.8923725856741409, 0.7519039702088162, 0.6655319347594945, 0.6629016905515412, 0.6906329500508592, 0.9345345276957204, 0.5151871703619326, 0.8214006325628158, 0.8039247093564152, 0.9055284484703978, 0.6825634647562393, 0.8138334631722521, 0.7775115448282964, 0.5183461110739542, 0.7559249889788832, 0.8099482139623844, 0.9675187998106507, 0.7507840203114416, 0.5333897353659749, 0.9215866698676763, 0.8579760790284859, 0.5600512876497092, 0.8938952825200117, 0.7961221843515032, 0.5438277439752797, 0.5496852514495915, 0.5333075298328729, 0.6187926978479243, 0.708280464155822, 0.7225052739340487, 0.5957404883641293, 0.635706087634931, 0.5103617889722701, 0.6281723036903588, 0.5805623117354277, 0.5516131592626556, 0.7108379352935396, 0.5658843974024206, 0.8868427699677788, 
0.7041305189519829, 0.5754465214278747, 0.9276959842602779, 0.510843449246049, 0.9976779553141659, 0.6491425548921557, 0.5421772926742012, 0.7537215077841725, 0.9028006410131948, 0.5869239801112562, 0.7417068119420782, 0.7610482751847713, 0.6243523927664607, 0.8811677656242266, 0.7267856265438216, 0.6824964320212823, 0.6623633071646956, 0.8546002060867981, 0.6829797492224212, 0.7330502459938577, 0.56314843869452, 0.6242536187099437, 0.7179663387202555, 0.8928784742094524, 0.5539807956725318, 0.8202636628422189, 0.5934573513472396, 0.5278616550704691, 0.8910985191693424, 0.5166626241597116, 0.7820279304314096, 0.6117405399467083, 0.655517941350608, 0.5990413232681231, 0.7277218031159405, 0.8881811832131111, 0.6253899095673823, 0.5022719689255692, 0.7565879749631125, 0.8287532017758004, 0.5113635109353638, 0.5938116997945808, 0.8074694498880592, 0.7445948788642843, 0.7518312595773975, 0.653475796946003, 0.5773481168936356, 0.7687814588786883, 0.9400237791801073, 0.7291165198628897, 0.9080613101516024, 0.5364166302728399, 0.8293215407425376, 0.5895220002171028, 0.608902662607063, 0.6564632370797786, 0.782063294272668, 0.6162326180713753, 0.8304540120314123, 0.6584459814192004, 0.7624270884362871, 0.9527791991634176, 0.718775138127759, 0.873206804833677, 0.86849384235325, 0.6310796669511953, 0.5441817907135311, 0.6440271974766812, 0.9379708604413772, 0.9270794727647482, 0.7555673692767391, 0.9942280293641161, 0.8255441774307202, 0.8114400448785974, 0.7163285025384967, 0.9307712166283605, 0.8782711070580267, 0.8968004072456401, 0.7549042036801079, 0.6514234565745659, 0.7258464065999979, 0.7882073558605801, 0.8141282259599119, 0.8486698986814731, 0.5350817344502337, 0.6135456239693732, 0.7173055874302068, 0.6688574399512488, 0.6678946748953268, 0.845921998765806, 0.6058458013290207, 0.7824779073989613, 0.6374462476682488, 0.5854171065940504, 0.8390176597016082, 0.6142741713385914, 0.5746760557963155, 0.7630400479143152, 0.6561635137188541, 0.7111629793811978, 
0.8782398885108742, 0.8790379252126461, 0.9157072415689174, 0.5024634439484983, 0.8391036370371256, 0.9790076151891898, 0.9587814784216695, 0.7054486253294767, 0.6668526815472458, 0.5137353822027135, 0.6509597053834795, 0.7283636280795134, 0.5356573075256076, 0.7596088498660876, 0.7042549964579398, 0.8799527419314296, 0.5143585816603526, 50000.0};
int h_B[]= {
3, 5, 7, 9, 11, 13, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 145, 147, 149, 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 251, 253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 287, 289, 291, 293, 295, 297, 299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343, 345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 373, 375, 377, 379, 381, 383, 385, 387, 389, 391, 393, 395, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435, 437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 476, 478, 480, 482, 485, 487, 489, 491, 493, 495, 497, 499, 501, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527, 529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 559, 561, 563, 565, 567, 569, 571, 573, 575, 577, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619, 621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 642, 644, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665, 668, 670, 672, 674, 677, 679, 681, 683, 687, 689, 691, 693, 695, 697, 700, 702, 704, 706, 711, 713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757, 759, 761, 763, 765, 768, 770, 773, 775, 778, 780, 782, 784, 786, 788, 790, 792, 795, 797, 800, 802, 805, 807, 809, 811, 813, 815, 817, 819, 822, 824, 827, 829, 831, 833, 836, 838, 840, 842, 846, 848, 
850, 852, 854, 856, 858, 860, 862, 864, 867, 869, 872, 874, 877, 879, 881, 883, 885, 887, 890, 892, 894, 896, 898, 900, 903, 905, 908, 910, 913, 915, 918, 920, 923, 925, 928, 930, 933, 935, 938, 940, 942, 944, 946, 948, 951, 953, 955, 957, 959, 961, 964, 966, 968, 970, 972, 974, 977, 979, 982, 984, 987, 989, 992, 994, 996, 998, 1001, 1003, 1005, 1007, 1010, 1012, 1016, 1018, 1020, 1022, 1025, 1027, 1030, 1032, 1035, 1037, 1040, 1042, 1045, 1047, 1050, 1052, 1055, 1057, 1060, 1062, 1065, 1067, 1070, 1072, 1075, 1077, 1080, 1082, 1085, 1087, 1090, 1092, 1095, 1097, 1100, 1102, 1104, 1106, 1108, 1110, 1113, 1115, 1118, 1120, 1123, 1125, 1128, 1130, 1133, 1135, 1138, 1140, 1143, 1145, 1148, 1150, 1153, 1155, 1158, 1160, 1163, 1165, 1168, 1170, 1172, 1174, 1176, 1178, 1181, 1183, 1186, 1188, 1191, 1193, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216, 1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1289, 1291, 1293, 1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1326, 1328, 1330, 1332, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1355, 1357, 1359, 1361, 1363, 1365, 1367, 1369, 1371, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1389, 1391, 1393, 1395, 1397, 1399, 1401, 1403, 1405, 1407, 1409, 1411, 1413, 1415, 1418, 1420, 1423, 1425, 1428, 1430, 1433, 1435, 1438, 1440, 1443, 1445, 1448, 1450, 1453, 1455, 1457, 1459, 1461, 1463, 1466, 1468, 1471, 1473, 1475, 1477, 1479, 1481, 1483, 1485, 1488, 1490, 1492, 1494, 1497, 1499, 1501, 1503, 1506, 1508, 1512, 1514, 1516, 1518, 1520, 1522, 1525, 1527, 1530, 1532, 1537, 1539, 1541, 1543, 1545, 1547, 1550, 1552, 1555, 1557, 1560, 1562, 1565, 1567, 1569, 1571, 1573, 1575, 1578, 1580, 1583, 1585, 1588, 1590, 1593, 1595, 1598, 1600, 1603, 1605, 1608, 1610, 1613, 1615, 1618, 1620, 1623, 1625, 1628, 1630, 
1633, 1635, 1637, 1639, 1641, 1643, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1661, 1663, 1665, 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1693, 1695, 1697, 1699, 1701, 1703, 1705, 1707, 1709, 1711, 1713, 1715, 1717, 1719, 1721, 1723, 1725, 1727, 1729, 1731, 1733, 1735, 1738, 1740, 1742, 1744, 1747, 1749, 1752, 1754, 1757, 1759, 1762, 1764, 1767, 1769, 1772, 1774, 1777, 1779, 1781, 1783, 1785, 1787, 1790, 1792, 1795, 1797, 1800, 1802, 1805, 1807, 1810, 1812, 1815, 1817, 1820, 1822, 1825, 1827, 1830, 1832, 1835, 1837, 1840, 1842, 1845, 1847, 1850, 1852, 1855, 1857, 1860, 1862, 1865, 1867, 1869, 1871, 1873, 1875, 1878, 1880, 1883, 1885, 1888, 1890, 1893, 1895, 1898, 1900, 1903, 1905, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938, 1940, 1943, 1945, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976, 1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014, 2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052, 2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090, 2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2109, 2111, 2113, 2115, 2117, 2119, 2121, 2123, 2125, 2127, 2129, 2131, 2133, 2135, 2137, 2139, 2141, 2143, 2145, 2147, 2149, 2151, 2153, 2155, 2157, 2159, 2161, 2163, 2165, 2167, 2169, 2171, 2174, 2176, 2178, 2180, 2182, 2184, 2186, 2188, 2190, 2192, 2194, 2196, 2199, 2201, 2203, 2205, 2208, 2210, 2212, 2214, 2217, 2219, 2221, 2223, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242, 2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280, 2282, 2284, 2287, 2289, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318, 2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 
2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356, 2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394, 2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432, 2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470, 2473, 2475, 2477, 2479, 2481, 2483, 2485, 2487, 2489, 2491, 2493, 2495, 2497, 2499, 2501, 2503, 2505, 2507, 2509, 2511, 2513, 2515, 2517, 2519, 2521, 2523, 2525, 2527, 2529, 2531, 2533, 2535, 2537, 2539, 2541, 2543, 2545, 2547, 2549, 2551, 2553, 2555, 2557, 2559, 2562, 2564, 2566, 2568, 2571, 2573, 2575, 2577, 2579, 2581, 2583, 2585, 2587, 2589, 2592, 2594, 2596, 2598, 2601, 2603, 2605, 2607, 2610, 2612, 2615, 2617, 2621, 2623, 2625, 2627, 2629, 2631, 2633, 2635, 2637, 2639, 2641, 2643, 2645, 2647, 2649, 2651, 2653, 2655, 2658, 2660, 2662, 2664, 2667, 2669, 2672, 2674, 2677, 2679, 2682, 2684, 2687, 2689, 2691, 2693, 2695, 2697, 2700, 2702, 2705, 2707, 2710, 2712, 2715, 2717, 2720, 2722, 2725, 2727, 2729, 2731, 2733, 2735, 2738, 2740, 2743, 2745, 2748, 2750, 2753, 2755, 2758, 2760, 2763, 2765, 2768, 2770, 2773, 2775, 2778, 2780, 2783, 2785, 2788, 2790, 2793, 2795, 2798, 2800, 2803, 2805, 2808, 2810, 2813, 2815, 2817, 2819, 2821, 2823, 2826, 2828, 2831, 2833, 2836, 2838, 2841, 2843, 2846, 2848, 2851, 2853, 2856, 2858, 2861, 2863, 2865, 2867, 2869, 2871, 2874, 2876, 2879, 2881, 2884, 2886, 2889, 2891, 2894, 2896, 2899, 2901, 2904, 2906, 2909, 2911, 2914, 2916, 2919, 2921, 2924, 2926, 2929, 2931, 2934, 2936, 2939, 2941, 2944, 2946, 2949, 2951, 2954, 2956, 2959, 2961, 2964, 2966, 2969, 2971, 2974, 2976, 2979, 2981, 2984, 2986, 2989, 2991, 2994, 2996, 2999, 3001, 3004, 3006, 3009, 3011, 3013, 3015, 3017, 3019, 3022, 3024, 3027, 3029, 3032, 3034, 3037, 3039, 3041, 3043, 3046, 3048, 3051, 3053, 3059, 3061, 3063, 3065, 3067, 3069, 3072, 3074, 3077, 3079, 3082, 3084, 3087, 3089, 3092, 3094, 
3097, 3099, 3101, 3103, 3105, 3107, 3109, 3111, 3113, 3115, 3117, 3119, 3122, 3124, 3127, 3129, 3132, 3134, 3137, 3139, 3142, 3144, 3147, 3149, 3155, 3157, 3159, 3161, 3163, 3165, 3168, 3170, 3172, 3174, 3176, 3178, 3181, 3183, 3186, 3188, 3194, 3196, 3199, 3201, 3204, 3206, 3208, 3210, 3213, 3215, 3217, 3219, 3224, 3226, 3228, 3230, 3232, 3234, 3236, 3238, 3241, 3243, 3245, 3247, 3249, 3251, 3254, 3256, 3259, 3261, 3264, 3266, 3269, 3271, 3273, 3275, 3277, 3279, 3281, 3283, 3285, 3287, 3290, 3292, 3295, 3297, 3300, 3302, 3305, 3307, 3310, 3312, 3315, 3317, 3320, 3322, 3325, 3327, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344, 3346, 3348, 3351, 3353, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382, 3385, 3387, 3389, 3391, 3393, 3395, 3398, 3400, 3403, 3405, 3407, 3409, 3411, 3413, 3416, 3418, 3421, 3423, 3426, 3428, 3431, 3433, 3436, 3438, 3441, 3443, 3446, 3448, 3451, 3453, 3456, 3458, 3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3497, 3499, 3501, 3503, 3505, 3507, 3510, 3512, 3515, 3517, 3519, 3521, 3523, 3525, 3528, 3530, 3533, 3535, 3538, 3540, 3543, 3545, 3547, 3549, 3551, 3553, 3556, 3558, 3561, 3563, 3566, 3568, 3571, 3573, 3576, 3578, 3582, 3584, 3586, 3588, 3593, 3595, 3598, 3600, 3603, 3605, 3608, 3610, 3613, 3615, 3617, 3619, 3622, 3624, 3627, 3629, 3641, 3643, 3646, 3648, 3651, 3653, 3656, 3658, 3661, 3663, 3665, 3667, 3669, 3671, 3674, 3676, 3679, 3681, 3684, 3686, 3689, 3691, 3694, 3696, 3699, 3701, 3703, 3705, 3708, 3710, 3713, 3715, 3721, 3723, 3725, 3727, 3729, 3731, 3734, 3736, 3739, 3741, 3744, 3746, 3749, 3751, 3753, 3755, 3757, 3759, 3762, 3764, 3767, 3769, 3772, 3774, 3777, 3779, 3781, 3783, 3785, 3787, 3790, 3792, 3795, 3797, 3800, 3802, 3805, 3807, 3810, 3812, 3815, 3817, 3820, 3822, 3825, 3827, 3829, 3831, 3834, 3836, 3839, 3841, 3847, 3849, 3852, 3854, 3857, 3859, 3862, 3864, 3867, 3869, 3872, 3874, 3877, 3879, 3882, 3884, 3887, 3889, 3891, 
3893, 3895, 3897, 3900, 3902, 3905, 3907, 3910, 3912, 3915, 3917, 3920, 3922, 3925, 3927, 3930, 3932, 3935, 3937, 3939, 3941, 3943, 3945, 3948, 3950, 3953, 3955, 3958, 3960, 3963, 3965, 3967, 3969, 3971, 3973, 3975, 3977, 3980, 3982, 3984, 3986, 3990, 3992, 3995, 3997, 4000, 4002, 4005, 4007, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028, 4030, 4032, 4034, 4036, 4039, 4041, 4044, 4046, 4049, 4051, 4054, 4056, 4059, 4061, 4064, 4066, 4069, 4071, 4074, 4076, 4079, 4081, 4084, 4086, 4089, 4091, 4093, 4095, 4097, 4099, 4102, 4104, 4107, 4109, 4112, 4114, 4117, 4119, 4122, 4124, 4127, 4129, 4132, 4134, 4137, 4139, 4142, 4144, 4147, 4149, 4152, 4154, 4157, 4159, 4161, 4163, 4165, 4167, 4170, 4172, 4175, 4177, 4180, 4182, 4185, 4187, 4189, 4191, 4193, 4195, 4197, 4199, 4202, 4204, 4207, 4209, 4215, 4217, 4219, 4221, 4223, 4225, 4228, 4230, 4233, 4235, 4238, 4240, 4243, 4245, 4248, 4250, 4253, 4255, 4258, 4260, 4263, 4265, 4267, 4269, 4271, 4273, 4275, 4277, 4279, 4281, 4283, 4285, 4287, 4289, 4291, 4293, 4295, 4297, 4299, 4301, 4303, 4305, 4307, 4309, 4311, 4313, 4315, 4317, 4319, 4321, 4323, 4325, 4327, 4329, 4331, 4333, 4335, 4337, 4339, 4341, 4343, 4345, 4347, 4349, 4351, 4353, 4355, 4357, 4359, 4361, 4363, 4365, 4367, 4369, 4371, 4373, 4375, 4377, 4379, 4381, 4383, 4385, 4387, 4389, 4391, 4393, 4395, 4397, 4399, 4401, 4403, 4405, 4407, 4409, 4411, 4413, 4415, 4417, 4419, 4421, 4424, 4426, 4428, 4430, 4433, 4435, 4437, 4439, 4442, 4444, 4446, 4448, 4451, 4453, 4456, 4458, 4460, 4462, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4480, 4482, 4484, 4486, 4488, 4490, 4492, 4494, 4496, 4498, 4500, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4518, 4521, 4523, 4525, 4527, 4530, 4532, 4534, 4536, 4538, 4540, 4542, 4544, 4546, 4548, 4550, 4552, 4554, 4556, 4558, 4560, 4562, 4564, 4566, 4568, 4570, 4572, 4575, 4577, 4579, 4581, 4584, 4586, 4589, 4591, 4593, 4595, 4597, 4599, 4601, 4603, 4606, 4608, 4610, 4612, 4617, 4619, 4621, 4623, 4625, 4627, 4630, 4632, 
4635, 4637, 4640, 4642, 4645, 4647, 4650, 4652, 4655, 4657, 4660, 4662, 4665, 4667, 4670, 4672, 4675, 4677, 4680, 4682, 4685, 4687, 4689, 4691, 4694, 4696, 4699, 4701, 4707, 4709, 4711, 4713, 4715, 4717, 4720, 4722, 4725, 4727, 4730, 4732, 4735, 4737, 4740, 4742, 4745, 4747, 4750, 4752, 4754, 4756, 4758, 4760, 4763, 4765, 4768, 4770, 4773, 4775, 4778, 4780, 4782, 4784, 4786, 4788, 4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 4808, 4810, 4812, 4814, 4816, 4818, 4820, 4822, 4824, 4826, 4828, 4830, 4832, 4834, 4836, 4838, 4840, 4842, 4844, 4847, 4849, 4851, 4853, 4855, 4857, 4860, 4862, 4865, 4867, 4870, 4872, 4875, 4877, 4879, 4881, 4884, 4886, 4888, 4890, 4894, 4896, 4899, 4901, 4904, 4906, 4909, 4911, 4913, 4915, 4918, 4920, 4922, 4924, 4928, 4930, 4933, 4935, 4938, 4940, 4943, 4945, 4948, 4950, 4953, 4955, 4958, 4960, 4962, 4964, 4967, 4969, 4972, 4974, 4980, 4982, 4984, 4986, 4989, 4991, 4993, 4995, 4998, 5000, 5004, 5006, 5008, 5010, 5013, 5015, 5018, 5020, 5023, 5025, 5028, 5030, 5032, 5034, 5037, 5039, 5042, 5044, 5047, 5049, 5051, 5053, 5055, 5057, 5060, 5062, 5065, 5067, 5070, 5072, 5075, 5077, 5080, 5082, 5085, 5087, 5090, 5092, 5095, 5097, 5099, 5101, 5103, 5105, 5108, 5110, 5113, 5115, 5118, 5120, 5123, 5125, 5127, 5129, 5131, 5133, 5136, 5138, 5141, 5143, 5146, 5148, 5151, 5153, 5155, 5157, 5159, 5161, 5163, 5165, 5167, 5169, 5171, 5173, 5175, 5177, 5179, 5181, 5183, 5185, 5188, 5190, 5193, 5195, 5198, 5200, 5202, 5204, 5206, 5208, 5211, 5213, 5215, 5217, 5219, 5221, 5224, 5226, 5228, 5230, 5232, 5234, 5237, 5239, 5242, 5244, 5247, 5249, 5252, 5254, 5257, 5259, 5262, 5264, 5267, 5269, 5272, 5274, 5276, 5278, 5280, 5282, 5285, 5287, 5289, 5291, 5293, 5295, 5297, 5299, 5301, 5303, 5305, 5307, 5309, 5311, 5313, 5315, 5317, 5319, 5321, 5323, 5325, 5327, 5330, 5332, 5334, 5336, 5339, 5341, 5344, 5346, 5352, 5354, 5356, 5358, 5360, 5362, 5365, 5367, 5370, 5372, 5375, 5377, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396, 5398, 5401, 5403, 5405, 
5407, 5409, 5411, 5413, 5415, 5418, 5420, 5423, 5425, 5431, 5433, 5436, 5438, 5441, 5443, 5446, 5448, 5450, 5452, 5454, 5456, 5458, 5460, 5462, 5464, 5467, 5469, 5472, 5474, 5477, 5479, 5482, 5484, 5487, 5489, 5492, 5494, 5497, 5499, 5501, 5503, 5505, 5507, 5510, 5512, 5514, 5516, 5518, 5520, 5523, 5525, 5528, 5530, 5533, 5535, 5538, 5540, 5542, 5544, 5547, 5549, 5552, 5554, 5557, 5559, 5561, 5563, 5565, 5567, 5569, 5571, 5573, 5575, 5577, 5579, 5581, 5583, 5585, 5587, 5589, 5591, 5593, 5595, 5597, 5599, 5601, 5603, 5605, 5607, 5609, 5611, 5613, 5615, 5617, 5619, 5621, 5623, 5625, 5627, 5629, 5631, 5633, 5635, 5637, 5639, 5641, 5643, 5645, 5647, 5649, 5651, 5653, 5655, 5657, 5659, 5661, 5663, 5666, 5668, 5671, 5673, 5675, 5677, 5679, 5681, 5683, 5685, 5687, 5689, 5691, 5693, 5695, 5697, 5699, 5701, 5703, 5705, 5707, 5709, 5711, 5713, 5715, 5717, 5719, 5721, 5723, 5725, 5727, 5729, 5731, 5733, 5735, 5737, 5739, 5741, 5743, 5745, 5747, 5749, 5751, 5753, 5755, 5757, 5760, 5762, 5764, 5766, 5769, 5771, 5773, 5775, 5778, 5780, 5782, 5784, 5786, 5788, 5790, 5792, 5794, 5796, 5798, 5800, 5802, 5804, 5806, 5808, 5810, 5812, 5815, 5817, 5819, 5821, 5823, 5825, 5828, 5830, 5832, 5834, 5837, 5839, 5841, 5843, 5846, 5848, 5850, 5852, 5854, 5856, 5858, 5860, 5862, 5864, 5866, 5868, 5870, 5872, 5874, 5876, 5878, 5880, 5882, 5884, 5886, 5888, 5890, 5892, 5895, 5897, 5899, 5901, 5904, 5906, 5908, 5910, 5912, 5914, 5917, 5919, 5921, 5923, 5925, 5927, 5929, 5931, 5933, 5935, 5937, 5939, 5941, 5943, 5945, 5947, 5949, 5951, 5953, 5955, 5957, 5959, 5961, 5963, 5965, 5967, 5969, 5971, 5973, 5975, 5977, 5979, 5981, 5983, 5985, 5987, 5989, 5991, 5993, 5995, 5998, 6000, 6002, 6004, 6006, 6008, 6010, 6012, 6014, 6016, 6019, 6021, 6027, 6029, 6032, 6034, 6037, 6039, 6041, 6043, 6046, 6048, 6051, 6053, 6059, 6061, 6063, 6065, 6067, 6069, 6072, 6074, 6077, 6079, 6082, 6084, 6087, 6089, 6091, 6093, 6095, 6097, 6100, 6102, 6105, 6107, 6110, 6112, 6115, 6117, 6120, 6122, 6125, 6127, 6129, 6131, 
6133, 6135, 6138, 6140, 6143, 6145, 6148, 6150, 6153, 6155, 6157, 6159, 6162, 6164, 6166, 6168, 6172, 6174, 6177, 6179, 6182, 6184, 6187, 6189, 6192, 6194, 6197, 6199, 6202, 6204, 6207, 6209, 6212, 6214, 6217, 6219, 6222, 6224, 6227, 6229, 6231, 6233, 6235, 6237, 6239, 6241, 6243, 6245, 6247, 6249, 6252, 6254, 6256, 6258, 6261, 6263, 6266, 6268, 6273, 6275, 6278, 6280, 6286, 6288, 6291, 6293, 6296, 6298, 6301, 6303, 6306, 6308, 6310, 6312, 6314, 6316, 6319, 6321, 6324, 6326, 6329, 6331, 6334, 6336, 6338, 6340, 6342, 6344, 6347, 6349, 6352, 6354, 6356, 6358, 6360, 6362, 6365, 6367, 6369, 6371, 6374, 6376, 6378, 6380, 6383, 6385, 6389, 6391, 6393, 6395, 6398, 6400, 6403, 6405, 6408, 6410, 6412, 6414, 6416, 6418, 6421, 6423, 6426, 6428, 6431, 6433, 6436, 6438, 6441, 6443, 6446, 6448, 6451, 6453, 6456, 6458, 6461, 6463, 6466, 6468, 6471, 6473, 6476, 6478, 6481, 6483, 6485, 6487, 6489, 6491, 6494, 6496, 6499, 6501, 6504, 6506, 6509, 6511, 6514, 6516, 6519, 6521, 6524, 6526, 6529, 6531, 6533, 6535, 6537, 6539, 6542, 6544, 6547, 6549, 6552, 6554, 6557, 6559, 6562, 6564, 6567, 6569, 6572, 6574, 6577, 6579, 6582, 6584, 6587, 6589, 6592, 6594, 6597, 6599, 6602, 6604, 6607, 6609, 6612, 6614, 6617, 6619, 6621, 6623, 6626, 6628, 6630, 6632, 6637, 6639, 6641, 6643, 6645, 6647, 6650, 6652, 6655, 6657, 6660, 6662, 6665, 6667, 6669, 6671, 6673, 6675, 6677, 6679, 6681, 6683, 6685, 6687, 6689, 6691, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6719, 6721, 6724, 6726, 6729, 6731, 6734, 6736, 6739, 6741, 6744, 6746, 6749, 6751, 6754, 6756, 6759, 6761, 6766, 6768, 6771, 6773, 6776, 6778, 6781, 6783, 6785, 6787, 6789, 6791, 6794, 6796, 6799, 6801, 6804, 6806, 6809, 6811, 6814, 6816, 6818, 6820, 6823, 6825, 6828, 6830, 6836, 6838, 6841, 6843, 6846, 6848, 6851, 6853, 6856, 6858, 6860, 6862, 6865, 6867, 6869, 6871, 6875, 6877, 6880, 6882, 6885, 6887, 6890, 6892, 6894, 6896, 6899, 6901, 6904, 6906, 6912, 6914, 6916, 6918, 6921, 6923, 6926, 6928, 6934, 6936, 6938, 
6940, 6942, 6944, 6947, 6949, 6952, 6954, 6957, 6959, 6962, 6964, 6966, 6968, 6970, 6972, 6975, 6977, 6979, 6981, 6983, 6985, 6988, 6990, 6992, 6994, 6996, 6998, 7001, 7003, 7006, 7008, 7011, 7013, 7016, 7018, 7020, 7022, 7024, 7026, 7029, 7031, 7034, 7036, 7039, 7041, 7044, 7046, 7048, 7050, 7053, 7055, 7058, 7060, 7066, 7068, 7070, 7072, 7074, 7076, 7079, 7081, 7084, 7086, 7089, 7091, 7094, 7096, 7098, 7100, 7102, 7104, 7107, 7109, 7112, 7114, 7117, 7119, 7122, 7124, 7126, 7128, 7130, 7132, 7135, 7137, 7140, 7142, 7145, 7147, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7179, 7181, 7183, 7185, 7187, 7189, 7192, 7194, 7197, 7199, 7202, 7204, 7207, 7209, 7212, 7214, 7217, 7219, 7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258, 7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296, 7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334, 7336, 7338, 7340, 7343, 7345, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372, 7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410, 7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448, 7450, 7452, 7454, 7457, 7459, 7461, 7463, 7465, 7467, 7469, 7471, 7474, 7476, 7478, 7480, 7483, 7485, 7487, 7489, 7491, 7493, 7495, 7497, 7499, 7501, 7503, 7505, 7507, 7509, 7511, 7513, 7516, 7518, 7520, 7522, 7525, 7527, 7529, 7531, 7533, 7535, 7537, 7539, 7542, 7544, 7547, 7549, 7552, 7554, 7557, 7559, 7561, 7563, 7565, 7567, 7570, 7572, 7575, 7577, 7579, 7581, 7583, 7585, 7588, 7590, 7593, 7595, 7597, 7599, 7602, 7604, 7607, 7609, 7615, 7617, 7620, 7622, 7625, 7627, 7629, 7631, 7633, 7635, 7637, 7639, 7641, 7643, 7645, 7647, 7649, 7651, 7653, 7655, 7657, 7659, 7661, 7663, 7666, 7668, 
7670, 7672, 7674, 7676, 7679, 7681, 7684, 7686, 7688, 7690, 7692, 7694, 7697, 7699, 7701, 7703, 7705, 7707, 7710, 7712, 7715, 7717, 7720, 7722, 7725, 7727, 7730, 7732, 7734, 7736, 7739, 7741, 7743, 7745, 7749, 7751, 7754, 7756, 7759, 7761, 7763, 7765, 7767, 7769, 7771, 7773, 7775, 7777, 7779, 7781, 7783, 7785, 7787, 7789, 7791, 7793, 7795, 7797, 7800, 7802, 7804, 7806, 7810, 7812, 7815, 7817, 7820, 7822, 7825, 7827, 7829, 7831, 7833, 7835, 7838, 7840, 7843, 7845, 7848, 7850, 7853, 7855, 7858, 7860, 7863, 7865, 7868, 7870, 7873, 7875, 7878, 7880, 7886, 7888, 7890, 7892, 7894, 7896, 7899, 7901, 7904, 7906, 7909, 7911, 7914, 7916, 7919, 7921, 7924, 7926, 7929, 7931, 7933, 7935, 7937, 7939, 7942, 7944, 7947, 7949, 7952, 7954, 7957, 7959, 7962, 7964, 7966, 7968, 7970, 7972, 7975, 7977, 7980, 7982, 7985, 7987, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 6196, 6191, 6196, 6191, 6196, 6191, 8012, 8014, 8016, 8018, 8020, 8022, 5236, 5251, 3637, 3635, 3640, 3638, 3637, 3635, 3640, 3638, 5236, 5251, 4979, 4977, 4979, 4977, 1907, 1737, 1632, 1627, 1632, 1627, 2988, 2983, 3008, 3003, 7065, 7063, 8169, 8171, 8173, 8175, 8177, 8179, 8181, 8183, 8185, 8187, 8189, 8191, 6908, 6903, 6908, 6903, 6026, 6024, 6026, 6024, 6221, 6226, 6221, 6226, 6636, 6634, 6649, 6649, 6636, 6634, 6460, 6460, 6465, 6465, 6465, 6460, 6465, 6460, 6908, 6903, 6908, 6903, 6911, 6909, 8386, 8388, 8390, 8392, 8394, 8396, 8398, 8400, 2802, 2797, 2802, 2797, 3771, 3766, 3771, 3766, 3637, 3635, 3640, 3638, 3637, 3635, 3640, 3638, 3621, 3633, 845, 845, 1907, 1737, 1824, 1824, 1907, 1737, 1388, 1907, 1737, 1388, 1536, 1524, 1524, 1536, 1909, 1909, 2968, 2963, 2968, 2963, 2988, 2983, 2988, 2983, 3008, 3003, 2968, 2963, 2968, 2963, 2988, 2983, 2988, 2983, 3008, 3003, 3289, 3289, 3258, 3253, 3258, 3253, 3402, 3397, 3402, 3397, 3455, 3455, 3058, 3056, 3058, 3056, 3154, 3152, 3154, 3152, 3193, 3191, 3193, 3191, 3223, 3221, 3223, 3221, 3637, 3635, 3640, 3638, 3637, 3635, 3592, 3590, 3592, 3590, 3621, 3633, 3637, 
3635, 3640, 3638, 3637, 3635, 3640, 3638, 3720, 3718, 3720, 3718, 3846, 3844, 3846, 3844, 3989, 3989, 4212, 4214, 4214, 4212, 4706, 4704, 4706, 4704, 4719, 4734, 4706, 4704, 4706, 4704, 4719, 4734, 4977, 4977, 4979, 4979, 5027, 5022, 5027, 5022, 4706, 4704, 4706, 4704, 4719, 4734, 4719, 4734, 4777, 4777, 4616, 4614, 4616, 4614, 4706, 4704, 4706, 4704, 4893, 4893, 4927, 4927, 4979, 4977, 4979, 4977, 5430, 5428, 5430, 5428, 5351, 5349, 5351, 5349, 5430, 5428, 5430, 5428, 5430, 5428, 5430, 5428, 6058, 6056, 6058, 6056, 6221, 6226, 6221, 6226, 6636, 6634, 6636, 6634, 5777, 6780, 6780, 6793, 6793, 5777, 6026, 6024, 6026, 6024, 6152, 6147, 6152, 6147, 6221, 6226, 6221, 6226, 6285, 6283, 6221, 6226, 6221, 6226, 6285, 6283, 6026, 6024, 6026, 6024, 6058, 6056, 6058, 6056, 6171, 6171, 6285, 6283, 6272, 6272, 6285, 6283, 6636, 6634, 6636, 6634, 6765, 6765, 6835, 6833, 6835, 6833, 6874, 6874, 6911, 6909, 6911, 6909, 6933, 6931, 6933, 6931, 7065, 7063, 7065, 7063, 7614, 7612, 7614, 7612, 7614, 7612, 7913, 7898, 7974, 7974, 7883, 7885, 7913, 7898, 7614, 7612, 7614, 7612, 7748, 7748, 7809, 7809, 7885, 7883, 10416, 10418, 10420, 10422, 10424, 10426, 10428, 10430, 10432, 10434, 10437, 10439, 10442, 10444, 10446, 10448, 10450, 10452, 10454, 10456, 10458, 10460, 10462, 10464, 10467, 10469, 10471, 10473, 10476, 10478, 10481, 10483, 10489, 10491, 10493, 10495, 10497, 10499, 10502, 10504, 10507, 10509, 10512, 10514, 10517, 10519, 10521, 10523, 10526, 10528, 10531, 10533, 10539, 10541, 10543, 10545, 10547, 10549, 10552, 10554, 10556, 10558, 10560, 10562, 10565, 10567, 10570, 10572, 10575, 10577, 10580, 10582, 10585, 10587, 10589, 10591, 10593, 10595, 10598, 10600, 10603, 10605, 10608, 10610, 10613, 10615, 10617, 10619, 10622, 10624, 10627, 10629, 10635, 10637, 10639, 10641, 10643, 10645, 10648, 10650, 10652, 10654, 10656, 10658, 10660, 10662, 10664, 10666, 10668, 10670, 10672, 10674, 10677, 10679, 10681, 10683, 10685, 10687, 10690, 10692, 10694, 10696, 10698, 10700, 10702, 10704, 10706, 
10708, 10710, 10712, 10714, 10716, 10718, 10720, 10722, 10724, 10727, 10729, 10732, 10734, 10737, 10739, 10741, 10743, 10746, 10748, 10751, 10753, 10759, 10761, 10764, 10766, 10769, 10771, 10774, 10776, 10779, 10781, 10783, 10785, 10788, 10790, 10792, 10794, 10798, 10800, 10803, 10805, 10808, 10810, 10813, 10815, 10817, 10819, 10821, 10823, 10826, 10828, 10830, 10832, 10834, 10836, 10838, 10840, 10842, 10844, 10846, 10848, 10850, 10852, 10854, 10856, 10859, 10861, 10863, 10865, 10868, 10870, 10872, 10874, 10876, 10878, 10881, 10883, 10885, 10887, 10889, 10891, 10894, 10896, 10899, 10901, 10904, 10906, 10909, 10911, 10914, 10916, 10919, 10921, 10924, 10926, 10929, 10931, 10934, 10936, 10939, 10941, 10944, 10946, 10949, 10951, 10954, 10956, 10962, 10964, 10966, 10968, 10970, 10972, 10974, 10976, 10978, 10980, 10982, 10984, 10986, 10988, 10990, 10992, 10994, 10996, 10998, 11000, 11003, 11005, 11007, 11009, 11012, 11014, 11016, 11018, 11020, 11022, 11024, 11026, 11028, 11030, 11033, 11035, 11037, 11039, 11042, 11044, 11046, 11048, 11050, 11052, 11054, 11056, 11058, 11060, 11062, 11064, 11066, 11068, 11070, 11072, 11074, 11076, 11079, 11081, 11083, 11085, 11088, 11090, 11092, 11094, 11097, 11099, 11101, 11103, 11105, 11107, 11109, 11111, 11113, 11115, 11117, 11119, 11121, 11123, 11125, 11127, 11129, 11131, 11133, 11135, 11137, 11139, 11141, 11143, 11145, 11147, 11150, 11152, 11156, 11158, 11160, 11162, 11164, 11166, 11169, 11171, 11174, 11176, 11179, 11181, 11184, 11186, 11189, 11191, 11194, 11196, 11199, 11201, 11204, 11206, 11209, 11211, 11214, 11216, 11218, 11220, 11225, 11227, 11230, 11232, 11235, 11237, 11239, 11241, 11243, 11245, 11248, 11250, 11253, 11255, 11258, 11260, 11263, 11265, 11268, 11270, 11273, 11275, 11278, 11280, 11283, 11285, 11288, 11290, 11293, 11295, 11298, 11300, 11303, 11305, 11307, 11309, 11312, 11314, 11317, 11319, 11324, 11326, 11328, 11330, 11332, 11334, 11336, 11338, 11340, 11342, 11344, 11346, 11349, 11351, 11353, 11355, 11357, 11359, 
11361, 11363, 11366, 11368, 11370, 11372, 11374, 11376, 11378, 11380, 11382, 11384, 11386, 11388, 11390, 11392, 11394, 11396, 11398, 11400, 11402, 11404, 11406, 11408, 11410, 11412, 11414, 11416, 11418, 11420, 11422, 11424, 11426, 11428, 11431, 11433, 11435, 11437, 11439, 11441, 11443, 11445, 11447, 11449, 11451, 11453, 11456, 11458, 11460, 11462, 11465, 11467, 11469, 11471, 11474, 11476, 11479, 11481, 11487, 11489, 11491, 11493, 11496, 11498, 11501, 11503, 11509, 11511, 11513, 11515, 11517, 11519, 11521, 11523, 11525, 11527, 11529, 11531, 11534, 11536, 11539, 11541, 11544, 11546, 11549, 11551, 11554, 11556, 11559, 11561, 11567, 11569, 11572, 11574, 11577, 11579, 11582, 11584, 11587, 11589, 11591, 11593, 11596, 11598, 11601, 11603, 11608, 11610, 11612, 11614, 11616, 11618, 11621, 11623, 11626, 11628, 11631, 11633, 11636, 11638, 11640, 11642, 11644, 11646, 11649, 11651, 11654, 11656, 11659, 11661, 10731, 10726, 10923, 10928, 10923, 10928, 10961, 10959, 11741, 11743, 11745, 11747, 11750, 11752, 11754, 11756, 11759, 11761, 11763, 11765, 11767, 11769, 11771, 11773, 11775, 11777, 11779, 11781, 11783, 11785, 11787, 11789, 11791, 11793, 11795, 11797, 11799, 11801, 11803, 11805, 11807, 11809, 11811, 11813, 11816, 11818, 11821, 11823, 11826, 11828, 11830, 11832, 11834, 11836, 11838, 11840, 11843, 11845, 11848, 11850, 11852, 11854, 11858, 11860, 11862, 11864, 11866, 11868, 11870, 11872, 11874, 11876, 11878, 11880, 11882, 11884, 10903, 10898, 10961, 10959, 10903, 10898, 11486, 11484, 11505, 11500, 11508, 11506, 11505, 11500, 11508, 11506, 11486, 11484, 11486, 11484, 11505, 11500, 11505, 11500, 11505, 11500, 11508, 11506, 11566, 11564, 10536, 10538, 10634, 10632, 10488, 10486, 10488, 10486, 10538, 10536, 10538, 10536, 10634, 10632, 10634, 10632, 10756, 10758, 10758, 10756, 10731, 10726, 10758, 10756, 10758, 10756, 10812, 10731, 10726, 10758, 10756, 10758, 10756, 10812, 10758, 10756, 10758, 10756, 10797, 10797, 10923, 10928, 10923, 10928, 10961, 10959, 10961, 10959, 11224, 
11222, 11173, 11168, 11173, 11168, 11282, 11277, 11282, 11277, 11193, 11188, 11193, 11188, 11224, 11222, 11224, 11222, 11323, 11323, 11484, 11486, 11486, 11484, 11505, 11500, 11508, 11506, 11505, 11500, 11508, 11506, 11486, 11484, 11486, 11484, 11505, 11500, 11506, 11505, 11500, 11508, 11505, 11500, 11508, 11506, 11566, 11564, 11566, 11564, 11595, 11607, 11648, 11648, 11486, 11484, 11486, 11484, 11508, 11506, 11508, 11506, 11566, 11564, 11566, 11564, 11595, 11607, 13310, 13312, 13314, 13316, 13318, 13320, 13322, 13324, 13326, 13328, 13331, 13333, 13336, 13338, 13341, 13343, 13346, 13348, 13351, 13353, 13356, 13358, 13361, 13363, 13366, 13368, 13370, 13372, 13374, 13376, 13379, 13381, 13384, 13386, 13389, 13391, 13394, 13396, 13398, 13400, 13402, 13404, 13406, 13408, 13410, 13412, 13414, 13416, 13418, 13420, 13422, 13424, 13426, 13428, 13431, 13433, 13436, 13438, 13441, 13443, 13446, 13448, 13451, 13453, 13456, 13458, 13461, 13463, 13465, 13467, 13469, 13471, 13474, 13476, 13479, 13481, 13484, 13486, 13489, 13491, 13494, 13496, 13499, 13501, 13504, 13506, 13509, 13511, 13514, 13516, 13519, 13521, 13524, 13526, 13528, 13530, 13532, 13534, 13537, 13539, 13542, 13544, 13547, 13549, 13552, 13554, 13557, 13559, 13562, 13564, 13567, 13569, 13572, 13574, 13577, 13579, 13582, 13584, 13587, 13589, 13592, 13594, 13596, 13598, 13600, 13602, 13605, 13607, 13610, 13612, 13615, 13617, 13620, 13622, 13624, 13626, 13629, 13631, 13634, 13636, 13642, 13644, 13646, 13648, 13651, 13653, 13656, 13658, 13664, 13666, 13668, 13670, 13672, 13674, 13677, 13679, 13682, 13684, 13687, 13689, 13692, 13694, 13697, 13699, 13702, 13704, 13706, 13708, 13710, 13712, 13714, 13716, 13719, 13721, 13723, 13725, 13728, 13730, 13734, 13736, 13738, 13740, 13743, 13745, 13748, 13750, 13753, 13755, 13758, 13760, 13762, 13764, 13766, 13768, 13771, 13773, 13776, 13778, 13781, 13783, 13786, 13788, 13790, 13792, 13795, 13797, 13799, 13801, 13805, 13807, 13810, 13812, 13815, 13817, 13820, 13822, 13824, 13826, 
13828, 13830, 13833, 13835, 13838, 13840, 13843, 13845, 13848, 13850, 13852, 13854, 13856, 13858, 13861, 13863, 13866, 13868, 13871, 13873, 13876, 13878, 13881, 13883, 13886, 13888, 13891, 13893, 13896, 13898, 13900, 13902, 13905, 13907, 13910, 13912, 13918, 13920, 13923, 13925, 13928, 13930, 13933, 13935, 13938, 13940, 13943, 13945, 13948, 13950, 13953, 13955, 13957, 13959, 13961, 13963, 13966, 13968, 13970, 13972, 13975, 13977, 13980, 13982, 13988, 13990, 13993, 13995, 13998, 14000, 14003, 14005, 14008, 14010, 14013, 14015, 14018, 14020, 14023, 14025, 14028, 14030, 14033, 14035, 14038, 14040, 14043, 14045, 14048, 14050, 14052, 14054, 14057, 14059, 14062, 14064, 14125, 14127, 14129, 14131, 14133, 14135, 14137, 14139, 14141, 14143, 14145, 14147, 14149, 14151, 14153, 14155, 13478, 13473, 13483, 13488, 13478, 13473, 13483, 13488, 13483, 13488, 13860, 13875, 13860, 13875, 14218, 14220, 14222, 14224, 14226, 14228, 14230, 14232, 14234, 14236, 14238, 14240, 14242, 14244, 14246, 14248, 14250, 14252, 14254, 14256, 14258, 14260, 14262, 14264, 14267, 14269, 14271, 14273, 14275, 14277, 14279, 14281, 14283, 14285, 14287, 14289, 14291, 14293, 14295, 14297, 14299, 14301, 14303, 14305, 14307, 14309, 14311, 14313, 14315, 14317, 14319, 14321, 14323, 14325, 14327, 14329, 11857, 11856, 14368, 14370, 14372, 14374, 14376, 14378, 14380, 14382, 14384, 14386, 14388, 14390, 14393, 14395, 14397, 14399, 14401, 14403, 14405, 14407, 14409, 14411, 14413, 14415, 14418, 14420, 14422, 14424, 14426, 14428, 14430, 14432, 14434, 14436, 14438, 14440, 14443, 14445, 14447, 14449, 14452, 14454, 14456, 14458, 14461, 14463, 14465, 14467, 14469, 14471, 14473, 14475, 14477, 14479, 14481, 14483, 14485, 14487, 13641, 13639, 13641, 13639, 13663, 13661, 13663, 13661, 13804, 13804, 13917, 13915, 13917, 13915, 13987, 13985, 13987, 13985, 14069, 14067, 14069, 14067, 15219, 15221, 15223, 15225, 15227, 15229, 15232, 15234, 15236, 15238, 15240, 15242, 15244, 15246, 15248, 15250, 15252, 15254, 15256, 15258, 15260, 
15262, 15264, 15266, 15268, 15270, 15273, 15275, 15278, 15280, 15283, 15285, 15288, 15290, 15293, 15295, 15298, 15300, 15303, 15305, 15308, 15310, 15313, 15315, 15321, 15323, 15326, 15328, 15331, 15333, 15336, 15338, 15341, 15343, 15346, 15348, 15351, 15353, 15356, 15358, 15361, 15363, 15365, 15367, 15369, 15371, 15374, 15376, 15379, 15381, 15384, 15386, 15389, 15391, 15393, 15395, 15398, 15400, 15402, 15404, 15408, 15410, 15412, 15414, 15416, 15418, 15320, 15318, 15360, 15355, 15360, 15355, 15388, 15373, 15272, 15272, 15360, 15355, 15360, 15355, 15388, 15373, 15307, 15307, 15360, 15355, 15360, 15355, 15388, 15373, 15320, 15318, 15320, 15318, 15407, 15407, 15974, 15972, 15974, 15972, 16411, 16413, 16416, 16418, 16420, 16422, 16431, 16433, 16444, 16446, 16448, 16450, 16452, 16454, 16456, 16458, 16659, 16661, 16663, 16665, 16667, 16669, 16672, 16674, 16677, 16679, 16682, 16684, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17024, 17026, 17028, 17030, 17032, 17034, 17036, 17038, 17040, 17042, 17044, 17046, 17048, 17050, 17052, 17054, 17056, 17058, 17060, 17062, 17064, 17066, 17068, 17070, 17072, 17074, 17076, 17078, 17080, 17082, 17084, 17086, 17088, 17090, 17092, 17094, 17096, 17098, 17100, 17102, 17104, 17106, 17108, 17110, 17112, 17114, 17116, 17118, 17120, 17122, 17124, 17126, 17128, 17130, 17132, 17134, 17136, 17138, 17140, 17142, 17144, 17146, 17148, 17150, 17152, 17154, 17156, 17158, 17160, 17162, 17164, 17166, 17168, 17170, 17172, 17174, 17176, 17178, 17180, 17182, 17184, 17186, 17188, 17190, 17192, 17194, 17196, 17198, 17200, 17202, 17204, 17206, 17208, 17210, 17212, 17214, 17216, 17218, 17220, 17222, 17224, 17226, 17228, 17230, 17232, 17234, 17236, 17238, 17240, 17242, 17244, 17246, 17248, 17250, 17252, 17254, 17256, 17258, 17260, 17262, 17264, 17266, 17268, 17270, 17272, 17274, 17276, 17278, 17280, 17282, 17284, 17286, 17288, 17290, 17292, 17294, 17296, 17298, 17300, 17302, 17304, 17306, 17308, 17310, 17312, 17314, 17316, 17318, 17320, 17322, 17324, 
17326, 17328, 17330, 17332, 17334, 17336, 17338, 17340, 17342, 17344, 17346, 17348, 17350, 17352, 17354, 17356, 17358, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17376, 17378, 17380, 17382, 17384, 17386, 17388, 17390, 17392, 17394, 17396, 17398, 17400, 17402, 17404, 17406, 17408, 17410, 17412, 17414, 17416, 17418, 17420, 17422, 17424, 17426, 17428, 17430, 17432, 17434, 17436, 17438, 17440, 17442, 17444, 17446, 17448, 17450, 17452, 17454, 17456, 17458, 17460, 17462, 17464, 17466, 17468, 17470, 17472, 17474, 17476, 17478, 17480, 17482, 17484, 17486, 17488, 17490, 17492, 17494, 17496, 17498, 17500, 17502, 17504, 17506, 17508, 17510, 17512, 17514, 17516, 17518, 17520, 17522, 17524, 17526, 17528, 17530, 17532, 17534, 17536, 17538, 17540, 17542, 17544, 17546, 17548, 17550, 17552, 17554, 17556, 17558, 17560, 17562, 17564, 17566, 17568, 17570, 17572, 17574, 17576, 17578, 17580, 17582, 17584, 17586, 17588, 17590, 17592, 17594, 17596, 17598, 17600, 17602, 17604, 17606, 17608, 17610, 17612, 17614, 17616, 17618, 17620, 17622, 17624, 17626, 17628, 17630, 17632, 17634, 17636, 17638, 17640, 17642, 17644, 17646, 17648, 17650, 17652, 17654, 17656, 17658, 17660, 17662, 17664, 17666, 17668, 17670, 17672, 17674, 17676, 17678, 17680, 17682, 17684, 17686, 17688, 17690, 17692, 17694, 17696, 17698, 17700, 17702, 17704, 17706, 17708, 17710, 17712, 17714, 17716, 17718, 17720, 17722, 17724, 17726, 17728, 17730, 17732, 17734, 17736, 17738, 17740, 17742, 17744, 17746, 17748, 17750, 17752, 17754, 17756, 17758, 17760, 17762, 17764, 17766, 17768, 17770, 17772, 17774, 17776, 17778, 17780, 17782, 17784, 17786, 17788, 17790, 17792, 17794, 17796, 17798, 17800, 17802, 17804, 17806, 17808, 17810, 17812, 17814, 17816, 17818, 17820, 17822, 17824, 17826, 17828, 17830, 17832, 17834, 17836, 17838, 17840, 17842, 17844, 17846, 17848, 17850, 17852, 17854, 17856, 17858, 17860, 17862, 17864, 17866, 17868, 17870, 17872, 17874, 17876, 17878, 17880, 17882, 17884, 17886, 17888, 17890, 17892, 17894, 
17896, 17898, 17900, 17902, 17904, 17906, 17908, 17910, 17912, 17914, 17916, 17918, 17920, 17922, 17924, 17926, 17928, 17930, 17932, 17934, 17936, 17938, 17940, 17942, 17944, 17946, 17948, 17950, 17952, 17954, 17956, 17958, 17960, 17962, 17964, 17966, 17968, 17970, 17972, 17974, 17976, 17978, 17980, 17982, 17984, 17986, 17988, 17990, 17992, 17994, 17996, 17998, 18000, 18002, 18004, 18006, 18008, 18010, 18012, 18014, 18016, 18018, 18020, 18022, 18024, 18026, 18028, 18030, 18032, 18034, 18036, 18038, 18040, 18042, 18044, 18046, 18048, 18050, 18052, 18054, 18056, 18058, 18060, 18062, 18064, 18066, 18068, 18070, 18072, 18074, 18076, 18078, 18080, 18082, 18084, 18086, 18088, 18090, 18092, 18094, 18096, 18098, 18100, 18102, 18104, 18106, 18108, 18110, 18112, 18114, 18116, 18118, 18120, 18122, 18124, 18126, 18128, 18130, 18132, 18134, 18136, 18138, 18140, 18142, 18144, 18146, 18148, 18150, 18152, 18154, 18156, 18158, 18160, 18162, 18164, 18166, 18168, 18170, 18172, 18174, 18176, 18178, 18180, 18182, 18184, 18186, 18188, 18190, 18192, 18194, 18196, 18198, 18200, 18202, 18204, 18206, 18208, 18210, 18212, 18214, 18216, 18218, 18220, 18222, 18224, 18226, 18228, 18230, 18232, 18234, 18236, 18238, 18240, 18242, 18244, 18246, 18248, 18250, 18252, 18254, 18256, 18258, 18260, 18262, 18264, 18266, 18268, 18270, 18272, 18274, 18276, 18278, 18280, 18282, 18284, 18286, 18288, 18290, 18292, 18294, 18296, 18298, 18300, 18302, 18304, 18306, 18308, 18310, 18312, 18314, 18316, 18318, 18320, 18322, 18324, 18326, 18328, 18330, 18332, 18334, 18336, 18338, 18340, 18342, 18344, 18346, 18348, 18350, 18352, 18354, 18356, 18358, 18360, 18362, 18364, 18366, 18368, 18370, 18372, 18374, 18376, 18378, 18380, 18382, 18384, 18386, 18388, 18390, 18392, 18394, 18396, 18398, 18400, 18402, 18404, 18406, 18408, 18410, 18412, 18414, 18416, 18418, 18420, 18422, 18424, 18426, 18428, 18430, 18432, 18434, 18436, 18438, 18440, 18442, 18444, 18446, 18448, 18450, 18452, 18454, 18456, 18458, 18460, 18462, 18464, 
18466, 18468, 18470, 18472, 18474, 18476, 18478, 18480, 18482, 18484, 18486, 18488, 18490, 18492, 18494, 18496, 18498, 18500, 18502, 18504, 18506, 18508, 18510, 18512, 18514, 18516, 18518, 18520, 18522, 18524, 18526, 18528, 18530, 18532, 18534, 18536, 18538, 18540, 18542, 18544, 18546, 18548, 18550, 18552, 18554, 18556, 18558, 18560, 18562, 18564, 18566, 18568, 18570, 18572, 18574, 18576, 18578, 18580, 18582, 18584, 18586, 18588, 18590, 18592, 18594, 18596, 18598, 18600, 18602, 18604, 18606, 18608, 18610, 18612, 18614, 18616, 18618, 18620, 18622, 18624, 18626, 18628, 18630, 18632, 18634, 18636, 18638, 18640, 18642, 18644, 18646, 18648, 18650, 18652, 18654, 18656, 18658, 18660, 18662, 18664, 18666, 18668, 18670, 18672, 18674, 18676, 18678, 18680, 18682, 18684, 18686, 18688, 18690, 18692, 18694, 18696, 18698, 18700, 18702, 18704, 18706, 18708, 18710, 18712, 18714, 18716, 18718, 18720, 18722, 18724, 18726, 18728, 18730, 18732, 18734, 18736, 18738, 18740, 18742, 18744, 18746, 18748, 18750, 18752, 18754, 18756, 18758, 18760, 18762, 18764, 18766, 18768, 18770, 18772, 18774, 18776, 18778, 18780, 18782, 18784, 18786, 18788, 18790, 18792, 18794, 18796, 18798, 18800, 18802, 18804, 18806, 18808, 18810, 18812, 18814, 18816, 18818, 18820, 18822, 18824, 18826, 18828, 18830, 18832, 18834, 18836, 18838, 18840, 18842, 18844, 18846, 18848, 18850, 18852, 18854, 18856, 18858, 18860, 18862, 18864, 18866, 18868, 18870, 18872, 18874, 18876, 18878, 18880, 18882, 18884, 18886, 18888, 18890, 18892, 18894, 18896, 18898, 18900, 18902, 18904, 18906, 18908, 18910, 18912, 18914, 18916, 18918, 18920, 18922, 18924, 18926, 18928, 18930, 18932, 18934, 18936, 18938, 18940, 18942, 18944, 18946, 18948, 18950, 18952, 18954, 18956, 18958, 18960, 18962, 18964, 18966, 18968, 18970, 18972, 18974, 18976, 18978, 18980, 18982, 18984, 18986, 18988, 18990, 18992, 18994, 18996, 18998, 19000, 19002, 19004, 19006, 19008, 19010, 19012, 19014, 19016, 19018, 19020, 19022, 19024, 19026, 19028, 19030, 19032, 19034, 
19036, 19038, 19040, 19042, 19044, 19046, 19048, 19050, 19052, 19054, 19056, 19058, 19060, 19062, 19064, 19066, 19068, 19070, 19072, 19074, 19076, 19078, 19080, 19082, 19084, 19086, 19088, 19090, 19092, 19094, 19096, 19098, 19100, 19102, 19104, 19106, 19108, 19110, 19112, 19114, 19116, 19118, 19120, 19122, 19124, 19126, 19128, 19130, 19132, 19134, 19136, 19138, 19140, 19142, 19144, 19146, 19148, 19150, 19152, 19154, 19156, 19158, 19160, 19162, 19164, 19166, 19168, 19170, 19172, 19174, 19176, 19178, 19180, 19182, 19184, 19186, 19188, 19190, 19192, 19194, 19196, 19198, 19200, 19202, 19204, 19206, 19208, 19210, 19212, 19214, 19216, 19218, 19220, 19222, 19224, 19226, 19228, 19230, 19232, 19234, 19236, 19238, 19240, 19242, 19244, 19246, 19248, 19250, 19252, 19254, 19256, 19258, 19260, 19262, 19264, 19266, 19268, 19270, 19272, 19274, 19276, 19278, 19280, 19282, 19284, 19286, 19288, 19290, 19292, 19294, 19296, 19298, 19300, 19302, 19304, 19306, 19308, 19310, 19312, 19314, 19316, 19318, 19320, 19322, 19324, 19326, 19328, 19330, 19332, 19334, 19336, 19338, 19340, 19342, 19344, 19346, 19348, 19350, 19352, 19354, 19356, 19358, 19360, 19362, 19364, 19366, 19368, 19370, 19372, 19374, 19376, 19378, 19380, 19382, 19384, 19386, 19388, 19390, 19392, 19394, 19396, 19398, 19400, 19402, 19404, 19406, 19408, 19410, 19412, 19414, 19416, 19418, 19420, 19422, 19424, 19426, 19428, 19430, 19432, 19434, 19436, 19438, 19440, 19442, 19444, 19446, 19448, 19450, 19452, 19454, 19456, 19458, 19460, 19462, 19464, 19466, 19468, 19470, 19472, 19474, 19476, 19478, 19480, 19482, 19484, 19486, 19488, 19490, 19492, 19494, 19496, 19498, 19500, 19502, 19504, 19506, 19508, 19510, 19512, 19514, 19516, 19518, 19520, 19522, 19524, 19526, 19528, 19530, 19532, 19534, 19536, 19538, 19540, 19542, 19544, 19546, 19548, 19550, 19552, 19554, 19556, 19558, 19560, 19562, 19564, 19566, 19568, 19570, 19572, 19574, 19576, 19578, 19580, 19582, 19584, 19586, 19588, 19590, 19592, 19594, 19596, 19598, 19600, 19602, 19604, 
19606, 19608, 19610, 19612, 19614, 19616, 19618, 19620, 19622, 19624, 19626, 19628, 19630, 19632, 19634, 19636, 19638, 19640, 19642, 19644, 19646, 19648, 19650, 19652, 19654, 19656, 19658, 19660, 19662, 19664, 19666, 19668, 19670, 19672, 19674, 19676, 19678, 19680, 19682, 19684, 19686, 19688, 19690, 19692, 19694, 19696, 19698, 19700, 19702, 19704, 19706, 19708, 19710, 19712, 19714, 19716, 19718, 19720, 19722, 19724, 19726, 19728, 19730, 19732, 19734, 19736, 19738, 19740, 19742, 19744, 19746, 19748, 19750, 19752, 19754, 19756, 19758, 19760, 19762, 19764, 19766, 19768, 19770, 19772, 19774, 19776, 19778, 19780, 19782, 19784, 19786, 19788, 19790, 19792, 19794, 19796, 19798, 19800, 19802, 19804, 19806, 19808, 19810, 19812, 19814, 19816, 19818, 19820, 19822, 19824, 19826, 19828, 19830, 19832, 19834, 19836, 19838, 19840, 19842, 19844, 19846, 19848, 19850, 19852, 19854, 19856, 19858, 19860, 19862, 19864, 19866, 19868, 19870, 19872, 19874, 19876, 19878, 19880, 19882, 19884, 19886, 19888, 19890, 19892, 19894, 19896, 19898, 19900, 19902, 19904, 19906, 19908, 19910, 19912, 19914, 19916, 19918, 19920, 19922, 19924, 19926, 19928, 19930, 19932, 19934, 19936, 19938, 19940, 19942, 19944, 19946, 19948, 19950, 19952, 19954, 19956, 19958, 19960, 19962, 19964, 19966, 19968, 19970, 19972, 19974, 19976, 19978, 19980, 19982, 19984, 19986, 19988, 19990, 19992, 19994, 19996, 19998, 20000, 20002, 20004, 20006, 20008, 20010, 20012, 20014, 20016, 20018, 20020, 20022, 20024, 20026, 20028, 20030, 20032, 20034, 20036, 20038, 20040, 20042, 20044, 20046, 20048, 20050, 20052, 20054, 20056, 20058, 20060, 20062, 20064, 20066, 20068, 20070, 20072, 20074, 20076, 20078, 20080, 20082, 20084, 20086, 20088, 20090, 20092, 20094, 20096, 20098, 20100, 20102, 20104, 20106, 20108, 20110, 20112, 20114, 20116, 20118, 20120, 20122, 20124, 20126, 20128, 20130, 20132, 20134, 20136, 20138, 20140, 20142, 20144, 20146, 20148, 20150, 20152, 20154, 20156, 20158, 20160, 20162, 20164, 20166, 20168, 20170, 20172, 20174, 
20176, 20178, 20180, 20182, 20184, 20186, 20188, 20190, 20192, 20194, 20196, 20198, 20200, 20202, 20204, 20206, 20208, 20210, 20212, 20214, 20216, 20218, 20220, 20222, 20224, 20226, 20228, 20230, 20232, 20234, 20236, 20238, 20240, 20242, 20244, 20246, 20248, 20250, 20252, 20254, 20256, 20258, 20260, 20262, 20264, 20266, 20268, 20270, 20272, 20274, 20276, 20278, 20280, 20282, 20284, 20286, 20288, 20290, 20292, 20294, 20296, 20298, 20300, 20302, 20304, 20306, 20308, 20310, 20312, 20314, 20316, 20318, 20320, 20322, 20324, 20326, 20328, 20330, 20332, 20334, 20336, 20338, 20340, 20342, 20344, 20346, 20348, 20350, 20352, 20354, 20356, 20358, 20360, 20362, 20364, 20366, 20368, 20370, 20372, 20374, 20376, 20378, 20380, 20382, 20384, 20386, 20388, 20390, 20392, 20394, 20396, 20398, 20400, 20402, 20404, 20406, 20408, 20410, 20412, 20414, 20416, 20418, 20420, 20422, 20424, 20426, 20428, 20430, 20432, 20434, 20436, 20438, 20440, 20442, 20444, 20446, 20448, 20450, 20452, 20454, 20456, 20458, 20460, 20462, 20464, 20466, 20468, 20470, 20472, 20474, 20476, 20478, 20480, 20482, 20484, 20486, 20488, 20490, 20492, 20494, 20496, 20498, 20500, 20502, 20504, 20506, 20508, 20510, 20512, 20514, 20516, 20518, 20520, 20522, 20524, 20526, 20528, 20530, 20532, 20534, 20536, 20538, 20540, 20542, 20544, 20546, 20548, 20550, 20552, 20554, 20556, 20558, 20560, 20562, 20564, 20566, 20568, 20570, 20572, 20574, 20576, 20578, 20580, 20582, 20584, 20586, 20587, 20588, 20589, 20590, 20591, 20592, 20594, 20596, 20598, 20599, 20600, 20601, 20602, 20603, 20604, 20605, 20606, 20607, 20608, 20609, 20610, 20611, 20612, 20613, 20614, 20615, 20616, 20617, 20618, 20619, 20620, 20621, 20622, 20623, 20624, 20625, 20626, 20628, 20630, 20632, 20634, 20636, 20638, 20639, 20640, 20641, 20642, 20643, 20644, 20645, 20646, 20647, 20648, 20649, 20650, 20651, 20652, 20653, 20654, 20655, 20656, 20657, 20658, 20659, 20660, 20661, 20662, 20663, 20664, 20665, 20666, 20667, 20668, 20669, 20670, 20672, 20674, 20676, 20678, 
20679, 20680, 20681, 20682, 20683, 20684, 20685, 20686, 20687, 20688, 20689, 20690, 20691, 20692, 20693, 20694, 20695, 20696, 20697, 20698, 20699, 20700, 20701, 20702, 20703, 20704, 20705, 20706, 20707, 20708, 20709, 20710, 20711, 20712, 20713, 20714, 20715, 20716, 20717, 20718, 20719, 20720, 20721, 20722, 20723, 20724, 20725, 20726, 20727, 20728, 20729, 20730, 20731, 20732, 20733, 20734, 20735, 20736, 20737, 20738, 20739, 20740, 20741, 20742, 20743, 20744, 20745, 20746, 20747, 20748, 20749, 20750, 20751, 20752, 20753, 20754, 20755, 20756, 20757, 20758, 20759, 20760, 20761, 20762, 20763, 20764, 20765, 20766, 20767, 20768, 20769, 20770, 20771, 20772, 20773, 20774, 20775, 20776, 20777, 20778, 20779, 20780, 20781, 20782, 20783, 20784, 20785, 20786, 20787, 20788, 20789, 20790, 20791, 20792, 20793, 20794, 20795, 20796, 20797, 20798, 20799, 20800, 20801, 20802, 20803, 20804, 20805, 20806, 20807, 20808, 20809, 20810, 20811, 20812, 20813, 20814, 20815, 20816, 20817, 20818, 20819, 20820, 20821, 20822, 20823, 20824, 20825, 20826, 20827, 20828, 20829, 20830, 20831, 20832, 20833, 20834, 20835, 20836, 20837, 20838, 20839, 20840, 20841, 20842, 20843, 20844, 20845, 20846, 20847, 20848, 20849, 20850, 20851, 20852, 20853, 20854, 20855, 20856, 20857, 20858, 20859, 20860, 20861, 20862, 20863, 20864, 20865, 20866, 20867, 20868, 20869, 20870, 20871, 20872, 20873, 20874, 20875, 20876, 20877, 20878, 20879, 20880, 20881, 20882, 20883, 20884, 20885, 20886, 20887, 20888, 20889, 20890, 20891, 20892, 20893, 20894, 20895, 20896, 20897, 20898, 20899, 20900, 20901, 20902, 20903, 20904, 20905, 20906, 20907, 20908, 20909, 20910, 20911, 20912, 20913, 20914, 20915, 20916, 20917, 20918, 20919, 20920, 20921, 20922, 20923, 20924, 20925, 20926, 20927, 20928, 20929, 20930, 20931, 20932, 20933, 20934, 20935, 20936, 20937, 20938, 20939, 20940, 20941, 20942, 20943, 20944, 20945, 20946, 20947, 20948, 20949, 20950, 20951, 20952, 20953, 20954, 20955, 20956, 20957, 20958, 20959, 20960, 20962, 20964, 20966, 
20968, 20970, 20972, 20974, 20976, 20978, 20980, 20982, 20984, 20986, 20988, 20990, 20992, 20994, 20996, 20998, 21000, 21002, 21004, 21006, 21008, 21010, 21012, 21014, 21016, 21018, 21020, 21022, 21024, 21026, 21028, 21030, 21032, 21034, 21036, 21038, 21040, 21042, 21044, 21046, 21048, 21050, 21052, 21054, 21056, 21058, 21060, 21062, 21064, 21066, 21068, 21070, 21072, 21074, 21076, 21078, 21080, 21082, 21084, 21086, 21088, 21090, 21092, 21094, 21096, 21098, 21100, 21102, 21104, 21106, 21108, 21110, 21112, 21114, 21116, 21118, 21120, 21122, 21124, 21126, 21128, 21130, 21132, 21134, 21136, 21138, 21140, 21142, 21144, 21146, 21148, 21150, 21152, 21154, 21156, 21158, 21160, 21162, 21164, 21166, 21168, 21170, 21172, 21174, 21176, 21178, 21180, 21182, 21184, 21186, 21188, 21190, 21192, 21194, 21196, 21198, 21200, 21202, 21204, 21206, 21208, 21210, 21212, 21214, 21216, 21218, 21220, 21222, 21224, 21226, 21228, 21230, 21232, 21234, 21236, 21238, 21240, 21242, 21244, 21246, 21248, 21250, 21252, 21254, 21256, 21258, 21260, 21262, 21264, 21266, 21268, 21270, 21272, 21274, 21276, 21278, 21280, 21282, 21284, 21286, 21288, 21290, 21292, 21294, 21296, 21298, 21300, 21302, 21304, 21306, 21308, 21310, 21312, 21314, 21316, 21318, 21320, 21322, 21324, 21326, 21328, 21330, 21332, 21334, 21336, 21338, 21340, 21342, 21344, 21346, 21348, 21350, 21352, 21354, 21356, 21358, 21360, 21362, 21364, 21366, 21368, 21370, 21372, 21374, 21376, 21378, 21380, 21382, 21384, 21386, 21388, 21390, 21392, 21394, 21396, 21398, 21400, 21402, 21404, 21406, 21408, 21410, 21412, 21414, 21416, 21418, 21420, 21422, 21424, 21426, 21428, 21430, 21432, 21434, 21436, 21438, 21440, 21442, 21444, 21446, 21448, 21450, 21452, 21454, 21456, 21458, 21460, 21462, 21464, 21466, 21468, 21470, 21472, 21474, 21476, 21478, 21480, 21482, 21484, 21486, 21488, 21490, 21492, 21494, 21496, 21498, 21500, 21502, 21503, 21504, 21505, 21506, 21507, 21508, 21509, 21510, 21512, 21514, 21516, 21518, 21520, 21522, 21524, 21526, 21528, 
21530, 21532, 21534, 21536, 21538, 21540, 21542, 21544, 21546, 21548, 21550, 21552, 21554, 21556, 21558, 21560, 21562, 21564, 21566, 21568, 21570, 21572, 21574, 21576, 21578, 21579, 21580, 21581, 21582, 21583, 21584, 21585, 21586, 21587, 21588, 21589, 21590, 21591, 21592, 21593, 21594, 21595, 21596, 21597, 21598, 21599, 21600, 21601, 21602, 21603, 21604, 21605, 21606, 21607, 21608, 21609, 21610, 21611, 21612, 21613, 21614, 21615, 21616, 21617, 21618, 21619, 21620, 21621, 21622, 21623, 21624, 21625, 21626, 21627, 21628, 21629, 21630, 21631, 21632, 21633, 21634, 21635, 21636, 21637, 21638, 21639, 21640, 21641, 21642, 21643, 21644, 21645, 21646, 21647, 21648, 21649, 21650, 21651, 21652, 21653, 21654, 21655, 21656, 21657, 21658, 21659, 21660, 21661, 21662, 21663, 21664, 21665, 21666, 21667, 21668, 21669, 21670, 21671, 21672, 21673, 21674, 21675, 21676, 21677, 21678, 21679, 21680, 21681, 21682, 21683, 21684, 21685, 21686, 21687, 21688, 21689, 21690, 21691, 21692, 21693, 21694, 21695, 21696, 21697, 21698, 21699, 21700, 21701, 21702, 21703, 21704, 21705, 21706, 21707, 21708, 21709, 21710, 21711, 21712, 21713, 21714, 21715, 21716, 21717, 21718, 21719, 21720, 21721, 21722, 21723, 21724, 21726, 21728, 21730, 21732, 21734, 21736, 21738, 21740, 21742, 21744, 21746, 21748, 21750, 21752, 21754, 21756, 21758, 21760, 21762, 21764, 21766, 21768, 21770, 21772, 21774, 21776, 21778, 21780, 21782, 21784, 21786, 21788, 21790, 21792, 21794, 21796, 21798, 21800, 21802, 21804, 21806, 21808, 21810, 21812, 21814, 21816, 21818, 21820, 21822, 21824, 21826, 21828, 21830, 21832, 21834, 21836, 21838, 21840, 21842, 21844, 21846, 21848, 21850, 21852, 21854, 21856, 21858, 21860, 21862, 21864, 21866, 21868, 21870, 21872, 21874, 21876, 21878, 21880, 21882, 21884, 21886, 21888, 21890, 21892, 21894, 21896, 21898, 21900, 21902, 21904, 21906, 21908, 21910, 21912, 21914, 21916, 21918, 21920, 21922, 21924, 21926, 21928, 21930, 21932, 21934, 21936, 21938, 21940, 21942, 21944, 21946, 21948, 21950, 21952, 
21954, 21956, 21958, 21960, 21962, 21964, 21966, 21968, 21970, 21972, 21974, 21976, 21978, 21980, 21982, 21984, 21986, 21988, 21990, 21992, 21994, 21996, 21998, 22000, 22002, 22004, 22006, 22008, 22010, 22012, 22014, 22016, 22018, 22020, 22022, 22024, 22026, 22028, 22030, 22032, 22034, 22036, 22038, 22040, 22042, 22044, 22046, 22048, 22050, 22052, 22054, 22055, 22056, 22057, 22058, 22059, 22060, 22061, 22062, 22063, 22064, 22065, 22066, 22067, 22068, 22070, 22072, 22074, 22076, 22078, 22080, 22082, 22084, 22086, 22088, 22090, 22092, 22094, 22096, 22098, 22100, 22102, 22104, 22106, 22108, 22110, 22112, 22114, 22116, 22118, 22120, 22122, 22124, 22125, 22126, 22128, 22130, 22132, 22134, 22136, 22138, 22140, 22142, 22144, 22146, 22148, 22150, 22152, 22154, 22156, 22158, 22160, 22162, 22164, 22166, 22168, 22170, 22172, 22174, 22176, 22178, 22180, 22182, 22184, 22185, 22186, 22187, 22188, 22189, 22190, 22191, 22192, 22193, 22194, 22195, 22196, 22197, 22198, 22199, 22200, 22201, 22202, 22203, 22204, 22205, 22206, 22208, 22210, 22212, 22214, 22216, 22218, 22220, 22222, 22224, 22226, 22228, 22230, 22232, 22234, 22236, 22238, 22240, 22242, 22244, 22246, 22248, 22250, 22252, 22254, 22256, 22258, 22260, 22262, 22264, 22266, 22268, 22270, 22272, 22274, 22276, 22278, 22280, 22282, 22284, 22286, 22288, 22290, 22292, 22293, 22294, 22295, 22296, 22297, 22298, 22299, 22300, 22301, 22302, 22303, 22304, 22305, 22306, 22307, 22308, 22309, 22310, 22311, 22312, 22313, 22314, 22315, 22316, 22317, 22318, 22319, 22320, 22321, 22322, 22323, 22324, 22325, 22326, 22328, 22330, 22332, 22334, 22336, 22338, 22340, 22342, 22344, 22346, 22348, 22350, 22352, 8, 9, 10, 11, 12, 13, 14, 15, 24149, 24151, 24153, 5246, 5241, 3631, 3626, 3631, 3626, 24160, 24162, 24164, 24166, 5246, 5241, 4976, 4971, 24170, 4976, 4971, 24172, 5002, 4997, 23486, 5002, 4997, 23489, 5017, 5012, 5027, 5022, 5041, 5036, 5041, 5046, 777, 772, 826, 821, 22380, 950, 22383, 963, 1069, 1064, 1079, 1074, 1089, 1084, 1099, 1094, 
1122, 1117, 1112, 1122, 1117, 1127, 1137, 1132, 1147, 1142, 1157, 1152, 1162, 1167, 1190, 1185, 1180, 1190, 1185, 1195, 1834, 1839, 22396, 22798, 1877, 22397, 1864, 1882, 1887, 1892, 1897, 24174, 1388, 1452, 1470, 1465, 1534, 1529, 24176, 1447, 1442, 1534, 1529, 1549, 1577, 1612, 1607, 24178, 1839, 1834, 1849, 1844, 22763, 1877, 1854, 1859, 1864, 1887, 1882, 1897, 1892, 1737, 1909, 2860, 2855, 2873, 2898, 2893, 2908, 2903, 24180, 2993, 2998, 24182, 7038, 7033, 7028, 7062, 7057, 24184, 24192, 24194, 22430, 22432, 24196, 6031, 24198, 6036, 6081, 6076, 6071, 6081, 6076, 6086, 6109, 6104, 6099, 6109, 6104, 6114, 6124, 6119, 5827, 5814, 6196, 6191, 6201, 6211, 6216, 24200, 6211, 6216, 24202, 6270, 6265, 5916, 6277, 6282, 6285, 6283, 6295, 6290, 6305, 6300, 22461, 6351, 6346, 6402, 6397, 6364, 6328, 6323, 6333, 6318, 6455, 6450, 6455, 6450, 6470, 6475, 685, 6503, 6498, 6508, 6493, 6518, 6513, 6523, 6528, 6581, 6586, 6596, 6591, 6606, 6601, 6616, 6611, 5670, 5665, 24204, 6654, 6659, 6654, 6659, 6581, 6586, 6596, 6591, 6606, 6601, 6616, 6611, 5670, 5665, 24208, 6659, 6654, 6664, 22489, 6718, 6733, 6743, 6738, 6748, 6753, 22497, 6775, 6770, 646, 641, 6425, 6420, 22508, 6445, 6440, 6455, 6450, 6455, 6450, 6470, 6475, 685, 646, 641, 6425, 6420, 22508, 6445, 6440, 6455, 6450, 6455, 6450, 6470, 6475, 685, 6503, 6498, 22515, 6518, 6513, 6523, 6528, 6551, 6546, 6556, 6541, 6566, 6561, 6576, 6571, 646, 641, 6425, 6420, 22529, 6445, 6440, 6455, 6450, 24214, 6455, 6450, 24216, 6475, 6470, 685, 24218, 24220, 24222, 7753, 7665, 7758, 2747, 2742, 24228, 2812, 2807, 24230, 24232, 24234, 3776, 3761, 2850, 2845, 2860, 2855, 2883, 2878, 2888, 3631, 3626, 3631, 3626, 24236, 24238, 24240, 24242, 3650, 3645, 3660, 3655, 23198, 3678, 3673, 3542, 3537, 3565, 3560, 3570, 3555, 23162, 3592, 3590, 3631, 3626, 777, 772, 826, 821, 871, 866, 22571, 22557, 22575, 871, 866, 876, 799, 794, 804, 912, 907, 902, 912, 907, 917, 22565, 826, 821, 871, 866, 22571, 22573, 22575, 871, 866, 876, 22580, 889, 912, 
907, 902, 912, 907, 917, 927, 922, 937, 932, 22593, 950, 22596, 963, 986, 981, 976, 986, 981, 991, 1014, 1009, 22606, 1014, 1009, 22609, 1029, 1024, 1039, 1034, 1049, 1044, 1059, 1054, 1069, 1064, 1079, 1074, 1089, 1084, 1099, 1094, 1122, 1117, 1112, 1122, 1117, 1127, 1137, 1132, 1147, 1142, 1157, 1152, 1167, 1162, 1190, 1185, 1180, 1190, 1185, 1195, 22647, 1437, 1432, 1761, 1756, 1776, 22654, 1799, 1794, 1809, 1804, 1819, 1814, 1824, 1834, 1839, 22663, 1854, 1859, 1877, 22666, 1864, 1882, 1887, 1897, 1892, 24248, 1388, 1761, 1756, 1799, 1794, 1809, 1804, 1819, 1814, 1819, 1814, 1819, 1814, 1829, 1834, 1839, 1849, 1844, 22798, 1877, 22688, 1864, 1882, 1887, 1892, 1897, 24252, 24255, 22694, 1437, 1432, 1447, 1442, 1417, 1427, 1422, 1437, 1432, 1447, 1442, 1452, 1510, 1505, 1470, 1465, 1534, 1529, 1534, 1529, 1510, 1505, 22717, 1510, 1505, 22720, 1534, 1529, 1534, 1529, 1559, 1554, 1549, 1559, 1554, 1564, 1587, 1582, 1577, 1587, 1582, 1592, 1602, 1597, 1612, 1607, 1622, 1617, 1632, 1627, 1751, 1746, 1761, 1756, 1789, 1799, 1794, 1809, 1804, 1819, 1814, 1829, 1824, 1839, 1834, 1849, 1844, 22763, 1887, 1882, 1892, 1887, 1882, 1897, 1737, 1909, 1751, 1746, 1761, 1756, 1771, 1766, 1776, 22780, 1789, 1799, 1794, 1809, 1804, 1819, 1814, 1829, 1824, 1839, 1834, 1849, 1844, 1859, 1854, 1864, 22798, 1877, 1887, 1882, 1897, 1892, 1907, 1907, 22807, 2686, 2681, 1947, 1942, 2699, 1947, 1942, 2704, 2714, 2709, 2719, 2714, 2709, 2724, 2747, 2742, 2752, 2747, 2742, 2737, 2762, 2757, 2772, 2767, 2782, 2777, 2787, 2782, 2777, 2792, 2850, 2845, 2860, 2855, 2883, 2878, 2888, 2883, 2878, 2873, 2898, 2893, 2908, 2903, 2918, 2913, 2923, 2928, 2938, 2933, 2943, 2938, 2933, 2948, 2953, 2958, 24264, 2938, 2933, 2948, 2943, 2958, 2953, 24266, 2978, 2973, 24268, 2978, 2973, 24270, 2998, 2993, 24272, 2898, 2893, 2908, 2903, 2918, 2913, 2928, 2923, 2938, 2933, 2943, 2938, 2933, 2948, 2958, 2953, 24274, 2938, 2933, 2948, 2943, 2958, 2953, 24276, 2978, 2973, 24278, 2978, 2973, 24280, 2998, 2993, 
24282, 3031, 3026, 3021, 3055, 3050, 22889, 2291, 2286, 3076, 3071, 3086, 3081, 3091, 2291, 2286, 22900, 3086, 3081, 3096, 3131, 3126, 3141, 3136, 3151, 3146, 3152, 22907, 2472, 3191, 3203, 3198, 3131, 3126, 3141, 3136, 3151, 3146, 3152, 22920, 2472, 3193, 3203, 3198, 3021, 3055, 3050, 22929, 22931, 22933, 3131, 3126, 3141, 3136, 3151, 3146, 3154, 3185, 3180, 2472, 3193, 3191, 3203, 3198, 3268, 3263, 22951, 3299, 3294, 3289, 3299, 3294, 3304, 3314, 3309, 3319, 3329, 3324, 22959, 3268, 3263, 3223, 3221, 3299, 3294, 3299, 3294, 3299, 3294, 3304, 3314, 3309, 3240, 3324, 3329, 24286, 3324, 3329, 24288, 2619, 2614, 24290, 2619, 2614, 24292, 3355, 3350, 3420, 3415, 23141, 3397, 3402, 22984, 3420, 3415, 3425, 3430, 3435, 3440, 3450, 3445, 23141, 3402, 3397, 23145, 3420, 3415, 3430, 3425, 3440, 3435, 3450, 3445, 2676, 2671, 2686, 2681, 22995, 2704, 2699, 2714, 2709, 2724, 2719, 2747, 2742, 2737, 2747, 2742, 2752, 2762, 2757, 2772, 2767, 2782, 2777, 2792, 2787, 2802, 2797, 2812, 2807, 2835, 2830, 2825, 2835, 2830, 2840, 2850, 2845, 2860, 2855, 2878, 2883, 2873, 2883, 2878, 2888, 2898, 2893, 2908, 2903, 2918, 2913, 2928, 2923, 2938, 2933, 2948, 2943, 2958, 2953, 2968, 2963, 2978, 2973, 2988, 2983, 2998, 2993, 3008, 3003, 3031, 3026, 3021, 3031, 3026, 3036, 3055, 3050, 24296, 3055, 3050, 24298, 23071, 3076, 3071, 3086, 3081, 3096, 3091, 3131, 3126, 3141, 3136, 3151, 3146, 24300, 3131, 3126, 3141, 3136, 3151, 3146, 24302, 3185, 3180, 3190, 24304, 3203, 3198, 3185, 3180, 3190, 24306, 3203, 3198, 3268, 3263, 24308, 3268, 3263, 24310, 3299, 3294, 3289, 3299, 3294, 3304, 3314, 3309, 3240, 3324, 3329, 3258, 3253, 3268, 3263, 23115, 3299, 3294, 3289, 3299, 3294, 3304, 3314, 3309, 3319, 3329, 3324, 23128, 23130, 3402, 3397, 3355, 3350, 3420, 3415, 3425, 3430, 3440, 3435, 3445, 3450, 3384, 23141, 3402, 3397, 23145, 3420, 3415, 3430, 3425, 3440, 3435, 3450, 3445, 3455, 3532, 3527, 3542, 3537, 3565, 3560, 3570, 3555, 23162, 3592, 3590, 3602, 3597, 3612, 3607, 24312, 24314, 24316, 3650, 
3645, 3514, 3509, 23170, 3532, 3527, 3542, 3537, 3565, 3560, 3555, 3565, 3560, 3570, 3580, 3575, 24318, 23184, 24320, 3602, 3597, 3612, 3607, 3631, 3626, 3631, 3626, 24324, 24326, 24328, 24330, 3650, 3645, 3660, 3655, 23198, 3678, 3673, 3688, 3683, 3698, 3693, 3717, 3712, 24332, 3717, 3712, 24334, 3743, 3738, 3733, 3743, 3738, 3748, 3771, 3766, 3761, 3771, 3766, 3776, 3799, 3794, 3789, 3799, 3794, 3804, 3814, 3809, 3824, 3819, 3843, 3838, 24336, 3843, 3838, 24338, 3856, 3851, 3866, 3861, 3876, 3871, 3886, 3881, 23244, 3899, 3909, 3904, 3914, 3924, 3919, 3934, 3929, 3957, 3952, 3947, 3957, 3952, 3962, 3999, 3994, 4009, 4004, 23260, 23272, 4048, 4043, 4038, 4048, 4043, 4053, 4063, 4058, 4063, 4058, 3999, 3994, 4009, 4004, 23270, 23272, 4048, 4043, 4038, 4048, 4043, 4053, 4063, 4058, 4068, 4078, 4073, 4088, 4083, 4111, 4106, 4101, 4111, 4106, 4116, 4126, 4121, 4136, 4131, 4146, 4141, 4156, 4151, 4179, 4174, 4169, 4179, 4174, 4184, 4211, 4206, 4211, 4206, 4211, 4206, 24344, 23313, 4227, 4237, 4232, 4242, 4252, 4247, 4262, 4257, 4455, 4450, 4616, 4614, 4639, 4634, 4629, 4639, 4634, 4644, 4654, 4649, 4664, 4659, 4674, 4669, 4679, 4674, 4669, 4684, 4698, 4703, 24346, 4698, 4703, 24348, 4729, 4724, 4729, 4724, 4729, 4724, 24350, 4574, 4744, 4574, 4749, 4767, 4762, 4767, 4777, 4698, 4703, 24352, 4698, 4703, 24354, 4729, 4724, 4729, 4724, 4729, 4724, 24356, 4574, 4744, 4574, 4749, 4772, 4762, 4772, 4777, 4869, 4864, 4874, 4859, 5002, 4997, 23343, 5002, 4997, 23346, 4574, 4744, 4574, 4749, 4903, 4898, 23354, 4952, 4947, 4846, 4976, 4971, 5017, 5012, 24362, 5017, 5012, 24364, 4441, 4441, 5036, 4455, 4450, 4616, 4614, 4654, 4649, 4664, 4659, 4684, 4679, 4698, 4703, 24366, 4698, 4703, 24368, 4729, 4724, 4729, 4724, 4729, 4724, 24372, 4574, 4744, 4574, 4749, 4767, 4762, 4767, 4777, 4574, 4744, 4574, 4749, 4772, 4772, 4772, 4762, 23404, 24376, 23406, 24378, 4639, 4634, 4629, 4639, 4634, 4644, 4654, 4649, 4664, 4659, 4674, 4669, 4684, 4679, 4703, 4698, 24380, 4703, 4698, 24382, 
4729, 4724, 4719, 4729, 4724, 4734, 4739, 4749, 4744, 4772, 4767, 4762, 4772, 4767, 4777, 4869, 4864, 4874, 4859, 4903, 4898, 23447, 4932, 4937, 4927, 4932, 4937, 4942, 4952, 4947, 4846, 4869, 4864, 4859, 4869, 4864, 4874, 4903, 4898, 4903, 4898, 4903, 4898, 4908, 4937, 4932, 4937, 4932, 4937, 4932, 4942, 4952, 4947, 4957, 4976, 4971, 24388, 4976, 4971, 24390, 5002, 4997, 23486, 5002, 4997, 23489, 5017, 5012, 5027, 5022, 5041, 5036, 5041, 5046, 5069, 5064, 5059, 5069, 5064, 5074, 5084, 5079, 5094, 5089, 5117, 5112, 5107, 5117, 5112, 5122, 5145, 5140, 5135, 5145, 5140, 5150, 23520, 5210, 23535, 5223, 5246, 5241, 5236, 5246, 5241, 5251, 5261, 5256, 5271, 5266, 5192, 5187, 5197, 23532, 5210, 23535, 5223, 5246, 5241, 5236, 5246, 5241, 5251, 5261, 5256, 5271, 5266, 23548, 5284, 5348, 5343, 24392, 24394, 5435, 5445, 5440, 5486, 5481, 5496, 5491, 5329, 5329, 5348, 5343, 24396, 5348, 5343, 24398, 5374, 5369, 5364, 5374, 5369, 5379, 5427, 5422, 24400, 5427, 5422, 24402, 5400, 23577, 5427, 5422, 24404, 5427, 5422, 24406, 5435, 5445, 5440, 5486, 5481, 5496, 5491, 5471, 5466, 5476, 5486, 5481, 5496, 5491, 23597, 5509, 23600, 5522, 5532, 5527, 5537, 5551, 5546, 5551, 5556, 6055, 6050, 24408, 6055, 6050, 24410, 6081, 6076, 6071, 6081, 6076, 6086, 6196, 6191, 6201, 6196, 6191, 6206, 6211, 6216, 24412, 6211, 6216, 24414, 6270, 6265, 5916, 6277, 6282, 6285, 6283, 6503, 6498, 23628, 6518, 6513, 6528, 6523, 6586, 6581, 6591, 6596, 6606, 6601, 6616, 6611, 5670, 5665, 24416, 6659, 6654, 6649, 6659, 6654, 6664, 6581, 6586, 6596, 6591, 6606, 6601, 6616, 6611, 23847, 24418, 6659, 6654, 6649, 6659, 6654, 6664, 23646, 6718, 6733, 6743, 6738, 6748, 6753, 6763, 6758, 23654, 23656, 23658, 23660, 6763, 6758, 23664, 23666, 24426, 6031, 24428, 6036, 6124, 6119, 5814, 6124, 6119, 5827, 6142, 6137, 24430, 6142, 6137, 24432, 6196, 6191, 6201, 6196, 6191, 6206, 6211, 6216, 24434, 6211, 6216, 24436, 6270, 6265, 5916, 6282, 6277, 24438, 6196, 6191, 6201, 6196, 6191, 6206, 6211, 6216, 24440, 6211, 6216, 
24442, 6265, 6270, 5916, 6282, 6277, 24444, 6295, 6290, 6305, 6300, 6328, 6323, 6333, 6318, 6402, 6397, 6364, 6402, 6397, 6407, 23713, 23715, 24446, 6036, 6031, 23719, 6023, 6018, 24448, 6036, 6031, 6055, 6050, 24450, 6055, 6050, 24452, 6081, 6076, 6071, 6081, 6076, 6086, 6109, 6104, 6099, 6109, 6104, 6114, 6124, 6119, 23743, 6142, 6137, 6152, 6147, 6181, 6176, 6181, 6176, 6181, 6176, 6186, 6196, 6191, 6206, 6201, 6216, 6211, 6226, 6221, 6270, 6265, 6270, 6265, 6282, 6277, 24456, 6270, 6265, 6270, 6265, 6282, 6277, 24460, 6295, 6290, 6305, 6300, 6328, 6323, 6318, 6328, 6323, 6333, 23786, 6351, 6346, 6402, 6397, 6364, 6387, 6382, 23794, 6387, 6382, 23797, 6402, 6397, 6407, 23802, 6425, 6420, 6435, 6430, 6445, 6440, 6455, 6450, 6465, 6460, 6475, 6470, 6480, 6503, 6498, 6493, 6503, 6498, 6508, 6518, 6513, 6528, 6523, 6551, 6546, 6541, 6551, 6546, 6556, 6566, 6561, 6576, 6571, 6586, 6581, 6596, 6591, 6606, 6601, 6616, 6611, 23845, 24462, 23847, 24464, 6659, 6654, 6649, 6659, 6654, 6664, 23855, 6718, 6733, 6743, 6738, 6748, 6753, 6763, 6758, 23862, 6780, 23880, 6793, 6728, 6723, 6718, 6728, 6723, 6733, 6743, 6738, 6753, 6748, 6763, 6758, 6775, 6770, 6780, 23880, 6793, 6803, 6798, 6813, 6808, 6832, 6827, 24468, 6832, 6827, 24470, 6845, 6840, 6855, 6850, 6884, 6879, 6884, 6879, 6884, 6879, 6889, 6908, 6903, 24474, 6908, 6903, 24476, 6930, 6925, 24478, 6930, 6925, 24480, 6956, 6951, 6946, 6956, 6951, 6961, 23916, 6974, 23919, 6987, 7010, 7005, 7000, 7010, 7005, 7015, 7038, 7033, 7028, 7038, 7033, 7043, 7062, 7057, 24482, 7062, 7057, 24484, 7088, 7083, 7078, 7088, 7083, 7093, 7116, 7111, 7106, 7116, 7111, 7121, 7144, 7139, 7134, 7144, 7139, 7149, 7201, 7196, 7206, 7191, 7216, 7211, 7178, 7201, 7196, 7191, 7201, 7196, 7206, 7216, 7211, 7221, 7556, 7551, 7574, 7569, 24486, 7624, 7619, 7719, 7714, 7665, 7546, 7541, 7592, 7587, 24488, 7611, 7606, 24490, 7696, 7719, 7714, 7729, 7724, 23991, 7546, 7541, 7556, 7551, 23997, 7574, 7569, 7347, 7342, 7592, 7587, 7611, 7606, 24007, 
7729, 7719, 7714, 24012, 7882, 7877, 7908, 7903, 7961, 7956, 7984, 7979, 7824, 24023, 7847, 7842, 7852, 7872, 7867, 7951, 7946, 7984, 7979, 7984, 7979, 7819, 7814, 24038, 7882, 7877, 7882, 7877, 7908, 7903, 7923, 7918, 7928, 7546, 7541, 7556, 7551, 24053, 7574, 7569, 24057, 7592, 7587, 7611, 7606, 24500, 7611, 7606, 24502, 24084, 7709, 7624, 7619, 7696, 7719, 7714, 7724, 7719, 7714, 7729, 24074, 7665, 7753, 7758, 7683, 7678, 24081, 7696, 24084, 7709, 7719, 7714, 7729, 7724, 24091, 24093, 7758, 7753, 24097, 7819, 7814, 7824, 7819, 7814, 7837, 7847, 7842, 24107, 7819, 7814, 7824, 24112, 7837, 7847, 7842, 7852, 7862, 7857, 7872, 7867, 7882, 7877, 24508, 7908, 7903, 7898, 7908, 7903, 7913, 7923, 7918, 7928, 24133, 7941, 7951, 7946, 7961, 7956, 7984, 7979, 7974, 7984, 7979, 7989, 10802, 10802, 24781, 10736, 10802, 10802, 10802, 10802, 10802, 10802, 10802, 10903, 10898, 10893, 10903, 10898, 10908, 10918, 10913, 24783, 10918, 10913, 24785, 10938, 10933, 10948, 10943, 10958, 10953, 24787, 10807, 10807, 10807, 10807, 10807, 10807, 10807, 10807, 10807, 11576, 11571, 11581, 11576, 11571, 11586, 11605, 11600, 11576, 11571, 11586, 11581, 11605, 11600, 11630, 11625, 11620, 11630, 11625, 11635, 11658, 11653, 11663, 11658, 11653, 11658, 11653, 24823, 10938, 10933, 10948, 10943, 10958, 10953, 24825, 24827, 10908, 10893, 11483, 11478, 11483, 11478, 11483, 11478, 24829, 24831, 24833, 24835, 24837, 11483, 11478, 24839, 11483, 11478, 24841, 24843, 24845, 24847, 24849, 11543, 11538, 24851, 10535, 10530, 10535, 10530, 10441, 10436, 10551, 10574, 10569, 24855, 24520, 10466, 10485, 10480, 24857, 10485, 10480, 24859, 10511, 10506, 10501, 10511, 10506, 10516, 10535, 10530, 24861, 10535, 10530, 24863, 24537, 10551, 24540, 10564, 10574, 10569, 10584, 10579, 10607, 10602, 10597, 10607, 10602, 10612, 10631, 10626, 24865, 10631, 10626, 24867, 24557, 10647, 10750, 10755, 10750, 10755, 10755, 10750, 24871, 10768, 10763, 10778, 10773, 10807, 10802, 10802, 10807, 24873, 10736, 10750, 10755, 24875, 
10755, 10750, 24877, 10768, 10763, 10778, 10773, 10807, 10802, 10807, 10802, 24880, 10736, 10750, 10755, 24882, 10755, 10750, 24884, 10768, 10763, 10778, 10773, 10802, 10807, 10802, 10807, 10731, 10726, 10736, 10755, 10750, 24887, 10755, 10750, 24889, 10768, 10763, 10778, 10773, 10807, 10802, 10807, 10802, 10807, 10802, 10812, 24596, 10825, 10903, 10898, 10893, 10903, 10898, 10908, 10918, 10913, 24893, 10918, 10913, 24895, 10938, 10933, 10948, 10943, 10953, 10958, 24897, 10903, 10898, 10893, 10903, 10898, 10908, 10918, 10913, 10928, 10923, 10938, 10933, 10948, 10943, 10958, 10953, 24899, 11183, 11178, 11193, 11188, 11183, 11178, 11154, 11149, 24901, 11234, 11229, 24903, 11234, 11229, 24905, 11154, 11149, 11252, 11247, 11262, 11257, 11272, 11267, 24907, 11272, 11267, 24909, 11292, 11287, 11297, 11292, 11287, 11302, 11321, 11316, 11078, 11183, 11178, 24911, 11183, 11178, 24913, 11198, 24661, 11183, 11178, 11188, 11183, 11178, 11193, 11203, 11213, 11208, 11154, 11149, 24915, 11234, 11229, 11173, 11168, 11183, 11178, 11193, 11188, 11203, 11198, 11213, 11208, 24686, 24917, 11234, 11229, 24690, 11252, 11247, 11262, 11257, 11272, 11267, 11282, 11277, 11292, 11287, 11302, 11297, 11321, 11316, 11321, 11316, 11483, 11478, 11483, 11478, 11483, 11478, 24923, 24925, 24927, 24929, 24931, 11483, 11478, 24933, 11483, 11478, 24935, 24937, 24940, 24943, 24945, 11543, 11538, 11553, 11548, 11563, 11558, 24947, 11543, 11538, 11553, 11548, 11563, 11558, 24949, 11576, 11571, 11581, 11576, 11571, 11586, 11605, 11600, 11576, 11571, 11586, 11581, 11605, 11600, 11630, 11625, 11658, 11653, 11658, 11653, 11658, 11653, 11483, 11478, 24955, 11483, 11478, 24957, 11505, 11500, 24959, 11505, 11500, 24961, 11543, 11538, 11553, 11548, 11563, 11558, 24963, 11543, 11538, 11553, 11548, 11563, 11558, 24965, 11576, 11571, 11586, 11581, 11605, 11600, 11605, 11600, 11630, 11625, 11620, 11630, 11625, 11635, 11658, 11653, 11648, 11658, 11653, 11663, 13435, 13440, 13450, 13445, 13460, 13455, 24990, 25134, 
25136, 13498, 13493, 13430, 13440, 13435, 13450, 13445, 13460, 13455, 24990, 25138, 25140, 13498, 13493, 13430, 13435, 13440, 13450, 13445, 13460, 13455, 24790, 13478, 13473, 25142, 13498, 13493, 13430, 13885, 13880, 13895, 13890, 13561, 13556, 13571, 13566, 13581, 13576, 13586, 13581, 13576, 13591, 11825, 11820, 24809, 13335, 13330, 13345, 13340, 13355, 13350, 13365, 13360, 13388, 13383, 13378, 13388, 13383, 13393, 13701, 13696, 11847, 11842, 13727, 13732, 25055, 13732, 13727, 25176, 13742, 13747, 13757, 13752, 13780, 13775, 13785, 13770, 24970, 24972, 13335, 13330, 13345, 13340, 13355, 13350, 13365, 13360, 13388, 13383, 13378, 13388, 13383, 13393, 13435, 13440, 13450, 13445, 13460, 13455, 24990, 13478, 13473, 13483, 13488, 13498, 13493, 13430, 13440, 13435, 13450, 13445, 13460, 13455, 25003, 13478, 13473, 13488, 13483, 13498, 13493, 13503, 13513, 13508, 13523, 13518, 13546, 13541, 13536, 13546, 13541, 13551, 13561, 13556, 13571, 13566, 13581, 13576, 13591, 13586, 13614, 13609, 13604, 13614, 13609, 13619, 13638, 13633, 25207, 13638, 13633, 25209, 13660, 13655, 25211, 13660, 13655, 25213, 13686, 13681, 13676, 13686, 13681, 13691, 13701, 13696, 25052, 13732, 13727, 25055, 13732, 13727, 25058, 13747, 13742, 13757, 13752, 13780, 13775, 13770, 13780, 13775, 13785, 13814, 13809, 13814, 13809, 13814, 13809, 13819, 13842, 13837, 13832, 13842, 13837, 13847, 13870, 13865, 13860, 13870, 13865, 13875, 13885, 13880, 13895, 13890, 13914, 13909, 25217, 13914, 13909, 25219, 13927, 13922, 13937, 13932, 13947, 13942, 13952, 25104, 13965, 13984, 13979, 25221, 13984, 13979, 25223, 13997, 13992, 14007, 14002, 14017, 14012, 14027, 14022, 14037, 14032, 14047, 14042, 14066, 14061, 25225, 14066, 14061, 25227, 15282, 15277, 15292, 15287, 15302, 15297, 15302, 15297, 14157, 14157, 14157, 15317, 15312, 25272, 15325, 15335, 15340, 15350, 15345, 25274, 15350, 15345, 25276, 15383, 15378, 25278, 14266, 14266, 14266, 15302, 15297, 15282, 15277, 15292, 15287, 15302, 15297, 15297, 15302, 15330, 
15330, 15350, 15345, 25282, 15350, 15345, 25284, 15383, 15378, 25286, 15282, 15277, 15292, 15287, 15302, 15297, 15282, 15277, 15292, 15287, 15302, 15297, 15325, 15335, 15340, 15325, 15350, 15345, 25290, 15350, 15345, 25292, 15383, 15378, 25294, 25201, 25203, 25205, 15420, 15282, 15277, 15292, 15287, 15302, 15297, 15272, 15282, 15277, 15292, 15287, 15302, 15297, 15307, 15317, 15312, 25296, 15330, 15325, 15335, 15340, 15282, 15277, 15292, 15287, 15302, 15297, 15272, 15282, 15277, 15292, 15287, 15302, 15297, 15307, 15317, 15312, 25298, 15330, 15325, 15340, 15335, 15350, 15345, 15360, 15355, 15383, 15378, 15373, 15383, 15378, 15388, 25266, 25268, 25270, 15420, 25302, 25304, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16676, 16681, 16671, 16676, 16681, 16686, 16681, 16676, 16671, 16681, 16676, 16686, 14, 15, 25331, 25332, 25333, 25334, 25335, 25336, 25341, 25342, 25343, 25344, 25346, 25347, 25349, 25350, 25351, 25352, 25353, 25354, 25355, 25356, 25357, 25358, 25359, 25360, 25361, 25362, 25363, 25364, 25365, 25366, 25367, 25368, 25369, 25370, 25371, 25372, 25373, 25374, 25375, 25376, 25377, 25378, 25379, 25380, 25381, 25382, 25383, 25384, 25385, 25386, 25387, 25388, 25389, 25390, 25391, 25392, 25393, 25394, 25395, 25396, 25397, 25398, 25399, 25400, 25401, 25402, 25403, 25404, 25405, 25406, 25407, 25408, 25409, 25411, 25412, 25413, 25414, 25415, 25416, 25418, 25419, 25420, 25421, 25422, 25423, 25424, 25425, 25427, 25428, 25429, 25430, 25431, 25432, 25433, 25434, 25435, 25436, 25437, 25438, 25439, 25440, 25441, 25442, 25443, 25444, 25445, 25446, 25447, 25448, 25450, 25451, 25453, 25454, 25455, 25456, 25457, 25461, 25462, 25464, 25466, 25467, 25468, 25469, 25470, 25471, 25472, 25473, 25474, 25475, 25476, 25477, 25478, 25479, 25480, 25481, 25482, 25483, 25484, 25485, 25486, 25487, 25489, 25490, 25492, 25493, 25494, 25495, 25496, 25497, 25498, 25499, 
25500, 25501, 25502, 25503, 25504, 25505, 25506, 25507, 25508, 25509, 25510, 25511, 25512, 25513, 25514, 25515, 25516, 25517, 25518, 25519, 25520, 25521, 25522, 25523, 25524, 25525, 25526, 25527, 25528, 25529, 25530, 25531, 25532, 25533, 25534, 25535, 25536, 25537, 25539, 25540, 25541, 25542, 25543, 25544, 25545, 25546, 25547, 25548, 25549, 25550, 25551, 25552, 25554, 25555, 25556, 25557, 25558, 25559, 25560, 25561, 25562, 25563, 25564, 25565, 25566, 25567, 25568, 25569, 25570, 25571, 25572, 25573, 25574, 25575, 25576, 25577, 25578, 25579, 25580, 25581, 25582, 25583, 25584, 25585, 25586, 25587, 25588, 25589, 25590, 25591, 25592, 25593, 25594, 25595, 25596, 25597, 25598, 25599, 25600, 25601, 25602, 25603, 25604, 25605, 25606, 25607, 25608, 25609, 25610, 25611, 25612, 25613, 25614, 25615, 25616, 25617, 25618, 25620, 25621, 25623, 25624, 25625, 25629, 25630, 25631, 25632, 25633, 25635, 25636, 25640, 25641, 25642, 25643, 25644, 25645, 25646, 25647, 25648, 25649, 25650, 25651, 25652, 25657, 25658, 25659, 25660, 25661, 25662, 25663, 25664, 25665, 25666, 25667, 25668, 25669, 25670, 25671, 25672, 25673, 25674, 25675, 25676, 25677, 25678, 25679, 25680, 25681, 25682, 25683, 25684, 25685, 25686, 25687, 25688, 25689, 25690, 25691, 25692, 25693, 25694, 25695, 25696, 25697, 25698, 25699, 25700, 25701, 25702, 25703, 25704, 25705, 25706, 25707, 25708, 25709, 25710, 25711, 25712, 25713, 25714, 25715, 25716, 25717, 25718, 25719, 25720, 25721, 25722, 25723, 25724, 25725, 25726, 25727, 25728, 25729, 25730, 25731, 25732, 25733, 25734, 25735, 25736, 25737, 25738, 25739, 25740, 25741, 25742, 25743, 25744, 25745, 25746, 25747, 25748, 25749, 25750, 25751, 25752, 25753, 25754, 25755, 25756, 25757, 25758, 25759, 25760, 25761, 25762, 25763, 25764, 25765, 25766, 25767, 25768, 25769, 25770, 25771, 25772, 25773, 25774, 25775, 25776, 25777, 25778, 25779, 25780, 25781, 25782, 25783, 25784, 25785, 25786, 25787, 25788, 25789, 25790, 25791, 25792, 25793, 25794, 25795, 25796, 25798, 25799, 25800, 
25801, 25802, 25803, 25804, 25805, 25806, 25807, 25808, 25809, 25810, 25811, 25812, 25813, 25814, 25815, 25816, 25817, 25818, 25819, 25820, 25821, 25822, 25823, 25826, 25827, 25828, 25829, 25830, 25831, 25832, 25833, 25834, 25835, 25836, 25837, 25838, 25839, 25840, 25841, 25842, 25843, 25844, 25845, 25846, 25847, 25848, 25849, 25850, 25851, 25852, 25853, 25854, 25855, 25856, 25857, 25858, 25859, 25860, 25861, 25862, 25863, 25864, 25865, 25866, 25867, 25868, 25869, 25870, 25871, 25872, 25873, 25874, 25875, 25876, 25877, 25878, 25879, 25880, 25881, 25882, 25883, 25884, 25885, 25886, 25887, 25888, 25889, 25890, 25891, 25892, 25893, 25894, 25895, 25896, 25897, 25898, 25899, 25900, 25901, 25902, 25903, 25904, 25905, 25906, 25907, 25908, 25909, 25910, 25911, 25912, 25913, 25914, 25915, 25916, 25917, 25918, 25919, 25920, 25921, 25922, 25923, 25924, 25925, 25926, 25927, 25928, 25929, 25930, 25931, 25932, 25933, 25934, 25935, 25936, 25937, 25938, 25939, 25940, 25941, 25942, 25943, 25944, 25945, 25946, 25947, 25948, 25949, 25950, 25951, 25952, 25953, 25954, 25955, 25956, 25957, 25958, 25959, 25960, 25961, 25962, 25963, 25964, 25965, 25966, 25967, 25968, 25969, 25970, 25971, 25972, 25973, 25974, 25975, 25976, 25977, 25978, 25979, 25980, 25981, 25982, 25983, 25984, 25985, 25986, 25987, 25988, 25989, 25990, 25991, 25993, 25994, 25995, 25996, 25997, 25998, 26000, 26001, 26003, 26004, 26006, 26007, 26009, 26010, 26011, 26012, 26013, 26014, 26015, 26016, 26017, 26018, 26019, 26020, 26021, 26022, 26023, 26024, 26026, 26027, 26028, 26029, 26030, 26031, 26033, 26034, 26036, 26037, 26039, 26040, 26042, 26043, 26044, 26045, 26046, 26047, 26048, 26049, 26050, 26051, 26052, 26053, 26054, 26055, 26056, 26057, 26058, 26059, 26060, 26061, 26062, 26063, 26064, 26065, 26066, 26067, 26068, 26069, 26070, 26071, 26072, 26073, 26074, 26075, 26076, 26077, 26078, 26079, 26080, 26081, 26082, 26083, 26084, 26085, 26086, 26087, 26088, 26089, 26090, 26091, 26092, 26093, 26094, 26095, 26096, 26097, 
26098, 26099, 26100, 26101, 26102, 26103, 26104, 26105, 26106, 26107, 26108, 26109, 26110, 26111, 26112, 26113, 26114, 26115, 26116, 26117, 26118, 26119, 26120, 26121, 26122, 26123, 26124, 26125, 26126, 26127, 26128, 26129, 26130, 26131, 26132, 26133, 26134, 26135, 26137, 26138, 26140, 26141, 26143, 26144, 26146, 26147, 26148, 26149, 26150, 26151, 26152, 26153, 26154, 26155, 26156, 26157, 26158, 26159, 26160, 26161, 26162, 26163, 26164, 26165, 26166, 26167, 26168, 26169, 26170, 26171, 26172, 26173, 26174, 26175, 26176, 26177, 26178, 26179, 26180, 26181, 26182, 26183, 26184, 26185, 26186, 26187, 26188, 26189, 26190, 26191, 26192, 26193, 26194, 26195, 26196, 26197, 26198, 26199, 26200, 26201, 26202, 26203, 26204, 26205, 26206, 26207, 26208, 26209, 26210, 26211, 26212, 26213, 26214, 26215, 26216, 26217, 26218, 26219, 26220, 26221, 26222, 26223, 26224, 26225, 26226, 26227, 26228, 26229, 26230, 26231, 26232, 26233, 26234, 26235, 26236, 26237, 26238, 26239, 26240, 26241, 26242, 26243, 26244, 26245, 26246, 26247, 26248, 26249, 26250, 26252, 26253, 26255, 26256, 26257, 26258, 26259, 26260, 26261, 26262, 26263, 26264, 26265, 26266, 26267, 26269, 26270, 26271, 26272, 26273, 26274, 26276, 26277, 26278, 26280, 26281, 26282, 26283, 26284, 26286, 26287, 26288, 26289, 26291, 26292, 26294, 26295, 26296, 26297, 26298, 26299, 26300, 26301, 26302, 26303, 26304, 26305, 26306, 26307, 26308, 26309, 26310, 26311, 26312, 26313, 26314, 26315, 26316, 26317, 26318, 26319, 26320, 26321, 26322, 26323, 26324, 26325, 26326, 26327, 26328, 26329, 26330, 26331, 26332, 26333, 26334, 26335, 26336, 26337, 26338, 26339, 26340, 26341, 26342, 26343, 26344, 26345, 26346, 26347, 26348, 26349, 26350, 26351, 26352, 26353, 26354, 26355, 26356, 26357, 26358, 26359, 26360, 26361, 26362, 26363, 26367, 26368, 26369, 26370, 26371, 26372, 26373, 26374, 26375, 26376, 26377, 26378, 26379, 26380, 26381, 26382, 26383, 26385, 26387, 26388, 26389, 26390, 26391, 26392, 26393, 26394, 26399, 26400, 26401, 26402, 26403, 
26404, 26405, 26406, 26407, 26408, 26409, 26410, 26411, 26413, 26414, 26416, 26417, 26418, 26419, 26420, 26421, 26422, 26423, 26424, 26425, 26426, 26427, 26428, 26429, 26430, 26431, 26432, 26433, 26434, 26435, 26436, 26437, 26438, 26439, 26441, 26442, 26444, 26445, 26446, 26447, 26448, 26449, 26450, 26451, 26452, 26453, 26454, 26455, 26456, 26457, 26458, 26459, 26460, 26461, 26462, 26463, 26464, 26465, 26466, 26467, 26468, 26469, 26470, 26471, 26472, 26473, 26474, 26475, 26476, 26477, 26478, 26479, 26480, 26481, 26482, 26483, 26484, 26485, 26486, 26487, 26488, 26489, 26490, 26491, 26492, 26493, 26494, 26495, 26496, 26497, 26498, 26499, 26500, 26501, 26502, 26503, 26504, 26505, 26506, 26507, 26508, 26509, 26510, 26511, 26512, 26513, 26514, 26515, 26516, 26517, 26518, 26519, 26520, 26521, 26522, 26523, 26524, 26525, 26526, 26527, 26529, 26530, 26531, 26532, 26533, 26534, 26535, 26536, 26537, 26538, 26539, 26540, 26541, 26542, 26543, 26544, 26545, 26546, 26547, 26548, 26549, 26550, 26551, 26552, 26553, 26554, 26555, 26556, 26557, 26558, 26559, 26561, 26562, 26564, 26565, 26566, 26567, 26568, 26569, 26571, 26572, 26573, 26574, 26575, 26576, 26577, 26578, 26579, 26580, 26582, 26583, 26585, 26586, 26587, 26588, 26589, 26590, 26592, 26593, 26594, 26595, 26596, 26597, 26598, 26599, 26600, 26601, 26602, 26603, 26604, 26605, 26606, 26607, 26608, 26609, 26610, 26611, 26612, 26613, 26614, 26615, 26616, 26617, 26618, 26619, 26620, 26621, 26622, 26623, 26625, 26626, 26628, 26629, 26630, 26631, 26632, 26633, 26634, 26635, 26636, 26637, 26638, 26639, 26640, 26641, 26642, 26644, 26645, 26647, 26648, 26649, 26650, 26651, 26652, 26654, 26655, 26656, 26657, 26658, 26659, 26660, 26661, 26662, 26663, 26664, 26665, 26666, 26667, 26668, 26669, 26670, 26672, 26674, 26675, 26676, 26677, 26678, 26679, 26680, 26681, 26682, 26683, 26684, 26685, 26686, 26687, 26688, 26689, 26691, 26692, 26694, 26695, 26696, 26697, 26698, 26699, 26700, 26701, 26702, 26703, 26704, 26705, 26706, 26707, 26708, 
26709, 26710, 26711, 26712, 26713, 26714, 26715, 26716, 26717, 26718, 26719, 26720, 26721, 26722, 26723, 26724, 26725, 26726, 26727, 26728, 26729, 26730, 26731, 26732, 26733, 26734, 26735, 26736, 26737, 26738, 26739, 26740, 26741, 26742, 26743, 26744, 26745, 26746, 26747, 26748, 26749, 26751, 26752, 26754, 26755, 26756, 26757, 26758, 26759, 26760, 26761, 26762, 26763, 26764, 26765, 26766, 26767, 26768, 26769, 26770, 26771, 26772, 26773, 26774, 26775, 26776, 26777, 26778, 26779, 26780, 26781, 26782, 26783, 26784, 26785, 26786, 26787, 26788, 26789, 26790, 26791, 26792, 26793, 26794, 26795, 26796, 26797, 26798, 26799, 26800, 26801, 26802, 26803, 26804, 26805, 26806, 26807, 26808, 26809, 26810, 26811, 26812, 26813, 26814, 26815, 26816, 26817, 26818, 26819, 26820, 26821, 26822, 26823, 26824, 26827, 26828, 26829, 26830, 26831, 26832, 26833, 26834, 26835, 26836, 26837, 26839, 26840, 26842, 26843, 26844, 26845, 26846, 26847, 26848, 26849, 26851, 26852, 26854, 26855, 26856, 26857, 26859, 26860, 26862, 26863, 26864, 26865, 26866, 26867, 26868, 26869, 26870, 26871, 26872, 26873, 26874, 26875, 26876, 26877, 26878, 26879, 26880, 26881, 26882, 26883, 26884, 26885, 26886, 26887, 26888, 26890, 26891, 26893, 26894, 26895, 26896, 26897, 26898, 26899, 26900, 26901, 26902, 26903, 26904, 26905, 26906, 26908, 26909, 26911, 26912, 26913, 26914, 26915, 26916, 26917, 26918, 26919, 26920, 26921, 26922, 26923, 26924, 26925, 26926, 26927, 26928, 26929, 26930, 26931, 26932, 26933, 26934, 26936, 26937, 26938, 26939, 26940, 26941, 26942, 26943, 26944, 26945, 26946, 26947, 26948, 26949, 26950, 26952, 26953, 26954, 26955, 26956, 26957, 26958, 26959, 26960, 26961, 26962, 26963, 26964, 26965, 26966, 26967, 26968, 26969, 26970, 26971, 26972, 26973, 26974, 26976, 26978, 26979, 26980, 26981, 26982, 26983, 26984, 26985, 26986, 26988, 26989, 26991, 26992, 26993, 26994, 26995, 26996, 26997, 26998, 27000, 27001, 27003, 27004, 27005, 27006, 27007, 27009, 27010, 27011, 27012, 27013, 27014, 27015, 27016, 
27018, 27019, 27021, 27022, 27023, 27024, 27025, 27027, 27028, 27029, 27030, 27031, 27032, 27033, 27034, 27035, 27036, 27037, 27038, 27039, 27040, 27041, 27042, 27044, 27045, 27046, 27047, 27048, 27050, 27051, 27052, 27053, 27055, 27056, 27058, 27059, 27060, 27061, 27062, 27063, 27064, 27065, 27066, 27067, 27068, 27069, 27070, 27071, 27072, 27073, 27074, 27075, 27076, 27077, 27078, 27079, 27080, 27081, 27082, 27083, 27084, 27085, 27086, 27087, 27088, 27089, 27090, 27091, 27092, 27093, 27094, 27095, 27096, 27097, 27099, 27100, 27101, 27102, 27103, 27104, 27106, 27107, 27108, 27109, 27110, 27111, 27112, 27113, 27114, 27115, 27116, 27117, 27118, 27119, 27120, 27121, 27122, 27123, 27124, 27125, 27126, 27127, 27128, 27129, 27130, 27131, 27132, 27133, 27134, 27135, 27136, 27137, 27138, 27139, 27140, 27141, 27142, 27143, 27144, 27145, 27146, 27147, 27148, 27149, 27150, 27151, 27152, 27153, 27154, 27155, 27156, 27157, 27158, 27159, 27160, 27161, 27162, 27163, 27164, 27165, 27166, 27167, 27168, 27169, 27170, 27171, 27172, 27173, 27175, 27177, 27178, 27179, 27180, 27181, 27182, 27183, 27184, 27185, 27186, 27187, 27188, 27189, 27190, 27191, 27192, 27193, 27194, 27195, 27196, 27197, 27198, 27199, 27200, 27201, 27202, 27203, 27204, 27205, 27206, 27207, 27208, 27209, 27210, 27211, 27212, 27213, 27214, 27215, 27216, 27217, 27218, 27220, 27221, 27223, 27224, 27225, 27226, 27227, 27228, 27229, 27230, 27231, 27232, 27233, 27234, 27235, 27237, 27238, 27240, 27241, 27243, 27244, 27246, 27247, 27248, 27249, 27250, 27251, 27252, 27253, 27254, 27255, 27256, 27257, 27258, 27259, 27260, 27261, 27262, 27263, 27264, 27265, 27266, 27267, 27268, 27269, 27271, 27272, 27274, 27275, 27276, 27277, 27278, 27279, 27280, 27281, 27282, 27283, 27284, 27285, 27286, 27287, 27288, 27289, 27290, 27291, 27292, 27293, 27294, 27295, 27296, 27297, 27298, 27299, 27300, 27301, 27302, 27303, 27304, 27305, 27306, 27307, 27308, 27309, 27310, 27311, 27313, 27314, 27315, 27316, 27317, 27318, 27319, 27320, 27321, 
27323, 27324, 27326, 27327, 27328, 27329, 27330, 27331, 27332, 27333, 27334, 27335, 27336, 27337, 27338, 27339, 27340, 27341, 27342, 27343, 27344, 27345, 27346, 27347, 27348, 27349, 27350, 27351, 27352, 27353, 27354, 27355, 27356, 27357, 27358, 27359, 27360, 27361, 27362, 27363, 27364, 27365, 27366, 27367, 27368, 27369, 27370, 27371, 27372, 27373, 27374, 27375, 27376, 27377, 27378, 27379, 27380, 27381, 27382, 27383, 27384, 27385, 27386, 27387, 27388, 27389, 27390, 27391, 27392, 27393, 27394, 27396, 27397, 27399, 27400, 27401, 27402, 27403, 27404, 27405, 27406, 27407, 27408, 27409, 27410, 27411, 27412, 27413, 27414, 27415, 27416, 27417, 27418, 27419, 27420, 27421, 27422, 27423, 27424, 27425, 27426, 27427, 27428, 27429, 27430, 27431, 27432, 27433, 27434, 27435, 27436, 27437, 27438, 27439, 27440, 27441, 27442, 27443, 27444, 27445, 27446, 27447, 27448, 27449, 27450, 27451, 27453, 27454, 27455, 27456, 27457, 27458, 27459, 27460, 27461, 27462, 27463, 27464, 27465, 27466, 27467, 27468, 27469, 27470, 27471, 27472, 27473, 27474, 27475, 27477, 27478, 27479, 27480, 27481, 27482, 27483, 27484, 27485, 27486, 27487, 27488, 27489, 27490, 27491, 27492, 27494, 27495, 27497, 27498, 27499, 27500, 27501, 27502, 27504, 27505, 27506, 27507, 27508, 27509, 27510, 27511, 27512, 27513, 27514, 27515, 27516, 27517, 27518, 27519, 27520, 27521, 27522, 27523, 27524, 27525, 27526, 27527, 27528, 27529, 27530, 27531, 27532, 27533, 27534, 27535, 27536, 27537, 27538, 27539, 25340, 25338, 27541, 27542, 27543, 27544, 27545, 27546, 27239, 27236, 25628, 27239, 27549, 27550, 27551, 27552, 27553, 27554, 27555, 27556, 27562, 27563, 27565, 27566, 27572, 27573, 25656, 25654, 24257, 24254, 26398, 26365, 26398, 26396, 27575, 27576, 27577, 27578, 27579, 27580, 27581, 27582, 27583, 27585, 27586, 27587, 27588, 27590, 27591, 27593, 27594, 27595, 27596, 27597, 27598, 27599, 27600, 27602, 27603, 27605, 27606, 27607, 27608, 27609, 27610, 27611, 27612, 27613, 27614, 27615, 27616, 27617, 27618, 27619, 27620, 27622, 
27623, 27625, 27626, 27627, 27628, 27629, 27630, 27631, 27632, 27634, 27635, 27636, 27637, 27638, 27639, 27640, 27641, 27643, 27644, 27645, 27647, 27648, 27650, 27651, 27652, 27653, 27654, 27655, 27656, 27657, 27659, 27660, 27661, 27663, 27664, 27666, 27667, 27668, 27669, 27670, 27671, 27672, 27673, 27674, 27675, 27676, 27677, 27678, 27680, 27681, 27683, 27684, 27685, 27686, 27687, 27688, 27689, 27690, 27691, 27692, 27693, 27694, 27695, 27696, 27697, 27698, 27699, 27700, 27701, 27702, 27703, 27705, 27706, 27708, 27709, 27710, 27711, 27712, 27713, 27715, 27716, 27717, 27718, 27719, 27720, 27721, 27722, 27723, 27724, 27725, 27726, 27727, 27728, 27729, 27730, 27732, 27733, 27734, 27735, 27736, 27737, 27738, 27739, 27741, 27742, 27744, 27745, 27747, 27748, 27749, 27750, 27751, 27752, 27753, 27754, 27756, 27757, 27759, 27760, 27761, 27762, 27763, 27764, 27765, 27766, 27767, 27768, 27769, 27771, 27772, 27774, 27775, 27776, 27777, 27778, 27779, 27780, 27781, 27782, 27783, 27784, 27785, 27786, 27788, 27789, 27790, 27791, 27792, 27793, 27794, 27795, 27796, 27797, 27798, 27799, 27800, 27802, 27803, 27804, 27805, 27806, 27807, 27808, 27809, 27810, 27811, 27812, 27813, 27814, 27815, 27816, 27817, 27818, 27819, 27820, 27821, 27822, 27823, 27824, 27825, 27826, 27832, 27833, 27835, 27836, 27842, 27843, 27844, 27845, 27846, 27847, 27849, 27850, 27851, 27852, 27853, 27854, 27856, 27857, 27858, 27859, 27860, 27861, 27862, 27863, 27864, 27865, 27866, 27867, 27868, 27869, 27870, 27871, 27872, 27873, 27874, 27875, 27876, 27877, 27878, 27879, 27881, 27882, 27884, 27885, 27887, 27888, 27890, 27891, 27892, 27893, 27894, 27895, 27897, 27898, 27899, 27900, 27901, 27902, 27904, 27905, 27906, 27907, 27908, 27909, 27910, 27911, 27912, 27913, 27914, 27915, 27916, 27917, 27918, 27919, 27920, 27921, 27922, 27923, 27924, 27925, 27926, 27927, 27928, 27929, 27930, 27933, 27934, 27935, 27936, 27937, 27938, 27939, 27940, 27941, 27942, 27945, 27946, 27947, 27948, 27949, 27950, 27951, 27952, 27953, 
27954, 27955, 27956, 27958, 27959, 27960, 27961, 27962, 27963, 27964, 27965, 27966, 27967, 27968, 27969, 27970, 27971, 27972, 27973, 27974, 27975, 27976, 27977, 27978, 27979, 27980, 27981, 27982, 27983, 27984, 27985, 27986, 27987, 27988, 27989, 27990, 27991, 27992, 27993, 27994, 27995, 27996, 27997, 27998, 27999, 28000, 28002, 28003, 28004, 28005, 28006, 28007, 28008, 28009, 27561, 27559, 27571, 24942, 24939, 27831, 27829, 27841, 24942, 24939, 28010, 28011, 28012, 28013, 28014, 28015, 28016, 28017, 28018, 28019, 28020, 28021, 28022, 28023, 28024, 28025, 28026, 28027, 28028, 28029, 28030, 28031, 28032, 28033, 28034, 28035, 28036, 28037, 28038, 28039, 28040, 28041, 28042, 28043, 28044, 28045, 28046, 28047, 28048, 28049, 28050, 28051, 28052, 28053, 28054, 28055, 28056, 28057, 28058, 28059, 28060, 28061, 28062, 28063, 28064, 28065, 28066, 28067, 28068, 28069, 28070, 28071, 28072, 28073, 28074, 28075, 28076, 28077, 28078, 28079, 28081, 28082, 28084, 28085, 28087, 28088, 28090, 28091, 28092, 28093, 28094, 28095, 28096, 28097, 28098, 28099, 28100, 28101, 28102, 28103, 28104, 28105, 28106, 28107, 28108, 28109, 28110, 28111, 28112, 28113, 28114, 28115, 28116, 28117, 28118, 28119, 28120, 28121, 28122, 28123, 28124, 28125, 28126, 28127, 28128, 28129, 28130, 28131, 28132, 28133, 28134, 28135, 28136, 28137, 28138, 28139, 28141, 28142, 28144, 28145, 28146, 28147, 28148, 28149, 28150, 28151, 28152, 28153, 28154, 28156, 28157, 28159, 28160, 28161, 28162, 28163, 28164, 28165, 28166, 28167, 28168, 28169, 28170, 28171, 28172, 28174, 28175, 28177, 28178, 28179, 28180, 28181, 28182, 28183, 28184, 28185, 28186, 28187, 27932, 27944, 28188, 28189, 28191, 28192, 28193, 28194, 28195, 28197, 28198, 28200, 28201, 28203, 28204, 28205, 28206, 28207, 28208, 28209, 28210, 28211, 28212, 28213, 28214, 28215, 28216, 28217, 28218, 28219, 28221, 28222, 28224, 28225, 28227, 28228, 28229, 28230, 28231, 28232, 28233, 28234, 28235, 28236, 28237, 28238, 28239, 28240, 28241, 28242, 28243, 28244, 28246, 
28247, 28249, 28250, 28252, 28253, 28254, 28255, 28256, 28257, 28258, 28259, 28260, 28261, 28262, 28263, 28264, 28265, 28266, 28267, 28268, 28269, 28270, 28271, 28273, 28274, 28275, 28276, 28277, 28278, 28279, 28280, 28281, 28282, 28283, 28284, 28285, 28286, 28287, 28288, 28289, 28290, 28291, 28292, 28294, 28295, 28296, 28297, 28298, 28299, 28300, 28301, 28302, 28303, 28304, 28305, 28306, 28307, 28308, 28309, 28310, 28311, 28314, 28315, 28316, 28317, 28318, 28319, 28320, 28321, 28322, 28323, 28324, 28325, 28326, 28327, 28328, 28329, 28330, 28331, 28332, 28333, 28334, 28335, 28336, 28337, 28338, 28339, 28340, 28341, 28342, 28343, 28344, 28345, 28346, 28347, 28348, 28349, 13, 14, 15, 28352, 28354, 28356, 28358, 28360, 28362, 28364, 28367, 28370, 28372, 28378, 28380, 28386, 28388, 28390, 28392, 28394, 28397, 28400, 28402, 28404, 28406, 28408, 28411, 28414, 28421, 28423, 28427, 28429, 28431, 28433, 28437, 28439, 28441, 28445, 28448, 28450, 28454, 28457, 28459, 28461, 28463, 28466, 28472, 28475, 28478, 28481, 28484, 28486, 28488, 28491, 28493, 28495, 28498, 28500, 28502, 28504, 28507, 28509, 28512, 28514, 28516, 28518, 28520, 28523, 28525, 28527, 28529, 28531, 28533, 28535, 28537, 28539, 28541, 28543, 28545, 28547, 28549, 28551, 28553, 28555, 28559, 28561, 28563, 28566, 28568, 28570, 28573, 28575, 28577, 28579, 28582, 28584, 28587, 28589, 28591, 28593, 28596, 28599, 28601, 28603, 28605, 28607, 28609, 28611, 28613, 28616, 28618, 28620, 28622, 28628, 28630, 28632, 28634, 28636, 28638, 28641, 28643, 28645, 28647, 28650, 28652, 28654, 28656, 28659, 28661, 28663, 28665, 28667, 28672, 28675, 28678, 28681, 28685, 28687, 28692, 28697, 28700, 28703, 28705, 28711, 28714, 28717, 28720, 28723, 28725, 28727, 28729, 28731, 28733, 28735, 28737, 28739, 28742, 28745, 28747, 28749, 28751, 28753, 28756, 28760, 28762, 28766, 28768, 28770, 28773, 28776, 28781, 28783, 28786, 28788, 28790, 28792, 28794, 28796, 28799, 28801, 28807, 28809, 28812, 28814, 28817, 28819, 28821, 28824, 28826, 28828, 
28830, 28832, 28835, 28838, 28840, 28842, 28845, 28848, 28851, 28854, 28856, 28858, 28860, 28862, 28864, 28867, 28869, 28871, 28873, 28875, 28877, 28880, 28883, 28888, 28890, 28892, 28897, 28899, 28901, 28903, 28905, 28907, 28909, 28914, 28916, 28921, 28923, 28926, 28929, 28932, 28935, 28938, 28941, 28943, 28945, 28948, 28951, 28953, 28955, 28958, 28961, 28963, 28965, 28967, 28969, 28972, 28975, 28977, 28979, 28981, 28983, 28985, 28987, 28989, 28991, 28993, 28995, 28997, 29000, 29003, 29005, 29007, 29009, 29011, 29013, 29015, 29017, 29020, 29023, 29025, 29027, 29030, 29033, 29036, 29038, 29040, 29046, 29048, 29050, 29052, 29058, 29061, 29066, 29068, 29070, 29073, 29076, 29078, 29080, 29083, 29086, 29089, 29092, 29095, 29097, 29099, 29101, 29103, 29106, 29109, 29111, 29113, 29115, 29117, 29119, 29122, 29125, 29127, 29129, 29131, 29134, 29137, 29139, 29141, 29143, 29145, 29147, 29150, 29152, 29154, 29156, 29159, 29162, 29164, 29166, 29168, 29170, 29172, 29174, 29177, 29180, 29182, 29184, 29187, 29190, 29192, 29194, 29196, 29198, 29200, 29202, 29204, 29206, 29208, 29210, 29212, 29214, 29217, 29220, 29222, 29225, 29227, 29229, 29231, 29233, 29235, 29237, 29239, 29241, 29243, 29246, 29248, 29251, 29253, 29255, 29257, 29260, 29263, 29266, 29268, 29270, 29273, 29276, 29279, 29282, 29286, 29288, 29290, 29292, 29294, 29296, 29300, 29303, 29305, 29307, 29309, 29312, 29314, 29316, 29318, 29321, 29323, 29325, 29327, 29329, 29332, 29334, 29336, 29339, 29342, 29345, 29347, 29349, 29351, 29353, 29355, 29358, 29360, 29362, 29364, 29366, 29368, 29371, 29374, 29377, 29380, 29383, 29386, 29388, 29390, 29392, 29394, 29396, 29398, 29400, 29404, 29407, 29409, 29411, 29414, 29417, 29419, 29423, 29426, 29429, 29431, 29433, 29435, 29439, 29442, 29445, 29448, 29450, 29452, 29455, 29458, 29460, 29462, 29464, 29466, 29469, 29472, 29474, 29476, 29480, 29483, 29485, 29487, 29489, 29491, 29494, 29497, 29499, 29501, 29504, 29507, 29509, 29511, 29513, 29515, 29525, 29527, 29529, 29531, 29533, 
29543, 29545, 29547, 29550, 29557, 29560, 29563, 29565, 29567, 29572, 29574, 29576, 29578, 29580, 29582, 29584, 29586, 29588, 29590, 29610, 29613, 29616, 29618, 29620, 29622, 29624, 29626, 29628, 29631, 29635, 29637, 29640, 29643, 29645, 29647, 29650, 29653, 29656, 29659, 29662, 29665, 29667, 29669, 29672, 29674, 29676, 29679, 29682, 29684, 29686, 29689, 29692, 29694, 29700, 29703, 29706, 29708, 29710, 29713, 29716, 29719, 29726, 29729, 29732, 29734, 29736, 29743, 29746, 29749, 29751, 29755, 29758, 29760, 29762, 29766, 29768, 29770, 29773, 29776, 29778, 29782, 29784, 29787, 29789, 29791, 29793, 29796, 29798, 29804, 29811, 29813, 29815, 29818, 29821, 29824, 29827, 29829, 29831, 29834, 29836, 29838, 29841, 29843, 29845, 29847, 29849, 29851, 29853, 29855, 29858, 29861, 29863, 29865, 29867, 29870, 29873, 29877, 29879, 29881, 29883, 29889, 29895, 29898, 29901, 29903, 29905, 29908, 29911, 29913, 29915, 29918, 29920, 29923, 29926, 29928, 29930, 29933, 29935, 29937, 29939, 29941, 29943, 29946, 29951, 29954, 29956, 29958, 29960, 29962, 29965, 29968, 29971, 29974, 29977, 29979, 29981, 29983, 29985, 29988, 29990, 29992, 29994, 29996, 29998, 30000, 30002, 30004, 30006, 30008, 30010, 30012, 30015, 30019, 30021, 30024, 30027, 30030, 30034, 30036, 30038, 30040, 30042, 30044, 30047, 30050, 30053, 30055, 30057, 30060, 30063, 30065, 30067, 30069, 30071, 30073, 30077, 30080, 30084, 30086, 30088, 30090, 30096, 30099, 30102, 30104, 30106, 30108, 30113, 30115, 30117, 30119, 30121, 30123, 30125, 30127, 30129, 30132, 30134, 30136, 30138, 30140, 30143, 30150, 30153, 30156, 30159, 30162, 30164, 30166, 30169, 30172, 30175, 30178, 30181, 30184, 30186, 30188, 30191, 30194, 30197, 30200, 30202, 30204, 30206, 30209, 30211, 30213, 30216, 30218, 30221, 30223, 30226, 30228, 30230, 30232, 30236, 30239, 30241, 30243, 30245, 30249, 30252, 30254, 30256, 30258, 30260, 30263, 30265, 30267, 30269, 30272, 30274, 30277, 30280, 30282, 30284, 30288, 30291, 30294, 30299, 30301, 30307, 30309, 30313, 30316, 
30319, 30322, 30325, 30330, 30333, 30335, 30337, 30339, 30342, 30345, 30350, 30352, 30354, 30357, 30370, 30373, 30376, 30378, 30380, 30382, 30384, 29826, 29823, 29826, 29823, 30395, 30398, 30401, 30403, 30405, 30407, 30409, 30412, 30415, 30418, 30420, 30422, 30423, 29725, 29742, 28377, 28375, 29723, 29740, 28385, 28383, 28420, 28418, 28425, 28444, 28453, 30424, 30426, 30428, 30430, 30431, 28469, 28471, 28470, 24423, 24421, 24423, 24424, 24422, 24424, 30432, 30433, 28626, 28626, 30434, 30436, 30438, 30440, 30442, 30444, 30446, 30448, 30449, 24247, 28671, 24247, 28691, 28696, 28710, 28708, 28896, 28780, 28785, 28866, 28806, 28804, 30450, 30451, 28866, 28913, 28887, 28896, 28913, 24263, 24262, 29044, 29056, 29065, 30452, 30453, 26386, 30454, 30455, 29403, 29422, 29438, 29479, 29520, 29518, 29524, 29522, 29538, 29536, 29542, 29540, 29556, 29554, 29571, 29699, 29595, 29593, 29599, 29597, 29603, 29601, 29607, 24375, 24374, 26673, 26671, 29699, 29697, 29725, 29723, 29742, 29740, 29754, 29781, 29801, 29803, 29810, 29808, 29781, 29801, 29803, 29810, 29808, 26951, 24424, 24423, 24422, 24421, 29892, 29894, 29893, 29950, 27176, 27174, 30095, 30093, 30112, 30149, 30147, 30208, 30215, 24505, 24504, 30287, 30298, 30306, 24505, 24504, 24506, 24507, 30329, 30349, 24506, 24507, 30329, 24507, 30329, 30349, 30287, 30298, 30306, 30304, 24505, 24504, 24507, 30329, 30349, 30456, 30458, 30460, 30463, 30467, 30469, 30471, 30474, 30477, 30479, 30485, 30487, 30489, 30492, 30495, 30497, 30501, 30503, 30505, 30507, 30509, 30511, 30513, 30516, 30518, 30520, 30522, 30524, 30526, 30529, 30531, 30533, 30535, 30537, 30539, 30541, 30544, 30546, 30548, 30550, 30552, 30554, 30556, 30561, 30564, 30567, 30569, 30571, 30573, 30575, 30577, 30580, 30583, 30585, 30587, 30589, 30591, 30593, 30595, 30597, 30599, 30601, 30603, 30605, 30607, 30609, 30611, 30613, 30615, 30618, 30621, 30624, 30626, 30630, 30633, 30637, 30639, 30641, 30643, 30645, 30647, 30649, 30651, 30654, 30657, 30659, 30661, 30663, 30665, 
30667, 30669, 30671, 30673, 30675, 30677, 30679, 30681, 30683, 30685, 30687, 30689, 30691, 30693, 30695, 30698, 30701, 30703, 30705, 30707, 30709, 30711, 30713, 30715, 30717, 30719, 30721, 30723, 30725, 30727, 30729, 30731, 30733, 30735, 30737, 30739, 30741, 30743, 30745, 30748, 30751, 30754, 24891, 24879, 30362, 24891, 24879, 24892, 24886, 30558, 24892, 24891, 24891, 24879, 24891, 24879, 24892, 24886, 30558, 24892, 24891, 30757, 30759, 30761, 30764, 30767, 30769, 30771, 30774, 30777, 30779, 30781, 30784, 30786, 30789, 30791, 30793, 30795, 30797, 30800, 30803, 30806, 30808, 30810, 30812, 30814, 30817, 30820, 30822, 30824, 30827, 30829, 30831, 30833, 30835, 30837, 30838, 30839, 30840, 30841, 30484, 30466, 30484, 30482, 30500, 30515, 30528, 30560, 30629, 30629, 27801, 30842, 30843, 30844, 30845, 30846, 30849, 30851, 30853, 30855, 30857, 30860, 30863, 30865, 30867, 30870, 30872, 30874, 30877, 30879, 30881, 30884, 30886, 30888, 30891, 30893, 30895, 30898, 30901, 30903, 30905, 30907, 30909, 30912, 30915, 30917, 30919, 30921, 30923, 30926, 30929, 30932, 30935, 30938, 30940, 30942, 30945, 30948, 30950, 30952, 30955, 30958, 30961, 30964, 30967, 30969, 30971, 30973, 30975, 30977, 30979, 30984, 30986, 30988, 30990, 30992, 30994, 30996, 30998, 31000, 31002, 31004, 31006, 31008, 31010, 31015, 31016, 31017, 31020, 31022, 31024, 31026, 31031, 31033, 31035, 31037, 31039, 31043, 31045, 31047, 31049, 31051, 31053, 31055, 31057, 31059, 31062, 31065, 31067, 31069, 30848, 30983, 31075, 31077, 31079, 31082, 31084, 31086, 31089, 31091, 31093, 31095, 31097, 31099, 31102, 31104, 31106, 31109, 31111, 31113, 31115, 31117, 31119, 31122, 31074, 25301, 25300, 31074, 25301, 25300, 31074, 25301, 25300, 31128, 25301, 25300, 31129, 31132, 31135, 31138, 31141, 31144, 31147, 31150, 31153, 31156, 31159, 31162, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 31946, 31947, 31948, 31949, 24159, 24158, 24159, 24158, 24244, 24245, 31961, 31963, 24169, 24168, 31964, 24169, 24168, 25348, 25345, 28369, 28366, 
31177, 31965, 31966, 31967, 31968, 31179, 31969, 31970, 31181, 31183, 28399, 28396, 31187, 31189, 28413, 28410, 28416, 31971, 31972, 31194, 31973, 31347, 28816, 31350, 28426, 31195, 24259, 24258, 28837, 28834, 24260, 24261, 28847, 28435, 28436, 28853, 31199, 25417, 31347, 28816, 31350, 28823, 31353, 24259, 24258, 28837, 28834, 24260, 24261, 28847, 28435, 28436, 28853, 31199, 25426, 31201, 28447, 31974, 31204, 31975, 31205, 28456, 29189, 31207, 31421, 29002, 28999, 26025, 31426, 26032, 26038, 25449, 25452, 30155, 30152, 30161, 28465, 25458, 27270, 30171, 30168, 30177, 30174, 30183, 30180, 31875, 30190, 30196, 30193, 30199, 31979, 31981, 31982, 31983, 28477, 28474, 28483, 28480, 31216, 28490, 25491, 25488, 28497, 31222, 31224, 31225, 28511, 29826, 31228, 24211, 24210, 28522, 31233, 31235, 31237, 31239, 25538, 24207, 24206, 31244, 31246, 25553, 28557, 31249, 31251, 31984, 31985, 31986, 24421, 31254, 31255, 24211, 24210, 28581, 31260, 31261, 24213, 24212, 28595, 28598, 31267, 31269, 31271, 31987, 31988, 31989, 24422, 31273, 31274, 25622, 25619, 28624, 31990, 31880, 31881, 31913, 27322, 27312, 30290, 30293, 30296, 31992, 31920, 31880, 31881, 31913, 27322, 27312, 30290, 30293, 30296, 31993, 31920, 31390, 28928, 28925, 28934, 28931, 28937, 28940, 31398, 28950, 28947, 31279, 29179, 29176, 31390, 28928, 28925, 28934, 28931, 28940, 28937, 31398, 28950, 28947, 31492, 29179, 29176, 31280, 31280, 31481, 31482, 31484, 29161, 29158, 31488, 31490, 31492, 29179, 29176, 31282, 28640, 29186, 31500, 31502, 31504, 31506, 31508, 31510, 24244, 24245, 32001, 31287, 31288, 31289, 31291, 31292, 31566, 24245, 24244, 31295, 32003, 24246, 32004, 28674, 28677, 28683, 28680, 31301, 32005, 24246, 32006, 28694, 32007, 28702, 28699, 31307, 32008, 32009, 28716, 28713, 28722, 28719, 31313, 31315, 31317, 31319, 28744, 28741, 31323, 31325, 28758, 28755, 31328, 31329, 32010, 28764, 31331, 28798, 28772, 28775, 32011, 28778, 31336, 32012, 31337, 28894, 32013, 31339, 28798, 24251, 24250, 31344, 32014, 
32015, 31346, 32016, 31347, 28816, 31350, 28823, 31353, 24259, 24258, 28837, 28834, 24261, 24260, 28847, 28844, 28853, 28850, 31365, 31367, 31369, 28894, 32018, 31371, 31373, 31375, 32019, 28911, 28885, 28882, 32020, 31379, 32021, 28894, 31382, 31384, 31386, 32022, 28911, 31389, 32023, 32024, 31390, 28928, 28925, 28934, 28931, 28940, 28937, 31398, 28950, 28947, 31402, 28960, 28957, 31406, 31408, 28974, 28971, 25992, 31413, 25999, 26005, 26002, 26008, 31496, 29189, 29186, 31419, 31421, 29002, 28999, 26025, 31426, 26032, 26038, 26035, 26041, 29219, 29019, 29022, 31434, 29029, 29032, 29035, 31439, 29042, 32025, 31441, 31443, 29054, 32026, 31445, 29219, 29060, 29063, 32027, 31517, 31448, 29072, 29075, 31452, 29082, 29088, 29085, 29091, 29094, 31459, 29105, 24285, 24284, 29108, 26139, 26136, 26145, 26142, 31469, 31470, 31471, 31473, 24294, 31475, 31476, 31478, 24295, 31481, 31482, 31484, 29161, 29158, 31488, 31490, 31492, 29179, 29176, 31496, 29189, 29186, 31500, 31502, 31504, 31506, 31508, 31510, 29219, 29216, 26254, 26251, 31515, 31517, 31519, 26268, 31522, 26275, 29245, 31525, 29250, 31527, 26293, 26290, 29262, 29259, 29265, 31534, 29272, 29278, 29275, 29281, 29284, 31540, 31542, 31544, 29298, 31546, 31547, 31549, 29311, 31552, 31554, 31555, 31557, 24323, 24322, 32028, 31570, 29331, 31561, 29341, 29338, 32030, 26384, 31566, 24323, 24322, 32031, 31570, 31571, 31573, 26415, 26412, 29373, 29370, 29379, 29376, 29385, 29382, 31583, 26443, 26440, 31587, 31589, 29406, 32033, 31592, 29416, 29413, 31596, 32034, 29428, 29425, 24341, 24340, 31602, 32035, 29444, 29441, 29447, 31607, 29457, 29454, 31611, 31613, 29471, 29468, 26528, 24343, 24342, 29482, 32036, 31621, 31623, 29496, 29493, 31627, 29506, 29503, 26563, 26560, 26570, 24371, 24370, 32037, 32038, 32039, 32040, 26584, 26581, 26591, 24371, 24370, 32041, 32042, 32043, 32044, 31641, 24359, 24358, 29552, 29549, 32045, 32046, 29559, 29562, 24361, 24360, 26627, 26624, 32047, 32048, 31650, 29615, 29612, 31652, 31653, 26646, 
26643, 26653, 24371, 24370, 32049, 32050, 32051, 32052, 32053, 32054, 32055, 32056, 32057, 32058, 32059, 29615, 29612, 31662, 31664, 26693, 26690, 29633, 29630, 31669, 29642, 29639, 31673, 29649, 29655, 29652, 29658, 29664, 29661, 29671, 24385, 24384, 29678, 24387, 24386, 29681, 26753, 26750, 29691, 29688, 31692, 32060, 32061, 29705, 29702, 31696, 29715, 29712, 29721, 29718, 32062, 32063, 29731, 29728, 31704, 29738, 32064, 32065, 29748, 29745, 31709, 32066, 26841, 26838, 29775, 29772, 26853, 26850, 32067, 26826, 26825, 31711, 31724, 29795, 31713, 32068, 29806, 32069, 32070, 32071, 26841, 26838, 29775, 29772, 26853, 26850, 32072, 26861, 26858, 31722, 31724, 29795, 31727, 32073, 29806, 32074, 32075, 32076, 26892, 26889, 29820, 29817, 29826, 29823, 26910, 26907, 29833, 31739, 29840, 31742, 31744, 31746, 26935, 29860, 29857, 31751, 31753, 32077, 29875, 29872, 31756, 31758, 24420, 32078, 32079, 32080, 32081, 24425, 32082, 32083, 32084, 29900, 29897, 26990, 26987, 29910, 29907, 27002, 26999, 29917, 27008, 29925, 29922, 27020, 27017, 29932, 27026, 31778, 31780, 29945, 29948, 32085, 31783, 31784, 31785, 27057, 27054, 29967, 29964, 29973, 29970, 29976, 31794, 29987, 24455, 24454, 31799, 31801, 24459, 24458, 27098, 24459, 24458, 27105, 31809, 30017, 30014, 31812, 30023, 30029, 30026, 30032, 31817, 31819, 31821, 30046, 30052, 30049, 31826, 30062, 30059, 31830, 31832, 31834, 32086, 32087, 30082, 30079, 31837, 31839, 24466, 32088, 32089, 30101, 30098, 31844, 24467, 32090, 30110, 31848, 27222, 27219, 31852, 30131, 24473, 24472, 27239, 27236, 27245, 27242, 30145, 30142, 32091, 32092, 30155, 30152, 30161, 30158, 27273, 27270, 30171, 30168, 30177, 30174, 30183, 30180, 31875, 30190, 30196, 30193, 30199, 31880, 31881, 31913, 27322, 27312, 30290, 30293, 30296, 32093, 31920, 31911, 31912, 31885, 27325, 27322, 32094, 31888, 32095, 32096, 30315, 31890, 31891, 31893, 27398, 27395, 32097, 30293, 30235, 32098, 31920, 32099, 31922, 32100, 32101, 30238, 30321, 30247, 32102, 32103, 32104, 
30327, 30251, 31901, 24497, 24496, 24492, 24493, 32105, 31898, 30356, 30359, 30321, 30247, 32106, 32107, 32108, 30327, 30251, 31901, 24497, 24496, 24493, 24492, 30347, 31936, 30359, 24495, 24494, 30321, 30318, 32109, 24506, 32110, 30327, 30332, 31930, 24497, 24496, 24499, 24498, 30271, 32111, 31936, 30359, 30356, 31911, 31912, 31913, 27398, 27395, 30290, 32112, 30296, 30293, 32113, 31920, 32114, 32115, 31922, 32116, 32117, 30315, 30321, 30318, 32118, 24506, 32119, 30327, 30332, 31930, 27452, 30344, 30341, 32120, 30347, 31936, 30359, 30356, 32252, 32253, 32254, 32255, 32256, 32257, 32258, 32259, 32260, 32261, 30375, 30372, 27496, 27493, 31944, 27503, 32262, 32263, 32264, 32265, 32266, 32267, 32268, 32269, 32270, 30400, 30397, 24951, 31954, 24952, 30414, 30411, 24954, 24953, 30417, 31994, 31977, 27547, 31994, 27557, 24922, 24921, 32305, 27567, 27564, 32307, 32239, 27574, 32242, 27903, 27592, 27589, 30476, 30473, 27604, 24854, 24853, 32310, 30462, 32132, 30494, 30491, 27624, 27584, 32311, 27592, 27589, 30476, 30473, 27604, 27601, 32312, 32313, 32132, 30494, 30491, 27624, 27621, 32314, 27633, 24870, 24869, 32141, 24891, 24879, 32315, 27649, 27646, 32147, 24891, 24879, 32316, 27665, 27662, 32153, 24892, 24886, 30543, 27682, 27679, 32160, 30558, 24892, 24891, 32317, 30566, 30563, 27707, 27704, 32169, 27714, 30582, 30579, 32174, 32176, 27731, 32179, 32318, 30635, 30632, 32196, 27740, 27746, 27743, 32201, 32203, 27801, 30656, 32186, 27758, 27755, 30620, 30617, 30623, 27773, 27770, 32319, 30635, 30632, 32196, 27787, 32199, 32201, 32203, 32320, 30656, 32206, 32208, 32210, 24920, 24919, 27827, 24922, 24921, 32321, 27837, 27834, 32323, 32219, 27848, 32222, 27855, 30700, 30697, 24951, 32228, 24952, 30750, 30747, 24954, 24953, 30756, 27883, 27880, 27889, 27886, 32239, 27896, 32242, 27903, 32245, 24968, 24967, 30750, 30747, 30756, 30753, 32272, 30763, 30766, 32276, 30773, 30776, 32280, 30783, 27957, 30788, 25145, 25144, 32285, 32287, 30802, 30799, 25147, 25146, 30805, 32292, 
32294, 30819, 30816, 32298, 28001, 30826, 32302, 32304, 32420, 32327, 32329, 30862, 30859, 32333, 30869, 32336, 30876, 32339, 30883, 32342, 30890, 32345, 30900, 30897, 32349, 32351, 30914, 30911, 28083, 28080, 28089, 28086, 30928, 30925, 30931, 30937, 30934, 32364, 30947, 30944, 30954, 25216, 25215, 30960, 30957, 30966, 30963, 32375, 28143, 28140, 32379, 32421, 30981, 28158, 28155, 32384, 32386, 32388, 28176, 28173, 32392, 25281, 25280, 32444, 32445, 32446, 28190, 32398, 28199, 28196, 28202, 32447, 32448, 32449, 25288, 32404, 25281, 25280, 32430, 32439, 28223, 28220, 28226, 32411, 25288, 32414, 25289, 32416, 32439, 28248, 28245, 28251, 32450, 32451, 32452, 32423, 31081, 32426, 31088, 28272, 32430, 32432, 31101, 32435, 31108, 28293, 32439, 32441, 31124, 31121, 32453, 32454, 32455, 31134, 31131, 31140, 31137, 31146, 31143, 31152, 31149, 31158, 31155, 31164, 31161, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 32480, 32482, 32484, 32485, 32486, 32487, 32488, 32489, 32492, 32493, 32495, 32496, 32497, 32498, 32499, 32500, 32501, 32502, 32506, 32507, 32509, 32510, 32511, 32512, 32513, 32514, 32515, 32516, 32517, 32518, 32520, 32522, 32523, 32524, 32525, 32526, 32527, 32528, 32529, 32530, 32531, 32532, 32533, 32534, 32535, 32536, 32537, 32538, 32539, 32540, 32541, 32542, 32543, 32544, 32545, 32546, 32547, 32548, 32549, 32550, 32551, 32552, 32553, 32554, 32555, 32556, 32557, 32559, 32561, 32562, 32563, 32564, 32565, 32566, 32567, 32568, 32569, 32570, 32571, 32572, 32573, 32574, 32575, 32576, 32577, 32578, 32579, 32580, 32581, 32582, 32583, 32584, 32585, 32586, 32587, 32588, 32589, 32590, 32593, 32595, 32596, 32597, 32598, 32599, 32600, 32601, 32602, 32603, 32604, 32605, 32606, 32607, 32608, 32609, 32610, 32611, 32612, 32613, 32614, 32615, 32616, 32617, 32618, 32619, 32620, 32621, 32622, 32623, 32624, 32625, 32626, 32629, 32630, 32631, 32632, 32633, 32634, 32635, 32636, 32637, 32638, 32639, 32640, 32641, 32642, 32643, 32644, 32647, 32648, 32649, 32650, 32651, 32652, 32654, 32655, 
32656, 32657, 32658, 32659, 32660, 32661, 32663, 32664, 32665, 32666, 32667, 32668, 32669, 32670, 32671, 32673, 32674, 32675, 32676, 32677, 32678, 32679, 32680, 32681, 32682, 32683, 32684, 32685, 32686, 32687, 32688, 32689, 32690, 32691, 32692, 32693, 32694, 32695, 32696, 32697, 32698, 32699, 32700, 32701, 32702, 32703, 32704, 32705, 32706, 32707, 32708, 32709, 32710, 32711, 32712, 32713, 32714, 32715, 32716, 32717, 32718, 32719, 32720, 32721, 32722, 32724, 32725, 32726, 32727, 32728, 32729, 32730, 32731, 32732, 32734, 32736, 32737, 32738, 32739, 32740, 32742, 32744, 32746, 32747, 32748, 32749, 32751, 32752, 32753, 32754, 32755, 32756, 32757, 32758, 32759, 32760, 32761, 32762, 32763, 32764, 32765, 32766, 32768, 32769, 32770, 32771, 32772, 32774, 32775, 32777, 32778, 32780, 32781, 32782, 32783, 32784, 32785, 32787, 32789, 32790, 32791, 32792, 32793, 32794, 32795, 32796, 32797, 32798, 32799, 32800, 32801, 32802, 32803, 32804, 32805, 32806, 32807, 32809, 32810, 32811, 32813, 32814, 32815, 32817, 32819, 32820, 32821, 32822, 32824, 32825, 32826, 32828, 32829, 32830, 32831, 32832, 32833, 32834, 32835, 32836, 32837, 32838, 32839, 32840, 32841, 32842, 32843, 32844, 32845, 32846, 32847, 32848, 32849, 32850, 32851, 32852, 32853, 32854, 32855, 32856, 32857, 32858, 32859, 32860, 32861, 32862, 32863, 32864, 32865, 32866, 32867, 32868, 32869, 32870, 32871, 32872, 32874, 32875, 32876, 32878, 32879, 32880, 32881, 32883, 32884, 32885, 32886, 32887, 32888, 32889, 32890, 32891, 32892, 32893, 32894, 32895, 32896, 32897, 32898, 32899, 32900, 32901, 32902, 32903, 32904, 32905, 32906, 32907, 32908, 32909, 32910, 32911, 32912, 32913, 32914, 32915, 32916, 32917, 32918, 32919, 32920, 32921, 32922, 32923, 32924, 32925, 32926, 32927, 32928, 32929, 32930, 32931, 32932, 32933, 32934, 32935, 32936, 32937, 32938, 32939, 32940, 32941, 32942, 32943, 32944, 32945, 32946, 32947, 32948, 32949, 32950, 32951, 32952, 32953, 32954, 32955, 32956, 32957, 32958, 32959, 32960, 32961, 32962, 32963, 32964, 
32965, 32966, 32967, 32968, 32970, 32971, 32972, 32973, 32974, 32976, 32977, 32978, 32979, 32981, 32982, 32983, 32984, 32985, 32986, 32987, 32988, 32989, 32990, 32991, 32992, 32993, 32994, 32995, 32996, 32997, 32999, 33000, 33001, 33002, 33004, 33005, 33006, 33007, 33008, 33010, 33011, 33012, 33013, 33014, 33015, 33016, 33017, 33018, 33019, 33020, 33021, 33022, 33023, 33025, 33026, 33027, 33028, 33029, 33030, 33031, 33032, 33033, 33034, 33035, 33036, 33037, 33039, 33041, 33042, 33043, 33044, 33045, 33046, 33048, 33050, 33051, 33052, 33053, 33054, 33055, 33057, 33058, 33059, 33060, 33061, 33062, 33063, 33065, 33066, 33067, 33068, 33069, 33070, 33071, 33072, 33073, 33074, 33075, 33077, 33079, 33081, 33084, 33086, 33087, 33088, 33089, 33090, 33091, 33092, 33093, 33094, 33095, 33096, 33097, 33098, 33099, 33100, 33101, 33102, 33103, 33104, 33105, 33106, 33107, 33108, 33109, 33110, 33111, 33112, 33113, 33114, 33115, 33116, 33118, 33119, 33120, 33121, 33122, 33123, 33124, 33125, 33127, 33128, 33129, 33130, 33131, 33133, 33134, 33135, 33137, 33138, 33139, 33140, 33141, 33142, 33144, 33145, 33146, 33147, 33148, 33149, 33151, 33153, 33155, 33156, 33157, 33158, 33159, 33160, 33162, 33163, 33164, 33165, 33166, 33167, 33169, 33171, 33173, 33174, 33175, 33176, 33177, 33178, 33179, 33180, 33181, 33182, 33183, 33184, 33185, 33186, 33187, 33188, 33189, 33190, 33191, 33193, 33194, 33195, 33196, 33197, 33198, 33200, 33202, 33204, 33206, 33207, 33208, 33209, 33210, 33211, 33212, 33213, 33214, 33215, 33216, 33217, 33218, 33219, 33220, 33221, 33222, 33223, 33224, 33225, 33227, 33228, 33229, 33230, 33231, 33232, 33233, 33234, 33235, 33236, 33237, 33238, 33239, 33240, 33241, 33242, 33243, 33244, 33245, 33246, 33247, 33248, 33249, 33250, 33251, 33252, 33253, 33254, 33255, 33256, 33257, 33258, 33259, 33260, 33261, 33262, 33263, 33264, 33265, 33266, 33267, 33268, 33269, 33271, 33272, 33273, 33274, 33275, 33276, 33278, 33279, 33280, 33281, 33283, 33284, 33285, 33286, 33287, 33288, 33289, 
33290, 33291, 33292, 33293, 33294, 33295, 33296, 33297, 33299, 33300, 33301, 33302, 33303, 33304, 33305, 33306, 33307, 33308, 33309, 33310, 33311, 33312, 33313, 33314, 33315, 33316, 33317, 33318, 33319, 33320, 33321, 33322, 33323, 33325, 33326, 33327, 33328, 33329, 33330, 33332, 33333, 33335, 33336, 33337, 33338, 33339, 33340, 33342, 33343, 33345, 33347, 33348, 33350, 33351, 33352, 33353, 33356, 33357, 33358, 33359, 33360, 33361, 33362, 33364, 33365, 33366, 33367, 33368, 33369, 33372, 33373, 33374, 33375, 33376, 33377, 33378, 33379, 33380, 33381, 33382, 33383, 33384, 33385, 33387, 33389, 33390, 33391, 33392, 33393, 33394, 33395, 33396, 33398, 33399, 33400, 33401, 33402, 33403, 33404, 33405, 33406, 33408, 33409, 33411, 33412, 33414, 33415, 33417, 33418, 33419, 33421, 33423, 33424, 33425, 33426, 33427, 33428, 33430, 33431, 33432, 33433, 33434, 33437, 33439, 33441, 33444, 33445, 33446, 33447, 33448, 33449, 33450, 33452, 33454, 33456, 33459, 33460, 33461, 33462, 33463, 33464, 33465, 33466, 33467, 33468, 33469, 33470, 33471, 33472, 33473, 33474, 33475, 33477, 33478, 33479, 33480, 33481, 33482, 33483, 33484, 33485, 33486, 33487, 33488, 33489, 33490, 33492, 33493, 33494, 33495, 33496, 33497, 33499, 33500, 33501, 33502, 33503, 33504, 33505, 33507, 33508, 33509, 33510, 33511, 33513, 33514, 33515, 33516, 33517, 33518, 33520, 33521, 33522, 33523, 33524, 33526, 33527, 33528, 33529, 33530, 33531, 33532, 33533, 33534, 33535, 33536, 33537, 33539, 33540, 33541, 33542, 33543, 33544, 33545, 33546, 33547, 33548, 33549, 33550, 33552, 33553, 33554, 33555, 33556, 33557, 33558, 33559, 33560, 33561, 33562, 33563, 33564, 33565, 33566, 33567, 33568, 33569, 33571, 33572, 33573, 33574, 33575, 33576, 33577, 33579, 33580, 33581, 33582, 33583, 33584, 33585, 33586, 33587, 33589, 33590, 33591, 33592, 33593, 33594, 33595, 33596, 33597, 33598, 33599, 33600, 33601, 33602, 33603, 33604, 33605, 33606, 33607, 33608, 33609, 33610, 33611, 33612, 33613, 33614, 33615, 33616, 33617, 33618, 33619, 33620, 
33621, 33622, 33623, 33624, 33625, 33626, 33627, 33628, 33629, 33630, 33631, 33632, 33633, 33634, 33635, 33636, 33637, 33638, 33639, 33640, 33641, 33642, 33643, 33644, 33645, 33646, 33647, 33648, 33650, 33651, 33652, 33653, 33654, 33655, 33656, 33657, 33658, 33659, 33660, 33661, 33662, 33663, 33664, 33665, 33666, 33667, 33668, 33669, 33670, 33671, 33672, 33673, 33674, 33675, 33676, 33677, 33678, 33679, 33680, 33681, 33682, 33683, 33684, 33685, 33686, 33687, 33688, 33689, 33690, 33691, 33693, 33694, 33695, 33696, 33697, 33698, 33699, 33700, 33701, 33702, 33703, 33704, 33707, 33708, 33709, 33710, 33711, 33712, 33715, 33716, 33717, 33718, 33719, 33720, 33721, 33722, 33723, 33724, 33725, 33726, 33727, 33728, 33729, 33730, 33731, 33732, 33733, 33736, 33737, 33738, 33739, 33740, 33741, 33742, 33743, 33744, 33745, 33746, 33747, 33748, 33749, 33750, 33751, 33754, 33755, 33756, 33757, 33758, 33759, 33760, 33761, 33762, 33763, 33764, 33765, 9, 10, 11, 12, 13, 14, 15, 33778, 33780, 33782, 33784, 33786, 33788, 33790, 33798, 33802, 33812, 33814, 33816, 33818, 33820, 33829, 33831, 33833, 33835, 33837, 33842, 33845, 33849, 33854, 33857, 33859, 33861, 33863, 33865, 33867, 33871, 33875, 33877, 33881, 33890, 33898, 32628, 33910, 33915, 32646, 33926, 33932, 33935, 33941, 33944, 33948, 33950, 33952, 33955, 33958, 33961, 33963, 33965, 33968, 33971, 33978, 33983, 33986, 33994, 34002, 32733, 34008, 32741, 34013, 34017, 34019, 34025, 34029, 32767, 34035, 32773, 34041, 34043, 34054, 34056, 34058, 34060, 34062, 34067, 32812, 34072, 32818, 32823, 34083, 34085, 34087, 34090, 34093, 34097, 34102, 34106, 34110, 34115, 34118, 34131, 34140, 34145, 34149, 34151, 34165, 34170, 34173, 34181, 34183, 34195, 34197, 34202, 34218, 34223, 32975, 34227, 34232, 34234, 34236, 34238, 34241, 34245, 34247, 34250, 34252, 34255, 34259, 34263, 34265, 34268, 34271, 34274, 34276, 34278, 34283, 34285, 34291, 34293, 34298, 34300, 34304, 34308, 34310, 34316, 34318, 34322, 34324, 34327, 34331, 34334, 34336, 34339, 
34343, 34345, 34349, 34352, 34354, 34357, 34362, 34365, 34367, 34369, 34371, 34377, 34379, 34381, 34383, 34385, 34391, 34393, 34395, 34397, 34399, 34408, 34412, 34417, 34421, 34423, 34425, 34427, 34431, 34433, 34444, 34446, 34448, 34452, 34457, 34460, 34464, 34468, 34475, 34478, 34484, 34490, 33282, 34496, 34499, 34502, 34504, 34506, 34509, 34511, 34513, 34515, 34517, 34519, 34523, 34529, 34532, 34538, 34546, 34548, 34554, 33355, 34560, 34562, 34565, 34567, 33371, 34573, 34575, 34579, 34582, 33386, 33388, 34588, 34590, 34592, 34594, 34599, 34601, 34602, 34609, 33420, 33422, 34616, 33429, 34620, 34625, 34626, 34628, 34443, 34420, 34402, 34438, 34439, 34443, 34420, 34402, 34438, 34439, 34635, 34636, 34641, 34643, 33793, 34006, 34012, 33795, 34022, 33797, 33805, 32521, 33810, 33808, 33823, 33827, 33825, 33840, 32560, 33848, 33853, 33870, 34443, 33874, 33884, 33889, 33887, 34443, 34420, 34402, 33889, 34439, 33894, 33896, 33901, 33903, 33905, 33906, 33909, 33914, 33919, 33921, 33922, 33925, 33930, 33937, 33939, 33946, 34650, 34653, 33976, 33981, 33989, 33991, 33993, 33997, 33999, 34001, 34006, 34012, 34016, 34022, 34024, 34052, 34050, 34065, 32776, 34047, 32788, 34052, 34050, 34065, 34069, 34077, 34081, 34096, 34101, 34109, 34114, 34124, 34122, 34126, 34127, 34129, 34130, 34134, 34136, 34138, 34143, 34155, 34157, 34159, 34161, 34163, 34168, 34176, 34178, 34180, 34186, 34190, 34188, 34194, 34192, 34200, 34205, 34207, 34209, 34211, 34213, 34215, 34217, 34221, 34230, 33003, 33009, 34262, 34282, 34289, 34321, 34296, 34307, 34314, 34321, 34296, 34307, 34314, 34321, 34330, 34348, 34360, 33136, 33150, 34375, 33168, 34389, 34402, 34404, 34406, 34411, 34415, 34443, 34420, 34430, 34436, 34438, 34439, 34443, 34441, 34456, 34467, 34472, 34474, 34482, 34487, 34489, 34522, 34527, 34534, 34536, 34540, 34542, 34544, 34550, 34551, 34553, 34597, 34604, 34606, 34608, 34615, 34660, 34662, 34664, 33491, 34669, 34671, 34673, 34675, 34677, 34681, 34683, 34685, 34689, 34691, 34694, 34696, 
34699, 34702, 34705, 34708, 34710, 34714, 34720, 34724, 34731, 34733, 34736, 34738, 34749, 34751, 34754, 34761, 34766, 34768, 34771, 34773, 34780, 34782, 34784, 33436, 33525, 34701, 34631, 33519, 33525, 34701, 34640, 34796, 34800, 34802, 34807, 34810, 34716, 34648, 34716, 34659, 34657, 34713, 34718, 33551, 34727, 34729, 34742, 34744, 34745, 34747, 34760, 34758, 34765, 34778, 34776, 34816, 34827, 34831, 34833, 34835, 34837, 34840, 34843, 34845, 34848, 34850, 34853, 33692, 34857, 34862, 34865, 34867, 34787, 34788, 34790, 34791, 34793, 34795, 34870, 34873, 34876, 34880, 34805, 34813, 34889, 34892, 34814, 34819, 34821, 34823, 34825, 34830, 34860, 34906, 34908, 34869, 34900, 34886, 34874, 34878, 34879, 34886, 34884, 34887, 34902, 34888, 34896, 34894, 34898, 34902, 34900, 34904, 34909, 34911, 34913, 34915, 34917, 34919, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 34999, 35023, 35050, 35055, 35057, 35064, 35072, 35073, 35107, 35118, 35143, 35163, 35164, 35092, 35098, 35094, 35165, 35166, 34440, 35167, 35168, 35169, 35092, 35098, 35094, 35170, 35171, 34440, 35172, 35176, 34928, 34929, 32490, 34931, 34932, 34934, 35177, 35079, 35080, 35178, 34987, 34988, 35179, 34989, 34990, 35180, 34992, 35181, 35182, 33800, 34936, 35183, 35184, 35185, 35186, 34939, 34937, 34941, 35187, 35188, 35189, 34944, 34942, 34946, 35190, 34947, 35191, 34948, 35192, 35193, 33851, 33856, 34952, 34954, 34956, 33873, 35194, 35120, 35195, 35196, 34958, 33879, 34960, 35197, 35198, 35199, 34440, 35200, 35201, 35092, 35098, 35094, 35202, 35203, 34440, 35204, 33892, 35205, 35206, 34962, 35207, 35208, 35209, 35210, 34963, 35211, 33912, 35212, 33917, 35213, 35214, 35215, 34966, 35216, 33928, 35120, 35217, 34968, 34969, 35218, 35219, 34970, 34971, 35220, 35221, 34972, 34974, 34975, 34976, 34977, 34979, 34980, 34981, 33973, 33974, 35223, 34982, 35224, 34983, 34984, 35225, 35226, 35227, 32723, 35228, 35229, 35230, 32969, 35231, 34987, 34988, 35232, 34989, 34990, 35233, 34992, 35234, 35235, 34027, 34994, 35236, 35237, 
35002, 35000, 35004, 35238, 34995, 34996, 34997, 35239, 34998, 35240, 35241, 35242, 35243, 35002, 35000, 35004, 35244, 35005, 35245, 35006, 32816, 35008, 35246, 35009, 35247, 35010, 35012, 35013, 35014, 35248, 35249, 34099, 34104, 35017, 35250, 35251, 34112, 34117, 34120, 35252, 35253, 35254, 35255, 35256, 35257, 34133, 35258, 35259, 35260, 35022, 35261, 35024, 34153, 35262, 35263, 35264, 35265, 35266, 35026, 35267, 35027, 35028, 35268, 35269, 35270, 35030, 35271, 35272, 35273, 35274, 35275, 35032, 35276, 35033, 35277, 35278, 35279, 35280, 35281, 35282, 35283, 32969, 35284, 35035, 34226, 32980, 35285, 35038, 35040, 34240, 34243, 35043, 35044, 35286, 35046, 35287, 34257, 35048, 35288, 34269, 35052, 35053, 35289, 35290, 35066, 35291, 35068, 35069, 35292, 34297, 35059, 34302, 35062, 35293, 35065, 35294, 35066, 35295, 35068, 35069, 35296, 34297, 35075, 34302, 35062, 35297, 35065, 35298, 35066, 35299, 35068, 35069, 35300, 34333, 35075, 35301, 34351, 35078, 35079, 35302, 35080, 35303, 35082, 34373, 33143, 35304, 35305, 34378, 35087, 34387, 33161, 35306, 35307, 34392, 35092, 35094, 35308, 35309, 35310, 35095, 35311, 35096, 35312, 35097, 35116, 35313, 35314, 35105, 35098, 35101, 35315, 35103, 35316, 35317, 34440, 35318, 35319, 35320, 35105, 34450, 35321, 34462, 34459, 35110, 34470, 35322, 35323, 35324, 34477, 34480, 35325, 35114, 35326, 35327, 34492, 35116, 35117, 35120, 34508, 35123, 35125, 35127, 34525, 35328, 35329, 35129, 35130, 35330, 35331, 35131, 35332, 35333, 35334, 35132, 35133, 35335, 35336, 35337, 34558, 34556, 35136, 33363, 35138, 34571, 34569, 35141, 34577, 34586, 35145, 35147, 35149, 35150, 35338, 35151, 35153, 35339, 35340, 35341, 34613, 35155, 35342, 35158, 35159, 35345, 35354, 35361, 35372, 35376, 35382, 35383, 35384, 33538, 35162, 35385, 35386, 35387, 35388, 33538, 35389, 34638, 35395, 35396, 35397, 34655, 35398, 35399, 35344, 35347, 33498, 35350, 34679, 35352, 33512, 33519, 34693, 33525, 34698, 34701, 34704, 35363, 35400, 34716, 35401, 34722, 35402, 
35366, 35403, 35404, 35367, 34735, 34740, 33570, 35405, 35406, 35407, 35408, 35371, 34756, 35409, 35410, 35411, 34763, 35378, 35412, 35413, 35379, 35381, 35422, 35431, 35432, 35433, 35434, 35435, 35436, 34798, 35391, 34852, 35441, 35393, 35394, 35442, 35445, 35414, 35446, 35447, 35448, 35449, 35415, 35450, 35417, 35419, 35420, 35421, 34852, 34855, 35427, 35451, 35428, 35429, 28312, 35439, 35454, 35455, 34872, 28312, 34882, 35439, 35456, 35457, 35458, 35459, 34882, 34891, 35460, 35461, 35462, 35463, 35464, 34891, 28312, 35465, 35466, 35467, 35468, 35469, 35470, 35452, 28313, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 35499, 35501, 35502, 35496, 35503, 35506, 35508, 35510, 35511, 35496, 35512, 35515, 35518, 35519, 35520, 35521, 35522, 35523, 35525, 35526, 35528, 35529, 35531, 35532, 35534, 35537, 35538, 35541, 35543, 35544, 35545, 35547, 35549, 35550, 35551, 35553, 35555, 35558, 35559, 35560, 35561, 35562, 35563, 35565, 35566, 35568, 35569, 35496, 35570, 35574, 35575, 35577, 35578, 35496, 35579, 35582, 35584, 35587, 35592, 35594, 35596, 35600, 35602, 35603, 35605, 35606, 35609, 35610, 35613, 35614, 35615, 35616, 35617, 35618, 35619, 35620, 35621, 35622, 35624, 35626, 35627, 35631, 35635, 35637, 35638, 35640, 35641, 35643, 35646, 35647, 35648, 35650, 35651, 35652, 35654, 35655, 35656, 35658, 35488, 35661, 35663, 35664, 35665, 35667, 35669, 35670, 35671, 35673, 35675, 35676, 35677, 35678, 35681, 35682, 35683, 35686, 35687, 35688, 35689, 35695, 35699, 35489, 35701, 35702, 35708, 35710, 35711, 35715, 35717, 35719, 35721, 35723, 35731, 35733, 35734, 35735, 35737, 35738, 35739, 35740, 35741, 35742, 35744, 35746, 35747, 35490, 35749, 35750, 35751, 35491, 35492, 35754, 35756, 35757, 35759, 35494, 34342, 35760, 35761, 35762, 35493, 35764, 35766, 35768, 35769, 35771, 35494, 34342, 35772, 35773, 35774, 35493, 35776, 35778, 35780, 35781, 35783, 35494, 34342, 35784, 35786, 35787, 35788, 35790, 35792, 35793, 35794, 35795, 35797, 35798, 35799, 35800, 35801, 35803, 35804, 
35805, 35809, 35811, 35813, 35814, 35815, 35817, 35818, 35496, 35819, 35821, 35824, 35826, 35828, 35829, 35496, 35831, 35832, 35833, 35834, 35838, 35839, 35841, 35844, 35845, 35846, 35497, 35847, 35848, 35849, 35850, 35851, 35852, 35855, 35856, 35859, 35863, 35864, 35868, 35869, 35870, 35871, 35872, 35873, 35874, 35875, 35876, 35498, 35877, 35878, 35879, 35880, 35881, 35883, 35884, 35888, 35889, 35891, 35892, 35901, 35902, 35907, 35909, 35517, 35536, 35540, 35726, 35706, 35704, 35728, 35598, 35589, 35591, 35598, 35599, 35913, 33476, 35914, 35694, 35692, 35706, 35704, 35698, 35706, 35704, 35630, 35634, 35645, 35660, 35694, 35692, 35698, 35706, 35704, 35714, 35728, 35726, 35730, 35837, 35837, 35843, 35861, 35867, 35887, 35916, 35346, 35917, 35918, 35919, 35920, 35921, 35922, 34688, 35923, 35924, 35925, 35926, 35927, 35928, 33538, 35929, 35931, 35933, 35935, 35938, 35939, 35940, 35941, 35946, 35947, 33588, 35948, 35951, 35897, 35952, 35953, 35955, 35956, 35964, 35965, 35966, 35968, 35969, 35911, 35932, 35937, 35944, 35972, 35977, 35979, 35980, 35981, 35982, 35423, 35983, 35984, 35985, 35987, 35988, 35989, 35976, 35963, 35961, 35959, 35976, 35974, 35990, 35993, 35994, 35995, 35996, 36001, 36002, 36003, 36008, 36009, 35976, 35974, 36010, 36013, 36016, 36017, 36007, 36007, 36007, 12, 13, 14, 15, 36035, 36037, 36041, 36043, 35527, 35530, 36060, 36064, 35557, 36074, 36079, 35573, 36085, 36087, 35636, 35639, 36123, 36130, 36132, 35680, 35685, 36153, 36177, 36181, 36182, 36187, 36188, 36192, 36193, 36198, 36199, 36203, 36204, 36209, 36210, 36217, 36222, 36235, 36238, 36242, 36243, 36246, 36253, 36259, 36265, 36270, 36274, 36275, 36282, 36033, 35504, 36039, 35513, 35908, 36290, 35791, 35789, 35736, 35791, 35789, 35524, 35791, 35789, 36056, 36291, 36058, 36292, 35546, 35552, 35554, 35726, 36293, 35556, 36294, 36295, 36296, 36072, 36255, 36077, 35571, 36083, 35580, 36240, 36092, 36088, 36297, 36229, 36298, 36089, 36090, 36299, 36226, 35806, 36240, 36092, 36091, 36300, 36229, 
36228, 36093, 36301, 36094, 36255, 36096, 35607, 36098, 35611, 36303, 36141, 36157, 35679, 35684, 36150, 36305, 36306, 35700, 36307, 36308, 35726, 36101, 36103, 36105, 36107, 35679, 35684, 35696, 36309, 35700, 36310, 36311, 35726, 36108, 36109, 36110, 36111, 35628, 36312, 35632, 36313, 35732, 36119, 36314, 36121, 35653, 36127, 35657, 36315, 35666, 35668, 36137, 35672, 35674, 36141, 36157, 35679, 35684, 36150, 36316, 36317, 35696, 36318, 35700, 36319, 36320, 35726, 36156, 36157, 35712, 36321, 35716, 36161, 35724, 35722, 36322, 36323, 36324, 35732, 36166, 35736, 36169, 36171, 36173, 36175, 36174, 35748, 36180, 35755, 36185, 36186, 36190, 35763, 35767, 36196, 36197, 36201, 35775, 35779, 36207, 36208, 35785, 36213, 35791, 35789, 36220, 36225, 36226, 35806, 36240, 36325, 36248, 36229, 36228, 36231, 36230, 36233, 35822, 35820, 36240, 36326, 36248, 36249, 36251, 36327, 36255, 36257, 36260, 35857, 36262, 36328, 36263, 36329, 35865, 36269, 36279, 36280, 36330, 35885, 36285, 36332, 36339, 36346, 36349, 36353, 36357, 35950, 36360, 36286, 35900, 35899, 35903, 36288, 35906, 35905, 35932, 35930, 36370, 36371, 36334, 36336, 36338, 36344, 36342, 35932, 35930, 36372, 36352, 36373, 36355, 36362, 36364, 36380, 36368, 36387, 36388, 36389, 36390, 36366, 36377, 35970, 36383, 36384, 36368, 36391, 36392, 36366, 36377, 35970, 36383, 36384, 36397, 36398, 36368, 35970, 36374, 36403, 36404, 35978, 36377, 36379, 36383, 36384, 36409, 35991, 36386, 36410, 35991, 36395, 36007, 36402, 36000, 36411, 36005, 36402, 36015, 36012, 36408, 12, 13, 14, 15, 36465, 36416, 36466, 36417, 36467, 36418, 36468, 36419, 36471, 36472, 36473, 36474, 36475, 36476, 36477, 36478, 36055, 36053, 36479, 36481, 36422, 36483, 36423, 36484, 36485, 36486, 36488, 36070, 36489, 36492, 36425, 36493, 36494, 36426, 36495, 36427, 36496, 36428, 36497, 36429, 36498, 36455, 36456, 36457, 36499, 36500, 36502, 36504, 36505, 36507, 36453, 36508, 36454, 36509, 36455, 36456, 36457, 36510, 36511, 36513, 36514, 36515, 36517, 36518, 36519, 
36520, 36521, 36522, 36302, 36524, 36525, 36526, 36145, 36527, 36148, 36528, 36529, 36154, 36531, 36534, 36532, 36535, 36536, 36537, 36538, 36539, 36145, 36540, 36148, 36541, 36154, 36543, 36546, 36544, 36547, 36548, 36549, 36550, 36551, 36553, 36555, 36118, 36116, 36556, 36558, 36432, 36559, 36560, 36561, 36433, 36434, 36563, 36564, 36565, 36566, 36567, 36568, 36569, 36570, 36145, 36571, 36148, 36572, 36573, 36575, 36154, 36577, 36580, 36578, 36581, 36582, 36583, 36585, 36586, 36587, 36588, 36589, 36592, 36593, 36594, 36595, 36596, 36597, 36598, 36599, 36600, 36178, 36601, 35753, 35752, 36602, 36603, 36442, 36604, 36605, 36606, 36444, 36607, 36608, 36446, 36609, 36610, 36611, 36448, 36612, 36613, 36450, 36614, 36615, 36616, 36617, 36618, 36451, 36619, 36452, 36620, 36621, 36453, 36622, 36454, 36623, 36455, 36456, 36457, 36625, 36626, 36627, 36628, 36629, 36630, 36453, 36631, 36632, 36454, 36633, 36455, 36456, 36457, 36635, 36636, 36637, 36458, 36639, 36640, 36459, 36641, 36642, 36643, 36645, 36647, 36267, 36648, 36272, 36462, 36277, 36649, 36650, 36652, 35890, 36653, 36356, 36662, 36663, 36664, 35898, 36665, 36666, 36667, 36668, 35904, 36470, 36669, 36670, 36654, 36673, 36674, 36675, 36656, 36676, 36677, 36340, 36678, 36679, 36350, 36681, 35942, 36683, 36661, 36684, 36685, 36687, 36688, 36690, 36692, 36693, 36694, 36365, 36695, 36696, 36697, 36698, 36700, 36701, 36702, 36367, 36703, 36704, 36705, 36707, 36708, 36709, 36710, 36712, 36713, 36714, 36381, 36715, 36716, 36718, 36719, 36721, 36722, 36723, 36724, 36725, 36402, 36727, 36728, 36729, 36730, 36731, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36737, 36739, 36741, 36743, 36744, 36747, 36750, 36752, 36753, 36756, 36758, 36763, 36766, 36769, 36771, 36773, 36775, 36777, 36778, 36779, 36780, 36782, 36784, 36786, 36788, 36790, 36791, 36792, 36793, 36795, 36797, 36808, 36810, 36813, 36816, 36822, 36824, 36826, 36829, 36837, 36838, 36841, 36845, 36846, 36855, 36857, 36861, 36864, 36870, 36879, 36882, 36884, 36885, 
36888, 36892, 36895, 36899, 36902, 36906, 36908, 36910, 36913, 36915, 36917, 36918, 36919, 36921, 36923, 36926, 36927, 36929, 36931, 36932, 36933, 36936, 36937, 36940, 36646, 36946, 36948, 36949, 36950, 36651, 36954, 36960, 36957, 36965, 36962, 36966, 36746, 36836, 36877, 36887, 36894, 36901, 36836, 36746, 36877, 36887, 36894, 36901, 36835, 36836, 36877, 36887, 36894, 36901, 36755, 36482, 36851, 36760, 36853, 36576, 36859, 36833, 36552, 36869, 36840, 36844, 36851, 36849, 36853, 36576, 36859, 36833, 36552, 36869, 36934, 36644, 36942, 36934, 36644, 36803, 36801, 36304, 36806, 36812, 36820, 36818, 36542, 36877, 36877, 36833, 36552, 36836, 36835, 36887, 36894, 36901, 36840, 36844, 36851, 36849, 36853, 36576, 36859, 36866, 36584, 36869, 36875, 36873, 36877, 36887, 36894, 36901, 36920, 36934, 36644, 36942, 36969, 36973, 36976, 36977, 36979, 36981, 36358, 36983, 36972, 36985, 36972, 36985, 36987, 36992, 37000, 36972, 36985, 36972, 36985, 36972, 36985, 37011, 36990, 36994, 36996, 36998, 37002, 36717, 36990, 36994, 36996, 36998, 37002, 36720, 37007, 37009, 37013, 36005, 37007, 37009, 37013, 35999, 37021, 37007, 37009, 37013, 36726, 37007, 37009, 37013, 37024, 12, 13, 14, 15, 37047, 37061, 37073, 37077, 37079, 37086, 37091, 37093, 37095, 37097, 36959, 37040, 37041, 37042, 37043, 36964, 37129, 37130, 37131, 37089, 37090, 37132, 37133, 37094, 37134, 37096, 37044, 36911, 36909, 37135, 37136, 37137, 37089, 37090, 37138, 37139, 37094, 37140, 37096, 37045, 36911, 36909, 37141, 37142, 37143, 37089, 37090, 37144, 37145, 37094, 37146, 37096, 37046, 36911, 36909, 37147, 36842, 37148, 36562, 36759, 36757, 37149, 37150, 37151, 37085, 37084, 37152, 37153, 37154, 37155, 37156, 36487, 37157, 36842, 36562, 37158, 36847, 37159, 37160, 37161, 37085, 37051, 37162, 37163, 37164, 37165, 37166, 36491, 37052, 37053, 37054, 37055, 37056, 37057, 37059, 36501, 37063, 37064, 37065, 37067, 36512, 37070, 37108, 37110, 37111, 37113, 37167, 37114, 36767, 37116, 37117, 37168, 37169, 36951, 37120, 36947, 
37122, 36955, 37053, 37054, 37055, 37056, 37057, 37059, 36501, 37063, 37064, 37065, 37067, 36512, 37070, 37108, 37110, 37111, 37113, 37170, 37114, 36799, 37116, 37117, 37171, 37172, 37173, 36951, 37120, 36947, 37122, 36955, 37174, 37175, 37072, 37071, 37176, 37177, 37178, 37076, 37075, 37179, 37180, 37181, 37094, 37096, 37182, 37183, 37184, 37185, 37186, 37187, 37188, 37189, 36842, 36562, 37190, 36847, 37191, 37192, 37193, 37085, 37084, 37194, 37195, 37196, 37197, 37198, 36872, 37199, 37200, 37201, 37089, 37090, 37202, 37203, 37094, 37204, 37096, 37098, 36911, 36909, 37101, 37102, 37103, 37105, 37205, 37107, 37108, 37110, 37111, 37113, 37206, 37114, 36938, 37116, 37117, 37207, 37208, 36951, 37120, 36947, 37122, 36955, 37210, 36975, 36680, 36682, 37215, 37217, 36970, 37218, 37219, 36970, 37220, 37224, 36970, 37225, 37226, 36970, 37227, 37228, 36970, 37229, 37221, 37231, 37222, 37232, 37233, 37234, 37223, 37235, 37221, 37237, 37222, 37238, 37239, 37240, 37223, 37241, 37243, 37244, 37230, 37245, 37246, 37247, 37248, 37230, 37249, 37250, 37252, 37253, 37230, 37254, 37256, 37257, 37230, 37258, 37015, 37017, 37023, 37026, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37125, 37275, 37276, 37277, 37278, 37127, 37280, 37283, 37284, 37270, 36890, 37287, 36897, 37289, 36749, 37290, 37291, 37292, 37293, 37296, 37297, 37270, 36890, 37300, 36897, 37302, 36904, 37303, 37304, 37305, 37306, 37309, 37310, 37270, 36890, 37313, 36897, 37315, 36749, 37316, 37317, 37318, 36754, 37320, 37322, 37323, 37324, 37325, 37328, 37329, 37330, 36761, 37335, 36839, 37337, 37338, 37340, 37341, 37344, 37345, 37346, 36764, 37351, 37352, 37353, 37354, 37355, 37356, 37357, 37358, 37359, 37062, 37360, 37361, 37362, 37363, 37364, 37365, 37366, 37367, 37368, 37369, 37371, 37372, 37373, 37374, 37377, 37378, 37379, 37380, 37381, 37382, 37383, 37384, 37385, 37386, 37387, 37388, 37062, 37389, 37390, 37391, 37392, 37393, 37394, 37395, 37396, 37397, 37398, 37400, 37401, 37402, 37403, 37405, 37407, 37408, 37409, 37410, 
37411, 37414, 37415, 37074, 37417, 37419, 37420, 37078, 37270, 37424, 37425, 37428, 36839, 37434, 37435, 37437, 37438, 37441, 37442, 37443, 37087, 37448, 37449, 37452, 37453, 37270, 36890, 37456, 36897, 37458, 36904, 37459, 37460, 37461, 37462, 37463, 37464, 37465, 37467, 37468, 37469, 37470, 37471, 37473, 37474, 37475, 37476, 37479, 37480, 37481, 37482, 37483, 37484, 37490, 36982, 36980, 37128, 37493, 36982, 36980, 37128, 37333, 37349, 37496, 36982, 36980, 37216, 37499, 36982, 36980, 37216, 37427, 37427, 37446, 37502, 36982, 36980, 37216, 37504, 37506, 37510, 37512, 37514, 37518, 37522, 37018, 37527, 37020, 37532, 37536, 37509, 37538, 37517, 37539, 37521, 37526, 37531, 37540, 37535, 37541, 10, 11, 12, 13, 14, 15, 37561, 37562, 37286, 37564, 37288, 37566, 37568, 37573, 37574, 37299, 37576, 37301, 37578, 37580, 37585, 37586, 37312, 37588, 37314, 37590, 37592, 37594, 37321, 37597, 37600, 37603, 37605, 37607, 37610, 37613, 37623, 37637, 37638, 37650, 37664, 37666, 37671, 37673, 37675, 37677, 37678, 37682, 37684, 37687, 37690, 37695, 37696, 37455, 37698, 37457, 37700, 37702, 37716, 37717, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37615, 37642, 37648, 37646, 37644, 37660, 37658, 37661, 37663, 37670, 37489, 36961, 37724, 37725, 37726, 37621, 37619, 37617, 37627, 37556, 37554, 37629, 37633, 37631, 37634, 37615, 37642, 37492, 36968, 37728, 37729, 37730, 36967, 37282, 37560, 37295, 37572, 37308, 37584, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37615, 37642, 37648, 37646, 37644, 37660, 37658, 37661, 37663, 37670, 36968, 37731, 37604, 37609, 37732, 37614, 37451, 37694, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37615, 37642, 37648, 37646, 37644, 37663, 37670, 37495, 36671, 37734, 37735, 37736, 37621, 37619, 37617, 37627, 37625, 37629, 37633, 37631, 37634, 37636, 37642, 37648, 37646, 37644, 37654, 37652, 37656, 37660, 37658, 37661, 37663, 37670, 37498, 36672, 37738, 37739, 37740, 37686, 37741, 37691, 37422, 37694, 
37423, 37686, 37742, 37451, 37694, 37686, 37743, 37691, 37451, 37694, 37707, 37705, 37708, 37712, 37710, 37713, 37715, 37721, 37501, 37212, 37745, 37746, 37747, 37505, 37507, 37760, 37511, 37513, 37515, 37762, 37519, 37764, 37523, 37019, 37765, 37528, 37251, 37766, 37533, 37768, 37537, 35471, 35472, 35475, 35476, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37285, 37298, 37311, 37807, 37808, 37810, 37811, 37454, 37828, 37829, 37830, 37831, 37832, 37806, 37833, 37834, 37835, 37836, 37837, 37838, 37839, 37840, 37841, 37842, 37843, 37809, 37844, 37845, 37846, 37847, 37848, 37850, 37851, 37491, 37854, 37855, 37856, 37806, 37857, 37858, 37859, 37860, 37861, 37862, 37863, 37864, 37865, 37867, 37868, 37494, 37871, 37872, 37873, 37781, 37779, 37782, 37874, 37875, 37788, 37786, 37789, 37876, 37877, 37795, 37793, 37796, 37878, 37879, 37880, 37806, 37881, 37882, 37883, 37884, 37885, 37886, 37887, 37888, 37889, 37890, 37891, 37809, 37892, 37893, 37894, 37895, 37896, 37897, 37319, 37599, 37798, 37800, 37801, 37899, 37336, 37900, 37803, 37804, 37805, 37902, 37903, 37904, 37826, 37824, 37827, 37905, 37906, 37907, 37806, 37908, 37909, 37910, 37911, 37912, 37913, 37914, 37915, 37916, 37917, 37918, 37809, 37919, 37920, 37922, 37923, 37497, 37926, 37927, 37928, 37806, 37929, 37930, 37931, 37932, 37933, 37934, 37935, 37936, 37937, 37938, 37939, 37809, 37940, 37941, 37942, 37943, 37944, 37945, 37946, 37947, 37949, 37950, 37500, 37433, 37953, 37818, 37812, 37813, 37814, 37815, 37955, 37956, 37957, 37826, 37824, 37822, 37827, 37958, 37826, 37824, 37822, 37433, 37959, 37818, 37819, 37820, 37961, 37962, 37826, 37824, 37822, 37827, 37433, 37963, 37818, 37819, 37820, 37965, 37966, 37967, 37826, 37824, 37827, 37968, 37969, 37970, 37971, 37972, 37973, 37974, 37975, 37977, 37978, 37503, 37981, 37982, 37984, 37985, 37986, 37988, 37990, 37991, 37993, 37994, 37996, 37998, 37999, 38000, 38001, 38002, 12, 13, 14, 15, 38026, 38029, 38030, 38033, 38020, 38038, 38041, 38042, 38022, 38050, 38053, 38054, 
38058, 38020, 38069, 38070, 37777, 38071, 38074, 38075, 37784, 38076, 38079, 38080, 37791, 38081, 38082, 38085, 38086, 38089, 38020, 38094, 38097, 38098, 38022, 38104, 38105, 38106, 38107, 38108, 38110, 38112, 38113, 38114, 38118, 38119, 37822, 38120, 38121, 38124, 38125, 38128, 38020, 38133, 38136, 38022, 38142, 38145, 38146, 38149, 38020, 38154, 38157, 38158, 38161, 38022, 38169, 38171, 38172, 38173, 38174, 38175, 38179, 38180, 38181, 38182, 38184, 38185, 38186, 38187, 38189, 38190, 38191, 38194, 38195, 38196, 38197, 38198, 38200, 38201, 38202, 38206, 38207, 37822, 38208, 38209, 38212, 38025, 38047, 38049, 38063, 38065, 38066, 38219, 38109, 38115, 38068, 38073, 38078, 38203, 38205, 38103, 38219, 38109, 38115, 38117, 38203, 38205, 38139, 38141, 38166, 38168, 38176, 38178, 38193, 38203, 38193, 38203, 38205, 38217, 38219, 38222, 38221, 38225, 38224, 38226, 35473, 38228, 35474, 38230, 38231, 13, 14, 15, 38240, 38244, 38245, 38248, 38249, 38251, 38253, 38256, 38254, 38260, 38258, 38264, 38262, 38266, 38270, 38271, 38274, 38276, 38111, 38286, 38284, 38288, 38292, 38293, 38295, 38296, 38300, 38301, 38305, 38170, 38312, 38316, 38188, 38323, 38199, 38333, 38331, 38337, 38035, 38032, 38044, 38160, 38338, 38339, 38060, 38340, 38341, 38091, 38088, 38100, 38160, 38342, 38343, 38344, 38279, 38345, 38283, 38346, 38347, 38348, 38349, 38330, 38350, 38091, 38088, 38100, 38160, 38351, 38352, 38353, 38279, 38354, 38283, 38355, 38356, 38330, 38357, 38130, 38127, 38163, 38160, 38358, 38359, 38151, 38148, 38163, 38160, 38360, 38361, 38362, 38311, 38309, 38363, 38364, 38365, 38322, 38366, 38367, 38330, 38368, 38214, 38211, 38369, 38370, 38371, 38372, 38373, 38374, 38375, 38376, 38377, 38378, 38379, 38380, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38037, 38046, 38062, 38392, 38394, 38396, 38093, 38102, 38404, 38132, 38138, 38153, 38165, 38414, 38415, 38417, 38420, 38216, 38422, 38423, 38241, 38424, 38425, 38246, 38428, 38057, 38250, 38431, 38432, 38267, 38433, 38434, 38272, 38401, 
38438, 38402, 38440, 38418, 38445, 38447, 38448, 38267, 38449, 38450, 38272, 38401, 38454, 38402, 38456, 38418, 38459, 38461, 38462, 38289, 38463, 38464, 38294, 38467, 38468, 38297, 38469, 38470, 38302, 38413, 38474, 38475, 38416, 38479, 38418, 38482, 38484, 38485, 38427, 38430, 38436, 38487, 38452, 38466, 38472, 38487, 38488, 38490, 38234, 38495, 38493, 38235, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38532, 38530, 38512, 38535, 38533, 38513, 38537, 38538, 38514, 38541, 38539, 38518, 38544, 38542, 38519, 38545, 38437, 38547, 38439, 38257, 38261, 38265, 38549, 38444, 38334, 38553, 38551, 38518, 38556, 38554, 38519, 38557, 38453, 38559, 38455, 38287, 38561, 38458, 38334, 38565, 38563, 38521, 38568, 38566, 38522, 38571, 38569, 38523, 38574, 38572, 38524, 38575, 38473, 38315, 38326, 38578, 38478, 38326, 38580, 38481, 38334, 38582, 38529, 38584, 38585, 38586, 38587, 38588, 38589, 38590, 38591, 38594, 38595, 38233, 38596, 38232, 38597, 13, 14, 15, 38609, 38610, 38612, 38613, 38536, 38616, 38618, 38619, 38621, 38622, 38627, 38628, 38629, 38632, 38634, 38635, 38637, 38638, 38643, 38646, 38648, 38649, 38651, 38652, 38654, 38655, 38657, 38658, 38660, 38661, 38662, 38665, 38668, 38670, 38626, 38624, 38631, 38642, 38640, 38645, 38626, 38624, 38631, 38642, 38640, 38645, 38664, 38667, 38681, 38683, 38679, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38692, 38722, 38723, 38700, 38699, 38698, 38724, 38701, 38691, 38689, 38715, 38725, 38726, 38706, 38727, 38707, 38697, 38695, 38716, 38717, 38721, 38728, 38729, 38700, 38699, 38698, 38730, 38701, 38705, 38703, 38731, 38732, 38706, 38733, 38707, 38711, 38709, 38715, 38713, 38716, 38717, 38718, 38734, 38719, 38735, 38720, 38721, 38736, 38753, 38755, 38756, 38757, 38759, 38760, 38761, 38762, 38693, 38763, 38765, 38767, 38768, 38769, 38770, 38771, 38772, 38773, 38775, 38776, 38777, 38779, 38780, 38781, 38782, 38784, 38786, 38787, 38788, 38789, 38790, 38791, 38792, 38793, 38795, 38797, 38798, 38738, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38801, 
38805, 38808, 38812, 38818, 38822, 38827, 38829, 38804, 38811, 38810, 38835, 38834, 38833, 38815, 38674, 38821, 38826, 38825, 38835, 38834, 38833, 38832, 38678, 38837, 9, 10, 11, 12, 13, 14, 15, 38848, 38807, 38852, 38856, 38671, 38857, 38858, 38673, 38859, 38860, 38861, 38862, 38863, 38864, 38675, 38865, 38866, 38677, 38676, 38867, 38868, 38869, 38870, 38871, 8, 9, 10, 11, 12, 13, 14, 15, 38880, 38672, 38884, 38885, 38887, 38888, 38890, 38882, 38894, 38895, 38897, 38898, 38899, 38901, 14, 15, 38912, 38913, 38917, 38919, 38922, 38924, 38916, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38883, 38929, 38893, 38892, 38934, 38903, 38932, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38947, 38945, 38949, 38950, 38920, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38961, 38960, 38964, 38962, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38977, 38979, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38684, 38872, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 39008, 39009, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 39024, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
int h_C[]= {
4, 6, 8, 10, 12, 14, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113, 115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 176, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296, 298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342, 344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388, 390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434, 436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 477, 479, 481, 483, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526, 528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572, 574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618, 620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 643, 645, 648, 650, 652, 654, 656, 658, 660, 662, 664, 666, 669, 671, 673, 675, 678, 680, 682, 684, 688, 690, 692, 694, 696, 698, 701, 703, 705, 707, 712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756, 758, 760, 762, 764, 766, 769, 771, 774, 776, 779, 781, 783, 785, 787, 789, 791, 793, 796, 798, 801, 803, 806, 808, 810, 812, 814, 816, 818, 820, 823, 825, 828, 830, 832, 834, 837, 839, 841, 843, 847, 849, 
851, 853, 855, 857, 859, 861, 863, 865, 868, 870, 873, 875, 878, 880, 882, 884, 886, 888, 891, 893, 895, 897, 899, 901, 904, 906, 909, 911, 914, 916, 919, 921, 924, 926, 929, 931, 934, 936, 939, 941, 943, 945, 947, 949, 952, 954, 956, 958, 960, 962, 965, 967, 969, 971, 973, 975, 978, 980, 983, 985, 988, 990, 993, 995, 997, 999, 1002, 1004, 1006, 1008, 1011, 1013, 1017, 1019, 1021, 1023, 1026, 1028, 1031, 1033, 1036, 1038, 1041, 1043, 1046, 1048, 1051, 1053, 1056, 1058, 1061, 1063, 1066, 1068, 1071, 1073, 1076, 1078, 1081, 1083, 1086, 1088, 1091, 1093, 1096, 1098, 1101, 1103, 1105, 1107, 1109, 1111, 1114, 1116, 1119, 1121, 1124, 1126, 1129, 1131, 1134, 1136, 1139, 1141, 1144, 1146, 1149, 1151, 1154, 1156, 1159, 1161, 1164, 1166, 1169, 1171, 1173, 1175, 1177, 1179, 1182, 1184, 1187, 1189, 1192, 1194, 1197, 1199, 1201, 1203, 1205, 1207, 1209, 1211, 1213, 1215, 1217, 1219, 1221, 1223, 1225, 1227, 1229, 1231, 1233, 1235, 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, 1287, 1290, 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1327, 1329, 1331, 1333, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368, 1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406, 1408, 1410, 1412, 1414, 1416, 1419, 1421, 1424, 1426, 1429, 1431, 1434, 1436, 1439, 1441, 1444, 1446, 1449, 1451, 1454, 1456, 1458, 1460, 1462, 1464, 1467, 1469, 1472, 1474, 1476, 1478, 1480, 1482, 1484, 1486, 1489, 1491, 1493, 1495, 1498, 1500, 1502, 1504, 1507, 1509, 1513, 1515, 1517, 1519, 1521, 1523, 1526, 1528, 1531, 1533, 1538, 1540, 1542, 1544, 1546, 1548, 1551, 1553, 1556, 1558, 1561, 1563, 1566, 1568, 1570, 1572, 1574, 1576, 1579, 1581, 1584, 1586, 1589, 1591, 1594, 1596, 1599, 1601, 1604, 1606, 1609, 1611, 1614, 1616, 1619, 1621, 1624, 1626, 1629, 1631, 
1634, 1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710, 1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1739, 1741, 1743, 1745, 1748, 1750, 1753, 1755, 1758, 1760, 1763, 1765, 1768, 1770, 1773, 1775, 1778, 1780, 1782, 1784, 1786, 1788, 1791, 1793, 1796, 1798, 1801, 1803, 1806, 1808, 1811, 1813, 1816, 1818, 1821, 1823, 1826, 1828, 1831, 1833, 1836, 1838, 1841, 1843, 1846, 1848, 1851, 1853, 1856, 1858, 1861, 1863, 1866, 1868, 1870, 1872, 1874, 1876, 1879, 1881, 1884, 1886, 1889, 1891, 1894, 1896, 1899, 1901, 1904, 1906, 1911, 1913, 1915, 1917, 1919, 1921, 1923, 1925, 1927, 1929, 1931, 1933, 1935, 1937, 1939, 1941, 1944, 1946, 1949, 1951, 1953, 1955, 1957, 1959, 1961, 1963, 1965, 1967, 1969, 1971, 1973, 1975, 1977, 1979, 1981, 1983, 1985, 1987, 1989, 1991, 1993, 1995, 1997, 1999, 2001, 2003, 2005, 2007, 2009, 2011, 2013, 2015, 2017, 2019, 2021, 2023, 2025, 2027, 2029, 2031, 2033, 2035, 2037, 2039, 2041, 2043, 2045, 2047, 2049, 2051, 2053, 2055, 2057, 2059, 2061, 2063, 2065, 2067, 2069, 2071, 2073, 2075, 2077, 2079, 2081, 2083, 2085, 2087, 2089, 2091, 2093, 2095, 2097, 2099, 2101, 2103, 2105, 2107, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128, 2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166, 2168, 2170, 2172, 2175, 2177, 2179, 2181, 2183, 2185, 2187, 2189, 2191, 2193, 2195, 2197, 2200, 2202, 2204, 2206, 2209, 2211, 2213, 2215, 2218, 2220, 2222, 2224, 2227, 2229, 2231, 2233, 2235, 2237, 2239, 2241, 2243, 2245, 2247, 2249, 2251, 2253, 2255, 2257, 2259, 2261, 2263, 2265, 2267, 2269, 2271, 2273, 2275, 2277, 2279, 2281, 2283, 2285, 2288, 2290, 2293, 2295, 2297, 2299, 2301, 2303, 2305, 2307, 2309, 2311, 2313, 2315, 2317, 2319, 2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 
2343, 2345, 2347, 2349, 2351, 2353, 2355, 2357, 2359, 2361, 2363, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395, 2397, 2399, 2401, 2403, 2405, 2407, 2409, 2411, 2413, 2415, 2417, 2419, 2421, 2423, 2425, 2427, 2429, 2431, 2433, 2435, 2437, 2439, 2441, 2443, 2445, 2447, 2449, 2451, 2453, 2455, 2457, 2459, 2461, 2463, 2465, 2467, 2469, 2471, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508, 2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546, 2548, 2550, 2552, 2554, 2556, 2558, 2560, 2563, 2565, 2567, 2569, 2572, 2574, 2576, 2578, 2580, 2582, 2584, 2586, 2588, 2590, 2593, 2595, 2597, 2599, 2602, 2604, 2606, 2608, 2611, 2613, 2616, 2618, 2622, 2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2659, 2661, 2663, 2665, 2668, 2670, 2673, 2675, 2678, 2680, 2683, 2685, 2688, 2690, 2692, 2694, 2696, 2698, 2701, 2703, 2706, 2708, 2711, 2713, 2716, 2718, 2721, 2723, 2726, 2728, 2730, 2732, 2734, 2736, 2739, 2741, 2744, 2746, 2749, 2751, 2754, 2756, 2759, 2761, 2764, 2766, 2769, 2771, 2774, 2776, 2779, 2781, 2784, 2786, 2789, 2791, 2794, 2796, 2799, 2801, 2804, 2806, 2809, 2811, 2814, 2816, 2818, 2820, 2822, 2824, 2827, 2829, 2832, 2834, 2837, 2839, 2842, 2844, 2847, 2849, 2852, 2854, 2857, 2859, 2862, 2864, 2866, 2868, 2870, 2872, 2875, 2877, 2880, 2882, 2885, 2887, 2890, 2892, 2895, 2897, 2900, 2902, 2905, 2907, 2910, 2912, 2915, 2917, 2920, 2922, 2925, 2927, 2930, 2932, 2935, 2937, 2940, 2942, 2945, 2947, 2950, 2952, 2955, 2957, 2960, 2962, 2965, 2967, 2970, 2972, 2975, 2977, 2980, 2982, 2985, 2987, 2990, 2992, 2995, 2997, 3000, 3002, 3005, 3007, 3010, 3012, 3014, 3016, 3018, 3020, 3023, 3025, 3028, 3030, 3033, 3035, 3038, 3040, 3042, 3044, 3047, 3049, 3052, 3054, 3060, 3062, 3064, 3066, 3068, 3070, 3073, 3075, 3078, 3080, 3083, 3085, 3088, 3090, 3093, 3095, 
3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116, 3118, 3120, 3123, 3125, 3128, 3130, 3133, 3135, 3138, 3140, 3143, 3145, 3148, 3150, 3156, 3158, 3160, 3162, 3164, 3166, 3169, 3171, 3173, 3175, 3177, 3179, 3182, 3184, 3187, 3189, 3195, 3197, 3200, 3202, 3205, 3207, 3209, 3211, 3214, 3216, 3218, 3220, 3225, 3227, 3229, 3231, 3233, 3235, 3237, 3239, 3242, 3244, 3246, 3248, 3250, 3252, 3255, 3257, 3260, 3262, 3265, 3267, 3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3291, 3293, 3296, 3298, 3301, 3303, 3306, 3308, 3311, 3313, 3316, 3318, 3321, 3323, 3326, 3328, 3331, 3333, 3335, 3337, 3339, 3341, 3343, 3345, 3347, 3349, 3352, 3354, 3357, 3359, 3361, 3363, 3365, 3367, 3369, 3371, 3373, 3375, 3377, 3379, 3381, 3383, 3386, 3388, 3390, 3392, 3394, 3396, 3399, 3401, 3404, 3406, 3408, 3410, 3412, 3414, 3417, 3419, 3422, 3424, 3427, 3429, 3432, 3434, 3437, 3439, 3442, 3444, 3447, 3449, 3452, 3454, 3457, 3459, 3461, 3463, 3465, 3467, 3469, 3471, 3473, 3475, 3477, 3479, 3481, 3483, 3485, 3487, 3489, 3491, 3493, 3495, 3498, 3500, 3502, 3504, 3506, 3508, 3511, 3513, 3516, 3518, 3520, 3522, 3524, 3526, 3529, 3531, 3534, 3536, 3539, 3541, 3544, 3546, 3548, 3550, 3552, 3554, 3557, 3559, 3562, 3564, 3567, 3569, 3572, 3574, 3577, 3579, 3583, 3585, 3587, 3589, 3594, 3596, 3599, 3601, 3604, 3606, 3609, 3611, 3614, 3616, 3618, 3620, 3623, 3625, 3628, 3630, 3642, 3644, 3647, 3649, 3652, 3654, 3657, 3659, 3662, 3664, 3666, 3668, 3670, 3672, 3675, 3677, 3680, 3682, 3685, 3687, 3690, 3692, 3695, 3697, 3700, 3702, 3704, 3706, 3709, 3711, 3714, 3716, 3722, 3724, 3726, 3728, 3730, 3732, 3735, 3737, 3740, 3742, 3745, 3747, 3750, 3752, 3754, 3756, 3758, 3760, 3763, 3765, 3768, 3770, 3773, 3775, 3778, 3780, 3782, 3784, 3786, 3788, 3791, 3793, 3796, 3798, 3801, 3803, 3806, 3808, 3811, 3813, 3816, 3818, 3821, 3823, 3826, 3828, 3830, 3832, 3835, 3837, 3840, 3842, 3848, 3850, 3853, 3855, 3858, 3860, 3863, 3865, 3868, 3870, 3873, 3875, 3878, 3880, 3883, 3885, 3888, 3890, 3892, 
3894, 3896, 3898, 3901, 3903, 3906, 3908, 3911, 3913, 3916, 3918, 3921, 3923, 3926, 3928, 3931, 3933, 3936, 3938, 3940, 3942, 3944, 3946, 3949, 3951, 3954, 3956, 3959, 3961, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3981, 3983, 3985, 3987, 3991, 3993, 3996, 3998, 4001, 4003, 4006, 4008, 4011, 4013, 4015, 4017, 4019, 4021, 4023, 4025, 4027, 4029, 4031, 4033, 4035, 4037, 4040, 4042, 4045, 4047, 4050, 4052, 4055, 4057, 4060, 4062, 4065, 4067, 4070, 4072, 4075, 4077, 4080, 4082, 4085, 4087, 4090, 4092, 4094, 4096, 4098, 4100, 4103, 4105, 4108, 4110, 4113, 4115, 4118, 4120, 4123, 4125, 4128, 4130, 4133, 4135, 4138, 4140, 4143, 4145, 4148, 4150, 4153, 4155, 4158, 4160, 4162, 4164, 4166, 4168, 4171, 4173, 4176, 4178, 4181, 4183, 4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4203, 4205, 4208, 4210, 4216, 4218, 4220, 4222, 4224, 4226, 4229, 4231, 4234, 4236, 4239, 4241, 4244, 4246, 4249, 4251, 4254, 4256, 4259, 4261, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294, 4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332, 4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370, 4372, 4374, 4376, 4378, 4380, 4382, 4384, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408, 4410, 4412, 4414, 4416, 4418, 4420, 4422, 4425, 4427, 4429, 4431, 4434, 4436, 4438, 4440, 4443, 4445, 4447, 4449, 4452, 4454, 4457, 4459, 4461, 4463, 4465, 4467, 4469, 4471, 4473, 4475, 4477, 4479, 4481, 4483, 4485, 4487, 4489, 4491, 4493, 4495, 4497, 4499, 4501, 4503, 4505, 4507, 4509, 4511, 4513, 4515, 4517, 4519, 4522, 4524, 4526, 4528, 4531, 4533, 4535, 4537, 4539, 4541, 4543, 4545, 4547, 4549, 4551, 4553, 4555, 4557, 4559, 4561, 4563, 4565, 4567, 4569, 4571, 4573, 4576, 4578, 4580, 4582, 4585, 4587, 4590, 4592, 4594, 4596, 4598, 4600, 4602, 4604, 4607, 4609, 4611, 4613, 4618, 4620, 4622, 4624, 4626, 4628, 4631, 4633, 
4636, 4638, 4641, 4643, 4646, 4648, 4651, 4653, 4656, 4658, 4661, 4663, 4666, 4668, 4671, 4673, 4676, 4678, 4681, 4683, 4686, 4688, 4690, 4692, 4695, 4697, 4700, 4702, 4708, 4710, 4712, 4714, 4716, 4718, 4721, 4723, 4726, 4728, 4731, 4733, 4736, 4738, 4741, 4743, 4746, 4748, 4751, 4753, 4755, 4757, 4759, 4761, 4764, 4766, 4769, 4771, 4774, 4776, 4779, 4781, 4783, 4785, 4787, 4789, 4791, 4793, 4795, 4797, 4799, 4801, 4803, 4805, 4807, 4809, 4811, 4813, 4815, 4817, 4819, 4821, 4823, 4825, 4827, 4829, 4831, 4833, 4835, 4837, 4839, 4841, 4843, 4845, 4848, 4850, 4852, 4854, 4856, 4858, 4861, 4863, 4866, 4868, 4871, 4873, 4876, 4878, 4880, 4882, 4885, 4887, 4889, 4891, 4895, 4897, 4900, 4902, 4905, 4907, 4910, 4912, 4914, 4916, 4919, 4921, 4923, 4925, 4929, 4931, 4934, 4936, 4939, 4941, 4944, 4946, 4949, 4951, 4954, 4956, 4959, 4961, 4963, 4965, 4968, 4970, 4973, 4975, 4981, 4983, 4985, 4987, 4990, 4992, 4994, 4996, 4999, 5001, 5005, 5007, 5009, 5011, 5014, 5016, 5019, 5021, 5024, 5026, 5029, 5031, 5033, 5035, 5038, 5040, 5043, 5045, 5048, 5050, 5052, 5054, 5056, 5058, 5061, 5063, 5066, 5068, 5071, 5073, 5076, 5078, 5081, 5083, 5086, 5088, 5091, 5093, 5096, 5098, 5100, 5102, 5104, 5106, 5109, 5111, 5114, 5116, 5119, 5121, 5124, 5126, 5128, 5130, 5132, 5134, 5137, 5139, 5142, 5144, 5147, 5149, 5152, 5154, 5156, 5158, 5160, 5162, 5164, 5166, 5168, 5170, 5172, 5174, 5176, 5178, 5180, 5182, 5184, 5186, 5189, 5191, 5194, 5196, 5199, 5201, 5203, 5205, 5207, 5209, 5212, 5214, 5216, 5218, 5220, 5222, 5225, 5227, 5229, 5231, 5233, 5235, 5238, 5240, 5243, 5245, 5248, 5250, 5253, 5255, 5258, 5260, 5263, 5265, 5268, 5270, 5273, 5275, 5277, 5279, 5281, 5283, 5286, 5288, 5290, 5292, 5294, 5296, 5298, 5300, 5302, 5304, 5306, 5308, 5310, 5312, 5314, 5316, 5318, 5320, 5322, 5324, 5326, 5328, 5331, 5333, 5335, 5337, 5340, 5342, 5345, 5347, 5353, 5355, 5357, 5359, 5361, 5363, 5366, 5368, 5371, 5373, 5376, 5378, 5381, 5383, 5385, 5387, 5389, 5391, 5393, 5395, 5397, 5399, 5402, 5404, 5406, 
5408, 5410, 5412, 5414, 5416, 5419, 5421, 5424, 5426, 5432, 5434, 5437, 5439, 5442, 5444, 5447, 5449, 5451, 5453, 5455, 5457, 5459, 5461, 5463, 5465, 5468, 5470, 5473, 5475, 5478, 5480, 5483, 5485, 5488, 5490, 5493, 5495, 5498, 5500, 5502, 5504, 5506, 5508, 5511, 5513, 5515, 5517, 5519, 5521, 5524, 5526, 5529, 5531, 5534, 5536, 5539, 5541, 5543, 5545, 5548, 5550, 5553, 5555, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586, 5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 5606, 5608, 5610, 5612, 5614, 5616, 5618, 5620, 5622, 5624, 5626, 5628, 5630, 5632, 5634, 5636, 5638, 5640, 5642, 5644, 5646, 5648, 5650, 5652, 5654, 5656, 5658, 5660, 5662, 5664, 5667, 5669, 5672, 5674, 5676, 5678, 5680, 5682, 5684, 5686, 5688, 5690, 5692, 5694, 5696, 5698, 5700, 5702, 5704, 5706, 5708, 5710, 5712, 5714, 5716, 5718, 5720, 5722, 5724, 5726, 5728, 5730, 5732, 5734, 5736, 5738, 5740, 5742, 5744, 5746, 5748, 5750, 5752, 5754, 5756, 5758, 5761, 5763, 5765, 5767, 5770, 5772, 5774, 5776, 5779, 5781, 5783, 5785, 5787, 5789, 5791, 5793, 5795, 5797, 5799, 5801, 5803, 5805, 5807, 5809, 5811, 5813, 5816, 5818, 5820, 5822, 5824, 5826, 5829, 5831, 5833, 5835, 5838, 5840, 5842, 5844, 5847, 5849, 5851, 5853, 5855, 5857, 5859, 5861, 5863, 5865, 5867, 5869, 5871, 5873, 5875, 5877, 5879, 5881, 5883, 5885, 5887, 5889, 5891, 5893, 5896, 5898, 5900, 5902, 5905, 5907, 5909, 5911, 5913, 5915, 5918, 5920, 5922, 5924, 5926, 5928, 5930, 5932, 5934, 5936, 5938, 5940, 5942, 5944, 5946, 5948, 5950, 5952, 5954, 5956, 5958, 5960, 5962, 5964, 5966, 5968, 5970, 5972, 5974, 5976, 5978, 5980, 5982, 5984, 5986, 5988, 5990, 5992, 5994, 5996, 5999, 6001, 6003, 6005, 6007, 6009, 6011, 6013, 6015, 6017, 6020, 6022, 6028, 6030, 6033, 6035, 6038, 6040, 6042, 6044, 6047, 6049, 6052, 6054, 6060, 6062, 6064, 6066, 6068, 6070, 6073, 6075, 6078, 6080, 6083, 6085, 6088, 6090, 6092, 6094, 6096, 6098, 6101, 6103, 6106, 6108, 6111, 6113, 6116, 6118, 6121, 6123, 6126, 6128, 6130, 6132, 
6134, 6136, 6139, 6141, 6144, 6146, 6149, 6151, 6154, 6156, 6158, 6160, 6163, 6165, 6167, 6169, 6173, 6175, 6178, 6180, 6183, 6185, 6188, 6190, 6193, 6195, 6198, 6200, 6203, 6205, 6208, 6210, 6213, 6215, 6218, 6220, 6223, 6225, 6228, 6230, 6232, 6234, 6236, 6238, 6240, 6242, 6244, 6246, 6248, 6250, 6253, 6255, 6257, 6259, 6262, 6264, 6267, 6269, 6274, 6276, 6279, 6281, 6287, 6289, 6292, 6294, 6297, 6299, 6302, 6304, 6307, 6309, 6311, 6313, 6315, 6317, 6320, 6322, 6325, 6327, 6330, 6332, 6335, 6337, 6339, 6341, 6343, 6345, 6348, 6350, 6353, 6355, 6357, 6359, 6361, 6363, 6366, 6368, 6370, 6372, 6375, 6377, 6379, 6381, 6384, 6386, 6390, 6392, 6394, 6396, 6399, 6401, 6404, 6406, 6409, 6411, 6413, 6415, 6417, 6419, 6422, 6424, 6427, 6429, 6432, 6434, 6437, 6439, 6442, 6444, 6447, 6449, 6452, 6454, 6457, 6459, 6462, 6464, 6467, 6469, 6472, 6474, 6477, 6479, 6482, 6484, 6486, 6488, 6490, 6492, 6495, 6497, 6500, 6502, 6505, 6507, 6510, 6512, 6515, 6517, 6520, 6522, 6525, 6527, 6530, 6532, 6534, 6536, 6538, 6540, 6543, 6545, 6548, 6550, 6553, 6555, 6558, 6560, 6563, 6565, 6568, 6570, 6573, 6575, 6578, 6580, 6583, 6585, 6588, 6590, 6593, 6595, 6598, 6600, 6603, 6605, 6608, 6610, 6613, 6615, 6618, 6620, 6622, 6624, 6627, 6629, 6631, 6633, 6638, 6640, 6642, 6644, 6646, 6648, 6651, 6653, 6656, 6658, 6661, 6663, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688, 6690, 6692, 6695, 6697, 6699, 6701, 6703, 6705, 6707, 6709, 6711, 6713, 6715, 6717, 6720, 6722, 6725, 6727, 6730, 6732, 6735, 6737, 6740, 6742, 6745, 6747, 6750, 6752, 6755, 6757, 6760, 6762, 6767, 6769, 6772, 6774, 6777, 6779, 6782, 6784, 6786, 6788, 6790, 6792, 6795, 6797, 6800, 6802, 6805, 6807, 6810, 6812, 6815, 6817, 6819, 6821, 6824, 6826, 6829, 6831, 6837, 6839, 6842, 6844, 6847, 6849, 6852, 6854, 6857, 6859, 6861, 6863, 6866, 6868, 6870, 6872, 6876, 6878, 6881, 6883, 6886, 6888, 6891, 6893, 6895, 6897, 6900, 6902, 6905, 6907, 6913, 6915, 6917, 6919, 6922, 6924, 6927, 6929, 6935, 6937, 6939, 
6941, 6943, 6945, 6948, 6950, 6953, 6955, 6958, 6960, 6963, 6965, 6967, 6969, 6971, 6973, 6976, 6978, 6980, 6982, 6984, 6986, 6989, 6991, 6993, 6995, 6997, 6999, 7002, 7004, 7007, 7009, 7012, 7014, 7017, 7019, 7021, 7023, 7025, 7027, 7030, 7032, 7035, 7037, 7040, 7042, 7045, 7047, 7049, 7051, 7054, 7056, 7059, 7061, 7067, 7069, 7071, 7073, 7075, 7077, 7080, 7082, 7085, 7087, 7090, 7092, 7095, 7097, 7099, 7101, 7103, 7105, 7108, 7110, 7113, 7115, 7118, 7120, 7123, 7125, 7127, 7129, 7131, 7133, 7136, 7138, 7141, 7143, 7146, 7148, 7151, 7153, 7155, 7157, 7159, 7161, 7163, 7165, 7167, 7169, 7171, 7173, 7175, 7177, 7180, 7182, 7184, 7186, 7188, 7190, 7193, 7195, 7198, 7200, 7203, 7205, 7208, 7210, 7213, 7215, 7218, 7220, 7223, 7225, 7227, 7229, 7231, 7233, 7235, 7237, 7239, 7241, 7243, 7245, 7247, 7249, 7251, 7253, 7255, 7257, 7259, 7261, 7263, 7265, 7267, 7269, 7271, 7273, 7275, 7277, 7279, 7281, 7283, 7285, 7287, 7289, 7291, 7293, 7295, 7297, 7299, 7301, 7303, 7305, 7307, 7309, 7311, 7313, 7315, 7317, 7319, 7321, 7323, 7325, 7327, 7329, 7331, 7333, 7335, 7337, 7339, 7341, 7344, 7346, 7349, 7351, 7353, 7355, 7357, 7359, 7361, 7363, 7365, 7367, 7369, 7371, 7373, 7375, 7377, 7379, 7381, 7383, 7385, 7387, 7389, 7391, 7393, 7395, 7397, 7399, 7401, 7403, 7405, 7407, 7409, 7411, 7413, 7415, 7417, 7419, 7421, 7423, 7425, 7427, 7429, 7431, 7433, 7435, 7437, 7439, 7441, 7443, 7445, 7447, 7449, 7451, 7453, 7455, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7475, 7477, 7479, 7481, 7484, 7486, 7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7517, 7519, 7521, 7523, 7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7543, 7545, 7548, 7550, 7553, 7555, 7558, 7560, 7562, 7564, 7566, 7568, 7571, 7573, 7576, 7578, 7580, 7582, 7584, 7586, 7589, 7591, 7594, 7596, 7598, 7600, 7603, 7605, 7608, 7610, 7616, 7618, 7621, 7623, 7626, 7628, 7630, 7632, 7634, 7636, 7638, 7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7667, 7669, 
7671, 7673, 7675, 7677, 7680, 7682, 7685, 7687, 7689, 7691, 7693, 7695, 7698, 7700, 7702, 7704, 7706, 7708, 7711, 7713, 7716, 7718, 7721, 7723, 7726, 7728, 7731, 7733, 7735, 7737, 7740, 7742, 7744, 7746, 7750, 7752, 7755, 7757, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790, 7792, 7794, 7796, 7798, 7801, 7803, 7805, 7807, 7811, 7813, 7816, 7818, 7821, 7823, 7826, 7828, 7830, 7832, 7834, 7836, 7839, 7841, 7844, 7846, 7849, 7851, 7854, 7856, 7859, 7861, 7864, 7866, 7869, 7871, 7874, 7876, 7879, 7881, 7887, 7889, 7891, 7893, 7895, 7897, 7900, 7902, 7905, 7907, 7910, 7912, 7915, 7917, 7920, 7922, 7925, 7927, 7930, 7932, 7934, 7936, 7938, 7940, 7943, 7945, 7948, 7950, 7953, 7955, 7958, 7960, 7963, 7965, 7967, 7969, 7971, 7973, 7976, 7978, 7981, 7983, 7986, 7988, 7991, 7993, 7995, 7997, 7999, 8001, 8003, 8005, 0, 0, 1, 1, 1, 1, 8013, 8015, 8017, 8019, 8021, 8023, 2, 2, 3636, 3636, 3634, 3634, 3636, 3636, 3639, 3639, 15, 15, 4966, 4966, 4978, 4978, 1288, 1288, 144, 144, 177, 177, 2207, 2207, 2108, 2108, 7064, 7064, 8170, 8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 250, 250, 250, 250, 5997, 5997, 5997, 5997, 5894, 5894, 5903, 5903, 6625, 6625, 475, 484, 6625, 6625, 667, 676, 667, 676, 667, 667, 676, 676, 686, 686, 686, 686, 6898, 6898, 8387, 8389, 8391, 8393, 8395, 8397, 8399, 8401, 699, 699, 708, 708, 709, 709, 710, 710, 3636, 3636, 3634, 3634, 3636, 3636, 3639, 3639, 767, 767, 835, 844, 1288, 1288, 1325, 1334, 1387, 1387, 1902, 1387, 1387, 1908, 1487, 1487, 1535, 1535, 1902, 1908, 2173, 2173, 2198, 2198, 2207, 2207, 2216, 2216, 2108, 2108, 2173, 2173, 2198, 2198, 2207, 2207, 2216, 2216, 2225, 2225, 2561, 2570, 2591, 2591, 2600, 2600, 2609, 2609, 2620, 2620, 2657, 2666, 3045, 3045, 3057, 3057, 3121, 3121, 3153, 3153, 3167, 3167, 3192, 3192, 3212, 3212, 3222, 3222, 3496, 3496, 3634, 3634, 3496, 3496, 3581, 3581, 3591, 3591, 3632, 3632, 3636, 3636, 3634, 3634, 3636, 3636, 3639, 3639, 3707, 3707, 3719, 
3719, 3833, 3833, 3845, 3845, 3979, 3988, 4201, 4201, 4213, 4213, 4693, 4693, 4705, 4705, 4529, 4529, 4693, 4693, 4705, 4705, 4529, 4529, 4966, 4978, 4966, 4978, 4423, 4423, 4432, 4432, 4693, 4693, 4705, 4705, 4520, 4520, 4529, 4529, 4583, 4588, 4605, 4605, 4615, 4615, 4693, 4693, 4705, 4705, 4883, 4892, 4917, 4926, 4966, 4966, 4978, 4978, 5429, 5429, 5417, 5417, 5338, 5338, 5350, 5350, 5417, 5417, 5429, 5429, 5417, 5417, 5429, 5429, 6045, 6045, 6057, 6057, 5894, 5894, 5903, 5903, 6625, 6625, 6635, 6635, 6693, 5759, 5768, 5759, 5768, 6764, 5997, 5997, 5997, 5997, 5836, 5836, 5845, 5845, 5894, 5894, 5903, 5903, 6251, 6251, 5894, 5894, 5903, 5903, 6284, 6284, 5997, 5997, 6025, 6025, 6045, 6045, 6057, 6057, 6161, 6170, 6251, 6251, 6260, 6271, 6284, 6284, 6625, 6625, 6635, 6635, 6693, 6764, 6822, 6822, 6834, 6834, 6864, 6873, 6898, 6898, 6910, 6910, 6920, 6920, 6932, 6932, 7052, 7052, 7064, 7064, 7601, 7601, 7613, 7613, 7601, 7601, 7456, 7456, 7473, 7482, 7515, 7515, 7524, 7524, 7601, 7601, 7613, 7613, 7738, 7747, 7799, 7808, 7884, 7884, 10417, 10419, 10421, 10423, 10425, 10427, 10429, 10431, 10433, 10435, 10438, 10440, 10443, 10445, 10447, 10449, 10451, 10453, 10455, 10457, 10459, 10461, 10463, 10465, 10468, 10470, 10472, 10474, 10477, 10479, 10482, 10484, 10490, 10492, 10494, 10496, 10498, 10500, 10503, 10505, 10508, 10510, 10513, 10515, 10518, 10520, 10522, 10524, 10527, 10529, 10532, 10534, 10540, 10542, 10544, 10546, 10548, 10550, 10553, 10555, 10557, 10559, 10561, 10563, 10566, 10568, 10571, 10573, 10576, 10578, 10581, 10583, 10586, 10588, 10590, 10592, 10594, 10596, 10599, 10601, 10604, 10606, 10609, 10611, 10614, 10616, 10618, 10620, 10623, 10625, 10628, 10630, 10636, 10638, 10640, 10642, 10644, 10646, 10649, 10651, 10653, 10655, 10657, 10659, 10661, 10663, 10665, 10667, 10669, 10671, 10673, 10675, 10678, 10680, 10682, 10684, 10686, 10688, 10691, 10693, 10695, 10697, 10699, 10701, 10703, 10705, 10707, 10709, 10711, 10713, 10715, 10717, 10719, 10721, 10723, 
10725, 10728, 10730, 10733, 10735, 10738, 10740, 10742, 10744, 10747, 10749, 10752, 10754, 10760, 10762, 10765, 10767, 10770, 10772, 10775, 10777, 10780, 10782, 10784, 10786, 10789, 10791, 10793, 10795, 10799, 10801, 10804, 10806, 10809, 10811, 10814, 10816, 10818, 10820, 10822, 10824, 10827, 10829, 10831, 10833, 10835, 10837, 10839, 10841, 10843, 10845, 10847, 10849, 10851, 10853, 10855, 10857, 10860, 10862, 10864, 10866, 10869, 10871, 10873, 10875, 10877, 10879, 10882, 10884, 10886, 10888, 10890, 10892, 10895, 10897, 10900, 10902, 10905, 10907, 10910, 10912, 10915, 10917, 10920, 10922, 10925, 10927, 10930, 10932, 10935, 10937, 10940, 10942, 10945, 10947, 10950, 10952, 10955, 10957, 10963, 10965, 10967, 10969, 10971, 10973, 10975, 10977, 10979, 10981, 10983, 10985, 10987, 10989, 10991, 10993, 10995, 10997, 10999, 11001, 11004, 11006, 11008, 11010, 11013, 11015, 11017, 11019, 11021, 11023, 11025, 11027, 11029, 11031, 11034, 11036, 11038, 11040, 11043, 11045, 11047, 11049, 11051, 11053, 11055, 11057, 11059, 11061, 11063, 11065, 11067, 11069, 11071, 11073, 11075, 11077, 11080, 11082, 11084, 11086, 11089, 11091, 11093, 11095, 11098, 11100, 11102, 11104, 11106, 11108, 11110, 11112, 11114, 11116, 11118, 11120, 11122, 11124, 11126, 11128, 11130, 11132, 11134, 11136, 11138, 11140, 11142, 11144, 11146, 11148, 11151, 11153, 11157, 11159, 11161, 11163, 11165, 11167, 11170, 11172, 11175, 11177, 11180, 11182, 11185, 11187, 11190, 11192, 11195, 11197, 11200, 11202, 11205, 11207, 11210, 11212, 11215, 11217, 11219, 11221, 11226, 11228, 11231, 11233, 11236, 11238, 11240, 11242, 11244, 11246, 11249, 11251, 11254, 11256, 11259, 11261, 11264, 11266, 11269, 11271, 11274, 11276, 11279, 11281, 11284, 11286, 11289, 11291, 11294, 11296, 11299, 11301, 11304, 11306, 11308, 11310, 11313, 11315, 11318, 11320, 11325, 11327, 11329, 11331, 11333, 11335, 11337, 11339, 11341, 11343, 11345, 11347, 11350, 11352, 11354, 11356, 11358, 11360, 11362, 11364, 11367, 11369, 11371, 11373, 11375, 11377, 
11379, 11381, 11383, 11385, 11387, 11389, 11391, 11393, 11395, 11397, 11399, 11401, 11403, 11405, 11407, 11409, 11411, 11413, 11415, 11417, 11419, 11421, 11423, 11425, 11427, 11429, 11432, 11434, 11436, 11438, 11440, 11442, 11444, 11446, 11448, 11450, 11452, 11454, 11457, 11459, 11461, 11463, 11466, 11468, 11470, 11472, 11475, 11477, 11480, 11482, 11488, 11490, 11492, 11494, 11497, 11499, 11502, 11504, 11510, 11512, 11514, 11516, 11518, 11520, 11522, 11524, 11526, 11528, 11530, 11532, 11535, 11537, 11540, 11542, 11545, 11547, 11550, 11552, 11555, 11557, 11560, 11562, 11568, 11570, 11573, 11575, 11578, 11580, 11583, 11585, 11588, 11590, 11592, 11594, 11597, 11599, 11602, 11604, 11609, 11611, 11613, 11615, 11617, 11619, 11622, 11624, 11627, 11629, 11632, 11634, 11637, 11639, 11641, 11643, 11645, 11647, 11650, 11652, 11655, 11657, 11660, 11662, 10676, 10676, 10858, 10858, 10867, 10867, 10880, 10880, 11742, 11744, 11746, 11748, 11751, 11753, 11755, 11757, 11760, 11762, 11764, 11766, 11768, 11770, 11772, 11774, 11776, 11778, 11780, 11782, 11784, 11786, 11788, 11790, 11792, 11794, 11796, 11798, 11800, 11802, 11804, 11806, 11808, 11810, 11812, 11814, 11817, 11819, 11822, 11824, 11827, 11829, 11831, 11833, 11835, 11837, 11839, 11841, 11844, 11846, 11849, 11851, 11853, 11855, 11859, 11861, 11863, 11865, 11867, 11869, 11871, 11873, 11875, 11877, 11879, 11881, 11883, 11885, 8168, 8168, 10960, 10960, 8385, 8385, 11473, 11473, 11348, 11348, 11495, 11495, 11348, 11348, 11507, 11507, 11473, 11473, 11485, 11485, 11365, 11365, 11365, 11365, 11365, 11365, 11495, 11495, 11533, 11533, 10525, 10525, 10621, 10621, 10475, 10475, 10487, 10487, 10525, 10525, 10537, 10537, 10621, 10621, 10633, 10633, 10745, 10745, 10757, 10757, 10676, 10676, 10745, 10745, 10757, 10757, 10787, 10689, 10689, 10745, 10745, 10757, 10757, 10796, 10745, 10745, 10757, 10757, 10787, 10796, 10858, 10858, 10867, 10867, 10880, 10880, 10960, 10960, 11155, 11155, 11002, 11002, 11011, 11011, 11032, 11032, 11041, 11041, 
11087, 11087, 11096, 11096, 11155, 11155, 11223, 11223, 11311, 11322, 11485, 11485, 11473, 11473, 11348, 11348, 11495, 11495, 11348, 11348, 11507, 11507, 11473, 11473, 11485, 11485, 11365, 11365, 11507, 11365, 11365, 11507, 11365, 11365, 11495, 11495, 11533, 11533, 11565, 11565, 11430, 11430, 11455, 11464, 11473, 11473, 11485, 11485, 11495, 11495, 11507, 11507, 11533, 11533, 11565, 11565, 11606, 11606, 13311, 13313, 13315, 13317, 13319, 13321, 13323, 13325, 13327, 13329, 13332, 13334, 13337, 13339, 13342, 13344, 13347, 13349, 13352, 13354, 13357, 13359, 13362, 13364, 13367, 13369, 13371, 13373, 13375, 13377, 13380, 13382, 13385, 13387, 13390, 13392, 13395, 13397, 13399, 13401, 13403, 13405, 13407, 13409, 13411, 13413, 13415, 13417, 13419, 13421, 13423, 13425, 13427, 13429, 13432, 13434, 13437, 13439, 13442, 13444, 13447, 13449, 13452, 13454, 13457, 13459, 13462, 13464, 13466, 13468, 13470, 13472, 13475, 13477, 13480, 13482, 13485, 13487, 13490, 13492, 13495, 13497, 13500, 13502, 13505, 13507, 13510, 13512, 13515, 13517, 13520, 13522, 13525, 13527, 13529, 13531, 13533, 13535, 13538, 13540, 13543, 13545, 13548, 13550, 13553, 13555, 13558, 13560, 13563, 13565, 13568, 13570, 13573, 13575, 13578, 13580, 13583, 13585, 13588, 13590, 13593, 13595, 13597, 13599, 13601, 13603, 13606, 13608, 13611, 13613, 13616, 13618, 13621, 13623, 13625, 13627, 13630, 13632, 13635, 13637, 13643, 13645, 13647, 13649, 13652, 13654, 13657, 13659, 13665, 13667, 13669, 13671, 13673, 13675, 13678, 13680, 13683, 13685, 13688, 13690, 13693, 13695, 13698, 13700, 13703, 13705, 13707, 13709, 13711, 13713, 13715, 13717, 13720, 13722, 13724, 13726, 13729, 13731, 13735, 13737, 13739, 13741, 13744, 13746, 13749, 13751, 13754, 13756, 13759, 13761, 13763, 13765, 13767, 13769, 13772, 13774, 13777, 13779, 13782, 13784, 13787, 13789, 13791, 13793, 13796, 13798, 13800, 13802, 13806, 13808, 13811, 13813, 13816, 13818, 13821, 13823, 13825, 13827, 13829, 13831, 13834, 13836, 13839, 13841, 13844, 13846, 13849, 
13851, 13853, 13855, 13857, 13859, 13862, 13864, 13867, 13869, 13872, 13874, 13877, 13879, 13882, 13884, 13887, 13889, 13892, 13894, 13897, 13899, 13901, 13903, 13906, 13908, 13911, 13913, 13919, 13921, 13924, 13926, 13929, 13931, 13934, 13936, 13939, 13941, 13944, 13946, 13949, 13951, 13954, 13956, 13958, 13960, 13962, 13964, 13967, 13969, 13971, 13973, 13976, 13978, 13981, 13983, 13989, 13991, 13994, 13996, 13999, 14001, 14004, 14006, 14009, 14011, 14014, 14016, 14019, 14021, 14024, 14026, 14029, 14031, 14034, 14036, 14039, 14041, 14044, 14046, 14049, 14051, 14053, 14055, 14058, 14060, 14063, 14065, 14126, 14128, 14130, 14132, 14134, 14136, 14138, 14140, 14142, 14144, 14146, 14148, 14150, 14152, 14154, 14156, 11738, 11738, 11740, 11740, 11739, 11739, 11740, 11740, 11749, 11749, 11758, 11758, 11815, 11815, 14219, 14221, 14223, 14225, 14227, 14229, 14231, 14233, 14235, 14237, 14239, 14241, 14243, 14245, 14247, 14249, 14251, 14253, 14255, 14257, 14259, 14261, 14263, 14265, 14268, 14270, 14272, 14274, 14276, 14278, 14280, 14282, 14284, 14286, 14288, 14290, 14292, 14294, 14296, 14298, 14300, 14302, 14304, 14306, 14308, 14310, 14312, 14314, 14316, 14318, 14320, 14322, 14324, 14326, 14328, 14330, 13733, 13733, 14369, 14371, 14373, 14375, 14377, 14379, 14381, 14383, 14385, 14387, 14389, 14391, 14394, 14396, 14398, 14400, 14402, 14404, 14406, 14408, 14410, 14412, 14414, 14416, 14419, 14421, 14423, 14425, 14427, 14429, 14431, 14433, 14435, 14437, 14439, 14441, 14444, 14446, 14448, 14450, 14453, 14455, 14457, 14459, 14462, 14464, 14466, 14468, 14470, 14472, 14474, 14476, 14478, 14480, 14482, 14484, 14486, 14488, 13628, 13628, 13640, 13640, 13650, 13650, 13662, 13662, 13794, 13803, 13904, 13904, 13916, 13916, 13974, 13974, 13986, 13986, 14056, 14056, 14068, 14068, 15220, 15222, 15224, 15226, 15228, 15230, 15233, 15235, 15237, 15239, 15241, 15243, 15245, 15247, 15249, 15251, 15253, 15255, 15257, 15259, 15261, 15263, 15265, 15267, 15269, 15271, 15274, 15276, 15279, 15281, 
15284, 15286, 15289, 15291, 15294, 15296, 15299, 15301, 15304, 15306, 15309, 15311, 15314, 15316, 15322, 15324, 15327, 15329, 15332, 15334, 15337, 15339, 15342, 15344, 15347, 15349, 15352, 15354, 15357, 15359, 15362, 15364, 15366, 15368, 15370, 15372, 15375, 15377, 15380, 15382, 15385, 15387, 15390, 15392, 15394, 15396, 15399, 15401, 15403, 15405, 15409, 15411, 15413, 15415, 15417, 15419, 15231, 15231, 14442, 14442, 14451, 14451, 14460, 14460, 14392, 14417, 14442, 14442, 14451, 14451, 14331, 14331, 14392, 14417, 14442, 14442, 14451, 14451, 14460, 14460, 15231, 15231, 15319, 15319, 15397, 15406, 15565, 15565, 15973, 15973, 16412, 16414, 16417, 16419, 16421, 16423, 16432, 16434, 16445, 16447, 16449, 16451, 16453, 16455, 16457, 16459, 16660, 16662, 16664, 16666, 16668, 16670, 16673, 16675, 16678, 16680, 16683, 16685, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17025, 17027, 17029, 17031, 17033, 17035, 17037, 17039, 17041, 17043, 17045, 17047, 17049, 17051, 17053, 17055, 17057, 17059, 17061, 17063, 17065, 17067, 17069, 17071, 17073, 17075, 17077, 17079, 17081, 17083, 17085, 17087, 17089, 17091, 17093, 17095, 17097, 17099, 17101, 17103, 17105, 17107, 17109, 17111, 17113, 17115, 17117, 17119, 17121, 17123, 17125, 17127, 17129, 17131, 17133, 17135, 17137, 17139, 17141, 17143, 17145, 17147, 17149, 17151, 17153, 17155, 17157, 17159, 17161, 17163, 17165, 17167, 17169, 17171, 17173, 17175, 17177, 17179, 17181, 17183, 17185, 17187, 17189, 17191, 17193, 17195, 17197, 17199, 17201, 17203, 17205, 17207, 17209, 17211, 17213, 17215, 17217, 17219, 17221, 17223, 17225, 17227, 17229, 17231, 17233, 17235, 17237, 17239, 17241, 17243, 17245, 17247, 17249, 17251, 17253, 17255, 17257, 17259, 17261, 17263, 17265, 17267, 17269, 17271, 17273, 17275, 17277, 17279, 17281, 17283, 17285, 17287, 17289, 17291, 17293, 17295, 17297, 17299, 17301, 17303, 17305, 17307, 17309, 17311, 17313, 17315, 17317, 17319, 17321, 17323, 17325, 17327, 17329, 17331, 17333, 17335, 17337, 17339, 17341, 17343, 
17345, 17347, 17349, 17351, 17353, 17355, 17357, 17359, 17361, 17363, 17365, 17367, 17369, 17371, 17373, 17375, 17377, 17379, 17381, 17383, 17385, 17387, 17389, 17391, 17393, 17395, 17397, 17399, 17401, 17403, 17405, 17407, 17409, 17411, 17413, 17415, 17417, 17419, 17421, 17423, 17425, 17427, 17429, 17431, 17433, 17435, 17437, 17439, 17441, 17443, 17445, 17447, 17449, 17451, 17453, 17455, 17457, 17459, 17461, 17463, 17465, 17467, 17469, 17471, 17473, 17475, 17477, 17479, 17481, 17483, 17485, 17487, 17489, 17491, 17493, 17495, 17497, 17499, 17501, 17503, 17505, 17507, 17509, 17511, 17513, 17515, 17517, 17519, 17521, 17523, 17525, 17527, 17529, 17531, 17533, 17535, 17537, 17539, 17541, 17543, 17545, 17547, 17549, 17551, 17553, 17555, 17557, 17559, 17561, 17563, 17565, 17567, 17569, 17571, 17573, 17575, 17577, 17579, 17581, 17583, 17585, 17587, 17589, 17591, 17593, 17595, 17597, 17599, 17601, 17603, 17605, 17607, 17609, 17611, 17613, 17615, 17617, 17619, 17621, 17623, 17625, 17627, 17629, 17631, 17633, 17635, 17637, 17639, 17641, 17643, 17645, 17647, 17649, 17651, 17653, 17655, 17657, 17659, 17661, 17663, 17665, 17667, 17669, 17671, 17673, 17675, 17677, 17679, 17681, 17683, 17685, 17687, 17689, 17691, 17693, 17695, 17697, 17699, 17701, 17703, 17705, 17707, 17709, 17711, 17713, 17715, 17717, 17719, 17721, 17723, 17725, 17727, 17729, 17731, 17733, 17735, 17737, 17739, 17741, 17743, 17745, 17747, 17749, 17751, 17753, 17755, 17757, 17759, 17761, 17763, 17765, 17767, 17769, 17771, 17773, 17775, 17777, 17779, 17781, 17783, 17785, 17787, 17789, 17791, 17793, 17795, 17797, 17799, 17801, 17803, 17805, 17807, 17809, 17811, 17813, 17815, 17817, 17819, 17821, 17823, 17825, 17827, 17829, 17831, 17833, 17835, 17837, 17839, 17841, 17843, 17845, 17847, 17849, 17851, 17853, 17855, 17857, 17859, 17861, 17863, 17865, 17867, 17869, 17871, 17873, 17875, 17877, 17879, 17881, 17883, 17885, 17887, 17889, 17891, 17893, 17895, 17897, 17899, 17901, 17903, 17905, 17907, 17909, 17911, 17913, 
17915, 17917, 17919, 17921, 17923, 17925, 17927, 17929, 17931, 17933, 17935, 17937, 17939, 17941, 17943, 17945, 17947, 17949, 17951, 17953, 17955, 17957, 17959, 17961, 17963, 17965, 17967, 17969, 17971, 17973, 17975, 17977, 17979, 17981, 17983, 17985, 17987, 17989, 17991, 17993, 17995, 17997, 17999, 18001, 18003, 18005, 18007, 18009, 18011, 18013, 18015, 18017, 18019, 18021, 18023, 18025, 18027, 18029, 18031, 18033, 18035, 18037, 18039, 18041, 18043, 18045, 18047, 18049, 18051, 18053, 18055, 18057, 18059, 18061, 18063, 18065, 18067, 18069, 18071, 18073, 18075, 18077, 18079, 18081, 18083, 18085, 18087, 18089, 18091, 18093, 18095, 18097, 18099, 18101, 18103, 18105, 18107, 18109, 18111, 18113, 18115, 18117, 18119, 18121, 18123, 18125, 18127, 18129, 18131, 18133, 18135, 18137, 18139, 18141, 18143, 18145, 18147, 18149, 18151, 18153, 18155, 18157, 18159, 18161, 18163, 18165, 18167, 18169, 18171, 18173, 18175, 18177, 18179, 18181, 18183, 18185, 18187, 18189, 18191, 18193, 18195, 18197, 18199, 18201, 18203, 18205, 18207, 18209, 18211, 18213, 18215, 18217, 18219, 18221, 18223, 18225, 18227, 18229, 18231, 18233, 18235, 18237, 18239, 18241, 18243, 18245, 18247, 18249, 18251, 18253, 18255, 18257, 18259, 18261, 18263, 18265, 18267, 18269, 18271, 18273, 18275, 18277, 18279, 18281, 18283, 18285, 18287, 18289, 18291, 18293, 18295, 18297, 18299, 18301, 18303, 18305, 18307, 18309, 18311, 18313, 18315, 18317, 18319, 18321, 18323, 18325, 18327, 18329, 18331, 18333, 18335, 18337, 18339, 18341, 18343, 18345, 18347, 18349, 18351, 18353, 18355, 18357, 18359, 18361, 18363, 18365, 18367, 18369, 18371, 18373, 18375, 18377, 18379, 18381, 18383, 18385, 18387, 18389, 18391, 18393, 18395, 18397, 18399, 18401, 18403, 18405, 18407, 18409, 18411, 18413, 18415, 18417, 18419, 18421, 18423, 18425, 18427, 18429, 18431, 18433, 18435, 18437, 18439, 18441, 18443, 18445, 18447, 18449, 18451, 18453, 18455, 18457, 18459, 18461, 18463, 18465, 18467, 18469, 18471, 18473, 18475, 18477, 18479, 18481, 18483, 
18485, 18487, 18489, 18491, 18493, 18495, 18497, 18499, 18501, 18503, 18505, 18507, 18509, 18511, 18513, 18515, 18517, 18519, 18521, 18523, 18525, 18527, 18529, 18531, 18533, 18535, 18537, 18539, 18541, 18543, 18545, 18547, 18549, 18551, 18553, 18555, 18557, 18559, 18561, 18563, 18565, 18567, 18569, 18571, 18573, 18575, 18577, 18579, 18581, 18583, 18585, 18587, 18589, 18591, 18593, 18595, 18597, 18599, 18601, 18603, 18605, 18607, 18609, 18611, 18613, 18615, 18617, 18619, 18621, 18623, 18625, 18627, 18629, 18631, 18633, 18635, 18637, 18639, 18641, 18643, 18645, 18647, 18649, 18651, 18653, 18655, 18657, 18659, 18661, 18663, 18665, 18667, 18669, 18671, 18673, 18675, 18677, 18679, 18681, 18683, 18685, 18687, 18689, 18691, 18693, 18695, 18697, 18699, 18701, 18703, 18705, 18707, 18709, 18711, 18713, 18715, 18717, 18719, 18721, 18723, 18725, 18727, 18729, 18731, 18733, 18735, 18737, 18739, 18741, 18743, 18745, 18747, 18749, 18751, 18753, 18755, 18757, 18759, 18761, 18763, 18765, 18767, 18769, 18771, 18773, 18775, 18777, 18779, 18781, 18783, 18785, 18787, 18789, 18791, 18793, 18795, 18797, 18799, 18801, 18803, 18805, 18807, 18809, 18811, 18813, 18815, 18817, 18819, 18821, 18823, 18825, 18827, 18829, 18831, 18833, 18835, 18837, 18839, 18841, 18843, 18845, 18847, 18849, 18851, 18853, 18855, 18857, 18859, 18861, 18863, 18865, 18867, 18869, 18871, 18873, 18875, 18877, 18879, 18881, 18883, 18885, 18887, 18889, 18891, 18893, 18895, 18897, 18899, 18901, 18903, 18905, 18907, 18909, 18911, 18913, 18915, 18917, 18919, 18921, 18923, 18925, 18927, 18929, 18931, 18933, 18935, 18937, 18939, 18941, 18943, 18945, 18947, 18949, 18951, 18953, 18955, 18957, 18959, 18961, 18963, 18965, 18967, 18969, 18971, 18973, 18975, 18977, 18979, 18981, 18983, 18985, 18987, 18989, 18991, 18993, 18995, 18997, 18999, 19001, 19003, 19005, 19007, 19009, 19011, 19013, 19015, 19017, 19019, 19021, 19023, 19025, 19027, 19029, 19031, 19033, 19035, 19037, 19039, 19041, 19043, 19045, 19047, 19049, 19051, 19053, 
19055, 19057, 19059, 19061, 19063, 19065, 19067, 19069, 19071, 19073, 19075, 19077, 19079, 19081, 19083, 19085, 19087, 19089, 19091, 19093, 19095, 19097, 19099, 19101, 19103, 19105, 19107, 19109, 19111, 19113, 19115, 19117, 19119, 19121, 19123, 19125, 19127, 19129, 19131, 19133, 19135, 19137, 19139, 19141, 19143, 19145, 19147, 19149, 19151, 19153, 19155, 19157, 19159, 19161, 19163, 19165, 19167, 19169, 19171, 19173, 19175, 19177, 19179, 19181, 19183, 19185, 19187, 19189, 19191, 19193, 19195, 19197, 19199, 19201, 19203, 19205, 19207, 19209, 19211, 19213, 19215, 19217, 19219, 19221, 19223, 19225, 19227, 19229, 19231, 19233, 19235, 19237, 19239, 19241, 19243, 19245, 19247, 19249, 19251, 19253, 19255, 19257, 19259, 19261, 19263, 19265, 19267, 19269, 19271, 19273, 19275, 19277, 19279, 19281, 19283, 19285, 19287, 19289, 19291, 19293, 19295, 19297, 19299, 19301, 19303, 19305, 19307, 19309, 19311, 19313, 19315, 19317, 19319, 19321, 19323, 19325, 19327, 19329, 19331, 19333, 19335, 19337, 19339, 19341, 19343, 19345, 19347, 19349, 19351, 19353, 19355, 19357, 19359, 19361, 19363, 19365, 19367, 19369, 19371, 19373, 19375, 19377, 19379, 19381, 19383, 19385, 19387, 19389, 19391, 19393, 19395, 19397, 19399, 19401, 19403, 19405, 19407, 19409, 19411, 19413, 19415, 19417, 19419, 19421, 19423, 19425, 19427, 19429, 19431, 19433, 19435, 19437, 19439, 19441, 19443, 19445, 19447, 19449, 19451, 19453, 19455, 19457, 19459, 19461, 19463, 19465, 19467, 19469, 19471, 19473, 19475, 19477, 19479, 19481, 19483, 19485, 19487, 19489, 19491, 19493, 19495, 19497, 19499, 19501, 19503, 19505, 19507, 19509, 19511, 19513, 19515, 19517, 19519, 19521, 19523, 19525, 19527, 19529, 19531, 19533, 19535, 19537, 19539, 19541, 19543, 19545, 19547, 19549, 19551, 19553, 19555, 19557, 19559, 19561, 19563, 19565, 19567, 19569, 19571, 19573, 19575, 19577, 19579, 19581, 19583, 19585, 19587, 19589, 19591, 19593, 19595, 19597, 19599, 19601, 19603, 19605, 19607, 19609, 19611, 19613, 19615, 19617, 19619, 19621, 19623, 
19625, 19627, 19629, 19631, 19633, 19635, 19637, 19639, 19641, 19643, 19645, 19647, 19649, 19651, 19653, 19655, 19657, 19659, 19661, 19663, 19665, 19667, 19669, 19671, 19673, 19675, 19677, 19679, 19681, 19683, 19685, 19687, 19689, 19691, 19693, 19695, 19697, 19699, 19701, 19703, 19705, 19707, 19709, 19711, 19713, 19715, 19717, 19719, 19721, 19723, 19725, 19727, 19729, 19731, 19733, 19735, 19737, 19739, 19741, 19743, 19745, 19747, 19749, 19751, 19753, 19755, 19757, 19759, 19761, 19763, 19765, 19767, 19769, 19771, 19773, 19775, 19777, 19779, 19781, 19783, 19785, 19787, 19789, 19791, 19793, 19795, 19797, 19799, 19801, 19803, 19805, 19807, 19809, 19811, 19813, 19815, 19817, 19819, 19821, 19823, 19825, 19827, 19829, 19831, 19833, 19835, 19837, 19839, 19841, 19843, 19845, 19847, 19849, 19851, 19853, 19855, 19857, 19859, 19861, 19863, 19865, 19867, 19869, 19871, 19873, 19875, 19877, 19879, 19881, 19883, 19885, 19887, 19889, 19891, 19893, 19895, 19897, 19899, 19901, 19903, 19905, 19907, 19909, 19911, 19913, 19915, 19917, 19919, 19921, 19923, 19925, 19927, 19929, 19931, 19933, 19935, 19937, 19939, 19941, 19943, 19945, 19947, 19949, 19951, 19953, 19955, 19957, 19959, 19961, 19963, 19965, 19967, 19969, 19971, 19973, 19975, 19977, 19979, 19981, 19983, 19985, 19987, 19989, 19991, 19993, 19995, 19997, 19999, 20001, 20003, 20005, 20007, 20009, 20011, 20013, 20015, 20017, 20019, 20021, 20023, 20025, 20027, 20029, 20031, 20033, 20035, 20037, 20039, 20041, 20043, 20045, 20047, 20049, 20051, 20053, 20055, 20057, 20059, 20061, 20063, 20065, 20067, 20069, 20071, 20073, 20075, 20077, 20079, 20081, 20083, 20085, 20087, 20089, 20091, 20093, 20095, 20097, 20099, 20101, 20103, 20105, 20107, 20109, 20111, 20113, 20115, 20117, 20119, 20121, 20123, 20125, 20127, 20129, 20131, 20133, 20135, 20137, 20139, 20141, 20143, 20145, 20147, 20149, 20151, 20153, 20155, 20157, 20159, 20161, 20163, 20165, 20167, 20169, 20171, 20173, 20175, 20177, 20179, 20181, 20183, 20185, 20187, 20189, 20191, 20193, 
20195, 20197, 20199, 20201, 20203, 20205, 20207, 20209, 20211, 20213, 20215, 20217, 20219, 20221, 20223, 20225, 20227, 20229, 20231, 20233, 20235, 20237, 20239, 20241, 20243, 20245, 20247, 20249, 20251, 20253, 20255, 20257, 20259, 20261, 20263, 20265, 20267, 20269, 20271, 20273, 20275, 20277, 20279, 20281, 20283, 20285, 20287, 20289, 20291, 20293, 20295, 20297, 20299, 20301, 20303, 20305, 20307, 20309, 20311, 20313, 20315, 20317, 20319, 20321, 20323, 20325, 20327, 20329, 20331, 20333, 20335, 20337, 20339, 20341, 20343, 20345, 20347, 20349, 20351, 20353, 20355, 20357, 20359, 20361, 20363, 20365, 20367, 20369, 20371, 20373, 20375, 20377, 20379, 20381, 20383, 20385, 20387, 20389, 20391, 20393, 20395, 20397, 20399, 20401, 20403, 20405, 20407, 20409, 20411, 20413, 20415, 20417, 20419, 20421, 20423, 20425, 20427, 20429, 20431, 20433, 20435, 20437, 20439, 20441, 20443, 20445, 20447, 20449, 20451, 20453, 20455, 20457, 20459, 20461, 20463, 20465, 20467, 20469, 20471, 20473, 20475, 20477, 20479, 20481, 20483, 20485, 20487, 20489, 20491, 20493, 20495, 20497, 20499, 20501, 20503, 20505, 20507, 20509, 20511, 20513, 20515, 20517, 20519, 20521, 20523, 20525, 20527, 20529, 20531, 20533, 20535, 20537, 20539, 20541, 20543, 20545, 20547, 20549, 20551, 20553, 20555, 20557, 20559, 20561, 20563, 20565, 20567, 20569, 20571, 20573, 20575, 20577, 20579, 20581, 20583, 20585, 8006, 8007, 8008, 8009, 8010, 8011, 20593, 20595, 20597, 8026, 8027, 8032, 8033, 8034, 8035, 8036, 8037, 8038, 8039, 8042, 8043, 8046, 8047, 8050, 8051, 8113, 8114, 8121, 8122, 8131, 8132, 8155, 8156, 8159, 8160, 8166, 8167, 20627, 20629, 20631, 20633, 20635, 20637, 8193, 8194, 8195, 8196, 8199, 8200, 8202, 8203, 8226, 8227, 8230, 8231, 8278, 8279, 8282, 8285, 8296, 8297, 8320, 8323, 8336, 8339, 8367, 8368, 8371, 8372, 8376, 8377, 8378, 8379, 8380, 8381, 20671, 20673, 20675, 20677, 8404, 8405, 8408, 8409, 8410, 8411, 8412, 8413, 8427, 8428, 8429, 8430, 8431, 8432, 8433, 8434, 8453, 8454, 8481, 8483, 8579, 8580, 8590, 
8593, 8609, 8610, 8611, 8612, 8613, 8614, 8634, 8637, 8646, 8649, 8727, 8729, 8787, 8788, 8795, 8796, 8799, 8800, 8803, 8804, 8807, 8808, 8825, 8826, 8833, 8834, 8837, 8838, 8841, 8842, 8845, 8846, 8931, 8934, 8943, 8944, 8947, 8948, 8951, 8952, 8955, 8956, 8973, 8986, 9064, 9065, 9068, 9069, 9083, 9084, 9091, 9092, 9096, 9097, 9103, 9104, 9109, 9110, 9113, 9114, 9185, 9186, 9187, 9188, 9189, 9190, 9208, 9209, 9211, 9212, 9219, 9222, 9223, 9224, 9225, 9226, 9227, 9228, 9229, 9230, 9244, 9245, 9248, 9249, 9274, 9275, 9278, 9279, 9317, 9320, 9362, 9365, 9368, 9369, 9401, 9402, 9405, 9406, 9413, 9414, 9425, 9426, 9429, 9430, 9437, 9438, 9451, 9452, 9471, 9472, 9475, 9476, 9479, 9480, 9496, 9497, 9500, 9501, 9504, 9507, 9510, 9511, 9525, 9527, 9531, 9532, 9534, 9535, 9552, 9553, 9556, 9557, 9597, 9600, 9606, 9609, 9618, 9619, 9622, 9623, 9695, 9696, 9697, 9698, 9710, 9711, 9714, 9715, 9724, 9725, 9728, 9729, 9734, 9735, 9738, 9739, 9767, 9768, 9771, 9772, 9787, 9788, 9791, 9792, 9817, 9818, 9834, 9835, 9851, 9853, 9855, 9857, 9859, 9862, 9865, 9866, 9868, 9869, 9879, 9880, 9883, 9884, 9893, 9894, 9897, 9898, 9904, 9905, 9914, 9915, 9918, 9919, 9925, 9926, 9943, 9944, 9950, 9951, 9956, 9957, 9960, 9961, 9983, 9986, 10004, 10005, 10008, 10011, 10014, 10015, 10084, 10085, 10087, 10088, 10104, 10121, 10133, 10134, 10137, 10138, 10145, 10148, 10154, 10155, 10158, 10159, 10162, 10163, 10166, 10167, 10192, 10193, 10196, 10197, 10236, 10237, 10247, 10248, 10251, 10252, 10292, 10293, 10298, 10301, 10307, 10310, 10311, 10314, 10330, 10331, 10334, 10335, 10362, 10364, 10376, 10378, 10393, 10394, 20961, 20963, 20965, 20967, 20969, 20971, 20973, 20975, 20977, 20979, 20981, 20983, 20985, 20987, 20989, 20991, 20993, 20995, 20997, 20999, 21001, 21003, 21005, 21007, 21009, 21011, 21013, 21015, 21017, 21019, 21021, 21023, 21025, 21027, 21029, 21031, 21033, 21035, 21037, 21039, 21041, 21043, 21045, 21047, 21049, 21051, 21053, 21055, 21057, 21059, 21061, 21063, 21065, 21067, 21069, 21071, 
21073, 21075, 21077, 21079, 21081, 21083, 21085, 21087, 21089, 21091, 21093, 21095, 21097, 21099, 21101, 21103, 21105, 21107, 21109, 21111, 21113, 21115, 21117, 21119, 21121, 21123, 21125, 21127, 21129, 21131, 21133, 21135, 21137, 21139, 21141, 21143, 21145, 21147, 21149, 21151, 21153, 21155, 21157, 21159, 21161, 21163, 21165, 21167, 21169, 21171, 21173, 21175, 21177, 21179, 21181, 21183, 21185, 21187, 21189, 21191, 21193, 21195, 21197, 21199, 21201, 21203, 21205, 21207, 21209, 21211, 21213, 21215, 21217, 21219, 21221, 21223, 21225, 21227, 21229, 21231, 21233, 21235, 21237, 21239, 21241, 21243, 21245, 21247, 21249, 21251, 21253, 21255, 21257, 21259, 21261, 21263, 21265, 21267, 21269, 21271, 21273, 21275, 21277, 21279, 21281, 21283, 21285, 21287, 21289, 21291, 21293, 21295, 21297, 21299, 21301, 21303, 21305, 21307, 21309, 21311, 21313, 21315, 21317, 21319, 21321, 21323, 21325, 21327, 21329, 21331, 21333, 21335, 21337, 21339, 21341, 21343, 21345, 21347, 21349, 21351, 21353, 21355, 21357, 21359, 21361, 21363, 21365, 21367, 21369, 21371, 21373, 21375, 21377, 21379, 21381, 21383, 21385, 21387, 21389, 21391, 21393, 21395, 21397, 21399, 21401, 21403, 21405, 21407, 21409, 21411, 21413, 21415, 21417, 21419, 21421, 21423, 21425, 21427, 21429, 21431, 21433, 21435, 21437, 21439, 21441, 21443, 21445, 21447, 21449, 21451, 21453, 21455, 21457, 21459, 21461, 21463, 21465, 21467, 21469, 21471, 21473, 21475, 21477, 21479, 21481, 21483, 21485, 21487, 21489, 21491, 21493, 21495, 21497, 21499, 21501, 11666, 11667, 11684, 11685, 11688, 11689, 11696, 11697, 21511, 21513, 21515, 21517, 21519, 21521, 21523, 21525, 21527, 21529, 21531, 21533, 21535, 21537, 21539, 21541, 21543, 21545, 21547, 21549, 21551, 21553, 21555, 21557, 21559, 21561, 21563, 21565, 21567, 21569, 21571, 21573, 21575, 21577, 11994, 11995, 12002, 12003, 12089, 12090, 12099, 12100, 12101, 12102, 12103, 12104, 12105, 12106, 12107, 12108, 12111, 12112, 12115, 12116, 12117, 12118, 12119, 12120, 12121, 12122, 12123, 12124, 
12127, 12128, 12895, 12898, 12904, 12905, 12910, 12911, 12914, 12915, 12924, 12925, 12928, 12929, 12946, 12947, 12950, 12951, 12956, 12959, 12962, 12963, 12972, 12973, 12977, 12978, 12981, 12982, 12989, 12992, 12993, 12997, 12998, 13001, 13002, 13009, 13017, 13018, 13021, 13022, 13029, 13032, 13046, 13047, 13050, 13051, 13058, 13059, 13076, 13077, 13086, 13087, 13090, 13091, 13094, 13095, 13104, 13105, 13108, 13109, 13121, 13122, 13125, 13126, 13140, 13141, 13155, 13156, 13174, 13177, 13180, 13183, 13186, 13187, 13188, 13189, 13190, 13191, 13192, 13193, 13194, 13195, 13198, 13199, 13202, 13203, 13204, 13205, 13206, 13207, 13208, 13209, 13210, 13211, 13212, 13213, 13220, 13221, 13228, 13229, 13238, 13245, 13252, 13255, 13258, 13259, 13262, 13263, 13266, 13267, 13270, 13271, 13278, 13279, 13286, 13287, 13294, 13297, 21725, 21727, 21729, 21731, 21733, 21735, 21737, 21739, 21741, 21743, 21745, 21747, 21749, 21751, 21753, 21755, 21757, 21759, 21761, 21763, 21765, 21767, 21769, 21771, 21773, 21775, 21777, 21779, 21781, 21783, 21785, 21787, 21789, 21791, 21793, 21795, 21797, 21799, 21801, 21803, 21805, 21807, 21809, 21811, 21813, 21815, 21817, 21819, 21821, 21823, 21825, 21827, 21829, 21831, 21833, 21835, 21837, 21839, 21841, 21843, 21845, 21847, 21849, 21851, 21853, 21855, 21857, 21859, 21861, 21863, 21865, 21867, 21869, 21871, 21873, 21875, 21877, 21879, 21881, 21883, 21885, 21887, 21889, 21891, 21893, 21895, 21897, 21899, 21901, 21903, 21905, 21907, 21909, 21911, 21913, 21915, 21917, 21919, 21921, 21923, 21925, 21927, 21929, 21931, 21933, 21935, 21937, 21939, 21941, 21943, 21945, 21947, 21949, 21951, 21953, 21955, 21957, 21959, 21961, 21963, 21965, 21967, 21969, 21971, 21973, 21975, 21977, 21979, 21981, 21983, 21985, 21987, 21989, 21991, 21993, 21995, 21997, 21999, 22001, 22003, 22005, 22007, 22009, 22011, 22013, 22015, 22017, 22019, 22021, 22023, 22025, 22027, 22029, 22031, 22033, 22035, 22037, 22039, 22041, 22043, 22045, 22047, 22049, 22051, 22053, 14165, 14166, 
14167, 14168, 14179, 14180, 14181, 14182, 14195, 14196, 14200, 14201, 14216, 14217, 22069, 22071, 22073, 22075, 22077, 22079, 22081, 22083, 22085, 22087, 22089, 22091, 22093, 22095, 22097, 22099, 22101, 22103, 22105, 22107, 22109, 22111, 22113, 22115, 22117, 22119, 22121, 22123, 14358, 14359, 22127, 22129, 22131, 22133, 22135, 22137, 22139, 22141, 22143, 22145, 22147, 22149, 22151, 22153, 22155, 22157, 22159, 22161, 22163, 22165, 22167, 22169, 22171, 22173, 22175, 22177, 22179, 22181, 22183, 15110, 15111, 15114, 15115, 15118, 15119, 15122, 15123, 15151, 15154, 15176, 15177, 15180, 15181, 15193, 15194, 15197, 15198, 15213, 15214, 15217, 15218, 22207, 22209, 22211, 22213, 22215, 22217, 22219, 22221, 22223, 22225, 22227, 22229, 22231, 22233, 22235, 22237, 22239, 22241, 22243, 22245, 22247, 22249, 22251, 22253, 22255, 22257, 22259, 22261, 22263, 22265, 22267, 22269, 22271, 22273, 22275, 22277, 22279, 22281, 22283, 22285, 22287, 22289, 22291, 15475, 15476, 15482, 15483, 15486, 15487, 15490, 15491, 15503, 15506, 15511, 15512, 15515, 15516, 15519, 15520, 15537, 15544, 15551, 15552, 15555, 15556, 15559, 15560, 15928, 15929, 15950, 15951, 15967, 15969, 16038, 16039, 16330, 16331, 22327, 22329, 22331, 22333, 22335, 22337, 22339, 22341, 22343, 22345, 22347, 22349, 22351, 22353, 8, 9, 10, 11, 12, 13, 14, 15, 24150, 24152, 24154, 23524, 23523, 22549, 22548, 22551, 22368, 24161, 24163, 24165, 24167, 22370, 22369, 23481, 23480, 24171, 23483, 23482, 24173, 23485, 23484, 4988, 23488, 23487, 5003, 23491, 22371, 23493, 22372, 23494, 23495, 22373, 22374, 22376, 22375, 22378, 22377, 22379, 22381, 22382, 22384, 22386, 22385, 22388, 22387, 22390, 22389, 22392, 22391, 22627, 22626, 22628, 22630, 22629, 22631, 22633, 22632, 22635, 22634, 22637, 22636, 22638, 22393, 22641, 22640, 22642, 22644, 22643, 22394, 22683, 22395, 22662, 22797, 22799, 22687, 22667, 22690, 22689, 22692, 22691, 24175, 22669, 22398, 22400, 22399, 22402, 22401, 24177, 22404, 22403, 22406, 22405, 22407, 22408, 22410, 
22409, 24179, 22759, 22758, 22761, 22760, 22411, 22686, 22413, 22412, 22414, 22416, 22415, 22418, 22417, 22770, 22419, 22841, 22840, 22844, 22846, 22420, 22848, 22421, 24181, 22423, 22422, 24183, 22425, 22424, 22426, 22428, 22427, 24185, 24193, 24195, 22429, 22431, 24197, 22433, 24199, 22434, 22436, 22435, 22437, 22439, 22438, 22440, 22442, 22441, 22443, 22445, 22444, 22446, 22448, 22447, 22450, 22449, 23684, 23683, 23685, 22452, 22451, 24201, 22454, 22453, 24203, 23681, 23621, 22455, 23696, 23622, 23624, 23623, 22457, 22456, 22459, 22458, 22460, 22463, 22462, 22465, 22464, 22466, 22468, 22467, 22470, 22469, 22472, 22471, 22474, 22473, 22476, 22475, 22477, 23626, 23625, 22479, 22478, 23630, 23629, 22481, 22480, 23641, 23640, 23839, 23634, 23841, 23840, 23843, 23642, 23636, 22482, 24205, 22484, 22483, 22486, 22485, 23641, 23640, 23839, 22487, 23841, 23840, 23843, 23642, 23636, 23635, 24209, 23852, 23851, 23639, 22488, 22491, 22490, 22493, 22492, 22495, 22494, 22496, 22499, 22498, 22504, 22503, 22506, 22505, 22500, 22510, 22509, 22531, 22530, 22533, 22501, 22511, 22534, 22502, 22504, 22503, 22506, 22505, 22507, 22510, 22509, 22531, 22530, 22533, 22532, 22511, 22534, 22535, 22513, 22512, 22514, 22517, 22516, 22519, 22518, 22521, 22520, 22523, 22522, 23833, 22524, 23835, 23834, 22526, 22525, 23804, 22527, 22528, 23808, 23807, 22531, 22530, 24215, 22533, 22532, 24217, 22534, 23813, 22535, 24219, 24221, 24223, 24077, 22536, 24076, 22538, 22537, 24229, 22540, 22539, 24231, 24233, 24235, 22542, 22541, 22839, 22838, 22544, 22543, 22546, 22545, 22547, 22549, 22548, 22551, 22550, 24237, 24239, 24241, 24243, 23194, 23193, 23196, 23195, 22552, 23200, 23199, 23156, 23155, 23158, 23157, 23160, 23159, 23161, 23164, 23163, 22554, 22553, 22556, 22555, 22567, 22566, 22569, 22568, 22570, 22572, 22558, 22577, 22559, 22578, 22561, 22560, 22562, 22583, 22563, 22584, 22586, 22585, 22587, 22564, 22567, 22566, 22569, 22568, 22570, 22572, 22574, 22577, 22576, 22578, 22579, 22581, 22583, 
22582, 22584, 22586, 22585, 22587, 22589, 22588, 22591, 22590, 22592, 22594, 22595, 22597, 22599, 22598, 22600, 22602, 22601, 22603, 22605, 22604, 1000, 22608, 22607, 1015, 22611, 22610, 22613, 22612, 22615, 22614, 22617, 22616, 22619, 22618, 22621, 22620, 22623, 22622, 22625, 22624, 22627, 22626, 22628, 22630, 22629, 22631, 22633, 22632, 22635, 22634, 22637, 22636, 22639, 22638, 22641, 22640, 22642, 22644, 22643, 22645, 22646, 22649, 22648, 22651, 22650, 22652, 22653, 22656, 22655, 22658, 22657, 22660, 22659, 22661, 22683, 22791, 22662, 22665, 22664, 22799, 22687, 22667, 22690, 22689, 22668, 22692, 24249, 22669, 22671, 22670, 22673, 22672, 22675, 22674, 22677, 22676, 22679, 22678, 22681, 22680, 22682, 22683, 22791, 22685, 22684, 22797, 22686, 22687, 22796, 22690, 22689, 22692, 22691, 24253, 24256, 22693, 22696, 22695, 22698, 22697, 22699, 22701, 22700, 22703, 22702, 22705, 22704, 22706, 22708, 22707, 22710, 22709, 22712, 22711, 22714, 22713, 22716, 22715, 1496, 22719, 22718, 1511, 22722, 22721, 22724, 22723, 22726, 22725, 22727, 22729, 22728, 22730, 22732, 22731, 22733, 22735, 22734, 22736, 22738, 22737, 22740, 22739, 22742, 22741, 22744, 22743, 22746, 22745, 22748, 22747, 22749, 22751, 22750, 22753, 22752, 22755, 22754, 22757, 22756, 22759, 22758, 22761, 22760, 22762, 22765, 22764, 22766, 22768, 22767, 22769, 22770, 22771, 22773, 22772, 22775, 22774, 22777, 22776, 22778, 22779, 22781, 22783, 22782, 22785, 22784, 22787, 22786, 22789, 22788, 22791, 22790, 22793, 22792, 22795, 22794, 22796, 22797, 22799, 22801, 22800, 22803, 22802, 22804, 22805, 22806, 22809, 22808, 22811, 22810, 22812, 22814, 22813, 22815, 22817, 22816, 22818, 22820, 22819, 22821, 22823, 22822, 22824, 22826, 22825, 22827, 22829, 22828, 22831, 22830, 22833, 22832, 22834, 22836, 22835, 22837, 22839, 22838, 22841, 22840, 23034, 23033, 23035, 22843, 22842, 22844, 22846, 22845, 22848, 22847, 22860, 22859, 22850, 22849, 22864, 22851, 22865, 22867, 22866, 22868, 22869, 22852, 24265, 22872, 22871, 22874, 
22873, 22876, 22875, 24267, 22878, 22877, 24269, 22880, 22879, 24271, 22854, 22853, 24273, 22856, 22855, 22858, 22857, 22860, 22859, 22862, 22861, 22864, 22863, 22865, 22867, 22866, 22868, 22870, 22869, 24275, 22872, 22871, 22874, 22873, 22876, 22875, 24277, 22878, 22877, 24279, 22880, 22879, 24281, 22882, 22881, 24283, 22884, 22883, 22885, 22887, 22886, 22888, 22891, 22890, 22893, 22892, 22895, 22894, 22896, 22898, 22897, 22899, 22902, 22901, 22903, 22913, 22912, 22915, 22904, 22917, 22916, 22905, 22906, 22908, 22909, 22911, 22910, 22913, 22912, 22915, 22914, 22917, 22916, 22918, 22919, 22921, 22922, 22924, 22923, 22925, 22927, 22926, 22928, 22930, 22932, 22935, 22934, 22937, 22936, 22939, 22938, 22940, 22942, 22941, 22943, 22945, 22944, 22947, 22946, 22949, 22948, 22950, 22953, 22952, 22954, 22956, 22955, 22957, 23123, 22958, 23124, 23126, 23125, 23127, 22961, 22960, 22963, 22962, 22965, 22964, 22967, 22966, 22969, 22968, 22970, 23106, 23105, 23107, 22972, 22971, 24287, 22974, 22973, 24289, 22976, 22975, 24291, 22978, 22977, 24293, 22980, 22979, 22982, 22981, 23140, 23142, 22983, 23144, 23147, 22985, 22986, 23134, 23136, 22987, 23138, 22989, 22988, 23143, 23142, 23144, 23147, 23146, 23149, 23148, 23151, 23150, 23153, 22989, 22991, 22990, 22993, 22992, 22994, 22997, 22996, 22999, 22998, 23001, 23000, 23003, 23002, 23004, 23006, 23005, 23007, 23009, 23008, 23011, 23010, 23013, 23012, 23015, 23014, 23017, 23016, 23019, 23018, 23021, 23020, 23022, 23024, 23023, 23025, 23027, 23026, 23029, 23028, 23031, 23030, 23032, 23034, 23033, 23035, 23037, 23036, 23039, 23038, 23041, 23040, 23043, 23042, 23045, 23044, 23047, 23046, 23049, 23048, 23051, 23050, 23053, 23052, 23055, 23054, 23057, 23056, 23059, 23058, 23061, 23060, 23062, 23064, 23063, 23065, 23067, 23066, 24297, 23069, 23068, 24299, 23070, 23073, 23072, 23075, 23074, 23077, 23076, 23079, 23078, 23081, 23080, 23083, 23082, 24301, 23085, 23084, 23087, 23086, 23089, 23088, 24303, 23091, 23090, 23092, 24305, 23094, 
23093, 23096, 23095, 23097, 24307, 23099, 23098, 23101, 23100, 24309, 23103, 23102, 24311, 23117, 23116, 23104, 23120, 23119, 23121, 23106, 23105, 23107, 23109, 23108, 23111, 23110, 23113, 23112, 23114, 23117, 23116, 23118, 23120, 23119, 23121, 23123, 23122, 23124, 23126, 23125, 23127, 23129, 23143, 23142, 23132, 23131, 23147, 23133, 23135, 23134, 23137, 23136, 23152, 23138, 23139, 23140, 23143, 23142, 23144, 23147, 23146, 23149, 23148, 23151, 23150, 23153, 23152, 23154, 23172, 23171, 23156, 23155, 23158, 23157, 23160, 23159, 23161, 23164, 23163, 23186, 23185, 23188, 23187, 24313, 24315, 24317, 23166, 23165, 23168, 23167, 23169, 23172, 23171, 23174, 23173, 23176, 23175, 23177, 23179, 23178, 23180, 23182, 23181, 24319, 23183, 24321, 23186, 23185, 23188, 23187, 23190, 23189, 23192, 23191, 24325, 24327, 24329, 24331, 23194, 23193, 23196, 23195, 23197, 23200, 23199, 23202, 23201, 23204, 23203, 23206, 23205, 24333, 23208, 23207, 24335, 23210, 23209, 23211, 23213, 23212, 23214, 23216, 23215, 23217, 23219, 23218, 23220, 23222, 23221, 23223, 23225, 23224, 23226, 23228, 23227, 23230, 23229, 23232, 23231, 24337, 23234, 23233, 24339, 23236, 23235, 23238, 23237, 23240, 23239, 23242, 23241, 23243, 23245, 23247, 23246, 23248, 23250, 23249, 23252, 23251, 23254, 23253, 23255, 23257, 23256, 23258, 23266, 23265, 23268, 23259, 23269, 23271, 23274, 23273, 23275, 23277, 23276, 23278, 23262, 23261, 23264, 23263, 23266, 23265, 23268, 23267, 23269, 23271, 23274, 23273, 23275, 23277, 23276, 23278, 23280, 23279, 23281, 23283, 23282, 23285, 23284, 23287, 23286, 23288, 23290, 23289, 23291, 23293, 23292, 23295, 23294, 23297, 23296, 23299, 23298, 23301, 23300, 23302, 23304, 23303, 23305, 23307, 23306, 23309, 23308, 23311, 23310, 24345, 23312, 23314, 23316, 23315, 23317, 23319, 23318, 23321, 23320, 23368, 23367, 23370, 23369, 23408, 23407, 23322, 23411, 23410, 23412, 23372, 23371, 23374, 23373, 23324, 23323, 23325, 23327, 23326, 23328, 23378, 23377, 24347, 23380, 23379, 24349, 23382, 23381, 
23384, 23329, 23386, 23330, 24351, 23387, 23388, 23389, 23390, 23331, 23332, 23333, 23334, 23378, 23377, 24353, 23380, 23379, 24355, 23382, 23381, 23384, 23383, 23386, 23385, 24357, 23395, 23396, 23397, 23398, 23401, 23402, 23335, 23336, 23338, 23337, 23340, 23339, 23342, 23341, 4988, 23345, 23344, 5003, 23347, 23348, 23349, 23350, 23352, 23351, 23353, 23356, 23355, 23357, 23359, 23358, 23361, 23360, 24363, 23363, 23362, 24365, 23364, 23365, 23366, 23368, 23367, 23370, 23369, 23372, 23371, 23374, 23373, 23376, 23375, 23378, 23377, 24367, 23380, 23379, 24369, 23382, 23381, 23384, 23383, 23386, 23385, 24373, 23387, 23388, 23389, 23390, 23391, 23392, 23393, 23394, 23395, 23396, 23397, 23398, 23399, 23400, 23401, 23402, 23403, 24377, 23405, 24379, 23408, 23407, 23409, 23411, 23410, 23412, 23414, 23413, 23416, 23415, 23418, 23417, 23420, 23419, 23422, 23421, 24381, 23424, 23423, 24383, 23426, 23425, 23427, 23429, 23428, 23430, 23431, 23433, 23432, 23435, 23434, 23436, 23438, 23437, 23439, 23441, 23440, 23443, 23442, 23445, 23444, 23446, 23449, 23448, 23450, 23452, 23451, 23453, 23455, 23454, 23456, 23458, 23457, 23459, 23461, 23460, 23462, 23464, 23463, 23466, 23465, 23468, 23467, 23469, 23471, 23470, 23473, 23472, 23475, 23474, 23476, 23478, 23477, 23479, 23481, 23480, 24389, 23483, 23482, 24391, 23485, 23484, 4988, 23488, 23487, 5003, 23491, 23490, 23493, 23492, 23494, 23495, 23496, 23497, 23499, 23498, 23500, 23502, 23501, 23503, 23505, 23504, 23507, 23506, 23509, 23508, 23510, 23512, 23511, 23513, 23515, 23514, 23516, 23518, 23517, 23519, 23531, 23521, 23534, 23522, 23524, 23523, 23525, 23541, 23540, 23542, 23544, 23543, 23527, 23526, 23529, 23528, 23530, 23531, 23533, 23534, 23536, 23538, 23537, 23539, 23541, 23540, 23542, 23544, 23543, 23546, 23545, 23547, 23549, 23551, 23550, 24393, 24395, 23552, 23554, 23553, 23556, 23555, 23558, 23557, 23559, 23560, 23562, 23561, 24397, 23564, 23563, 24399, 23566, 23565, 23567, 23569, 23568, 23570, 23572, 23571, 24401, 23574, 
23573, 24403, 23575, 23576, 23579, 23578, 24405, 23581, 23580, 24407, 23582, 23584, 23583, 23586, 23585, 23588, 23587, 23590, 23589, 23591, 23593, 23592, 23595, 23594, 23596, 23598, 23599, 23601, 23603, 23602, 23604, 23605, 23606, 23607, 23608, 23610, 23609, 24409, 23612, 23611, 24411, 23614, 23613, 23615, 23617, 23616, 23618, 23620, 23619, 23685, 23687, 23686, 23688, 23690, 23689, 24413, 23692, 23680, 24415, 23681, 23621, 23682, 23696, 23622, 23624, 23623, 23626, 23625, 23627, 23630, 23629, 23632, 23631, 23837, 23641, 23634, 23633, 23841, 23840, 23843, 23642, 23636, 23635, 24417, 23638, 23637, 23850, 23852, 23643, 23639, 23641, 23640, 23839, 23838, 23841, 23840, 23843, 23642, 23846, 24419, 23849, 23848, 23850, 23852, 23643, 23644, 23645, 23648, 23647, 23650, 23649, 23652, 23651, 23662, 23661, 23653, 23655, 23657, 23659, 23662, 23661, 23663, 23665, 24427, 23667, 24429, 23668, 23670, 23669, 23671, 23673, 23672, 23674, 23676, 23675, 24431, 23678, 23677, 24433, 23684, 23683, 23679, 23687, 23686, 23688, 23690, 23689, 24435, 23692, 23680, 24437, 23681, 23694, 23682, 23697, 23696, 24439, 23684, 23683, 23685, 23687, 23686, 23688, 23690, 23689, 24441, 23692, 23691, 24443, 23694, 23693, 23695, 23697, 23696, 24445, 23699, 23698, 23701, 23700, 23703, 23702, 23705, 23704, 23707, 23706, 23708, 23710, 23709, 23711, 23712, 23714, 24447, 23717, 23716, 23718, 23721, 23720, 24449, 23723, 23722, 23725, 23724, 24451, 23727, 23726, 24453, 23729, 23728, 23730, 23732, 23731, 23733, 23735, 23734, 23736, 23738, 23737, 23739, 23741, 23740, 23742, 23745, 23744, 23747, 23746, 23749, 23748, 23751, 23750, 23753, 23752, 23754, 23756, 23755, 23758, 23757, 23760, 23759, 23762, 23761, 23764, 23763, 23766, 23765, 23768, 23767, 24457, 23770, 23769, 23772, 23771, 23774, 23773, 24461, 23776, 23775, 23778, 23777, 23780, 23779, 23781, 23783, 23782, 23784, 23785, 23788, 23787, 23790, 23789, 23791, 23793, 23792, 6373, 23796, 23795, 6388, 23799, 23798, 23800, 23801, 23804, 23803, 23806, 23805, 23808, 23807, 
23810, 23809, 23812, 23811, 23814, 23813, 23815, 23817, 23816, 23818, 23820, 23819, 23821, 23823, 23822, 23825, 23824, 23827, 23826, 23828, 23830, 23829, 23831, 23833, 23832, 23835, 23834, 23837, 23836, 23839, 23838, 23841, 23840, 23843, 23842, 23844, 24463, 23846, 24465, 23849, 23848, 23850, 23852, 23851, 23853, 23854, 23857, 23856, 23859, 23858, 23872, 23860, 23875, 23874, 23861, 23863, 23879, 23881, 23865, 23864, 23866, 23868, 23867, 23869, 23871, 23870, 23873, 23872, 23875, 23874, 23877, 23876, 23878, 23879, 23881, 23883, 23882, 23885, 23884, 23887, 23886, 24469, 23889, 23888, 24471, 23891, 23890, 23893, 23892, 23895, 23894, 23897, 23896, 23899, 23898, 23900, 23902, 23901, 24475, 23904, 23903, 24477, 23906, 23905, 24479, 23908, 23907, 24481, 23910, 23909, 23911, 23913, 23912, 23914, 23915, 23917, 23918, 23920, 23922, 23921, 23923, 23925, 23924, 23926, 23928, 23927, 23929, 23931, 23930, 23932, 23934, 23933, 24483, 23936, 23935, 24485, 23938, 23937, 23939, 23941, 23940, 23942, 23944, 23943, 23945, 23947, 23946, 23948, 23950, 23949, 23951, 23953, 23952, 23954, 23956, 23955, 23958, 23957, 23960, 23959, 23961, 23963, 23962, 23964, 23966, 23965, 23967, 23969, 23968, 23970, 23972, 23971, 23974, 23973, 24487, 23976, 23975, 23978, 23977, 23979, 23981, 23980, 23983, 23982, 24489, 23985, 23984, 24491, 24066, 23987, 23986, 23989, 23988, 23990, 23993, 23992, 23995, 23994, 23996, 23999, 23998, 24001, 24000, 24003, 24002, 24005, 24004, 24006, 24008, 24010, 24009, 24011, 24014, 24013, 24016, 24015, 24018, 24017, 24020, 24019, 24021, 24022, 24025, 24024, 24026, 24028, 24027, 24030, 24029, 24032, 24031, 24034, 24033, 24036, 24035, 24037, 24040, 24039, 24042, 24041, 24044, 24043, 24046, 24045, 24047, 24049, 24048, 24051, 24050, 24052, 24055, 24054, 24056, 24059, 24058, 24061, 24060, 24501, 24063, 24062, 24503, 24083, 24085, 24065, 24064, 24066, 24068, 24067, 24069, 24071, 24070, 24072, 24073, 24075, 24077, 24076, 24079, 24078, 24080, 24082, 24083, 24085, 24087, 24086, 24089, 
24088, 24090, 24092, 24095, 24094, 24096, 24099, 24098, 24100, 24102, 24101, 24103, 24105, 24104, 24106, 24109, 24108, 24110, 24111, 24113, 24115, 24114, 24116, 24118, 24117, 24120, 24119, 24122, 24121, 24509, 24124, 24123, 24125, 24127, 24126, 24128, 24130, 24129, 24131, 24132, 24134, 24136, 24135, 24138, 24137, 24140, 24139, 24141, 24143, 24142, 24144, 24574, 24567, 24782, 24145, 24574, 24567, 24574, 24576, 24588, 24590, 24592, 24599, 24598, 24600, 24602, 24601, 24146, 24605, 24147, 24784, 24607, 24606, 24786, 24622, 24148, 24624, 24623, 24626, 24610, 24788, 24593, 24575, 24593, 24568, 24573, 24155, 24589, 24591, 24593, 24720, 24719, 24721, 24723, 24722, 24724, 24726, 24725, 24728, 24727, 24730, 24729, 24732, 24731, 24734, 24733, 24156, 24773, 24772, 24774, 24736, 24735, 24780, 24738, 24157, 24740, 24739, 24824, 24187, 24186, 24189, 24188, 24191, 24190, 24826, 24828, 24225, 24224, 24708, 24707, 24710, 24709, 24712, 24226, 24830, 24832, 24834, 24836, 24838, 24714, 24713, 24840, 24716, 24715, 24842, 24844, 24846, 24848, 24850, 24718, 24227, 24852, 24511, 24510, 24513, 24512, 24515, 24514, 24516, 24518, 24517, 24856, 24519, 24521, 24523, 24522, 24858, 24525, 24524, 24860, 24527, 24526, 24528, 24530, 24529, 24531, 24533, 24532, 24862, 24535, 24534, 24864, 24536, 24538, 24539, 24541, 24543, 24542, 24545, 24544, 24547, 24546, 24548, 24550, 24549, 24551, 24553, 24552, 24866, 24555, 24554, 24868, 24556, 24558, 24560, 24559, 24562, 24561, 24583, 24563, 24872, 24585, 24564, 24587, 24572, 24593, 24565, 24567, 24575, 24874, 24579, 24566, 24570, 24876, 24583, 24582, 24878, 24585, 24571, 24587, 24572, 24593, 24574, 24568, 24567, 24881, 24569, 24580, 24570, 24883, 24583, 24582, 24885, 24585, 24571, 24587, 24572, 24574, 24573, 24576, 24575, 24578, 24577, 24579, 24581, 24580, 24888, 24583, 24582, 24890, 24585, 24584, 24587, 24586, 24589, 24588, 24591, 24590, 24593, 24592, 24594, 24595, 24597, 24599, 24598, 24600, 24602, 24601, 24603, 24605, 24604, 24894, 24607, 24606, 24896, 
24622, 24608, 24624, 24623, 24610, 24609, 24898, 24612, 24611, 24613, 24615, 24614, 24616, 24618, 24617, 24620, 24619, 24622, 24621, 24624, 24623, 24626, 24625, 24900, 24628, 24627, 24630, 24629, 24632, 24631, 24634, 24633, 24902, 24636, 24635, 24904, 24638, 24637, 24906, 24640, 24639, 24692, 24691, 24694, 24641, 24643, 24642, 24908, 24645, 24644, 24910, 24647, 24646, 24648, 24650, 24649, 24651, 24653, 24652, 24654, 24656, 24655, 24912, 24658, 24657, 24914, 24659, 24660, 24663, 24662, 24664, 24666, 24665, 24667, 24668, 24670, 24669, 24672, 24671, 24916, 24674, 24673, 24676, 24675, 24678, 24677, 24680, 24679, 24682, 24681, 24684, 24683, 24685, 24918, 24688, 24687, 24689, 24692, 24691, 24694, 24693, 24696, 24695, 24698, 24697, 24700, 24699, 24702, 24701, 24704, 24703, 24706, 24705, 24708, 24707, 24710, 24709, 24712, 24711, 24924, 24926, 24928, 24930, 24932, 24714, 24713, 24934, 24716, 24715, 24936, 24938, 24941, 24944, 24946, 24718, 24717, 24752, 24751, 24754, 24753, 24948, 24756, 24755, 24758, 24757, 24760, 24759, 24950, 24720, 24719, 24721, 24723, 24722, 24724, 24726, 24725, 24728, 24727, 24730, 24729, 24732, 24731, 24734, 24733, 24736, 24735, 24738, 24737, 24740, 24739, 24742, 24741, 24956, 24744, 24743, 24958, 24746, 24745, 24960, 24748, 24747, 24962, 24750, 24749, 24752, 24751, 24754, 24753, 24964, 24756, 24755, 24758, 24757, 24760, 24759, 24966, 24762, 24761, 24764, 24763, 24766, 24765, 24768, 24767, 24770, 24769, 24771, 24773, 24772, 24774, 24776, 24775, 24777, 24779, 24778, 24780, 24996, 24987, 24999, 24998, 25001, 25000, 25002, 25135, 25137, 24994, 24791, 24995, 24997, 24996, 24999, 24998, 25001, 25000, 25002, 25139, 25141, 24994, 24791, 24995, 24996, 24987, 24999, 24998, 25001, 24789, 25002, 25005, 25004, 25143, 24994, 24791, 24792, 24794, 24793, 24796, 24795, 24798, 24797, 24800, 24799, 24802, 24801, 24803, 24805, 24804, 24806, 24808, 24807, 24971, 24974, 24973, 24976, 24975, 24978, 24977, 24980, 24979, 24982, 24810, 24983, 24985, 24984, 24986, 25050, 
24811, 24813, 24812, 24815, 24814, 13718, 25057, 25056, 25177, 24817, 24816, 25062, 24818, 24820, 24819, 24822, 24821, 24969, 24971, 24974, 24973, 24976, 24975, 24978, 24977, 24980, 24979, 24982, 24981, 24983, 24985, 24984, 24986, 24988, 24987, 24999, 24989, 25001, 25000, 25002, 25005, 25004, 24992, 24991, 24994, 24993, 24995, 24997, 24996, 24999, 24998, 25001, 25000, 25002, 25005, 25004, 25007, 25006, 25009, 25008, 25010, 25012, 25011, 25014, 25013, 25016, 25015, 25017, 25019, 25018, 25020, 25022, 25021, 25024, 25023, 25026, 25025, 25028, 25027, 25030, 25029, 25031, 25033, 25032, 25034, 25036, 25035, 25208, 25038, 25037, 25210, 25040, 25039, 25212, 25042, 25041, 25214, 25044, 25043, 25045, 25047, 25046, 25048, 25050, 25049, 25051, 25054, 25053, 13718, 25057, 25056, 13733, 25060, 25059, 25062, 25061, 25064, 25063, 25065, 25067, 25066, 25068, 25070, 25069, 25072, 25071, 25074, 25073, 25075, 25077, 25076, 25078, 25080, 25079, 25081, 25083, 25082, 25084, 25086, 25085, 25087, 25089, 25088, 25091, 25090, 25093, 25092, 25218, 25095, 25094, 25220, 25097, 25096, 25099, 25098, 25101, 25100, 25102, 25103, 25105, 25107, 25106, 25222, 25109, 25108, 25224, 25111, 25110, 25113, 25112, 25115, 25114, 25117, 25116, 25119, 25118, 25121, 25120, 25123, 25122, 25226, 25125, 25124, 25228, 25127, 25126, 25129, 25128, 25131, 25130, 25133, 25132, 25200, 25202, 25159, 25149, 25148, 25273, 25150, 25152, 25151, 25154, 25153, 25275, 25156, 25155, 25277, 25158, 25157, 25279, 25200, 25202, 25159, 25161, 25160, 25163, 25162, 25165, 25164, 25167, 25166, 25169, 25168, 25233, 25252, 25171, 25170, 25283, 25173, 25172, 25285, 25175, 25174, 25287, 25179, 25178, 25181, 25180, 25183, 25182, 25185, 25184, 25187, 25186, 25189, 25188, 25190, 25192, 25191, 25193, 25195, 25194, 25291, 25197, 25196, 25293, 25199, 25198, 25295, 25200, 25202, 25204, 25206, 25236, 25235, 25238, 25229, 25240, 25239, 25230, 25243, 25242, 25245, 25244, 25247, 25246, 25231, 25250, 25249, 25297, 25233, 25232, 25253, 25234, 25236, 
25235, 25238, 25237, 25240, 25239, 25241, 25243, 25242, 25245, 25244, 25247, 25246, 25248, 25250, 25249, 25299, 25252, 25251, 25254, 25253, 25256, 25255, 25258, 25257, 25260, 25259, 25261, 25263, 25262, 25264, 25265, 25267, 25269, 25271, 25303, 25305, 25314, 25310, 25306, 25317, 25312, 25313, 25314, 25310, 25306, 25317, 25312, 25313, 25314, 25307, 25308, 25317, 25312, 25319, 25314, 25310, 25316, 25317, 25309, 25319, 25314, 25310, 25311, 25317, 25312, 25313, 25315, 25314, 25316, 25318, 25317, 25319, 14, 15, 8024, 8025, 8028, 8029, 8030, 8031, 8040, 8041, 8044, 8045, 8048, 8049, 8052, 8053, 8054, 8055, 8056, 8057, 8058, 8059, 8060, 8061, 8062, 8063, 8064, 8065, 8066, 8067, 8068, 8069, 8070, 8071, 8072, 8073, 8074, 8075, 8076, 8077, 8078, 8079, 8080, 8081, 8082, 8083, 8084, 8085, 8086, 8087, 8088, 8089, 8090, 8091, 8092, 8093, 8094, 8095, 8096, 8097, 8098, 8099, 8100, 8101, 8102, 8103, 8104, 8105, 8106, 8107, 8108, 8109, 8110, 8111, 8112, 8115, 8116, 8117, 8118, 8119, 8120, 8123, 8124, 8125, 8126, 8127, 8128, 8129, 8130, 8133, 8134, 8135, 8136, 8137, 8138, 8139, 8140, 8141, 8142, 8143, 8144, 8145, 8146, 8147, 8148, 8149, 8150, 8151, 8152, 8153, 8154, 8157, 8158, 8161, 8162, 8163, 8164, 8165, 8197, 8198, 8201, 8204, 8205, 8206, 8207, 8208, 8209, 8210, 8211, 8212, 8213, 8214, 8215, 8216, 8217, 8218, 8219, 8220, 8221, 8222, 8223, 8224, 8225, 8228, 8229, 8232, 8233, 8234, 8235, 8236, 8237, 8238, 8239, 8240, 8241, 8242, 8243, 8244, 8245, 8246, 8247, 8248, 8249, 8250, 8251, 8252, 8253, 8254, 8255, 8256, 8257, 8258, 8259, 8260, 8261, 8262, 8263, 8264, 8265, 8266, 8267, 8268, 8269, 8270, 8271, 8272, 8273, 8274, 8275, 8276, 8277, 8280, 8281, 8283, 8284, 8286, 8287, 8288, 8289, 8290, 8291, 8292, 8293, 8294, 8295, 8298, 8299, 8300, 8301, 8302, 8303, 8304, 8305, 8306, 8307, 8308, 8309, 8310, 8311, 8312, 8313, 8314, 8315, 8316, 8317, 8318, 8319, 8321, 8322, 8324, 8325, 8326, 8327, 8328, 8329, 8330, 8331, 8332, 8333, 8334, 8335, 8337, 8338, 8340, 8341, 8342, 8343, 8344, 8345, 8346, 
8347, 8348, 8349, 8350, 8351, 8352, 8353, 8354, 8355, 8356, 8357, 8358, 8359, 8360, 8361, 8362, 8363, 8364, 8365, 8366, 8369, 8370, 8373, 8374, 8375, 8382, 8383, 8384, 8402, 8403, 8406, 8407, 8414, 8415, 8416, 8417, 8418, 8419, 8420, 8421, 8422, 8423, 8424, 8425, 8426, 8435, 8436, 8437, 8438, 8439, 8440, 8441, 8442, 8443, 8444, 8445, 8446, 8447, 8448, 8449, 8450, 8451, 8452, 8455, 8456, 8457, 8458, 8459, 8460, 8461, 8462, 8463, 8464, 8465, 8466, 8467, 8468, 8469, 8470, 8471, 8472, 8473, 8474, 8475, 8476, 8477, 8478, 8479, 8480, 8482, 8484, 8485, 8486, 8487, 8488, 8489, 8490, 8491, 8492, 8493, 8494, 8495, 8496, 8497, 8498, 8499, 8500, 8501, 8502, 8503, 8504, 8505, 8506, 8507, 8508, 8509, 8510, 8511, 8512, 8513, 8514, 8515, 8516, 8517, 8518, 8519, 8520, 8521, 8522, 8523, 8524, 8525, 8526, 8527, 8528, 8529, 8530, 8531, 8532, 8533, 8534, 8535, 8536, 8537, 8538, 8539, 8540, 8541, 8542, 8543, 8544, 8545, 8546, 8547, 8548, 8549, 8550, 8551, 8552, 8553, 8554, 8555, 8556, 8557, 8558, 8559, 8560, 8561, 8562, 8563, 8564, 8565, 8566, 8567, 8568, 8569, 8570, 8571, 8572, 8573, 8574, 8575, 8576, 8577, 8578, 8581, 8582, 8583, 8584, 8585, 8586, 8587, 8588, 8589, 8591, 8592, 8594, 8595, 8596, 8597, 8598, 8599, 8600, 8601, 8602, 8603, 8604, 8605, 8606, 8607, 8608, 8615, 8616, 8617, 8618, 8619, 8620, 8621, 8622, 8623, 8624, 8625, 8626, 8627, 8628, 8629, 8630, 8631, 8632, 8633, 8635, 8636, 8638, 8639, 8640, 8641, 8642, 8643, 8644, 8645, 8647, 8648, 8650, 8651, 8652, 8653, 8654, 8655, 8656, 8657, 8658, 8659, 8660, 8661, 8662, 8663, 8664, 8665, 8666, 8667, 8668, 8669, 8670, 8671, 8672, 8673, 8674, 8675, 8676, 8677, 8678, 8679, 8680, 8681, 8682, 8683, 8684, 8685, 8686, 8687, 8688, 8689, 8690, 8691, 8692, 8693, 8694, 8695, 8696, 8697, 8698, 8699, 8700, 8701, 8702, 8703, 8704, 8705, 8706, 8707, 8708, 8709, 8710, 8711, 8712, 8713, 8714, 8715, 8716, 8717, 8718, 8719, 8720, 8721, 8722, 8723, 8724, 8725, 8726, 8728, 8730, 8731, 8732, 8733, 8734, 8735, 8736, 8737, 8738, 8739, 8740, 8741, 8742, 
8743, 8744, 8745, 8746, 8747, 8748, 8749, 8750, 8751, 8752, 8753, 8754, 8755, 8756, 8757, 8758, 8759, 8760, 8761, 8762, 8763, 8764, 8765, 8766, 8767, 8768, 8769, 8770, 8771, 8772, 8773, 8774, 8775, 8776, 8777, 8778, 8779, 8780, 8781, 8782, 8783, 8784, 8785, 8786, 8789, 8790, 8791, 8792, 8793, 8794, 8797, 8798, 8801, 8802, 8805, 8806, 8809, 8810, 8811, 8812, 8813, 8814, 8815, 8816, 8817, 8818, 8819, 8820, 8821, 8822, 8823, 8824, 8827, 8828, 8829, 8830, 8831, 8832, 8835, 8836, 8839, 8840, 8843, 8844, 8847, 8848, 8849, 8850, 8851, 8852, 8853, 8854, 8855, 8856, 8857, 8858, 8859, 8860, 8861, 8862, 8863, 8864, 8865, 8866, 8867, 8868, 8869, 8870, 8871, 8872, 8873, 8874, 8875, 8876, 8877, 8878, 8879, 8880, 8881, 8882, 8883, 8884, 8885, 8886, 8887, 8888, 8889, 8890, 8891, 8892, 8893, 8894, 8895, 8896, 8897, 8898, 8899, 8900, 8901, 8902, 8903, 8904, 8905, 8906, 8907, 8908, 8909, 8910, 8911, 8912, 8913, 8914, 8915, 8916, 8917, 8918, 8919, 8920, 8921, 8922, 8923, 8924, 8925, 8926, 8927, 8928, 8929, 8930, 8932, 8933, 8935, 8936, 8937, 8938, 8939, 8940, 8941, 8942, 8945, 8946, 8949, 8950, 8953, 8954, 8957, 8958, 8959, 8960, 8961, 8962, 8963, 8964, 8965, 8966, 8967, 8968, 8969, 8970, 8971, 8972, 8974, 8975, 8976, 8977, 8978, 8979, 8980, 8981, 8982, 8983, 8984, 8985, 8987, 8988, 8989, 8990, 8991, 8992, 8993, 8994, 8995, 8996, 8997, 8998, 8999, 9000, 9001, 9002, 9003, 9004, 9005, 9006, 9007, 9008, 9009, 9010, 9011, 9012, 9013, 9014, 9015, 9016, 9017, 9018, 9019, 9020, 9021, 9022, 9023, 9024, 9025, 9026, 9027, 9028, 9029, 9030, 9031, 9032, 9033, 9034, 9035, 9036, 9037, 9038, 9039, 9040, 9041, 9042, 9043, 9044, 9045, 9046, 9047, 9048, 9049, 9050, 9051, 9052, 9053, 9054, 9055, 9056, 9057, 9058, 9059, 9060, 9061, 9062, 9063, 9066, 9067, 9070, 9071, 9072, 9073, 9074, 9075, 9076, 9077, 9078, 9079, 9080, 9081, 9082, 9085, 9086, 9087, 9088, 9089, 9090, 9093, 9094, 9095, 9098, 9099, 9100, 9101, 9102, 9105, 9106, 9107, 9108, 9111, 9112, 9115, 9116, 9117, 9118, 9119, 9120, 9121, 9122, 9123, 
9124, 9125, 9126, 9127, 9128, 9129, 9130, 9131, 9132, 9133, 9134, 9135, 9136, 9137, 9138, 9139, 9140, 9141, 9142, 9143, 9144, 9145, 9146, 9147, 9148, 9149, 9150, 9151, 9152, 9153, 9154, 9155, 9156, 9157, 9158, 9159, 9160, 9161, 9162, 9163, 9164, 9165, 9166, 9167, 9168, 9169, 9170, 9171, 9172, 9173, 9174, 9175, 9176, 9177, 9178, 9179, 9180, 9181, 9182, 9183, 9184, 9191, 9192, 9193, 9194, 9195, 9196, 9197, 9198, 9199, 9200, 9201, 9202, 9203, 9204, 9205, 9206, 9207, 9210, 9213, 9214, 9215, 9216, 9217, 9218, 9220, 9221, 9231, 9232, 9233, 9234, 9235, 9236, 9237, 9238, 9239, 9240, 9241, 9242, 9243, 9246, 9247, 9250, 9251, 9252, 9253, 9254, 9255, 9256, 9257, 9258, 9259, 9260, 9261, 9262, 9263, 9264, 9265, 9266, 9267, 9268, 9269, 9270, 9271, 9272, 9273, 9276, 9277, 9280, 9281, 9282, 9283, 9284, 9285, 9286, 9287, 9288, 9289, 9290, 9291, 9292, 9293, 9294, 9295, 9296, 9297, 9298, 9299, 9300, 9301, 9302, 9303, 9304, 9305, 9306, 9307, 9308, 9309, 9310, 9311, 9312, 9313, 9314, 9315, 9316, 9318, 9319, 9321, 9322, 9323, 9324, 9325, 9326, 9327, 9328, 9329, 9330, 9331, 9332, 9333, 9334, 9335, 9336, 9337, 9338, 9339, 9340, 9341, 9342, 9343, 9344, 9345, 9346, 9347, 9348, 9349, 9350, 9351, 9352, 9353, 9354, 9355, 9356, 9357, 9358, 9359, 9360, 9361, 9363, 9364, 9366, 9367, 9370, 9371, 9372, 9373, 9374, 9375, 9376, 9377, 9378, 9379, 9380, 9381, 9382, 9383, 9384, 9385, 9386, 9387, 9388, 9389, 9390, 9391, 9392, 9393, 9394, 9395, 9396, 9397, 9398, 9399, 9400, 9403, 9404, 9407, 9408, 9409, 9410, 9411, 9412, 9415, 9416, 9417, 9418, 9419, 9420, 9421, 9422, 9423, 9424, 9427, 9428, 9431, 9432, 9433, 9434, 9435, 9436, 9439, 9440, 9441, 9442, 9443, 9444, 9445, 9446, 9447, 9448, 9449, 9450, 9453, 9454, 9455, 9456, 9457, 9458, 9459, 9460, 9461, 9462, 9463, 9464, 9465, 9466, 9467, 9468, 9469, 9470, 9473, 9474, 9477, 9478, 9481, 9482, 9483, 9484, 9485, 9486, 9487, 9488, 9489, 9490, 9491, 9492, 9493, 9494, 9495, 9498, 9499, 9502, 9503, 9505, 9506, 9508, 9509, 9512, 9513, 9514, 9515, 9516, 9517, 9518, 
9519, 9520, 9521, 9522, 9523, 9524, 9526, 9528, 9529, 9530, 9533, 9536, 9537, 9538, 9539, 9540, 9541, 9542, 9543, 9544, 9545, 9546, 9547, 9548, 9549, 9550, 9551, 9554, 9555, 9558, 9559, 9560, 9561, 9562, 9563, 9564, 9565, 9566, 9567, 9568, 9569, 9570, 9571, 9572, 9573, 9574, 9575, 9576, 9577, 9578, 9579, 9580, 9581, 9582, 9583, 9584, 9585, 9586, 9587, 9588, 9589, 9590, 9591, 9592, 9593, 9594, 9595, 9596, 9598, 9599, 9601, 9602, 9603, 9604, 9605, 9607, 9608, 9610, 9611, 9612, 9613, 9614, 9615, 9616, 9617, 9620, 9621, 9624, 9625, 9626, 9627, 9628, 9629, 9630, 9631, 9632, 9633, 9634, 9635, 9636, 9637, 9638, 9639, 9640, 9641, 9642, 9643, 9644, 9645, 9646, 9647, 9648, 9649, 9650, 9651, 9652, 9653, 9654, 9655, 9656, 9657, 9658, 9659, 9660, 9661, 9662, 9663, 9664, 9665, 9666, 9667, 9668, 9669, 9670, 9671, 9672, 9673, 9674, 9675, 9676, 9677, 9678, 9679, 9680, 9681, 9682, 9683, 9684, 9685, 9686, 9687, 9688, 9689, 9690, 9691, 9692, 9693, 9694, 9699, 9700, 9701, 9702, 9703, 9704, 9705, 9706, 9707, 9708, 9709, 9712, 9713, 9716, 9717, 9718, 9719, 9720, 9721, 9722, 9723, 9726, 9727, 9730, 9731, 9732, 9733, 9736, 9737, 9740, 9741, 9742, 9743, 9744, 9745, 9746, 9747, 9748, 9749, 9750, 9751, 9752, 9753, 9754, 9755, 9756, 9757, 9758, 9759, 9760, 9761, 9762, 9763, 9764, 9765, 9766, 9769, 9770, 9773, 9774, 9775, 9776, 9777, 9778, 9779, 9780, 9781, 9782, 9783, 9784, 9785, 9786, 9789, 9790, 9793, 9794, 9795, 9796, 9797, 9798, 9799, 9800, 9801, 9802, 9803, 9804, 9805, 9806, 9807, 9808, 9809, 9810, 9811, 9812, 9813, 9814, 9815, 9816, 9819, 9820, 9821, 9822, 9823, 9824, 9825, 9826, 9827, 9828, 9829, 9830, 9831, 9832, 9833, 9836, 9837, 9838, 9839, 9840, 9841, 9842, 9843, 9844, 9845, 9846, 9847, 9848, 9849, 9850, 9852, 9854, 9856, 9858, 9860, 9861, 9863, 9864, 9867, 9870, 9871, 9872, 9873, 9874, 9875, 9876, 9877, 9878, 9881, 9882, 9885, 9886, 9887, 9888, 9889, 9890, 9891, 9892, 9895, 9896, 9899, 9900, 9901, 9902, 9903, 9906, 9907, 9908, 9909, 9910, 9911, 9912, 9913, 9916, 9917, 9920, 9921, 
9922, 9923, 9924, 9927, 9928, 9929, 9930, 9931, 9932, 9933, 9934, 9935, 9936, 9937, 9938, 9939, 9940, 9941, 9942, 9945, 9946, 9947, 9948, 9949, 9952, 9953, 9954, 9955, 9958, 9959, 9962, 9963, 9964, 9965, 9966, 9967, 9968, 9969, 9970, 9971, 9972, 9973, 9974, 9975, 9976, 9977, 9978, 9979, 9980, 9981, 9982, 9984, 9985, 9987, 9988, 9989, 9990, 9991, 9992, 9993, 9994, 9995, 9996, 9997, 9998, 9999, 10000, 10001, 10002, 10003, 10006, 10007, 10009, 10010, 10012, 10013, 10016, 10017, 10018, 10019, 10020, 10021, 10022, 10023, 10024, 10025, 10026, 10027, 10028, 10029, 10030, 10031, 10032, 10033, 10034, 10035, 10036, 10037, 10038, 10039, 10040, 10041, 10042, 10043, 10044, 10045, 10046, 10047, 10048, 10049, 10050, 10051, 10052, 10053, 10054, 10055, 10056, 10057, 10058, 10059, 10060, 10061, 10062, 10063, 10064, 10065, 10066, 10067, 10068, 10069, 10070, 10071, 10072, 10073, 10074, 10075, 10076, 10077, 10078, 10079, 10080, 10081, 10082, 10083, 10086, 10089, 10090, 10091, 10092, 10093, 10094, 10095, 10096, 10097, 10098, 10099, 10100, 10101, 10102, 10103, 10105, 10106, 10107, 10108, 10109, 10110, 10111, 10112, 10113, 10114, 10115, 10116, 10117, 10118, 10119, 10120, 10122, 10123, 10124, 10125, 10126, 10127, 10128, 10129, 10130, 10131, 10132, 10135, 10136, 10139, 10140, 10141, 10142, 10143, 10144, 10146, 10147, 10149, 10150, 10151, 10152, 10153, 10156, 10157, 10160, 10161, 10164, 10165, 10168, 10169, 10170, 10171, 10172, 10173, 10174, 10175, 10176, 10177, 10178, 10179, 10180, 10181, 10182, 10183, 10184, 10185, 10186, 10187, 10188, 10189, 10190, 10191, 10194, 10195, 10198, 10199, 10200, 10201, 10202, 10203, 10204, 10205, 10206, 10207, 10208, 10209, 10210, 10211, 10212, 10213, 10214, 10215, 10216, 10217, 10218, 10219, 10220, 10221, 10222, 10223, 10224, 10225, 10226, 10227, 10228, 10229, 10230, 10231, 10232, 10233, 10234, 10235, 10238, 10239, 10240, 10241, 10242, 10243, 10244, 10245, 10246, 10249, 10250, 10253, 10254, 10255, 10256, 10257, 10258, 10259, 10260, 10261, 10262, 10263, 10264, 
10265, 10266, 10267, 10268, 10269, 10270, 10271, 10272, 10273, 10274, 10275, 10276, 10277, 10278, 10279, 10280, 10281, 10282, 10283, 10284, 10285, 10286, 10287, 10288, 10289, 10290, 10291, 10294, 10295, 10296, 10297, 10299, 10300, 10302, 10303, 10304, 10305, 10306, 10308, 10309, 10312, 10313, 10315, 10316, 10317, 10318, 10319, 10320, 10321, 10322, 10323, 10324, 10325, 10326, 10327, 10328, 10329, 10332, 10333, 10336, 10337, 10338, 10339, 10340, 10341, 10342, 10343, 10344, 10345, 10346, 10347, 10348, 10349, 10350, 10351, 10352, 10353, 10354, 10355, 10356, 10357, 10358, 10359, 10360, 10361, 10363, 10365, 10366, 10367, 10368, 10369, 10370, 10371, 10372, 10373, 10374, 10375, 10377, 10379, 10380, 10381, 10382, 10383, 10384, 10385, 10386, 10387, 10388, 10389, 10390, 10391, 10392, 10395, 10396, 10397, 10398, 10399, 10400, 10401, 10402, 10403, 10404, 10405, 10406, 10407, 10408, 10409, 10410, 10411, 10412, 10413, 10414, 10415, 11664, 11665, 11668, 11669, 11670, 11671, 11672, 11673, 11674, 11675, 11676, 11677, 11678, 11679, 11680, 11681, 11682, 11683, 11686, 11687, 11690, 11691, 11692, 11693, 11694, 11695, 11702, 11703, 11704, 11705, 11706, 11707, 11708, 11709, 11710, 11711, 11712, 11713, 11714, 11715, 11716, 11717, 11718, 11719, 11720, 11721, 11722, 11723, 11724, 11725, 11726, 11727, 11728, 11729, 11730, 11731, 11732, 11733, 11734, 11735, 11736, 11737, 25339, 25337, 11996, 11997, 11998, 11999, 12000, 12001, 25460, 25459, 25627, 25626, 12091, 12092, 12093, 12094, 12095, 12096, 12097, 12098, 12109, 12110, 12113, 12114, 12125, 12126, 25655, 25653, 25825, 25824, 26366, 26364, 26397, 26395, 12893, 12894, 12896, 12897, 12899, 12900, 12901, 12902, 12903, 12906, 12907, 12908, 12909, 12912, 12913, 12916, 12917, 12918, 12919, 12920, 12921, 12922, 12923, 12926, 12927, 12930, 12931, 12932, 12933, 12934, 12935, 12936, 12937, 12938, 12939, 12940, 12941, 12942, 12943, 12944, 12945, 12948, 12949, 12952, 12953, 12954, 12955, 12957, 12958, 12960, 12961, 12964, 12965, 12966, 12967, 12968, 
12969, 12970, 12971, 12974, 12975, 12976, 12979, 12980, 12983, 12984, 12985, 12986, 12987, 12988, 12990, 12991, 12994, 12995, 12996, 12999, 13000, 13003, 13004, 13005, 13006, 13007, 13008, 13010, 13011, 13012, 13013, 13014, 13015, 13016, 13019, 13020, 13023, 13024, 13025, 13026, 13027, 13028, 13030, 13031, 13033, 13034, 13035, 13036, 13037, 13038, 13039, 13040, 13041, 13042, 13043, 13044, 13045, 13048, 13049, 13052, 13053, 13054, 13055, 13056, 13057, 13060, 13061, 13062, 13063, 13064, 13065, 13066, 13067, 13068, 13069, 13070, 13071, 13072, 13073, 13074, 13075, 13078, 13079, 13080, 13081, 13082, 13083, 13084, 13085, 13088, 13089, 13092, 13093, 13096, 13097, 13098, 13099, 13100, 13101, 13102, 13103, 13106, 13107, 13110, 13111, 13112, 13113, 13114, 13115, 13116, 13117, 13118, 13119, 13120, 13123, 13124, 13127, 13128, 13129, 13130, 13131, 13132, 13133, 13134, 13135, 13136, 13137, 13138, 13139, 13142, 13143, 13144, 13145, 13146, 13147, 13148, 13149, 13150, 13151, 13152, 13153, 13154, 13157, 13158, 13159, 13160, 13161, 13162, 13163, 13164, 13165, 13166, 13167, 13168, 13169, 13170, 13171, 13172, 13173, 13175, 13176, 13178, 13179, 13181, 13182, 13184, 13185, 13196, 13197, 13200, 13201, 13214, 13215, 13216, 13217, 13218, 13219, 13222, 13223, 13224, 13225, 13226, 13227, 13230, 13231, 13232, 13233, 13234, 13235, 13236, 13237, 13239, 13240, 13241, 13242, 13243, 13244, 13246, 13247, 13248, 13249, 13250, 13251, 13253, 13254, 13256, 13257, 13260, 13261, 13264, 13265, 13268, 13269, 13272, 13273, 13274, 13275, 13276, 13277, 13280, 13281, 13282, 13283, 13284, 13285, 13288, 13289, 13290, 13291, 13292, 13293, 13295, 13296, 13298, 13299, 13300, 13301, 13302, 13303, 13304, 13305, 13306, 13307, 13308, 13309, 14158, 14159, 14160, 14161, 14162, 14163, 14164, 14169, 14170, 14171, 14172, 14173, 14174, 14175, 14176, 14177, 14178, 14183, 14184, 14185, 14186, 14187, 14188, 14189, 14190, 14191, 14192, 14193, 14194, 14197, 14198, 14199, 14202, 14203, 14204, 14205, 14206, 14207, 14208, 14209, 
14210, 14211, 14212, 14213, 14214, 14215, 14332, 14333, 14334, 14335, 14336, 14337, 14338, 14339, 14340, 14341, 14342, 14343, 14344, 14345, 14346, 14347, 14348, 14349, 14350, 14351, 14352, 14353, 14354, 14355, 14356, 14357, 14360, 14361, 14362, 14363, 14364, 14365, 14366, 14367, 27560, 27558, 27570, 27569, 27568, 27830, 27828, 27840, 27839, 27838, 15040, 15041, 15042, 15043, 15044, 15045, 15046, 15047, 15048, 15049, 15050, 15051, 15052, 15053, 15054, 15055, 15056, 15057, 15058, 15059, 15060, 15061, 15062, 15063, 15064, 15065, 15066, 15067, 15068, 15069, 15070, 15071, 15072, 15073, 15074, 15075, 15076, 15077, 15078, 15079, 15080, 15081, 15082, 15083, 15084, 15085, 15086, 15087, 15088, 15089, 15090, 15091, 15092, 15093, 15094, 15095, 15096, 15097, 15098, 15099, 15100, 15101, 15102, 15103, 15104, 15105, 15106, 15107, 15108, 15109, 15112, 15113, 15116, 15117, 15120, 15121, 15124, 15125, 15126, 15127, 15128, 15129, 15130, 15131, 15132, 15133, 15134, 15135, 15136, 15137, 15138, 15139, 15140, 15141, 15142, 15143, 15144, 15145, 15146, 15147, 15148, 15149, 15150, 15152, 15153, 15155, 15156, 15157, 15158, 15159, 15160, 15161, 15162, 15163, 15164, 15165, 15166, 15167, 15168, 15169, 15170, 15171, 15172, 15173, 15174, 15175, 15178, 15179, 15182, 15183, 15184, 15185, 15186, 15187, 15188, 15189, 15190, 15191, 15192, 15195, 15196, 15199, 15200, 15201, 15202, 15203, 15204, 15205, 15206, 15207, 15208, 15209, 15210, 15211, 15212, 15215, 15216, 15442, 15443, 15444, 15445, 15446, 15447, 15448, 15449, 15450, 15451, 15452, 27931, 27943, 15473, 15474, 15477, 15478, 15479, 15480, 15481, 15484, 15485, 15488, 15489, 15492, 15493, 15494, 15495, 15496, 15497, 15498, 15499, 15500, 15501, 15502, 15504, 15505, 15507, 15508, 15509, 15510, 15513, 15514, 15517, 15518, 15531, 15532, 15533, 15534, 15535, 15536, 15538, 15539, 15540, 15541, 15542, 15543, 15545, 15546, 15547, 15548, 15549, 15550, 15553, 15554, 15557, 15558, 15561, 15562, 15563, 15564, 15912, 15913, 15914, 15915, 15916, 15917, 15918, 
15919, 15920, 15921, 15922, 15923, 15924, 15925, 15926, 15927, 15930, 15931, 15932, 15933, 15934, 15935, 15936, 15937, 15938, 15939, 15940, 15941, 15942, 15943, 15944, 15945, 15946, 15947, 15948, 15949, 15952, 15953, 15954, 15955, 15956, 15957, 15958, 15959, 15960, 15961, 15962, 15963, 15964, 15965, 15966, 15968, 15970, 15971, 16724, 16725, 16726, 16727, 16728, 16729, 16741, 16742, 16743, 16744, 16745, 16746, 16754, 16755, 16756, 16757, 16758, 16759, 16767, 16768, 16769, 16770, 16771, 16772, 16780, 16781, 16782, 16783, 16784, 16785, 16876, 16877, 16878, 16879, 16880, 16881, 13, 14, 15, 28353, 28355, 28357, 28359, 28361, 28363, 28365, 28368, 28371, 28373, 28379, 28381, 28387, 28389, 28391, 28393, 28395, 28398, 28401, 28403, 28405, 28407, 28409, 28412, 28415, 28422, 28424, 28428, 28430, 28432, 28434, 28438, 28440, 28442, 28446, 28449, 28451, 28455, 28458, 28460, 28462, 28464, 28467, 28473, 28476, 28479, 28482, 28485, 28487, 28489, 28492, 28494, 28496, 28499, 28501, 28503, 28505, 28508, 28510, 28513, 28515, 28517, 28519, 28521, 28524, 28526, 28528, 28530, 28532, 28534, 28536, 28538, 28540, 28542, 28544, 28546, 28548, 28550, 28552, 28554, 28556, 28560, 28562, 28564, 28567, 28569, 28571, 28574, 28576, 28578, 28580, 28583, 28585, 28588, 28590, 28592, 28594, 28597, 28600, 28602, 28604, 28606, 28608, 28610, 28612, 28614, 28617, 28619, 28621, 28623, 28629, 28631, 28633, 28635, 28637, 28639, 28642, 28644, 28646, 28648, 28651, 28653, 28655, 28657, 28660, 28662, 28664, 28666, 28668, 28673, 28676, 28679, 28682, 28686, 28688, 28693, 28698, 28701, 28704, 28706, 28712, 28715, 28718, 28721, 28724, 28726, 28728, 28730, 28732, 28734, 28736, 28738, 28740, 28743, 28746, 28748, 28750, 28752, 28754, 28757, 28761, 28763, 28767, 28769, 28771, 28774, 28777, 28782, 28784, 28787, 28789, 28791, 28793, 28795, 28797, 28800, 28802, 28808, 28810, 28813, 28815, 28818, 28820, 28822, 28825, 28827, 28829, 28831, 28833, 28836, 28839, 28841, 28843, 28846, 28849, 28852, 28855, 28857, 28859, 28861, 28863, 
28865, 28868, 28870, 28872, 28874, 28876, 28878, 28881, 28884, 28889, 28891, 28893, 28898, 28900, 28902, 28904, 28906, 28908, 28910, 28915, 28917, 28922, 28924, 28927, 28930, 28933, 28936, 28939, 28942, 28944, 28946, 28949, 28952, 28954, 28956, 28959, 28962, 28964, 28966, 28968, 28970, 28973, 28976, 28978, 28980, 28982, 28984, 28986, 28988, 28990, 28992, 28994, 28996, 28998, 29001, 29004, 29006, 29008, 29010, 29012, 29014, 29016, 29018, 29021, 29024, 29026, 29028, 29031, 29034, 29037, 29039, 29041, 29047, 29049, 29051, 29053, 29059, 29062, 29067, 29069, 29071, 29074, 29077, 29079, 29081, 29084, 29087, 29090, 29093, 29096, 29098, 29100, 29102, 29104, 29107, 29110, 29112, 29114, 29116, 29118, 29120, 29123, 29126, 29128, 29130, 29132, 29135, 29138, 29140, 29142, 29144, 29146, 29148, 29151, 29153, 29155, 29157, 29160, 29163, 29165, 29167, 29169, 29171, 29173, 29175, 29178, 29181, 29183, 29185, 29188, 29191, 29193, 29195, 29197, 29199, 29201, 29203, 29205, 29207, 29209, 29211, 29213, 29215, 29218, 29221, 29223, 29226, 29228, 29230, 29232, 29234, 29236, 29238, 29240, 29242, 29244, 29247, 29249, 29252, 29254, 29256, 29258, 29261, 29264, 29267, 29269, 29271, 29274, 29277, 29280, 29283, 29287, 29289, 29291, 29293, 29295, 29297, 29301, 29304, 29306, 29308, 29310, 29313, 29315, 29317, 29319, 29322, 29324, 29326, 29328, 29330, 29333, 29335, 29337, 29340, 29343, 29346, 29348, 29350, 29352, 29354, 29356, 29359, 29361, 29363, 29365, 29367, 29369, 29372, 29375, 29378, 29381, 29384, 29387, 29389, 29391, 29393, 29395, 29397, 29399, 29401, 29405, 29408, 29410, 29412, 29415, 29418, 29420, 29424, 29427, 29430, 29432, 29434, 29436, 29440, 29443, 29446, 29449, 29451, 29453, 29456, 29459, 29461, 29463, 29465, 29467, 29470, 29473, 29475, 29477, 29481, 29484, 29486, 29488, 29490, 29492, 29495, 29498, 29500, 29502, 29505, 29508, 29510, 29512, 29514, 29516, 29526, 29528, 29530, 29532, 29534, 29544, 29546, 29548, 29551, 29558, 29561, 29564, 29566, 29568, 29573, 29575, 29577, 29579, 29581, 
29583, 29585, 29587, 29589, 29591, 29611, 29614, 29617, 29619, 29621, 29623, 29625, 29627, 29629, 29632, 29636, 29638, 29641, 29644, 29646, 29648, 29651, 29654, 29657, 29660, 29663, 29666, 29668, 29670, 29673, 29675, 29677, 29680, 29683, 29685, 29687, 29690, 29693, 29695, 29701, 29704, 29707, 29709, 29711, 29714, 29717, 29720, 29727, 29730, 29733, 29735, 29737, 29744, 29747, 29750, 29752, 29756, 29759, 29761, 29763, 29767, 29769, 29771, 29774, 29777, 29779, 29783, 29785, 29788, 29790, 29792, 29794, 29797, 29799, 29805, 29812, 29814, 29816, 29819, 29822, 29825, 29828, 29830, 29832, 29835, 29837, 29839, 29842, 29844, 29846, 29848, 29850, 29852, 29854, 29856, 29859, 29862, 29864, 29866, 29868, 29871, 29874, 29878, 29880, 29882, 29884, 29890, 29896, 29899, 29902, 29904, 29906, 29909, 29912, 29914, 29916, 29919, 29921, 29924, 29927, 29929, 29931, 29934, 29936, 29938, 29940, 29942, 29944, 29947, 29952, 29955, 29957, 29959, 29961, 29963, 29966, 29969, 29972, 29975, 29978, 29980, 29982, 29984, 29986, 29989, 29991, 29993, 29995, 29997, 29999, 30001, 30003, 30005, 30007, 30009, 30011, 30013, 30016, 30020, 30022, 30025, 30028, 30031, 30035, 30037, 30039, 30041, 30043, 30045, 30048, 30051, 30054, 30056, 30058, 30061, 30064, 30066, 30068, 30070, 30072, 30074, 30078, 30081, 30085, 30087, 30089, 30091, 30097, 30100, 30103, 30105, 30107, 30109, 30114, 30116, 30118, 30120, 30122, 30124, 30126, 30128, 30130, 30133, 30135, 30137, 30139, 30141, 30144, 30151, 30154, 30157, 30160, 30163, 30165, 30167, 30170, 30173, 30176, 30179, 30182, 30185, 30187, 30189, 30192, 30195, 30198, 30201, 30203, 30205, 30207, 30210, 30212, 30214, 30217, 30219, 30222, 30224, 30227, 30229, 30231, 30233, 30237, 30240, 30242, 30244, 30246, 30250, 30253, 30255, 30257, 30259, 30261, 30264, 30266, 30268, 30270, 30273, 30275, 30278, 30281, 30283, 30285, 30289, 30292, 30295, 30300, 30302, 30308, 30310, 30314, 30317, 30320, 30323, 30326, 30331, 30334, 30336, 30338, 30340, 30343, 30346, 30351, 30353, 30355, 30358, 
30371, 30374, 30377, 30379, 30381, 30383, 30385, 25328, 25328, 25330, 25329, 30396, 30399, 30402, 30404, 30406, 30408, 30410, 30413, 30416, 30419, 30421, 11892, 11893, 29724, 29741, 28376, 28374, 29722, 29739, 28384, 28382, 28419, 28417, 25410, 28443, 28452, 30425, 30427, 30429, 12004, 12005, 28468, 25465, 25463, 29887, 28565, 30111, 29888, 29886, 30111, 12067, 12068, 30297, 30297, 30435, 30437, 30439, 30441, 30443, 30445, 30447, 12178, 12179, 28669, 28670, 28689, 28690, 28695, 28709, 28707, 28765, 28779, 25797, 28895, 28805, 28803, 12244, 12245, 28895, 28879, 28886, 28895, 28912, 28919, 28918, 29043, 29055, 29064, 12426, 12427, 29344, 12438, 12439, 29402, 29421, 29437, 29478, 29519, 29517, 29523, 29521, 29537, 29535, 29541, 29539, 29555, 29553, 29570, 29569, 29594, 29592, 29598, 29596, 29602, 29600, 29606, 29605, 29604, 29609, 29608, 29698, 29696, 29724, 29722, 29741, 29739, 29753, 29780, 29800, 29802, 29765, 29764, 29780, 29800, 29802, 29809, 29807, 29869, 29888, 29887, 29886, 29885, 29891, 26977, 26975, 29949, 30076, 30075, 30094, 30092, 30111, 30148, 30146, 30297, 30303, 30312, 30220, 30234, 30297, 30305, 30312, 30311, 30248, 30324, 30328, 30348, 30248, 30324, 30328, 30262, 30328, 30348, 30286, 30297, 30305, 30303, 30312, 30311, 30324, 30328, 30348, 30457, 30459, 30461, 30464, 30468, 30470, 30472, 30475, 30478, 30480, 30486, 30488, 30490, 30493, 30496, 30498, 30502, 30504, 30506, 30508, 30510, 30512, 30514, 30517, 30519, 30521, 30523, 30525, 30527, 30530, 30532, 30534, 30536, 30538, 30540, 30542, 30545, 30547, 30549, 30551, 30553, 30555, 30557, 30562, 30565, 30568, 30570, 30572, 30574, 30576, 30578, 30581, 30584, 30586, 30588, 30590, 30592, 30594, 30596, 30598, 30600, 30602, 30604, 30606, 30608, 30610, 30612, 30614, 30616, 30619, 30622, 30625, 30627, 30631, 30634, 30638, 30640, 30642, 30644, 30646, 30648, 30650, 30652, 30655, 30658, 30660, 30662, 30664, 30666, 30668, 30670, 30672, 30674, 30676, 30678, 30680, 30682, 30684, 30686, 30688, 30690, 30692, 30694, 
30696, 30699, 30702, 30704, 30706, 30708, 30710, 30712, 30714, 30716, 30718, 30720, 30722, 30724, 30726, 30728, 30730, 30732, 30734, 30736, 30738, 30740, 30742, 30744, 30746, 30749, 30752, 30755, 30361, 30360, 27476, 30364, 30363, 30366, 30365, 30369, 30368, 30367, 30387, 30386, 30389, 30388, 30391, 30390, 30394, 30393, 30392, 30758, 30760, 30762, 30765, 30768, 30770, 30772, 30775, 30778, 30780, 30782, 30785, 30787, 30790, 30792, 30794, 30796, 30798, 30801, 30804, 30807, 30809, 30811, 30813, 30815, 30818, 30821, 30823, 30825, 30828, 30830, 30832, 30834, 30836, 14592, 14593, 14596, 14597, 14598, 30483, 30465, 30483, 30481, 30499, 27642, 27658, 30559, 30628, 30628, 30653, 15004, 15005, 15008, 15009, 15010, 30850, 30852, 30854, 30856, 30858, 30861, 30864, 30866, 30868, 30871, 30873, 30875, 30878, 30880, 30882, 30885, 30887, 30889, 30892, 30894, 30896, 30899, 30902, 30904, 30906, 30908, 30910, 30913, 30916, 30918, 30920, 30922, 30924, 30927, 30930, 30933, 30936, 30939, 30941, 30943, 30946, 30949, 30951, 30953, 30956, 30959, 30962, 30965, 30968, 30970, 30972, 30974, 30976, 30978, 30980, 30985, 30987, 30989, 30991, 30993, 30995, 30997, 30999, 31001, 31003, 31005, 31007, 31009, 31011, 15455, 15459, 31018, 31021, 31023, 31025, 31027, 31032, 31034, 31036, 31038, 31040, 31044, 31046, 31048, 31050, 31052, 31054, 31056, 31058, 31060, 31063, 31066, 31068, 31070, 30847, 30982, 31076, 31078, 31080, 31083, 31085, 31087, 31090, 31092, 31094, 31096, 31098, 31100, 31103, 31105, 31107, 31110, 31112, 31114, 31116, 31118, 31120, 31123, 31014, 31013, 31012, 31030, 31029, 31028, 31073, 31072, 31071, 31127, 31126, 31125, 31130, 31133, 31136, 31139, 31142, 31145, 31148, 31151, 31154, 31157, 31160, 31163, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 11698, 11699, 11700, 11701, 31702, 31171, 31707, 31168, 31170, 31169, 31962, 11894, 31702, 31171, 11897, 31707, 31706, 31173, 31172, 31175, 31174, 31176, 11905, 11906, 11907, 11908, 31178, 11910, 11911, 31180, 31182, 31185, 31184, 31186, 31188, 
31191, 31190, 31192, 11921, 11922, 31193, 11924, 28811, 31197, 31349, 31351, 31352, 31355, 31196, 31357, 31356, 31198, 31359, 31361, 31360, 31362, 31363, 31364, 31366, 28811, 31197, 31349, 31351, 31352, 31355, 31354, 31357, 31356, 31198, 31359, 31361, 31360, 31362, 31363, 31364, 31366, 31200, 31202, 11961, 31203, 11963, 31281, 31497, 31498, 31206, 31420, 31423, 31422, 31424, 31425, 31427, 31429, 31428, 31208, 31863, 31862, 31865, 31209, 31867, 31210, 31869, 31868, 31871, 31870, 31873, 31872, 31874, 31876, 31878, 31877, 31879, 31980, 12006, 12007, 12008, 31212, 31211, 31214, 31213, 31215, 31217, 31219, 31218, 31220, 31221, 31223, 28506, 31226, 31734, 31227, 31230, 31229, 31231, 31232, 31234, 31236, 31238, 31240, 31242, 31241, 31243, 31245, 31247, 31248, 28558, 31250, 12040, 12041, 12042, 31252, 31253, 28572, 31257, 31256, 31258, 31259, 28586, 31263, 31262, 31264, 31265, 31266, 31268, 31270, 12058, 12059, 12060, 31846, 31272, 28615, 31276, 31275, 31277, 31991, 31910, 30225, 31892, 31915, 31914, 31916, 31917, 31883, 12077, 28625, 31910, 30225, 31892, 31915, 31914, 31882, 31917, 31883, 12087, 28627, 28920, 31392, 31391, 31394, 31393, 31278, 31396, 31397, 31400, 31399, 25634, 31494, 31493, 28920, 31392, 31391, 31394, 31393, 31396, 31395, 31397, 31400, 31399, 25637, 31494, 31493, 25638, 25639, 31480, 29149, 31483, 31486, 31485, 31487, 31489, 31491, 31494, 31493, 31281, 31283, 31497, 31499, 31501, 31503, 31505, 31507, 31509, 31285, 31284, 32002, 31286, 28649, 31560, 31290, 28658, 31565, 31568, 31293, 31294, 12189, 31296, 12191, 31297, 31298, 31300, 31299, 28684, 12197, 31302, 12199, 31303, 12201, 31305, 31304, 31306, 12205, 12206, 31309, 31308, 31311, 31310, 31312, 31314, 31316, 31318, 31321, 31320, 31322, 31324, 31327, 31326, 28759, 31378, 12223, 31380, 31330, 31342, 31332, 31333, 12229, 31334, 31335, 12232, 31368, 31380, 12235, 31338, 31342, 31341, 31340, 31343, 12241, 12242, 31345, 32017, 28811, 31348, 31349, 31351, 31352, 31355, 31354, 31357, 31356, 31359, 31358, 
31361, 31360, 31363, 31362, 31364, 31366, 31368, 31380, 12265, 31370, 31372, 31374, 12269, 31387, 31377, 31376, 12273, 31378, 12275, 31380, 31381, 31383, 31385, 12280, 31387, 31388, 12283, 12284, 28920, 31392, 31391, 31394, 31393, 31396, 31395, 31397, 31400, 31399, 31401, 31404, 31403, 31405, 31407, 31410, 31409, 31411, 31412, 31414, 31416, 31415, 31417, 31495, 31498, 31497, 31418, 31420, 31423, 31422, 31424, 31425, 31427, 31429, 31428, 31430, 31512, 31431, 31432, 31433, 31435, 31436, 31437, 31438, 31440, 12330, 29045, 31442, 31444, 12334, 29057, 31512, 31511, 31446, 12339, 31516, 31447, 31449, 31450, 31451, 31453, 31455, 31454, 31456, 31457, 31458, 31462, 31461, 31460, 31463, 31465, 31464, 31467, 31466, 31468, 29121, 29124, 31472, 31474, 29133, 29136, 31477, 31479, 31480, 29149, 31483, 31486, 31485, 31487, 31489, 31491, 31494, 31493, 31495, 31498, 31497, 31499, 31501, 31503, 31505, 31507, 31509, 31512, 31511, 31514, 31513, 29224, 31516, 31518, 31520, 31521, 31523, 31524, 26279, 31526, 26285, 31529, 31528, 31531, 31530, 31532, 31533, 31535, 31537, 31536, 31538, 31539, 29285, 31541, 31543, 31545, 29299, 29302, 31548, 31550, 31551, 31553, 29320, 31556, 31568, 31567, 32029, 31558, 31559, 31560, 31563, 31562, 12433, 31564, 31565, 31568, 31567, 32032, 31569, 29357, 31572, 31575, 31574, 31577, 31576, 31579, 31578, 31581, 31580, 31582, 31585, 31584, 31586, 31588, 31590, 12457, 31591, 31594, 31593, 31595, 12462, 31598, 31597, 31600, 31599, 31601, 12468, 31604, 31603, 31605, 31606, 31609, 31608, 31610, 31612, 31615, 31614, 31618, 31617, 31616, 31619, 12483, 31620, 31622, 31625, 31624, 31626, 31629, 31628, 31631, 31630, 31634, 31633, 31632, 12496, 12497, 12498, 12499, 31636, 31635, 31639, 31638, 31637, 12505, 12506, 12507, 12508, 31640, 31688, 31646, 31643, 31642, 12514, 12515, 31644, 31645, 31688, 31646, 31648, 31647, 12522, 12523, 31649, 31660, 31659, 31651, 31663, 31655, 31654, 31658, 31657, 31656, 12534, 12535, 12536, 12537, 12538, 12539, 12540, 12541, 12542, 12543, 
12544, 31660, 31659, 31661, 31663, 31666, 31665, 31668, 31667, 29634, 31671, 31670, 31672, 31674, 31676, 31675, 31677, 31679, 31678, 31682, 31681, 31680, 31685, 31684, 31683, 31686, 31688, 31687, 31690, 31689, 31691, 12575, 12576, 31694, 31693, 31695, 31698, 31697, 31700, 31699, 12584, 12585, 31702, 31701, 31703, 31705, 12590, 12591, 31707, 31706, 31708, 12595, 31715, 31710, 31717, 31716, 31719, 31718, 12602, 31720, 31721, 29757, 31723, 31725, 31712, 12609, 31728, 12611, 12612, 12613, 31715, 31714, 31717, 31716, 31719, 31718, 12620, 31721, 31720, 29786, 31723, 31725, 31726, 12627, 31728, 12629, 12630, 12631, 31730, 31729, 31732, 31731, 31734, 31733, 31736, 31735, 31737, 31738, 31740, 31741, 31743, 31745, 31747, 31749, 31748, 31750, 31752, 12651, 31755, 31754, 29876, 31757, 31759, 12657, 12658, 12659, 12660, 31760, 12662, 12663, 12664, 31762, 31761, 31764, 31763, 31766, 31765, 31768, 31767, 31769, 31770, 31772, 31771, 31774, 31773, 31775, 31776, 31777, 31779, 31781, 31782, 12685, 27043, 29953, 27049, 31787, 31786, 31789, 31788, 31791, 31790, 31792, 31793, 31797, 31796, 31795, 31798, 31800, 31803, 31802, 31804, 31806, 31805, 31807, 31808, 31811, 31810, 30018, 31813, 31815, 31814, 31816, 30033, 31818, 31820, 31822, 31824, 31823, 31825, 31828, 31827, 31829, 31831, 31833, 12728, 12729, 31836, 31835, 30083, 31838, 31840, 12735, 12736, 31842, 31841, 31843, 31845, 12741, 31846, 31847, 31850, 31849, 31851, 31855, 31854, 31853, 31857, 31856, 31859, 31858, 31861, 31860, 12756, 12757, 31863, 31862, 31865, 31864, 31867, 31866, 31869, 31868, 31871, 31870, 31873, 31872, 31874, 31876, 31878, 31877, 31879, 31910, 30225, 31892, 31915, 31914, 31882, 31917, 31883, 12783, 31919, 31884, 30276, 30279, 31886, 31915, 12790, 31887, 12792, 12793, 31923, 31889, 30225, 31892, 31915, 31894, 12800, 31895, 31918, 12803, 31919, 12805, 31921, 12807, 12808, 31923, 31925, 31905, 12812, 12813, 12814, 31927, 31900, 31929, 31907, 31896, 31897, 31908, 12822, 31902, 31899, 31938, 31925, 31905, 12828, 
12829, 12830, 31927, 31900, 31929, 31907, 31906, 31908, 31933, 31934, 31902, 31938, 31904, 31903, 31925, 31905, 12845, 31926, 12847, 31927, 31928, 31929, 31907, 31906, 31908, 31933, 31909, 12856, 31935, 31938, 31937, 31910, 30276, 30279, 31915, 31914, 31916, 12866, 31918, 31917, 12869, 31919, 12871, 12872, 31921, 12874, 12875, 31923, 31925, 31924, 12879, 31926, 12881, 31927, 31928, 31929, 31931, 31933, 31932, 12888, 31934, 31935, 31938, 31937, 14070, 14071, 14072, 14073, 14074, 14075, 14076, 14077, 14078, 14079, 31940, 31939, 31942, 31941, 31943, 31945, 14106, 14107, 14108, 14109, 14110, 14111, 14112, 14113, 14114, 31951, 31950, 31952, 31953, 31955, 31957, 31956, 31960, 31959, 31958, 27540, 31976, 31978, 27548, 31997, 31996, 31995, 32306, 31999, 31998, 32308, 32000, 32240, 32241, 32243, 32126, 32125, 32128, 32127, 32130, 32122, 32121, 14907, 32123, 32124, 32134, 32133, 32136, 32135, 14914, 32126, 32125, 32128, 32127, 32130, 32129, 14921, 14922, 32131, 32134, 32133, 32136, 32135, 14928, 32139, 32138, 32137, 32140, 32143, 32142, 14935, 32145, 32144, 32146, 32149, 32148, 14941, 32151, 32150, 32152, 32155, 32154, 32156, 32158, 32157, 32159, 32163, 32162, 32161, 14954, 32165, 32164, 32167, 32166, 32168, 32170, 32172, 32171, 32173, 32175, 32177, 32178, 14967, 32195, 32180, 30636, 32181, 32183, 32182, 32200, 32202, 32184, 32204, 32185, 32188, 32187, 32190, 32189, 32191, 32193, 32192, 14986, 32195, 32194, 30636, 32197, 32198, 32200, 32202, 14994, 32204, 32205, 32207, 32209, 32212, 32211, 32215, 32214, 32213, 32322, 32217, 32216, 32324, 32218, 32220, 32221, 32223, 32225, 32224, 32226, 32227, 32229, 32249, 32230, 32233, 32232, 32231, 32235, 32234, 32237, 32236, 32238, 32240, 32241, 32243, 32244, 32247, 32246, 32249, 32248, 32251, 32250, 32271, 32273, 32274, 32275, 32277, 32278, 32279, 32281, 32282, 32283, 32373, 32372, 32284, 32286, 32289, 32288, 32373, 32372, 32290, 32291, 32293, 32296, 32295, 32297, 32300, 32299, 32301, 32303, 15860, 32326, 32328, 32331, 32330, 32332, 
32334, 32335, 32337, 32338, 32340, 32341, 32343, 32344, 32347, 32346, 32348, 32350, 32353, 32352, 32355, 32354, 32357, 32356, 32359, 32358, 32360, 32362, 32361, 32363, 32366, 32365, 32369, 32368, 32367, 32371, 32370, 32373, 32372, 32374, 32377, 32376, 32378, 15903, 32380, 32382, 32381, 32383, 32385, 32387, 32390, 32389, 32391, 32394, 32393, 15993, 15994, 15995, 32397, 31019, 32400, 32399, 32401, 16010, 16011, 16012, 32402, 32403, 32406, 32405, 31041, 31042, 32408, 32407, 32409, 32410, 32412, 32413, 32415, 31061, 31064, 32418, 32417, 32419, 16035, 16036, 16037, 32422, 32424, 32425, 32427, 32428, 32429, 32431, 32433, 32434, 32436, 32437, 32438, 32440, 32443, 32442, 16327, 16328, 16329, 32457, 32456, 32459, 32458, 32461, 32460, 32463, 32462, 32465, 32464, 32467, 32466, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 32481, 32483, 11886, 11887, 11888, 11889, 11890, 11891, 11895, 11896, 11898, 11899, 11900, 11901, 11902, 11903, 11904, 32503, 11909, 32508, 11912, 11913, 11914, 11915, 11916, 11917, 11918, 11919, 11920, 32519, 11923, 11925, 11926, 11927, 11928, 11929, 11930, 11931, 11932, 11933, 11934, 11935, 11936, 11937, 11938, 11939, 11940, 11941, 11942, 11943, 11944, 11945, 11946, 11947, 11948, 11949, 11950, 11951, 11952, 11953, 11954, 11955, 11956, 11957, 11958, 11959, 11960, 11962, 11964, 11965, 11966, 11967, 11968, 11969, 11970, 11971, 11972, 11973, 11974, 11975, 11976, 11977, 11978, 11979, 11980, 11981, 11982, 11983, 11984, 11985, 11986, 11987, 11988, 11989, 11990, 11991, 11992, 11993, 32594, 12009, 12010, 12011, 12012, 12013, 12014, 12015, 12016, 12017, 12018, 12019, 12020, 12021, 12022, 12023, 12024, 12025, 12026, 12027, 12028, 12029, 12030, 12031, 12032, 12033, 12034, 12035, 12036, 12037, 12038, 12039, 32627, 12043, 12044, 12045, 12046, 12047, 12048, 12049, 12050, 12051, 12052, 12053, 12054, 12055, 12056, 12057, 32645, 12061, 12062, 12063, 12064, 12065, 12066, 12069, 12070, 12071, 12072, 12073, 12074, 12075, 12076, 12078, 12079, 12080, 12081, 12082, 12083, 12084, 12085, 
12086, 12088, 12129, 12130, 12131, 12132, 12133, 12134, 12135, 12136, 12137, 12138, 12139, 12140, 12141, 12142, 12143, 12144, 12145, 12146, 12147, 12148, 12149, 12150, 12151, 12152, 12153, 12154, 12155, 12156, 12157, 12158, 12159, 12160, 12161, 12162, 12163, 12164, 12165, 12166, 12167, 12168, 12169, 12170, 12171, 12172, 12173, 12174, 12175, 12176, 12177, 12180, 12181, 12182, 12183, 12184, 12185, 12186, 12187, 12188, 12190, 12192, 12193, 12194, 12195, 12196, 12198, 12200, 12202, 12203, 12204, 32750, 12207, 12208, 12209, 12210, 12211, 12212, 12213, 12214, 12215, 12216, 12217, 12218, 12219, 12220, 12221, 12222, 12224, 12225, 12226, 12227, 12228, 12230, 12231, 12233, 12234, 12236, 12237, 12238, 12239, 12240, 32786, 12243, 12246, 12247, 12248, 12249, 12250, 12251, 12252, 12253, 12254, 12255, 12256, 12257, 12258, 12259, 12260, 12261, 12262, 12263, 12264, 12266, 12267, 12268, 12270, 12271, 12272, 12274, 12276, 12277, 12278, 12279, 12281, 12282, 32827, 12285, 12286, 12287, 12288, 12289, 12290, 12291, 12292, 12293, 12294, 12295, 12296, 12297, 12298, 12299, 12300, 12301, 12302, 12303, 12304, 12305, 12306, 12307, 12308, 12309, 12310, 12311, 12312, 12313, 12314, 12315, 12316, 12317, 12318, 12319, 12320, 12321, 12322, 12323, 12324, 12325, 12326, 12327, 12328, 12329, 12331, 12332, 12333, 12335, 12336, 12337, 12338, 12340, 12341, 12342, 12343, 12344, 12345, 12346, 12347, 12348, 12349, 12350, 12351, 12352, 12353, 12354, 12355, 12356, 12357, 12358, 12359, 12360, 12361, 12362, 12363, 12364, 12365, 12366, 12367, 12368, 12369, 12370, 12371, 12372, 12373, 12374, 12375, 12376, 12377, 12378, 12379, 12380, 12381, 12382, 12383, 12384, 12385, 12386, 12387, 12388, 12389, 12390, 12391, 12392, 12393, 12394, 12395, 12396, 12397, 12398, 12399, 12400, 12401, 12402, 12403, 12404, 12405, 12406, 12407, 12408, 12409, 12410, 12411, 12412, 12413, 12414, 12415, 12416, 12417, 12418, 12419, 12420, 12421, 12422, 12423, 12424, 12425, 12428, 12429, 12430, 12431, 12432, 12434, 12435, 12436, 12437, 12440, 
12441, 12442, 12443, 12444, 12445, 12446, 12447, 12448, 12449, 12450, 12451, 12452, 12453, 12454, 12455, 12456, 12458, 12459, 12460, 12461, 12463, 12464, 12465, 12466, 12467, 12469, 12470, 12471, 12472, 12473, 12474, 12475, 12476, 12477, 12478, 12479, 12480, 12481, 12482, 12484, 12485, 12486, 12487, 12488, 12489, 12490, 12491, 12492, 12493, 12494, 12495, 33038, 33040, 12500, 12501, 12502, 12503, 12504, 33047, 33049, 12509, 12510, 12511, 12512, 12513, 33056, 12516, 12517, 12518, 12519, 12520, 12521, 33064, 12524, 12525, 12526, 12527, 12528, 12529, 12530, 12531, 12532, 12533, 33076, 33078, 33080, 33082, 33085, 12545, 12546, 12547, 12548, 12549, 12550, 12551, 12552, 12553, 12554, 12555, 12556, 12557, 12558, 12559, 12560, 12561, 12562, 12563, 12564, 12565, 12566, 12567, 12568, 12569, 12570, 12571, 12572, 12573, 12574, 33117, 12577, 12578, 12579, 12580, 12581, 12582, 12583, 33126, 12586, 12587, 12588, 12589, 33132, 12592, 12593, 12594, 12596, 12597, 12598, 12599, 12600, 12601, 12603, 12604, 12605, 12606, 12607, 12608, 12610, 33154, 12614, 12615, 12616, 12617, 12618, 12619, 12621, 12622, 12623, 12624, 12625, 12626, 12628, 33172, 12632, 12633, 12634, 12635, 12636, 12637, 12638, 12639, 12640, 12641, 12642, 12643, 12644, 12645, 12646, 12647, 12648, 12649, 12650, 12652, 12653, 12654, 12655, 12656, 33199, 33201, 12661, 33205, 12665, 12666, 12667, 12668, 12669, 12670, 12671, 12672, 12673, 12674, 12675, 12676, 12677, 12678, 12679, 12680, 12681, 12682, 12683, 12684, 12686, 12687, 12688, 12689, 12690, 12691, 12692, 12693, 12694, 12695, 12696, 12697, 12698, 12699, 12700, 12701, 12702, 12703, 12704, 12705, 12706, 12707, 12708, 12709, 12710, 12711, 12712, 12713, 12714, 12715, 12716, 12717, 12718, 12719, 12720, 12721, 12722, 12723, 12724, 12725, 12726, 12727, 33270, 12730, 12731, 12732, 12733, 12734, 33277, 12737, 12738, 12739, 12740, 12742, 12743, 12744, 12745, 12746, 12747, 12748, 12749, 12750, 12751, 12752, 12753, 12754, 12755, 33298, 12758, 12759, 12760, 12761, 12762, 12763, 
12764, 12765, 12766, 12767, 12768, 12769, 12770, 12771, 12772, 12773, 12774, 12775, 12776, 12777, 12778, 12779, 12780, 12781, 12782, 12784, 12785, 12786, 12787, 12788, 12789, 12791, 33334, 12794, 12795, 12796, 12797, 12798, 12799, 12801, 12802, 12804, 12806, 33349, 12809, 12810, 12811, 33354, 12815, 12816, 12817, 12818, 12819, 12820, 12821, 12823, 12824, 12825, 12826, 12827, 33370, 12831, 12832, 12833, 12834, 12835, 12836, 12837, 12838, 12839, 12840, 12841, 12842, 12843, 12844, 12846, 12848, 12849, 12850, 12851, 12852, 12853, 12854, 12855, 12857, 12858, 12859, 12860, 12861, 12862, 12863, 12864, 12865, 12867, 12868, 12870, 33413, 12873, 33416, 12876, 12877, 12878, 12880, 12882, 12883, 12884, 12885, 12886, 12887, 12889, 12890, 12891, 12892, 33435, 33438, 33440, 33442, 14080, 14081, 14082, 14083, 14084, 14085, 33451, 33453, 33455, 33457, 14115, 14116, 14117, 14118, 14119, 14120, 14121, 14122, 14123, 14124, 14536, 14537, 14538, 14588, 14589, 14590, 14591, 14594, 14595, 32309, 14599, 14600, 14601, 14602, 14900, 14901, 14902, 14903, 14904, 14905, 14906, 14908, 14909, 14910, 14911, 14912, 14913, 14915, 14916, 14917, 14918, 14919, 14920, 33506, 14923, 14924, 14925, 14926, 14927, 14929, 14930, 14931, 14932, 14933, 14934, 14936, 14937, 14938, 14939, 14940, 14942, 14943, 14944, 14945, 14946, 14947, 14948, 14949, 14950, 14951, 14952, 14953, 14955, 14956, 14957, 14958, 14959, 14960, 14961, 14962, 14963, 14964, 14965, 14966, 14968, 14969, 14970, 14971, 14972, 14973, 14974, 14975, 14976, 14977, 14978, 14979, 14980, 14981, 14982, 14983, 14984, 14985, 14987, 14988, 14989, 14990, 14991, 14992, 14993, 14995, 14996, 14997, 14998, 14999, 15000, 15001, 15002, 15003, 15006, 15007, 32325, 15011, 15012, 15013, 15014, 15015, 15016, 15017, 15018, 15019, 15020, 15021, 15022, 15023, 15024, 15025, 15026, 15027, 15028, 15029, 15030, 15031, 15032, 15033, 15034, 15035, 15036, 15037, 15038, 15039, 15453, 15454, 15456, 15457, 15458, 15460, 15461, 15462, 15463, 15464, 15465, 15466, 15467, 15468, 
15469, 15470, 15471, 15472, 15521, 15522, 15523, 15524, 15525, 15526, 15527, 15528, 15529, 15530, 15861, 15862, 15863, 15864, 15865, 15866, 15867, 15868, 15869, 15870, 15871, 15872, 15873, 15874, 15875, 15876, 15877, 15878, 15879, 15880, 15881, 15882, 15883, 15884, 15885, 15886, 15887, 15888, 15889, 15890, 15891, 15892, 15893, 15894, 15895, 15896, 15897, 15898, 15899, 15900, 15901, 15902, 15904, 15905, 15906, 15907, 15908, 15909, 15910, 15911, 15990, 15991, 15992, 33705, 16005, 16006, 16007, 16008, 16009, 33713, 16013, 16014, 16015, 16016, 16017, 16018, 16019, 16020, 16021, 16026, 16027, 16028, 16029, 16030, 16031, 16032, 16033, 16034, 33734, 16312, 16313, 16314, 16315, 16316, 16317, 16318, 16319, 16320, 16321, 16322, 16323, 16324, 16325, 16326, 33752, 16912, 16913, 16919, 16920, 16924, 16925, 16929, 16930, 16934, 16935, 16969, 16970, 9, 10, 11, 12, 13, 14, 15, 33779, 33781, 33783, 33785, 33787, 33789, 33791, 33799, 33803, 33813, 33815, 33817, 33819, 33821, 33830, 33832, 33834, 33836, 33838, 32558, 33846, 33850, 33855, 33858, 33860, 33862, 33864, 33866, 33868, 33872, 33876, 33878, 33882, 33891, 33899, 33907, 33911, 33916, 33923, 33927, 33933, 33936, 33942, 33945, 33949, 33951, 33953, 33956, 33959, 33962, 33964, 33966, 33969, 33972, 33979, 33984, 33987, 33995, 34003, 34005, 34009, 34011, 34014, 34018, 34020, 34026, 34030, 34033, 34036, 34038, 32779, 34044, 34055, 34057, 34059, 34061, 34063, 32808, 34071, 34073, 34075, 34079, 34084, 34086, 34088, 34091, 34094, 34098, 34103, 34107, 34111, 34116, 34119, 34132, 34141, 34146, 34150, 34152, 34166, 34171, 34174, 34182, 34184, 34196, 34198, 34203, 34219, 34224, 34225, 34228, 34233, 34235, 34237, 34239, 34242, 32998, 34248, 34251, 34253, 34256, 34260, 34264, 34266, 33024, 34272, 34275, 34277, 34279, 34284, 34286, 34292, 34294, 34299, 34301, 34305, 34309, 34311, 33083, 34319, 34323, 34325, 34328, 34332, 34335, 34337, 34340, 34344, 34346, 34350, 34353, 34355, 34358, 34363, 34366, 34368, 34370, 34372, 33152, 34380, 34382, 
34384, 34386, 33170, 34394, 34396, 34398, 34400, 34409, 34413, 34418, 34422, 34424, 34426, 34428, 34432, 34434, 34445, 34447, 34449, 34453, 34458, 34461, 34465, 34469, 34476, 34479, 34485, 34491, 34494, 34497, 34500, 34503, 34505, 34507, 34510, 34512, 34514, 34516, 34518, 34520, 34524, 34530, 34533, 34539, 34547, 34549, 34555, 34557, 34561, 34563, 34566, 34568, 34570, 34574, 34576, 34580, 34583, 34584, 34585, 34589, 34591, 33397, 34595, 34600, 33407, 34603, 34610, 34611, 34612, 34617, 34618, 34621, 33443, 34627, 34629, 34442, 33203, 34401, 34437, 34466, 34442, 33203, 34401, 34437, 34466, 33458, 34637, 34642, 34644, 33792, 32735, 32743, 34015, 34021, 33796, 33804, 33806, 33809, 33807, 33822, 33826, 33824, 33839, 33843, 33847, 33852, 33869, 34442, 32592, 33883, 33885, 33886, 34442, 33203, 34401, 34437, 34466, 33893, 33895, 33900, 33902, 33904, 34416, 33908, 33913, 33918, 33920, 34416, 33924, 33929, 32662, 33938, 32672, 34651, 34654, 33975, 33980, 33988, 33990, 33992, 33996, 33998, 34000, 32735, 32743, 34015, 34021, 34023, 34051, 34031, 34064, 34039, 34046, 34048, 34051, 34049, 34064, 34068, 34076, 34080, 34095, 34100, 34108, 34113, 34123, 34121, 34125, 32873, 34128, 32877, 32882, 34135, 34137, 34142, 34154, 34156, 34158, 34160, 34162, 34167, 34175, 34177, 34179, 34185, 34189, 34187, 34193, 34191, 34199, 34204, 34206, 34208, 34210, 34212, 34214, 34216, 34220, 34229, 34249, 34254, 34261, 34281, 34288, 34320, 34290, 34306, 34295, 34320, 34329, 34306, 34313, 34320, 34329, 34347, 34359, 34364, 34376, 34374, 34390, 34388, 34401, 34403, 34405, 34410, 34414, 34442, 33203, 34429, 34435, 34437, 34466, 34442, 33226, 34455, 34466, 34471, 34473, 34481, 34486, 34488, 34521, 34526, 33324, 34535, 33331, 34541, 34543, 33344, 33346, 34552, 34596, 33410, 34605, 34607, 34614, 34661, 34663, 34665, 34667, 34670, 34672, 34674, 34676, 34678, 34682, 34684, 34686, 34690, 34692, 34695, 34697, 34700, 34703, 34706, 34709, 34711, 34715, 34721, 34725, 34732, 34734, 34737, 34739, 34750, 34752, 
34755, 34762, 34767, 34769, 34772, 34774, 34781, 34783, 34785, 34622, 34623, 34624, 34630, 34632, 34633, 34634, 34639, 34797, 34801, 34803, 34808, 34811, 34646, 34647, 34649, 34658, 34656, 34712, 34717, 34719, 34726, 34728, 34741, 34743, 33578, 34746, 34759, 34757, 34764, 34777, 34775, 34817, 34828, 34832, 34834, 34836, 34838, 34841, 34844, 34846, 34849, 34851, 34854, 34856, 34858, 34863, 34866, 33706, 34786, 32395, 34789, 32396, 34792, 34794, 34871, 33714, 34877, 34881, 34804, 34812, 34890, 33735, 33649, 34818, 34820, 34822, 34824, 34829, 34859, 34907, 33753, 34868, 34899, 34885, 34883, 34897, 34903, 34885, 34883, 34897, 34901, 34903, 34895, 34893, 34897, 34901, 34899, 34903, 34910, 34912, 34914, 34916, 34918, 34920, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 34045, 34147, 34267, 34280, 34287, 34312, 34338, 34341, 34454, 34501, 34581, 14086, 14087, 35091, 35106, 33776, 14092, 14093, 35111, 14095, 14096, 14097, 35091, 35106, 33777, 14102, 14103, 35111, 14105, 34645, 32491, 32494, 34930, 32491, 32494, 34933, 14495, 32504, 32505, 14498, 33794, 34007, 14501, 34010, 32745, 14504, 34991, 14506, 14507, 34935, 33801, 14510, 14511, 14512, 14513, 34938, 33811, 34940, 14517, 14518, 14519, 34943, 33828, 34945, 14523, 33841, 14525, 33844, 14527, 14528, 34949, 34950, 34951, 34953, 34955, 34957, 14535, 32591, 14540, 14541, 35091, 34959, 33880, 14546, 14547, 14548, 35111, 14550, 14551, 35091, 35106, 33888, 14556, 14557, 35111, 14559, 34961, 14561, 14562, 33897, 14564, 14565, 14566, 14567, 34419, 14569, 34964, 14571, 34965, 14573, 14574, 14575, 34419, 14577, 34967, 32653, 14580, 33931, 33934, 14583, 14584, 33940, 33943, 14587, 34652, 33947, 34973, 33954, 33957, 33960, 34978, 33967, 33970, 35039, 35039, 14613, 33977, 14615, 33982, 33985, 14618, 14619, 14620, 34985, 14622, 14623, 14624, 34986, 14626, 34004, 34007, 14629, 34010, 32745, 14632, 34991, 14634, 14635, 34993, 34028, 14638, 14639, 35001, 34053, 35003, 14643, 34032, 34034, 34037, 14647, 34040, 14650, 14651, 14652, 14653, 35001, 
34053, 35003, 14657, 34066, 14659, 34070, 35007, 34074, 14663, 34078, 14665, 34082, 35011, 34089, 34092, 14670, 14671, 35015, 35016, 34105, 14675, 14676, 35018, 35019, 35020, 14680, 14681, 14682, 14683, 14684, 14685, 35021, 14687, 14688, 14689, 34139, 14691, 34148, 35025, 14695, 14696, 14697, 14698, 14699, 34164, 14701, 34169, 34172, 14704, 14705, 14706, 35029, 14708, 14709, 14710, 14711, 14712, 35031, 14714, 34201, 14716, 14717, 14718, 14719, 14720, 14721, 14722, 35034, 14724, 34222, 35036, 35037, 14728, 34231, 35039, 35041, 35042, 34244, 34246, 14735, 35045, 14737, 35047, 34258, 14740, 35051, 34270, 34273, 14746, 14748, 34317, 14750, 35067, 34326, 14753, 35070, 35058, 35061, 34303, 14760, 34315, 14763, 34317, 14765, 35067, 34326, 14768, 35070, 35060, 35061, 34303, 14775, 34315, 14778, 34317, 14780, 35067, 34326, 14783, 35070, 35074, 14788, 35076, 35077, 34356, 14792, 34361, 14794, 35081, 35084, 35083, 14798, 14799, 35085, 35086, 35089, 35088, 14804, 14805, 35090, 35091, 35093, 14809, 14810, 14811, 34407, 14813, 33192, 14815, 34416, 34419, 14818, 14819, 35104, 35106, 35100, 14824, 35102, 14826, 14827, 35111, 14829, 14830, 14831, 35104, 35106, 14835, 35109, 35108, 34463, 35111, 14840, 14841, 14842, 35112, 35113, 14845, 34483, 14847, 14848, 35115, 34493, 34495, 35119, 35121, 35122, 35124, 35126, 35128, 14859, 14860, 34528, 34531, 14863, 14864, 34537, 14866, 14867, 14868, 34545, 33341, 14871, 14872, 14873, 35135, 35134, 34559, 35137, 34564, 35140, 35139, 34572, 35142, 35146, 35144, 34587, 35148, 34593, 14889, 34598, 35152, 14892, 14893, 14894, 35156, 35154, 14897, 35157, 34619, 34666, 34687, 34707, 34753, 34770, 15421, 15422, 15423, 35160, 35161, 15426, 15435, 15436, 15437, 35173, 15439, 35174, 15594, 15595, 15638, 35222, 15641, 15642, 35343, 34668, 35348, 35349, 35351, 34680, 35353, 35355, 35356, 35357, 35358, 35359, 35360, 35362, 15831, 35364, 15833, 35365, 15835, 34723, 15837, 15838, 34730, 35368, 35370, 35369, 15843, 15844, 15845, 15846, 34748, 35373, 15850, 
15851, 15852, 35374, 35377, 15856, 15857, 34779, 35380, 34847, 15996, 15997, 15998, 15999, 16000, 16001, 35390, 34799, 35392, 16022, 34806, 34809, 16025, 16294, 34815, 16296, 16297, 16298, 16299, 34826, 16301, 35416, 35418, 34839, 34842, 35424, 35425, 35426, 16310, 34861, 34864, 35430, 34875, 16407, 16408, 35437, 35438, 35440, 34875, 16425, 16426, 16427, 16428, 35440, 35443, 16437, 16438, 16439, 16440, 16441, 35443, 35444, 16651, 16652, 16653, 16654, 16655, 16656, 34905, 35453, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 35500, 14088, 14089, 35099, 14091, 14094, 35509, 14098, 14099, 35099, 14101, 14104, 14489, 14490, 14491, 14492, 14493, 14494, 14496, 14497, 14499, 14500, 14502, 14503, 14505, 14508, 14509, 35542, 14514, 14515, 14516, 35548, 14520, 14521, 14522, 14524, 14526, 14529, 14530, 14531, 14532, 14533, 14534, 14539, 35567, 14542, 14543, 35099, 14545, 14549, 35576, 14552, 14553, 35099, 14555, 14558, 14560, 14563, 14568, 14570, 14572, 14576, 14578, 14579, 14581, 14582, 14585, 14586, 14603, 14604, 14605, 14606, 14607, 14608, 14609, 14610, 14611, 14612, 14614, 14616, 14617, 14621, 14625, 14627, 14628, 14630, 14631, 14633, 14636, 14637, 35649, 14640, 14641, 14642, 14644, 14645, 14646, 14648, 34042, 35662, 14654, 14655, 14656, 14658, 14660, 14661, 14662, 14664, 14666, 14667, 14668, 14669, 14672, 14673, 14674, 14677, 14678, 14679, 35690, 14686, 14690, 34144, 14693, 14694, 14700, 14702, 14703, 14707, 35718, 35720, 14713, 14715, 14723, 14725, 14726, 14727, 14729, 14730, 14731, 14732, 14733, 14734, 14736, 14738, 14739, 35049, 14742, 14743, 14744, 35054, 35056, 14749, 14751, 14752, 14754, 35071, 35495, 14757, 14758, 14759, 35063, 14762, 14764, 14766, 14767, 14769, 35071, 35495, 14772, 14773, 14774, 35063, 14777, 14779, 14781, 14782, 14784, 35071, 35495, 14787, 14789, 14790, 14791, 14793, 14795, 14796, 14797, 35796, 14800, 14801, 14802, 14803, 35802, 14806, 14807, 14808, 14812, 14814, 14816, 14817, 35816, 14820, 14821, 35099, 14823, 14825, 14828, 35827, 14832, 
14833, 34451, 14836, 14837, 14838, 14839, 14843, 14844, 14846, 14849, 14850, 14851, 34498, 14853, 14854, 14855, 14856, 14857, 14858, 14861, 14862, 14865, 14869, 14870, 14874, 14875, 14876, 14877, 14878, 14879, 14880, 14881, 14882, 34578, 14884, 14885, 14886, 14887, 14888, 14890, 14891, 14895, 14896, 14898, 14899, 15424, 15425, 15438, 15440, 35175, 35535, 35539, 35725, 35705, 35703, 35727, 35585, 35588, 35590, 35597, 35812, 15639, 35612, 35915, 35693, 35691, 35705, 35703, 35697, 35705, 35703, 35629, 35633, 35644, 35659, 35693, 35691, 35697, 35705, 35703, 35713, 35727, 35725, 35729, 35836, 35836, 35842, 35860, 35866, 35886, 15814, 35893, 15816, 15817, 15818, 15819, 15820, 15821, 35894, 15823, 15824, 15825, 15826, 15827, 15828, 35895, 15830, 15832, 15834, 15836, 15839, 15840, 15841, 15842, 15847, 15848, 35896, 35949, 15853, 35375, 15855, 35954, 15858, 15859, 16002, 16003, 16004, 16023, 16024, 35910, 35912, 35936, 35943, 16295, 16300, 16302, 16303, 16304, 16305, 35957, 16307, 16308, 16309, 16311, 16384, 16385, 35975, 35962, 35960, 35958, 35975, 35973, 16406, 16409, 16410, 16415, 16424, 16429, 16430, 36004, 16442, 16443, 35975, 35973, 36011, 36014, 16657, 16658, 35992, 35992, 36006, 12, 13, 14, 15, 14090, 35507, 14100, 35516, 36052, 36054, 36061, 36065, 36069, 35564, 14544, 36081, 14554, 35583, 36115, 36117, 36124, 14649, 36133, 36144, 36147, 14692, 14741, 14745, 14747, 14755, 14756, 14761, 35765, 14770, 14771, 14776, 35777, 14785, 14786, 36218, 36223, 14822, 35825, 14834, 36244, 35835, 14852, 35853, 36266, 36271, 14883, 36276, 36283, 36032, 36036, 36038, 36042, 36289, 15441, 36045, 36044, 36046, 36048, 36047, 36049, 36051, 36050, 35533, 15577, 36057, 15579, 36062, 36066, 36067, 36155, 15586, 36068, 15589, 15590, 15591, 36071, 36075, 36076, 36080, 36082, 36086, 36239, 35595, 35593, 15611, 35810, 15613, 35586, 36250, 15616, 36232, 36227, 36239, 35595, 35593, 15627, 35810, 35808, 36250, 15631, 35601, 36095, 35604, 36097, 35608, 36099, 15640, 36140, 36142, 36143, 36146, 
36149, 15650, 15651, 36152, 15654, 15655, 36155, 36100, 36102, 36104, 36106, 36143, 36146, 36151, 15666, 36152, 15669, 15670, 36155, 36168, 36168, 35623, 35625, 36112, 15677, 36113, 15679, 36114, 35642, 15684, 36120, 36125, 36126, 36128, 15691, 36134, 36135, 36136, 36138, 36139, 36140, 36142, 36143, 36146, 36149, 15705, 15706, 36151, 15708, 36152, 15711, 15712, 36155, 35707, 35709, 36158, 15717, 36159, 36160, 36163, 36162, 15722, 15723, 15724, 36164, 36165, 36167, 36168, 36170, 36172, 35745, 35743, 36176, 36179, 36183, 36184, 35758, 36189, 36191, 36194, 36195, 35770, 36200, 36202, 36205, 36206, 35782, 36211, 36212, 36215, 36214, 36219, 36224, 36232, 36227, 36239, 15772, 35807, 35810, 35808, 36250, 35812, 36232, 36237, 36236, 36239, 15787, 36247, 35840, 36250, 15791, 36254, 36256, 35854, 36261, 35858, 15799, 35862, 15801, 36264, 36268, 36278, 35882, 15810, 36281, 36284, 15815, 15822, 15829, 35934, 36354, 15849, 36359, 15854, 36345, 36343, 36341, 36287, 36345, 36343, 36341, 36348, 36347, 16130, 16192, 36333, 36335, 36337, 36343, 36341, 36348, 36347, 16284, 36351, 16287, 35945, 36361, 36363, 16306, 35967, 16387, 16388, 16389, 16390, 36375, 36376, 36369, 36382, 35986, 35967, 16398, 16399, 36375, 36376, 36369, 36382, 35986, 35997, 36399, 35967, 36369, 35971, 16643, 16644, 36375, 36376, 36378, 36382, 35986, 16721, 36385, 36394, 16738, 36393, 36394, 36406, 36396, 36406, 16777, 36400, 36401, 36406, 36405, 36407, 12, 13, 14, 15, 15427, 36034, 15429, 35505, 15431, 36040, 15433, 35514, 15566, 15567, 15568, 15569, 15570, 15571, 15572, 15573, 36421, 36420, 15576, 15578, 36059, 15581, 36063, 15583, 15584, 15585, 15587, 36424, 36490, 15592, 36073, 15596, 15597, 36078, 15599, 35572, 15601, 36084, 15603, 35581, 15605, 36241, 35830, 36245, 15609, 15610, 15612, 15614, 15615, 15617, 36234, 15619, 35823, 15621, 36241, 35830, 36245, 15625, 15626, 15628, 15629, 15630, 15632, 15633, 15634, 15635, 15636, 15637, 36523, 15643, 15644, 15645, 36435, 15647, 36436, 15649, 36530, 36437, 15653, 
15656, 36533, 15657, 15658, 15659, 15660, 15661, 36435, 15663, 36436, 15665, 36437, 15668, 15671, 36545, 15672, 15673, 15674, 15675, 15676, 15678, 15680, 36431, 36430, 15683, 15685, 36122, 15687, 15688, 15689, 36129, 36131, 15693, 15694, 15695, 15696, 15697, 15698, 15699, 15700, 36435, 15702, 36436, 15704, 36574, 15707, 36437, 15710, 15713, 36579, 15714, 15715, 15716, 15718, 15719, 15720, 15721, 36590, 15725, 15726, 15727, 15728, 15729, 15730, 15731, 15732, 15733, 36438, 15735, 36440, 36439, 15738, 15739, 36441, 15741, 15742, 15743, 36443, 15745, 15746, 36445, 15748, 15749, 15750, 36447, 15752, 15753, 36449, 15755, 15756, 15757, 15758, 15759, 36216, 15761, 36221, 15763, 15764, 36234, 15766, 35823, 15768, 36241, 35830, 36245, 15773, 15774, 15775, 15776, 15777, 15778, 36234, 15780, 15781, 35823, 15783, 36241, 35830, 36245, 15788, 15789, 15790, 36252, 15793, 15794, 36258, 15796, 15797, 15798, 15800, 15802, 36460, 15804, 36461, 36273, 36463, 15808, 15809, 15811, 36464, 15813, 36659, 15975, 15976, 15977, 36655, 15979, 15984, 15985, 15986, 36655, 36469, 15989, 16091, 36331, 16275, 16276, 16277, 36345, 16279, 16280, 36655, 16282, 16283, 36657, 16286, 36658, 16289, 36660, 16292, 16293, 16386, 36689, 36691, 16391, 16392, 16393, 36686, 16395, 16396, 16397, 36699, 16400, 16401, 16402, 36686, 16404, 16405, 35998, 16435, 16436, 16642, 36711, 16645, 16646, 16647, 36686, 16649, 16650, 16722, 16723, 16739, 16740, 16751, 16753, 16764, 36706, 16778, 16779, 16873, 16874, 16875, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15428, 15430, 15432, 15434, 36745, 36748, 36751, 15574, 15575, 15580, 15582, 15588, 15593, 15598, 15600, 15602, 15604, 15606, 15607, 15608, 36781, 36503, 36506, 15618, 15620, 15622, 15623, 15624, 36794, 36796, 36516, 15646, 15648, 15652, 36815, 15662, 15664, 15667, 36828, 15681, 15682, 15686, 15690, 15692, 15701, 15703, 15709, 36863, 36871, 36880, 15734, 15736, 15737, 15740, 15744, 15747, 15751, 15754, 36907, 15760, 15762, 15765, 15767, 15769, 15770, 15771, 36922, 
36924, 15779, 36928, 15782, 15784, 15785, 15786, 36638, 15792, 15795, 36945, 15803, 15805, 15806, 15807, 36953, 15812, 15978, 36958, 15987, 36963, 15988, 36874, 36591, 36876, 36886, 36893, 36900, 36554, 36874, 36876, 36886, 36893, 36900, 36874, 36591, 36876, 36886, 36893, 36900, 36480, 36843, 36850, 36848, 36852, 36860, 36858, 36832, 36834, 36868, 36557, 36843, 36850, 36848, 36852, 36860, 36858, 36832, 36834, 36868, 36798, 36943, 36941, 36798, 36943, 36802, 36800, 36804, 36805, 36811, 36819, 36817, 36825, 36830, 36831, 36832, 36834, 36554, 36874, 36886, 36893, 36900, 36557, 36843, 36850, 36848, 36852, 36860, 36858, 36865, 36867, 36868, 36874, 36591, 36876, 36886, 36893, 36900, 36624, 36634, 36943, 36941, 16274, 16278, 16281, 36978, 16285, 16288, 36956, 16291, 36971, 36984, 36971, 36984, 36988, 16394, 16403, 36971, 36984, 36971, 36984, 36971, 36984, 16648, 36989, 36993, 36995, 36997, 37001, 37014, 36989, 36993, 36995, 36997, 37001, 37016, 37006, 37008, 37012, 37003, 37006, 37008, 37012, 37003, 16766, 37004, 37008, 37012, 37022, 37006, 37008, 37012, 37025, 12, 13, 14, 15, 37048, 36783, 36814, 36827, 37080, 36862, 37092, 36889, 36896, 36903, 37124, 36736, 36738, 36740, 36742, 37126, 16040, 16041, 16042, 36878, 36881, 16045, 16048, 36891, 16051, 36898, 36905, 37100, 37099, 16057, 16058, 16059, 36878, 36881, 16062, 16065, 36891, 16068, 36898, 36905, 37100, 37099, 16074, 16075, 16076, 36878, 36881, 16079, 16082, 36891, 16085, 36898, 36905, 37100, 37099, 16093, 37081, 16095, 37082, 37050, 37049, 16099, 16100, 16101, 36856, 36854, 16104, 16105, 16107, 16108, 16109, 37088, 16112, 37081, 37082, 16115, 37083, 16117, 16118, 16119, 36856, 36762, 16122, 16123, 16125, 16126, 16127, 37088, 36765, 36768, 36770, 36772, 36774, 36776, 37058, 37060, 36785, 36787, 36789, 37066, 37068, 37069, 36925, 37109, 36930, 37112, 16149, 36935, 37115, 36939, 36944, 16154, 16155, 37121, 37119, 37118, 36952, 37123, 36768, 36770, 36772, 36774, 36776, 37058, 37060, 36785, 36787, 36789, 37066, 37068, 
37069, 36925, 37109, 36930, 37112, 16179, 36935, 37115, 36939, 36944, 16184, 16185, 16186, 37121, 37119, 37118, 36952, 37123, 16193, 16194, 36809, 36807, 16197, 16199, 16200, 36823, 36821, 16203, 16205, 16206, 36891, 36898, 16210, 16211, 16212, 16213, 16214, 16215, 16216, 16218, 37081, 37082, 16221, 37083, 16223, 16224, 16225, 36856, 36854, 16228, 16229, 16231, 16232, 16233, 37088, 16235, 16236, 16237, 36878, 36881, 16240, 16243, 36891, 16246, 36898, 36905, 37100, 37099, 36912, 36914, 36916, 37104, 16256, 37106, 36925, 37109, 36930, 37112, 16262, 36935, 37115, 36939, 36944, 16267, 16268, 37121, 37119, 37118, 36952, 37123, 36974, 37211, 37213, 37214, 16290, 16355, 37209, 16360, 16376, 37209, 16381, 16542, 37209, 16547, 16575, 37209, 16580, 16635, 37209, 16640, 36986, 16714, 36991, 16716, 16717, 16718, 36999, 16720, 36986, 16731, 36991, 16733, 16734, 16735, 36999, 16737, 16747, 16748, 37005, 16750, 16752, 16760, 16761, 37010, 16763, 16765, 16773, 16774, 37005, 16776, 16869, 16870, 37010, 16872, 37236, 37242, 37255, 37259, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37274, 15980, 15981, 15982, 15983, 37279, 37281, 16043, 16044, 36883, 37271, 16049, 37272, 16052, 37273, 16054, 16055, 16056, 37294, 16060, 16061, 36883, 37271, 16066, 37272, 16069, 37273, 16071, 16072, 16073, 37307, 16077, 16078, 36883, 37271, 16083, 37272, 16086, 37273, 16088, 16089, 16090, 37264, 16094, 16096, 16097, 16098, 37326, 16102, 16103, 37331, 37269, 16110, 37268, 16113, 16114, 16116, 37342, 16120, 16121, 37347, 37269, 16128, 16129, 16131, 16132, 16133, 16134, 16135, 16136, 16137, 37265, 16139, 16140, 16141, 16142, 16143, 16144, 16145, 16146, 16147, 16148, 16150, 16151, 16152, 16153, 16156, 16157, 16158, 16159, 16160, 16161, 16162, 16163, 16164, 16165, 16166, 16167, 37265, 16169, 16170, 16171, 16172, 16173, 16174, 16175, 16176, 16177, 16178, 16180, 16181, 16182, 16183, 37406, 16187, 16188, 16189, 16190, 16191, 16195, 16196, 37266, 37418, 16201, 16202, 37267, 36883, 16208, 16209, 37429, 37268, 16219, 
16220, 16222, 37439, 16226, 16227, 37444, 37269, 16234, 37450, 16238, 16239, 36883, 37271, 16244, 37272, 16247, 37273, 16249, 16250, 16251, 16252, 16253, 16254, 16255, 16257, 16258, 16259, 16260, 16261, 16263, 16264, 16265, 16266, 16269, 16270, 16271, 16272, 16273, 37485, 16356, 37487, 37486, 37412, 16377, 37487, 37486, 37488, 37332, 37348, 16543, 37487, 37486, 37488, 16576, 37487, 37486, 37412, 37426, 37426, 37445, 16636, 37487, 37486, 37488, 16713, 16715, 16719, 16730, 16732, 16736, 16749, 37524, 16762, 37529, 16775, 16871, 37508, 16911, 37516, 16918, 37520, 37525, 37530, 16933, 37534, 16968, 10, 11, 12, 13, 14, 15, 16046, 16047, 37563, 16050, 37565, 16053, 37569, 16063, 16064, 37575, 16067, 37577, 16070, 37581, 16080, 16081, 37587, 16084, 37589, 16087, 37593, 16092, 37596, 37598, 37601, 16106, 16111, 37339, 37611, 16124, 16138, 37375, 37639, 16168, 37404, 37667, 37672, 16198, 37676, 16204, 16207, 16217, 37436, 37688, 16230, 16241, 16242, 37697, 16245, 37699, 16248, 37703, 37477, 37718, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37659, 37657, 37399, 37662, 37669, 37723, 37552, 16358, 16359, 16361, 37620, 37618, 37616, 37626, 37555, 37553, 37628, 37632, 37630, 37370, 37635, 37641, 37727, 37557, 16379, 16380, 16382, 37722, 37558, 37559, 37570, 37571, 37582, 37583, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37659, 37657, 37399, 37662, 37669, 37722, 16507, 37334, 37608, 16514, 37350, 37681, 37693, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37662, 37669, 37733, 37722, 16545, 16546, 16548, 37620, 37618, 37616, 37626, 37624, 37628, 37632, 37630, 37370, 37635, 37641, 37647, 37645, 37643, 37653, 37651, 37655, 37659, 37657, 37399, 37662, 37669, 37737, 37722, 16578, 16579, 16581, 37685, 16589, 37447, 37681, 37693, 37681, 37685, 16606, 37681, 37693, 37685, 16618, 37447, 37692, 37693, 37706, 37704, 37466, 37711, 
37709, 37472, 37714, 37720, 37744, 37722, 16638, 16639, 16641, 37748, 37749, 16909, 37750, 37751, 37752, 16916, 37753, 16921, 37754, 37755, 16926, 37756, 37757, 16931, 37758, 16966, 37759, 37761, 37763, 37767, 37769, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 37776, 37783, 37790, 37376, 37640, 37665, 37668, 37821, 37478, 37719, 16332, 16333, 16334, 37622, 16336, 16337, 16338, 16339, 16340, 16341, 16342, 16343, 16345, 16346, 16347, 37649, 16349, 16350, 16351, 16352, 16353, 16357, 37852, 37853, 16362, 16363, 16364, 37622, 16366, 16367, 16368, 16369, 16370, 16371, 16372, 16373, 16374, 16378, 37869, 37870, 16383, 16460, 16461, 37780, 37778, 37567, 16466, 16467, 37787, 37785, 37579, 16472, 16473, 37794, 37792, 37591, 16478, 16479, 16480, 37622, 16482, 16483, 16484, 16485, 16486, 16487, 16488, 16489, 16491, 16492, 16493, 37649, 16495, 16496, 16497, 16498, 16499, 16501, 37797, 37799, 37595, 37327, 37602, 16508, 37802, 16510, 37606, 37343, 37612, 16515, 16516, 16517, 37825, 37823, 37701, 16522, 16523, 16524, 37622, 16526, 16527, 16528, 16529, 16530, 16531, 16532, 16533, 16535, 16536, 16537, 37649, 16539, 16540, 16544, 37924, 37925, 16549, 16550, 16551, 37622, 16553, 16554, 16555, 16556, 16557, 16558, 16559, 16560, 16562, 16563, 16564, 37649, 16566, 16567, 16568, 16569, 16570, 16571, 16572, 16573, 16577, 37951, 37952, 37817, 16583, 37683, 37413, 37416, 37674, 37421, 16590, 16591, 16592, 37680, 37679, 37816, 37701, 16597, 37680, 37679, 37816, 37817, 16602, 37683, 37440, 37689, 16607, 16608, 37432, 37431, 37430, 37701, 37817, 16614, 37683, 37440, 37689, 16619, 16620, 16621, 37825, 37823, 37701, 16626, 16627, 16628, 16629, 16630, 16631, 16632, 16633, 16637, 37979, 37980, 16907, 16908, 16910, 16914, 16915, 16917, 16922, 16923, 16927, 16928, 16932, 16967, 16985, 16988, 16994, 17008, 12, 13, 14, 15, 38027, 16335, 38031, 38034, 38019, 38039, 16348, 38043, 38021, 38051, 16365, 38055, 38059, 38019, 16462, 16463, 38016, 16465, 16468, 16469, 38017, 16471, 16474, 16475, 38018, 16477, 
38083, 16481, 38087, 38090, 38019, 38095, 16494, 38099, 38021, 16502, 16503, 16504, 16505, 16506, 16509, 16511, 16512, 16513, 16518, 16519, 38023, 16521, 38122, 16525, 38126, 38129, 38019, 38134, 16538, 38021, 38143, 16552, 38147, 38150, 38019, 38155, 16565, 38159, 38162, 38021, 16582, 16584, 16585, 16586, 16587, 16588, 16593, 16594, 16595, 16596, 16598, 16599, 16600, 16601, 16603, 16604, 16605, 16609, 16610, 16611, 16612, 16613, 16615, 16616, 16617, 16622, 16623, 38023, 16625, 38210, 38213, 38024, 37849, 38048, 37866, 38064, 37976, 38218, 37898, 37901, 38067, 38072, 38077, 37964, 38204, 37976, 38218, 37898, 37901, 38116, 37964, 38204, 37921, 38140, 37948, 38167, 37954, 38177, 38183, 37960, 38192, 37964, 38204, 37976, 38218, 37983, 38220, 37987, 38223, 37989, 38227, 37992, 38229, 37995, 37997, 13, 14, 15, 38028, 16344, 38040, 16354, 38052, 38056, 16375, 16464, 38255, 16470, 38259, 16476, 38263, 38084, 16490, 38096, 16500, 38277, 38281, 16520, 38285, 38123, 16534, 38135, 16541, 38144, 16561, 38156, 16574, 38307, 38313, 38317, 38320, 38324, 38328, 16624, 38332, 16634, 38243, 38242, 38247, 38303, 16695, 16696, 38252, 16701, 16702, 38269, 38268, 38273, 38303, 16711, 16712, 16787, 38278, 16790, 38282, 16792, 16794, 16796, 16799, 38329, 16801, 38269, 38268, 38273, 38303, 16811, 16812, 16814, 38278, 16817, 38282, 16819, 16822, 38329, 16824, 38291, 38290, 38304, 38303, 16834, 16835, 38299, 38298, 38304, 38303, 16844, 16845, 16847, 38310, 38308, 16850, 16852, 16855, 38321, 16857, 16860, 38329, 16862, 38336, 38335, 16867, 16868, 16983, 16984, 16986, 16987, 16989, 16990, 16991, 16992, 16993, 17007, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38385, 38387, 38390, 38391, 38393, 38395, 38398, 38400, 38403, 38406, 38408, 38410, 38412, 38314, 38318, 38325, 38419, 38421, 16687, 16688, 38384, 16691, 16692, 38386, 16697, 38389, 38388, 16703, 16704, 38397, 16707, 16708, 38399, 38275, 16788, 38280, 16791, 38327, 16800, 16803, 16804, 38397, 16807, 16808, 38399, 38275, 16815, 38280, 
16818, 38327, 16823, 16826, 16827, 38405, 16830, 16831, 38407, 16836, 16837, 38409, 16840, 16841, 38411, 38306, 16848, 16849, 38319, 16856, 38327, 16861, 16864, 16865, 38426, 38429, 38435, 38486, 38451, 38465, 38471, 38486, 38489, 38491, 38496, 38494, 38492, 38497, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16689, 38531, 38036, 16693, 38534, 38045, 16698, 16699, 38061, 16705, 38540, 38092, 16709, 38543, 38101, 16786, 38546, 16789, 38548, 38515, 38516, 38517, 16798, 38550, 38528, 16805, 38552, 38092, 16809, 38555, 38101, 16813, 38558, 16816, 38560, 38520, 16821, 38562, 38528, 16828, 38564, 38131, 16832, 38567, 38137, 16838, 38570, 38152, 16842, 38573, 38164, 16846, 38576, 38525, 38526, 16854, 38579, 38527, 16859, 38581, 38528, 38583, 38215, 16891, 16894, 16902, 16906, 16945, 16953, 16956, 16965, 17012, 17013, 38593, 17015, 38592, 17020, 13, 14, 15, 38608, 16690, 38611, 16694, 38614, 16700, 38617, 16706, 38620, 16710, 16793, 16795, 16797, 16802, 38633, 16806, 38636, 16810, 16820, 16825, 38647, 16829, 38650, 16833, 38653, 16839, 38656, 16843, 38577, 16851, 16853, 16858, 16863, 16866, 38625, 38623, 38630, 38641, 38639, 38644, 38625, 38623, 38630, 38641, 38639, 38644, 38663, 38666, 17014, 17016, 38680, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38615, 16882, 16883, 38443, 38442, 38441, 16887, 38446, 38690, 38688, 38714, 16895, 16896, 38457, 16898, 38460, 38696, 38694, 38659, 38476, 38669, 16936, 16937, 38443, 38442, 38441, 16941, 38446, 38704, 38702, 16946, 16947, 38457, 16949, 38460, 38710, 38708, 38714, 38712, 38659, 38476, 38477, 16960, 38480, 16962, 38483, 38669, 38682, 38754, 16884, 16885, 16886, 16888, 16889, 16890, 16892, 38752, 38764, 16897, 16899, 16900, 16901, 16903, 16904, 16905, 38774, 16938, 16939, 16940, 16942, 16943, 16944, 38783, 16948, 16950, 16951, 16952, 16954, 16955, 16957, 16958, 16959, 16961, 16963, 16964, 38799, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38802, 38806, 16893, 38813, 38819, 38823, 38828, 38830, 38758, 38766, 38809, 38796, 38794, 38794, 38814, 
38816, 38778, 38785, 38824, 38796, 38794, 38794, 38831, 38836, 38737, 9, 10, 11, 12, 13, 14, 15, 38803, 38850, 38820, 16971, 38849, 16975, 16976, 38851, 16978, 16979, 16980, 16981, 16982, 16995, 38853, 16998, 16999, 38855, 38854, 17002, 17003, 17004, 17005, 17006, 8, 9, 10, 11, 12, 13, 14, 15, 38800, 38881, 16974, 38886, 16977, 38889, 38891, 38817, 16997, 38896, 17000, 17001, 38900, 38902, 14, 15, 16972, 16973, 38918, 16996, 38923, 38925, 38915, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38928, 38914, 38931, 38930, 17010, 38933, 38921, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17009, 38944, 17017, 17018, 38946, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17011, 38948, 17019, 38963, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38976, 38978, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 38993, 38992, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17021, 17022, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 39025, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
bool h_Op[]= {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 
1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 
1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
// Launch configuration and problem-size constants for the generated
// arithmetic-circuit evaluator below.
#define THREADS_PER_BLOCK 16
#define BLOCKS_PER_GRID 1
// Number of leaf/input values: matches the 1064 per-thread input slots
// copied from A into shared memory below (1064 * 16 threads = 17024).
#define SIZE_OF_IN 17024
// 22032 = 1377 * 16; together with the inputs, 1064 + 1377 = 2441 slots,
// which matches the shared buffer R (2441 * THREADS_PER_BLOCK) — presumably
// the number of computed circuit nodes. TODO(review): confirm against the
// generator that produced this file.
#define SIZE_OF_AC 22032
__device__ void
ac(float *A, const int *B, const int *C, const bool *Op, int n_iter) {
int i= blockDim.x * blockIdx.x + threadIdx.x;
__shared__ float R[2441*THREADS_PER_BLOCK];
const int t= THREADS_PER_BLOCK;
__shared__ float final;
final=0;
R[i + 0*t] = A[i + 0*t];
R[i + 1*t] = A[i + 1*t];
R[i + 2*t] = A[i + 2*t];
R[i + 3*t] = A[i + 3*t];
R[i + 4*t] = A[i + 4*t];
R[i + 5*t] = A[i + 5*t];
R[i + 6*t] = A[i + 6*t];
R[i + 7*t] = A[i + 7*t];
R[i + 8*t] = A[i + 8*t];
R[i + 9*t] = A[i + 9*t];
R[i + 10*t] = A[i + 10*t];
R[i + 11*t] = A[i + 11*t];
R[i + 12*t] = A[i + 12*t];
R[i + 13*t] = A[i + 13*t];
R[i + 14*t] = A[i + 14*t];
R[i + 15*t] = A[i + 15*t];
R[i + 16*t] = A[i + 16*t];
R[i + 17*t] = A[i + 17*t];
R[i + 18*t] = A[i + 18*t];
R[i + 19*t] = A[i + 19*t];
R[i + 20*t] = A[i + 20*t];
R[i + 21*t] = A[i + 21*t];
R[i + 22*t] = A[i + 22*t];
R[i + 23*t] = A[i + 23*t];
R[i + 24*t] = A[i + 24*t];
R[i + 25*t] = A[i + 25*t];
R[i + 26*t] = A[i + 26*t];
R[i + 27*t] = A[i + 27*t];
R[i + 28*t] = A[i + 28*t];
R[i + 29*t] = A[i + 29*t];
R[i + 30*t] = A[i + 30*t];
R[i + 31*t] = A[i + 31*t];
R[i + 32*t] = A[i + 32*t];
R[i + 33*t] = A[i + 33*t];
R[i + 34*t] = A[i + 34*t];
R[i + 35*t] = A[i + 35*t];
R[i + 36*t] = A[i + 36*t];
R[i + 37*t] = A[i + 37*t];
R[i + 38*t] = A[i + 38*t];
R[i + 39*t] = A[i + 39*t];
R[i + 40*t] = A[i + 40*t];
R[i + 41*t] = A[i + 41*t];
R[i + 42*t] = A[i + 42*t];
R[i + 43*t] = A[i + 43*t];
R[i + 44*t] = A[i + 44*t];
R[i + 45*t] = A[i + 45*t];
R[i + 46*t] = A[i + 46*t];
R[i + 47*t] = A[i + 47*t];
R[i + 48*t] = A[i + 48*t];
R[i + 49*t] = A[i + 49*t];
R[i + 50*t] = A[i + 50*t];
R[i + 51*t] = A[i + 51*t];
R[i + 52*t] = A[i + 52*t];
R[i + 53*t] = A[i + 53*t];
R[i + 54*t] = A[i + 54*t];
R[i + 55*t] = A[i + 55*t];
R[i + 56*t] = A[i + 56*t];
R[i + 57*t] = A[i + 57*t];
R[i + 58*t] = A[i + 58*t];
R[i + 59*t] = A[i + 59*t];
R[i + 60*t] = A[i + 60*t];
R[i + 61*t] = A[i + 61*t];
R[i + 62*t] = A[i + 62*t];
R[i + 63*t] = A[i + 63*t];
R[i + 64*t] = A[i + 64*t];
R[i + 65*t] = A[i + 65*t];
R[i + 66*t] = A[i + 66*t];
R[i + 67*t] = A[i + 67*t];
R[i + 68*t] = A[i + 68*t];
R[i + 69*t] = A[i + 69*t];
R[i + 70*t] = A[i + 70*t];
R[i + 71*t] = A[i + 71*t];
R[i + 72*t] = A[i + 72*t];
R[i + 73*t] = A[i + 73*t];
R[i + 74*t] = A[i + 74*t];
R[i + 75*t] = A[i + 75*t];
R[i + 76*t] = A[i + 76*t];
R[i + 77*t] = A[i + 77*t];
R[i + 78*t] = A[i + 78*t];
R[i + 79*t] = A[i + 79*t];
R[i + 80*t] = A[i + 80*t];
R[i + 81*t] = A[i + 81*t];
R[i + 82*t] = A[i + 82*t];
R[i + 83*t] = A[i + 83*t];
R[i + 84*t] = A[i + 84*t];
R[i + 85*t] = A[i + 85*t];
R[i + 86*t] = A[i + 86*t];
R[i + 87*t] = A[i + 87*t];
R[i + 88*t] = A[i + 88*t];
R[i + 89*t] = A[i + 89*t];
R[i + 90*t] = A[i + 90*t];
R[i + 91*t] = A[i + 91*t];
R[i + 92*t] = A[i + 92*t];
R[i + 93*t] = A[i + 93*t];
R[i + 94*t] = A[i + 94*t];
R[i + 95*t] = A[i + 95*t];
R[i + 96*t] = A[i + 96*t];
R[i + 97*t] = A[i + 97*t];
R[i + 98*t] = A[i + 98*t];
R[i + 99*t] = A[i + 99*t];
R[i + 100*t] = A[i + 100*t];
R[i + 101*t] = A[i + 101*t];
R[i + 102*t] = A[i + 102*t];
R[i + 103*t] = A[i + 103*t];
R[i + 104*t] = A[i + 104*t];
R[i + 105*t] = A[i + 105*t];
R[i + 106*t] = A[i + 106*t];
R[i + 107*t] = A[i + 107*t];
R[i + 108*t] = A[i + 108*t];
R[i + 109*t] = A[i + 109*t];
R[i + 110*t] = A[i + 110*t];
R[i + 111*t] = A[i + 111*t];
R[i + 112*t] = A[i + 112*t];
R[i + 113*t] = A[i + 113*t];
R[i + 114*t] = A[i + 114*t];
R[i + 115*t] = A[i + 115*t];
R[i + 116*t] = A[i + 116*t];
R[i + 117*t] = A[i + 117*t];
R[i + 118*t] = A[i + 118*t];
R[i + 119*t] = A[i + 119*t];
R[i + 120*t] = A[i + 120*t];
R[i + 121*t] = A[i + 121*t];
R[i + 122*t] = A[i + 122*t];
R[i + 123*t] = A[i + 123*t];
R[i + 124*t] = A[i + 124*t];
R[i + 125*t] = A[i + 125*t];
R[i + 126*t] = A[i + 126*t];
R[i + 127*t] = A[i + 127*t];
R[i + 128*t] = A[i + 128*t];
R[i + 129*t] = A[i + 129*t];
R[i + 130*t] = A[i + 130*t];
R[i + 131*t] = A[i + 131*t];
R[i + 132*t] = A[i + 132*t];
R[i + 133*t] = A[i + 133*t];
R[i + 134*t] = A[i + 134*t];
R[i + 135*t] = A[i + 135*t];
R[i + 136*t] = A[i + 136*t];
R[i + 137*t] = A[i + 137*t];
R[i + 138*t] = A[i + 138*t];
R[i + 139*t] = A[i + 139*t];
R[i + 140*t] = A[i + 140*t];
R[i + 141*t] = A[i + 141*t];
R[i + 142*t] = A[i + 142*t];
R[i + 143*t] = A[i + 143*t];
R[i + 144*t] = A[i + 144*t];
R[i + 145*t] = A[i + 145*t];
R[i + 146*t] = A[i + 146*t];
R[i + 147*t] = A[i + 147*t];
R[i + 148*t] = A[i + 148*t];
R[i + 149*t] = A[i + 149*t];
R[i + 150*t] = A[i + 150*t];
R[i + 151*t] = A[i + 151*t];
R[i + 152*t] = A[i + 152*t];
R[i + 153*t] = A[i + 153*t];
R[i + 154*t] = A[i + 154*t];
R[i + 155*t] = A[i + 155*t];
R[i + 156*t] = A[i + 156*t];
R[i + 157*t] = A[i + 157*t];
R[i + 158*t] = A[i + 158*t];
R[i + 159*t] = A[i + 159*t];
R[i + 160*t] = A[i + 160*t];
R[i + 161*t] = A[i + 161*t];
R[i + 162*t] = A[i + 162*t];
R[i + 163*t] = A[i + 163*t];
R[i + 164*t] = A[i + 164*t];
R[i + 165*t] = A[i + 165*t];
R[i + 166*t] = A[i + 166*t];
R[i + 167*t] = A[i + 167*t];
R[i + 168*t] = A[i + 168*t];
R[i + 169*t] = A[i + 169*t];
R[i + 170*t] = A[i + 170*t];
R[i + 171*t] = A[i + 171*t];
R[i + 172*t] = A[i + 172*t];
R[i + 173*t] = A[i + 173*t];
R[i + 174*t] = A[i + 174*t];
R[i + 175*t] = A[i + 175*t];
R[i + 176*t] = A[i + 176*t];
R[i + 177*t] = A[i + 177*t];
R[i + 178*t] = A[i + 178*t];
R[i + 179*t] = A[i + 179*t];
R[i + 180*t] = A[i + 180*t];
R[i + 181*t] = A[i + 181*t];
R[i + 182*t] = A[i + 182*t];
R[i + 183*t] = A[i + 183*t];
R[i + 184*t] = A[i + 184*t];
R[i + 185*t] = A[i + 185*t];
R[i + 186*t] = A[i + 186*t];
R[i + 187*t] = A[i + 187*t];
R[i + 188*t] = A[i + 188*t];
R[i + 189*t] = A[i + 189*t];
R[i + 190*t] = A[i + 190*t];
R[i + 191*t] = A[i + 191*t];
R[i + 192*t] = A[i + 192*t];
R[i + 193*t] = A[i + 193*t];
R[i + 194*t] = A[i + 194*t];
R[i + 195*t] = A[i + 195*t];
R[i + 196*t] = A[i + 196*t];
R[i + 197*t] = A[i + 197*t];
R[i + 198*t] = A[i + 198*t];
R[i + 199*t] = A[i + 199*t];
R[i + 200*t] = A[i + 200*t];
R[i + 201*t] = A[i + 201*t];
R[i + 202*t] = A[i + 202*t];
R[i + 203*t] = A[i + 203*t];
R[i + 204*t] = A[i + 204*t];
R[i + 205*t] = A[i + 205*t];
R[i + 206*t] = A[i + 206*t];
R[i + 207*t] = A[i + 207*t];
R[i + 208*t] = A[i + 208*t];
R[i + 209*t] = A[i + 209*t];
R[i + 210*t] = A[i + 210*t];
R[i + 211*t] = A[i + 211*t];
R[i + 212*t] = A[i + 212*t];
R[i + 213*t] = A[i + 213*t];
R[i + 214*t] = A[i + 214*t];
R[i + 215*t] = A[i + 215*t];
R[i + 216*t] = A[i + 216*t];
R[i + 217*t] = A[i + 217*t];
R[i + 218*t] = A[i + 218*t];
R[i + 219*t] = A[i + 219*t];
R[i + 220*t] = A[i + 220*t];
R[i + 221*t] = A[i + 221*t];
R[i + 222*t] = A[i + 222*t];
R[i + 223*t] = A[i + 223*t];
R[i + 224*t] = A[i + 224*t];
R[i + 225*t] = A[i + 225*t];
R[i + 226*t] = A[i + 226*t];
R[i + 227*t] = A[i + 227*t];
R[i + 228*t] = A[i + 228*t];
R[i + 229*t] = A[i + 229*t];
R[i + 230*t] = A[i + 230*t];
R[i + 231*t] = A[i + 231*t];
R[i + 232*t] = A[i + 232*t];
R[i + 233*t] = A[i + 233*t];
R[i + 234*t] = A[i + 234*t];
R[i + 235*t] = A[i + 235*t];
R[i + 236*t] = A[i + 236*t];
R[i + 237*t] = A[i + 237*t];
R[i + 238*t] = A[i + 238*t];
R[i + 239*t] = A[i + 239*t];
R[i + 240*t] = A[i + 240*t];
R[i + 241*t] = A[i + 241*t];
R[i + 242*t] = A[i + 242*t];
R[i + 243*t] = A[i + 243*t];
R[i + 244*t] = A[i + 244*t];
R[i + 245*t] = A[i + 245*t];
R[i + 246*t] = A[i + 246*t];
R[i + 247*t] = A[i + 247*t];
R[i + 248*t] = A[i + 248*t];
R[i + 249*t] = A[i + 249*t];
R[i + 250*t] = A[i + 250*t];
R[i + 251*t] = A[i + 251*t];
R[i + 252*t] = A[i + 252*t];
R[i + 253*t] = A[i + 253*t];
R[i + 254*t] = A[i + 254*t];
R[i + 255*t] = A[i + 255*t];
R[i + 256*t] = A[i + 256*t];
R[i + 257*t] = A[i + 257*t];
R[i + 258*t] = A[i + 258*t];
R[i + 259*t] = A[i + 259*t];
R[i + 260*t] = A[i + 260*t];
R[i + 261*t] = A[i + 261*t];
R[i + 262*t] = A[i + 262*t];
R[i + 263*t] = A[i + 263*t];
R[i + 264*t] = A[i + 264*t];
R[i + 265*t] = A[i + 265*t];
R[i + 266*t] = A[i + 266*t];
R[i + 267*t] = A[i + 267*t];
R[i + 268*t] = A[i + 268*t];
R[i + 269*t] = A[i + 269*t];
R[i + 270*t] = A[i + 270*t];
R[i + 271*t] = A[i + 271*t];
R[i + 272*t] = A[i + 272*t];
R[i + 273*t] = A[i + 273*t];
R[i + 274*t] = A[i + 274*t];
R[i + 275*t] = A[i + 275*t];
R[i + 276*t] = A[i + 276*t];
R[i + 277*t] = A[i + 277*t];
R[i + 278*t] = A[i + 278*t];
R[i + 279*t] = A[i + 279*t];
R[i + 280*t] = A[i + 280*t];
R[i + 281*t] = A[i + 281*t];
R[i + 282*t] = A[i + 282*t];
R[i + 283*t] = A[i + 283*t];
R[i + 284*t] = A[i + 284*t];
R[i + 285*t] = A[i + 285*t];
R[i + 286*t] = A[i + 286*t];
R[i + 287*t] = A[i + 287*t];
R[i + 288*t] = A[i + 288*t];
R[i + 289*t] = A[i + 289*t];
R[i + 290*t] = A[i + 290*t];
R[i + 291*t] = A[i + 291*t];
R[i + 292*t] = A[i + 292*t];
R[i + 293*t] = A[i + 293*t];
R[i + 294*t] = A[i + 294*t];
R[i + 295*t] = A[i + 295*t];
R[i + 296*t] = A[i + 296*t];
R[i + 297*t] = A[i + 297*t];
R[i + 298*t] = A[i + 298*t];
R[i + 299*t] = A[i + 299*t];
R[i + 300*t] = A[i + 300*t];
R[i + 301*t] = A[i + 301*t];
R[i + 302*t] = A[i + 302*t];
R[i + 303*t] = A[i + 303*t];
R[i + 304*t] = A[i + 304*t];
R[i + 305*t] = A[i + 305*t];
R[i + 306*t] = A[i + 306*t];
R[i + 307*t] = A[i + 307*t];
R[i + 308*t] = A[i + 308*t];
R[i + 309*t] = A[i + 309*t];
R[i + 310*t] = A[i + 310*t];
R[i + 311*t] = A[i + 311*t];
R[i + 312*t] = A[i + 312*t];
R[i + 313*t] = A[i + 313*t];
R[i + 314*t] = A[i + 314*t];
R[i + 315*t] = A[i + 315*t];
R[i + 316*t] = A[i + 316*t];
R[i + 317*t] = A[i + 317*t];
R[i + 318*t] = A[i + 318*t];
R[i + 319*t] = A[i + 319*t];
R[i + 320*t] = A[i + 320*t];
R[i + 321*t] = A[i + 321*t];
R[i + 322*t] = A[i + 322*t];
R[i + 323*t] = A[i + 323*t];
R[i + 324*t] = A[i + 324*t];
R[i + 325*t] = A[i + 325*t];
R[i + 326*t] = A[i + 326*t];
R[i + 327*t] = A[i + 327*t];
R[i + 328*t] = A[i + 328*t];
R[i + 329*t] = A[i + 329*t];
R[i + 330*t] = A[i + 330*t];
R[i + 331*t] = A[i + 331*t];
R[i + 332*t] = A[i + 332*t];
R[i + 333*t] = A[i + 333*t];
R[i + 334*t] = A[i + 334*t];
R[i + 335*t] = A[i + 335*t];
R[i + 336*t] = A[i + 336*t];
R[i + 337*t] = A[i + 337*t];
R[i + 338*t] = A[i + 338*t];
R[i + 339*t] = A[i + 339*t];
R[i + 340*t] = A[i + 340*t];
R[i + 341*t] = A[i + 341*t];
R[i + 342*t] = A[i + 342*t];
R[i + 343*t] = A[i + 343*t];
R[i + 344*t] = A[i + 344*t];
R[i + 345*t] = A[i + 345*t];
R[i + 346*t] = A[i + 346*t];
R[i + 347*t] = A[i + 347*t];
R[i + 348*t] = A[i + 348*t];
R[i + 349*t] = A[i + 349*t];
R[i + 350*t] = A[i + 350*t];
R[i + 351*t] = A[i + 351*t];
R[i + 352*t] = A[i + 352*t];
R[i + 353*t] = A[i + 353*t];
R[i + 354*t] = A[i + 354*t];
R[i + 355*t] = A[i + 355*t];
R[i + 356*t] = A[i + 356*t];
R[i + 357*t] = A[i + 357*t];
R[i + 358*t] = A[i + 358*t];
R[i + 359*t] = A[i + 359*t];
R[i + 360*t] = A[i + 360*t];
R[i + 361*t] = A[i + 361*t];
R[i + 362*t] = A[i + 362*t];
R[i + 363*t] = A[i + 363*t];
R[i + 364*t] = A[i + 364*t];
R[i + 365*t] = A[i + 365*t];
R[i + 366*t] = A[i + 366*t];
R[i + 367*t] = A[i + 367*t];
R[i + 368*t] = A[i + 368*t];
R[i + 369*t] = A[i + 369*t];
R[i + 370*t] = A[i + 370*t];
R[i + 371*t] = A[i + 371*t];
R[i + 372*t] = A[i + 372*t];
R[i + 373*t] = A[i + 373*t];
R[i + 374*t] = A[i + 374*t];
R[i + 375*t] = A[i + 375*t];
R[i + 376*t] = A[i + 376*t];
R[i + 377*t] = A[i + 377*t];
R[i + 378*t] = A[i + 378*t];
R[i + 379*t] = A[i + 379*t];
R[i + 380*t] = A[i + 380*t];
R[i + 381*t] = A[i + 381*t];
R[i + 382*t] = A[i + 382*t];
R[i + 383*t] = A[i + 383*t];
R[i + 384*t] = A[i + 384*t];
R[i + 385*t] = A[i + 385*t];
R[i + 386*t] = A[i + 386*t];
R[i + 387*t] = A[i + 387*t];
R[i + 388*t] = A[i + 388*t];
R[i + 389*t] = A[i + 389*t];
R[i + 390*t] = A[i + 390*t];
R[i + 391*t] = A[i + 391*t];
R[i + 392*t] = A[i + 392*t];
R[i + 393*t] = A[i + 393*t];
R[i + 394*t] = A[i + 394*t];
R[i + 395*t] = A[i + 395*t];
R[i + 396*t] = A[i + 396*t];
R[i + 397*t] = A[i + 397*t];
R[i + 398*t] = A[i + 398*t];
R[i + 399*t] = A[i + 399*t];
R[i + 400*t] = A[i + 400*t];
R[i + 401*t] = A[i + 401*t];
R[i + 402*t] = A[i + 402*t];
R[i + 403*t] = A[i + 403*t];
R[i + 404*t] = A[i + 404*t];
R[i + 405*t] = A[i + 405*t];
R[i + 406*t] = A[i + 406*t];
R[i + 407*t] = A[i + 407*t];
R[i + 408*t] = A[i + 408*t];
R[i + 409*t] = A[i + 409*t];
R[i + 410*t] = A[i + 410*t];
R[i + 411*t] = A[i + 411*t];
R[i + 412*t] = A[i + 412*t];
R[i + 413*t] = A[i + 413*t];
R[i + 414*t] = A[i + 414*t];
R[i + 415*t] = A[i + 415*t];
R[i + 416*t] = A[i + 416*t];
R[i + 417*t] = A[i + 417*t];
R[i + 418*t] = A[i + 418*t];
R[i + 419*t] = A[i + 419*t];
R[i + 420*t] = A[i + 420*t];
R[i + 421*t] = A[i + 421*t];
R[i + 422*t] = A[i + 422*t];
R[i + 423*t] = A[i + 423*t];
R[i + 424*t] = A[i + 424*t];
R[i + 425*t] = A[i + 425*t];
R[i + 426*t] = A[i + 426*t];
R[i + 427*t] = A[i + 427*t];
R[i + 428*t] = A[i + 428*t];
R[i + 429*t] = A[i + 429*t];
R[i + 430*t] = A[i + 430*t];
R[i + 431*t] = A[i + 431*t];
R[i + 432*t] = A[i + 432*t];
R[i + 433*t] = A[i + 433*t];
R[i + 434*t] = A[i + 434*t];
R[i + 435*t] = A[i + 435*t];
R[i + 436*t] = A[i + 436*t];
R[i + 437*t] = A[i + 437*t];
R[i + 438*t] = A[i + 438*t];
R[i + 439*t] = A[i + 439*t];
R[i + 440*t] = A[i + 440*t];
R[i + 441*t] = A[i + 441*t];
R[i + 442*t] = A[i + 442*t];
R[i + 443*t] = A[i + 443*t];
R[i + 444*t] = A[i + 444*t];
R[i + 445*t] = A[i + 445*t];
R[i + 446*t] = A[i + 446*t];
R[i + 447*t] = A[i + 447*t];
R[i + 448*t] = A[i + 448*t];
R[i + 449*t] = A[i + 449*t];
R[i + 450*t] = A[i + 450*t];
R[i + 451*t] = A[i + 451*t];
R[i + 452*t] = A[i + 452*t];
R[i + 453*t] = A[i + 453*t];
R[i + 454*t] = A[i + 454*t];
R[i + 455*t] = A[i + 455*t];
R[i + 456*t] = A[i + 456*t];
R[i + 457*t] = A[i + 457*t];
R[i + 458*t] = A[i + 458*t];
R[i + 459*t] = A[i + 459*t];
R[i + 460*t] = A[i + 460*t];
R[i + 461*t] = A[i + 461*t];
R[i + 462*t] = A[i + 462*t];
R[i + 463*t] = A[i + 463*t];
R[i + 464*t] = A[i + 464*t];
R[i + 465*t] = A[i + 465*t];
R[i + 466*t] = A[i + 466*t];
R[i + 467*t] = A[i + 467*t];
R[i + 468*t] = A[i + 468*t];
R[i + 469*t] = A[i + 469*t];
R[i + 470*t] = A[i + 470*t];
R[i + 471*t] = A[i + 471*t];
R[i + 472*t] = A[i + 472*t];
R[i + 473*t] = A[i + 473*t];
R[i + 474*t] = A[i + 474*t];
R[i + 475*t] = A[i + 475*t];
R[i + 476*t] = A[i + 476*t];
R[i + 477*t] = A[i + 477*t];
R[i + 478*t] = A[i + 478*t];
R[i + 479*t] = A[i + 479*t];
R[i + 480*t] = A[i + 480*t];
R[i + 481*t] = A[i + 481*t];
R[i + 482*t] = A[i + 482*t];
R[i + 483*t] = A[i + 483*t];
R[i + 484*t] = A[i + 484*t];
R[i + 485*t] = A[i + 485*t];
R[i + 486*t] = A[i + 486*t];
R[i + 487*t] = A[i + 487*t];
R[i + 488*t] = A[i + 488*t];
R[i + 489*t] = A[i + 489*t];
R[i + 490*t] = A[i + 490*t];
R[i + 491*t] = A[i + 491*t];
R[i + 492*t] = A[i + 492*t];
R[i + 493*t] = A[i + 493*t];
R[i + 494*t] = A[i + 494*t];
R[i + 495*t] = A[i + 495*t];
R[i + 496*t] = A[i + 496*t];
R[i + 497*t] = A[i + 497*t];
R[i + 498*t] = A[i + 498*t];
R[i + 499*t] = A[i + 499*t];
R[i + 500*t] = A[i + 500*t];
R[i + 501*t] = A[i + 501*t];
R[i + 502*t] = A[i + 502*t];
R[i + 503*t] = A[i + 503*t];
R[i + 504*t] = A[i + 504*t];
R[i + 505*t] = A[i + 505*t];
R[i + 506*t] = A[i + 506*t];
R[i + 507*t] = A[i + 507*t];
R[i + 508*t] = A[i + 508*t];
R[i + 509*t] = A[i + 509*t];
R[i + 510*t] = A[i + 510*t];
R[i + 511*t] = A[i + 511*t];
R[i + 512*t] = A[i + 512*t];
R[i + 513*t] = A[i + 513*t];
R[i + 514*t] = A[i + 514*t];
R[i + 515*t] = A[i + 515*t];
R[i + 516*t] = A[i + 516*t];
R[i + 517*t] = A[i + 517*t];
R[i + 518*t] = A[i + 518*t];
R[i + 519*t] = A[i + 519*t];
R[i + 520*t] = A[i + 520*t];
R[i + 521*t] = A[i + 521*t];
R[i + 522*t] = A[i + 522*t];
R[i + 523*t] = A[i + 523*t];
R[i + 524*t] = A[i + 524*t];
R[i + 525*t] = A[i + 525*t];
R[i + 526*t] = A[i + 526*t];
R[i + 527*t] = A[i + 527*t];
R[i + 528*t] = A[i + 528*t];
R[i + 529*t] = A[i + 529*t];
R[i + 530*t] = A[i + 530*t];
R[i + 531*t] = A[i + 531*t];
R[i + 532*t] = A[i + 532*t];
R[i + 533*t] = A[i + 533*t];
R[i + 534*t] = A[i + 534*t];
R[i + 535*t] = A[i + 535*t];
R[i + 536*t] = A[i + 536*t];
R[i + 537*t] = A[i + 537*t];
R[i + 538*t] = A[i + 538*t];
R[i + 539*t] = A[i + 539*t];
R[i + 540*t] = A[i + 540*t];
R[i + 541*t] = A[i + 541*t];
R[i + 542*t] = A[i + 542*t];
R[i + 543*t] = A[i + 543*t];
R[i + 544*t] = A[i + 544*t];
R[i + 545*t] = A[i + 545*t];
R[i + 546*t] = A[i + 546*t];
R[i + 547*t] = A[i + 547*t];
R[i + 548*t] = A[i + 548*t];
R[i + 549*t] = A[i + 549*t];
R[i + 550*t] = A[i + 550*t];
R[i + 551*t] = A[i + 551*t];
R[i + 552*t] = A[i + 552*t];
R[i + 553*t] = A[i + 553*t];
R[i + 554*t] = A[i + 554*t];
R[i + 555*t] = A[i + 555*t];
R[i + 556*t] = A[i + 556*t];
R[i + 557*t] = A[i + 557*t];
R[i + 558*t] = A[i + 558*t];
R[i + 559*t] = A[i + 559*t];
R[i + 560*t] = A[i + 560*t];
R[i + 561*t] = A[i + 561*t];
R[i + 562*t] = A[i + 562*t];
R[i + 563*t] = A[i + 563*t];
R[i + 564*t] = A[i + 564*t];
R[i + 565*t] = A[i + 565*t];
R[i + 566*t] = A[i + 566*t];
R[i + 567*t] = A[i + 567*t];
R[i + 568*t] = A[i + 568*t];
R[i + 569*t] = A[i + 569*t];
R[i + 570*t] = A[i + 570*t];
R[i + 571*t] = A[i + 571*t];
R[i + 572*t] = A[i + 572*t];
R[i + 573*t] = A[i + 573*t];
R[i + 574*t] = A[i + 574*t];
R[i + 575*t] = A[i + 575*t];
R[i + 576*t] = A[i + 576*t];
R[i + 577*t] = A[i + 577*t];
R[i + 578*t] = A[i + 578*t];
R[i + 579*t] = A[i + 579*t];
R[i + 580*t] = A[i + 580*t];
R[i + 581*t] = A[i + 581*t];
R[i + 582*t] = A[i + 582*t];
R[i + 583*t] = A[i + 583*t];
R[i + 584*t] = A[i + 584*t];
R[i + 585*t] = A[i + 585*t];
R[i + 586*t] = A[i + 586*t];
R[i + 587*t] = A[i + 587*t];
R[i + 588*t] = A[i + 588*t];
R[i + 589*t] = A[i + 589*t];
R[i + 590*t] = A[i + 590*t];
R[i + 591*t] = A[i + 591*t];
R[i + 592*t] = A[i + 592*t];
R[i + 593*t] = A[i + 593*t];
R[i + 594*t] = A[i + 594*t];
R[i + 595*t] = A[i + 595*t];
R[i + 596*t] = A[i + 596*t];
R[i + 597*t] = A[i + 597*t];
R[i + 598*t] = A[i + 598*t];
R[i + 599*t] = A[i + 599*t];
R[i + 600*t] = A[i + 600*t];
R[i + 601*t] = A[i + 601*t];
R[i + 602*t] = A[i + 602*t];
R[i + 603*t] = A[i + 603*t];
R[i + 604*t] = A[i + 604*t];
R[i + 605*t] = A[i + 605*t];
R[i + 606*t] = A[i + 606*t];
R[i + 607*t] = A[i + 607*t];
R[i + 608*t] = A[i + 608*t];
R[i + 609*t] = A[i + 609*t];
R[i + 610*t] = A[i + 610*t];
R[i + 611*t] = A[i + 611*t];
R[i + 612*t] = A[i + 612*t];
R[i + 613*t] = A[i + 613*t];
R[i + 614*t] = A[i + 614*t];
R[i + 615*t] = A[i + 615*t];
R[i + 616*t] = A[i + 616*t];
R[i + 617*t] = A[i + 617*t];
R[i + 618*t] = A[i + 618*t];
R[i + 619*t] = A[i + 619*t];
R[i + 620*t] = A[i + 620*t];
R[i + 621*t] = A[i + 621*t];
R[i + 622*t] = A[i + 622*t];
R[i + 623*t] = A[i + 623*t];
R[i + 624*t] = A[i + 624*t];
R[i + 625*t] = A[i + 625*t];
R[i + 626*t] = A[i + 626*t];
R[i + 627*t] = A[i + 627*t];
R[i + 628*t] = A[i + 628*t];
R[i + 629*t] = A[i + 629*t];
R[i + 630*t] = A[i + 630*t];
R[i + 631*t] = A[i + 631*t];
R[i + 632*t] = A[i + 632*t];
R[i + 633*t] = A[i + 633*t];
R[i + 634*t] = A[i + 634*t];
R[i + 635*t] = A[i + 635*t];
R[i + 636*t] = A[i + 636*t];
R[i + 637*t] = A[i + 637*t];
R[i + 638*t] = A[i + 638*t];
R[i + 639*t] = A[i + 639*t];
R[i + 640*t] = A[i + 640*t];
R[i + 641*t] = A[i + 641*t];
R[i + 642*t] = A[i + 642*t];
R[i + 643*t] = A[i + 643*t];
R[i + 644*t] = A[i + 644*t];
R[i + 645*t] = A[i + 645*t];
R[i + 646*t] = A[i + 646*t];
R[i + 647*t] = A[i + 647*t];
R[i + 648*t] = A[i + 648*t];
R[i + 649*t] = A[i + 649*t];
R[i + 650*t] = A[i + 650*t];
R[i + 651*t] = A[i + 651*t];
R[i + 652*t] = A[i + 652*t];
R[i + 653*t] = A[i + 653*t];
R[i + 654*t] = A[i + 654*t];
R[i + 655*t] = A[i + 655*t];
R[i + 656*t] = A[i + 656*t];
R[i + 657*t] = A[i + 657*t];
R[i + 658*t] = A[i + 658*t];
R[i + 659*t] = A[i + 659*t];
R[i + 660*t] = A[i + 660*t];
R[i + 661*t] = A[i + 661*t];
R[i + 662*t] = A[i + 662*t];
R[i + 663*t] = A[i + 663*t];
R[i + 664*t] = A[i + 664*t];
R[i + 665*t] = A[i + 665*t];
R[i + 666*t] = A[i + 666*t];
R[i + 667*t] = A[i + 667*t];
R[i + 668*t] = A[i + 668*t];
R[i + 669*t] = A[i + 669*t];
R[i + 670*t] = A[i + 670*t];
R[i + 671*t] = A[i + 671*t];
R[i + 672*t] = A[i + 672*t];
R[i + 673*t] = A[i + 673*t];
R[i + 674*t] = A[i + 674*t];
R[i + 675*t] = A[i + 675*t];
R[i + 676*t] = A[i + 676*t];
R[i + 677*t] = A[i + 677*t];
R[i + 678*t] = A[i + 678*t];
R[i + 679*t] = A[i + 679*t];
R[i + 680*t] = A[i + 680*t];
R[i + 681*t] = A[i + 681*t];
R[i + 682*t] = A[i + 682*t];
R[i + 683*t] = A[i + 683*t];
R[i + 684*t] = A[i + 684*t];
R[i + 685*t] = A[i + 685*t];
R[i + 686*t] = A[i + 686*t];
R[i + 687*t] = A[i + 687*t];
R[i + 688*t] = A[i + 688*t];
R[i + 689*t] = A[i + 689*t];
R[i + 690*t] = A[i + 690*t];
R[i + 691*t] = A[i + 691*t];
R[i + 692*t] = A[i + 692*t];
R[i + 693*t] = A[i + 693*t];
R[i + 694*t] = A[i + 694*t];
R[i + 695*t] = A[i + 695*t];
R[i + 696*t] = A[i + 696*t];
R[i + 697*t] = A[i + 697*t];
R[i + 698*t] = A[i + 698*t];
R[i + 699*t] = A[i + 699*t];
R[i + 700*t] = A[i + 700*t];
R[i + 701*t] = A[i + 701*t];
R[i + 702*t] = A[i + 702*t];
R[i + 703*t] = A[i + 703*t];
R[i + 704*t] = A[i + 704*t];
R[i + 705*t] = A[i + 705*t];
R[i + 706*t] = A[i + 706*t];
R[i + 707*t] = A[i + 707*t];
R[i + 708*t] = A[i + 708*t];
R[i + 709*t] = A[i + 709*t];
R[i + 710*t] = A[i + 710*t];
R[i + 711*t] = A[i + 711*t];
R[i + 712*t] = A[i + 712*t];
R[i + 713*t] = A[i + 713*t];
R[i + 714*t] = A[i + 714*t];
R[i + 715*t] = A[i + 715*t];
R[i + 716*t] = A[i + 716*t];
R[i + 717*t] = A[i + 717*t];
R[i + 718*t] = A[i + 718*t];
R[i + 719*t] = A[i + 719*t];
R[i + 720*t] = A[i + 720*t];
R[i + 721*t] = A[i + 721*t];
R[i + 722*t] = A[i + 722*t];
R[i + 723*t] = A[i + 723*t];
R[i + 724*t] = A[i + 724*t];
R[i + 725*t] = A[i + 725*t];
R[i + 726*t] = A[i + 726*t];
R[i + 727*t] = A[i + 727*t];
R[i + 728*t] = A[i + 728*t];
R[i + 729*t] = A[i + 729*t];
R[i + 730*t] = A[i + 730*t];
R[i + 731*t] = A[i + 731*t];
R[i + 732*t] = A[i + 732*t];
R[i + 733*t] = A[i + 733*t];
R[i + 734*t] = A[i + 734*t];
R[i + 735*t] = A[i + 735*t];
R[i + 736*t] = A[i + 736*t];
R[i + 737*t] = A[i + 737*t];
R[i + 738*t] = A[i + 738*t];
R[i + 739*t] = A[i + 739*t];
R[i + 740*t] = A[i + 740*t];
R[i + 741*t] = A[i + 741*t];
R[i + 742*t] = A[i + 742*t];
R[i + 743*t] = A[i + 743*t];
R[i + 744*t] = A[i + 744*t];
R[i + 745*t] = A[i + 745*t];
R[i + 746*t] = A[i + 746*t];
R[i + 747*t] = A[i + 747*t];
R[i + 748*t] = A[i + 748*t];
R[i + 749*t] = A[i + 749*t];
R[i + 750*t] = A[i + 750*t];
R[i + 751*t] = A[i + 751*t];
R[i + 752*t] = A[i + 752*t];
R[i + 753*t] = A[i + 753*t];
R[i + 754*t] = A[i + 754*t];
R[i + 755*t] = A[i + 755*t];
R[i + 756*t] = A[i + 756*t];
R[i + 757*t] = A[i + 757*t];
R[i + 758*t] = A[i + 758*t];
R[i + 759*t] = A[i + 759*t];
R[i + 760*t] = A[i + 760*t];
R[i + 761*t] = A[i + 761*t];
R[i + 762*t] = A[i + 762*t];
R[i + 763*t] = A[i + 763*t];
R[i + 764*t] = A[i + 764*t];
R[i + 765*t] = A[i + 765*t];
R[i + 766*t] = A[i + 766*t];
R[i + 767*t] = A[i + 767*t];
R[i + 768*t] = A[i + 768*t];
R[i + 769*t] = A[i + 769*t];
R[i + 770*t] = A[i + 770*t];
R[i + 771*t] = A[i + 771*t];
R[i + 772*t] = A[i + 772*t];
R[i + 773*t] = A[i + 773*t];
R[i + 774*t] = A[i + 774*t];
R[i + 775*t] = A[i + 775*t];
R[i + 776*t] = A[i + 776*t];
R[i + 777*t] = A[i + 777*t];
R[i + 778*t] = A[i + 778*t];
R[i + 779*t] = A[i + 779*t];
R[i + 780*t] = A[i + 780*t];
R[i + 781*t] = A[i + 781*t];
R[i + 782*t] = A[i + 782*t];
R[i + 783*t] = A[i + 783*t];
R[i + 784*t] = A[i + 784*t];
R[i + 785*t] = A[i + 785*t];
R[i + 786*t] = A[i + 786*t];
R[i + 787*t] = A[i + 787*t];
R[i + 788*t] = A[i + 788*t];
R[i + 789*t] = A[i + 789*t];
R[i + 790*t] = A[i + 790*t];
R[i + 791*t] = A[i + 791*t];
R[i + 792*t] = A[i + 792*t];
R[i + 793*t] = A[i + 793*t];
R[i + 794*t] = A[i + 794*t];
R[i + 795*t] = A[i + 795*t];
R[i + 796*t] = A[i + 796*t];
R[i + 797*t] = A[i + 797*t];
R[i + 798*t] = A[i + 798*t];
R[i + 799*t] = A[i + 799*t];
R[i + 800*t] = A[i + 800*t];
R[i + 801*t] = A[i + 801*t];
R[i + 802*t] = A[i + 802*t];
R[i + 803*t] = A[i + 803*t];
R[i + 804*t] = A[i + 804*t];
R[i + 805*t] = A[i + 805*t];
R[i + 806*t] = A[i + 806*t];
R[i + 807*t] = A[i + 807*t];
R[i + 808*t] = A[i + 808*t];
R[i + 809*t] = A[i + 809*t];
R[i + 810*t] = A[i + 810*t];
R[i + 811*t] = A[i + 811*t];
R[i + 812*t] = A[i + 812*t];
R[i + 813*t] = A[i + 813*t];
R[i + 814*t] = A[i + 814*t];
R[i + 815*t] = A[i + 815*t];
R[i + 816*t] = A[i + 816*t];
R[i + 817*t] = A[i + 817*t];
R[i + 818*t] = A[i + 818*t];
R[i + 819*t] = A[i + 819*t];
R[i + 820*t] = A[i + 820*t];
R[i + 821*t] = A[i + 821*t];
R[i + 822*t] = A[i + 822*t];
R[i + 823*t] = A[i + 823*t];
R[i + 824*t] = A[i + 824*t];
R[i + 825*t] = A[i + 825*t];
R[i + 826*t] = A[i + 826*t];
R[i + 827*t] = A[i + 827*t];
R[i + 828*t] = A[i + 828*t];
R[i + 829*t] = A[i + 829*t];
R[i + 830*t] = A[i + 830*t];
R[i + 831*t] = A[i + 831*t];
R[i + 832*t] = A[i + 832*t];
R[i + 833*t] = A[i + 833*t];
R[i + 834*t] = A[i + 834*t];
R[i + 835*t] = A[i + 835*t];
R[i + 836*t] = A[i + 836*t];
R[i + 837*t] = A[i + 837*t];
R[i + 838*t] = A[i + 838*t];
R[i + 839*t] = A[i + 839*t];
R[i + 840*t] = A[i + 840*t];
R[i + 841*t] = A[i + 841*t];
R[i + 842*t] = A[i + 842*t];
R[i + 843*t] = A[i + 843*t];
R[i + 844*t] = A[i + 844*t];
R[i + 845*t] = A[i + 845*t];
R[i + 846*t] = A[i + 846*t];
R[i + 847*t] = A[i + 847*t];
R[i + 848*t] = A[i + 848*t];
R[i + 849*t] = A[i + 849*t];
R[i + 850*t] = A[i + 850*t];
R[i + 851*t] = A[i + 851*t];
R[i + 852*t] = A[i + 852*t];
R[i + 853*t] = A[i + 853*t];
R[i + 854*t] = A[i + 854*t];
R[i + 855*t] = A[i + 855*t];
R[i + 856*t] = A[i + 856*t];
R[i + 857*t] = A[i + 857*t];
R[i + 858*t] = A[i + 858*t];
R[i + 859*t] = A[i + 859*t];
R[i + 860*t] = A[i + 860*t];
R[i + 861*t] = A[i + 861*t];
R[i + 862*t] = A[i + 862*t];
R[i + 863*t] = A[i + 863*t];
R[i + 864*t] = A[i + 864*t];
R[i + 865*t] = A[i + 865*t];
R[i + 866*t] = A[i + 866*t];
R[i + 867*t] = A[i + 867*t];
R[i + 868*t] = A[i + 868*t];
R[i + 869*t] = A[i + 869*t];
R[i + 870*t] = A[i + 870*t];
R[i + 871*t] = A[i + 871*t];
R[i + 872*t] = A[i + 872*t];
R[i + 873*t] = A[i + 873*t];
R[i + 874*t] = A[i + 874*t];
R[i + 875*t] = A[i + 875*t];
R[i + 876*t] = A[i + 876*t];
R[i + 877*t] = A[i + 877*t];
R[i + 878*t] = A[i + 878*t];
R[i + 879*t] = A[i + 879*t];
R[i + 880*t] = A[i + 880*t];
R[i + 881*t] = A[i + 881*t];
R[i + 882*t] = A[i + 882*t];
R[i + 883*t] = A[i + 883*t];
R[i + 884*t] = A[i + 884*t];
R[i + 885*t] = A[i + 885*t];
R[i + 886*t] = A[i + 886*t];
R[i + 887*t] = A[i + 887*t];
R[i + 888*t] = A[i + 888*t];
R[i + 889*t] = A[i + 889*t];
R[i + 890*t] = A[i + 890*t];
R[i + 891*t] = A[i + 891*t];
R[i + 892*t] = A[i + 892*t];
R[i + 893*t] = A[i + 893*t];
R[i + 894*t] = A[i + 894*t];
R[i + 895*t] = A[i + 895*t];
R[i + 896*t] = A[i + 896*t];
R[i + 897*t] = A[i + 897*t];
R[i + 898*t] = A[i + 898*t];
R[i + 899*t] = A[i + 899*t];
R[i + 900*t] = A[i + 900*t];
R[i + 901*t] = A[i + 901*t];
R[i + 902*t] = A[i + 902*t];
R[i + 903*t] = A[i + 903*t];
R[i + 904*t] = A[i + 904*t];
R[i + 905*t] = A[i + 905*t];
R[i + 906*t] = A[i + 906*t];
R[i + 907*t] = A[i + 907*t];
R[i + 908*t] = A[i + 908*t];
R[i + 909*t] = A[i + 909*t];
R[i + 910*t] = A[i + 910*t];
R[i + 911*t] = A[i + 911*t];
R[i + 912*t] = A[i + 912*t];
R[i + 913*t] = A[i + 913*t];
R[i + 914*t] = A[i + 914*t];
R[i + 915*t] = A[i + 915*t];
R[i + 916*t] = A[i + 916*t];
R[i + 917*t] = A[i + 917*t];
R[i + 918*t] = A[i + 918*t];
R[i + 919*t] = A[i + 919*t];
R[i + 920*t] = A[i + 920*t];
R[i + 921*t] = A[i + 921*t];
R[i + 922*t] = A[i + 922*t];
R[i + 923*t] = A[i + 923*t];
R[i + 924*t] = A[i + 924*t];
R[i + 925*t] = A[i + 925*t];
R[i + 926*t] = A[i + 926*t];
R[i + 927*t] = A[i + 927*t];
R[i + 928*t] = A[i + 928*t];
R[i + 929*t] = A[i + 929*t];
R[i + 930*t] = A[i + 930*t];
R[i + 931*t] = A[i + 931*t];
R[i + 932*t] = A[i + 932*t];
R[i + 933*t] = A[i + 933*t];
R[i + 934*t] = A[i + 934*t];
R[i + 935*t] = A[i + 935*t];
R[i + 936*t] = A[i + 936*t];
R[i + 937*t] = A[i + 937*t];
R[i + 938*t] = A[i + 938*t];
R[i + 939*t] = A[i + 939*t];
R[i + 940*t] = A[i + 940*t];
R[i + 941*t] = A[i + 941*t];
R[i + 942*t] = A[i + 942*t];
R[i + 943*t] = A[i + 943*t];
R[i + 944*t] = A[i + 944*t];
R[i + 945*t] = A[i + 945*t];
R[i + 946*t] = A[i + 946*t];
R[i + 947*t] = A[i + 947*t];
R[i + 948*t] = A[i + 948*t];
R[i + 949*t] = A[i + 949*t];
R[i + 950*t] = A[i + 950*t];
R[i + 951*t] = A[i + 951*t];
R[i + 952*t] = A[i + 952*t];
R[i + 953*t] = A[i + 953*t];
R[i + 954*t] = A[i + 954*t];
R[i + 955*t] = A[i + 955*t];
R[i + 956*t] = A[i + 956*t];
R[i + 957*t] = A[i + 957*t];
R[i + 958*t] = A[i + 958*t];
R[i + 959*t] = A[i + 959*t];
R[i + 960*t] = A[i + 960*t];
R[i + 961*t] = A[i + 961*t];
R[i + 962*t] = A[i + 962*t];
R[i + 963*t] = A[i + 963*t];
R[i + 964*t] = A[i + 964*t];
R[i + 965*t] = A[i + 965*t];
R[i + 966*t] = A[i + 966*t];
R[i + 967*t] = A[i + 967*t];
R[i + 968*t] = A[i + 968*t];
R[i + 969*t] = A[i + 969*t];
R[i + 970*t] = A[i + 970*t];
R[i + 971*t] = A[i + 971*t];
R[i + 972*t] = A[i + 972*t];
R[i + 973*t] = A[i + 973*t];
R[i + 974*t] = A[i + 974*t];
R[i + 975*t] = A[i + 975*t];
R[i + 976*t] = A[i + 976*t];
R[i + 977*t] = A[i + 977*t];
R[i + 978*t] = A[i + 978*t];
R[i + 979*t] = A[i + 979*t];
R[i + 980*t] = A[i + 980*t];
R[i + 981*t] = A[i + 981*t];
R[i + 982*t] = A[i + 982*t];
R[i + 983*t] = A[i + 983*t];
R[i + 984*t] = A[i + 984*t];
R[i + 985*t] = A[i + 985*t];
R[i + 986*t] = A[i + 986*t];
R[i + 987*t] = A[i + 987*t];
R[i + 988*t] = A[i + 988*t];
R[i + 989*t] = A[i + 989*t];
R[i + 990*t] = A[i + 990*t];
R[i + 991*t] = A[i + 991*t];
R[i + 992*t] = A[i + 992*t];
R[i + 993*t] = A[i + 993*t];
R[i + 994*t] = A[i + 994*t];
R[i + 995*t] = A[i + 995*t];
R[i + 996*t] = A[i + 996*t];
R[i + 997*t] = A[i + 997*t];
R[i + 998*t] = A[i + 998*t];
R[i + 999*t] = A[i + 999*t];
R[i + 1000*t] = A[i + 1000*t];
R[i + 1001*t] = A[i + 1001*t];
R[i + 1002*t] = A[i + 1002*t];
R[i + 1003*t] = A[i + 1003*t];
R[i + 1004*t] = A[i + 1004*t];
R[i + 1005*t] = A[i + 1005*t];
R[i + 1006*t] = A[i + 1006*t];
R[i + 1007*t] = A[i + 1007*t];
R[i + 1008*t] = A[i + 1008*t];
R[i + 1009*t] = A[i + 1009*t];
R[i + 1010*t] = A[i + 1010*t];
R[i + 1011*t] = A[i + 1011*t];
R[i + 1012*t] = A[i + 1012*t];
R[i + 1013*t] = A[i + 1013*t];
R[i + 1014*t] = A[i + 1014*t];
R[i + 1015*t] = A[i + 1015*t];
R[i + 1016*t] = A[i + 1016*t];
R[i + 1017*t] = A[i + 1017*t];
R[i + 1018*t] = A[i + 1018*t];
R[i + 1019*t] = A[i + 1019*t];
R[i + 1020*t] = A[i + 1020*t];
R[i + 1021*t] = A[i + 1021*t];
R[i + 1022*t] = A[i + 1022*t];
R[i + 1023*t] = A[i + 1023*t];
R[i + 1024*t] = A[i + 1024*t];
R[i + 1025*t] = A[i + 1025*t];
R[i + 1026*t] = A[i + 1026*t];
R[i + 1027*t] = A[i + 1027*t];
R[i + 1028*t] = A[i + 1028*t];
R[i + 1029*t] = A[i + 1029*t];
R[i + 1030*t] = A[i + 1030*t];
R[i + 1031*t] = A[i + 1031*t];
R[i + 1032*t] = A[i + 1032*t];
R[i + 1033*t] = A[i + 1033*t];
R[i + 1034*t] = A[i + 1034*t];
R[i + 1035*t] = A[i + 1035*t];
R[i + 1036*t] = A[i + 1036*t];
R[i + 1037*t] = A[i + 1037*t];
R[i + 1038*t] = A[i + 1038*t];
R[i + 1039*t] = A[i + 1039*t];
R[i + 1040*t] = A[i + 1040*t];
R[i + 1041*t] = A[i + 1041*t];
R[i + 1042*t] = A[i + 1042*t];
R[i + 1043*t] = A[i + 1043*t];
R[i + 1044*t] = A[i + 1044*t];
R[i + 1045*t] = A[i + 1045*t];
R[i + 1046*t] = A[i + 1046*t];
R[i + 1047*t] = A[i + 1047*t];
R[i + 1048*t] = A[i + 1048*t];
R[i + 1049*t] = A[i + 1049*t];
R[i + 1050*t] = A[i + 1050*t];
R[i + 1051*t] = A[i + 1051*t];
R[i + 1052*t] = A[i + 1052*t];
R[i + 1053*t] = A[i + 1053*t];
R[i + 1054*t] = A[i + 1054*t];
R[i + 1055*t] = A[i + 1055*t];
R[i + 1056*t] = A[i + 1056*t];
R[i + 1057*t] = A[i + 1057*t];
R[i + 1058*t] = A[i + 1058*t];
R[i + 1059*t] = A[i + 1059*t];
R[i + 1060*t] = A[i + 1060*t];
R[i + 1061*t] = A[i + 1061*t];
R[i + 1062*t] = A[i + 1062*t];
R[i + 1063*t] = A[i + 1063*t];
__syncthreads();
for (int iter=0; iter< n_iter; iter++) {
R[i + 1064*t] = Op[i + 0*t] ? R[B[i + 0*t]] * R[C[i + 0*t]] : R[B[i + 0*t]] + R[C[i + 0*t]];
R[i + 1065*t] = Op[i + 1*t] ? R[B[i + 1*t]] * R[C[i + 1*t]] : R[B[i + 1*t]] + R[C[i + 1*t]];
R[i + 1066*t] = Op[i + 2*t] ? R[B[i + 2*t]] * R[C[i + 2*t]] : R[B[i + 2*t]] + R[C[i + 2*t]];
R[i + 1067*t] = Op[i + 3*t] ? R[B[i + 3*t]] * R[C[i + 3*t]] : R[B[i + 3*t]] + R[C[i + 3*t]];
R[i + 1068*t] = Op[i + 4*t] ? R[B[i + 4*t]] * R[C[i + 4*t]] : R[B[i + 4*t]] + R[C[i + 4*t]];
R[i + 1069*t] = Op[i + 5*t] ? R[B[i + 5*t]] * R[C[i + 5*t]] : R[B[i + 5*t]] + R[C[i + 5*t]];
R[i + 1070*t] = Op[i + 6*t] ? R[B[i + 6*t]] * R[C[i + 6*t]] : R[B[i + 6*t]] + R[C[i + 6*t]];
R[i + 1071*t] = Op[i + 7*t] ? R[B[i + 7*t]] * R[C[i + 7*t]] : R[B[i + 7*t]] + R[C[i + 7*t]];
R[i + 1072*t] = Op[i + 8*t] ? R[B[i + 8*t]] * R[C[i + 8*t]] : R[B[i + 8*t]] + R[C[i + 8*t]];
R[i + 1073*t] = Op[i + 9*t] ? R[B[i + 9*t]] * R[C[i + 9*t]] : R[B[i + 9*t]] + R[C[i + 9*t]];
R[i + 1074*t] = Op[i + 10*t] ? R[B[i + 10*t]] * R[C[i + 10*t]] : R[B[i + 10*t]] + R[C[i + 10*t]];
R[i + 1075*t] = Op[i + 11*t] ? R[B[i + 11*t]] * R[C[i + 11*t]] : R[B[i + 11*t]] + R[C[i + 11*t]];
R[i + 1076*t] = Op[i + 12*t] ? R[B[i + 12*t]] * R[C[i + 12*t]] : R[B[i + 12*t]] + R[C[i + 12*t]];
R[i + 1077*t] = Op[i + 13*t] ? R[B[i + 13*t]] * R[C[i + 13*t]] : R[B[i + 13*t]] + R[C[i + 13*t]];
R[i + 1078*t] = Op[i + 14*t] ? R[B[i + 14*t]] * R[C[i + 14*t]] : R[B[i + 14*t]] + R[C[i + 14*t]];
R[i + 1079*t] = Op[i + 15*t] ? R[B[i + 15*t]] * R[C[i + 15*t]] : R[B[i + 15*t]] + R[C[i + 15*t]];
R[i + 1080*t] = Op[i + 16*t] ? R[B[i + 16*t]] * R[C[i + 16*t]] : R[B[i + 16*t]] + R[C[i + 16*t]];
R[i + 1081*t] = Op[i + 17*t] ? R[B[i + 17*t]] * R[C[i + 17*t]] : R[B[i + 17*t]] + R[C[i + 17*t]];
R[i + 1082*t] = Op[i + 18*t] ? R[B[i + 18*t]] * R[C[i + 18*t]] : R[B[i + 18*t]] + R[C[i + 18*t]];
R[i + 1083*t] = Op[i + 19*t] ? R[B[i + 19*t]] * R[C[i + 19*t]] : R[B[i + 19*t]] + R[C[i + 19*t]];
R[i + 1084*t] = Op[i + 20*t] ? R[B[i + 20*t]] * R[C[i + 20*t]] : R[B[i + 20*t]] + R[C[i + 20*t]];
R[i + 1085*t] = Op[i + 21*t] ? R[B[i + 21*t]] * R[C[i + 21*t]] : R[B[i + 21*t]] + R[C[i + 21*t]];
R[i + 1086*t] = Op[i + 22*t] ? R[B[i + 22*t]] * R[C[i + 22*t]] : R[B[i + 22*t]] + R[C[i + 22*t]];
R[i + 1087*t] = Op[i + 23*t] ? R[B[i + 23*t]] * R[C[i + 23*t]] : R[B[i + 23*t]] + R[C[i + 23*t]];
R[i + 1088*t] = Op[i + 24*t] ? R[B[i + 24*t]] * R[C[i + 24*t]] : R[B[i + 24*t]] + R[C[i + 24*t]];
R[i + 1089*t] = Op[i + 25*t] ? R[B[i + 25*t]] * R[C[i + 25*t]] : R[B[i + 25*t]] + R[C[i + 25*t]];
R[i + 1090*t] = Op[i + 26*t] ? R[B[i + 26*t]] * R[C[i + 26*t]] : R[B[i + 26*t]] + R[C[i + 26*t]];
R[i + 1091*t] = Op[i + 27*t] ? R[B[i + 27*t]] * R[C[i + 27*t]] : R[B[i + 27*t]] + R[C[i + 27*t]];
R[i + 1092*t] = Op[i + 28*t] ? R[B[i + 28*t]] * R[C[i + 28*t]] : R[B[i + 28*t]] + R[C[i + 28*t]];
R[i + 1093*t] = Op[i + 29*t] ? R[B[i + 29*t]] * R[C[i + 29*t]] : R[B[i + 29*t]] + R[C[i + 29*t]];
R[i + 1094*t] = Op[i + 30*t] ? R[B[i + 30*t]] * R[C[i + 30*t]] : R[B[i + 30*t]] + R[C[i + 30*t]];
R[i + 1095*t] = Op[i + 31*t] ? R[B[i + 31*t]] * R[C[i + 31*t]] : R[B[i + 31*t]] + R[C[i + 31*t]];
R[i + 1096*t] = Op[i + 32*t] ? R[B[i + 32*t]] * R[C[i + 32*t]] : R[B[i + 32*t]] + R[C[i + 32*t]];
R[i + 1097*t] = Op[i + 33*t] ? R[B[i + 33*t]] * R[C[i + 33*t]] : R[B[i + 33*t]] + R[C[i + 33*t]];
R[i + 1098*t] = Op[i + 34*t] ? R[B[i + 34*t]] * R[C[i + 34*t]] : R[B[i + 34*t]] + R[C[i + 34*t]];
R[i + 1099*t] = Op[i + 35*t] ? R[B[i + 35*t]] * R[C[i + 35*t]] : R[B[i + 35*t]] + R[C[i + 35*t]];
R[i + 1100*t] = Op[i + 36*t] ? R[B[i + 36*t]] * R[C[i + 36*t]] : R[B[i + 36*t]] + R[C[i + 36*t]];
R[i + 1101*t] = Op[i + 37*t] ? R[B[i + 37*t]] * R[C[i + 37*t]] : R[B[i + 37*t]] + R[C[i + 37*t]];
R[i + 1102*t] = Op[i + 38*t] ? R[B[i + 38*t]] * R[C[i + 38*t]] : R[B[i + 38*t]] + R[C[i + 38*t]];
R[i + 1103*t] = Op[i + 39*t] ? R[B[i + 39*t]] * R[C[i + 39*t]] : R[B[i + 39*t]] + R[C[i + 39*t]];
R[i + 1104*t] = Op[i + 40*t] ? R[B[i + 40*t]] * R[C[i + 40*t]] : R[B[i + 40*t]] + R[C[i + 40*t]];
R[i + 1105*t] = Op[i + 41*t] ? R[B[i + 41*t]] * R[C[i + 41*t]] : R[B[i + 41*t]] + R[C[i + 41*t]];
R[i + 1106*t] = Op[i + 42*t] ? R[B[i + 42*t]] * R[C[i + 42*t]] : R[B[i + 42*t]] + R[C[i + 42*t]];
R[i + 1107*t] = Op[i + 43*t] ? R[B[i + 43*t]] * R[C[i + 43*t]] : R[B[i + 43*t]] + R[C[i + 43*t]];
R[i + 1108*t] = Op[i + 44*t] ? R[B[i + 44*t]] * R[C[i + 44*t]] : R[B[i + 44*t]] + R[C[i + 44*t]];
R[i + 1109*t] = Op[i + 45*t] ? R[B[i + 45*t]] * R[C[i + 45*t]] : R[B[i + 45*t]] + R[C[i + 45*t]];
R[i + 1110*t] = Op[i + 46*t] ? R[B[i + 46*t]] * R[C[i + 46*t]] : R[B[i + 46*t]] + R[C[i + 46*t]];
R[i + 1111*t] = Op[i + 47*t] ? R[B[i + 47*t]] * R[C[i + 47*t]] : R[B[i + 47*t]] + R[C[i + 47*t]];
R[i + 1112*t] = Op[i + 48*t] ? R[B[i + 48*t]] * R[C[i + 48*t]] : R[B[i + 48*t]] + R[C[i + 48*t]];
R[i + 1113*t] = Op[i + 49*t] ? R[B[i + 49*t]] * R[C[i + 49*t]] : R[B[i + 49*t]] + R[C[i + 49*t]];
R[i + 1114*t] = Op[i + 50*t] ? R[B[i + 50*t]] * R[C[i + 50*t]] : R[B[i + 50*t]] + R[C[i + 50*t]];
R[i + 1115*t] = Op[i + 51*t] ? R[B[i + 51*t]] * R[C[i + 51*t]] : R[B[i + 51*t]] + R[C[i + 51*t]];
R[i + 1116*t] = Op[i + 52*t] ? R[B[i + 52*t]] * R[C[i + 52*t]] : R[B[i + 52*t]] + R[C[i + 52*t]];
R[i + 1117*t] = Op[i + 53*t] ? R[B[i + 53*t]] * R[C[i + 53*t]] : R[B[i + 53*t]] + R[C[i + 53*t]];
R[i + 1118*t] = Op[i + 54*t] ? R[B[i + 54*t]] * R[C[i + 54*t]] : R[B[i + 54*t]] + R[C[i + 54*t]];
R[i + 1119*t] = Op[i + 55*t] ? R[B[i + 55*t]] * R[C[i + 55*t]] : R[B[i + 55*t]] + R[C[i + 55*t]];
R[i + 1120*t] = Op[i + 56*t] ? R[B[i + 56*t]] * R[C[i + 56*t]] : R[B[i + 56*t]] + R[C[i + 56*t]];
R[i + 1121*t] = Op[i + 57*t] ? R[B[i + 57*t]] * R[C[i + 57*t]] : R[B[i + 57*t]] + R[C[i + 57*t]];
R[i + 1122*t] = Op[i + 58*t] ? R[B[i + 58*t]] * R[C[i + 58*t]] : R[B[i + 58*t]] + R[C[i + 58*t]];
R[i + 1123*t] = Op[i + 59*t] ? R[B[i + 59*t]] * R[C[i + 59*t]] : R[B[i + 59*t]] + R[C[i + 59*t]];
R[i + 1124*t] = Op[i + 60*t] ? R[B[i + 60*t]] * R[C[i + 60*t]] : R[B[i + 60*t]] + R[C[i + 60*t]];
R[i + 1125*t] = Op[i + 61*t] ? R[B[i + 61*t]] * R[C[i + 61*t]] : R[B[i + 61*t]] + R[C[i + 61*t]];
R[i + 1126*t] = Op[i + 62*t] ? R[B[i + 62*t]] * R[C[i + 62*t]] : R[B[i + 62*t]] + R[C[i + 62*t]];
R[i + 1127*t] = Op[i + 63*t] ? R[B[i + 63*t]] * R[C[i + 63*t]] : R[B[i + 63*t]] + R[C[i + 63*t]];
R[i + 1128*t] = Op[i + 64*t] ? R[B[i + 64*t]] * R[C[i + 64*t]] : R[B[i + 64*t]] + R[C[i + 64*t]];
R[i + 1129*t] = Op[i + 65*t] ? R[B[i + 65*t]] * R[C[i + 65*t]] : R[B[i + 65*t]] + R[C[i + 65*t]];
R[i + 1130*t] = Op[i + 66*t] ? R[B[i + 66*t]] * R[C[i + 66*t]] : R[B[i + 66*t]] + R[C[i + 66*t]];
R[i + 1131*t] = Op[i + 67*t] ? R[B[i + 67*t]] * R[C[i + 67*t]] : R[B[i + 67*t]] + R[C[i + 67*t]];
R[i + 1132*t] = Op[i + 68*t] ? R[B[i + 68*t]] * R[C[i + 68*t]] : R[B[i + 68*t]] + R[C[i + 68*t]];
R[i + 1133*t] = Op[i + 69*t] ? R[B[i + 69*t]] * R[C[i + 69*t]] : R[B[i + 69*t]] + R[C[i + 69*t]];
R[i + 1134*t] = Op[i + 70*t] ? R[B[i + 70*t]] * R[C[i + 70*t]] : R[B[i + 70*t]] + R[C[i + 70*t]];
R[i + 1135*t] = Op[i + 71*t] ? R[B[i + 71*t]] * R[C[i + 71*t]] : R[B[i + 71*t]] + R[C[i + 71*t]];
R[i + 1136*t] = Op[i + 72*t] ? R[B[i + 72*t]] * R[C[i + 72*t]] : R[B[i + 72*t]] + R[C[i + 72*t]];
R[i + 1137*t] = Op[i + 73*t] ? R[B[i + 73*t]] * R[C[i + 73*t]] : R[B[i + 73*t]] + R[C[i + 73*t]];
R[i + 1138*t] = Op[i + 74*t] ? R[B[i + 74*t]] * R[C[i + 74*t]] : R[B[i + 74*t]] + R[C[i + 74*t]];
R[i + 1139*t] = Op[i + 75*t] ? R[B[i + 75*t]] * R[C[i + 75*t]] : R[B[i + 75*t]] + R[C[i + 75*t]];
R[i + 1140*t] = Op[i + 76*t] ? R[B[i + 76*t]] * R[C[i + 76*t]] : R[B[i + 76*t]] + R[C[i + 76*t]];
R[i + 1141*t] = Op[i + 77*t] ? R[B[i + 77*t]] * R[C[i + 77*t]] : R[B[i + 77*t]] + R[C[i + 77*t]];
R[i + 1142*t] = Op[i + 78*t] ? R[B[i + 78*t]] * R[C[i + 78*t]] : R[B[i + 78*t]] + R[C[i + 78*t]];
R[i + 1143*t] = Op[i + 79*t] ? R[B[i + 79*t]] * R[C[i + 79*t]] : R[B[i + 79*t]] + R[C[i + 79*t]];
R[i + 1144*t] = Op[i + 80*t] ? R[B[i + 80*t]] * R[C[i + 80*t]] : R[B[i + 80*t]] + R[C[i + 80*t]];
R[i + 1145*t] = Op[i + 81*t] ? R[B[i + 81*t]] * R[C[i + 81*t]] : R[B[i + 81*t]] + R[C[i + 81*t]];
R[i + 1146*t] = Op[i + 82*t] ? R[B[i + 82*t]] * R[C[i + 82*t]] : R[B[i + 82*t]] + R[C[i + 82*t]];
R[i + 1147*t] = Op[i + 83*t] ? R[B[i + 83*t]] * R[C[i + 83*t]] : R[B[i + 83*t]] + R[C[i + 83*t]];
R[i + 1148*t] = Op[i + 84*t] ? R[B[i + 84*t]] * R[C[i + 84*t]] : R[B[i + 84*t]] + R[C[i + 84*t]];
R[i + 1149*t] = Op[i + 85*t] ? R[B[i + 85*t]] * R[C[i + 85*t]] : R[B[i + 85*t]] + R[C[i + 85*t]];
R[i + 1150*t] = Op[i + 86*t] ? R[B[i + 86*t]] * R[C[i + 86*t]] : R[B[i + 86*t]] + R[C[i + 86*t]];
R[i + 1151*t] = Op[i + 87*t] ? R[B[i + 87*t]] * R[C[i + 87*t]] : R[B[i + 87*t]] + R[C[i + 87*t]];
R[i + 1152*t] = Op[i + 88*t] ? R[B[i + 88*t]] * R[C[i + 88*t]] : R[B[i + 88*t]] + R[C[i + 88*t]];
R[i + 1153*t] = Op[i + 89*t] ? R[B[i + 89*t]] * R[C[i + 89*t]] : R[B[i + 89*t]] + R[C[i + 89*t]];
R[i + 1154*t] = Op[i + 90*t] ? R[B[i + 90*t]] * R[C[i + 90*t]] : R[B[i + 90*t]] + R[C[i + 90*t]];
R[i + 1155*t] = Op[i + 91*t] ? R[B[i + 91*t]] * R[C[i + 91*t]] : R[B[i + 91*t]] + R[C[i + 91*t]];
R[i + 1156*t] = Op[i + 92*t] ? R[B[i + 92*t]] * R[C[i + 92*t]] : R[B[i + 92*t]] + R[C[i + 92*t]];
R[i + 1157*t] = Op[i + 93*t] ? R[B[i + 93*t]] * R[C[i + 93*t]] : R[B[i + 93*t]] + R[C[i + 93*t]];
R[i + 1158*t] = Op[i + 94*t] ? R[B[i + 94*t]] * R[C[i + 94*t]] : R[B[i + 94*t]] + R[C[i + 94*t]];
R[i + 1159*t] = Op[i + 95*t] ? R[B[i + 95*t]] * R[C[i + 95*t]] : R[B[i + 95*t]] + R[C[i + 95*t]];
R[i + 1160*t] = Op[i + 96*t] ? R[B[i + 96*t]] * R[C[i + 96*t]] : R[B[i + 96*t]] + R[C[i + 96*t]];
R[i + 1161*t] = Op[i + 97*t] ? R[B[i + 97*t]] * R[C[i + 97*t]] : R[B[i + 97*t]] + R[C[i + 97*t]];
R[i + 1162*t] = Op[i + 98*t] ? R[B[i + 98*t]] * R[C[i + 98*t]] : R[B[i + 98*t]] + R[C[i + 98*t]];
R[i + 1163*t] = Op[i + 99*t] ? R[B[i + 99*t]] * R[C[i + 99*t]] : R[B[i + 99*t]] + R[C[i + 99*t]];
R[i + 1164*t] = Op[i + 100*t] ? R[B[i + 100*t]] * R[C[i + 100*t]] : R[B[i + 100*t]] + R[C[i + 100*t]];
R[i + 1165*t] = Op[i + 101*t] ? R[B[i + 101*t]] * R[C[i + 101*t]] : R[B[i + 101*t]] + R[C[i + 101*t]];
R[i + 1166*t] = Op[i + 102*t] ? R[B[i + 102*t]] * R[C[i + 102*t]] : R[B[i + 102*t]] + R[C[i + 102*t]];
R[i + 1167*t] = Op[i + 103*t] ? R[B[i + 103*t]] * R[C[i + 103*t]] : R[B[i + 103*t]] + R[C[i + 103*t]];
R[i + 1168*t] = Op[i + 104*t] ? R[B[i + 104*t]] * R[C[i + 104*t]] : R[B[i + 104*t]] + R[C[i + 104*t]];
R[i + 1169*t] = Op[i + 105*t] ? R[B[i + 105*t]] * R[C[i + 105*t]] : R[B[i + 105*t]] + R[C[i + 105*t]];
R[i + 1170*t] = Op[i + 106*t] ? R[B[i + 106*t]] * R[C[i + 106*t]] : R[B[i + 106*t]] + R[C[i + 106*t]];
R[i + 1171*t] = Op[i + 107*t] ? R[B[i + 107*t]] * R[C[i + 107*t]] : R[B[i + 107*t]] + R[C[i + 107*t]];
R[i + 1172*t] = Op[i + 108*t] ? R[B[i + 108*t]] * R[C[i + 108*t]] : R[B[i + 108*t]] + R[C[i + 108*t]];
R[i + 1173*t] = Op[i + 109*t] ? R[B[i + 109*t]] * R[C[i + 109*t]] : R[B[i + 109*t]] + R[C[i + 109*t]];
R[i + 1174*t] = Op[i + 110*t] ? R[B[i + 110*t]] * R[C[i + 110*t]] : R[B[i + 110*t]] + R[C[i + 110*t]];
R[i + 1175*t] = Op[i + 111*t] ? R[B[i + 111*t]] * R[C[i + 111*t]] : R[B[i + 111*t]] + R[C[i + 111*t]];
R[i + 1176*t] = Op[i + 112*t] ? R[B[i + 112*t]] * R[C[i + 112*t]] : R[B[i + 112*t]] + R[C[i + 112*t]];
R[i + 1177*t] = Op[i + 113*t] ? R[B[i + 113*t]] * R[C[i + 113*t]] : R[B[i + 113*t]] + R[C[i + 113*t]];
R[i + 1178*t] = Op[i + 114*t] ? R[B[i + 114*t]] * R[C[i + 114*t]] : R[B[i + 114*t]] + R[C[i + 114*t]];
R[i + 1179*t] = Op[i + 115*t] ? R[B[i + 115*t]] * R[C[i + 115*t]] : R[B[i + 115*t]] + R[C[i + 115*t]];
R[i + 1180*t] = Op[i + 116*t] ? R[B[i + 116*t]] * R[C[i + 116*t]] : R[B[i + 116*t]] + R[C[i + 116*t]];
R[i + 1181*t] = Op[i + 117*t] ? R[B[i + 117*t]] * R[C[i + 117*t]] : R[B[i + 117*t]] + R[C[i + 117*t]];
R[i + 1182*t] = Op[i + 118*t] ? R[B[i + 118*t]] * R[C[i + 118*t]] : R[B[i + 118*t]] + R[C[i + 118*t]];
R[i + 1183*t] = Op[i + 119*t] ? R[B[i + 119*t]] * R[C[i + 119*t]] : R[B[i + 119*t]] + R[C[i + 119*t]];
R[i + 1184*t] = Op[i + 120*t] ? R[B[i + 120*t]] * R[C[i + 120*t]] : R[B[i + 120*t]] + R[C[i + 120*t]];
R[i + 1185*t] = Op[i + 121*t] ? R[B[i + 121*t]] * R[C[i + 121*t]] : R[B[i + 121*t]] + R[C[i + 121*t]];
R[i + 1186*t] = Op[i + 122*t] ? R[B[i + 122*t]] * R[C[i + 122*t]] : R[B[i + 122*t]] + R[C[i + 122*t]];
R[i + 1187*t] = Op[i + 123*t] ? R[B[i + 123*t]] * R[C[i + 123*t]] : R[B[i + 123*t]] + R[C[i + 123*t]];
R[i + 1188*t] = Op[i + 124*t] ? R[B[i + 124*t]] * R[C[i + 124*t]] : R[B[i + 124*t]] + R[C[i + 124*t]];
R[i + 1189*t] = Op[i + 125*t] ? R[B[i + 125*t]] * R[C[i + 125*t]] : R[B[i + 125*t]] + R[C[i + 125*t]];
R[i + 1190*t] = Op[i + 126*t] ? R[B[i + 126*t]] * R[C[i + 126*t]] : R[B[i + 126*t]] + R[C[i + 126*t]];
R[i + 1191*t] = Op[i + 127*t] ? R[B[i + 127*t]] * R[C[i + 127*t]] : R[B[i + 127*t]] + R[C[i + 127*t]];
R[i + 1192*t] = Op[i + 128*t] ? R[B[i + 128*t]] * R[C[i + 128*t]] : R[B[i + 128*t]] + R[C[i + 128*t]];
R[i + 1193*t] = Op[i + 129*t] ? R[B[i + 129*t]] * R[C[i + 129*t]] : R[B[i + 129*t]] + R[C[i + 129*t]];
R[i + 1194*t] = Op[i + 130*t] ? R[B[i + 130*t]] * R[C[i + 130*t]] : R[B[i + 130*t]] + R[C[i + 130*t]];
R[i + 1195*t] = Op[i + 131*t] ? R[B[i + 131*t]] * R[C[i + 131*t]] : R[B[i + 131*t]] + R[C[i + 131*t]];
R[i + 1196*t] = Op[i + 132*t] ? R[B[i + 132*t]] * R[C[i + 132*t]] : R[B[i + 132*t]] + R[C[i + 132*t]];
R[i + 1197*t] = Op[i + 133*t] ? R[B[i + 133*t]] * R[C[i + 133*t]] : R[B[i + 133*t]] + R[C[i + 133*t]];
R[i + 1198*t] = Op[i + 134*t] ? R[B[i + 134*t]] * R[C[i + 134*t]] : R[B[i + 134*t]] + R[C[i + 134*t]];
R[i + 1199*t] = Op[i + 135*t] ? R[B[i + 135*t]] * R[C[i + 135*t]] : R[B[i + 135*t]] + R[C[i + 135*t]];
R[i + 1200*t] = Op[i + 136*t] ? R[B[i + 136*t]] * R[C[i + 136*t]] : R[B[i + 136*t]] + R[C[i + 136*t]];
R[i + 1201*t] = Op[i + 137*t] ? R[B[i + 137*t]] * R[C[i + 137*t]] : R[B[i + 137*t]] + R[C[i + 137*t]];
R[i + 1202*t] = Op[i + 138*t] ? R[B[i + 138*t]] * R[C[i + 138*t]] : R[B[i + 138*t]] + R[C[i + 138*t]];
R[i + 1203*t] = Op[i + 139*t] ? R[B[i + 139*t]] * R[C[i + 139*t]] : R[B[i + 139*t]] + R[C[i + 139*t]];
R[i + 1204*t] = Op[i + 140*t] ? R[B[i + 140*t]] * R[C[i + 140*t]] : R[B[i + 140*t]] + R[C[i + 140*t]];
R[i + 1205*t] = Op[i + 141*t] ? R[B[i + 141*t]] * R[C[i + 141*t]] : R[B[i + 141*t]] + R[C[i + 141*t]];
R[i + 1206*t] = Op[i + 142*t] ? R[B[i + 142*t]] * R[C[i + 142*t]] : R[B[i + 142*t]] + R[C[i + 142*t]];
R[i + 1207*t] = Op[i + 143*t] ? R[B[i + 143*t]] * R[C[i + 143*t]] : R[B[i + 143*t]] + R[C[i + 143*t]];
R[i + 1208*t] = Op[i + 144*t] ? R[B[i + 144*t]] * R[C[i + 144*t]] : R[B[i + 144*t]] + R[C[i + 144*t]];
R[i + 1209*t] = Op[i + 145*t] ? R[B[i + 145*t]] * R[C[i + 145*t]] : R[B[i + 145*t]] + R[C[i + 145*t]];
R[i + 1210*t] = Op[i + 146*t] ? R[B[i + 146*t]] * R[C[i + 146*t]] : R[B[i + 146*t]] + R[C[i + 146*t]];
R[i + 1211*t] = Op[i + 147*t] ? R[B[i + 147*t]] * R[C[i + 147*t]] : R[B[i + 147*t]] + R[C[i + 147*t]];
R[i + 1212*t] = Op[i + 148*t] ? R[B[i + 148*t]] * R[C[i + 148*t]] : R[B[i + 148*t]] + R[C[i + 148*t]];
R[i + 1213*t] = Op[i + 149*t] ? R[B[i + 149*t]] * R[C[i + 149*t]] : R[B[i + 149*t]] + R[C[i + 149*t]];
R[i + 1214*t] = Op[i + 150*t] ? R[B[i + 150*t]] * R[C[i + 150*t]] : R[B[i + 150*t]] + R[C[i + 150*t]];
R[i + 1215*t] = Op[i + 151*t] ? R[B[i + 151*t]] * R[C[i + 151*t]] : R[B[i + 151*t]] + R[C[i + 151*t]];
R[i + 1216*t] = Op[i + 152*t] ? R[B[i + 152*t]] * R[C[i + 152*t]] : R[B[i + 152*t]] + R[C[i + 152*t]];
R[i + 1217*t] = Op[i + 153*t] ? R[B[i + 153*t]] * R[C[i + 153*t]] : R[B[i + 153*t]] + R[C[i + 153*t]];
R[i + 1218*t] = Op[i + 154*t] ? R[B[i + 154*t]] * R[C[i + 154*t]] : R[B[i + 154*t]] + R[C[i + 154*t]];
R[i + 1219*t] = Op[i + 155*t] ? R[B[i + 155*t]] * R[C[i + 155*t]] : R[B[i + 155*t]] + R[C[i + 155*t]];
R[i + 1220*t] = Op[i + 156*t] ? R[B[i + 156*t]] * R[C[i + 156*t]] : R[B[i + 156*t]] + R[C[i + 156*t]];
R[i + 1221*t] = Op[i + 157*t] ? R[B[i + 157*t]] * R[C[i + 157*t]] : R[B[i + 157*t]] + R[C[i + 157*t]];
R[i + 1222*t] = Op[i + 158*t] ? R[B[i + 158*t]] * R[C[i + 158*t]] : R[B[i + 158*t]] + R[C[i + 158*t]];
R[i + 1223*t] = Op[i + 159*t] ? R[B[i + 159*t]] * R[C[i + 159*t]] : R[B[i + 159*t]] + R[C[i + 159*t]];
R[i + 1224*t] = Op[i + 160*t] ? R[B[i + 160*t]] * R[C[i + 160*t]] : R[B[i + 160*t]] + R[C[i + 160*t]];
R[i + 1225*t] = Op[i + 161*t] ? R[B[i + 161*t]] * R[C[i + 161*t]] : R[B[i + 161*t]] + R[C[i + 161*t]];
R[i + 1226*t] = Op[i + 162*t] ? R[B[i + 162*t]] * R[C[i + 162*t]] : R[B[i + 162*t]] + R[C[i + 162*t]];
R[i + 1227*t] = Op[i + 163*t] ? R[B[i + 163*t]] * R[C[i + 163*t]] : R[B[i + 163*t]] + R[C[i + 163*t]];
R[i + 1228*t] = Op[i + 164*t] ? R[B[i + 164*t]] * R[C[i + 164*t]] : R[B[i + 164*t]] + R[C[i + 164*t]];
R[i + 1229*t] = Op[i + 165*t] ? R[B[i + 165*t]] * R[C[i + 165*t]] : R[B[i + 165*t]] + R[C[i + 165*t]];
R[i + 1230*t] = Op[i + 166*t] ? R[B[i + 166*t]] * R[C[i + 166*t]] : R[B[i + 166*t]] + R[C[i + 166*t]];
R[i + 1231*t] = Op[i + 167*t] ? R[B[i + 167*t]] * R[C[i + 167*t]] : R[B[i + 167*t]] + R[C[i + 167*t]];
R[i + 1232*t] = Op[i + 168*t] ? R[B[i + 168*t]] * R[C[i + 168*t]] : R[B[i + 168*t]] + R[C[i + 168*t]];
R[i + 1233*t] = Op[i + 169*t] ? R[B[i + 169*t]] * R[C[i + 169*t]] : R[B[i + 169*t]] + R[C[i + 169*t]];
R[i + 1234*t] = Op[i + 170*t] ? R[B[i + 170*t]] * R[C[i + 170*t]] : R[B[i + 170*t]] + R[C[i + 170*t]];
R[i + 1235*t] = Op[i + 171*t] ? R[B[i + 171*t]] * R[C[i + 171*t]] : R[B[i + 171*t]] + R[C[i + 171*t]];
R[i + 1236*t] = Op[i + 172*t] ? R[B[i + 172*t]] * R[C[i + 172*t]] : R[B[i + 172*t]] + R[C[i + 172*t]];
R[i + 1237*t] = Op[i + 173*t] ? R[B[i + 173*t]] * R[C[i + 173*t]] : R[B[i + 173*t]] + R[C[i + 173*t]];
R[i + 1238*t] = Op[i + 174*t] ? R[B[i + 174*t]] * R[C[i + 174*t]] : R[B[i + 174*t]] + R[C[i + 174*t]];
R[i + 1239*t] = Op[i + 175*t] ? R[B[i + 175*t]] * R[C[i + 175*t]] : R[B[i + 175*t]] + R[C[i + 175*t]];
R[i + 1240*t] = Op[i + 176*t] ? R[B[i + 176*t]] * R[C[i + 176*t]] : R[B[i + 176*t]] + R[C[i + 176*t]];
R[i + 1241*t] = Op[i + 177*t] ? R[B[i + 177*t]] * R[C[i + 177*t]] : R[B[i + 177*t]] + R[C[i + 177*t]];
R[i + 1242*t] = Op[i + 178*t] ? R[B[i + 178*t]] * R[C[i + 178*t]] : R[B[i + 178*t]] + R[C[i + 178*t]];
R[i + 1243*t] = Op[i + 179*t] ? R[B[i + 179*t]] * R[C[i + 179*t]] : R[B[i + 179*t]] + R[C[i + 179*t]];
R[i + 1244*t] = Op[i + 180*t] ? R[B[i + 180*t]] * R[C[i + 180*t]] : R[B[i + 180*t]] + R[C[i + 180*t]];
R[i + 1245*t] = Op[i + 181*t] ? R[B[i + 181*t]] * R[C[i + 181*t]] : R[B[i + 181*t]] + R[C[i + 181*t]];
R[i + 1246*t] = Op[i + 182*t] ? R[B[i + 182*t]] * R[C[i + 182*t]] : R[B[i + 182*t]] + R[C[i + 182*t]];
R[i + 1247*t] = Op[i + 183*t] ? R[B[i + 183*t]] * R[C[i + 183*t]] : R[B[i + 183*t]] + R[C[i + 183*t]];
R[i + 1248*t] = Op[i + 184*t] ? R[B[i + 184*t]] * R[C[i + 184*t]] : R[B[i + 184*t]] + R[C[i + 184*t]];
R[i + 1249*t] = Op[i + 185*t] ? R[B[i + 185*t]] * R[C[i + 185*t]] : R[B[i + 185*t]] + R[C[i + 185*t]];
R[i + 1250*t] = Op[i + 186*t] ? R[B[i + 186*t]] * R[C[i + 186*t]] : R[B[i + 186*t]] + R[C[i + 186*t]];
R[i + 1251*t] = Op[i + 187*t] ? R[B[i + 187*t]] * R[C[i + 187*t]] : R[B[i + 187*t]] + R[C[i + 187*t]];
R[i + 1252*t] = Op[i + 188*t] ? R[B[i + 188*t]] * R[C[i + 188*t]] : R[B[i + 188*t]] + R[C[i + 188*t]];
R[i + 1253*t] = Op[i + 189*t] ? R[B[i + 189*t]] * R[C[i + 189*t]] : R[B[i + 189*t]] + R[C[i + 189*t]];
R[i + 1254*t] = Op[i + 190*t] ? R[B[i + 190*t]] * R[C[i + 190*t]] : R[B[i + 190*t]] + R[C[i + 190*t]];
R[i + 1255*t] = Op[i + 191*t] ? R[B[i + 191*t]] * R[C[i + 191*t]] : R[B[i + 191*t]] + R[C[i + 191*t]];
R[i + 1256*t] = Op[i + 192*t] ? R[B[i + 192*t]] * R[C[i + 192*t]] : R[B[i + 192*t]] + R[C[i + 192*t]];
R[i + 1257*t] = Op[i + 193*t] ? R[B[i + 193*t]] * R[C[i + 193*t]] : R[B[i + 193*t]] + R[C[i + 193*t]];
R[i + 1258*t] = Op[i + 194*t] ? R[B[i + 194*t]] * R[C[i + 194*t]] : R[B[i + 194*t]] + R[C[i + 194*t]];
R[i + 1259*t] = Op[i + 195*t] ? R[B[i + 195*t]] * R[C[i + 195*t]] : R[B[i + 195*t]] + R[C[i + 195*t]];
R[i + 1260*t] = Op[i + 196*t] ? R[B[i + 196*t]] * R[C[i + 196*t]] : R[B[i + 196*t]] + R[C[i + 196*t]];
R[i + 1261*t] = Op[i + 197*t] ? R[B[i + 197*t]] * R[C[i + 197*t]] : R[B[i + 197*t]] + R[C[i + 197*t]];
R[i + 1262*t] = Op[i + 198*t] ? R[B[i + 198*t]] * R[C[i + 198*t]] : R[B[i + 198*t]] + R[C[i + 198*t]];
R[i + 1263*t] = Op[i + 199*t] ? R[B[i + 199*t]] * R[C[i + 199*t]] : R[B[i + 199*t]] + R[C[i + 199*t]];
R[i + 1264*t] = Op[i + 200*t] ? R[B[i + 200*t]] * R[C[i + 200*t]] : R[B[i + 200*t]] + R[C[i + 200*t]];
R[i + 1265*t] = Op[i + 201*t] ? R[B[i + 201*t]] * R[C[i + 201*t]] : R[B[i + 201*t]] + R[C[i + 201*t]];
R[i + 1266*t] = Op[i + 202*t] ? R[B[i + 202*t]] * R[C[i + 202*t]] : R[B[i + 202*t]] + R[C[i + 202*t]];
R[i + 1267*t] = Op[i + 203*t] ? R[B[i + 203*t]] * R[C[i + 203*t]] : R[B[i + 203*t]] + R[C[i + 203*t]];
R[i + 1268*t] = Op[i + 204*t] ? R[B[i + 204*t]] * R[C[i + 204*t]] : R[B[i + 204*t]] + R[C[i + 204*t]];
R[i + 1269*t] = Op[i + 205*t] ? R[B[i + 205*t]] * R[C[i + 205*t]] : R[B[i + 205*t]] + R[C[i + 205*t]];
R[i + 1270*t] = Op[i + 206*t] ? R[B[i + 206*t]] * R[C[i + 206*t]] : R[B[i + 206*t]] + R[C[i + 206*t]];
R[i + 1271*t] = Op[i + 207*t] ? R[B[i + 207*t]] * R[C[i + 207*t]] : R[B[i + 207*t]] + R[C[i + 207*t]];
R[i + 1272*t] = Op[i + 208*t] ? R[B[i + 208*t]] * R[C[i + 208*t]] : R[B[i + 208*t]] + R[C[i + 208*t]];
R[i + 1273*t] = Op[i + 209*t] ? R[B[i + 209*t]] * R[C[i + 209*t]] : R[B[i + 209*t]] + R[C[i + 209*t]];
R[i + 1274*t] = Op[i + 210*t] ? R[B[i + 210*t]] * R[C[i + 210*t]] : R[B[i + 210*t]] + R[C[i + 210*t]];
R[i + 1275*t] = Op[i + 211*t] ? R[B[i + 211*t]] * R[C[i + 211*t]] : R[B[i + 211*t]] + R[C[i + 211*t]];
R[i + 1276*t] = Op[i + 212*t] ? R[B[i + 212*t]] * R[C[i + 212*t]] : R[B[i + 212*t]] + R[C[i + 212*t]];
R[i + 1277*t] = Op[i + 213*t] ? R[B[i + 213*t]] * R[C[i + 213*t]] : R[B[i + 213*t]] + R[C[i + 213*t]];
R[i + 1278*t] = Op[i + 214*t] ? R[B[i + 214*t]] * R[C[i + 214*t]] : R[B[i + 214*t]] + R[C[i + 214*t]];
R[i + 1279*t] = Op[i + 215*t] ? R[B[i + 215*t]] * R[C[i + 215*t]] : R[B[i + 215*t]] + R[C[i + 215*t]];
R[i + 1280*t] = Op[i + 216*t] ? R[B[i + 216*t]] * R[C[i + 216*t]] : R[B[i + 216*t]] + R[C[i + 216*t]];
R[i + 1281*t] = Op[i + 217*t] ? R[B[i + 217*t]] * R[C[i + 217*t]] : R[B[i + 217*t]] + R[C[i + 217*t]];
R[i + 1282*t] = Op[i + 218*t] ? R[B[i + 218*t]] * R[C[i + 218*t]] : R[B[i + 218*t]] + R[C[i + 218*t]];
R[i + 1283*t] = Op[i + 219*t] ? R[B[i + 219*t]] * R[C[i + 219*t]] : R[B[i + 219*t]] + R[C[i + 219*t]];
R[i + 1284*t] = Op[i + 220*t] ? R[B[i + 220*t]] * R[C[i + 220*t]] : R[B[i + 220*t]] + R[C[i + 220*t]];
R[i + 1285*t] = Op[i + 221*t] ? R[B[i + 221*t]] * R[C[i + 221*t]] : R[B[i + 221*t]] + R[C[i + 221*t]];
R[i + 1286*t] = Op[i + 222*t] ? R[B[i + 222*t]] * R[C[i + 222*t]] : R[B[i + 222*t]] + R[C[i + 222*t]];
R[i + 1287*t] = Op[i + 223*t] ? R[B[i + 223*t]] * R[C[i + 223*t]] : R[B[i + 223*t]] + R[C[i + 223*t]];
R[i + 1288*t] = Op[i + 224*t] ? R[B[i + 224*t]] * R[C[i + 224*t]] : R[B[i + 224*t]] + R[C[i + 224*t]];
R[i + 1289*t] = Op[i + 225*t] ? R[B[i + 225*t]] * R[C[i + 225*t]] : R[B[i + 225*t]] + R[C[i + 225*t]];
R[i + 1290*t] = Op[i + 226*t] ? R[B[i + 226*t]] * R[C[i + 226*t]] : R[B[i + 226*t]] + R[C[i + 226*t]];
R[i + 1291*t] = Op[i + 227*t] ? R[B[i + 227*t]] * R[C[i + 227*t]] : R[B[i + 227*t]] + R[C[i + 227*t]];
R[i + 1292*t] = Op[i + 228*t] ? R[B[i + 228*t]] * R[C[i + 228*t]] : R[B[i + 228*t]] + R[C[i + 228*t]];
R[i + 1293*t] = Op[i + 229*t] ? R[B[i + 229*t]] * R[C[i + 229*t]] : R[B[i + 229*t]] + R[C[i + 229*t]];
R[i + 1294*t] = Op[i + 230*t] ? R[B[i + 230*t]] * R[C[i + 230*t]] : R[B[i + 230*t]] + R[C[i + 230*t]];
R[i + 1295*t] = Op[i + 231*t] ? R[B[i + 231*t]] * R[C[i + 231*t]] : R[B[i + 231*t]] + R[C[i + 231*t]];
R[i + 1296*t] = Op[i + 232*t] ? R[B[i + 232*t]] * R[C[i + 232*t]] : R[B[i + 232*t]] + R[C[i + 232*t]];
R[i + 1297*t] = Op[i + 233*t] ? R[B[i + 233*t]] * R[C[i + 233*t]] : R[B[i + 233*t]] + R[C[i + 233*t]];
R[i + 1298*t] = Op[i + 234*t] ? R[B[i + 234*t]] * R[C[i + 234*t]] : R[B[i + 234*t]] + R[C[i + 234*t]];
R[i + 1299*t] = Op[i + 235*t] ? R[B[i + 235*t]] * R[C[i + 235*t]] : R[B[i + 235*t]] + R[C[i + 235*t]];
R[i + 1300*t] = Op[i + 236*t] ? R[B[i + 236*t]] * R[C[i + 236*t]] : R[B[i + 236*t]] + R[C[i + 236*t]];
R[i + 1301*t] = Op[i + 237*t] ? R[B[i + 237*t]] * R[C[i + 237*t]] : R[B[i + 237*t]] + R[C[i + 237*t]];
R[i + 1302*t] = Op[i + 238*t] ? R[B[i + 238*t]] * R[C[i + 238*t]] : R[B[i + 238*t]] + R[C[i + 238*t]];
R[i + 1303*t] = Op[i + 239*t] ? R[B[i + 239*t]] * R[C[i + 239*t]] : R[B[i + 239*t]] + R[C[i + 239*t]];
R[i + 1304*t] = Op[i + 240*t] ? R[B[i + 240*t]] * R[C[i + 240*t]] : R[B[i + 240*t]] + R[C[i + 240*t]];
R[i + 1305*t] = Op[i + 241*t] ? R[B[i + 241*t]] * R[C[i + 241*t]] : R[B[i + 241*t]] + R[C[i + 241*t]];
R[i + 1306*t] = Op[i + 242*t] ? R[B[i + 242*t]] * R[C[i + 242*t]] : R[B[i + 242*t]] + R[C[i + 242*t]];
R[i + 1307*t] = Op[i + 243*t] ? R[B[i + 243*t]] * R[C[i + 243*t]] : R[B[i + 243*t]] + R[C[i + 243*t]];
R[i + 1308*t] = Op[i + 244*t] ? R[B[i + 244*t]] * R[C[i + 244*t]] : R[B[i + 244*t]] + R[C[i + 244*t]];
R[i + 1309*t] = Op[i + 245*t] ? R[B[i + 245*t]] * R[C[i + 245*t]] : R[B[i + 245*t]] + R[C[i + 245*t]];
R[i + 1310*t] = Op[i + 246*t] ? R[B[i + 246*t]] * R[C[i + 246*t]] : R[B[i + 246*t]] + R[C[i + 246*t]];
R[i + 1311*t] = Op[i + 247*t] ? R[B[i + 247*t]] * R[C[i + 247*t]] : R[B[i + 247*t]] + R[C[i + 247*t]];
R[i + 1312*t] = Op[i + 248*t] ? R[B[i + 248*t]] * R[C[i + 248*t]] : R[B[i + 248*t]] + R[C[i + 248*t]];
R[i + 1313*t] = Op[i + 249*t] ? R[B[i + 249*t]] * R[C[i + 249*t]] : R[B[i + 249*t]] + R[C[i + 249*t]];
R[i + 1314*t] = Op[i + 250*t] ? R[B[i + 250*t]] * R[C[i + 250*t]] : R[B[i + 250*t]] + R[C[i + 250*t]];
R[i + 1315*t] = Op[i + 251*t] ? R[B[i + 251*t]] * R[C[i + 251*t]] : R[B[i + 251*t]] + R[C[i + 251*t]];
R[i + 1316*t] = Op[i + 252*t] ? R[B[i + 252*t]] * R[C[i + 252*t]] : R[B[i + 252*t]] + R[C[i + 252*t]];
R[i + 1317*t] = Op[i + 253*t] ? R[B[i + 253*t]] * R[C[i + 253*t]] : R[B[i + 253*t]] + R[C[i + 253*t]];
R[i + 1318*t] = Op[i + 254*t] ? R[B[i + 254*t]] * R[C[i + 254*t]] : R[B[i + 254*t]] + R[C[i + 254*t]];
R[i + 1319*t] = Op[i + 255*t] ? R[B[i + 255*t]] * R[C[i + 255*t]] : R[B[i + 255*t]] + R[C[i + 255*t]];
R[i + 1320*t] = Op[i + 256*t] ? R[B[i + 256*t]] * R[C[i + 256*t]] : R[B[i + 256*t]] + R[C[i + 256*t]];
R[i + 1321*t] = Op[i + 257*t] ? R[B[i + 257*t]] * R[C[i + 257*t]] : R[B[i + 257*t]] + R[C[i + 257*t]];
R[i + 1322*t] = Op[i + 258*t] ? R[B[i + 258*t]] * R[C[i + 258*t]] : R[B[i + 258*t]] + R[C[i + 258*t]];
R[i + 1323*t] = Op[i + 259*t] ? R[B[i + 259*t]] * R[C[i + 259*t]] : R[B[i + 259*t]] + R[C[i + 259*t]];
R[i + 1324*t] = Op[i + 260*t] ? R[B[i + 260*t]] * R[C[i + 260*t]] : R[B[i + 260*t]] + R[C[i + 260*t]];
R[i + 1325*t] = Op[i + 261*t] ? R[B[i + 261*t]] * R[C[i + 261*t]] : R[B[i + 261*t]] + R[C[i + 261*t]];
R[i + 1326*t] = Op[i + 262*t] ? R[B[i + 262*t]] * R[C[i + 262*t]] : R[B[i + 262*t]] + R[C[i + 262*t]];
R[i + 1327*t] = Op[i + 263*t] ? R[B[i + 263*t]] * R[C[i + 263*t]] : R[B[i + 263*t]] + R[C[i + 263*t]];
R[i + 1328*t] = Op[i + 264*t] ? R[B[i + 264*t]] * R[C[i + 264*t]] : R[B[i + 264*t]] + R[C[i + 264*t]];
R[i + 1329*t] = Op[i + 265*t] ? R[B[i + 265*t]] * R[C[i + 265*t]] : R[B[i + 265*t]] + R[C[i + 265*t]];
R[i + 1330*t] = Op[i + 266*t] ? R[B[i + 266*t]] * R[C[i + 266*t]] : R[B[i + 266*t]] + R[C[i + 266*t]];
R[i + 1331*t] = Op[i + 267*t] ? R[B[i + 267*t]] * R[C[i + 267*t]] : R[B[i + 267*t]] + R[C[i + 267*t]];
R[i + 1332*t] = Op[i + 268*t] ? R[B[i + 268*t]] * R[C[i + 268*t]] : R[B[i + 268*t]] + R[C[i + 268*t]];
R[i + 1333*t] = Op[i + 269*t] ? R[B[i + 269*t]] * R[C[i + 269*t]] : R[B[i + 269*t]] + R[C[i + 269*t]];
R[i + 1334*t] = Op[i + 270*t] ? R[B[i + 270*t]] * R[C[i + 270*t]] : R[B[i + 270*t]] + R[C[i + 270*t]];
R[i + 1335*t] = Op[i + 271*t] ? R[B[i + 271*t]] * R[C[i + 271*t]] : R[B[i + 271*t]] + R[C[i + 271*t]];
R[i + 1336*t] = Op[i + 272*t] ? R[B[i + 272*t]] * R[C[i + 272*t]] : R[B[i + 272*t]] + R[C[i + 272*t]];
R[i + 1337*t] = Op[i + 273*t] ? R[B[i + 273*t]] * R[C[i + 273*t]] : R[B[i + 273*t]] + R[C[i + 273*t]];
R[i + 1338*t] = Op[i + 274*t] ? R[B[i + 274*t]] * R[C[i + 274*t]] : R[B[i + 274*t]] + R[C[i + 274*t]];
R[i + 1339*t] = Op[i + 275*t] ? R[B[i + 275*t]] * R[C[i + 275*t]] : R[B[i + 275*t]] + R[C[i + 275*t]];
R[i + 1340*t] = Op[i + 276*t] ? R[B[i + 276*t]] * R[C[i + 276*t]] : R[B[i + 276*t]] + R[C[i + 276*t]];
R[i + 1341*t] = Op[i + 277*t] ? R[B[i + 277*t]] * R[C[i + 277*t]] : R[B[i + 277*t]] + R[C[i + 277*t]];
R[i + 1342*t] = Op[i + 278*t] ? R[B[i + 278*t]] * R[C[i + 278*t]] : R[B[i + 278*t]] + R[C[i + 278*t]];
R[i + 1343*t] = Op[i + 279*t] ? R[B[i + 279*t]] * R[C[i + 279*t]] : R[B[i + 279*t]] + R[C[i + 279*t]];
R[i + 1344*t] = Op[i + 280*t] ? R[B[i + 280*t]] * R[C[i + 280*t]] : R[B[i + 280*t]] + R[C[i + 280*t]];
R[i + 1345*t] = Op[i + 281*t] ? R[B[i + 281*t]] * R[C[i + 281*t]] : R[B[i + 281*t]] + R[C[i + 281*t]];
R[i + 1346*t] = Op[i + 282*t] ? R[B[i + 282*t]] * R[C[i + 282*t]] : R[B[i + 282*t]] + R[C[i + 282*t]];
R[i + 1347*t] = Op[i + 283*t] ? R[B[i + 283*t]] * R[C[i + 283*t]] : R[B[i + 283*t]] + R[C[i + 283*t]];
R[i + 1348*t] = Op[i + 284*t] ? R[B[i + 284*t]] * R[C[i + 284*t]] : R[B[i + 284*t]] + R[C[i + 284*t]];
R[i + 1349*t] = Op[i + 285*t] ? R[B[i + 285*t]] * R[C[i + 285*t]] : R[B[i + 285*t]] + R[C[i + 285*t]];
R[i + 1350*t] = Op[i + 286*t] ? R[B[i + 286*t]] * R[C[i + 286*t]] : R[B[i + 286*t]] + R[C[i + 286*t]];
R[i + 1351*t] = Op[i + 287*t] ? R[B[i + 287*t]] * R[C[i + 287*t]] : R[B[i + 287*t]] + R[C[i + 287*t]];
R[i + 1352*t] = Op[i + 288*t] ? R[B[i + 288*t]] * R[C[i + 288*t]] : R[B[i + 288*t]] + R[C[i + 288*t]];
R[i + 1353*t] = Op[i + 289*t] ? R[B[i + 289*t]] * R[C[i + 289*t]] : R[B[i + 289*t]] + R[C[i + 289*t]];
R[i + 1354*t] = Op[i + 290*t] ? R[B[i + 290*t]] * R[C[i + 290*t]] : R[B[i + 290*t]] + R[C[i + 290*t]];
R[i + 1355*t] = Op[i + 291*t] ? R[B[i + 291*t]] * R[C[i + 291*t]] : R[B[i + 291*t]] + R[C[i + 291*t]];
R[i + 1356*t] = Op[i + 292*t] ? R[B[i + 292*t]] * R[C[i + 292*t]] : R[B[i + 292*t]] + R[C[i + 292*t]];
R[i + 1357*t] = Op[i + 293*t] ? R[B[i + 293*t]] * R[C[i + 293*t]] : R[B[i + 293*t]] + R[C[i + 293*t]];
R[i + 1358*t] = Op[i + 294*t] ? R[B[i + 294*t]] * R[C[i + 294*t]] : R[B[i + 294*t]] + R[C[i + 294*t]];
R[i + 1359*t] = Op[i + 295*t] ? R[B[i + 295*t]] * R[C[i + 295*t]] : R[B[i + 295*t]] + R[C[i + 295*t]];
R[i + 1360*t] = Op[i + 296*t] ? R[B[i + 296*t]] * R[C[i + 296*t]] : R[B[i + 296*t]] + R[C[i + 296*t]];
R[i + 1361*t] = Op[i + 297*t] ? R[B[i + 297*t]] * R[C[i + 297*t]] : R[B[i + 297*t]] + R[C[i + 297*t]];
R[i + 1362*t] = Op[i + 298*t] ? R[B[i + 298*t]] * R[C[i + 298*t]] : R[B[i + 298*t]] + R[C[i + 298*t]];
R[i + 1363*t] = Op[i + 299*t] ? R[B[i + 299*t]] * R[C[i + 299*t]] : R[B[i + 299*t]] + R[C[i + 299*t]];
R[i + 1364*t] = Op[i + 300*t] ? R[B[i + 300*t]] * R[C[i + 300*t]] : R[B[i + 300*t]] + R[C[i + 300*t]];
R[i + 1365*t] = Op[i + 301*t] ? R[B[i + 301*t]] * R[C[i + 301*t]] : R[B[i + 301*t]] + R[C[i + 301*t]];
R[i + 1366*t] = Op[i + 302*t] ? R[B[i + 302*t]] * R[C[i + 302*t]] : R[B[i + 302*t]] + R[C[i + 302*t]];
R[i + 1367*t] = Op[i + 303*t] ? R[B[i + 303*t]] * R[C[i + 303*t]] : R[B[i + 303*t]] + R[C[i + 303*t]];
R[i + 1368*t] = Op[i + 304*t] ? R[B[i + 304*t]] * R[C[i + 304*t]] : R[B[i + 304*t]] + R[C[i + 304*t]];
R[i + 1369*t] = Op[i + 305*t] ? R[B[i + 305*t]] * R[C[i + 305*t]] : R[B[i + 305*t]] + R[C[i + 305*t]];
R[i + 1370*t] = Op[i + 306*t] ? R[B[i + 306*t]] * R[C[i + 306*t]] : R[B[i + 306*t]] + R[C[i + 306*t]];
R[i + 1371*t] = Op[i + 307*t] ? R[B[i + 307*t]] * R[C[i + 307*t]] : R[B[i + 307*t]] + R[C[i + 307*t]];
R[i + 1372*t] = Op[i + 308*t] ? R[B[i + 308*t]] * R[C[i + 308*t]] : R[B[i + 308*t]] + R[C[i + 308*t]];
R[i + 1373*t] = Op[i + 309*t] ? R[B[i + 309*t]] * R[C[i + 309*t]] : R[B[i + 309*t]] + R[C[i + 309*t]];
R[i + 1374*t] = Op[i + 310*t] ? R[B[i + 310*t]] * R[C[i + 310*t]] : R[B[i + 310*t]] + R[C[i + 310*t]];
R[i + 1375*t] = Op[i + 311*t] ? R[B[i + 311*t]] * R[C[i + 311*t]] : R[B[i + 311*t]] + R[C[i + 311*t]];
R[i + 1376*t] = Op[i + 312*t] ? R[B[i + 312*t]] * R[C[i + 312*t]] : R[B[i + 312*t]] + R[C[i + 312*t]];
R[i + 1377*t] = Op[i + 313*t] ? R[B[i + 313*t]] * R[C[i + 313*t]] : R[B[i + 313*t]] + R[C[i + 313*t]];
R[i + 1378*t] = Op[i + 314*t] ? R[B[i + 314*t]] * R[C[i + 314*t]] : R[B[i + 314*t]] + R[C[i + 314*t]];
R[i + 1379*t] = Op[i + 315*t] ? R[B[i + 315*t]] * R[C[i + 315*t]] : R[B[i + 315*t]] + R[C[i + 315*t]];
R[i + 1380*t] = Op[i + 316*t] ? R[B[i + 316*t]] * R[C[i + 316*t]] : R[B[i + 316*t]] + R[C[i + 316*t]];
R[i + 1381*t] = Op[i + 317*t] ? R[B[i + 317*t]] * R[C[i + 317*t]] : R[B[i + 317*t]] + R[C[i + 317*t]];
R[i + 1382*t] = Op[i + 318*t] ? R[B[i + 318*t]] * R[C[i + 318*t]] : R[B[i + 318*t]] + R[C[i + 318*t]];
R[i + 1383*t] = Op[i + 319*t] ? R[B[i + 319*t]] * R[C[i + 319*t]] : R[B[i + 319*t]] + R[C[i + 319*t]];
R[i + 1384*t] = Op[i + 320*t] ? R[B[i + 320*t]] * R[C[i + 320*t]] : R[B[i + 320*t]] + R[C[i + 320*t]];
R[i + 1385*t] = Op[i + 321*t] ? R[B[i + 321*t]] * R[C[i + 321*t]] : R[B[i + 321*t]] + R[C[i + 321*t]];
R[i + 1386*t] = Op[i + 322*t] ? R[B[i + 322*t]] * R[C[i + 322*t]] : R[B[i + 322*t]] + R[C[i + 322*t]];
R[i + 1387*t] = Op[i + 323*t] ? R[B[i + 323*t]] * R[C[i + 323*t]] : R[B[i + 323*t]] + R[C[i + 323*t]];
R[i + 1388*t] = Op[i + 324*t] ? R[B[i + 324*t]] * R[C[i + 324*t]] : R[B[i + 324*t]] + R[C[i + 324*t]];
R[i + 1389*t] = Op[i + 325*t] ? R[B[i + 325*t]] * R[C[i + 325*t]] : R[B[i + 325*t]] + R[C[i + 325*t]];
R[i + 1390*t] = Op[i + 326*t] ? R[B[i + 326*t]] * R[C[i + 326*t]] : R[B[i + 326*t]] + R[C[i + 326*t]];
R[i + 1391*t] = Op[i + 327*t] ? R[B[i + 327*t]] * R[C[i + 327*t]] : R[B[i + 327*t]] + R[C[i + 327*t]];
R[i + 1392*t] = Op[i + 328*t] ? R[B[i + 328*t]] * R[C[i + 328*t]] : R[B[i + 328*t]] + R[C[i + 328*t]];
R[i + 1393*t] = Op[i + 329*t] ? R[B[i + 329*t]] * R[C[i + 329*t]] : R[B[i + 329*t]] + R[C[i + 329*t]];
R[i + 1394*t] = Op[i + 330*t] ? R[B[i + 330*t]] * R[C[i + 330*t]] : R[B[i + 330*t]] + R[C[i + 330*t]];
R[i + 1395*t] = Op[i + 331*t] ? R[B[i + 331*t]] * R[C[i + 331*t]] : R[B[i + 331*t]] + R[C[i + 331*t]];
R[i + 1396*t] = Op[i + 332*t] ? R[B[i + 332*t]] * R[C[i + 332*t]] : R[B[i + 332*t]] + R[C[i + 332*t]];
R[i + 1397*t] = Op[i + 333*t] ? R[B[i + 333*t]] * R[C[i + 333*t]] : R[B[i + 333*t]] + R[C[i + 333*t]];
__syncthreads();
R[i + 1398*t] = Op[i + 334*t] ? R[B[i + 334*t]] * R[C[i + 334*t]] : R[B[i + 334*t]] + R[C[i + 334*t]];
R[i + 1399*t] = Op[i + 335*t] ? R[B[i + 335*t]] * R[C[i + 335*t]] : R[B[i + 335*t]] + R[C[i + 335*t]];
R[i + 1400*t] = Op[i + 336*t] ? R[B[i + 336*t]] * R[C[i + 336*t]] : R[B[i + 336*t]] + R[C[i + 336*t]];
R[i + 1401*t] = Op[i + 337*t] ? R[B[i + 337*t]] * R[C[i + 337*t]] : R[B[i + 337*t]] + R[C[i + 337*t]];
R[i + 1402*t] = Op[i + 338*t] ? R[B[i + 338*t]] * R[C[i + 338*t]] : R[B[i + 338*t]] + R[C[i + 338*t]];
R[i + 1403*t] = Op[i + 339*t] ? R[B[i + 339*t]] * R[C[i + 339*t]] : R[B[i + 339*t]] + R[C[i + 339*t]];
R[i + 1404*t] = Op[i + 340*t] ? R[B[i + 340*t]] * R[C[i + 340*t]] : R[B[i + 340*t]] + R[C[i + 340*t]];
R[i + 1405*t] = Op[i + 341*t] ? R[B[i + 341*t]] * R[C[i + 341*t]] : R[B[i + 341*t]] + R[C[i + 341*t]];
R[i + 1406*t] = Op[i + 342*t] ? R[B[i + 342*t]] * R[C[i + 342*t]] : R[B[i + 342*t]] + R[C[i + 342*t]];
R[i + 1407*t] = Op[i + 343*t] ? R[B[i + 343*t]] * R[C[i + 343*t]] : R[B[i + 343*t]] + R[C[i + 343*t]];
R[i + 1408*t] = Op[i + 344*t] ? R[B[i + 344*t]] * R[C[i + 344*t]] : R[B[i + 344*t]] + R[C[i + 344*t]];
R[i + 1409*t] = Op[i + 345*t] ? R[B[i + 345*t]] * R[C[i + 345*t]] : R[B[i + 345*t]] + R[C[i + 345*t]];
R[i + 1410*t] = Op[i + 346*t] ? R[B[i + 346*t]] * R[C[i + 346*t]] : R[B[i + 346*t]] + R[C[i + 346*t]];
R[i + 1411*t] = Op[i + 347*t] ? R[B[i + 347*t]] * R[C[i + 347*t]] : R[B[i + 347*t]] + R[C[i + 347*t]];
R[i + 1412*t] = Op[i + 348*t] ? R[B[i + 348*t]] * R[C[i + 348*t]] : R[B[i + 348*t]] + R[C[i + 348*t]];
R[i + 1413*t] = Op[i + 349*t] ? R[B[i + 349*t]] * R[C[i + 349*t]] : R[B[i + 349*t]] + R[C[i + 349*t]];
R[i + 1414*t] = Op[i + 350*t] ? R[B[i + 350*t]] * R[C[i + 350*t]] : R[B[i + 350*t]] + R[C[i + 350*t]];
R[i + 1415*t] = Op[i + 351*t] ? R[B[i + 351*t]] * R[C[i + 351*t]] : R[B[i + 351*t]] + R[C[i + 351*t]];
R[i + 1416*t] = Op[i + 352*t] ? R[B[i + 352*t]] * R[C[i + 352*t]] : R[B[i + 352*t]] + R[C[i + 352*t]];
R[i + 1417*t] = Op[i + 353*t] ? R[B[i + 353*t]] * R[C[i + 353*t]] : R[B[i + 353*t]] + R[C[i + 353*t]];
R[i + 1418*t] = Op[i + 354*t] ? R[B[i + 354*t]] * R[C[i + 354*t]] : R[B[i + 354*t]] + R[C[i + 354*t]];
R[i + 1419*t] = Op[i + 355*t] ? R[B[i + 355*t]] * R[C[i + 355*t]] : R[B[i + 355*t]] + R[C[i + 355*t]];
R[i + 1420*t] = Op[i + 356*t] ? R[B[i + 356*t]] * R[C[i + 356*t]] : R[B[i + 356*t]] + R[C[i + 356*t]];
R[i + 1421*t] = Op[i + 357*t] ? R[B[i + 357*t]] * R[C[i + 357*t]] : R[B[i + 357*t]] + R[C[i + 357*t]];
R[i + 1422*t] = Op[i + 358*t] ? R[B[i + 358*t]] * R[C[i + 358*t]] : R[B[i + 358*t]] + R[C[i + 358*t]];
R[i + 1423*t] = Op[i + 359*t] ? R[B[i + 359*t]] * R[C[i + 359*t]] : R[B[i + 359*t]] + R[C[i + 359*t]];
R[i + 1424*t] = Op[i + 360*t] ? R[B[i + 360*t]] * R[C[i + 360*t]] : R[B[i + 360*t]] + R[C[i + 360*t]];
R[i + 1425*t] = Op[i + 361*t] ? R[B[i + 361*t]] * R[C[i + 361*t]] : R[B[i + 361*t]] + R[C[i + 361*t]];
R[i + 1426*t] = Op[i + 362*t] ? R[B[i + 362*t]] * R[C[i + 362*t]] : R[B[i + 362*t]] + R[C[i + 362*t]];
R[i + 1427*t] = Op[i + 363*t] ? R[B[i + 363*t]] * R[C[i + 363*t]] : R[B[i + 363*t]] + R[C[i + 363*t]];
R[i + 1428*t] = Op[i + 364*t] ? R[B[i + 364*t]] * R[C[i + 364*t]] : R[B[i + 364*t]] + R[C[i + 364*t]];
R[i + 1429*t] = Op[i + 365*t] ? R[B[i + 365*t]] * R[C[i + 365*t]] : R[B[i + 365*t]] + R[C[i + 365*t]];
R[i + 1430*t] = Op[i + 366*t] ? R[B[i + 366*t]] * R[C[i + 366*t]] : R[B[i + 366*t]] + R[C[i + 366*t]];
R[i + 1431*t] = Op[i + 367*t] ? R[B[i + 367*t]] * R[C[i + 367*t]] : R[B[i + 367*t]] + R[C[i + 367*t]];
R[i + 1432*t] = Op[i + 368*t] ? R[B[i + 368*t]] * R[C[i + 368*t]] : R[B[i + 368*t]] + R[C[i + 368*t]];
R[i + 1433*t] = Op[i + 369*t] ? R[B[i + 369*t]] * R[C[i + 369*t]] : R[B[i + 369*t]] + R[C[i + 369*t]];
R[i + 1434*t] = Op[i + 370*t] ? R[B[i + 370*t]] * R[C[i + 370*t]] : R[B[i + 370*t]] + R[C[i + 370*t]];
R[i + 1435*t] = Op[i + 371*t] ? R[B[i + 371*t]] * R[C[i + 371*t]] : R[B[i + 371*t]] + R[C[i + 371*t]];
R[i + 1436*t] = Op[i + 372*t] ? R[B[i + 372*t]] * R[C[i + 372*t]] : R[B[i + 372*t]] + R[C[i + 372*t]];
R[i + 1437*t] = Op[i + 373*t] ? R[B[i + 373*t]] * R[C[i + 373*t]] : R[B[i + 373*t]] + R[C[i + 373*t]];
R[i + 1438*t] = Op[i + 374*t] ? R[B[i + 374*t]] * R[C[i + 374*t]] : R[B[i + 374*t]] + R[C[i + 374*t]];
R[i + 1439*t] = Op[i + 375*t] ? R[B[i + 375*t]] * R[C[i + 375*t]] : R[B[i + 375*t]] + R[C[i + 375*t]];
R[i + 1440*t] = Op[i + 376*t] ? R[B[i + 376*t]] * R[C[i + 376*t]] : R[B[i + 376*t]] + R[C[i + 376*t]];
R[i + 1441*t] = Op[i + 377*t] ? R[B[i + 377*t]] * R[C[i + 377*t]] : R[B[i + 377*t]] + R[C[i + 377*t]];
R[i + 1442*t] = Op[i + 378*t] ? R[B[i + 378*t]] * R[C[i + 378*t]] : R[B[i + 378*t]] + R[C[i + 378*t]];
R[i + 1443*t] = Op[i + 379*t] ? R[B[i + 379*t]] * R[C[i + 379*t]] : R[B[i + 379*t]] + R[C[i + 379*t]];
R[i + 1444*t] = Op[i + 380*t] ? R[B[i + 380*t]] * R[C[i + 380*t]] : R[B[i + 380*t]] + R[C[i + 380*t]];
R[i + 1445*t] = Op[i + 381*t] ? R[B[i + 381*t]] * R[C[i + 381*t]] : R[B[i + 381*t]] + R[C[i + 381*t]];
R[i + 1446*t] = Op[i + 382*t] ? R[B[i + 382*t]] * R[C[i + 382*t]] : R[B[i + 382*t]] + R[C[i + 382*t]];
R[i + 1447*t] = Op[i + 383*t] ? R[B[i + 383*t]] * R[C[i + 383*t]] : R[B[i + 383*t]] + R[C[i + 383*t]];
R[i + 1448*t] = Op[i + 384*t] ? R[B[i + 384*t]] * R[C[i + 384*t]] : R[B[i + 384*t]] + R[C[i + 384*t]];
R[i + 1449*t] = Op[i + 385*t] ? R[B[i + 385*t]] * R[C[i + 385*t]] : R[B[i + 385*t]] + R[C[i + 385*t]];
R[i + 1450*t] = Op[i + 386*t] ? R[B[i + 386*t]] * R[C[i + 386*t]] : R[B[i + 386*t]] + R[C[i + 386*t]];
R[i + 1451*t] = Op[i + 387*t] ? R[B[i + 387*t]] * R[C[i + 387*t]] : R[B[i + 387*t]] + R[C[i + 387*t]];
R[i + 1452*t] = Op[i + 388*t] ? R[B[i + 388*t]] * R[C[i + 388*t]] : R[B[i + 388*t]] + R[C[i + 388*t]];
R[i + 1453*t] = Op[i + 389*t] ? R[B[i + 389*t]] * R[C[i + 389*t]] : R[B[i + 389*t]] + R[C[i + 389*t]];
R[i + 1454*t] = Op[i + 390*t] ? R[B[i + 390*t]] * R[C[i + 390*t]] : R[B[i + 390*t]] + R[C[i + 390*t]];
R[i + 1455*t] = Op[i + 391*t] ? R[B[i + 391*t]] * R[C[i + 391*t]] : R[B[i + 391*t]] + R[C[i + 391*t]];
R[i + 1456*t] = Op[i + 392*t] ? R[B[i + 392*t]] * R[C[i + 392*t]] : R[B[i + 392*t]] + R[C[i + 392*t]];
R[i + 1457*t] = Op[i + 393*t] ? R[B[i + 393*t]] * R[C[i + 393*t]] : R[B[i + 393*t]] + R[C[i + 393*t]];
R[i + 1458*t] = Op[i + 394*t] ? R[B[i + 394*t]] * R[C[i + 394*t]] : R[B[i + 394*t]] + R[C[i + 394*t]];
R[i + 1459*t] = Op[i + 395*t] ? R[B[i + 395*t]] * R[C[i + 395*t]] : R[B[i + 395*t]] + R[C[i + 395*t]];
R[i + 1460*t] = Op[i + 396*t] ? R[B[i + 396*t]] * R[C[i + 396*t]] : R[B[i + 396*t]] + R[C[i + 396*t]];
R[i + 1461*t] = Op[i + 397*t] ? R[B[i + 397*t]] * R[C[i + 397*t]] : R[B[i + 397*t]] + R[C[i + 397*t]];
R[i + 1462*t] = Op[i + 398*t] ? R[B[i + 398*t]] * R[C[i + 398*t]] : R[B[i + 398*t]] + R[C[i + 398*t]];
R[i + 1463*t] = Op[i + 399*t] ? R[B[i + 399*t]] * R[C[i + 399*t]] : R[B[i + 399*t]] + R[C[i + 399*t]];
R[i + 1464*t] = Op[i + 400*t] ? R[B[i + 400*t]] * R[C[i + 400*t]] : R[B[i + 400*t]] + R[C[i + 400*t]];
R[i + 1465*t] = Op[i + 401*t] ? R[B[i + 401*t]] * R[C[i + 401*t]] : R[B[i + 401*t]] + R[C[i + 401*t]];
R[i + 1466*t] = Op[i + 402*t] ? R[B[i + 402*t]] * R[C[i + 402*t]] : R[B[i + 402*t]] + R[C[i + 402*t]];
R[i + 1467*t] = Op[i + 403*t] ? R[B[i + 403*t]] * R[C[i + 403*t]] : R[B[i + 403*t]] + R[C[i + 403*t]];
R[i + 1468*t] = Op[i + 404*t] ? R[B[i + 404*t]] * R[C[i + 404*t]] : R[B[i + 404*t]] + R[C[i + 404*t]];
R[i + 1469*t] = Op[i + 405*t] ? R[B[i + 405*t]] * R[C[i + 405*t]] : R[B[i + 405*t]] + R[C[i + 405*t]];
R[i + 1470*t] = Op[i + 406*t] ? R[B[i + 406*t]] * R[C[i + 406*t]] : R[B[i + 406*t]] + R[C[i + 406*t]];
R[i + 1471*t] = Op[i + 407*t] ? R[B[i + 407*t]] * R[C[i + 407*t]] : R[B[i + 407*t]] + R[C[i + 407*t]];
R[i + 1472*t] = Op[i + 408*t] ? R[B[i + 408*t]] * R[C[i + 408*t]] : R[B[i + 408*t]] + R[C[i + 408*t]];
R[i + 1473*t] = Op[i + 409*t] ? R[B[i + 409*t]] * R[C[i + 409*t]] : R[B[i + 409*t]] + R[C[i + 409*t]];
R[i + 1474*t] = Op[i + 410*t] ? R[B[i + 410*t]] * R[C[i + 410*t]] : R[B[i + 410*t]] + R[C[i + 410*t]];
R[i + 1475*t] = Op[i + 411*t] ? R[B[i + 411*t]] * R[C[i + 411*t]] : R[B[i + 411*t]] + R[C[i + 411*t]];
R[i + 1476*t] = Op[i + 412*t] ? R[B[i + 412*t]] * R[C[i + 412*t]] : R[B[i + 412*t]] + R[C[i + 412*t]];
R[i + 1477*t] = Op[i + 413*t] ? R[B[i + 413*t]] * R[C[i + 413*t]] : R[B[i + 413*t]] + R[C[i + 413*t]];
R[i + 1478*t] = Op[i + 414*t] ? R[B[i + 414*t]] * R[C[i + 414*t]] : R[B[i + 414*t]] + R[C[i + 414*t]];
R[i + 1479*t] = Op[i + 415*t] ? R[B[i + 415*t]] * R[C[i + 415*t]] : R[B[i + 415*t]] + R[C[i + 415*t]];
R[i + 1480*t] = Op[i + 416*t] ? R[B[i + 416*t]] * R[C[i + 416*t]] : R[B[i + 416*t]] + R[C[i + 416*t]];
R[i + 1481*t] = Op[i + 417*t] ? R[B[i + 417*t]] * R[C[i + 417*t]] : R[B[i + 417*t]] + R[C[i + 417*t]];
R[i + 1482*t] = Op[i + 418*t] ? R[B[i + 418*t]] * R[C[i + 418*t]] : R[B[i + 418*t]] + R[C[i + 418*t]];
R[i + 1483*t] = Op[i + 419*t] ? R[B[i + 419*t]] * R[C[i + 419*t]] : R[B[i + 419*t]] + R[C[i + 419*t]];
R[i + 1484*t] = Op[i + 420*t] ? R[B[i + 420*t]] * R[C[i + 420*t]] : R[B[i + 420*t]] + R[C[i + 420*t]];
R[i + 1485*t] = Op[i + 421*t] ? R[B[i + 421*t]] * R[C[i + 421*t]] : R[B[i + 421*t]] + R[C[i + 421*t]];
R[i + 1486*t] = Op[i + 422*t] ? R[B[i + 422*t]] * R[C[i + 422*t]] : R[B[i + 422*t]] + R[C[i + 422*t]];
R[i + 1487*t] = Op[i + 423*t] ? R[B[i + 423*t]] * R[C[i + 423*t]] : R[B[i + 423*t]] + R[C[i + 423*t]];
R[i + 1488*t] = Op[i + 424*t] ? R[B[i + 424*t]] * R[C[i + 424*t]] : R[B[i + 424*t]] + R[C[i + 424*t]];
R[i + 1489*t] = Op[i + 425*t] ? R[B[i + 425*t]] * R[C[i + 425*t]] : R[B[i + 425*t]] + R[C[i + 425*t]];
R[i + 1490*t] = Op[i + 426*t] ? R[B[i + 426*t]] * R[C[i + 426*t]] : R[B[i + 426*t]] + R[C[i + 426*t]];
R[i + 1491*t] = Op[i + 427*t] ? R[B[i + 427*t]] * R[C[i + 427*t]] : R[B[i + 427*t]] + R[C[i + 427*t]];
R[i + 1492*t] = Op[i + 428*t] ? R[B[i + 428*t]] * R[C[i + 428*t]] : R[B[i + 428*t]] + R[C[i + 428*t]];
R[i + 1493*t] = Op[i + 429*t] ? R[B[i + 429*t]] * R[C[i + 429*t]] : R[B[i + 429*t]] + R[C[i + 429*t]];
R[i + 1494*t] = Op[i + 430*t] ? R[B[i + 430*t]] * R[C[i + 430*t]] : R[B[i + 430*t]] + R[C[i + 430*t]];
R[i + 1495*t] = Op[i + 431*t] ? R[B[i + 431*t]] * R[C[i + 431*t]] : R[B[i + 431*t]] + R[C[i + 431*t]];
R[i + 1496*t] = Op[i + 432*t] ? R[B[i + 432*t]] * R[C[i + 432*t]] : R[B[i + 432*t]] + R[C[i + 432*t]];
R[i + 1497*t] = Op[i + 433*t] ? R[B[i + 433*t]] * R[C[i + 433*t]] : R[B[i + 433*t]] + R[C[i + 433*t]];
R[i + 1498*t] = Op[i + 434*t] ? R[B[i + 434*t]] * R[C[i + 434*t]] : R[B[i + 434*t]] + R[C[i + 434*t]];
R[i + 1499*t] = Op[i + 435*t] ? R[B[i + 435*t]] * R[C[i + 435*t]] : R[B[i + 435*t]] + R[C[i + 435*t]];
R[i + 1500*t] = Op[i + 436*t] ? R[B[i + 436*t]] * R[C[i + 436*t]] : R[B[i + 436*t]] + R[C[i + 436*t]];
R[i + 1501*t] = Op[i + 437*t] ? R[B[i + 437*t]] * R[C[i + 437*t]] : R[B[i + 437*t]] + R[C[i + 437*t]];
R[i + 1502*t] = Op[i + 438*t] ? R[B[i + 438*t]] * R[C[i + 438*t]] : R[B[i + 438*t]] + R[C[i + 438*t]];
R[i + 1503*t] = Op[i + 439*t] ? R[B[i + 439*t]] * R[C[i + 439*t]] : R[B[i + 439*t]] + R[C[i + 439*t]];
R[i + 1504*t] = Op[i + 440*t] ? R[B[i + 440*t]] * R[C[i + 440*t]] : R[B[i + 440*t]] + R[C[i + 440*t]];
R[i + 1505*t] = Op[i + 441*t] ? R[B[i + 441*t]] * R[C[i + 441*t]] : R[B[i + 441*t]] + R[C[i + 441*t]];
R[i + 1506*t] = Op[i + 442*t] ? R[B[i + 442*t]] * R[C[i + 442*t]] : R[B[i + 442*t]] + R[C[i + 442*t]];
R[i + 1507*t] = Op[i + 443*t] ? R[B[i + 443*t]] * R[C[i + 443*t]] : R[B[i + 443*t]] + R[C[i + 443*t]];
R[i + 1508*t] = Op[i + 444*t] ? R[B[i + 444*t]] * R[C[i + 444*t]] : R[B[i + 444*t]] + R[C[i + 444*t]];
R[i + 1509*t] = Op[i + 445*t] ? R[B[i + 445*t]] * R[C[i + 445*t]] : R[B[i + 445*t]] + R[C[i + 445*t]];
R[i + 1510*t] = Op[i + 446*t] ? R[B[i + 446*t]] * R[C[i + 446*t]] : R[B[i + 446*t]] + R[C[i + 446*t]];
R[i + 1511*t] = Op[i + 447*t] ? R[B[i + 447*t]] * R[C[i + 447*t]] : R[B[i + 447*t]] + R[C[i + 447*t]];
R[i + 1512*t] = Op[i + 448*t] ? R[B[i + 448*t]] * R[C[i + 448*t]] : R[B[i + 448*t]] + R[C[i + 448*t]];
R[i + 1513*t] = Op[i + 449*t] ? R[B[i + 449*t]] * R[C[i + 449*t]] : R[B[i + 449*t]] + R[C[i + 449*t]];
R[i + 1514*t] = Op[i + 450*t] ? R[B[i + 450*t]] * R[C[i + 450*t]] : R[B[i + 450*t]] + R[C[i + 450*t]];
R[i + 1515*t] = Op[i + 451*t] ? R[B[i + 451*t]] * R[C[i + 451*t]] : R[B[i + 451*t]] + R[C[i + 451*t]];
R[i + 1516*t] = Op[i + 452*t] ? R[B[i + 452*t]] * R[C[i + 452*t]] : R[B[i + 452*t]] + R[C[i + 452*t]];
R[i + 1517*t] = Op[i + 453*t] ? R[B[i + 453*t]] * R[C[i + 453*t]] : R[B[i + 453*t]] + R[C[i + 453*t]];
R[i + 1518*t] = Op[i + 454*t] ? R[B[i + 454*t]] * R[C[i + 454*t]] : R[B[i + 454*t]] + R[C[i + 454*t]];
R[i + 1519*t] = Op[i + 455*t] ? R[B[i + 455*t]] * R[C[i + 455*t]] : R[B[i + 455*t]] + R[C[i + 455*t]];
R[i + 1520*t] = Op[i + 456*t] ? R[B[i + 456*t]] * R[C[i + 456*t]] : R[B[i + 456*t]] + R[C[i + 456*t]];
R[i + 1521*t] = Op[i + 457*t] ? R[B[i + 457*t]] * R[C[i + 457*t]] : R[B[i + 457*t]] + R[C[i + 457*t]];
R[i + 1522*t] = Op[i + 458*t] ? R[B[i + 458*t]] * R[C[i + 458*t]] : R[B[i + 458*t]] + R[C[i + 458*t]];
R[i + 1523*t] = Op[i + 459*t] ? R[B[i + 459*t]] * R[C[i + 459*t]] : R[B[i + 459*t]] + R[C[i + 459*t]];
R[i + 1524*t] = Op[i + 460*t] ? R[B[i + 460*t]] * R[C[i + 460*t]] : R[B[i + 460*t]] + R[C[i + 460*t]];
R[i + 1525*t] = Op[i + 461*t] ? R[B[i + 461*t]] * R[C[i + 461*t]] : R[B[i + 461*t]] + R[C[i + 461*t]];
R[i + 1526*t] = Op[i + 462*t] ? R[B[i + 462*t]] * R[C[i + 462*t]] : R[B[i + 462*t]] + R[C[i + 462*t]];
R[i + 1527*t] = Op[i + 463*t] ? R[B[i + 463*t]] * R[C[i + 463*t]] : R[B[i + 463*t]] + R[C[i + 463*t]];
R[i + 1528*t] = Op[i + 464*t] ? R[B[i + 464*t]] * R[C[i + 464*t]] : R[B[i + 464*t]] + R[C[i + 464*t]];
R[i + 1529*t] = Op[i + 465*t] ? R[B[i + 465*t]] * R[C[i + 465*t]] : R[B[i + 465*t]] + R[C[i + 465*t]];
R[i + 1530*t] = Op[i + 466*t] ? R[B[i + 466*t]] * R[C[i + 466*t]] : R[B[i + 466*t]] + R[C[i + 466*t]];
R[i + 1531*t] = Op[i + 467*t] ? R[B[i + 467*t]] * R[C[i + 467*t]] : R[B[i + 467*t]] + R[C[i + 467*t]];
R[i + 1532*t] = Op[i + 468*t] ? R[B[i + 468*t]] * R[C[i + 468*t]] : R[B[i + 468*t]] + R[C[i + 468*t]];
R[i + 1533*t] = Op[i + 469*t] ? R[B[i + 469*t]] * R[C[i + 469*t]] : R[B[i + 469*t]] + R[C[i + 469*t]];
R[i + 1534*t] = Op[i + 470*t] ? R[B[i + 470*t]] * R[C[i + 470*t]] : R[B[i + 470*t]] + R[C[i + 470*t]];
R[i + 1535*t] = Op[i + 471*t] ? R[B[i + 471*t]] * R[C[i + 471*t]] : R[B[i + 471*t]] + R[C[i + 471*t]];
R[i + 1536*t] = Op[i + 472*t] ? R[B[i + 472*t]] * R[C[i + 472*t]] : R[B[i + 472*t]] + R[C[i + 472*t]];
R[i + 1537*t] = Op[i + 473*t] ? R[B[i + 473*t]] * R[C[i + 473*t]] : R[B[i + 473*t]] + R[C[i + 473*t]];
R[i + 1538*t] = Op[i + 474*t] ? R[B[i + 474*t]] * R[C[i + 474*t]] : R[B[i + 474*t]] + R[C[i + 474*t]];
R[i + 1539*t] = Op[i + 475*t] ? R[B[i + 475*t]] * R[C[i + 475*t]] : R[B[i + 475*t]] + R[C[i + 475*t]];
R[i + 1540*t] = Op[i + 476*t] ? R[B[i + 476*t]] * R[C[i + 476*t]] : R[B[i + 476*t]] + R[C[i + 476*t]];
R[i + 1541*t] = Op[i + 477*t] ? R[B[i + 477*t]] * R[C[i + 477*t]] : R[B[i + 477*t]] + R[C[i + 477*t]];
R[i + 1542*t] = Op[i + 478*t] ? R[B[i + 478*t]] * R[C[i + 478*t]] : R[B[i + 478*t]] + R[C[i + 478*t]];
R[i + 1543*t] = Op[i + 479*t] ? R[B[i + 479*t]] * R[C[i + 479*t]] : R[B[i + 479*t]] + R[C[i + 479*t]];
R[i + 1544*t] = Op[i + 480*t] ? R[B[i + 480*t]] * R[C[i + 480*t]] : R[B[i + 480*t]] + R[C[i + 480*t]];
R[i + 1545*t] = Op[i + 481*t] ? R[B[i + 481*t]] * R[C[i + 481*t]] : R[B[i + 481*t]] + R[C[i + 481*t]];
R[i + 1546*t] = Op[i + 482*t] ? R[B[i + 482*t]] * R[C[i + 482*t]] : R[B[i + 482*t]] + R[C[i + 482*t]];
R[i + 1547*t] = Op[i + 483*t] ? R[B[i + 483*t]] * R[C[i + 483*t]] : R[B[i + 483*t]] + R[C[i + 483*t]];
R[i + 1548*t] = Op[i + 484*t] ? R[B[i + 484*t]] * R[C[i + 484*t]] : R[B[i + 484*t]] + R[C[i + 484*t]];
R[i + 1549*t] = Op[i + 485*t] ? R[B[i + 485*t]] * R[C[i + 485*t]] : R[B[i + 485*t]] + R[C[i + 485*t]];
R[i + 1550*t] = Op[i + 486*t] ? R[B[i + 486*t]] * R[C[i + 486*t]] : R[B[i + 486*t]] + R[C[i + 486*t]];
R[i + 1551*t] = Op[i + 487*t] ? R[B[i + 487*t]] * R[C[i + 487*t]] : R[B[i + 487*t]] + R[C[i + 487*t]];
R[i + 1552*t] = Op[i + 488*t] ? R[B[i + 488*t]] * R[C[i + 488*t]] : R[B[i + 488*t]] + R[C[i + 488*t]];
R[i + 1553*t] = Op[i + 489*t] ? R[B[i + 489*t]] * R[C[i + 489*t]] : R[B[i + 489*t]] + R[C[i + 489*t]];
R[i + 1554*t] = Op[i + 490*t] ? R[B[i + 490*t]] * R[C[i + 490*t]] : R[B[i + 490*t]] + R[C[i + 490*t]];
R[i + 1555*t] = Op[i + 491*t] ? R[B[i + 491*t]] * R[C[i + 491*t]] : R[B[i + 491*t]] + R[C[i + 491*t]];
R[i + 1556*t] = Op[i + 492*t] ? R[B[i + 492*t]] * R[C[i + 492*t]] : R[B[i + 492*t]] + R[C[i + 492*t]];
R[i + 1557*t] = Op[i + 493*t] ? R[B[i + 493*t]] * R[C[i + 493*t]] : R[B[i + 493*t]] + R[C[i + 493*t]];
R[i + 1558*t] = Op[i + 494*t] ? R[B[i + 494*t]] * R[C[i + 494*t]] : R[B[i + 494*t]] + R[C[i + 494*t]];
R[i + 1559*t] = Op[i + 495*t] ? R[B[i + 495*t]] * R[C[i + 495*t]] : R[B[i + 495*t]] + R[C[i + 495*t]];
R[i + 1560*t] = Op[i + 496*t] ? R[B[i + 496*t]] * R[C[i + 496*t]] : R[B[i + 496*t]] + R[C[i + 496*t]];
R[i + 1561*t] = Op[i + 497*t] ? R[B[i + 497*t]] * R[C[i + 497*t]] : R[B[i + 497*t]] + R[C[i + 497*t]];
R[i + 1562*t] = Op[i + 498*t] ? R[B[i + 498*t]] * R[C[i + 498*t]] : R[B[i + 498*t]] + R[C[i + 498*t]];
R[i + 1563*t] = Op[i + 499*t] ? R[B[i + 499*t]] * R[C[i + 499*t]] : R[B[i + 499*t]] + R[C[i + 499*t]];
R[i + 1564*t] = Op[i + 500*t] ? R[B[i + 500*t]] * R[C[i + 500*t]] : R[B[i + 500*t]] + R[C[i + 500*t]];
R[i + 1565*t] = Op[i + 501*t] ? R[B[i + 501*t]] * R[C[i + 501*t]] : R[B[i + 501*t]] + R[C[i + 501*t]];
R[i + 1566*t] = Op[i + 502*t] ? R[B[i + 502*t]] * R[C[i + 502*t]] : R[B[i + 502*t]] + R[C[i + 502*t]];
R[i + 1567*t] = Op[i + 503*t] ? R[B[i + 503*t]] * R[C[i + 503*t]] : R[B[i + 503*t]] + R[C[i + 503*t]];
R[i + 1568*t] = Op[i + 504*t] ? R[B[i + 504*t]] * R[C[i + 504*t]] : R[B[i + 504*t]] + R[C[i + 504*t]];
R[i + 1569*t] = Op[i + 505*t] ? R[B[i + 505*t]] * R[C[i + 505*t]] : R[B[i + 505*t]] + R[C[i + 505*t]];
R[i + 1570*t] = Op[i + 506*t] ? R[B[i + 506*t]] * R[C[i + 506*t]] : R[B[i + 506*t]] + R[C[i + 506*t]];
R[i + 1571*t] = Op[i + 507*t] ? R[B[i + 507*t]] * R[C[i + 507*t]] : R[B[i + 507*t]] + R[C[i + 507*t]];
R[i + 1572*t] = Op[i + 508*t] ? R[B[i + 508*t]] * R[C[i + 508*t]] : R[B[i + 508*t]] + R[C[i + 508*t]];
R[i + 1573*t] = Op[i + 509*t] ? R[B[i + 509*t]] * R[C[i + 509*t]] : R[B[i + 509*t]] + R[C[i + 509*t]];
R[i + 1574*t] = Op[i + 510*t] ? R[B[i + 510*t]] * R[C[i + 510*t]] : R[B[i + 510*t]] + R[C[i + 510*t]];
R[i + 1575*t] = Op[i + 511*t] ? R[B[i + 511*t]] * R[C[i + 511*t]] : R[B[i + 511*t]] + R[C[i + 511*t]];
R[i + 1576*t] = Op[i + 512*t] ? R[B[i + 512*t]] * R[C[i + 512*t]] : R[B[i + 512*t]] + R[C[i + 512*t]];
R[i + 1577*t] = Op[i + 513*t] ? R[B[i + 513*t]] * R[C[i + 513*t]] : R[B[i + 513*t]] + R[C[i + 513*t]];
R[i + 1578*t] = Op[i + 514*t] ? R[B[i + 514*t]] * R[C[i + 514*t]] : R[B[i + 514*t]] + R[C[i + 514*t]];
R[i + 1579*t] = Op[i + 515*t] ? R[B[i + 515*t]] * R[C[i + 515*t]] : R[B[i + 515*t]] + R[C[i + 515*t]];
R[i + 1580*t] = Op[i + 516*t] ? R[B[i + 516*t]] * R[C[i + 516*t]] : R[B[i + 516*t]] + R[C[i + 516*t]];
R[i + 1581*t] = Op[i + 517*t] ? R[B[i + 517*t]] * R[C[i + 517*t]] : R[B[i + 517*t]] + R[C[i + 517*t]];
R[i + 1582*t] = Op[i + 518*t] ? R[B[i + 518*t]] * R[C[i + 518*t]] : R[B[i + 518*t]] + R[C[i + 518*t]];
__syncthreads();
R[i + 1583*t] = Op[i + 519*t] ? R[B[i + 519*t]] * R[C[i + 519*t]] : R[B[i + 519*t]] + R[C[i + 519*t]];
R[i + 1584*t] = Op[i + 520*t] ? R[B[i + 520*t]] * R[C[i + 520*t]] : R[B[i + 520*t]] + R[C[i + 520*t]];
R[i + 1585*t] = Op[i + 521*t] ? R[B[i + 521*t]] * R[C[i + 521*t]] : R[B[i + 521*t]] + R[C[i + 521*t]];
R[i + 1586*t] = Op[i + 522*t] ? R[B[i + 522*t]] * R[C[i + 522*t]] : R[B[i + 522*t]] + R[C[i + 522*t]];
R[i + 1587*t] = Op[i + 523*t] ? R[B[i + 523*t]] * R[C[i + 523*t]] : R[B[i + 523*t]] + R[C[i + 523*t]];
R[i + 1588*t] = Op[i + 524*t] ? R[B[i + 524*t]] * R[C[i + 524*t]] : R[B[i + 524*t]] + R[C[i + 524*t]];
R[i + 1589*t] = Op[i + 525*t] ? R[B[i + 525*t]] * R[C[i + 525*t]] : R[B[i + 525*t]] + R[C[i + 525*t]];
R[i + 1590*t] = Op[i + 526*t] ? R[B[i + 526*t]] * R[C[i + 526*t]] : R[B[i + 526*t]] + R[C[i + 526*t]];
R[i + 1591*t] = Op[i + 527*t] ? R[B[i + 527*t]] * R[C[i + 527*t]] : R[B[i + 527*t]] + R[C[i + 527*t]];
R[i + 1592*t] = Op[i + 528*t] ? R[B[i + 528*t]] * R[C[i + 528*t]] : R[B[i + 528*t]] + R[C[i + 528*t]];
R[i + 1593*t] = Op[i + 529*t] ? R[B[i + 529*t]] * R[C[i + 529*t]] : R[B[i + 529*t]] + R[C[i + 529*t]];
R[i + 1594*t] = Op[i + 530*t] ? R[B[i + 530*t]] * R[C[i + 530*t]] : R[B[i + 530*t]] + R[C[i + 530*t]];
R[i + 1595*t] = Op[i + 531*t] ? R[B[i + 531*t]] * R[C[i + 531*t]] : R[B[i + 531*t]] + R[C[i + 531*t]];
R[i + 1596*t] = Op[i + 532*t] ? R[B[i + 532*t]] * R[C[i + 532*t]] : R[B[i + 532*t]] + R[C[i + 532*t]];
R[i + 1597*t] = Op[i + 533*t] ? R[B[i + 533*t]] * R[C[i + 533*t]] : R[B[i + 533*t]] + R[C[i + 533*t]];
R[i + 1598*t] = Op[i + 534*t] ? R[B[i + 534*t]] * R[C[i + 534*t]] : R[B[i + 534*t]] + R[C[i + 534*t]];
R[i + 1599*t] = Op[i + 535*t] ? R[B[i + 535*t]] * R[C[i + 535*t]] : R[B[i + 535*t]] + R[C[i + 535*t]];
R[i + 1600*t] = Op[i + 536*t] ? R[B[i + 536*t]] * R[C[i + 536*t]] : R[B[i + 536*t]] + R[C[i + 536*t]];
R[i + 1601*t] = Op[i + 537*t] ? R[B[i + 537*t]] * R[C[i + 537*t]] : R[B[i + 537*t]] + R[C[i + 537*t]];
R[i + 1602*t] = Op[i + 538*t] ? R[B[i + 538*t]] * R[C[i + 538*t]] : R[B[i + 538*t]] + R[C[i + 538*t]];
R[i + 1603*t] = Op[i + 539*t] ? R[B[i + 539*t]] * R[C[i + 539*t]] : R[B[i + 539*t]] + R[C[i + 539*t]];
R[i + 1604*t] = Op[i + 540*t] ? R[B[i + 540*t]] * R[C[i + 540*t]] : R[B[i + 540*t]] + R[C[i + 540*t]];
R[i + 1605*t] = Op[i + 541*t] ? R[B[i + 541*t]] * R[C[i + 541*t]] : R[B[i + 541*t]] + R[C[i + 541*t]];
R[i + 1606*t] = Op[i + 542*t] ? R[B[i + 542*t]] * R[C[i + 542*t]] : R[B[i + 542*t]] + R[C[i + 542*t]];
R[i + 1607*t] = Op[i + 543*t] ? R[B[i + 543*t]] * R[C[i + 543*t]] : R[B[i + 543*t]] + R[C[i + 543*t]];
R[i + 1608*t] = Op[i + 544*t] ? R[B[i + 544*t]] * R[C[i + 544*t]] : R[B[i + 544*t]] + R[C[i + 544*t]];
R[i + 1609*t] = Op[i + 545*t] ? R[B[i + 545*t]] * R[C[i + 545*t]] : R[B[i + 545*t]] + R[C[i + 545*t]];
R[i + 1610*t] = Op[i + 546*t] ? R[B[i + 546*t]] * R[C[i + 546*t]] : R[B[i + 546*t]] + R[C[i + 546*t]];
R[i + 1611*t] = Op[i + 547*t] ? R[B[i + 547*t]] * R[C[i + 547*t]] : R[B[i + 547*t]] + R[C[i + 547*t]];
R[i + 1612*t] = Op[i + 548*t] ? R[B[i + 548*t]] * R[C[i + 548*t]] : R[B[i + 548*t]] + R[C[i + 548*t]];
R[i + 1613*t] = Op[i + 549*t] ? R[B[i + 549*t]] * R[C[i + 549*t]] : R[B[i + 549*t]] + R[C[i + 549*t]];
R[i + 1614*t] = Op[i + 550*t] ? R[B[i + 550*t]] * R[C[i + 550*t]] : R[B[i + 550*t]] + R[C[i + 550*t]];
R[i + 1615*t] = Op[i + 551*t] ? R[B[i + 551*t]] * R[C[i + 551*t]] : R[B[i + 551*t]] + R[C[i + 551*t]];
R[i + 1616*t] = Op[i + 552*t] ? R[B[i + 552*t]] * R[C[i + 552*t]] : R[B[i + 552*t]] + R[C[i + 552*t]];
R[i + 1617*t] = Op[i + 553*t] ? R[B[i + 553*t]] * R[C[i + 553*t]] : R[B[i + 553*t]] + R[C[i + 553*t]];
R[i + 1618*t] = Op[i + 554*t] ? R[B[i + 554*t]] * R[C[i + 554*t]] : R[B[i + 554*t]] + R[C[i + 554*t]];
R[i + 1619*t] = Op[i + 555*t] ? R[B[i + 555*t]] * R[C[i + 555*t]] : R[B[i + 555*t]] + R[C[i + 555*t]];
R[i + 1620*t] = Op[i + 556*t] ? R[B[i + 556*t]] * R[C[i + 556*t]] : R[B[i + 556*t]] + R[C[i + 556*t]];
R[i + 1621*t] = Op[i + 557*t] ? R[B[i + 557*t]] * R[C[i + 557*t]] : R[B[i + 557*t]] + R[C[i + 557*t]];
R[i + 1622*t] = Op[i + 558*t] ? R[B[i + 558*t]] * R[C[i + 558*t]] : R[B[i + 558*t]] + R[C[i + 558*t]];
R[i + 1623*t] = Op[i + 559*t] ? R[B[i + 559*t]] * R[C[i + 559*t]] : R[B[i + 559*t]] + R[C[i + 559*t]];
R[i + 1624*t] = Op[i + 560*t] ? R[B[i + 560*t]] * R[C[i + 560*t]] : R[B[i + 560*t]] + R[C[i + 560*t]];
R[i + 1625*t] = Op[i + 561*t] ? R[B[i + 561*t]] * R[C[i + 561*t]] : R[B[i + 561*t]] + R[C[i + 561*t]];
R[i + 1626*t] = Op[i + 562*t] ? R[B[i + 562*t]] * R[C[i + 562*t]] : R[B[i + 562*t]] + R[C[i + 562*t]];
R[i + 1627*t] = Op[i + 563*t] ? R[B[i + 563*t]] * R[C[i + 563*t]] : R[B[i + 563*t]] + R[C[i + 563*t]];
R[i + 1628*t] = Op[i + 564*t] ? R[B[i + 564*t]] * R[C[i + 564*t]] : R[B[i + 564*t]] + R[C[i + 564*t]];
R[i + 1629*t] = Op[i + 565*t] ? R[B[i + 565*t]] * R[C[i + 565*t]] : R[B[i + 565*t]] + R[C[i + 565*t]];
R[i + 1630*t] = Op[i + 566*t] ? R[B[i + 566*t]] * R[C[i + 566*t]] : R[B[i + 566*t]] + R[C[i + 566*t]];
R[i + 1631*t] = Op[i + 567*t] ? R[B[i + 567*t]] * R[C[i + 567*t]] : R[B[i + 567*t]] + R[C[i + 567*t]];
R[i + 1632*t] = Op[i + 568*t] ? R[B[i + 568*t]] * R[C[i + 568*t]] : R[B[i + 568*t]] + R[C[i + 568*t]];
R[i + 1633*t] = Op[i + 569*t] ? R[B[i + 569*t]] * R[C[i + 569*t]] : R[B[i + 569*t]] + R[C[i + 569*t]];
R[i + 1634*t] = Op[i + 570*t] ? R[B[i + 570*t]] * R[C[i + 570*t]] : R[B[i + 570*t]] + R[C[i + 570*t]];
R[i + 1635*t] = Op[i + 571*t] ? R[B[i + 571*t]] * R[C[i + 571*t]] : R[B[i + 571*t]] + R[C[i + 571*t]];
R[i + 1636*t] = Op[i + 572*t] ? R[B[i + 572*t]] * R[C[i + 572*t]] : R[B[i + 572*t]] + R[C[i + 572*t]];
R[i + 1637*t] = Op[i + 573*t] ? R[B[i + 573*t]] * R[C[i + 573*t]] : R[B[i + 573*t]] + R[C[i + 573*t]];
R[i + 1638*t] = Op[i + 574*t] ? R[B[i + 574*t]] * R[C[i + 574*t]] : R[B[i + 574*t]] + R[C[i + 574*t]];
R[i + 1639*t] = Op[i + 575*t] ? R[B[i + 575*t]] * R[C[i + 575*t]] : R[B[i + 575*t]] + R[C[i + 575*t]];
R[i + 1640*t] = Op[i + 576*t] ? R[B[i + 576*t]] * R[C[i + 576*t]] : R[B[i + 576*t]] + R[C[i + 576*t]];
R[i + 1641*t] = Op[i + 577*t] ? R[B[i + 577*t]] * R[C[i + 577*t]] : R[B[i + 577*t]] + R[C[i + 577*t]];
R[i + 1642*t] = Op[i + 578*t] ? R[B[i + 578*t]] * R[C[i + 578*t]] : R[B[i + 578*t]] + R[C[i + 578*t]];
R[i + 1643*t] = Op[i + 579*t] ? R[B[i + 579*t]] * R[C[i + 579*t]] : R[B[i + 579*t]] + R[C[i + 579*t]];
R[i + 1644*t] = Op[i + 580*t] ? R[B[i + 580*t]] * R[C[i + 580*t]] : R[B[i + 580*t]] + R[C[i + 580*t]];
R[i + 1645*t] = Op[i + 581*t] ? R[B[i + 581*t]] * R[C[i + 581*t]] : R[B[i + 581*t]] + R[C[i + 581*t]];
R[i + 1646*t] = Op[i + 582*t] ? R[B[i + 582*t]] * R[C[i + 582*t]] : R[B[i + 582*t]] + R[C[i + 582*t]];
R[i + 1647*t] = Op[i + 583*t] ? R[B[i + 583*t]] * R[C[i + 583*t]] : R[B[i + 583*t]] + R[C[i + 583*t]];
R[i + 1648*t] = Op[i + 584*t] ? R[B[i + 584*t]] * R[C[i + 584*t]] : R[B[i + 584*t]] + R[C[i + 584*t]];
R[i + 1649*t] = Op[i + 585*t] ? R[B[i + 585*t]] * R[C[i + 585*t]] : R[B[i + 585*t]] + R[C[i + 585*t]];
R[i + 1650*t] = Op[i + 586*t] ? R[B[i + 586*t]] * R[C[i + 586*t]] : R[B[i + 586*t]] + R[C[i + 586*t]];
R[i + 1651*t] = Op[i + 587*t] ? R[B[i + 587*t]] * R[C[i + 587*t]] : R[B[i + 587*t]] + R[C[i + 587*t]];
R[i + 1652*t] = Op[i + 588*t] ? R[B[i + 588*t]] * R[C[i + 588*t]] : R[B[i + 588*t]] + R[C[i + 588*t]];
R[i + 1653*t] = Op[i + 589*t] ? R[B[i + 589*t]] * R[C[i + 589*t]] : R[B[i + 589*t]] + R[C[i + 589*t]];
R[i + 1654*t] = Op[i + 590*t] ? R[B[i + 590*t]] * R[C[i + 590*t]] : R[B[i + 590*t]] + R[C[i + 590*t]];
R[i + 1655*t] = Op[i + 591*t] ? R[B[i + 591*t]] * R[C[i + 591*t]] : R[B[i + 591*t]] + R[C[i + 591*t]];
R[i + 1656*t] = Op[i + 592*t] ? R[B[i + 592*t]] * R[C[i + 592*t]] : R[B[i + 592*t]] + R[C[i + 592*t]];
R[i + 1657*t] = Op[i + 593*t] ? R[B[i + 593*t]] * R[C[i + 593*t]] : R[B[i + 593*t]] + R[C[i + 593*t]];
R[i + 1658*t] = Op[i + 594*t] ? R[B[i + 594*t]] * R[C[i + 594*t]] : R[B[i + 594*t]] + R[C[i + 594*t]];
R[i + 1659*t] = Op[i + 595*t] ? R[B[i + 595*t]] * R[C[i + 595*t]] : R[B[i + 595*t]] + R[C[i + 595*t]];
R[i + 1660*t] = Op[i + 596*t] ? R[B[i + 596*t]] * R[C[i + 596*t]] : R[B[i + 596*t]] + R[C[i + 596*t]];
R[i + 1661*t] = Op[i + 597*t] ? R[B[i + 597*t]] * R[C[i + 597*t]] : R[B[i + 597*t]] + R[C[i + 597*t]];
R[i + 1662*t] = Op[i + 598*t] ? R[B[i + 598*t]] * R[C[i + 598*t]] : R[B[i + 598*t]] + R[C[i + 598*t]];
R[i + 1663*t] = Op[i + 599*t] ? R[B[i + 599*t]] * R[C[i + 599*t]] : R[B[i + 599*t]] + R[C[i + 599*t]];
R[i + 1664*t] = Op[i + 600*t] ? R[B[i + 600*t]] * R[C[i + 600*t]] : R[B[i + 600*t]] + R[C[i + 600*t]];
R[i + 1665*t] = Op[i + 601*t] ? R[B[i + 601*t]] * R[C[i + 601*t]] : R[B[i + 601*t]] + R[C[i + 601*t]];
R[i + 1666*t] = Op[i + 602*t] ? R[B[i + 602*t]] * R[C[i + 602*t]] : R[B[i + 602*t]] + R[C[i + 602*t]];
R[i + 1667*t] = Op[i + 603*t] ? R[B[i + 603*t]] * R[C[i + 603*t]] : R[B[i + 603*t]] + R[C[i + 603*t]];
R[i + 1668*t] = Op[i + 604*t] ? R[B[i + 604*t]] * R[C[i + 604*t]] : R[B[i + 604*t]] + R[C[i + 604*t]];
R[i + 1669*t] = Op[i + 605*t] ? R[B[i + 605*t]] * R[C[i + 605*t]] : R[B[i + 605*t]] + R[C[i + 605*t]];
R[i + 1670*t] = Op[i + 606*t] ? R[B[i + 606*t]] * R[C[i + 606*t]] : R[B[i + 606*t]] + R[C[i + 606*t]];
R[i + 1671*t] = Op[i + 607*t] ? R[B[i + 607*t]] * R[C[i + 607*t]] : R[B[i + 607*t]] + R[C[i + 607*t]];
R[i + 1672*t] = Op[i + 608*t] ? R[B[i + 608*t]] * R[C[i + 608*t]] : R[B[i + 608*t]] + R[C[i + 608*t]];
R[i + 1673*t] = Op[i + 609*t] ? R[B[i + 609*t]] * R[C[i + 609*t]] : R[B[i + 609*t]] + R[C[i + 609*t]];
R[i + 1674*t] = Op[i + 610*t] ? R[B[i + 610*t]] * R[C[i + 610*t]] : R[B[i + 610*t]] + R[C[i + 610*t]];
R[i + 1675*t] = Op[i + 611*t] ? R[B[i + 611*t]] * R[C[i + 611*t]] : R[B[i + 611*t]] + R[C[i + 611*t]];
R[i + 1676*t] = Op[i + 612*t] ? R[B[i + 612*t]] * R[C[i + 612*t]] : R[B[i + 612*t]] + R[C[i + 612*t]];
R[i + 1677*t] = Op[i + 613*t] ? R[B[i + 613*t]] * R[C[i + 613*t]] : R[B[i + 613*t]] + R[C[i + 613*t]];
R[i + 1678*t] = Op[i + 614*t] ? R[B[i + 614*t]] * R[C[i + 614*t]] : R[B[i + 614*t]] + R[C[i + 614*t]];
R[i + 1679*t] = Op[i + 615*t] ? R[B[i + 615*t]] * R[C[i + 615*t]] : R[B[i + 615*t]] + R[C[i + 615*t]];
R[i + 1680*t] = Op[i + 616*t] ? R[B[i + 616*t]] * R[C[i + 616*t]] : R[B[i + 616*t]] + R[C[i + 616*t]];
R[i + 1681*t] = Op[i + 617*t] ? R[B[i + 617*t]] * R[C[i + 617*t]] : R[B[i + 617*t]] + R[C[i + 617*t]];
R[i + 1682*t] = Op[i + 618*t] ? R[B[i + 618*t]] * R[C[i + 618*t]] : R[B[i + 618*t]] + R[C[i + 618*t]];
R[i + 1683*t] = Op[i + 619*t] ? R[B[i + 619*t]] * R[C[i + 619*t]] : R[B[i + 619*t]] + R[C[i + 619*t]];
R[i + 1684*t] = Op[i + 620*t] ? R[B[i + 620*t]] * R[C[i + 620*t]] : R[B[i + 620*t]] + R[C[i + 620*t]];
R[i + 1685*t] = Op[i + 621*t] ? R[B[i + 621*t]] * R[C[i + 621*t]] : R[B[i + 621*t]] + R[C[i + 621*t]];
R[i + 1686*t] = Op[i + 622*t] ? R[B[i + 622*t]] * R[C[i + 622*t]] : R[B[i + 622*t]] + R[C[i + 622*t]];
R[i + 1687*t] = Op[i + 623*t] ? R[B[i + 623*t]] * R[C[i + 623*t]] : R[B[i + 623*t]] + R[C[i + 623*t]];
R[i + 1688*t] = Op[i + 624*t] ? R[B[i + 624*t]] * R[C[i + 624*t]] : R[B[i + 624*t]] + R[C[i + 624*t]];
R[i + 1689*t] = Op[i + 625*t] ? R[B[i + 625*t]] * R[C[i + 625*t]] : R[B[i + 625*t]] + R[C[i + 625*t]];
R[i + 1690*t] = Op[i + 626*t] ? R[B[i + 626*t]] * R[C[i + 626*t]] : R[B[i + 626*t]] + R[C[i + 626*t]];
R[i + 1691*t] = Op[i + 627*t] ? R[B[i + 627*t]] * R[C[i + 627*t]] : R[B[i + 627*t]] + R[C[i + 627*t]];
R[i + 1692*t] = Op[i + 628*t] ? R[B[i + 628*t]] * R[C[i + 628*t]] : R[B[i + 628*t]] + R[C[i + 628*t]];
R[i + 1693*t] = Op[i + 629*t] ? R[B[i + 629*t]] * R[C[i + 629*t]] : R[B[i + 629*t]] + R[C[i + 629*t]];
R[i + 1694*t] = Op[i + 630*t] ? R[B[i + 630*t]] * R[C[i + 630*t]] : R[B[i + 630*t]] + R[C[i + 630*t]];
R[i + 1695*t] = Op[i + 631*t] ? R[B[i + 631*t]] * R[C[i + 631*t]] : R[B[i + 631*t]] + R[C[i + 631*t]];
R[i + 1696*t] = Op[i + 632*t] ? R[B[i + 632*t]] * R[C[i + 632*t]] : R[B[i + 632*t]] + R[C[i + 632*t]];
R[i + 1697*t] = Op[i + 633*t] ? R[B[i + 633*t]] * R[C[i + 633*t]] : R[B[i + 633*t]] + R[C[i + 633*t]];
R[i + 1698*t] = Op[i + 634*t] ? R[B[i + 634*t]] * R[C[i + 634*t]] : R[B[i + 634*t]] + R[C[i + 634*t]];
R[i + 1699*t] = Op[i + 635*t] ? R[B[i + 635*t]] * R[C[i + 635*t]] : R[B[i + 635*t]] + R[C[i + 635*t]];
R[i + 1700*t] = Op[i + 636*t] ? R[B[i + 636*t]] * R[C[i + 636*t]] : R[B[i + 636*t]] + R[C[i + 636*t]];
R[i + 1701*t] = Op[i + 637*t] ? R[B[i + 637*t]] * R[C[i + 637*t]] : R[B[i + 637*t]] + R[C[i + 637*t]];
R[i + 1702*t] = Op[i + 638*t] ? R[B[i + 638*t]] * R[C[i + 638*t]] : R[B[i + 638*t]] + R[C[i + 638*t]];
R[i + 1703*t] = Op[i + 639*t] ? R[B[i + 639*t]] * R[C[i + 639*t]] : R[B[i + 639*t]] + R[C[i + 639*t]];
R[i + 1704*t] = Op[i + 640*t] ? R[B[i + 640*t]] * R[C[i + 640*t]] : R[B[i + 640*t]] + R[C[i + 640*t]];
R[i + 1705*t] = Op[i + 641*t] ? R[B[i + 641*t]] * R[C[i + 641*t]] : R[B[i + 641*t]] + R[C[i + 641*t]];
R[i + 1706*t] = Op[i + 642*t] ? R[B[i + 642*t]] * R[C[i + 642*t]] : R[B[i + 642*t]] + R[C[i + 642*t]];
R[i + 1707*t] = Op[i + 643*t] ? R[B[i + 643*t]] * R[C[i + 643*t]] : R[B[i + 643*t]] + R[C[i + 643*t]];
R[i + 1708*t] = Op[i + 644*t] ? R[B[i + 644*t]] * R[C[i + 644*t]] : R[B[i + 644*t]] + R[C[i + 644*t]];
R[i + 1709*t] = Op[i + 645*t] ? R[B[i + 645*t]] * R[C[i + 645*t]] : R[B[i + 645*t]] + R[C[i + 645*t]];
R[i + 1710*t] = Op[i + 646*t] ? R[B[i + 646*t]] * R[C[i + 646*t]] : R[B[i + 646*t]] + R[C[i + 646*t]];
R[i + 1711*t] = Op[i + 647*t] ? R[B[i + 647*t]] * R[C[i + 647*t]] : R[B[i + 647*t]] + R[C[i + 647*t]];
R[i + 1712*t] = Op[i + 648*t] ? R[B[i + 648*t]] * R[C[i + 648*t]] : R[B[i + 648*t]] + R[C[i + 648*t]];
R[i + 1713*t] = Op[i + 649*t] ? R[B[i + 649*t]] * R[C[i + 649*t]] : R[B[i + 649*t]] + R[C[i + 649*t]];
R[i + 1714*t] = Op[i + 650*t] ? R[B[i + 650*t]] * R[C[i + 650*t]] : R[B[i + 650*t]] + R[C[i + 650*t]];
R[i + 1715*t] = Op[i + 651*t] ? R[B[i + 651*t]] * R[C[i + 651*t]] : R[B[i + 651*t]] + R[C[i + 651*t]];
R[i + 1716*t] = Op[i + 652*t] ? R[B[i + 652*t]] * R[C[i + 652*t]] : R[B[i + 652*t]] + R[C[i + 652*t]];
R[i + 1717*t] = Op[i + 653*t] ? R[B[i + 653*t]] * R[C[i + 653*t]] : R[B[i + 653*t]] + R[C[i + 653*t]];
R[i + 1718*t] = Op[i + 654*t] ? R[B[i + 654*t]] * R[C[i + 654*t]] : R[B[i + 654*t]] + R[C[i + 654*t]];
R[i + 1719*t] = Op[i + 655*t] ? R[B[i + 655*t]] * R[C[i + 655*t]] : R[B[i + 655*t]] + R[C[i + 655*t]];
R[i + 1720*t] = Op[i + 656*t] ? R[B[i + 656*t]] * R[C[i + 656*t]] : R[B[i + 656*t]] + R[C[i + 656*t]];
R[i + 1721*t] = Op[i + 657*t] ? R[B[i + 657*t]] * R[C[i + 657*t]] : R[B[i + 657*t]] + R[C[i + 657*t]];
R[i + 1722*t] = Op[i + 658*t] ? R[B[i + 658*t]] * R[C[i + 658*t]] : R[B[i + 658*t]] + R[C[i + 658*t]];
R[i + 1723*t] = Op[i + 659*t] ? R[B[i + 659*t]] * R[C[i + 659*t]] : R[B[i + 659*t]] + R[C[i + 659*t]];
R[i + 1724*t] = Op[i + 660*t] ? R[B[i + 660*t]] * R[C[i + 660*t]] : R[B[i + 660*t]] + R[C[i + 660*t]];
R[i + 1725*t] = Op[i + 661*t] ? R[B[i + 661*t]] * R[C[i + 661*t]] : R[B[i + 661*t]] + R[C[i + 661*t]];
R[i + 1726*t] = Op[i + 662*t] ? R[B[i + 662*t]] * R[C[i + 662*t]] : R[B[i + 662*t]] + R[C[i + 662*t]];
R[i + 1727*t] = Op[i + 663*t] ? R[B[i + 663*t]] * R[C[i + 663*t]] : R[B[i + 663*t]] + R[C[i + 663*t]];
R[i + 1728*t] = Op[i + 664*t] ? R[B[i + 664*t]] * R[C[i + 664*t]] : R[B[i + 664*t]] + R[C[i + 664*t]];
R[i + 1729*t] = Op[i + 665*t] ? R[B[i + 665*t]] * R[C[i + 665*t]] : R[B[i + 665*t]] + R[C[i + 665*t]];
R[i + 1730*t] = Op[i + 666*t] ? R[B[i + 666*t]] * R[C[i + 666*t]] : R[B[i + 666*t]] + R[C[i + 666*t]];
R[i + 1731*t] = Op[i + 667*t] ? R[B[i + 667*t]] * R[C[i + 667*t]] : R[B[i + 667*t]] + R[C[i + 667*t]];
R[i + 1732*t] = Op[i + 668*t] ? R[B[i + 668*t]] * R[C[i + 668*t]] : R[B[i + 668*t]] + R[C[i + 668*t]];
R[i + 1733*t] = Op[i + 669*t] ? R[B[i + 669*t]] * R[C[i + 669*t]] : R[B[i + 669*t]] + R[C[i + 669*t]];
R[i + 1734*t] = Op[i + 670*t] ? R[B[i + 670*t]] * R[C[i + 670*t]] : R[B[i + 670*t]] + R[C[i + 670*t]];
R[i + 1735*t] = Op[i + 671*t] ? R[B[i + 671*t]] * R[C[i + 671*t]] : R[B[i + 671*t]] + R[C[i + 671*t]];
R[i + 1736*t] = Op[i + 672*t] ? R[B[i + 672*t]] * R[C[i + 672*t]] : R[B[i + 672*t]] + R[C[i + 672*t]];
R[i + 1737*t] = Op[i + 673*t] ? R[B[i + 673*t]] * R[C[i + 673*t]] : R[B[i + 673*t]] + R[C[i + 673*t]];
R[i + 1738*t] = Op[i + 674*t] ? R[B[i + 674*t]] * R[C[i + 674*t]] : R[B[i + 674*t]] + R[C[i + 674*t]];
R[i + 1739*t] = Op[i + 675*t] ? R[B[i + 675*t]] * R[C[i + 675*t]] : R[B[i + 675*t]] + R[C[i + 675*t]];
R[i + 1740*t] = Op[i + 676*t] ? R[B[i + 676*t]] * R[C[i + 676*t]] : R[B[i + 676*t]] + R[C[i + 676*t]];
R[i + 1741*t] = Op[i + 677*t] ? R[B[i + 677*t]] * R[C[i + 677*t]] : R[B[i + 677*t]] + R[C[i + 677*t]];
R[i + 1742*t] = Op[i + 678*t] ? R[B[i + 678*t]] * R[C[i + 678*t]] : R[B[i + 678*t]] + R[C[i + 678*t]];
R[i + 1743*t] = Op[i + 679*t] ? R[B[i + 679*t]] * R[C[i + 679*t]] : R[B[i + 679*t]] + R[C[i + 679*t]];
R[i + 1744*t] = Op[i + 680*t] ? R[B[i + 680*t]] * R[C[i + 680*t]] : R[B[i + 680*t]] + R[C[i + 680*t]];
R[i + 1745*t] = Op[i + 681*t] ? R[B[i + 681*t]] * R[C[i + 681*t]] : R[B[i + 681*t]] + R[C[i + 681*t]];
R[i + 1746*t] = Op[i + 682*t] ? R[B[i + 682*t]] * R[C[i + 682*t]] : R[B[i + 682*t]] + R[C[i + 682*t]];
R[i + 1747*t] = Op[i + 683*t] ? R[B[i + 683*t]] * R[C[i + 683*t]] : R[B[i + 683*t]] + R[C[i + 683*t]];
R[i + 1748*t] = Op[i + 684*t] ? R[B[i + 684*t]] * R[C[i + 684*t]] : R[B[i + 684*t]] + R[C[i + 684*t]];
R[i + 1749*t] = Op[i + 685*t] ? R[B[i + 685*t]] * R[C[i + 685*t]] : R[B[i + 685*t]] + R[C[i + 685*t]];
R[i + 1750*t] = Op[i + 686*t] ? R[B[i + 686*t]] * R[C[i + 686*t]] : R[B[i + 686*t]] + R[C[i + 686*t]];
R[i + 1751*t] = Op[i + 687*t] ? R[B[i + 687*t]] * R[C[i + 687*t]] : R[B[i + 687*t]] + R[C[i + 687*t]];
R[i + 1752*t] = Op[i + 688*t] ? R[B[i + 688*t]] * R[C[i + 688*t]] : R[B[i + 688*t]] + R[C[i + 688*t]];
R[i + 1753*t] = Op[i + 689*t] ? R[B[i + 689*t]] * R[C[i + 689*t]] : R[B[i + 689*t]] + R[C[i + 689*t]];
R[i + 1754*t] = Op[i + 690*t] ? R[B[i + 690*t]] * R[C[i + 690*t]] : R[B[i + 690*t]] + R[C[i + 690*t]];
R[i + 1755*t] = Op[i + 691*t] ? R[B[i + 691*t]] * R[C[i + 691*t]] : R[B[i + 691*t]] + R[C[i + 691*t]];
R[i + 1756*t] = Op[i + 692*t] ? R[B[i + 692*t]] * R[C[i + 692*t]] : R[B[i + 692*t]] + R[C[i + 692*t]];
R[i + 1757*t] = Op[i + 693*t] ? R[B[i + 693*t]] * R[C[i + 693*t]] : R[B[i + 693*t]] + R[C[i + 693*t]];
R[i + 1758*t] = Op[i + 694*t] ? R[B[i + 694*t]] * R[C[i + 694*t]] : R[B[i + 694*t]] + R[C[i + 694*t]];
R[i + 1759*t] = Op[i + 695*t] ? R[B[i + 695*t]] * R[C[i + 695*t]] : R[B[i + 695*t]] + R[C[i + 695*t]];
R[i + 1760*t] = Op[i + 696*t] ? R[B[i + 696*t]] * R[C[i + 696*t]] : R[B[i + 696*t]] + R[C[i + 696*t]];
R[i + 1761*t] = Op[i + 697*t] ? R[B[i + 697*t]] * R[C[i + 697*t]] : R[B[i + 697*t]] + R[C[i + 697*t]];
R[i + 1762*t] = Op[i + 698*t] ? R[B[i + 698*t]] * R[C[i + 698*t]] : R[B[i + 698*t]] + R[C[i + 698*t]];
R[i + 1763*t] = Op[i + 699*t] ? R[B[i + 699*t]] * R[C[i + 699*t]] : R[B[i + 699*t]] + R[C[i + 699*t]];
R[i + 1764*t] = Op[i + 700*t] ? R[B[i + 700*t]] * R[C[i + 700*t]] : R[B[i + 700*t]] + R[C[i + 700*t]];
R[i + 1765*t] = Op[i + 701*t] ? R[B[i + 701*t]] * R[C[i + 701*t]] : R[B[i + 701*t]] + R[C[i + 701*t]];
R[i + 1766*t] = Op[i + 702*t] ? R[B[i + 702*t]] * R[C[i + 702*t]] : R[B[i + 702*t]] + R[C[i + 702*t]];
R[i + 1767*t] = Op[i + 703*t] ? R[B[i + 703*t]] * R[C[i + 703*t]] : R[B[i + 703*t]] + R[C[i + 703*t]];
R[i + 1768*t] = Op[i + 704*t] ? R[B[i + 704*t]] * R[C[i + 704*t]] : R[B[i + 704*t]] + R[C[i + 704*t]];
R[i + 1769*t] = Op[i + 705*t] ? R[B[i + 705*t]] * R[C[i + 705*t]] : R[B[i + 705*t]] + R[C[i + 705*t]];
R[i + 1770*t] = Op[i + 706*t] ? R[B[i + 706*t]] * R[C[i + 706*t]] : R[B[i + 706*t]] + R[C[i + 706*t]];
R[i + 1771*t] = Op[i + 707*t] ? R[B[i + 707*t]] * R[C[i + 707*t]] : R[B[i + 707*t]] + R[C[i + 707*t]];
__syncthreads();
R[i + 1772*t] = Op[i + 708*t] ? R[B[i + 708*t]] * R[C[i + 708*t]] : R[B[i + 708*t]] + R[C[i + 708*t]];
R[i + 1773*t] = Op[i + 709*t] ? R[B[i + 709*t]] * R[C[i + 709*t]] : R[B[i + 709*t]] + R[C[i + 709*t]];
R[i + 1774*t] = Op[i + 710*t] ? R[B[i + 710*t]] * R[C[i + 710*t]] : R[B[i + 710*t]] + R[C[i + 710*t]];
R[i + 1775*t] = Op[i + 711*t] ? R[B[i + 711*t]] * R[C[i + 711*t]] : R[B[i + 711*t]] + R[C[i + 711*t]];
R[i + 1776*t] = Op[i + 712*t] ? R[B[i + 712*t]] * R[C[i + 712*t]] : R[B[i + 712*t]] + R[C[i + 712*t]];
R[i + 1777*t] = Op[i + 713*t] ? R[B[i + 713*t]] * R[C[i + 713*t]] : R[B[i + 713*t]] + R[C[i + 713*t]];
R[i + 1778*t] = Op[i + 714*t] ? R[B[i + 714*t]] * R[C[i + 714*t]] : R[B[i + 714*t]] + R[C[i + 714*t]];
R[i + 1779*t] = Op[i + 715*t] ? R[B[i + 715*t]] * R[C[i + 715*t]] : R[B[i + 715*t]] + R[C[i + 715*t]];
R[i + 1780*t] = Op[i + 716*t] ? R[B[i + 716*t]] * R[C[i + 716*t]] : R[B[i + 716*t]] + R[C[i + 716*t]];
R[i + 1781*t] = Op[i + 717*t] ? R[B[i + 717*t]] * R[C[i + 717*t]] : R[B[i + 717*t]] + R[C[i + 717*t]];
R[i + 1782*t] = Op[i + 718*t] ? R[B[i + 718*t]] * R[C[i + 718*t]] : R[B[i + 718*t]] + R[C[i + 718*t]];
R[i + 1783*t] = Op[i + 719*t] ? R[B[i + 719*t]] * R[C[i + 719*t]] : R[B[i + 719*t]] + R[C[i + 719*t]];
R[i + 1784*t] = Op[i + 720*t] ? R[B[i + 720*t]] * R[C[i + 720*t]] : R[B[i + 720*t]] + R[C[i + 720*t]];
R[i + 1785*t] = Op[i + 721*t] ? R[B[i + 721*t]] * R[C[i + 721*t]] : R[B[i + 721*t]] + R[C[i + 721*t]];
R[i + 1786*t] = Op[i + 722*t] ? R[B[i + 722*t]] * R[C[i + 722*t]] : R[B[i + 722*t]] + R[C[i + 722*t]];
R[i + 1787*t] = Op[i + 723*t] ? R[B[i + 723*t]] * R[C[i + 723*t]] : R[B[i + 723*t]] + R[C[i + 723*t]];
R[i + 1788*t] = Op[i + 724*t] ? R[B[i + 724*t]] * R[C[i + 724*t]] : R[B[i + 724*t]] + R[C[i + 724*t]];
R[i + 1789*t] = Op[i + 725*t] ? R[B[i + 725*t]] * R[C[i + 725*t]] : R[B[i + 725*t]] + R[C[i + 725*t]];
R[i + 1790*t] = Op[i + 726*t] ? R[B[i + 726*t]] * R[C[i + 726*t]] : R[B[i + 726*t]] + R[C[i + 726*t]];
R[i + 1791*t] = Op[i + 727*t] ? R[B[i + 727*t]] * R[C[i + 727*t]] : R[B[i + 727*t]] + R[C[i + 727*t]];
R[i + 1792*t] = Op[i + 728*t] ? R[B[i + 728*t]] * R[C[i + 728*t]] : R[B[i + 728*t]] + R[C[i + 728*t]];
R[i + 1793*t] = Op[i + 729*t] ? R[B[i + 729*t]] * R[C[i + 729*t]] : R[B[i + 729*t]] + R[C[i + 729*t]];
R[i + 1794*t] = Op[i + 730*t] ? R[B[i + 730*t]] * R[C[i + 730*t]] : R[B[i + 730*t]] + R[C[i + 730*t]];
R[i + 1795*t] = Op[i + 731*t] ? R[B[i + 731*t]] * R[C[i + 731*t]] : R[B[i + 731*t]] + R[C[i + 731*t]];
R[i + 1796*t] = Op[i + 732*t] ? R[B[i + 732*t]] * R[C[i + 732*t]] : R[B[i + 732*t]] + R[C[i + 732*t]];
R[i + 1797*t] = Op[i + 733*t] ? R[B[i + 733*t]] * R[C[i + 733*t]] : R[B[i + 733*t]] + R[C[i + 733*t]];
R[i + 1798*t] = Op[i + 734*t] ? R[B[i + 734*t]] * R[C[i + 734*t]] : R[B[i + 734*t]] + R[C[i + 734*t]];
R[i + 1799*t] = Op[i + 735*t] ? R[B[i + 735*t]] * R[C[i + 735*t]] : R[B[i + 735*t]] + R[C[i + 735*t]];
R[i + 1800*t] = Op[i + 736*t] ? R[B[i + 736*t]] * R[C[i + 736*t]] : R[B[i + 736*t]] + R[C[i + 736*t]];
R[i + 1801*t] = Op[i + 737*t] ? R[B[i + 737*t]] * R[C[i + 737*t]] : R[B[i + 737*t]] + R[C[i + 737*t]];
R[i + 1802*t] = Op[i + 738*t] ? R[B[i + 738*t]] * R[C[i + 738*t]] : R[B[i + 738*t]] + R[C[i + 738*t]];
R[i + 1803*t] = Op[i + 739*t] ? R[B[i + 739*t]] * R[C[i + 739*t]] : R[B[i + 739*t]] + R[C[i + 739*t]];
R[i + 1804*t] = Op[i + 740*t] ? R[B[i + 740*t]] * R[C[i + 740*t]] : R[B[i + 740*t]] + R[C[i + 740*t]];
R[i + 1805*t] = Op[i + 741*t] ? R[B[i + 741*t]] * R[C[i + 741*t]] : R[B[i + 741*t]] + R[C[i + 741*t]];
R[i + 1806*t] = Op[i + 742*t] ? R[B[i + 742*t]] * R[C[i + 742*t]] : R[B[i + 742*t]] + R[C[i + 742*t]];
R[i + 1807*t] = Op[i + 743*t] ? R[B[i + 743*t]] * R[C[i + 743*t]] : R[B[i + 743*t]] + R[C[i + 743*t]];
R[i + 1808*t] = Op[i + 744*t] ? R[B[i + 744*t]] * R[C[i + 744*t]] : R[B[i + 744*t]] + R[C[i + 744*t]];
R[i + 1809*t] = Op[i + 745*t] ? R[B[i + 745*t]] * R[C[i + 745*t]] : R[B[i + 745*t]] + R[C[i + 745*t]];
R[i + 1810*t] = Op[i + 746*t] ? R[B[i + 746*t]] * R[C[i + 746*t]] : R[B[i + 746*t]] + R[C[i + 746*t]];
R[i + 1811*t] = Op[i + 747*t] ? R[B[i + 747*t]] * R[C[i + 747*t]] : R[B[i + 747*t]] + R[C[i + 747*t]];
R[i + 1812*t] = Op[i + 748*t] ? R[B[i + 748*t]] * R[C[i + 748*t]] : R[B[i + 748*t]] + R[C[i + 748*t]];
R[i + 1813*t] = Op[i + 749*t] ? R[B[i + 749*t]] * R[C[i + 749*t]] : R[B[i + 749*t]] + R[C[i + 749*t]];
R[i + 1814*t] = Op[i + 750*t] ? R[B[i + 750*t]] * R[C[i + 750*t]] : R[B[i + 750*t]] + R[C[i + 750*t]];
R[i + 1815*t] = Op[i + 751*t] ? R[B[i + 751*t]] * R[C[i + 751*t]] : R[B[i + 751*t]] + R[C[i + 751*t]];
R[i + 1816*t] = Op[i + 752*t] ? R[B[i + 752*t]] * R[C[i + 752*t]] : R[B[i + 752*t]] + R[C[i + 752*t]];
R[i + 1817*t] = Op[i + 753*t] ? R[B[i + 753*t]] * R[C[i + 753*t]] : R[B[i + 753*t]] + R[C[i + 753*t]];
R[i + 1818*t] = Op[i + 754*t] ? R[B[i + 754*t]] * R[C[i + 754*t]] : R[B[i + 754*t]] + R[C[i + 754*t]];
R[i + 1819*t] = Op[i + 755*t] ? R[B[i + 755*t]] * R[C[i + 755*t]] : R[B[i + 755*t]] + R[C[i + 755*t]];
R[i + 1820*t] = Op[i + 756*t] ? R[B[i + 756*t]] * R[C[i + 756*t]] : R[B[i + 756*t]] + R[C[i + 756*t]];
R[i + 1821*t] = Op[i + 757*t] ? R[B[i + 757*t]] * R[C[i + 757*t]] : R[B[i + 757*t]] + R[C[i + 757*t]];
R[i + 1822*t] = Op[i + 758*t] ? R[B[i + 758*t]] * R[C[i + 758*t]] : R[B[i + 758*t]] + R[C[i + 758*t]];
R[i + 1823*t] = Op[i + 759*t] ? R[B[i + 759*t]] * R[C[i + 759*t]] : R[B[i + 759*t]] + R[C[i + 759*t]];
R[i + 1824*t] = Op[i + 760*t] ? R[B[i + 760*t]] * R[C[i + 760*t]] : R[B[i + 760*t]] + R[C[i + 760*t]];
R[i + 1825*t] = Op[i + 761*t] ? R[B[i + 761*t]] * R[C[i + 761*t]] : R[B[i + 761*t]] + R[C[i + 761*t]];
R[i + 1826*t] = Op[i + 762*t] ? R[B[i + 762*t]] * R[C[i + 762*t]] : R[B[i + 762*t]] + R[C[i + 762*t]];
R[i + 1827*t] = Op[i + 763*t] ? R[B[i + 763*t]] * R[C[i + 763*t]] : R[B[i + 763*t]] + R[C[i + 763*t]];
R[i + 1828*t] = Op[i + 764*t] ? R[B[i + 764*t]] * R[C[i + 764*t]] : R[B[i + 764*t]] + R[C[i + 764*t]];
R[i + 1829*t] = Op[i + 765*t] ? R[B[i + 765*t]] * R[C[i + 765*t]] : R[B[i + 765*t]] + R[C[i + 765*t]];
R[i + 1830*t] = Op[i + 766*t] ? R[B[i + 766*t]] * R[C[i + 766*t]] : R[B[i + 766*t]] + R[C[i + 766*t]];
R[i + 1831*t] = Op[i + 767*t] ? R[B[i + 767*t]] * R[C[i + 767*t]] : R[B[i + 767*t]] + R[C[i + 767*t]];
R[i + 1832*t] = Op[i + 768*t] ? R[B[i + 768*t]] * R[C[i + 768*t]] : R[B[i + 768*t]] + R[C[i + 768*t]];
R[i + 1833*t] = Op[i + 769*t] ? R[B[i + 769*t]] * R[C[i + 769*t]] : R[B[i + 769*t]] + R[C[i + 769*t]];
R[i + 1834*t] = Op[i + 770*t] ? R[B[i + 770*t]] * R[C[i + 770*t]] : R[B[i + 770*t]] + R[C[i + 770*t]];
R[i + 1835*t] = Op[i + 771*t] ? R[B[i + 771*t]] * R[C[i + 771*t]] : R[B[i + 771*t]] + R[C[i + 771*t]];
R[i + 1836*t] = Op[i + 772*t] ? R[B[i + 772*t]] * R[C[i + 772*t]] : R[B[i + 772*t]] + R[C[i + 772*t]];
R[i + 1837*t] = Op[i + 773*t] ? R[B[i + 773*t]] * R[C[i + 773*t]] : R[B[i + 773*t]] + R[C[i + 773*t]];
R[i + 1838*t] = Op[i + 774*t] ? R[B[i + 774*t]] * R[C[i + 774*t]] : R[B[i + 774*t]] + R[C[i + 774*t]];
R[i + 1839*t] = Op[i + 775*t] ? R[B[i + 775*t]] * R[C[i + 775*t]] : R[B[i + 775*t]] + R[C[i + 775*t]];
R[i + 1840*t] = Op[i + 776*t] ? R[B[i + 776*t]] * R[C[i + 776*t]] : R[B[i + 776*t]] + R[C[i + 776*t]];
R[i + 1841*t] = Op[i + 777*t] ? R[B[i + 777*t]] * R[C[i + 777*t]] : R[B[i + 777*t]] + R[C[i + 777*t]];
R[i + 1842*t] = Op[i + 778*t] ? R[B[i + 778*t]] * R[C[i + 778*t]] : R[B[i + 778*t]] + R[C[i + 778*t]];
R[i + 1843*t] = Op[i + 779*t] ? R[B[i + 779*t]] * R[C[i + 779*t]] : R[B[i + 779*t]] + R[C[i + 779*t]];
R[i + 1844*t] = Op[i + 780*t] ? R[B[i + 780*t]] * R[C[i + 780*t]] : R[B[i + 780*t]] + R[C[i + 780*t]];
R[i + 1845*t] = Op[i + 781*t] ? R[B[i + 781*t]] * R[C[i + 781*t]] : R[B[i + 781*t]] + R[C[i + 781*t]];
R[i + 1846*t] = Op[i + 782*t] ? R[B[i + 782*t]] * R[C[i + 782*t]] : R[B[i + 782*t]] + R[C[i + 782*t]];
R[i + 1847*t] = Op[i + 783*t] ? R[B[i + 783*t]] * R[C[i + 783*t]] : R[B[i + 783*t]] + R[C[i + 783*t]];
R[i + 1848*t] = Op[i + 784*t] ? R[B[i + 784*t]] * R[C[i + 784*t]] : R[B[i + 784*t]] + R[C[i + 784*t]];
R[i + 1849*t] = Op[i + 785*t] ? R[B[i + 785*t]] * R[C[i + 785*t]] : R[B[i + 785*t]] + R[C[i + 785*t]];
R[i + 1850*t] = Op[i + 786*t] ? R[B[i + 786*t]] * R[C[i + 786*t]] : R[B[i + 786*t]] + R[C[i + 786*t]];
R[i + 1851*t] = Op[i + 787*t] ? R[B[i + 787*t]] * R[C[i + 787*t]] : R[B[i + 787*t]] + R[C[i + 787*t]];
R[i + 1852*t] = Op[i + 788*t] ? R[B[i + 788*t]] * R[C[i + 788*t]] : R[B[i + 788*t]] + R[C[i + 788*t]];
R[i + 1853*t] = Op[i + 789*t] ? R[B[i + 789*t]] * R[C[i + 789*t]] : R[B[i + 789*t]] + R[C[i + 789*t]];
R[i + 1854*t] = Op[i + 790*t] ? R[B[i + 790*t]] * R[C[i + 790*t]] : R[B[i + 790*t]] + R[C[i + 790*t]];
R[i + 1855*t] = Op[i + 791*t] ? R[B[i + 791*t]] * R[C[i + 791*t]] : R[B[i + 791*t]] + R[C[i + 791*t]];
R[i + 1856*t] = Op[i + 792*t] ? R[B[i + 792*t]] * R[C[i + 792*t]] : R[B[i + 792*t]] + R[C[i + 792*t]];
R[i + 1857*t] = Op[i + 793*t] ? R[B[i + 793*t]] * R[C[i + 793*t]] : R[B[i + 793*t]] + R[C[i + 793*t]];
R[i + 1858*t] = Op[i + 794*t] ? R[B[i + 794*t]] * R[C[i + 794*t]] : R[B[i + 794*t]] + R[C[i + 794*t]];
R[i + 1859*t] = Op[i + 795*t] ? R[B[i + 795*t]] * R[C[i + 795*t]] : R[B[i + 795*t]] + R[C[i + 795*t]];
R[i + 1860*t] = Op[i + 796*t] ? R[B[i + 796*t]] * R[C[i + 796*t]] : R[B[i + 796*t]] + R[C[i + 796*t]];
R[i + 1861*t] = Op[i + 797*t] ? R[B[i + 797*t]] * R[C[i + 797*t]] : R[B[i + 797*t]] + R[C[i + 797*t]];
R[i + 1862*t] = Op[i + 798*t] ? R[B[i + 798*t]] * R[C[i + 798*t]] : R[B[i + 798*t]] + R[C[i + 798*t]];
R[i + 1863*t] = Op[i + 799*t] ? R[B[i + 799*t]] * R[C[i + 799*t]] : R[B[i + 799*t]] + R[C[i + 799*t]];
R[i + 1864*t] = Op[i + 800*t] ? R[B[i + 800*t]] * R[C[i + 800*t]] : R[B[i + 800*t]] + R[C[i + 800*t]];
R[i + 1865*t] = Op[i + 801*t] ? R[B[i + 801*t]] * R[C[i + 801*t]] : R[B[i + 801*t]] + R[C[i + 801*t]];
R[i + 1866*t] = Op[i + 802*t] ? R[B[i + 802*t]] * R[C[i + 802*t]] : R[B[i + 802*t]] + R[C[i + 802*t]];
R[i + 1867*t] = Op[i + 803*t] ? R[B[i + 803*t]] * R[C[i + 803*t]] : R[B[i + 803*t]] + R[C[i + 803*t]];
R[i + 1868*t] = Op[i + 804*t] ? R[B[i + 804*t]] * R[C[i + 804*t]] : R[B[i + 804*t]] + R[C[i + 804*t]];
R[i + 1869*t] = Op[i + 805*t] ? R[B[i + 805*t]] * R[C[i + 805*t]] : R[B[i + 805*t]] + R[C[i + 805*t]];
R[i + 1870*t] = Op[i + 806*t] ? R[B[i + 806*t]] * R[C[i + 806*t]] : R[B[i + 806*t]] + R[C[i + 806*t]];
R[i + 1871*t] = Op[i + 807*t] ? R[B[i + 807*t]] * R[C[i + 807*t]] : R[B[i + 807*t]] + R[C[i + 807*t]];
R[i + 1872*t] = Op[i + 808*t] ? R[B[i + 808*t]] * R[C[i + 808*t]] : R[B[i + 808*t]] + R[C[i + 808*t]];
R[i + 1873*t] = Op[i + 809*t] ? R[B[i + 809*t]] * R[C[i + 809*t]] : R[B[i + 809*t]] + R[C[i + 809*t]];
R[i + 1874*t] = Op[i + 810*t] ? R[B[i + 810*t]] * R[C[i + 810*t]] : R[B[i + 810*t]] + R[C[i + 810*t]];
R[i + 1875*t] = Op[i + 811*t] ? R[B[i + 811*t]] * R[C[i + 811*t]] : R[B[i + 811*t]] + R[C[i + 811*t]];
R[i + 1876*t] = Op[i + 812*t] ? R[B[i + 812*t]] * R[C[i + 812*t]] : R[B[i + 812*t]] + R[C[i + 812*t]];
R[i + 1877*t] = Op[i + 813*t] ? R[B[i + 813*t]] * R[C[i + 813*t]] : R[B[i + 813*t]] + R[C[i + 813*t]];
R[i + 1878*t] = Op[i + 814*t] ? R[B[i + 814*t]] * R[C[i + 814*t]] : R[B[i + 814*t]] + R[C[i + 814*t]];
R[i + 1879*t] = Op[i + 815*t] ? R[B[i + 815*t]] * R[C[i + 815*t]] : R[B[i + 815*t]] + R[C[i + 815*t]];
R[i + 1880*t] = Op[i + 816*t] ? R[B[i + 816*t]] * R[C[i + 816*t]] : R[B[i + 816*t]] + R[C[i + 816*t]];
R[i + 1881*t] = Op[i + 817*t] ? R[B[i + 817*t]] * R[C[i + 817*t]] : R[B[i + 817*t]] + R[C[i + 817*t]];
R[i + 1882*t] = Op[i + 818*t] ? R[B[i + 818*t]] * R[C[i + 818*t]] : R[B[i + 818*t]] + R[C[i + 818*t]];
R[i + 1883*t] = Op[i + 819*t] ? R[B[i + 819*t]] * R[C[i + 819*t]] : R[B[i + 819*t]] + R[C[i + 819*t]];
R[i + 1884*t] = Op[i + 820*t] ? R[B[i + 820*t]] * R[C[i + 820*t]] : R[B[i + 820*t]] + R[C[i + 820*t]];
R[i + 1885*t] = Op[i + 821*t] ? R[B[i + 821*t]] * R[C[i + 821*t]] : R[B[i + 821*t]] + R[C[i + 821*t]];
R[i + 1886*t] = Op[i + 822*t] ? R[B[i + 822*t]] * R[C[i + 822*t]] : R[B[i + 822*t]] + R[C[i + 822*t]];
R[i + 1887*t] = Op[i + 823*t] ? R[B[i + 823*t]] * R[C[i + 823*t]] : R[B[i + 823*t]] + R[C[i + 823*t]];
R[i + 1888*t] = Op[i + 824*t] ? R[B[i + 824*t]] * R[C[i + 824*t]] : R[B[i + 824*t]] + R[C[i + 824*t]];
R[i + 1889*t] = Op[i + 825*t] ? R[B[i + 825*t]] * R[C[i + 825*t]] : R[B[i + 825*t]] + R[C[i + 825*t]];
R[i + 1890*t] = Op[i + 826*t] ? R[B[i + 826*t]] * R[C[i + 826*t]] : R[B[i + 826*t]] + R[C[i + 826*t]];
R[i + 1891*t] = Op[i + 827*t] ? R[B[i + 827*t]] * R[C[i + 827*t]] : R[B[i + 827*t]] + R[C[i + 827*t]];
R[i + 1892*t] = Op[i + 828*t] ? R[B[i + 828*t]] * R[C[i + 828*t]] : R[B[i + 828*t]] + R[C[i + 828*t]];
R[i + 1893*t] = Op[i + 829*t] ? R[B[i + 829*t]] * R[C[i + 829*t]] : R[B[i + 829*t]] + R[C[i + 829*t]];
R[i + 1894*t] = Op[i + 830*t] ? R[B[i + 830*t]] * R[C[i + 830*t]] : R[B[i + 830*t]] + R[C[i + 830*t]];
R[i + 1895*t] = Op[i + 831*t] ? R[B[i + 831*t]] * R[C[i + 831*t]] : R[B[i + 831*t]] + R[C[i + 831*t]];
R[i + 1896*t] = Op[i + 832*t] ? R[B[i + 832*t]] * R[C[i + 832*t]] : R[B[i + 832*t]] + R[C[i + 832*t]];
R[i + 1897*t] = Op[i + 833*t] ? R[B[i + 833*t]] * R[C[i + 833*t]] : R[B[i + 833*t]] + R[C[i + 833*t]];
R[i + 1898*t] = Op[i + 834*t] ? R[B[i + 834*t]] * R[C[i + 834*t]] : R[B[i + 834*t]] + R[C[i + 834*t]];
R[i + 1899*t] = Op[i + 835*t] ? R[B[i + 835*t]] * R[C[i + 835*t]] : R[B[i + 835*t]] + R[C[i + 835*t]];
R[i + 1900*t] = Op[i + 836*t] ? R[B[i + 836*t]] * R[C[i + 836*t]] : R[B[i + 836*t]] + R[C[i + 836*t]];
R[i + 1901*t] = Op[i + 837*t] ? R[B[i + 837*t]] * R[C[i + 837*t]] : R[B[i + 837*t]] + R[C[i + 837*t]];
R[i + 1902*t] = Op[i + 838*t] ? R[B[i + 838*t]] * R[C[i + 838*t]] : R[B[i + 838*t]] + R[C[i + 838*t]];
R[i + 1903*t] = Op[i + 839*t] ? R[B[i + 839*t]] * R[C[i + 839*t]] : R[B[i + 839*t]] + R[C[i + 839*t]];
R[i + 1904*t] = Op[i + 840*t] ? R[B[i + 840*t]] * R[C[i + 840*t]] : R[B[i + 840*t]] + R[C[i + 840*t]];
R[i + 1905*t] = Op[i + 841*t] ? R[B[i + 841*t]] * R[C[i + 841*t]] : R[B[i + 841*t]] + R[C[i + 841*t]];
R[i + 1906*t] = Op[i + 842*t] ? R[B[i + 842*t]] * R[C[i + 842*t]] : R[B[i + 842*t]] + R[C[i + 842*t]];
R[i + 1907*t] = Op[i + 843*t] ? R[B[i + 843*t]] * R[C[i + 843*t]] : R[B[i + 843*t]] + R[C[i + 843*t]];
R[i + 1908*t] = Op[i + 844*t] ? R[B[i + 844*t]] * R[C[i + 844*t]] : R[B[i + 844*t]] + R[C[i + 844*t]];
R[i + 1909*t] = Op[i + 845*t] ? R[B[i + 845*t]] * R[C[i + 845*t]] : R[B[i + 845*t]] + R[C[i + 845*t]];
R[i + 1910*t] = Op[i + 846*t] ? R[B[i + 846*t]] * R[C[i + 846*t]] : R[B[i + 846*t]] + R[C[i + 846*t]];
R[i + 1911*t] = Op[i + 847*t] ? R[B[i + 847*t]] * R[C[i + 847*t]] : R[B[i + 847*t]] + R[C[i + 847*t]];
R[i + 1912*t] = Op[i + 848*t] ? R[B[i + 848*t]] * R[C[i + 848*t]] : R[B[i + 848*t]] + R[C[i + 848*t]];
R[i + 1913*t] = Op[i + 849*t] ? R[B[i + 849*t]] * R[C[i + 849*t]] : R[B[i + 849*t]] + R[C[i + 849*t]];
R[i + 1914*t] = Op[i + 850*t] ? R[B[i + 850*t]] * R[C[i + 850*t]] : R[B[i + 850*t]] + R[C[i + 850*t]];
R[i + 1915*t] = Op[i + 851*t] ? R[B[i + 851*t]] * R[C[i + 851*t]] : R[B[i + 851*t]] + R[C[i + 851*t]];
R[i + 1916*t] = Op[i + 852*t] ? R[B[i + 852*t]] * R[C[i + 852*t]] : R[B[i + 852*t]] + R[C[i + 852*t]];
R[i + 1917*t] = Op[i + 853*t] ? R[B[i + 853*t]] * R[C[i + 853*t]] : R[B[i + 853*t]] + R[C[i + 853*t]];
R[i + 1918*t] = Op[i + 854*t] ? R[B[i + 854*t]] * R[C[i + 854*t]] : R[B[i + 854*t]] + R[C[i + 854*t]];
R[i + 1919*t] = Op[i + 855*t] ? R[B[i + 855*t]] * R[C[i + 855*t]] : R[B[i + 855*t]] + R[C[i + 855*t]];
R[i + 1920*t] = Op[i + 856*t] ? R[B[i + 856*t]] * R[C[i + 856*t]] : R[B[i + 856*t]] + R[C[i + 856*t]];
R[i + 1921*t] = Op[i + 857*t] ? R[B[i + 857*t]] * R[C[i + 857*t]] : R[B[i + 857*t]] + R[C[i + 857*t]];
R[i + 1922*t] = Op[i + 858*t] ? R[B[i + 858*t]] * R[C[i + 858*t]] : R[B[i + 858*t]] + R[C[i + 858*t]];
R[i + 1923*t] = Op[i + 859*t] ? R[B[i + 859*t]] * R[C[i + 859*t]] : R[B[i + 859*t]] + R[C[i + 859*t]];
R[i + 1924*t] = Op[i + 860*t] ? R[B[i + 860*t]] * R[C[i + 860*t]] : R[B[i + 860*t]] + R[C[i + 860*t]];
R[i + 1925*t] = Op[i + 861*t] ? R[B[i + 861*t]] * R[C[i + 861*t]] : R[B[i + 861*t]] + R[C[i + 861*t]];
R[i + 1926*t] = Op[i + 862*t] ? R[B[i + 862*t]] * R[C[i + 862*t]] : R[B[i + 862*t]] + R[C[i + 862*t]];
R[i + 1927*t] = Op[i + 863*t] ? R[B[i + 863*t]] * R[C[i + 863*t]] : R[B[i + 863*t]] + R[C[i + 863*t]];
R[i + 1928*t] = Op[i + 864*t] ? R[B[i + 864*t]] * R[C[i + 864*t]] : R[B[i + 864*t]] + R[C[i + 864*t]];
R[i + 1929*t] = Op[i + 865*t] ? R[B[i + 865*t]] * R[C[i + 865*t]] : R[B[i + 865*t]] + R[C[i + 865*t]];
R[i + 1930*t] = Op[i + 866*t] ? R[B[i + 866*t]] * R[C[i + 866*t]] : R[B[i + 866*t]] + R[C[i + 866*t]];
R[i + 1931*t] = Op[i + 867*t] ? R[B[i + 867*t]] * R[C[i + 867*t]] : R[B[i + 867*t]] + R[C[i + 867*t]];
R[i + 1932*t] = Op[i + 868*t] ? R[B[i + 868*t]] * R[C[i + 868*t]] : R[B[i + 868*t]] + R[C[i + 868*t]];
R[i + 1933*t] = Op[i + 869*t] ? R[B[i + 869*t]] * R[C[i + 869*t]] : R[B[i + 869*t]] + R[C[i + 869*t]];
R[i + 1934*t] = Op[i + 870*t] ? R[B[i + 870*t]] * R[C[i + 870*t]] : R[B[i + 870*t]] + R[C[i + 870*t]];
R[i + 1935*t] = Op[i + 871*t] ? R[B[i + 871*t]] * R[C[i + 871*t]] : R[B[i + 871*t]] + R[C[i + 871*t]];
R[i + 1936*t] = Op[i + 872*t] ? R[B[i + 872*t]] * R[C[i + 872*t]] : R[B[i + 872*t]] + R[C[i + 872*t]];
R[i + 1937*t] = Op[i + 873*t] ? R[B[i + 873*t]] * R[C[i + 873*t]] : R[B[i + 873*t]] + R[C[i + 873*t]];
R[i + 1938*t] = Op[i + 874*t] ? R[B[i + 874*t]] * R[C[i + 874*t]] : R[B[i + 874*t]] + R[C[i + 874*t]];
R[i + 1939*t] = Op[i + 875*t] ? R[B[i + 875*t]] * R[C[i + 875*t]] : R[B[i + 875*t]] + R[C[i + 875*t]];
R[i + 1940*t] = Op[i + 876*t] ? R[B[i + 876*t]] * R[C[i + 876*t]] : R[B[i + 876*t]] + R[C[i + 876*t]];
R[i + 1941*t] = Op[i + 877*t] ? R[B[i + 877*t]] * R[C[i + 877*t]] : R[B[i + 877*t]] + R[C[i + 877*t]];
R[i + 1942*t] = Op[i + 878*t] ? R[B[i + 878*t]] * R[C[i + 878*t]] : R[B[i + 878*t]] + R[C[i + 878*t]];
R[i + 1943*t] = Op[i + 879*t] ? R[B[i + 879*t]] * R[C[i + 879*t]] : R[B[i + 879*t]] + R[C[i + 879*t]];
R[i + 1944*t] = Op[i + 880*t] ? R[B[i + 880*t]] * R[C[i + 880*t]] : R[B[i + 880*t]] + R[C[i + 880*t]];
R[i + 1945*t] = Op[i + 881*t] ? R[B[i + 881*t]] * R[C[i + 881*t]] : R[B[i + 881*t]] + R[C[i + 881*t]];
R[i + 1946*t] = Op[i + 882*t] ? R[B[i + 882*t]] * R[C[i + 882*t]] : R[B[i + 882*t]] + R[C[i + 882*t]];
R[i + 1947*t] = Op[i + 883*t] ? R[B[i + 883*t]] * R[C[i + 883*t]] : R[B[i + 883*t]] + R[C[i + 883*t]];
__syncthreads();
R[i + 1948*t] = Op[i + 884*t] ? R[B[i + 884*t]] * R[C[i + 884*t]] : R[B[i + 884*t]] + R[C[i + 884*t]];
R[i + 1949*t] = Op[i + 885*t] ? R[B[i + 885*t]] * R[C[i + 885*t]] : R[B[i + 885*t]] + R[C[i + 885*t]];
R[i + 1950*t] = Op[i + 886*t] ? R[B[i + 886*t]] * R[C[i + 886*t]] : R[B[i + 886*t]] + R[C[i + 886*t]];
R[i + 1951*t] = Op[i + 887*t] ? R[B[i + 887*t]] * R[C[i + 887*t]] : R[B[i + 887*t]] + R[C[i + 887*t]];
R[i + 1952*t] = Op[i + 888*t] ? R[B[i + 888*t]] * R[C[i + 888*t]] : R[B[i + 888*t]] + R[C[i + 888*t]];
R[i + 1953*t] = Op[i + 889*t] ? R[B[i + 889*t]] * R[C[i + 889*t]] : R[B[i + 889*t]] + R[C[i + 889*t]];
R[i + 1954*t] = Op[i + 890*t] ? R[B[i + 890*t]] * R[C[i + 890*t]] : R[B[i + 890*t]] + R[C[i + 890*t]];
R[i + 1955*t] = Op[i + 891*t] ? R[B[i + 891*t]] * R[C[i + 891*t]] : R[B[i + 891*t]] + R[C[i + 891*t]];
R[i + 1956*t] = Op[i + 892*t] ? R[B[i + 892*t]] * R[C[i + 892*t]] : R[B[i + 892*t]] + R[C[i + 892*t]];
R[i + 1957*t] = Op[i + 893*t] ? R[B[i + 893*t]] * R[C[i + 893*t]] : R[B[i + 893*t]] + R[C[i + 893*t]];
R[i + 1958*t] = Op[i + 894*t] ? R[B[i + 894*t]] * R[C[i + 894*t]] : R[B[i + 894*t]] + R[C[i + 894*t]];
R[i + 1959*t] = Op[i + 895*t] ? R[B[i + 895*t]] * R[C[i + 895*t]] : R[B[i + 895*t]] + R[C[i + 895*t]];
R[i + 1960*t] = Op[i + 896*t] ? R[B[i + 896*t]] * R[C[i + 896*t]] : R[B[i + 896*t]] + R[C[i + 896*t]];
R[i + 1961*t] = Op[i + 897*t] ? R[B[i + 897*t]] * R[C[i + 897*t]] : R[B[i + 897*t]] + R[C[i + 897*t]];
R[i + 1962*t] = Op[i + 898*t] ? R[B[i + 898*t]] * R[C[i + 898*t]] : R[B[i + 898*t]] + R[C[i + 898*t]];
R[i + 1963*t] = Op[i + 899*t] ? R[B[i + 899*t]] * R[C[i + 899*t]] : R[B[i + 899*t]] + R[C[i + 899*t]];
R[i + 1964*t] = Op[i + 900*t] ? R[B[i + 900*t]] * R[C[i + 900*t]] : R[B[i + 900*t]] + R[C[i + 900*t]];
R[i + 1965*t] = Op[i + 901*t] ? R[B[i + 901*t]] * R[C[i + 901*t]] : R[B[i + 901*t]] + R[C[i + 901*t]];
R[i + 1966*t] = Op[i + 902*t] ? R[B[i + 902*t]] * R[C[i + 902*t]] : R[B[i + 902*t]] + R[C[i + 902*t]];
R[i + 1967*t] = Op[i + 903*t] ? R[B[i + 903*t]] * R[C[i + 903*t]] : R[B[i + 903*t]] + R[C[i + 903*t]];
R[i + 1968*t] = Op[i + 904*t] ? R[B[i + 904*t]] * R[C[i + 904*t]] : R[B[i + 904*t]] + R[C[i + 904*t]];
R[i + 1969*t] = Op[i + 905*t] ? R[B[i + 905*t]] * R[C[i + 905*t]] : R[B[i + 905*t]] + R[C[i + 905*t]];
R[i + 1970*t] = Op[i + 906*t] ? R[B[i + 906*t]] * R[C[i + 906*t]] : R[B[i + 906*t]] + R[C[i + 906*t]];
R[i + 1971*t] = Op[i + 907*t] ? R[B[i + 907*t]] * R[C[i + 907*t]] : R[B[i + 907*t]] + R[C[i + 907*t]];
R[i + 1972*t] = Op[i + 908*t] ? R[B[i + 908*t]] * R[C[i + 908*t]] : R[B[i + 908*t]] + R[C[i + 908*t]];
R[i + 1973*t] = Op[i + 909*t] ? R[B[i + 909*t]] * R[C[i + 909*t]] : R[B[i + 909*t]] + R[C[i + 909*t]];
R[i + 1974*t] = Op[i + 910*t] ? R[B[i + 910*t]] * R[C[i + 910*t]] : R[B[i + 910*t]] + R[C[i + 910*t]];
R[i + 1975*t] = Op[i + 911*t] ? R[B[i + 911*t]] * R[C[i + 911*t]] : R[B[i + 911*t]] + R[C[i + 911*t]];
R[i + 1976*t] = Op[i + 912*t] ? R[B[i + 912*t]] * R[C[i + 912*t]] : R[B[i + 912*t]] + R[C[i + 912*t]];
R[i + 1977*t] = Op[i + 913*t] ? R[B[i + 913*t]] * R[C[i + 913*t]] : R[B[i + 913*t]] + R[C[i + 913*t]];
R[i + 1978*t] = Op[i + 914*t] ? R[B[i + 914*t]] * R[C[i + 914*t]] : R[B[i + 914*t]] + R[C[i + 914*t]];
R[i + 1979*t] = Op[i + 915*t] ? R[B[i + 915*t]] * R[C[i + 915*t]] : R[B[i + 915*t]] + R[C[i + 915*t]];
R[i + 1980*t] = Op[i + 916*t] ? R[B[i + 916*t]] * R[C[i + 916*t]] : R[B[i + 916*t]] + R[C[i + 916*t]];
R[i + 1981*t] = Op[i + 917*t] ? R[B[i + 917*t]] * R[C[i + 917*t]] : R[B[i + 917*t]] + R[C[i + 917*t]];
R[i + 1982*t] = Op[i + 918*t] ? R[B[i + 918*t]] * R[C[i + 918*t]] : R[B[i + 918*t]] + R[C[i + 918*t]];
R[i + 1983*t] = Op[i + 919*t] ? R[B[i + 919*t]] * R[C[i + 919*t]] : R[B[i + 919*t]] + R[C[i + 919*t]];
R[i + 1984*t] = Op[i + 920*t] ? R[B[i + 920*t]] * R[C[i + 920*t]] : R[B[i + 920*t]] + R[C[i + 920*t]];
R[i + 1985*t] = Op[i + 921*t] ? R[B[i + 921*t]] * R[C[i + 921*t]] : R[B[i + 921*t]] + R[C[i + 921*t]];
R[i + 1986*t] = Op[i + 922*t] ? R[B[i + 922*t]] * R[C[i + 922*t]] : R[B[i + 922*t]] + R[C[i + 922*t]];
R[i + 1987*t] = Op[i + 923*t] ? R[B[i + 923*t]] * R[C[i + 923*t]] : R[B[i + 923*t]] + R[C[i + 923*t]];
R[i + 1988*t] = Op[i + 924*t] ? R[B[i + 924*t]] * R[C[i + 924*t]] : R[B[i + 924*t]] + R[C[i + 924*t]];
R[i + 1989*t] = Op[i + 925*t] ? R[B[i + 925*t]] * R[C[i + 925*t]] : R[B[i + 925*t]] + R[C[i + 925*t]];
R[i + 1990*t] = Op[i + 926*t] ? R[B[i + 926*t]] * R[C[i + 926*t]] : R[B[i + 926*t]] + R[C[i + 926*t]];
// Machine-generated, fully unrolled evaluation of a binary-operation table:
// each statement computes one result slot
//   R[i + D*t] = Op[i + K*t] ? R[B[i + K*t]] * R[C[i + K*t]]
//                             : R[B[i + K*t]] + R[C[i + K*t]];
// i.e. operation K selects multiply (Op nonzero) or add (Op zero) over two
// previously computed R slots addressed through the index tables B and C.
// Throughout this chunk the destination offset D equals K + 1064.
// NOTE(review): the interleaved __syncthreads() barriers are unconditional
// (safe: reached by all threads of the block) and presumably separate
// dependency levels of the generated expression DAG, with R living in
// shared memory — both assumptions need confirming against the kernel
// header and the code generator, which are outside this chunk.
// Do not reorder statements or move barriers: later lines may read R slots
// written (by other threads) before the preceding barrier.
R[i + 1991*t] = Op[i + 927*t] ? R[B[i + 927*t]] * R[C[i + 927*t]] : R[B[i + 927*t]] + R[C[i + 927*t]];
R[i + 1992*t] = Op[i + 928*t] ? R[B[i + 928*t]] * R[C[i + 928*t]] : R[B[i + 928*t]] + R[C[i + 928*t]];
R[i + 1993*t] = Op[i + 929*t] ? R[B[i + 929*t]] * R[C[i + 929*t]] : R[B[i + 929*t]] + R[C[i + 929*t]];
R[i + 1994*t] = Op[i + 930*t] ? R[B[i + 930*t]] * R[C[i + 930*t]] : R[B[i + 930*t]] + R[C[i + 930*t]];
R[i + 1995*t] = Op[i + 931*t] ? R[B[i + 931*t]] * R[C[i + 931*t]] : R[B[i + 931*t]] + R[C[i + 931*t]];
R[i + 1996*t] = Op[i + 932*t] ? R[B[i + 932*t]] * R[C[i + 932*t]] : R[B[i + 932*t]] + R[C[i + 932*t]];
R[i + 1997*t] = Op[i + 933*t] ? R[B[i + 933*t]] * R[C[i + 933*t]] : R[B[i + 933*t]] + R[C[i + 933*t]];
R[i + 1998*t] = Op[i + 934*t] ? R[B[i + 934*t]] * R[C[i + 934*t]] : R[B[i + 934*t]] + R[C[i + 934*t]];
R[i + 1999*t] = Op[i + 935*t] ? R[B[i + 935*t]] * R[C[i + 935*t]] : R[B[i + 935*t]] + R[C[i + 935*t]];
R[i + 2000*t] = Op[i + 936*t] ? R[B[i + 936*t]] * R[C[i + 936*t]] : R[B[i + 936*t]] + R[C[i + 936*t]];
R[i + 2001*t] = Op[i + 937*t] ? R[B[i + 937*t]] * R[C[i + 937*t]] : R[B[i + 937*t]] + R[C[i + 937*t]];
R[i + 2002*t] = Op[i + 938*t] ? R[B[i + 938*t]] * R[C[i + 938*t]] : R[B[i + 938*t]] + R[C[i + 938*t]];
R[i + 2003*t] = Op[i + 939*t] ? R[B[i + 939*t]] * R[C[i + 939*t]] : R[B[i + 939*t]] + R[C[i + 939*t]];
R[i + 2004*t] = Op[i + 940*t] ? R[B[i + 940*t]] * R[C[i + 940*t]] : R[B[i + 940*t]] + R[C[i + 940*t]];
R[i + 2005*t] = Op[i + 941*t] ? R[B[i + 941*t]] * R[C[i + 941*t]] : R[B[i + 941*t]] + R[C[i + 941*t]];
R[i + 2006*t] = Op[i + 942*t] ? R[B[i + 942*t]] * R[C[i + 942*t]] : R[B[i + 942*t]] + R[C[i + 942*t]];
R[i + 2007*t] = Op[i + 943*t] ? R[B[i + 943*t]] * R[C[i + 943*t]] : R[B[i + 943*t]] + R[C[i + 943*t]];
R[i + 2008*t] = Op[i + 944*t] ? R[B[i + 944*t]] * R[C[i + 944*t]] : R[B[i + 944*t]] + R[C[i + 944*t]];
R[i + 2009*t] = Op[i + 945*t] ? R[B[i + 945*t]] * R[C[i + 945*t]] : R[B[i + 945*t]] + R[C[i + 945*t]];
R[i + 2010*t] = Op[i + 946*t] ? R[B[i + 946*t]] * R[C[i + 946*t]] : R[B[i + 946*t]] + R[C[i + 946*t]];
R[i + 2011*t] = Op[i + 947*t] ? R[B[i + 947*t]] * R[C[i + 947*t]] : R[B[i + 947*t]] + R[C[i + 947*t]];
R[i + 2012*t] = Op[i + 948*t] ? R[B[i + 948*t]] * R[C[i + 948*t]] : R[B[i + 948*t]] + R[C[i + 948*t]];
R[i + 2013*t] = Op[i + 949*t] ? R[B[i + 949*t]] * R[C[i + 949*t]] : R[B[i + 949*t]] + R[C[i + 949*t]];
R[i + 2014*t] = Op[i + 950*t] ? R[B[i + 950*t]] * R[C[i + 950*t]] : R[B[i + 950*t]] + R[C[i + 950*t]];
R[i + 2015*t] = Op[i + 951*t] ? R[B[i + 951*t]] * R[C[i + 951*t]] : R[B[i + 951*t]] + R[C[i + 951*t]];
R[i + 2016*t] = Op[i + 952*t] ? R[B[i + 952*t]] * R[C[i + 952*t]] : R[B[i + 952*t]] + R[C[i + 952*t]];
R[i + 2017*t] = Op[i + 953*t] ? R[B[i + 953*t]] * R[C[i + 953*t]] : R[B[i + 953*t]] + R[C[i + 953*t]];
R[i + 2018*t] = Op[i + 954*t] ? R[B[i + 954*t]] * R[C[i + 954*t]] : R[B[i + 954*t]] + R[C[i + 954*t]];
R[i + 2019*t] = Op[i + 955*t] ? R[B[i + 955*t]] * R[C[i + 955*t]] : R[B[i + 955*t]] + R[C[i + 955*t]];
R[i + 2020*t] = Op[i + 956*t] ? R[B[i + 956*t]] * R[C[i + 956*t]] : R[B[i + 956*t]] + R[C[i + 956*t]];
R[i + 2021*t] = Op[i + 957*t] ? R[B[i + 957*t]] * R[C[i + 957*t]] : R[B[i + 957*t]] + R[C[i + 957*t]];
R[i + 2022*t] = Op[i + 958*t] ? R[B[i + 958*t]] * R[C[i + 958*t]] : R[B[i + 958*t]] + R[C[i + 958*t]];
R[i + 2023*t] = Op[i + 959*t] ? R[B[i + 959*t]] * R[C[i + 959*t]] : R[B[i + 959*t]] + R[C[i + 959*t]];
R[i + 2024*t] = Op[i + 960*t] ? R[B[i + 960*t]] * R[C[i + 960*t]] : R[B[i + 960*t]] + R[C[i + 960*t]];
R[i + 2025*t] = Op[i + 961*t] ? R[B[i + 961*t]] * R[C[i + 961*t]] : R[B[i + 961*t]] + R[C[i + 961*t]];
R[i + 2026*t] = Op[i + 962*t] ? R[B[i + 962*t]] * R[C[i + 962*t]] : R[B[i + 962*t]] + R[C[i + 962*t]];
R[i + 2027*t] = Op[i + 963*t] ? R[B[i + 963*t]] * R[C[i + 963*t]] : R[B[i + 963*t]] + R[C[i + 963*t]];
R[i + 2028*t] = Op[i + 964*t] ? R[B[i + 964*t]] * R[C[i + 964*t]] : R[B[i + 964*t]] + R[C[i + 964*t]];
R[i + 2029*t] = Op[i + 965*t] ? R[B[i + 965*t]] * R[C[i + 965*t]] : R[B[i + 965*t]] + R[C[i + 965*t]];
// Block-wide barrier: ops after this point may consume slots written above
// by other threads of the block.
__syncthreads();
R[i + 2030*t] = Op[i + 966*t] ? R[B[i + 966*t]] * R[C[i + 966*t]] : R[B[i + 966*t]] + R[C[i + 966*t]];
R[i + 2031*t] = Op[i + 967*t] ? R[B[i + 967*t]] * R[C[i + 967*t]] : R[B[i + 967*t]] + R[C[i + 967*t]];
R[i + 2032*t] = Op[i + 968*t] ? R[B[i + 968*t]] * R[C[i + 968*t]] : R[B[i + 968*t]] + R[C[i + 968*t]];
R[i + 2033*t] = Op[i + 969*t] ? R[B[i + 969*t]] * R[C[i + 969*t]] : R[B[i + 969*t]] + R[C[i + 969*t]];
R[i + 2034*t] = Op[i + 970*t] ? R[B[i + 970*t]] * R[C[i + 970*t]] : R[B[i + 970*t]] + R[C[i + 970*t]];
R[i + 2035*t] = Op[i + 971*t] ? R[B[i + 971*t]] * R[C[i + 971*t]] : R[B[i + 971*t]] + R[C[i + 971*t]];
R[i + 2036*t] = Op[i + 972*t] ? R[B[i + 972*t]] * R[C[i + 972*t]] : R[B[i + 972*t]] + R[C[i + 972*t]];
R[i + 2037*t] = Op[i + 973*t] ? R[B[i + 973*t]] * R[C[i + 973*t]] : R[B[i + 973*t]] + R[C[i + 973*t]];
R[i + 2038*t] = Op[i + 974*t] ? R[B[i + 974*t]] * R[C[i + 974*t]] : R[B[i + 974*t]] + R[C[i + 974*t]];
R[i + 2039*t] = Op[i + 975*t] ? R[B[i + 975*t]] * R[C[i + 975*t]] : R[B[i + 975*t]] + R[C[i + 975*t]];
R[i + 2040*t] = Op[i + 976*t] ? R[B[i + 976*t]] * R[C[i + 976*t]] : R[B[i + 976*t]] + R[C[i + 976*t]];
R[i + 2041*t] = Op[i + 977*t] ? R[B[i + 977*t]] * R[C[i + 977*t]] : R[B[i + 977*t]] + R[C[i + 977*t]];
R[i + 2042*t] = Op[i + 978*t] ? R[B[i + 978*t]] * R[C[i + 978*t]] : R[B[i + 978*t]] + R[C[i + 978*t]];
R[i + 2043*t] = Op[i + 979*t] ? R[B[i + 979*t]] * R[C[i + 979*t]] : R[B[i + 979*t]] + R[C[i + 979*t]];
R[i + 2044*t] = Op[i + 980*t] ? R[B[i + 980*t]] * R[C[i + 980*t]] : R[B[i + 980*t]] + R[C[i + 980*t]];
R[i + 2045*t] = Op[i + 981*t] ? R[B[i + 981*t]] * R[C[i + 981*t]] : R[B[i + 981*t]] + R[C[i + 981*t]];
R[i + 2046*t] = Op[i + 982*t] ? R[B[i + 982*t]] * R[C[i + 982*t]] : R[B[i + 982*t]] + R[C[i + 982*t]];
R[i + 2047*t] = Op[i + 983*t] ? R[B[i + 983*t]] * R[C[i + 983*t]] : R[B[i + 983*t]] + R[C[i + 983*t]];
R[i + 2048*t] = Op[i + 984*t] ? R[B[i + 984*t]] * R[C[i + 984*t]] : R[B[i + 984*t]] + R[C[i + 984*t]];
R[i + 2049*t] = Op[i + 985*t] ? R[B[i + 985*t]] * R[C[i + 985*t]] : R[B[i + 985*t]] + R[C[i + 985*t]];
R[i + 2050*t] = Op[i + 986*t] ? R[B[i + 986*t]] * R[C[i + 986*t]] : R[B[i + 986*t]] + R[C[i + 986*t]];
R[i + 2051*t] = Op[i + 987*t] ? R[B[i + 987*t]] * R[C[i + 987*t]] : R[B[i + 987*t]] + R[C[i + 987*t]];
R[i + 2052*t] = Op[i + 988*t] ? R[B[i + 988*t]] * R[C[i + 988*t]] : R[B[i + 988*t]] + R[C[i + 988*t]];
R[i + 2053*t] = Op[i + 989*t] ? R[B[i + 989*t]] * R[C[i + 989*t]] : R[B[i + 989*t]] + R[C[i + 989*t]];
R[i + 2054*t] = Op[i + 990*t] ? R[B[i + 990*t]] * R[C[i + 990*t]] : R[B[i + 990*t]] + R[C[i + 990*t]];
R[i + 2055*t] = Op[i + 991*t] ? R[B[i + 991*t]] * R[C[i + 991*t]] : R[B[i + 991*t]] + R[C[i + 991*t]];
R[i + 2056*t] = Op[i + 992*t] ? R[B[i + 992*t]] * R[C[i + 992*t]] : R[B[i + 992*t]] + R[C[i + 992*t]];
R[i + 2057*t] = Op[i + 993*t] ? R[B[i + 993*t]] * R[C[i + 993*t]] : R[B[i + 993*t]] + R[C[i + 993*t]];
R[i + 2058*t] = Op[i + 994*t] ? R[B[i + 994*t]] * R[C[i + 994*t]] : R[B[i + 994*t]] + R[C[i + 994*t]];
R[i + 2059*t] = Op[i + 995*t] ? R[B[i + 995*t]] * R[C[i + 995*t]] : R[B[i + 995*t]] + R[C[i + 995*t]];
R[i + 2060*t] = Op[i + 996*t] ? R[B[i + 996*t]] * R[C[i + 996*t]] : R[B[i + 996*t]] + R[C[i + 996*t]];
R[i + 2061*t] = Op[i + 997*t] ? R[B[i + 997*t]] * R[C[i + 997*t]] : R[B[i + 997*t]] + R[C[i + 997*t]];
R[i + 2062*t] = Op[i + 998*t] ? R[B[i + 998*t]] * R[C[i + 998*t]] : R[B[i + 998*t]] + R[C[i + 998*t]];
R[i + 2063*t] = Op[i + 999*t] ? R[B[i + 999*t]] * R[C[i + 999*t]] : R[B[i + 999*t]] + R[C[i + 999*t]];
R[i + 2064*t] = Op[i + 1000*t] ? R[B[i + 1000*t]] * R[C[i + 1000*t]] : R[B[i + 1000*t]] + R[C[i + 1000*t]];
R[i + 2065*t] = Op[i + 1001*t] ? R[B[i + 1001*t]] * R[C[i + 1001*t]] : R[B[i + 1001*t]] + R[C[i + 1001*t]];
R[i + 2066*t] = Op[i + 1002*t] ? R[B[i + 1002*t]] * R[C[i + 1002*t]] : R[B[i + 1002*t]] + R[C[i + 1002*t]];
R[i + 2067*t] = Op[i + 1003*t] ? R[B[i + 1003*t]] * R[C[i + 1003*t]] : R[B[i + 1003*t]] + R[C[i + 1003*t]];
R[i + 2068*t] = Op[i + 1004*t] ? R[B[i + 1004*t]] * R[C[i + 1004*t]] : R[B[i + 1004*t]] + R[C[i + 1004*t]];
R[i + 2069*t] = Op[i + 1005*t] ? R[B[i + 1005*t]] * R[C[i + 1005*t]] : R[B[i + 1005*t]] + R[C[i + 1005*t]];
R[i + 2070*t] = Op[i + 1006*t] ? R[B[i + 1006*t]] * R[C[i + 1006*t]] : R[B[i + 1006*t]] + R[C[i + 1006*t]];
R[i + 2071*t] = Op[i + 1007*t] ? R[B[i + 1007*t]] * R[C[i + 1007*t]] : R[B[i + 1007*t]] + R[C[i + 1007*t]];
R[i + 2072*t] = Op[i + 1008*t] ? R[B[i + 1008*t]] * R[C[i + 1008*t]] : R[B[i + 1008*t]] + R[C[i + 1008*t]];
R[i + 2073*t] = Op[i + 1009*t] ? R[B[i + 1009*t]] * R[C[i + 1009*t]] : R[B[i + 1009*t]] + R[C[i + 1009*t]];
R[i + 2074*t] = Op[i + 1010*t] ? R[B[i + 1010*t]] * R[C[i + 1010*t]] : R[B[i + 1010*t]] + R[C[i + 1010*t]];
R[i + 2075*t] = Op[i + 1011*t] ? R[B[i + 1011*t]] * R[C[i + 1011*t]] : R[B[i + 1011*t]] + R[C[i + 1011*t]];
R[i + 2076*t] = Op[i + 1012*t] ? R[B[i + 1012*t]] * R[C[i + 1012*t]] : R[B[i + 1012*t]] + R[C[i + 1012*t]];
R[i + 2077*t] = Op[i + 1013*t] ? R[B[i + 1013*t]] * R[C[i + 1013*t]] : R[B[i + 1013*t]] + R[C[i + 1013*t]];
R[i + 2078*t] = Op[i + 1014*t] ? R[B[i + 1014*t]] * R[C[i + 1014*t]] : R[B[i + 1014*t]] + R[C[i + 1014*t]];
R[i + 2079*t] = Op[i + 1015*t] ? R[B[i + 1015*t]] * R[C[i + 1015*t]] : R[B[i + 1015*t]] + R[C[i + 1015*t]];
R[i + 2080*t] = Op[i + 1016*t] ? R[B[i + 1016*t]] * R[C[i + 1016*t]] : R[B[i + 1016*t]] + R[C[i + 1016*t]];
R[i + 2081*t] = Op[i + 1017*t] ? R[B[i + 1017*t]] * R[C[i + 1017*t]] : R[B[i + 1017*t]] + R[C[i + 1017*t]];
R[i + 2082*t] = Op[i + 1018*t] ? R[B[i + 1018*t]] * R[C[i + 1018*t]] : R[B[i + 1018*t]] + R[C[i + 1018*t]];
R[i + 2083*t] = Op[i + 1019*t] ? R[B[i + 1019*t]] * R[C[i + 1019*t]] : R[B[i + 1019*t]] + R[C[i + 1019*t]];
R[i + 2084*t] = Op[i + 1020*t] ? R[B[i + 1020*t]] * R[C[i + 1020*t]] : R[B[i + 1020*t]] + R[C[i + 1020*t]];
R[i + 2085*t] = Op[i + 1021*t] ? R[B[i + 1021*t]] * R[C[i + 1021*t]] : R[B[i + 1021*t]] + R[C[i + 1021*t]];
R[i + 2086*t] = Op[i + 1022*t] ? R[B[i + 1022*t]] * R[C[i + 1022*t]] : R[B[i + 1022*t]] + R[C[i + 1022*t]];
R[i + 2087*t] = Op[i + 1023*t] ? R[B[i + 1023*t]] * R[C[i + 1023*t]] : R[B[i + 1023*t]] + R[C[i + 1023*t]];
R[i + 2088*t] = Op[i + 1024*t] ? R[B[i + 1024*t]] * R[C[i + 1024*t]] : R[B[i + 1024*t]] + R[C[i + 1024*t]];
R[i + 2089*t] = Op[i + 1025*t] ? R[B[i + 1025*t]] * R[C[i + 1025*t]] : R[B[i + 1025*t]] + R[C[i + 1025*t]];
R[i + 2090*t] = Op[i + 1026*t] ? R[B[i + 1026*t]] * R[C[i + 1026*t]] : R[B[i + 1026*t]] + R[C[i + 1026*t]];
R[i + 2091*t] = Op[i + 1027*t] ? R[B[i + 1027*t]] * R[C[i + 1027*t]] : R[B[i + 1027*t]] + R[C[i + 1027*t]];
R[i + 2092*t] = Op[i + 1028*t] ? R[B[i + 1028*t]] * R[C[i + 1028*t]] : R[B[i + 1028*t]] + R[C[i + 1028*t]];
R[i + 2093*t] = Op[i + 1029*t] ? R[B[i + 1029*t]] * R[C[i + 1029*t]] : R[B[i + 1029*t]] + R[C[i + 1029*t]];
R[i + 2094*t] = Op[i + 1030*t] ? R[B[i + 1030*t]] * R[C[i + 1030*t]] : R[B[i + 1030*t]] + R[C[i + 1030*t]];
R[i + 2095*t] = Op[i + 1031*t] ? R[B[i + 1031*t]] * R[C[i + 1031*t]] : R[B[i + 1031*t]] + R[C[i + 1031*t]];
R[i + 2096*t] = Op[i + 1032*t] ? R[B[i + 1032*t]] * R[C[i + 1032*t]] : R[B[i + 1032*t]] + R[C[i + 1032*t]];
R[i + 2097*t] = Op[i + 1033*t] ? R[B[i + 1033*t]] * R[C[i + 1033*t]] : R[B[i + 1033*t]] + R[C[i + 1033*t]];
R[i + 2098*t] = Op[i + 1034*t] ? R[B[i + 1034*t]] * R[C[i + 1034*t]] : R[B[i + 1034*t]] + R[C[i + 1034*t]];
R[i + 2099*t] = Op[i + 1035*t] ? R[B[i + 1035*t]] * R[C[i + 1035*t]] : R[B[i + 1035*t]] + R[C[i + 1035*t]];
R[i + 2100*t] = Op[i + 1036*t] ? R[B[i + 1036*t]] * R[C[i + 1036*t]] : R[B[i + 1036*t]] + R[C[i + 1036*t]];
R[i + 2101*t] = Op[i + 1037*t] ? R[B[i + 1037*t]] * R[C[i + 1037*t]] : R[B[i + 1037*t]] + R[C[i + 1037*t]];
R[i + 2102*t] = Op[i + 1038*t] ? R[B[i + 1038*t]] * R[C[i + 1038*t]] : R[B[i + 1038*t]] + R[C[i + 1038*t]];
R[i + 2103*t] = Op[i + 1039*t] ? R[B[i + 1039*t]] * R[C[i + 1039*t]] : R[B[i + 1039*t]] + R[C[i + 1039*t]];
R[i + 2104*t] = Op[i + 1040*t] ? R[B[i + 1040*t]] * R[C[i + 1040*t]] : R[B[i + 1040*t]] + R[C[i + 1040*t]];
R[i + 2105*t] = Op[i + 1041*t] ? R[B[i + 1041*t]] * R[C[i + 1041*t]] : R[B[i + 1041*t]] + R[C[i + 1041*t]];
R[i + 2106*t] = Op[i + 1042*t] ? R[B[i + 1042*t]] * R[C[i + 1042*t]] : R[B[i + 1042*t]] + R[C[i + 1042*t]];
R[i + 2107*t] = Op[i + 1043*t] ? R[B[i + 1043*t]] * R[C[i + 1043*t]] : R[B[i + 1043*t]] + R[C[i + 1043*t]];
R[i + 2108*t] = Op[i + 1044*t] ? R[B[i + 1044*t]] * R[C[i + 1044*t]] : R[B[i + 1044*t]] + R[C[i + 1044*t]];
R[i + 2109*t] = Op[i + 1045*t] ? R[B[i + 1045*t]] * R[C[i + 1045*t]] : R[B[i + 1045*t]] + R[C[i + 1045*t]];
R[i + 2110*t] = Op[i + 1046*t] ? R[B[i + 1046*t]] * R[C[i + 1046*t]] : R[B[i + 1046*t]] + R[C[i + 1046*t]];
// Barrier between generated phases (see header note).
__syncthreads();
R[i + 2111*t] = Op[i + 1047*t] ? R[B[i + 1047*t]] * R[C[i + 1047*t]] : R[B[i + 1047*t]] + R[C[i + 1047*t]];
R[i + 2112*t] = Op[i + 1048*t] ? R[B[i + 1048*t]] * R[C[i + 1048*t]] : R[B[i + 1048*t]] + R[C[i + 1048*t]];
R[i + 2113*t] = Op[i + 1049*t] ? R[B[i + 1049*t]] * R[C[i + 1049*t]] : R[B[i + 1049*t]] + R[C[i + 1049*t]];
R[i + 2114*t] = Op[i + 1050*t] ? R[B[i + 1050*t]] * R[C[i + 1050*t]] : R[B[i + 1050*t]] + R[C[i + 1050*t]];
R[i + 2115*t] = Op[i + 1051*t] ? R[B[i + 1051*t]] * R[C[i + 1051*t]] : R[B[i + 1051*t]] + R[C[i + 1051*t]];
R[i + 2116*t] = Op[i + 1052*t] ? R[B[i + 1052*t]] * R[C[i + 1052*t]] : R[B[i + 1052*t]] + R[C[i + 1052*t]];
R[i + 2117*t] = Op[i + 1053*t] ? R[B[i + 1053*t]] * R[C[i + 1053*t]] : R[B[i + 1053*t]] + R[C[i + 1053*t]];
R[i + 2118*t] = Op[i + 1054*t] ? R[B[i + 1054*t]] * R[C[i + 1054*t]] : R[B[i + 1054*t]] + R[C[i + 1054*t]];
R[i + 2119*t] = Op[i + 1055*t] ? R[B[i + 1055*t]] * R[C[i + 1055*t]] : R[B[i + 1055*t]] + R[C[i + 1055*t]];
R[i + 2120*t] = Op[i + 1056*t] ? R[B[i + 1056*t]] * R[C[i + 1056*t]] : R[B[i + 1056*t]] + R[C[i + 1056*t]];
R[i + 2121*t] = Op[i + 1057*t] ? R[B[i + 1057*t]] * R[C[i + 1057*t]] : R[B[i + 1057*t]] + R[C[i + 1057*t]];
R[i + 2122*t] = Op[i + 1058*t] ? R[B[i + 1058*t]] * R[C[i + 1058*t]] : R[B[i + 1058*t]] + R[C[i + 1058*t]];
R[i + 2123*t] = Op[i + 1059*t] ? R[B[i + 1059*t]] * R[C[i + 1059*t]] : R[B[i + 1059*t]] + R[C[i + 1059*t]];
R[i + 2124*t] = Op[i + 1060*t] ? R[B[i + 1060*t]] * R[C[i + 1060*t]] : R[B[i + 1060*t]] + R[C[i + 1060*t]];
R[i + 2125*t] = Op[i + 1061*t] ? R[B[i + 1061*t]] * R[C[i + 1061*t]] : R[B[i + 1061*t]] + R[C[i + 1061*t]];
R[i + 2126*t] = Op[i + 1062*t] ? R[B[i + 1062*t]] * R[C[i + 1062*t]] : R[B[i + 1062*t]] + R[C[i + 1062*t]];
R[i + 2127*t] = Op[i + 1063*t] ? R[B[i + 1063*t]] * R[C[i + 1063*t]] : R[B[i + 1063*t]] + R[C[i + 1063*t]];
R[i + 2128*t] = Op[i + 1064*t] ? R[B[i + 1064*t]] * R[C[i + 1064*t]] : R[B[i + 1064*t]] + R[C[i + 1064*t]];
R[i + 2129*t] = Op[i + 1065*t] ? R[B[i + 1065*t]] * R[C[i + 1065*t]] : R[B[i + 1065*t]] + R[C[i + 1065*t]];
R[i + 2130*t] = Op[i + 1066*t] ? R[B[i + 1066*t]] * R[C[i + 1066*t]] : R[B[i + 1066*t]] + R[C[i + 1066*t]];
R[i + 2131*t] = Op[i + 1067*t] ? R[B[i + 1067*t]] * R[C[i + 1067*t]] : R[B[i + 1067*t]] + R[C[i + 1067*t]];
R[i + 2132*t] = Op[i + 1068*t] ? R[B[i + 1068*t]] * R[C[i + 1068*t]] : R[B[i + 1068*t]] + R[C[i + 1068*t]];
R[i + 2133*t] = Op[i + 1069*t] ? R[B[i + 1069*t]] * R[C[i + 1069*t]] : R[B[i + 1069*t]] + R[C[i + 1069*t]];
R[i + 2134*t] = Op[i + 1070*t] ? R[B[i + 1070*t]] * R[C[i + 1070*t]] : R[B[i + 1070*t]] + R[C[i + 1070*t]];
R[i + 2135*t] = Op[i + 1071*t] ? R[B[i + 1071*t]] * R[C[i + 1071*t]] : R[B[i + 1071*t]] + R[C[i + 1071*t]];
R[i + 2136*t] = Op[i + 1072*t] ? R[B[i + 1072*t]] * R[C[i + 1072*t]] : R[B[i + 1072*t]] + R[C[i + 1072*t]];
R[i + 2137*t] = Op[i + 1073*t] ? R[B[i + 1073*t]] * R[C[i + 1073*t]] : R[B[i + 1073*t]] + R[C[i + 1073*t]];
R[i + 2138*t] = Op[i + 1074*t] ? R[B[i + 1074*t]] * R[C[i + 1074*t]] : R[B[i + 1074*t]] + R[C[i + 1074*t]];
R[i + 2139*t] = Op[i + 1075*t] ? R[B[i + 1075*t]] * R[C[i + 1075*t]] : R[B[i + 1075*t]] + R[C[i + 1075*t]];
R[i + 2140*t] = Op[i + 1076*t] ? R[B[i + 1076*t]] * R[C[i + 1076*t]] : R[B[i + 1076*t]] + R[C[i + 1076*t]];
R[i + 2141*t] = Op[i + 1077*t] ? R[B[i + 1077*t]] * R[C[i + 1077*t]] : R[B[i + 1077*t]] + R[C[i + 1077*t]];
R[i + 2142*t] = Op[i + 1078*t] ? R[B[i + 1078*t]] * R[C[i + 1078*t]] : R[B[i + 1078*t]] + R[C[i + 1078*t]];
R[i + 2143*t] = Op[i + 1079*t] ? R[B[i + 1079*t]] * R[C[i + 1079*t]] : R[B[i + 1079*t]] + R[C[i + 1079*t]];
R[i + 2144*t] = Op[i + 1080*t] ? R[B[i + 1080*t]] * R[C[i + 1080*t]] : R[B[i + 1080*t]] + R[C[i + 1080*t]];
R[i + 2145*t] = Op[i + 1081*t] ? R[B[i + 1081*t]] * R[C[i + 1081*t]] : R[B[i + 1081*t]] + R[C[i + 1081*t]];
R[i + 2146*t] = Op[i + 1082*t] ? R[B[i + 1082*t]] * R[C[i + 1082*t]] : R[B[i + 1082*t]] + R[C[i + 1082*t]];
R[i + 2147*t] = Op[i + 1083*t] ? R[B[i + 1083*t]] * R[C[i + 1083*t]] : R[B[i + 1083*t]] + R[C[i + 1083*t]];
R[i + 2148*t] = Op[i + 1084*t] ? R[B[i + 1084*t]] * R[C[i + 1084*t]] : R[B[i + 1084*t]] + R[C[i + 1084*t]];
R[i + 2149*t] = Op[i + 1085*t] ? R[B[i + 1085*t]] * R[C[i + 1085*t]] : R[B[i + 1085*t]] + R[C[i + 1085*t]];
R[i + 2150*t] = Op[i + 1086*t] ? R[B[i + 1086*t]] * R[C[i + 1086*t]] : R[B[i + 1086*t]] + R[C[i + 1086*t]];
R[i + 2151*t] = Op[i + 1087*t] ? R[B[i + 1087*t]] * R[C[i + 1087*t]] : R[B[i + 1087*t]] + R[C[i + 1087*t]];
R[i + 2152*t] = Op[i + 1088*t] ? R[B[i + 1088*t]] * R[C[i + 1088*t]] : R[B[i + 1088*t]] + R[C[i + 1088*t]];
R[i + 2153*t] = Op[i + 1089*t] ? R[B[i + 1089*t]] * R[C[i + 1089*t]] : R[B[i + 1089*t]] + R[C[i + 1089*t]];
R[i + 2154*t] = Op[i + 1090*t] ? R[B[i + 1090*t]] * R[C[i + 1090*t]] : R[B[i + 1090*t]] + R[C[i + 1090*t]];
R[i + 2155*t] = Op[i + 1091*t] ? R[B[i + 1091*t]] * R[C[i + 1091*t]] : R[B[i + 1091*t]] + R[C[i + 1091*t]];
R[i + 2156*t] = Op[i + 1092*t] ? R[B[i + 1092*t]] * R[C[i + 1092*t]] : R[B[i + 1092*t]] + R[C[i + 1092*t]];
R[i + 2157*t] = Op[i + 1093*t] ? R[B[i + 1093*t]] * R[C[i + 1093*t]] : R[B[i + 1093*t]] + R[C[i + 1093*t]];
R[i + 2158*t] = Op[i + 1094*t] ? R[B[i + 1094*t]] * R[C[i + 1094*t]] : R[B[i + 1094*t]] + R[C[i + 1094*t]];
R[i + 2159*t] = Op[i + 1095*t] ? R[B[i + 1095*t]] * R[C[i + 1095*t]] : R[B[i + 1095*t]] + R[C[i + 1095*t]];
R[i + 2160*t] = Op[i + 1096*t] ? R[B[i + 1096*t]] * R[C[i + 1096*t]] : R[B[i + 1096*t]] + R[C[i + 1096*t]];
R[i + 2161*t] = Op[i + 1097*t] ? R[B[i + 1097*t]] * R[C[i + 1097*t]] : R[B[i + 1097*t]] + R[C[i + 1097*t]];
R[i + 2162*t] = Op[i + 1098*t] ? R[B[i + 1098*t]] * R[C[i + 1098*t]] : R[B[i + 1098*t]] + R[C[i + 1098*t]];
R[i + 2163*t] = Op[i + 1099*t] ? R[B[i + 1099*t]] * R[C[i + 1099*t]] : R[B[i + 1099*t]] + R[C[i + 1099*t]];
R[i + 2164*t] = Op[i + 1100*t] ? R[B[i + 1100*t]] * R[C[i + 1100*t]] : R[B[i + 1100*t]] + R[C[i + 1100*t]];
R[i + 2165*t] = Op[i + 1101*t] ? R[B[i + 1101*t]] * R[C[i + 1101*t]] : R[B[i + 1101*t]] + R[C[i + 1101*t]];
R[i + 2166*t] = Op[i + 1102*t] ? R[B[i + 1102*t]] * R[C[i + 1102*t]] : R[B[i + 1102*t]] + R[C[i + 1102*t]];
R[i + 2167*t] = Op[i + 1103*t] ? R[B[i + 1103*t]] * R[C[i + 1103*t]] : R[B[i + 1103*t]] + R[C[i + 1103*t]];
R[i + 2168*t] = Op[i + 1104*t] ? R[B[i + 1104*t]] * R[C[i + 1104*t]] : R[B[i + 1104*t]] + R[C[i + 1104*t]];
R[i + 2169*t] = Op[i + 1105*t] ? R[B[i + 1105*t]] * R[C[i + 1105*t]] : R[B[i + 1105*t]] + R[C[i + 1105*t]];
R[i + 2170*t] = Op[i + 1106*t] ? R[B[i + 1106*t]] * R[C[i + 1106*t]] : R[B[i + 1106*t]] + R[C[i + 1106*t]];
R[i + 2171*t] = Op[i + 1107*t] ? R[B[i + 1107*t]] * R[C[i + 1107*t]] : R[B[i + 1107*t]] + R[C[i + 1107*t]];
R[i + 2172*t] = Op[i + 1108*t] ? R[B[i + 1108*t]] * R[C[i + 1108*t]] : R[B[i + 1108*t]] + R[C[i + 1108*t]];
R[i + 2173*t] = Op[i + 1109*t] ? R[B[i + 1109*t]] * R[C[i + 1109*t]] : R[B[i + 1109*t]] + R[C[i + 1109*t]];
R[i + 2174*t] = Op[i + 1110*t] ? R[B[i + 1110*t]] * R[C[i + 1110*t]] : R[B[i + 1110*t]] + R[C[i + 1110*t]];
R[i + 2175*t] = Op[i + 1111*t] ? R[B[i + 1111*t]] * R[C[i + 1111*t]] : R[B[i + 1111*t]] + R[C[i + 1111*t]];
R[i + 2176*t] = Op[i + 1112*t] ? R[B[i + 1112*t]] * R[C[i + 1112*t]] : R[B[i + 1112*t]] + R[C[i + 1112*t]];
R[i + 2177*t] = Op[i + 1113*t] ? R[B[i + 1113*t]] * R[C[i + 1113*t]] : R[B[i + 1113*t]] + R[C[i + 1113*t]];
R[i + 2178*t] = Op[i + 1114*t] ? R[B[i + 1114*t]] * R[C[i + 1114*t]] : R[B[i + 1114*t]] + R[C[i + 1114*t]];
R[i + 2179*t] = Op[i + 1115*t] ? R[B[i + 1115*t]] * R[C[i + 1115*t]] : R[B[i + 1115*t]] + R[C[i + 1115*t]];
R[i + 2180*t] = Op[i + 1116*t] ? R[B[i + 1116*t]] * R[C[i + 1116*t]] : R[B[i + 1116*t]] + R[C[i + 1116*t]];
R[i + 2181*t] = Op[i + 1117*t] ? R[B[i + 1117*t]] * R[C[i + 1117*t]] : R[B[i + 1117*t]] + R[C[i + 1117*t]];
R[i + 2182*t] = Op[i + 1118*t] ? R[B[i + 1118*t]] * R[C[i + 1118*t]] : R[B[i + 1118*t]] + R[C[i + 1118*t]];
// Barrier between generated phases (see header note).
__syncthreads();
R[i + 2183*t] = Op[i + 1119*t] ? R[B[i + 1119*t]] * R[C[i + 1119*t]] : R[B[i + 1119*t]] + R[C[i + 1119*t]];
R[i + 2184*t] = Op[i + 1120*t] ? R[B[i + 1120*t]] * R[C[i + 1120*t]] : R[B[i + 1120*t]] + R[C[i + 1120*t]];
R[i + 2185*t] = Op[i + 1121*t] ? R[B[i + 1121*t]] * R[C[i + 1121*t]] : R[B[i + 1121*t]] + R[C[i + 1121*t]];
R[i + 2186*t] = Op[i + 1122*t] ? R[B[i + 1122*t]] * R[C[i + 1122*t]] : R[B[i + 1122*t]] + R[C[i + 1122*t]];
R[i + 2187*t] = Op[i + 1123*t] ? R[B[i + 1123*t]] * R[C[i + 1123*t]] : R[B[i + 1123*t]] + R[C[i + 1123*t]];
R[i + 2188*t] = Op[i + 1124*t] ? R[B[i + 1124*t]] * R[C[i + 1124*t]] : R[B[i + 1124*t]] + R[C[i + 1124*t]];
R[i + 2189*t] = Op[i + 1125*t] ? R[B[i + 1125*t]] * R[C[i + 1125*t]] : R[B[i + 1125*t]] + R[C[i + 1125*t]];
R[i + 2190*t] = Op[i + 1126*t] ? R[B[i + 1126*t]] * R[C[i + 1126*t]] : R[B[i + 1126*t]] + R[C[i + 1126*t]];
R[i + 2191*t] = Op[i + 1127*t] ? R[B[i + 1127*t]] * R[C[i + 1127*t]] : R[B[i + 1127*t]] + R[C[i + 1127*t]];
R[i + 2192*t] = Op[i + 1128*t] ? R[B[i + 1128*t]] * R[C[i + 1128*t]] : R[B[i + 1128*t]] + R[C[i + 1128*t]];
R[i + 2193*t] = Op[i + 1129*t] ? R[B[i + 1129*t]] * R[C[i + 1129*t]] : R[B[i + 1129*t]] + R[C[i + 1129*t]];
R[i + 2194*t] = Op[i + 1130*t] ? R[B[i + 1130*t]] * R[C[i + 1130*t]] : R[B[i + 1130*t]] + R[C[i + 1130*t]];
R[i + 2195*t] = Op[i + 1131*t] ? R[B[i + 1131*t]] * R[C[i + 1131*t]] : R[B[i + 1131*t]] + R[C[i + 1131*t]];
R[i + 2196*t] = Op[i + 1132*t] ? R[B[i + 1132*t]] * R[C[i + 1132*t]] : R[B[i + 1132*t]] + R[C[i + 1132*t]];
R[i + 2197*t] = Op[i + 1133*t] ? R[B[i + 1133*t]] * R[C[i + 1133*t]] : R[B[i + 1133*t]] + R[C[i + 1133*t]];
R[i + 2198*t] = Op[i + 1134*t] ? R[B[i + 1134*t]] * R[C[i + 1134*t]] : R[B[i + 1134*t]] + R[C[i + 1134*t]];
R[i + 2199*t] = Op[i + 1135*t] ? R[B[i + 1135*t]] * R[C[i + 1135*t]] : R[B[i + 1135*t]] + R[C[i + 1135*t]];
R[i + 2200*t] = Op[i + 1136*t] ? R[B[i + 1136*t]] * R[C[i + 1136*t]] : R[B[i + 1136*t]] + R[C[i + 1136*t]];
R[i + 2201*t] = Op[i + 1137*t] ? R[B[i + 1137*t]] * R[C[i + 1137*t]] : R[B[i + 1137*t]] + R[C[i + 1137*t]];
R[i + 2202*t] = Op[i + 1138*t] ? R[B[i + 1138*t]] * R[C[i + 1138*t]] : R[B[i + 1138*t]] + R[C[i + 1138*t]];
R[i + 2203*t] = Op[i + 1139*t] ? R[B[i + 1139*t]] * R[C[i + 1139*t]] : R[B[i + 1139*t]] + R[C[i + 1139*t]];
R[i + 2204*t] = Op[i + 1140*t] ? R[B[i + 1140*t]] * R[C[i + 1140*t]] : R[B[i + 1140*t]] + R[C[i + 1140*t]];
R[i + 2205*t] = Op[i + 1141*t] ? R[B[i + 1141*t]] * R[C[i + 1141*t]] : R[B[i + 1141*t]] + R[C[i + 1141*t]];
R[i + 2206*t] = Op[i + 1142*t] ? R[B[i + 1142*t]] * R[C[i + 1142*t]] : R[B[i + 1142*t]] + R[C[i + 1142*t]];
R[i + 2207*t] = Op[i + 1143*t] ? R[B[i + 1143*t]] * R[C[i + 1143*t]] : R[B[i + 1143*t]] + R[C[i + 1143*t]];
R[i + 2208*t] = Op[i + 1144*t] ? R[B[i + 1144*t]] * R[C[i + 1144*t]] : R[B[i + 1144*t]] + R[C[i + 1144*t]];
R[i + 2209*t] = Op[i + 1145*t] ? R[B[i + 1145*t]] * R[C[i + 1145*t]] : R[B[i + 1145*t]] + R[C[i + 1145*t]];
R[i + 2210*t] = Op[i + 1146*t] ? R[B[i + 1146*t]] * R[C[i + 1146*t]] : R[B[i + 1146*t]] + R[C[i + 1146*t]];
R[i + 2211*t] = Op[i + 1147*t] ? R[B[i + 1147*t]] * R[C[i + 1147*t]] : R[B[i + 1147*t]] + R[C[i + 1147*t]];
R[i + 2212*t] = Op[i + 1148*t] ? R[B[i + 1148*t]] * R[C[i + 1148*t]] : R[B[i + 1148*t]] + R[C[i + 1148*t]];
R[i + 2213*t] = Op[i + 1149*t] ? R[B[i + 1149*t]] * R[C[i + 1149*t]] : R[B[i + 1149*t]] + R[C[i + 1149*t]];
R[i + 2214*t] = Op[i + 1150*t] ? R[B[i + 1150*t]] * R[C[i + 1150*t]] : R[B[i + 1150*t]] + R[C[i + 1150*t]];
R[i + 2215*t] = Op[i + 1151*t] ? R[B[i + 1151*t]] * R[C[i + 1151*t]] : R[B[i + 1151*t]] + R[C[i + 1151*t]];
R[i + 2216*t] = Op[i + 1152*t] ? R[B[i + 1152*t]] * R[C[i + 1152*t]] : R[B[i + 1152*t]] + R[C[i + 1152*t]];
R[i + 2217*t] = Op[i + 1153*t] ? R[B[i + 1153*t]] * R[C[i + 1153*t]] : R[B[i + 1153*t]] + R[C[i + 1153*t]];
// Barrier between generated phases (see header note).
__syncthreads();
R[i + 2218*t] = Op[i + 1154*t] ? R[B[i + 1154*t]] * R[C[i + 1154*t]] : R[B[i + 1154*t]] + R[C[i + 1154*t]];
R[i + 2219*t] = Op[i + 1155*t] ? R[B[i + 1155*t]] * R[C[i + 1155*t]] : R[B[i + 1155*t]] + R[C[i + 1155*t]];
R[i + 2220*t] = Op[i + 1156*t] ? R[B[i + 1156*t]] * R[C[i + 1156*t]] : R[B[i + 1156*t]] + R[C[i + 1156*t]];
R[i + 2221*t] = Op[i + 1157*t] ? R[B[i + 1157*t]] * R[C[i + 1157*t]] : R[B[i + 1157*t]] + R[C[i + 1157*t]];
R[i + 2222*t] = Op[i + 1158*t] ? R[B[i + 1158*t]] * R[C[i + 1158*t]] : R[B[i + 1158*t]] + R[C[i + 1158*t]];
R[i + 2223*t] = Op[i + 1159*t] ? R[B[i + 1159*t]] * R[C[i + 1159*t]] : R[B[i + 1159*t]] + R[C[i + 1159*t]];
R[i + 2224*t] = Op[i + 1160*t] ? R[B[i + 1160*t]] * R[C[i + 1160*t]] : R[B[i + 1160*t]] + R[C[i + 1160*t]];
R[i + 2225*t] = Op[i + 1161*t] ? R[B[i + 1161*t]] * R[C[i + 1161*t]] : R[B[i + 1161*t]] + R[C[i + 1161*t]];
R[i + 2226*t] = Op[i + 1162*t] ? R[B[i + 1162*t]] * R[C[i + 1162*t]] : R[B[i + 1162*t]] + R[C[i + 1162*t]];
R[i + 2227*t] = Op[i + 1163*t] ? R[B[i + 1163*t]] * R[C[i + 1163*t]] : R[B[i + 1163*t]] + R[C[i + 1163*t]];
R[i + 2228*t] = Op[i + 1164*t] ? R[B[i + 1164*t]] * R[C[i + 1164*t]] : R[B[i + 1164*t]] + R[C[i + 1164*t]];
R[i + 2229*t] = Op[i + 1165*t] ? R[B[i + 1165*t]] * R[C[i + 1165*t]] : R[B[i + 1165*t]] + R[C[i + 1165*t]];
R[i + 2230*t] = Op[i + 1166*t] ? R[B[i + 1166*t]] * R[C[i + 1166*t]] : R[B[i + 1166*t]] + R[C[i + 1166*t]];
R[i + 2231*t] = Op[i + 1167*t] ? R[B[i + 1167*t]] * R[C[i + 1167*t]] : R[B[i + 1167*t]] + R[C[i + 1167*t]];
R[i + 2232*t] = Op[i + 1168*t] ? R[B[i + 1168*t]] * R[C[i + 1168*t]] : R[B[i + 1168*t]] + R[C[i + 1168*t]];
R[i + 2233*t] = Op[i + 1169*t] ? R[B[i + 1169*t]] * R[C[i + 1169*t]] : R[B[i + 1169*t]] + R[C[i + 1169*t]];
R[i + 2234*t] = Op[i + 1170*t] ? R[B[i + 1170*t]] * R[C[i + 1170*t]] : R[B[i + 1170*t]] + R[C[i + 1170*t]];
R[i + 2235*t] = Op[i + 1171*t] ? R[B[i + 1171*t]] * R[C[i + 1171*t]] : R[B[i + 1171*t]] + R[C[i + 1171*t]];
R[i + 2236*t] = Op[i + 1172*t] ? R[B[i + 1172*t]] * R[C[i + 1172*t]] : R[B[i + 1172*t]] + R[C[i + 1172*t]];
R[i + 2237*t] = Op[i + 1173*t] ? R[B[i + 1173*t]] * R[C[i + 1173*t]] : R[B[i + 1173*t]] + R[C[i + 1173*t]];
R[i + 2238*t] = Op[i + 1174*t] ? R[B[i + 1174*t]] * R[C[i + 1174*t]] : R[B[i + 1174*t]] + R[C[i + 1174*t]];
R[i + 2239*t] = Op[i + 1175*t] ? R[B[i + 1175*t]] * R[C[i + 1175*t]] : R[B[i + 1175*t]] + R[C[i + 1175*t]];
R[i + 2240*t] = Op[i + 1176*t] ? R[B[i + 1176*t]] * R[C[i + 1176*t]] : R[B[i + 1176*t]] + R[C[i + 1176*t]];
R[i + 2241*t] = Op[i + 1177*t] ? R[B[i + 1177*t]] * R[C[i + 1177*t]] : R[B[i + 1177*t]] + R[C[i + 1177*t]];
R[i + 2242*t] = Op[i + 1178*t] ? R[B[i + 1178*t]] * R[C[i + 1178*t]] : R[B[i + 1178*t]] + R[C[i + 1178*t]];
R[i + 2243*t] = Op[i + 1179*t] ? R[B[i + 1179*t]] * R[C[i + 1179*t]] : R[B[i + 1179*t]] + R[C[i + 1179*t]];
R[i + 2244*t] = Op[i + 1180*t] ? R[B[i + 1180*t]] * R[C[i + 1180*t]] : R[B[i + 1180*t]] + R[C[i + 1180*t]];
R[i + 2245*t] = Op[i + 1181*t] ? R[B[i + 1181*t]] * R[C[i + 1181*t]] : R[B[i + 1181*t]] + R[C[i + 1181*t]];
R[i + 2246*t] = Op[i + 1182*t] ? R[B[i + 1182*t]] * R[C[i + 1182*t]] : R[B[i + 1182*t]] + R[C[i + 1182*t]];
R[i + 2247*t] = Op[i + 1183*t] ? R[B[i + 1183*t]] * R[C[i + 1183*t]] : R[B[i + 1183*t]] + R[C[i + 1183*t]];
R[i + 2248*t] = Op[i + 1184*t] ? R[B[i + 1184*t]] * R[C[i + 1184*t]] : R[B[i + 1184*t]] + R[C[i + 1184*t]];
R[i + 2249*t] = Op[i + 1185*t] ? R[B[i + 1185*t]] * R[C[i + 1185*t]] : R[B[i + 1185*t]] + R[C[i + 1185*t]];
R[i + 2250*t] = Op[i + 1186*t] ? R[B[i + 1186*t]] * R[C[i + 1186*t]] : R[B[i + 1186*t]] + R[C[i + 1186*t]];
R[i + 2251*t] = Op[i + 1187*t] ? R[B[i + 1187*t]] * R[C[i + 1187*t]] : R[B[i + 1187*t]] + R[C[i + 1187*t]];
// Barrier between generated phases (see header note).
__syncthreads();
R[i + 2252*t] = Op[i + 1188*t] ? R[B[i + 1188*t]] * R[C[i + 1188*t]] : R[B[i + 1188*t]] + R[C[i + 1188*t]];
R[i + 2253*t] = Op[i + 1189*t] ? R[B[i + 1189*t]] * R[C[i + 1189*t]] : R[B[i + 1189*t]] + R[C[i + 1189*t]];
R[i + 2254*t] = Op[i + 1190*t] ? R[B[i + 1190*t]] * R[C[i + 1190*t]] : R[B[i + 1190*t]] + R[C[i + 1190*t]];
R[i + 2255*t] = Op[i + 1191*t] ? R[B[i + 1191*t]] * R[C[i + 1191*t]] : R[B[i + 1191*t]] + R[C[i + 1191*t]];
R[i + 2256*t] = Op[i + 1192*t] ? R[B[i + 1192*t]] * R[C[i + 1192*t]] : R[B[i + 1192*t]] + R[C[i + 1192*t]];
R[i + 2257*t] = Op[i + 1193*t] ? R[B[i + 1193*t]] * R[C[i + 1193*t]] : R[B[i + 1193*t]] + R[C[i + 1193*t]];
R[i + 2258*t] = Op[i + 1194*t] ? R[B[i + 1194*t]] * R[C[i + 1194*t]] : R[B[i + 1194*t]] + R[C[i + 1194*t]];
R[i + 2259*t] = Op[i + 1195*t] ? R[B[i + 1195*t]] * R[C[i + 1195*t]] : R[B[i + 1195*t]] + R[C[i + 1195*t]];
R[i + 2260*t] = Op[i + 1196*t] ? R[B[i + 1196*t]] * R[C[i + 1196*t]] : R[B[i + 1196*t]] + R[C[i + 1196*t]];
R[i + 2261*t] = Op[i + 1197*t] ? R[B[i + 1197*t]] * R[C[i + 1197*t]] : R[B[i + 1197*t]] + R[C[i + 1197*t]];
R[i + 2262*t] = Op[i + 1198*t] ? R[B[i + 1198*t]] * R[C[i + 1198*t]] : R[B[i + 1198*t]] + R[C[i + 1198*t]];
R[i + 2263*t] = Op[i + 1199*t] ? R[B[i + 1199*t]] * R[C[i + 1199*t]] : R[B[i + 1199*t]] + R[C[i + 1199*t]];
R[i + 2264*t] = Op[i + 1200*t] ? R[B[i + 1200*t]] * R[C[i + 1200*t]] : R[B[i + 1200*t]] + R[C[i + 1200*t]];
R[i + 2265*t] = Op[i + 1201*t] ? R[B[i + 1201*t]] * R[C[i + 1201*t]] : R[B[i + 1201*t]] + R[C[i + 1201*t]];
R[i + 2266*t] = Op[i + 1202*t] ? R[B[i + 1202*t]] * R[C[i + 1202*t]] : R[B[i + 1202*t]] + R[C[i + 1202*t]];
R[i + 2267*t] = Op[i + 1203*t] ? R[B[i + 1203*t]] * R[C[i + 1203*t]] : R[B[i + 1203*t]] + R[C[i + 1203*t]];
R[i + 2268*t] = Op[i + 1204*t] ? R[B[i + 1204*t]] * R[C[i + 1204*t]] : R[B[i + 1204*t]] + R[C[i + 1204*t]];
R[i + 2269*t] = Op[i + 1205*t] ? R[B[i + 1205*t]] * R[C[i + 1205*t]] : R[B[i + 1205*t]] + R[C[i + 1205*t]];
R[i + 2270*t] = Op[i + 1206*t] ? R[B[i + 1206*t]] * R[C[i + 1206*t]] : R[B[i + 1206*t]] + R[C[i + 1206*t]];
R[i + 2271*t] = Op[i + 1207*t] ? R[B[i + 1207*t]] * R[C[i + 1207*t]] : R[B[i + 1207*t]] + R[C[i + 1207*t]];
R[i + 2272*t] = Op[i + 1208*t] ? R[B[i + 1208*t]] * R[C[i + 1208*t]] : R[B[i + 1208*t]] + R[C[i + 1208*t]];
R[i + 2273*t] = Op[i + 1209*t] ? R[B[i + 1209*t]] * R[C[i + 1209*t]] : R[B[i + 1209*t]] + R[C[i + 1209*t]];
R[i + 2274*t] = Op[i + 1210*t] ? R[B[i + 1210*t]] * R[C[i + 1210*t]] : R[B[i + 1210*t]] + R[C[i + 1210*t]];
R[i + 2275*t] = Op[i + 1211*t] ? R[B[i + 1211*t]] * R[C[i + 1211*t]] : R[B[i + 1211*t]] + R[C[i + 1211*t]];
// Barrier between generated phases (see header note).
__syncthreads();
R[i + 2276*t] = Op[i + 1212*t] ? R[B[i + 1212*t]] * R[C[i + 1212*t]] : R[B[i + 1212*t]] + R[C[i + 1212*t]];
R[i + 2277*t] = Op[i + 1213*t] ? R[B[i + 1213*t]] * R[C[i + 1213*t]] : R[B[i + 1213*t]] + R[C[i + 1213*t]];
R[i + 2278*t] = Op[i + 1214*t] ? R[B[i + 1214*t]] * R[C[i + 1214*t]] : R[B[i + 1214*t]] + R[C[i + 1214*t]];
R[i + 2279*t] = Op[i + 1215*t] ? R[B[i + 1215*t]] * R[C[i + 1215*t]] : R[B[i + 1215*t]] + R[C[i + 1215*t]];
R[i + 2280*t] = Op[i + 1216*t] ? R[B[i + 1216*t]] * R[C[i + 1216*t]] : R[B[i + 1216*t]] + R[C[i + 1216*t]];
R[i + 2281*t] = Op[i + 1217*t] ? R[B[i + 1217*t]] * R[C[i + 1217*t]] : R[B[i + 1217*t]] + R[C[i + 1217*t]];
R[i + 2282*t] = Op[i + 1218*t] ? R[B[i + 1218*t]] * R[C[i + 1218*t]] : R[B[i + 1218*t]] + R[C[i + 1218*t]];
R[i + 2283*t] = Op[i + 1219*t] ? R[B[i + 1219*t]] * R[C[i + 1219*t]] : R[B[i + 1219*t]] + R[C[i + 1219*t]];
R[i + 2284*t] = Op[i + 1220*t] ? R[B[i + 1220*t]] * R[C[i + 1220*t]] : R[B[i + 1220*t]] + R[C[i + 1220*t]];
R[i + 2285*t] = Op[i + 1221*t] ? R[B[i + 1221*t]] * R[C[i + 1221*t]] : R[B[i + 1221*t]] + R[C[i + 1221*t]];
R[i + 2286*t] = Op[i + 1222*t] ? R[B[i + 1222*t]] * R[C[i + 1222*t]] : R[B[i + 1222*t]] + R[C[i + 1222*t]];
R[i + 2287*t] = Op[i + 1223*t] ? R[B[i + 1223*t]] * R[C[i + 1223*t]] : R[B[i + 1223*t]] + R[C[i + 1223*t]];
R[i + 2288*t] = Op[i + 1224*t] ? R[B[i + 1224*t]] * R[C[i + 1224*t]] : R[B[i + 1224*t]] + R[C[i + 1224*t]];
R[i + 2289*t] = Op[i + 1225*t] ? R[B[i + 1225*t]] * R[C[i + 1225*t]] : R[B[i + 1225*t]] + R[C[i + 1225*t]];
R[i + 2290*t] = Op[i + 1226*t] ? R[B[i + 1226*t]] * R[C[i + 1226*t]] : R[B[i + 1226*t]] + R[C[i + 1226*t]];
R[i + 2291*t] = Op[i + 1227*t] ? R[B[i + 1227*t]] * R[C[i + 1227*t]] : R[B[i + 1227*t]] + R[C[i + 1227*t]];
R[i + 2292*t] = Op[i + 1228*t] ? R[B[i + 1228*t]] * R[C[i + 1228*t]] : R[B[i + 1228*t]] + R[C[i + 1228*t]];
R[i + 2293*t] = Op[i + 1229*t] ? R[B[i + 1229*t]] * R[C[i + 1229*t]] : R[B[i + 1229*t]] + R[C[i + 1229*t]];
R[i + 2294*t] = Op[i + 1230*t] ? R[B[i + 1230*t]] * R[C[i + 1230*t]] : R[B[i + 1230*t]] + R[C[i + 1230*t]];
R[i + 2295*t] = Op[i + 1231*t] ? R[B[i + 1231*t]] * R[C[i + 1231*t]] : R[B[i + 1231*t]] + R[C[i + 1231*t]];
// Barrier between generated phases (see header note).
__syncthreads();
R[i + 2296*t] = Op[i + 1232*t] ? R[B[i + 1232*t]] * R[C[i + 1232*t]] : R[B[i + 1232*t]] + R[C[i + 1232*t]];
R[i + 2297*t] = Op[i + 1233*t] ? R[B[i + 1233*t]] * R[C[i + 1233*t]] : R[B[i + 1233*t]] + R[C[i + 1233*t]];
R[i + 2298*t] = Op[i + 1234*t] ? R[B[i + 1234*t]] * R[C[i + 1234*t]] : R[B[i + 1234*t]] + R[C[i + 1234*t]];
R[i + 2299*t] = Op[i + 1235*t] ? R[B[i + 1235*t]] * R[C[i + 1235*t]] : R[B[i + 1235*t]] + R[C[i + 1235*t]];
R[i + 2300*t] = Op[i + 1236*t] ? R[B[i + 1236*t]] * R[C[i + 1236*t]] : R[B[i + 1236*t]] + R[C[i + 1236*t]];
R[i + 2301*t] = Op[i + 1237*t] ? R[B[i + 1237*t]] * R[C[i + 1237*t]] : R[B[i + 1237*t]] + R[C[i + 1237*t]];
R[i + 2302*t] = Op[i + 1238*t] ? R[B[i + 1238*t]] * R[C[i + 1238*t]] : R[B[i + 1238*t]] + R[C[i + 1238*t]];
R[i + 2303*t] = Op[i + 1239*t] ? R[B[i + 1239*t]] * R[C[i + 1239*t]] : R[B[i + 1239*t]] + R[C[i + 1239*t]];
R[i + 2304*t] = Op[i + 1240*t] ? R[B[i + 1240*t]] * R[C[i + 1240*t]] : R[B[i + 1240*t]] + R[C[i + 1240*t]];
R[i + 2305*t] = Op[i + 1241*t] ? R[B[i + 1241*t]] * R[C[i + 1241*t]] : R[B[i + 1241*t]] + R[C[i + 1241*t]];
R[i + 2306*t] = Op[i + 1242*t] ? R[B[i + 1242*t]] * R[C[i + 1242*t]] : R[B[i + 1242*t]] + R[C[i + 1242*t]];
R[i + 2307*t] = Op[i + 1243*t] ? R[B[i + 1243*t]] * R[C[i + 1243*t]] : R[B[i + 1243*t]] + R[C[i + 1243*t]];
R[i + 2308*t] = Op[i + 1244*t] ? R[B[i + 1244*t]] * R[C[i + 1244*t]] : R[B[i + 1244*t]] + R[C[i + 1244*t]];
R[i + 2309*t] = Op[i + 1245*t] ? R[B[i + 1245*t]] * R[C[i + 1245*t]] : R[B[i + 1245*t]] + R[C[i + 1245*t]];
R[i + 2310*t] = Op[i + 1246*t] ? R[B[i + 1246*t]] * R[C[i + 1246*t]] : R[B[i + 1246*t]] + R[C[i + 1246*t]];
R[i + 2311*t] = Op[i + 1247*t] ? R[B[i + 1247*t]] * R[C[i + 1247*t]] : R[B[i + 1247*t]] + R[C[i + 1247*t]];
R[i + 2312*t] = Op[i + 1248*t] ? R[B[i + 1248*t]] * R[C[i + 1248*t]] : R[B[i + 1248*t]] + R[C[i + 1248*t]];
R[i + 2313*t] = Op[i + 1249*t] ? R[B[i + 1249*t]] * R[C[i + 1249*t]] : R[B[i + 1249*t]] + R[C[i + 1249*t]];
R[i + 2314*t] = Op[i + 1250*t] ? R[B[i + 1250*t]] * R[C[i + 1250*t]] : R[B[i + 1250*t]] + R[C[i + 1250*t]];
// Barrier between generated phases (see header note).
__syncthreads();
R[i + 2315*t] = Op[i + 1251*t] ? R[B[i + 1251*t]] * R[C[i + 1251*t]] : R[B[i + 1251*t]] + R[C[i + 1251*t]];
R[i + 2316*t] = Op[i + 1252*t] ? R[B[i + 1252*t]] * R[C[i + 1252*t]] : R[B[i + 1252*t]] + R[C[i + 1252*t]];
R[i + 2317*t] = Op[i + 1253*t] ? R[B[i + 1253*t]] * R[C[i + 1253*t]] : R[B[i + 1253*t]] + R[C[i + 1253*t]];
R[i + 2318*t] = Op[i + 1254*t] ? R[B[i + 1254*t]] * R[C[i + 1254*t]] : R[B[i + 1254*t]] + R[C[i + 1254*t]];
R[i + 2319*t] = Op[i + 1255*t] ? R[B[i + 1255*t]] * R[C[i + 1255*t]] : R[B[i + 1255*t]] + R[C[i + 1255*t]];
R[i + 2320*t] = Op[i + 1256*t] ? R[B[i + 1256*t]] * R[C[i + 1256*t]] : R[B[i + 1256*t]] + R[C[i + 1256*t]];
R[i + 2321*t] = Op[i + 1257*t] ? R[B[i + 1257*t]] * R[C[i + 1257*t]] : R[B[i + 1257*t]] + R[C[i + 1257*t]];
R[i + 2322*t] = Op[i + 1258*t] ? R[B[i + 1258*t]] * R[C[i + 1258*t]] : R[B[i + 1258*t]] + R[C[i + 1258*t]];
R[i + 2323*t] = Op[i + 1259*t] ? R[B[i + 1259*t]] * R[C[i + 1259*t]] : R[B[i + 1259*t]] + R[C[i + 1259*t]];
R[i + 2324*t] = Op[i + 1260*t] ? R[B[i + 1260*t]] * R[C[i + 1260*t]] : R[B[i + 1260*t]] + R[C[i + 1260*t]];
R[i + 2325*t] = Op[i + 1261*t] ? R[B[i + 1261*t]] * R[C[i + 1261*t]] : R[B[i + 1261*t]] + R[C[i + 1261*t]];
R[i + 2326*t] = Op[i + 1262*t] ? R[B[i + 1262*t]] * R[C[i + 1262*t]] : R[B[i + 1262*t]] + R[C[i + 1262*t]];
R[i + 2327*t] = Op[i + 1263*t] ? R[B[i + 1263*t]] * R[C[i + 1263*t]] : R[B[i + 1263*t]] + R[C[i + 1263*t]];
R[i + 2328*t] = Op[i + 1264*t] ? R[B[i + 1264*t]] * R[C[i + 1264*t]] : R[B[i + 1264*t]] + R[C[i + 1264*t]];
__syncthreads();
R[i + 2329*t] = Op[i + 1265*t] ? R[B[i + 1265*t]] * R[C[i + 1265*t]] : R[B[i + 1265*t]] + R[C[i + 1265*t]];
R[i + 2330*t] = Op[i + 1266*t] ? R[B[i + 1266*t]] * R[C[i + 1266*t]] : R[B[i + 1266*t]] + R[C[i + 1266*t]];
R[i + 2331*t] = Op[i + 1267*t] ? R[B[i + 1267*t]] * R[C[i + 1267*t]] : R[B[i + 1267*t]] + R[C[i + 1267*t]];
R[i + 2332*t] = Op[i + 1268*t] ? R[B[i + 1268*t]] * R[C[i + 1268*t]] : R[B[i + 1268*t]] + R[C[i + 1268*t]];
R[i + 2333*t] = Op[i + 1269*t] ? R[B[i + 1269*t]] * R[C[i + 1269*t]] : R[B[i + 1269*t]] + R[C[i + 1269*t]];
R[i + 2334*t] = Op[i + 1270*t] ? R[B[i + 1270*t]] * R[C[i + 1270*t]] : R[B[i + 1270*t]] + R[C[i + 1270*t]];
R[i + 2335*t] = Op[i + 1271*t] ? R[B[i + 1271*t]] * R[C[i + 1271*t]] : R[B[i + 1271*t]] + R[C[i + 1271*t]];
R[i + 2336*t] = Op[i + 1272*t] ? R[B[i + 1272*t]] * R[C[i + 1272*t]] : R[B[i + 1272*t]] + R[C[i + 1272*t]];
R[i + 2337*t] = Op[i + 1273*t] ? R[B[i + 1273*t]] * R[C[i + 1273*t]] : R[B[i + 1273*t]] + R[C[i + 1273*t]];
R[i + 2338*t] = Op[i + 1274*t] ? R[B[i + 1274*t]] * R[C[i + 1274*t]] : R[B[i + 1274*t]] + R[C[i + 1274*t]];
R[i + 2339*t] = Op[i + 1275*t] ? R[B[i + 1275*t]] * R[C[i + 1275*t]] : R[B[i + 1275*t]] + R[C[i + 1275*t]];
R[i + 2340*t] = Op[i + 1276*t] ? R[B[i + 1276*t]] * R[C[i + 1276*t]] : R[B[i + 1276*t]] + R[C[i + 1276*t]];
R[i + 2341*t] = Op[i + 1277*t] ? R[B[i + 1277*t]] * R[C[i + 1277*t]] : R[B[i + 1277*t]] + R[C[i + 1277*t]];
R[i + 2342*t] = Op[i + 1278*t] ? R[B[i + 1278*t]] * R[C[i + 1278*t]] : R[B[i + 1278*t]] + R[C[i + 1278*t]];
R[i + 2343*t] = Op[i + 1279*t] ? R[B[i + 1279*t]] * R[C[i + 1279*t]] : R[B[i + 1279*t]] + R[C[i + 1279*t]];
R[i + 2344*t] = Op[i + 1280*t] ? R[B[i + 1280*t]] * R[C[i + 1280*t]] : R[B[i + 1280*t]] + R[C[i + 1280*t]];
R[i + 2345*t] = Op[i + 1281*t] ? R[B[i + 1281*t]] * R[C[i + 1281*t]] : R[B[i + 1281*t]] + R[C[i + 1281*t]];
R[i + 2346*t] = Op[i + 1282*t] ? R[B[i + 1282*t]] * R[C[i + 1282*t]] : R[B[i + 1282*t]] + R[C[i + 1282*t]];
__syncthreads();
R[i + 2347*t] = Op[i + 1283*t] ? R[B[i + 1283*t]] * R[C[i + 1283*t]] : R[B[i + 1283*t]] + R[C[i + 1283*t]];
R[i + 2348*t] = Op[i + 1284*t] ? R[B[i + 1284*t]] * R[C[i + 1284*t]] : R[B[i + 1284*t]] + R[C[i + 1284*t]];
R[i + 2349*t] = Op[i + 1285*t] ? R[B[i + 1285*t]] * R[C[i + 1285*t]] : R[B[i + 1285*t]] + R[C[i + 1285*t]];
R[i + 2350*t] = Op[i + 1286*t] ? R[B[i + 1286*t]] * R[C[i + 1286*t]] : R[B[i + 1286*t]] + R[C[i + 1286*t]];
R[i + 2351*t] = Op[i + 1287*t] ? R[B[i + 1287*t]] * R[C[i + 1287*t]] : R[B[i + 1287*t]] + R[C[i + 1287*t]];
R[i + 2352*t] = Op[i + 1288*t] ? R[B[i + 1288*t]] * R[C[i + 1288*t]] : R[B[i + 1288*t]] + R[C[i + 1288*t]];
R[i + 2353*t] = Op[i + 1289*t] ? R[B[i + 1289*t]] * R[C[i + 1289*t]] : R[B[i + 1289*t]] + R[C[i + 1289*t]];
R[i + 2354*t] = Op[i + 1290*t] ? R[B[i + 1290*t]] * R[C[i + 1290*t]] : R[B[i + 1290*t]] + R[C[i + 1290*t]];
R[i + 2355*t] = Op[i + 1291*t] ? R[B[i + 1291*t]] * R[C[i + 1291*t]] : R[B[i + 1291*t]] + R[C[i + 1291*t]];
R[i + 2356*t] = Op[i + 1292*t] ? R[B[i + 1292*t]] * R[C[i + 1292*t]] : R[B[i + 1292*t]] + R[C[i + 1292*t]];
R[i + 2357*t] = Op[i + 1293*t] ? R[B[i + 1293*t]] * R[C[i + 1293*t]] : R[B[i + 1293*t]] + R[C[i + 1293*t]];
R[i + 2358*t] = Op[i + 1294*t] ? R[B[i + 1294*t]] * R[C[i + 1294*t]] : R[B[i + 1294*t]] + R[C[i + 1294*t]];
R[i + 2359*t] = Op[i + 1295*t] ? R[B[i + 1295*t]] * R[C[i + 1295*t]] : R[B[i + 1295*t]] + R[C[i + 1295*t]];
R[i + 2360*t] = Op[i + 1296*t] ? R[B[i + 1296*t]] * R[C[i + 1296*t]] : R[B[i + 1296*t]] + R[C[i + 1296*t]];
__syncthreads();
R[i + 2361*t] = Op[i + 1297*t] ? R[B[i + 1297*t]] * R[C[i + 1297*t]] : R[B[i + 1297*t]] + R[C[i + 1297*t]];
R[i + 2362*t] = Op[i + 1298*t] ? R[B[i + 1298*t]] * R[C[i + 1298*t]] : R[B[i + 1298*t]] + R[C[i + 1298*t]];
R[i + 2363*t] = Op[i + 1299*t] ? R[B[i + 1299*t]] * R[C[i + 1299*t]] : R[B[i + 1299*t]] + R[C[i + 1299*t]];
R[i + 2364*t] = Op[i + 1300*t] ? R[B[i + 1300*t]] * R[C[i + 1300*t]] : R[B[i + 1300*t]] + R[C[i + 1300*t]];
R[i + 2365*t] = Op[i + 1301*t] ? R[B[i + 1301*t]] * R[C[i + 1301*t]] : R[B[i + 1301*t]] + R[C[i + 1301*t]];
R[i + 2366*t] = Op[i + 1302*t] ? R[B[i + 1302*t]] * R[C[i + 1302*t]] : R[B[i + 1302*t]] + R[C[i + 1302*t]];
R[i + 2367*t] = Op[i + 1303*t] ? R[B[i + 1303*t]] * R[C[i + 1303*t]] : R[B[i + 1303*t]] + R[C[i + 1303*t]];
R[i + 2368*t] = Op[i + 1304*t] ? R[B[i + 1304*t]] * R[C[i + 1304*t]] : R[B[i + 1304*t]] + R[C[i + 1304*t]];
R[i + 2369*t] = Op[i + 1305*t] ? R[B[i + 1305*t]] * R[C[i + 1305*t]] : R[B[i + 1305*t]] + R[C[i + 1305*t]];
R[i + 2370*t] = Op[i + 1306*t] ? R[B[i + 1306*t]] * R[C[i + 1306*t]] : R[B[i + 1306*t]] + R[C[i + 1306*t]];
R[i + 2371*t] = Op[i + 1307*t] ? R[B[i + 1307*t]] * R[C[i + 1307*t]] : R[B[i + 1307*t]] + R[C[i + 1307*t]];
R[i + 2372*t] = Op[i + 1308*t] ? R[B[i + 1308*t]] * R[C[i + 1308*t]] : R[B[i + 1308*t]] + R[C[i + 1308*t]];
R[i + 2373*t] = Op[i + 1309*t] ? R[B[i + 1309*t]] * R[C[i + 1309*t]] : R[B[i + 1309*t]] + R[C[i + 1309*t]];
R[i + 2374*t] = Op[i + 1310*t] ? R[B[i + 1310*t]] * R[C[i + 1310*t]] : R[B[i + 1310*t]] + R[C[i + 1310*t]];
R[i + 2375*t] = Op[i + 1311*t] ? R[B[i + 1311*t]] * R[C[i + 1311*t]] : R[B[i + 1311*t]] + R[C[i + 1311*t]];
__syncthreads();
R[i + 2376*t] = Op[i + 1312*t] ? R[B[i + 1312*t]] * R[C[i + 1312*t]] : R[B[i + 1312*t]] + R[C[i + 1312*t]];
R[i + 2377*t] = Op[i + 1313*t] ? R[B[i + 1313*t]] * R[C[i + 1313*t]] : R[B[i + 1313*t]] + R[C[i + 1313*t]];
R[i + 2378*t] = Op[i + 1314*t] ? R[B[i + 1314*t]] * R[C[i + 1314*t]] : R[B[i + 1314*t]] + R[C[i + 1314*t]];
R[i + 2379*t] = Op[i + 1315*t] ? R[B[i + 1315*t]] * R[C[i + 1315*t]] : R[B[i + 1315*t]] + R[C[i + 1315*t]];
R[i + 2380*t] = Op[i + 1316*t] ? R[B[i + 1316*t]] * R[C[i + 1316*t]] : R[B[i + 1316*t]] + R[C[i + 1316*t]];
R[i + 2381*t] = Op[i + 1317*t] ? R[B[i + 1317*t]] * R[C[i + 1317*t]] : R[B[i + 1317*t]] + R[C[i + 1317*t]];
R[i + 2382*t] = Op[i + 1318*t] ? R[B[i + 1318*t]] * R[C[i + 1318*t]] : R[B[i + 1318*t]] + R[C[i + 1318*t]];
R[i + 2383*t] = Op[i + 1319*t] ? R[B[i + 1319*t]] * R[C[i + 1319*t]] : R[B[i + 1319*t]] + R[C[i + 1319*t]];
R[i + 2384*t] = Op[i + 1320*t] ? R[B[i + 1320*t]] * R[C[i + 1320*t]] : R[B[i + 1320*t]] + R[C[i + 1320*t]];
R[i + 2385*t] = Op[i + 1321*t] ? R[B[i + 1321*t]] * R[C[i + 1321*t]] : R[B[i + 1321*t]] + R[C[i + 1321*t]];
R[i + 2386*t] = Op[i + 1322*t] ? R[B[i + 1322*t]] * R[C[i + 1322*t]] : R[B[i + 1322*t]] + R[C[i + 1322*t]];
R[i + 2387*t] = Op[i + 1323*t] ? R[B[i + 1323*t]] * R[C[i + 1323*t]] : R[B[i + 1323*t]] + R[C[i + 1323*t]];
R[i + 2388*t] = Op[i + 1324*t] ? R[B[i + 1324*t]] * R[C[i + 1324*t]] : R[B[i + 1324*t]] + R[C[i + 1324*t]];
R[i + 2389*t] = Op[i + 1325*t] ? R[B[i + 1325*t]] * R[C[i + 1325*t]] : R[B[i + 1325*t]] + R[C[i + 1325*t]];
__syncthreads();
R[i + 2390*t] = Op[i + 1326*t] ? R[B[i + 1326*t]] * R[C[i + 1326*t]] : R[B[i + 1326*t]] + R[C[i + 1326*t]];
R[i + 2391*t] = Op[i + 1327*t] ? R[B[i + 1327*t]] * R[C[i + 1327*t]] : R[B[i + 1327*t]] + R[C[i + 1327*t]];
R[i + 2392*t] = Op[i + 1328*t] ? R[B[i + 1328*t]] * R[C[i + 1328*t]] : R[B[i + 1328*t]] + R[C[i + 1328*t]];
R[i + 2393*t] = Op[i + 1329*t] ? R[B[i + 1329*t]] * R[C[i + 1329*t]] : R[B[i + 1329*t]] + R[C[i + 1329*t]];
R[i + 2394*t] = Op[i + 1330*t] ? R[B[i + 1330*t]] * R[C[i + 1330*t]] : R[B[i + 1330*t]] + R[C[i + 1330*t]];
R[i + 2395*t] = Op[i + 1331*t] ? R[B[i + 1331*t]] * R[C[i + 1331*t]] : R[B[i + 1331*t]] + R[C[i + 1331*t]];
R[i + 2396*t] = Op[i + 1332*t] ? R[B[i + 1332*t]] * R[C[i + 1332*t]] : R[B[i + 1332*t]] + R[C[i + 1332*t]];
R[i + 2397*t] = Op[i + 1333*t] ? R[B[i + 1333*t]] * R[C[i + 1333*t]] : R[B[i + 1333*t]] + R[C[i + 1333*t]];
R[i + 2398*t] = Op[i + 1334*t] ? R[B[i + 1334*t]] * R[C[i + 1334*t]] : R[B[i + 1334*t]] + R[C[i + 1334*t]];
__syncthreads();
R[i + 2399*t] = Op[i + 1335*t] ? R[B[i + 1335*t]] * R[C[i + 1335*t]] : R[B[i + 1335*t]] + R[C[i + 1335*t]];
R[i + 2400*t] = Op[i + 1336*t] ? R[B[i + 1336*t]] * R[C[i + 1336*t]] : R[B[i + 1336*t]] + R[C[i + 1336*t]];
R[i + 2401*t] = Op[i + 1337*t] ? R[B[i + 1337*t]] * R[C[i + 1337*t]] : R[B[i + 1337*t]] + R[C[i + 1337*t]];
R[i + 2402*t] = Op[i + 1338*t] ? R[B[i + 1338*t]] * R[C[i + 1338*t]] : R[B[i + 1338*t]] + R[C[i + 1338*t]];
R[i + 2403*t] = Op[i + 1339*t] ? R[B[i + 1339*t]] * R[C[i + 1339*t]] : R[B[i + 1339*t]] + R[C[i + 1339*t]];
R[i + 2404*t] = Op[i + 1340*t] ? R[B[i + 1340*t]] * R[C[i + 1340*t]] : R[B[i + 1340*t]] + R[C[i + 1340*t]];
R[i + 2405*t] = Op[i + 1341*t] ? R[B[i + 1341*t]] * R[C[i + 1341*t]] : R[B[i + 1341*t]] + R[C[i + 1341*t]];
R[i + 2406*t] = Op[i + 1342*t] ? R[B[i + 1342*t]] * R[C[i + 1342*t]] : R[B[i + 1342*t]] + R[C[i + 1342*t]];
__syncthreads();
R[i + 2407*t] = Op[i + 1343*t] ? R[B[i + 1343*t]] * R[C[i + 1343*t]] : R[B[i + 1343*t]] + R[C[i + 1343*t]];
R[i + 2408*t] = Op[i + 1344*t] ? R[B[i + 1344*t]] * R[C[i + 1344*t]] : R[B[i + 1344*t]] + R[C[i + 1344*t]];
R[i + 2409*t] = Op[i + 1345*t] ? R[B[i + 1345*t]] * R[C[i + 1345*t]] : R[B[i + 1345*t]] + R[C[i + 1345*t]];
R[i + 2410*t] = Op[i + 1346*t] ? R[B[i + 1346*t]] * R[C[i + 1346*t]] : R[B[i + 1346*t]] + R[C[i + 1346*t]];
R[i + 2411*t] = Op[i + 1347*t] ? R[B[i + 1347*t]] * R[C[i + 1347*t]] : R[B[i + 1347*t]] + R[C[i + 1347*t]];
R[i + 2412*t] = Op[i + 1348*t] ? R[B[i + 1348*t]] * R[C[i + 1348*t]] : R[B[i + 1348*t]] + R[C[i + 1348*t]];
__syncthreads();
R[i + 2413*t] = Op[i + 1349*t] ? R[B[i + 1349*t]] * R[C[i + 1349*t]] : R[B[i + 1349*t]] + R[C[i + 1349*t]];
R[i + 2414*t] = Op[i + 1350*t] ? R[B[i + 1350*t]] * R[C[i + 1350*t]] : R[B[i + 1350*t]] + R[C[i + 1350*t]];
R[i + 2415*t] = Op[i + 1351*t] ? R[B[i + 1351*t]] * R[C[i + 1351*t]] : R[B[i + 1351*t]] + R[C[i + 1351*t]];
R[i + 2416*t] = Op[i + 1352*t] ? R[B[i + 1352*t]] * R[C[i + 1352*t]] : R[B[i + 1352*t]] + R[C[i + 1352*t]];
R[i + 2417*t] = Op[i + 1353*t] ? R[B[i + 1353*t]] * R[C[i + 1353*t]] : R[B[i + 1353*t]] + R[C[i + 1353*t]];
__syncthreads();
R[i + 2418*t] = Op[i + 1354*t] ? R[B[i + 1354*t]] * R[C[i + 1354*t]] : R[B[i + 1354*t]] + R[C[i + 1354*t]];
R[i + 2419*t] = Op[i + 1355*t] ? R[B[i + 1355*t]] * R[C[i + 1355*t]] : R[B[i + 1355*t]] + R[C[i + 1355*t]];
R[i + 2420*t] = Op[i + 1356*t] ? R[B[i + 1356*t]] * R[C[i + 1356*t]] : R[B[i + 1356*t]] + R[C[i + 1356*t]];
R[i + 2421*t] = Op[i + 1357*t] ? R[B[i + 1357*t]] * R[C[i + 1357*t]] : R[B[i + 1357*t]] + R[C[i + 1357*t]];
__syncthreads();
R[i + 2422*t] = Op[i + 1358*t] ? R[B[i + 1358*t]] * R[C[i + 1358*t]] : R[B[i + 1358*t]] + R[C[i + 1358*t]];
R[i + 2423*t] = Op[i + 1359*t] ? R[B[i + 1359*t]] * R[C[i + 1359*t]] : R[B[i + 1359*t]] + R[C[i + 1359*t]];
R[i + 2424*t] = Op[i + 1360*t] ? R[B[i + 1360*t]] * R[C[i + 1360*t]] : R[B[i + 1360*t]] + R[C[i + 1360*t]];
__syncthreads();
R[i + 2425*t] = Op[i + 1361*t] ? R[B[i + 1361*t]] * R[C[i + 1361*t]] : R[B[i + 1361*t]] + R[C[i + 1361*t]];
R[i + 2426*t] = Op[i + 1362*t] ? R[B[i + 1362*t]] * R[C[i + 1362*t]] : R[B[i + 1362*t]] + R[C[i + 1362*t]];
R[i + 2427*t] = Op[i + 1363*t] ? R[B[i + 1363*t]] * R[C[i + 1363*t]] : R[B[i + 1363*t]] + R[C[i + 1363*t]];
__syncthreads();
R[i + 2428*t] = Op[i + 1364*t] ? R[B[i + 1364*t]] * R[C[i + 1364*t]] : R[B[i + 1364*t]] + R[C[i + 1364*t]];
R[i + 2429*t] = Op[i + 1365*t] ? R[B[i + 1365*t]] * R[C[i + 1365*t]] : R[B[i + 1365*t]] + R[C[i + 1365*t]];
__syncthreads();
R[i + 2430*t] = Op[i + 1366*t] ? R[B[i + 1366*t]] * R[C[i + 1366*t]] : R[B[i + 1366*t]] + R[C[i + 1366*t]];
R[i + 2431*t] = Op[i + 1367*t] ? R[B[i + 1367*t]] * R[C[i + 1367*t]] : R[B[i + 1367*t]] + R[C[i + 1367*t]];
__syncthreads();
R[i + 2432*t] = Op[i + 1368*t] ? R[B[i + 1368*t]] * R[C[i + 1368*t]] : R[B[i + 1368*t]] + R[C[i + 1368*t]];
__syncthreads();
R[i + 2433*t] = Op[i + 1369*t] ? R[B[i + 1369*t]] * R[C[i + 1369*t]] : R[B[i + 1369*t]] + R[C[i + 1369*t]];
__syncthreads();
R[i + 2434*t] = Op[i + 1370*t] ? R[B[i + 1370*t]] * R[C[i + 1370*t]] : R[B[i + 1370*t]] + R[C[i + 1370*t]];
__syncthreads();
R[i + 2435*t] = Op[i + 1371*t] ? R[B[i + 1371*t]] * R[C[i + 1371*t]] : R[B[i + 1371*t]] + R[C[i + 1371*t]];
__syncthreads();
R[i + 2436*t] = Op[i + 1372*t] ? R[B[i + 1372*t]] * R[C[i + 1372*t]] : R[B[i + 1372*t]] + R[C[i + 1372*t]];
__syncthreads();
R[i + 2437*t] = Op[i + 1373*t] ? R[B[i + 1373*t]] * R[C[i + 1373*t]] : R[B[i + 1373*t]] + R[C[i + 1373*t]];
__syncthreads();
R[i + 2438*t] = Op[i + 1374*t] ? R[B[i + 1374*t]] * R[C[i + 1374*t]] : R[B[i + 1374*t]] + R[C[i + 1374*t]];
__syncthreads();
R[i + 2439*t] = Op[i + 1375*t] ? R[B[i + 1375*t]] * R[C[i + 1375*t]] : R[B[i + 1375*t]] + R[C[i + 1375*t]];
__syncthreads();
R[i + 2440*t] = Op[i + 1376*t] ? R[B[i + 1376*t]] * R[C[i + 1376*t]] : R[B[i + 1376*t]] + R[C[i + 1376*t]];
if (i==0) { final += R[2440*t]; }
__syncthreads();
}
if (i==0) { A[0]= final;}
}
|
d9d053ef47b5fe28ca6bad26a3d1e20ec6855412.hip | // !!! This is a file automatically generated by hipify!!!
#include <stdio.h>
#include <cmath>
#include <hip/hip_runtime.h>
#include <hip/hip_runtime.h>
#include <driver_functions.h>
#include <hiprand/hiprand.h>
#include <hiprand/hiprand_kernel.h>
#include "CycleTimer.h"
using namespace std;
// this is dependent on the time tiling and grid size of one thread block
// we first finish a non-time tiling version
#define BLOCK_DIM_X 16
#define BLOCK_DIM_Y 16
#define HALO_LEFT 1
#define HALO_RIGHT 1
#define HALO_TOP 1
#define HALO_BOTTOM 1
void printCudaInfo();
extern float toBW(int bytes, float sec);
// Host-configured simulation parameters, uploaded once into the
// __constant__ instance cP below and read (read-only) by every kernel in
// this file.
// NOTE(review): only the members actually referenced in this chunk are
// documented with certainty; the rest come from the input file and their
// meaning should be confirmed against the host-side reader.
struct GlobalConstants {
int nx;       // presumably interior grid points in x -- confirm against host setup
int ny;       // presumably interior grid points in y -- confirm against host setup
int Mt;
int nts;
int ictype;   // initial-condition selector (not read in this chunk)
float G;
float R;
float delta;
float k;          // partition coefficient; used in tau_psi and the U equation
float c_infm;
float Dl;
float d0;
float W0;         // interface width scale; divides the seed distance in initialize()
float lT;
float lamd;       // coupling constant in the psi double-well source term
float tau0;
float c_infty;
float R_tilde;    // dimensionless pulling speed; moves the frame term Up in time
float Dl_tilde;   // dimensionless solute diffusivity; scales the U fluxes
float lT_tilde;   // dimensionless thermal length; divides the frame term Up
float eps;        // small threshold guarding divisions by |grad|^2
float alpha0;
float dx;
float dt;         // time-step size used by both update kernels
float asp_ratio;
float lxd;
float lx;
float lyd;
float eta;
float U0;         // uniform initial value of the U field (see initialize())
// parameters that are not in the input file
float hi;         // flux prefactor; presumably inverse grid spacing -- TODO confirm
float cosa;       // cosine of the crystal misorientation angle (rotation in atheta)
float sina;       // sine of the crystal misorientation angle
float sqrt2;      // sqrt(2), used in the psi <-> phi = tanh(psi/sqrt2) mapping
float a_s;        // anisotropy function base coefficient
float epsilon;    // anisotropy strength in atheta()
float a_12;       // anisotropy derivative coefficient in aptheta()
};
// Device-side copy of the parameters; filled from the host (not shown here).
__constant__ GlobalConstants cP;
// Device codes
// boundary condition
// only use this function to access the boundary points,
// other functions return at the boundary
// TODO: document what this function does; it could probably be merged into the main kernel.
// Fill the ghost layers of the four (fnx x fny) field arrays by mirroring:
// row j = 0 copies row j = 2, row fny-1 copies row fny-3; column i = 0
// copies column i = 2, column fnx-1 copies column fnx-3. Launch with at
// least max(fnx, fny) threads in total; only this kernel touches the
// outermost ring, other kernels return at the boundary.
// NOTE(review): each corner cell is written by two different threads (a row
// thread and a column thread) with different source values, so the corner
// result is order-dependent -- identical to the original behaviour.
__global__ void
set_BC(float* ps, float* ph, float* U, float* dpsi, int fnx, int fny){
    int tid = blockIdx.x * blockDim.x + threadIdx.x;

    // Bottom (j = 0) and top (j = fny-1) ghost rows.
    if (tid < fnx) {
        int bot_src = tid + 2 * fnx;          // interior row j = 2
        int top_dst = tid + (fny - 1) * fnx;  // ghost row j = fny-1
        int top_src = tid + (fny - 3) * fnx;  // interior row j = fny-3
        ps[tid]   = ps[bot_src];
        ph[tid]   = ph[bot_src];
        U[tid]    = U[bot_src];
        dpsi[tid] = dpsi[bot_src];
        ps[top_dst]   = ps[top_src];
        ph[top_dst]   = ph[top_src];
        U[top_dst]    = U[top_src];
        dpsi[top_dst] = dpsi[top_src];
    }

    // Left (i = 0) and right (i = fnx-1) ghost columns.
    if (tid < fny) {
        int row       = tid * fnx;        // left ghost cell of this row
        int left_src  = row + 2;          // interior column i = 2
        int right_dst = row + fnx - 1;    // ghost column i = fnx-1
        int right_src = row + fnx - 3;    // interior column i = fnx-3
        ps[row]   = ps[left_src];
        ph[row]   = ph[left_src];
        U[row]    = U[left_src];
        dpsi[row] = dpsi[left_src];
        ps[right_dst]   = ps[right_src];
        ph[right_dst]   = ph[right_src];
        U[right_dst]    = U[right_src];
        dpsi[right_dst] = dpsi[right_src];
    }
}
// initialization
// Initialize the phase-field state on the interior of the (fnx x fny) grid,
// one thread per cell. psi is seeded as a distance-like profile around the
// point (x[fnx/2], 0), scaled by the interface width cP.W0; phi is derived
// as tanh(psi/sqrt2); U starts at the uniform value cP.U0. Both the *_old
// and *_new copies receive identical values. Ghost cells (outermost ring)
// are left untouched -- set_BC fills them afterwards.
__global__ void
initialize(float* ps_old, float* ph_old, float* U_old, float* ps_new, float* ph_new, float* U_new
, float* x, float* y, int fnx, int fny){
    int idx = blockIdx.x * blockDim.x + threadIdx.x;
    int row = idx / fnx;        // j coordinate
    int col = idx - row * fnx;  // i coordinate
    // Skip ghost cells and any thread past the end of the arrays.
    if (col <= 0 || col >= fnx - 1 || row <= 0 || row >= fny - 1) return;

    float xc = x[col];
    float yc = y[row];
    int cent = fnx / 2;
    // Signed-distance-like seed profile, in units of the interface width W0.
    float psi0 = 5.625f - sqrtf((xc - x[cent]) * (xc - x[cent]) + yc * yc) / cP.W0;
    ps_old[idx] = psi0;
    ps_new[idx] = psi0;
    U_old[idx] = cP.U0;
    U_new[idx] = cP.U0;
    ph_old[idx] = tanhf(psi0 / cP.sqrt2);
    ph_new[idx] = tanhf(psi0 / cP.sqrt2);
}
// anisotropy functions
// Four-fold anisotropy function a(theta) of the interface-normal direction.
// (ux, uz) is the phase-field gradient; it is first rotated by the crystal
// misorientation angle (cP.cosa, cP.sina). Returns 1 when the gradient
// magnitude is below cP.eps (flat interface / undefined normal).
__inline__ __device__ float
atheta(float ux, float uz){
    float gx = cP.cosa*ux + cP.sina*uz;   // gradient in the crystal frame
    float gz = -cP.sina*ux + cP.cosa*uz;
    float gx2 = gx*gx;
    float gz2 = gz*gz;
    float mag2 = gx2 + gz2;
    if (mag2 <= cP.eps) return 1.0f;      // degenerate gradient: isotropic
    float mag4 = mag2*mag2;
    return cP.a_s*( 1.0f + cP.epsilon*(gx2*gx2 + gz2*gz2) / mag4);
}
// Derivative of the anisotropy function, a'(theta), for the same rotated
// gradient (ux, uz) as atheta(). Returns 0 when the gradient magnitude is
// below cP.eps (undefined normal direction).
__inline__ __device__ float
aptheta(float ux, float uz){
    float gx = cP.cosa*ux + cP.sina*uz;   // gradient in the crystal frame
    float gz = -cP.sina*ux + cP.cosa*uz;
    float gx2 = gx*gx;
    float gz2 = gz*gz;
    float mag2 = gx2 + gz2;
    if (mag2 <= cP.eps) return 0.0f;      // degenerate gradient: no anisotropy
    float mag4 = mag2*mag2;
    return -cP.a_12*gx*gz*(gx2 - gz2) / mag4;
}
// psi & phi equation: two dimensions
// One explicit time step of the psi (preconditioned phase-field) equation,
// shared-memory tiled version.
//
// Expected launch: a 1-D grid of num_block_x*num_block_y blocks, each with
// BLOCK_DIM_X*BLOCK_DIM_Y threads in a 1-D layout. Each block stages a
// BLOCK_DIM_X x BLOCK_DIM_Y tile of ps/ph/U -- including a one-cell halo on
// every side -- into shared memory, then updates only the interior
// (BLOCK_DIM_X-2) x (BLOCK_DIM_Y-2) cells of the tile.
//
// Inputs:  ps, ph, U current fields (read-only here); y grid y-coordinates;
//          fnx x fny full array size (ghost cells included); nt current
//          time-step index (drives the moving-frame term Up).
// Outputs: dpsi = d(psi)/dt, ps_new, and ph_new = tanhf(ps_new/cP.sqrt2).
__global__ void
rhs_psi_shared_mem(float* ps, float* ph, float* U, float* ps_new, float* ph_new, \
float* y, float* dpsi, int fnx, int fny, int nt, int num_block_x, int num_block_y){
// each CUDA thread block computes one tile of the grid
// shared memory keeps the repeatedly-read neighbour values (ps and ph) on
// chip instead of re-reading global memory with poor coalescing
// one ghost cell of halo on each side of the tile
int halo_left = 1;
int halo_right = 1;
int halo_top = 1;
int halo_bottom = 1;
// interior cells actually updated by this block
int real_block_x = BLOCK_DIM_X - halo_left - halo_right;
int real_block_y = BLOCK_DIM_Y - halo_top - halo_bottom;
// staging buffers for the whole (interior + halo) tile
__shared__ float ps_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float ph_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float U_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
// each thread --> one element of the enlarged (halo-padded) tile
int local_id = threadIdx.x; //local id -> save to shared mem
int local_id_x = local_id % BLOCK_DIM_X;
int local_id_y = local_id / BLOCK_DIM_X;
// obtain the block id in the shrunk (interior-only) region
int block_id = blockIdx.x; // 0~num_block_x*num_block_y
int block_id_x = block_id % num_block_x;
int block_id_y = block_id / num_block_x;
// this is the addr in the inner region without considering the BC
int block_addr_x = block_id_x * real_block_x;
int block_addr_y = block_id_y * real_block_y;
// find the addr of data in global memory
// add 1 as we count the block in the inner region; - halo_left as we have a halo region in this block
int data_addr_x = block_addr_x + 1 - halo_left + local_id_x;
int data_addr_y = block_addr_y + 1 - halo_bottom + local_id_y;
int C= data_addr_y * fnx + data_addr_x; // place in global memory
int j=C/fnx;
int i=C-j*fnx;
// NOTE(review): the loads below are unconditional; this assumes the launch
// keeps every thread's (data_addr_x, data_addr_y) inside the fnx x fny
// arrays -- TODO confirm against the host-side grid configuration.
// update data
ps_shared[local_id] = ps[C];
ph_shared[local_id] = ph[C];
U_shared[local_id] = U[C];
__syncthreads(); // tile fully staged before any neighbour reads
// if (C==1001){
// printf("check data 1: %f\n", ps[C]);
// }
// if (local_id == 0) printf("check data %f", ps_shared[local_id]);
// compute based on the shared memory, skip if we are at the boundary
int place = local_id_y * BLOCK_DIM_X + local_id_x;
// if the points are at the boundary, do nothing
// two levels of guard: global domain boundary and local tile halo
if ( (i>0) && (i<fnx-1) && (j>0) && (j<fny-1) ) {
if ((local_id_x>0)&& (local_id_x<BLOCK_DIM_X-1) && (local_id_y>0) && (local_id_y<BLOCK_DIM_Y-1)) {
// find the indices of the 8 neighbors for center
// if (C==1000){printf("find");}
int R=place+1;
int L=place-1;
int T=place+BLOCK_DIM_X;
int B=place-BLOCK_DIM_X;
// // preload data
// float ps_shared_c = ps_shared[place];
// float ps_shared_r = ps_shared[R];
// float ps_shared_l = ps_shared[L];
// float ps_shared_top = ps_shared[T];
// float ps_shared_top_l = ps_shared[T-1];
// float ps_shared_top_r = ps_shared[T+1];
// float ps_shared_b_r = ps_shared[B-1];
// float ps_shared_b_l = ps_shared[B+1];
// float ps_shared_b = ps_shared[B];
// float ph_shared_c = ph_shared[place];
// float ph_shared_r = ph_shared[R];
// float ph_shared_l = ph_shared[L];
// float ph_shared_top = ph_shared[T];
// float ph_shared_top_l = ph_shared[T-1];
// float ph_shared_top_r = ph_shared[T+1];
// float ph_shared_b_r = ph_shared[B-1];
// float ph_shared_b_l = ph_shared[B+1];
// float ph_shared_b = ph_shared[B];
// if (C==1001){
// printf("detailed check of neighbours\n");
// printf("R: %d ; L:%d ; T: %d ; B: %d \n", R, L, T, B);
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", ps_shared[R], ps_shared[L], ps_shared[T], ps_shared[B]);
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", ps_shared[R], ps_shared[L], ps_shared[T+1], ps_shared[B]);
// }
// =============================================================
// 1. ANISOTROPIC DIFFUSION
// =============================================================
// these ps's are defined on cell centers (4-point averages to cell corners)
float psipjp=( ps_shared[place] + ps_shared[R] + ps_shared[T] + ps_shared[T+1] ) * 0.25f;
float psipjm=( ps_shared[place] + ps_shared[R] + ps_shared[B] + ps_shared[B+1] ) * 0.25f;
float psimjp=( ps_shared[place] + ps_shared[L] + ps_shared[T-1] + ps_shared[T] ) * 0.25f;
float psimjm=( ps_shared[place] + ps_shared[L] + ps_shared[B-1] + ps_shared[B] ) * 0.25f;
float phipjp=( ph_shared[place] + ph_shared[R] + ph_shared[T] + ph_shared[T+1] ) * 0.25f;
float phipjm=( ph_shared[place] + ph_shared[R] + ph_shared[B] + ph_shared[B+1] ) * 0.25f;
float phimjp=( ph_shared[place] + ph_shared[L] + ph_shared[T-1] + ph_shared[T] ) * 0.25f;
float phimjm=( ph_shared[place] + ph_shared[L] + ph_shared[B-1] + ph_shared[B] ) * 0.25f;
// if (C==1001){
// printf("detailed check of neighbours 2\n");
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", psipjp, psipjm, psimjp, psimjm);
// }
// ============================
// right edge flux
// ============================
float psx = ps_shared[R]-ps_shared[place];
float psz = psipjp - psipjm;
float phx = ph_shared[R]-ph_shared[place];
float phz = phipjp - phipjm;
float A = atheta( phx,phz);
float Ap = aptheta(phx,phz);
float JR = A * ( A*psx - Ap*psz );
// ============================
// left edge flux
// ============================
psx = ps_shared[place]-ps_shared[L];
psz = psimjp - psimjm;
phx = ph_shared[place]-ph_shared[L];
phz = phimjp - phimjm;
A = atheta( phx,phz);
Ap = aptheta(phx,phz);
float JL = A * ( A*psx - Ap*psz );
// ============================
// top edge flux
// ============================
psx = psipjp - psimjp;
psz = ps_shared[T]-ps_shared[place];
phx = phipjp - phimjp;
phz = ph_shared[T]-ph_shared[place];
A = atheta( phx,phz);
Ap = aptheta(phx,phz);
float JT = A * ( A*psz + Ap*psx );
// ============================
// bottom edge flux
// ============================
psx = psipjm - psimjm;
psz = ps_shared[place]-ps_shared[B];
phx = phipjm - phimjm;
phz = ph_shared[place]-ph_shared[B];
A = atheta( phx,phz);
Ap = aptheta(phx,phz);
float JB = A * ( A*psz + Ap*psx );
// if (C==1001){
// printf("detailed check of neighbours 3\n");
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", JR, JL, JT, JB);
// }
/*# =============================================================
#
# 2. EXTRA TERM: sqrt2 * atheta**2 * phi * |grad psi|^2
#
# =============================================================
# d(phi)/dx d(psi)/dx d(phi)/dz d(psi)/dz at nodes (i,j)*/
float phxn = ( ph_shared[R] - ph_shared[L] ) * 0.5f;
float phzn = ( ph_shared[T] - ph_shared[B] ) * 0.5f;
float psxn = ( ps_shared[R] - ps_shared[L] ) * 0.5f;
float pszn = ( ps_shared[T] - ps_shared[B] ) * 0.5f;
float A2 = atheta(phxn,phzn);
A2 = A2*A2;
float gradps2 = (psxn)*(psxn) + (pszn)*(pszn);
float extra = -cP.sqrt2 * A2 * ph_shared[place] * gradps2;
/*# =============================================================
#
# 3. double well (transformed): sqrt2 * phi + nonlinear terms
#
# =============================================================*/
// Up: presumably the frozen-temperature-approximation driving term at
// height y[j] in a frame moving at R_tilde -- TODO confirm against model.
float Up = (y[j]/cP.W0 - cP.R_tilde * (nt*cP.dt) )/cP.lT_tilde;
float rhs_psi = ((JR-JL) + (JT-JB) + extra) * cP.hi*cP.hi + \
cP.sqrt2*ph_shared[place] - cP.lamd*(1.0f-ph_shared[place]*ph_shared[place])*cP.sqrt2*(U_shared[place] + Up);
/*# =============================================================
#
# 4. dpsi/dt term
#
# =============================================================*/
// relaxation time, floored at k*A2 so tau_psi stays positive
float tp = (1.0f-(1.0f-cP.k)*Up);
float tau_psi;
if (tp >= cP.k){tau_psi = tp*A2;}
else {tau_psi = cP.k*A2;}
dpsi[C] = rhs_psi / tau_psi;
// forward-Euler update of psi, then re-derive phi from it
ps_new[C] = ps_shared[place] + cP.dt * dpsi[C];
ph_new[C] = tanhf(ps_new[C]/cP.sqrt2);
// if (C==1000){printf("%f ",ph_new[C]);}
// if (C == 1000) printf("check data %f\n", ps_shared[local_id]);
// if (C == 1001) {
// printf("check data ps: %f and ph: %f and dpsi: %f and U: %f\n", ps_new[C], ph_new[C], dpsi[C], U[C]);
// // printf("block id %d ; local_id_x %d; local_id_y %d\n", block_id, local_id_x, local_id_y);
// // printf("block id %d ; data_addr_x %d; data_addr_y %d\n", block_id, data_addr_x, data_addr_y);
// }
}
}
__syncthreads(); // uniform barrier: every thread reaches here (no early returns above)
}
// U equation
// One explicit time step of the U (dimensionless solute) equation,
// shared-memory tiled version.
//
// Same tiling/launch contract as rhs_psi_shared_mem: each block stages a
// BLOCK_DIM_X x BLOCK_DIM_Y tile (one-cell halo included) of ph, U and dpsi
// into shared memory and writes U_new for the tile interior only.
// dpsi must already hold this step's d(psi)/dt (produced by the psi kernel),
// since it feeds the jat* current terms below.
__global__ void
rhs_U_shared_mem(float* U, float* U_new, float* ph, float* dpsi, int fnx, int fny, int num_block_x, int num_block_y){
// __shared__ float ps_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float ph_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float U_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float dpsi_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
// one ghost cell of halo on each side of the tile
int halo_left = 1;
int halo_right = 1;
int halo_top = 1;
int halo_bottom = 1;
// interior cells actually updated by this block
int real_block_x = BLOCK_DIM_X - halo_left - halo_right;
int real_block_y = BLOCK_DIM_Y - halo_top - halo_bottom;
// each thread --> one element of the enlarged (halo-padded) tile
int local_id = threadIdx.x; //local id -> save to shared mem
int local_id_x = local_id % BLOCK_DIM_X;
int local_id_y = local_id / BLOCK_DIM_X;
// obtain the block id in the shrunk (interior-only) region
int block_id = blockIdx.x; // 0~num_block_x*num_block_y
int block_id_x = block_id % num_block_x;
int block_id_y = block_id / num_block_x;
// this is the addr in the inner region without considering the BC
int block_addr_x = block_id_x * real_block_x;
int block_addr_y = block_id_y * real_block_y;
// find the addr of data in global memory
// add 1 as we count the block in the inner region; - halo_left as we have a halo region in this block
int data_addr_x = block_addr_x + 1 - halo_left + local_id_x;
int data_addr_y = block_addr_y + 1 - halo_bottom + local_id_y;
int C= data_addr_y * fnx + data_addr_x; // place in global memory
int j=C/fnx;
int i=C-j*fnx;
// NOTE(review): the loads below are unconditional; this assumes the launch
// keeps every thread's (data_addr_x, data_addr_y) inside the fnx x fny
// arrays -- TODO confirm against the host-side grid configuration.
// update data
// ps_shared[local_id] = ps[C];
ph_shared[local_id] = ph[C];
U_shared[local_id] = U[C];
dpsi_shared[local_id] = dpsi[C];
__syncthreads(); // tile fully staged before any neighbour reads
// if (C==1001){
// printf("check pre-loaded data\n");
// printf("ph: %f ; u:%f ; dpsi: %f\n", ph[C], U[C], dpsi[C]);
// }
int place = local_id_y * BLOCK_DIM_X + local_id_x;
// if the points are at the boundary, do nothing
if ( (i>0) && (i<fnx-1) && (j>0) && (j<fny-1) ) {
if ((local_id_x>0)&& (local_id_x<BLOCK_DIM_X-1) && (local_id_y>0) && (local_id_y<BLOCK_DIM_Y-1)) {
// find the indices of the 8 neighbors for center
int R=place+1;
int L=place-1;
int T=place+BLOCK_DIM_X;
int B=place-BLOCK_DIM_X;
float hi = cP.hi;
float Dl_tilde = cP.Dl_tilde;
float k = cP.k;
float nx,nz; // interface-normal components at each edge
float eps = cP.eps;
// =============================================================
// 1. ANISOTROPIC DIFFUSION
// =============================================================
// these ph's are defined on cell centers (4-point averages to cell corners)
float phipjp=( ph_shared[place] + ph_shared[R] + ph_shared[T] + ph_shared[T+1] ) * 0.25f;
float phipjm=( ph_shared[place] + ph_shared[R] + ph_shared[B] + ph_shared[B+1] ) * 0.25f;
float phimjp=( ph_shared[place] + ph_shared[L] + ph_shared[T-1] + ph_shared[T] ) * 0.25f;
float phimjm=( ph_shared[place] + ph_shared[L] + ph_shared[B-1] + ph_shared[B] ) * 0.25f;
// if (C==1001){
// printf("detailed check of neighbours 3\n");
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", phipjp, phipjm, phimjp, phimjm);
// }
// jat: presumably the anti-trapping current magnitude at the cell centre,
// driven by dpsi -- confirm against the model write-up.
float jat = 0.5f*(1.0f+(1.0f-k)*U_shared[place])*(1.0f-ph_shared[place]*ph_shared[place])*dpsi_shared[place];
/*# ============================
# right edge flux (i+1/2, j)
# ============================*/
float phx = ph_shared[R]-ph_shared[place];
float phz = phipjp - phipjm;
float phn2 = phx*phx + phz*phz;
// guard the normalisation against a vanishing gradient
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_ip = 0.5f*(1.0f+(1.0f-k)*U_shared[R])*(1.0f-ph_shared[R]*ph_shared[R])*dpsi_shared[R];
float UR = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[R])*(U_shared[R]-U_shared[place]) + 0.5f*(jat + jat_ip)*nx;
/* ============================
# left edge flux (i-1/2, j)
# ============================*/
phx = ph_shared[place]-ph_shared[L];
phz = phimjp - phimjm;
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_im = 0.5f*(1.0f+(1.0f-k)*U_shared[L])*(1.0f-ph_shared[L]*ph_shared[L])*dpsi_shared[L];
float UL = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[L])*(U_shared[place]-U_shared[L]) + 0.5f*(jat + jat_im)*nx;
/*# ============================
# top edge flux (i, j+1/2)
# ============================*/
phx = phipjp - phimjp;
phz = ph_shared[T]-ph_shared[place];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jp = 0.5f*(1.0f+(1.0f-k)*U_shared[T])*(1.0f-ph_shared[T]*ph_shared[T])*dpsi_shared[T];
float UT = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[T])*(U_shared[T]-U_shared[place]) + 0.5f*(jat + jat_jp)*nz;
/*# ============================
# bottom edge flux (i, j-1/2)
# ============================*/
phx = phipjm - phimjm;
phz = ph_shared[place]-ph_shared[B];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jm = 0.5f*(1.0f+(1.0f-k)*U_shared[B])*(1.0f-ph_shared[B]*ph_shared[B])*dpsi_shared[B];
float UB = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[B])*(U_shared[place]-U_shared[B]) + 0.5f*(jat + jat_jm)*nz;
// flux divergence plus source, then forward-Euler update of U
float rhs_U = ( (UR-UL) + (UT-UB) ) * hi + cP.sqrt2 * jat;
float tau_U = (1.0f+cP.k) - (1.0f-cP.k)*ph_shared[place];
U_new[C] = U_shared[place] + cP.dt * ( rhs_U / tau_U );
}
}
}
// U equation
// Reference (non-tiled) explicit finite-volume update of the dimensionless
// solute field U, one thread per grid point, reading straight from global
// memory. Kept alongside the shared-memory variant presumably for
// debugging/performance comparison — TODO confirm it is still exercised.
// Inputs:  U (current field), ph (phase field), dpsi (d(psi)/dt from the
//          psi kernel), fnx/fny (grid size including ghost layers).
// Output:  U_new. Ghost cells (i==0, i==fnx-1, j==0, j==fny-1) are skipped
//          here and filled by set_BC.
__global__ void
rhs_U_ori(float* U, float* U_new, float* ph, float* dpsi, int fnx, int fny ){
int C = blockIdx.x * blockDim.x + threadIdx.x;
// recover 2-D coordinates (row-major, row length fnx)
int j=C/fnx;
int i=C-j*fnx;
// if the points are at boundary, return
if ( (i>0) && (i<fnx-1) && (j>0) && (j<fny-1) ) {
// find the indices of the 8 neighbors for center
int R=C+1;
int L=C-1;
int T=C+fnx;
int B=C-fnx;
// cache frequently used constants from constant memory into registers
float hi = cP.hi;
float Dl_tilde = cP.Dl_tilde;
float k = cP.k;
float nx,nz;
float eps = cP.eps;
// =============================================================
// 1. ANISOTROPIC DIFFUSION
// =============================================================
// these ps's are defined on cell centers
// ph averaged to the four cell corners (i±1/2, j±1/2)
float phipjp=( ph[C] + ph[R] + ph[T] + ph[T+1] ) * 0.25f;
float phipjm=( ph[C] + ph[R] + ph[B] + ph[B+1] ) * 0.25f;
float phimjp=( ph[C] + ph[L] + ph[T-1] + ph[T] ) * 0.25f;
float phimjm=( ph[C] + ph[L] + ph[B-1] + ph[B] ) * 0.25f;
// nodal source term built from U, ph and dpsi (presumably the
// anti-trapping current magnitude — TODO confirm against the model paper)
float jat = 0.5f*(1.0f+(1.0f-k)*U[C])*(1.0f-ph[C]*ph[C])*dpsi[C];
/*# ============================
# right edge flux (i+1/2, j)
# ============================*/
float phx = ph[R]-ph[C];
float phz = phipjp - phipjm;
float phn2 = phx*phx + phz*phz;
// unit normal component; zeroed when the gradient is numerically flat
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_ip = 0.5f*(1.0f+(1.0f-k)*U[R])*(1.0f-ph[R]*ph[R])*dpsi[R];
float UR = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[R])*(U[R]-U[C]) + 0.5f*(jat + jat_ip)*nx;
/* ============================
# left edge flux (i-1/2, j)
# ============================*/
phx = ph[C]-ph[L];
phz = phimjp - phimjm;
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_im = 0.5f*(1.0f+(1.0f-k)*U[L])*(1.0f-ph[L]*ph[L])*dpsi[L];
float UL = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[L])*(U[C]-U[L]) + 0.5f*(jat + jat_im)*nx;
/*# ============================
# top edge flux (i, j+1/2)
# ============================*/
phx = phipjp - phimjp;
phz = ph[T]-ph[C];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jp = 0.5f*(1.0f+(1.0f-k)*U[T])*(1.0f-ph[T]*ph[T])*dpsi[T];
float UT = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[T])*(U[T]-U[C]) + 0.5f*(jat + jat_jp)*nz;
/*# ============================
# bottom edge flux (i, j-1/2)
# ============================*/
phx = phipjm - phimjm;
phz = ph[C]-ph[B];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jm = 0.5f*(1.0f+(1.0f-k)*U[B])*(1.0f-ph[B]*ph[B])*dpsi[B];
float UB = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[B])*(U[C]-U[B]) + 0.5f*(jat + jat_jm)*nz;
// flux divergence + source, then explicit Euler step with variable tau_U
float rhs_U = ( (UR-UL) + (UT-UB) ) * hi + cP.sqrt2 * jat;
float tau_U = (1.0f+cP.k) - (1.0f-cP.k)*ph[C];
U_new[C] = U[C] + cP.dt * ( rhs_U / tau_U );
}
}
// Host codes for PF computing
// Host-side driver: allocates device buffers, uploads the initial psi/phi/U
// fields and the x/y coordinate arrays, copies the read-only parameter
// struct into constant memory (cP), runs params.Mt time steps (two per loop
// iteration, ping-ponging between *_old and *_new buffers), then copies the
// final psi/phi/U back into the caller's host arrays.
// Bug fix: the second argument of hipMemcpyToSymbol was the mojibake token
// "¶ms" (HTML-entity-corrupted "&params"), which does not compile.
void setup(GlobalConstants params, int fnx, int fny, float* x, float* y, float* phi, float* psi,float* U){
printCudaInfo();
// device buffers; two copies of each evolving field for the ping-pong scheme
float* x_device;
float* y_device;
float* psi_old;
float* psi_new;
float* U_old;
float* U_new;
float* phi_old;
float* phi_new;
float* dpsi;
// allocate x, y, phi, psi, U related buffers (full grid incl. ghost layers)
int length = fnx*fny;
hipMalloc((void **)&x_device, sizeof(float) * fnx);
hipMalloc((void **)&y_device, sizeof(float) * fny);
hipMalloc((void **)&phi_old, sizeof(float) * length);
hipMalloc((void **)&psi_old, sizeof(float) * length);
hipMalloc((void **)&U_old, sizeof(float) * length);
hipMalloc((void **)&phi_new, sizeof(float) * length);
hipMalloc((void **)&psi_new, sizeof(float) * length);
hipMalloc((void **)&U_new, sizeof(float) * length);
hipMalloc((void **)&dpsi, sizeof(float) * length);
// upload initial conditions
hipMemcpy(x_device, x, sizeof(float) * fnx, hipMemcpyHostToDevice);
hipMemcpy(y_device, y, sizeof(float) * fny, hipMemcpyHostToDevice);
hipMemcpy(psi_old, psi, sizeof(float) * length, hipMemcpyHostToDevice);
hipMemcpy(phi_old, phi, sizeof(float) * length, hipMemcpyHostToDevice);
hipMemcpy(U_old, U, sizeof(float) * length, hipMemcpyHostToDevice);
// pass all the read-only params into constant memory
// (fixed: was the garbled token "¶ms")
hipMemcpyToSymbol(cP, &params, sizeof(GlobalConstants));
int blocksize_1d = 128;
int blocksize_2d = 128; // seems reduce the block size makes it a little faster, but around 128 is okay.
int num_block_2d = (fnx*fny+blocksize_2d-1)/blocksize_2d;
int num_block_1d = (fnx+fny+blocksize_1d-1)/blocksize_1d;
printf("nx: %d and ny: %d\n", fnx, fny);
printf("block size %d, # blocks %d\n", blocksize_2d, num_block_2d);
hipLaunchKernelGGL(( initialize), dim3(num_block_2d), dim3(blocksize_2d) , 0, 0, psi_old, phi_old, U_old, psi_new, phi_new, U_new, x_device, y_device, fnx, fny);
hipLaunchKernelGGL(( set_BC), dim3(num_block_1d), dim3(blocksize_1d) , 0, 0, psi_new, phi_new, U_new, dpsi, fnx, fny);
hipLaunchKernelGGL(( set_BC), dim3(num_block_1d), dim3(blocksize_1d) , 0, 0, psi_old, phi_old, U_old, dpsi, fnx, fny);
hipDeviceSynchronize();
double startTime = CycleTimer::currentSeconds();
// shrink the per-block tile: the halo cells are loaded but not written,
// so each block only produces (BLOCK_DIM - halo) interior results per axis
int real_per_block_x = BLOCK_DIM_X - HALO_LEFT - HALO_RIGHT;
int real_per_block_y = BLOCK_DIM_Y - HALO_TOP - HALO_BOTTOM;
int num_block_x = (fnx - 2 + real_per_block_x - 1) / real_per_block_x;
int num_block_y = (fny - 2 + real_per_block_y - 1) / real_per_block_y;
printf("block_x: %d and block_y: %d\n", real_per_block_x, real_per_block_y);
printf("block_x: %d and block_y: %d\n", num_block_x, num_block_y);
int num_block_2d_s = num_block_x * num_block_y; //each one take one block with (32-2)+ (32-2) ture block within (fnx-2), (fny-2)
int blocksize_2d_s = BLOCK_DIM_X * BLOCK_DIM_Y; // 32*32: as we have to write 32*32 data region into shared memory
// each iteration advances two time steps, swapping old/new buffer roles
for (int kt=0; kt<params.Mt/2; kt++){
hipLaunchKernelGGL(( rhs_psi_shared_mem), dim3(num_block_2d_s), dim3(blocksize_2d_s) , 0, 0, psi_old, phi_old, U_old, psi_new, phi_new, y_device, dpsi, fnx, fny, 2*kt, num_block_x, num_block_y);
hipLaunchKernelGGL(( set_BC), dim3(num_block_1d), dim3(blocksize_1d) , 0, 0, psi_new, phi_new, U_old, dpsi, fnx, fny);
hipLaunchKernelGGL(( rhs_U_shared_mem), dim3(num_block_2d_s), dim3(blocksize_2d_s) , 0, 0, U_old, U_new, phi_new, dpsi, fnx, fny, num_block_x, num_block_y);
hipLaunchKernelGGL(( rhs_psi_shared_mem), dim3(num_block_2d_s), dim3(blocksize_2d_s) , 0, 0, psi_new, phi_new, U_new, psi_old, phi_old, y_device, dpsi, fnx, fny, 2*kt+1, num_block_x, num_block_y);
hipLaunchKernelGGL(( set_BC), dim3(num_block_1d), dim3(blocksize_1d) , 0, 0, psi_old, phi_old, U_new, dpsi, fnx, fny);
hipLaunchKernelGGL(( rhs_U_shared_mem), dim3(num_block_2d_s), dim3(blocksize_2d_s) , 0, 0, U_new, U_old, phi_old, dpsi, fnx, fny, num_block_x, num_block_y);
}
hipDeviceSynchronize();
// surface any asynchronous launch/execution error instead of silently
// copying back garbage
hipError_t err = hipGetLastError();
if (err != hipSuccess) {
printf("HIP error in setup(): %s\n", hipGetErrorString(err));
}
double endTime = CycleTimer::currentSeconds();
printf("time for %d iterations: %f s\n", params.Mt, endTime-startTime);
// after an even number of steps the final fields live in the *_old buffers
hipMemcpy(psi, psi_old, length * sizeof(float),hipMemcpyDeviceToHost);
hipMemcpy(phi, phi_old, length * sizeof(float),hipMemcpyDeviceToHost);
hipMemcpy(U, U_old, length * sizeof(float),hipMemcpyDeviceToHost);
hipFree(x_device); hipFree(y_device);
hipFree(psi_old); hipFree(psi_new);
hipFree(phi_old); hipFree(phi_new);
hipFree(U_old); hipFree(U_new);
hipFree(dpsi);
}
/*
void time_marching(GlobalConstants params, int fnx, int fny){
// initialize or load
int blocksize_1d = 256;
int blocksize_2d = 512;
int num_block_2d = (fnx*fny+blocksize_2d-1)/blocksize_2d;
int num_block_1d = (fnx+fny+blocksize_1d-1)/blocksize_1d;
hipLaunchKernelGGL(( initialize), dim3(num_block_2d), dim3(blocksize_2d) , 0, 0, ps_old, ph_old, U_old, ps_new, ph_new, U_new, x_device, y_device, fnx, fny);
for (int kt=0; kt<params.Mt/2; kt++){
rhs_psi<<< num_block_2d, blocksize_2d >>>(psi_old, phi_old, U_old, psi_new, phi_new, y_device, dpsi, fnx, fny, 2*kt );
hipLaunchKernelGGL(( set_BC), dim3(num_block_1d), dim3(blocksize_1d) , 0, 0, psi_new, phi_new, U_old, dpsi, fnx, fny);
rhs_U<<< num_block_2d, blocksize_2d >>>(U_old, U_new, phi_new, dpsi);
rhs_psi<<< num_block_2d, blocksize_2d >>>(psi_new, phi_new, U_new, psi_old, phi_old, y_device, dpsi, fnx, fny, 2*kt+1 );
hipLaunchKernelGGL(( set_BC), dim3(num_block_1d), dim3(blocksize_1d) , 0, 0, psi_old, phi_old, U_new, dpsi, fnx, fny);
rhs_U<<< num_block_2d, blocksize_2d >>>(U_new, U_old, phi_old, dpsi);
}
}*/
// Print a short summary of every visible GPU (count, name, SM count, global
// memory, compute capability). Informational only; called from setup().
// Fix: the hipError_t returned by hipGetDeviceCount was captured but never
// inspected — enumeration failures are now reported instead of ignored.
void printCudaInfo()
{
int deviceCount = 0;
hipError_t err = hipGetDeviceCount(&deviceCount);
if (err != hipSuccess) {
// deviceCount stays 0, so the loop below is simply skipped
printf("hipGetDeviceCount failed: %s\n", hipGetErrorString(err));
deviceCount = 0;
}
printf("---------------------------------------------------------\n");
printf("Found %d CUDA devices\n", deviceCount);
for (int i=0; i<deviceCount; i++)
{
hipDeviceProp_t deviceProps;
hipGetDeviceProperties(&deviceProps, i);
printf("Device %d: %s\n", i, deviceProps.name);
printf(" SMs: %d\n", deviceProps.multiProcessorCount);
printf(" Global mem: %.0f MB\n",
static_cast<float>(deviceProps.totalGlobalMem) / (1024 * 1024));
printf(" CUDA Cap: %d.%d\n", deviceProps.major, deviceProps.minor);
}
printf("---------------------------------------------------------\n");
}
| d9d053ef47b5fe28ca6bad26a3d1e20ec6855412.cu | #include <stdio.h>
#include <cmath>
#include <cuda.h>
#include <cuda_runtime.h>
#include <driver_functions.h>
#include <curand.h>
#include <curand_kernel.h>
#include "CycleTimer.h"
using namespace std;
// this is dependent on the time tiling and grid size of one thread block
// we first finish a non-time tiling version
#define BLOCK_DIM_X 16
#define BLOCK_DIM_Y 16
#define HALO_LEFT 1
#define HALO_RIGHT 1
#define HALO_TOP 1
#define HALO_BOTTOM 1
void printCudaInfo();
extern float toBW(int bytes, float sec);
// All read-only simulation parameters, copied once into constant memory (cP)
// by setup(). Field meanings below are inferred from usage in the kernels in
// this file; entries marked TODO are named after the input file and are not
// referenced by the visible kernels — confirm against the input parser.
struct GlobalConstants {
int nx;      // grid points in x — TODO confirm relation to fnx (fnx = nx+2 ghost?)
int ny;      // grid points in y — TODO confirm relation to fny
int Mt;      // total number of time steps (setup() runs Mt/2 two-step iterations)
int nts;     // TODO confirm: number of output snapshots?
int ictype;  // TODO confirm: initial-condition type selector
float G;       // TODO confirm: thermal gradient (physical input)
float R;       // TODO confirm: pulling speed (physical input)
float delta;   // TODO confirm: anisotropy strength (physical input)
float k;       // partition coefficient; used in jat, tau_psi and tau_U
float c_infm;  // TODO confirm (physical input)
float Dl;      // TODO confirm: liquid diffusivity (physical input)
float d0;      // TODO confirm: capillary length (physical input)
float W0;      // interface width scale; divides physical lengths in initialize/rhs_psi
float lT;      // TODO confirm: thermal length (physical input)
float lamd;    // coupling constant in the psi source term
float tau0;    // TODO confirm: time scale (physical input)
float c_infty; // TODO confirm (physical input)
float R_tilde;   // nondimensional pulling speed; used in the frozen-temperature term Up
float Dl_tilde;  // nondimensional diffusivity; scales the U fluxes
float lT_tilde;  // nondimensional thermal length; divides the Up term
float eps;       // threshold on squared gradient magnitude (degenerate-gradient guard)
float alpha0;    // TODO confirm: misorientation angle; presumably cosa/sina = cos/sin(alpha0)
float dx;        // TODO confirm: grid spacing (hi is presumably 1/dx)
float dt;        // time-step size; multiplies every explicit update
float asp_ratio; // TODO confirm: domain aspect ratio
float lxd;       // TODO confirm: domain length (dimensional)
float lx;        // TODO confirm: domain length (nondimensional)
float lyd;       // TODO confirm: domain height (dimensional)
float eta;       // TODO confirm: noise amplitude? not used in the kernels shown
float U0;        // initial value of the solute field U (see initialize)
// parameters that are not in the input file
float hi;      // inverse grid spacing factor multiplying flux differences — TODO confirm = 1/dx
float cosa;    // rotation cosine used in atheta/aptheta
float sina;    // rotation sine used in atheta/aptheta
float sqrt2;   // presumably sqrtf(2): divides psi in tanhf(ps/sqrt2) and scales sources
float a_s;     // anisotropy prefactor in atheta
float epsilon; // fourfold anisotropy strength in atheta
float a_12;    // anisotropy derivative coefficient in aptheta
};
// single constant-memory copy read by all kernels; written by
// hipMemcpyToSymbol/cudaMemcpyToSymbol in setup()
__constant__ GlobalConstants cP;
// Device codes
// boundary condition
// only use this function to access the boundary points,
// other functions return at the boundary
// TODO: this function is doing what, we can definetly merge this into kenrel right?
// Fill the one-cell ghost frame of all four field arrays by copying the value
// two cells inside the domain onto the outermost row/column (the same rule the
// original applied per-array). One thread serves column `tid` for the
// bottom/top rows and, independently, row `tid` for the left/right columns;
// threads with tid >= max(fnx, fny) do nothing.
__global__ void
set_BC(float* ps, float* ph, float* U, float* dpsi, int fnx, int fny){
int tid = blockIdx.x * blockDim.x + threadIdx.x;
float* fields[4] = {ps, ph, U, dpsi};
// bottom (j = 0) and top (j = fny-1) ghost rows
if (tid < fnx) {
int bot_src = tid + 2*fnx;
int top_dst = tid + (fny-1)*fnx;
int top_src = tid + (fny-3)*fnx;
for (int f = 0; f < 4; f++) {
fields[f][tid] = fields[f][bot_src];
fields[f][top_dst] = fields[f][top_src];
}
}
// left (i = 0) and right (i = fnx-1) ghost columns
if (tid < fny) {
int row = tid * fnx;
for (int f = 0; f < 4; f++) {
fields[f][row] = fields[f][row + 2];
fields[f][row + fnx - 1] = fields[f][row + fnx - 3];
}
}
}
// initialization
// Set the initial condition on every interior grid point (ghost cells are
// handled later by set_BC): psi is a signed-distance-like profile around a
// circular seed centred at x[fnx/2] on the y=0 line, U is the uniform value
// cP.U0, and phi = tanh(psi/sqrt(2)). Both the *_old and *_new copies of each
// field receive identical values so the first time step starts consistent.
__global__ void
initialize(float* ps_old, float* ph_old, float* U_old, float* ps_new, float* ph_new, float* U_new
, float* x, float* y, int fnx, int fny){
int gid = blockIdx.x * blockDim.x + threadIdx.x;
int row = gid / fnx;
int col = gid - row * fnx;
// interior points only; layout is row-major with row length fnx
if (col > 0 && col < fnx - 1 && row > 0 && row < fny - 1) {
int mid = fnx / 2;
float dxc = x[col] - x[mid];
float zc = y[row];
// distance from the seed centre, measured in units of W0, offset by 5.625
float seed = 5.625f - sqrtf(dxc*dxc + zc*zc) / cP.W0;
ps_old[gid] = seed;
ps_new[gid] = seed;
float phi0 = tanhf(seed / cP.sqrt2);
ph_old[gid] = phi0;
ph_new[gid] = phi0;
U_old[gid] = cP.U0;
U_new[gid] = cP.U0;
}
}
// anisotropy functions
// Anisotropy factor a(theta) for a gradient (ux, uz): rotate the gradient by
// the misorientation angle (cP.cosa/cP.sina), then evaluate the fourfold
// form a_s * (1 + epsilon*(x^4 + z^4)/|grad|^4). Returns 1 when the squared
// magnitude is at or below cP.eps (degenerate gradient).
__inline__ __device__ float
atheta(float ux, float uz){
float xr = cP.cosa*ux + cP.sina*uz;   // rotated x-component
float zr = -cP.sina*ux + cP.cosa*uz;  // rotated z-component
float x2 = xr*xr;
float z2 = zr*zr;
float m2 = x2 + z2;
if (m2 <= cP.eps) {
return 1.0f;
}
float m4 = m2*m2;
return cP.a_s*( 1.0f + cP.epsilon*(x2*x2 + z2*z2) / m4);
}
// Derivative term a'(theta) companion to atheta(): same rotated-gradient
// setup, returning -a_12 * xr*zr*(xr^2 - zr^2)/|grad|^4. Returns 0 when the
// squared magnitude is at or below cP.eps (degenerate gradient).
__inline__ __device__ float
aptheta(float ux, float uz){
float xr = cP.cosa*ux + cP.sina*uz;   // rotated x-component
float zr = -cP.sina*ux + cP.cosa*uz;  // rotated z-component
float x2 = xr*xr;
float z2 = zr*zr;
float m2 = x2 + z2;
if (m2 <= cP.eps) {
return 0.0f;
}
float m4 = m2*m2;
return -cP.a_12*xr*zr*(x2 - z2) / m4;
}
// psi & phi equation: two dimensions
//
// One explicit step of the preconditioned phase-field (psi) equation, tiled
// through shared memory. Each block stages a BLOCK_DIM_X x BLOCK_DIM_Y tile
// of ps/ph/U — interior cells plus a one-cell halo on every side — then only
// the non-halo threads compute and write results.
// Launch: 1-D grid of num_block_x*num_block_y blocks, 1-D blocks of
// BLOCK_DIM_X*BLOCK_DIM_Y threads (see setup()).
// Outputs: dpsi (time derivative, consumed by rhs_U_shared_mem),
//          ps_new, and ph_new = tanh(ps_new/sqrt(2)).
// Ghost cells (global boundary) are skipped here and filled by set_BC.
__global__ void
rhs_psi_shared_mem(float* ps, float* ph, float* U, float* ps_new, float* ph_new, \
float* y, float* dpsi, int fnx, int fny, int nt, int num_block_x, int num_block_y){
// each CUDA theard block compute one grid(32*32)
// memory access is from global and also not continous which cannot reach the max bandwidth(memory colaseing)
// add a shared memory version to store the neighbours data: ps and ph
// clare shared memory for time tiling
// we have extra (nx+2)(ny+2) size of space to load
int halo_left   = 1;
int halo_right  = 1;
int halo_top    = 1;
int halo_bottom = 1;
int real_block_x = BLOCK_DIM_X - halo_left - halo_right;
int real_block_y = BLOCK_DIM_Y - halo_top - halo_bottom;
// load (32+2)*(32+2) daat from mem
__shared__ float ps_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float ph_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float U_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
// each thread --> one data in one enlarged block
int local_id = threadIdx.x; //local id -> sava to shared mem
int local_id_x = local_id % BLOCK_DIM_X;
int local_id_y = local_id / BLOCK_DIM_X;
// obtain the block id in shrinked region
int block_id = blockIdx.x; // 0~num_block_x*num_block_y
int block_id_x = block_id % num_block_x;
int block_id_y = block_id / num_block_x;
// this is the addr in inner region without considering the BC
int block_addr_x = block_id_x * real_block_x;
int block_addr_y = block_id_y * real_block_y;
// find the addr of data in global memory
// add 1 as we counter the block in inner region; - halo_left as we have halo region in this block
int data_addr_x = block_addr_x + 1 - halo_left + local_id_x;
int data_addr_y = block_addr_y + 1 - halo_bottom + local_id_y;
// NOTE(review): for the right-most/top-most blocks data_addr_x/y can step
// past fnx-1/fny-1 when (fnx-2) or (fny-2) is not a multiple of the real
// tile size — the loads below would then read out of range; confirm the
// grid sizing in setup() guarantees this cannot happen.
int C= data_addr_y * fnx + data_addr_x; // place in global memory
int j=C/fnx;
int i=C-j*fnx;
// update data
ps_shared[local_id] = ps[C];
ph_shared[local_id] = ph[C];
U_shared[local_id] = U[C];
// barrier: every thread's halo reads below depend on neighbours' loads above
__syncthreads();
// if (C==1001){
// printf("check data 1: %f\n", ps[C]);
// }
// if (local_id == 0) printf("check data %f", ps_shared[local_id]);
// compute based on the shared memory, skip if we are at the boundary
int place = local_id_y * BLOCK_DIM_X + local_id_x;
// if the points are at boundary, return
// two levels of retunr: global and local region
if ( (i>0) && (i<fnx-1) && (j>0) && (j<fny-1) ) {
if ((local_id_x>0)&& (local_id_x<BLOCK_DIM_X-1) && (local_id_y>0) && (local_id_y<BLOCK_DIM_Y-1)) {
// find the indices of the 8 neighbors for center
// if (C==1000){printf("find");}
int R=place+1;
int L=place-1;
int T=place+BLOCK_DIM_X;
int B=place-BLOCK_DIM_X;
// // preload data
// float ps_shared_c = ps_shared[place];
// float ps_shared_r = ps_shared[R];
// float ps_shared_l = ps_shared[L];
// float ps_shared_top = ps_shared[T];
// float ps_shared_top_l = ps_shared[T-1];
// float ps_shared_top_r = ps_shared[T+1];
// float ps_shared_b_r = ps_shared[B-1];
// float ps_shared_b_l = ps_shared[B+1];
// float ps_shared_b = ps_shared[B];
// float ph_shared_c = ph_shared[place];
// float ph_shared_r = ph_shared[R];
// float ph_shared_l = ph_shared[L];
// float ph_shared_top = ph_shared[T];
// float ph_shared_top_l = ph_shared[T-1];
// float ph_shared_top_r = ph_shared[T+1];
// float ph_shared_b_r = ph_shared[B-1];
// float ph_shared_b_l = ph_shared[B+1];
// float ph_shared_b = ph_shared[B];
// if (C==1001){
// printf("detailed check of neighbours\n");
// printf("R: %d ; L:%d ; T: %d ; B: %d \n", R, L, T, B);
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", ps_shared[R], ps_shared[L], ps_shared[T], ps_shared[B]);
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", ps_shared[R], ps_shared[L], ps_shared[T+1], ps_shared[B]);
// }
// =============================================================
// 1. ANISOTROPIC DIFFUSION
// =============================================================
// these ps's are defined on cell centers
float psipjp=( ps_shared[place] + ps_shared[R] + ps_shared[T] + ps_shared[T+1] ) * 0.25f;
float psipjm=( ps_shared[place] + ps_shared[R] + ps_shared[B] + ps_shared[B+1] ) * 0.25f;
float psimjp=( ps_shared[place] + ps_shared[L] + ps_shared[T-1] + ps_shared[T] ) * 0.25f;
float psimjm=( ps_shared[place] + ps_shared[L] + ps_shared[B-1] + ps_shared[B] ) * 0.25f;
float phipjp=( ph_shared[place] + ph_shared[R] + ph_shared[T] + ph_shared[T+1] ) * 0.25f;
float phipjm=( ph_shared[place] + ph_shared[R] + ph_shared[B] + ph_shared[B+1] ) * 0.25f;
float phimjp=( ph_shared[place] + ph_shared[L] + ph_shared[T-1] + ph_shared[T] ) * 0.25f;
float phimjm=( ph_shared[place] + ph_shared[L] + ph_shared[B-1] + ph_shared[B] ) * 0.25f;
// if (C==1001){
// printf("detailed check of neighbours 2\n");
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", psipjp, psipjm, psimjp, psimjm);
// }
// ============================
// right edge flux
// ============================
float psx = ps_shared[R]-ps_shared[place];
float psz = psipjp - psipjm;
float phx = ph_shared[R]-ph_shared[place];
float phz = phipjp - phipjm;
float A  = atheta( phx,phz);
float Ap = aptheta(phx,phz);
float JR = A * ( A*psx - Ap*psz );
// ============================
// left edge flux
// ============================
psx = ps_shared[place]-ps_shared[L];
psz = psimjp - psimjm;
phx = ph_shared[place]-ph_shared[L];
phz = phimjp - phimjm;
A  = atheta( phx,phz);
Ap = aptheta(phx,phz);
float JL = A * ( A*psx - Ap*psz );
// ============================
// top edge flux
// ============================
psx = psipjp - psimjp;
psz = ps_shared[T]-ps_shared[place];
phx = phipjp - phimjp;
phz = ph_shared[T]-ph_shared[place];
A  = atheta( phx,phz);
Ap = aptheta(phx,phz);
float JT = A * ( A*psz + Ap*psx );
// ============================
// bottom edge flux
// ============================
psx = psipjm - psimjm;
psz = ps_shared[place]-ps_shared[B];
phx = phipjm - phimjm;
phz = ph_shared[place]-ph_shared[B];
A  = atheta( phx,phz);
Ap = aptheta(phx,phz);
float JB = A * ( A*psz + Ap*psx );
// if (C==1001){
// printf("detailed check of neighbours 3\n");
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", JR, JL, JT, JB);
// }
/*# =============================================================
#
# 2. EXTRA TERM: sqrt2 * atheta**2 * phi * |grad psi|^2
#
# =============================================================
# d(phi)/dx d(psi)/dx d(phi)/dz d(psi)/dz at nodes (i,j)*/
float phxn = ( ph_shared[R] - ph_shared[L] ) * 0.5f;
float phzn = ( ph_shared[T] - ph_shared[B] ) * 0.5f;
float psxn = ( ps_shared[R] - ps_shared[L] ) * 0.5f;
float pszn = ( ps_shared[T] - ps_shared[B] ) * 0.5f;
float A2 = atheta(phxn,phzn);
A2 = A2*A2;
float gradps2 = (psxn)*(psxn) + (pszn)*(pszn);
float extra =  -cP.sqrt2 * A2 * ph_shared[place] * gradps2;
/*# =============================================================
#
# 3. double well (transformed): sqrt2 * phi + nonlinear terms
#
# =============================================================*/
// frozen-temperature approximation: local undercooling from height y[j]
// and elapsed time nt*dt
float Up = (y[j]/cP.W0 - cP.R_tilde * (nt*cP.dt) )/cP.lT_tilde;
float rhs_psi = ((JR-JL) + (JT-JB) + extra) * cP.hi*cP.hi + \
cP.sqrt2*ph_shared[place] - cP.lamd*(1.0f-ph_shared[place]*ph_shared[place])*cP.sqrt2*(U_shared[place] + Up);
/*# =============================================================
#
# 4. dpsi/dt term
#
# =============================================================*/
// relaxation time, floored at cP.k to keep the division below well-behaved
float tp = (1.0f-(1.0f-cP.k)*Up);
float tau_psi;
if (tp >= cP.k){tau_psi = tp*A2;}
else {tau_psi = cP.k*A2;}
dpsi[C] = rhs_psi / tau_psi;
ps_new[C] = ps_shared[place] +  cP.dt * dpsi[C];
ph_new[C] = tanhf(ps_new[C]/cP.sqrt2);
// if (C==1000){printf("%f ",ph_new[C]);}
// if (C == 1000) printf("check data %f\n", ps_shared[local_id]);
// if (C == 1001) {
// printf("check data ps: %f and ph: %f and dpsi: %f and U: %f\n", ps_new[C], ph_new[C], dpsi[C], U[C]);
// //   printf("block id %d ; local_id_x %d; local_id_y %d\n", block_id, local_id_x, local_id_y);
// //   printf("block id %d ; data_addr_x %d; data_addr_y %d\n", block_id, data_addr_x, data_addr_y);
// }
}
}
__syncthreads();
}
// U equation
// Shared-memory-tiled explicit finite-volume update of the solute field U.
// Same tiling scheme as rhs_psi_shared_mem: each block stages a
// BLOCK_DIM_X x BLOCK_DIM_Y tile of ph/U/dpsi (interior plus one-cell halo)
// and only non-halo threads write U_new. dpsi must be the output of the
// preceding rhs_psi_shared_mem launch (see the step ordering in setup()).
// Ghost cells are skipped here and filled by set_BC.
__global__ void
rhs_U_shared_mem(float* U, float* U_new, float* ph, float* dpsi, int fnx, int fny, int num_block_x, int num_block_y){
// __shared__ float ps_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float ph_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float U_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
__shared__ float dpsi_shared[(BLOCK_DIM_Y)*(BLOCK_DIM_X)];
int halo_left   = 1;
int halo_right  = 1;
int halo_top    = 1;
int halo_bottom = 1;
int real_block_x = BLOCK_DIM_X - halo_left - halo_right;
int real_block_y = BLOCK_DIM_Y - halo_top - halo_bottom;
// each thread --> one data in one enlarged block
int local_id = threadIdx.x; //local id -> sava to shared mem
int local_id_x = local_id % BLOCK_DIM_X;
int local_id_y = local_id / BLOCK_DIM_X;
// obtain the block id in shrinked region
int block_id = blockIdx.x; // 0~num_block_x*num_block_y
int block_id_x = block_id % num_block_x;
int block_id_y = block_id / num_block_x;
// this is the addr in inner region without considering the BC
int block_addr_x = block_id_x * real_block_x;
int block_addr_y = block_id_y * real_block_y;
// find the addr of data in global memory
// add 1 as we counter the block in inner region; - halo_left as we have halo region in this block
int data_addr_x = block_addr_x + 1 - halo_left + local_id_x;
int data_addr_y = block_addr_y + 1 - halo_bottom + local_id_y;
// NOTE(review): as in rhs_psi_shared_mem, edge blocks may compute a C past
// the array end when the interior size is not a multiple of the real tile
// size — confirm the grid sizing in setup() prevents out-of-range loads.
int C= data_addr_y * fnx + data_addr_x; // place in global memory
int j=C/fnx;
int i=C-j*fnx;
// update data
// ps_shared[local_id] = ps[C];
ph_shared[local_id] = ph[C];
U_shared[local_id] = U[C];
dpsi_shared[local_id] = dpsi[C];
// barrier: halo reads below depend on neighbours' loads above
__syncthreads();
// if (C==1001){
// printf("check pre-loaded data\n");
// printf("ph: %f ; u:%f ; dpsi: %f\n", ph[C], U[C], dpsi[C]);
// }
int place = local_id_y * BLOCK_DIM_X + local_id_x;
// if the points are at boundary, return
if ( (i>0) && (i<fnx-1) && (j>0) && (j<fny-1) ) {
if ((local_id_x>0)&& (local_id_x<BLOCK_DIM_X-1) && (local_id_y>0) && (local_id_y<BLOCK_DIM_Y-1)) {
// find the indices of the 8 neighbors for center
int R=place+1;
int L=place-1;
int T=place+BLOCK_DIM_X;
int B=place-BLOCK_DIM_X;
// cache constants from constant memory into registers
float hi = cP.hi;
float Dl_tilde = cP.Dl_tilde;
float k = cP.k;
float nx,nz;
float eps = cP.eps;
// =============================================================
// 1. ANISOTROPIC DIFFUSION
// =============================================================
// these ph's are defined on cell centers
float phipjp=( ph_shared[place] + ph_shared[R] + ph_shared[T] + ph_shared[T+1] ) * 0.25f;
float phipjm=( ph_shared[place] + ph_shared[R] + ph_shared[B] + ph_shared[B+1] ) * 0.25f;
float phimjp=( ph_shared[place] + ph_shared[L] + ph_shared[T-1] + ph_shared[T] ) * 0.25f;
float phimjm=( ph_shared[place] + ph_shared[L] + ph_shared[B-1] + ph_shared[B] ) * 0.25f;
// if (C==1001){
// printf("detailed check of neighbours 3\n");
// printf("R: %f ; L:%f ; T: %f ; B: %f \n", phipjp, phipjm, phimjp, phimjm);
// }
// nodal source term from U, ph and dpsi (presumably the anti-trapping
// current magnitude — TODO confirm against the model paper)
float jat    = 0.5f*(1.0f+(1.0f-k)*U_shared[place])*(1.0f-ph_shared[place]*ph_shared[place])*dpsi_shared[place];
/*# ============================
# right edge flux (i+1/2, j)
# ============================*/
float phx = ph_shared[R]-ph_shared[place];
float phz = phipjp - phipjm;
float phn2 = phx*phx + phz*phz;
// unit normal component; zeroed when the gradient is numerically flat
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_ip = 0.5f*(1.0f+(1.0f-k)*U_shared[R])*(1.0f-ph_shared[R]*ph_shared[R])*dpsi_shared[R];
float UR = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[R])*(U_shared[R]-U_shared[place]) + 0.5f*(jat + jat_ip)*nx;
/* ============================
# left edge flux (i-1/2, j)
# ============================*/
phx = ph_shared[place]-ph_shared[L];
phz = phimjp - phimjm;
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_im = 0.5f*(1.0f+(1.0f-k)*U_shared[L])*(1.0f-ph_shared[L]*ph_shared[L])*dpsi_shared[L];
float UL = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[L])*(U_shared[place]-U_shared[L]) + 0.5f*(jat + jat_im)*nx;
/*# ============================
# top edge flux (i, j+1/2)
# ============================*/
phx = phipjp - phimjp;
phz = ph_shared[T]-ph_shared[place];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jp = 0.5f*(1.0f+(1.0f-k)*U_shared[T])*(1.0f-ph_shared[T]*ph_shared[T])*dpsi_shared[T];
float UT = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[T])*(U_shared[T]-U_shared[place]) + 0.5f*(jat + jat_jp)*nz;
/*# ============================
# bottom edge flux (i, j-1/2)
# ============================*/
phx = phipjm - phimjm;
phz = ph_shared[place]-ph_shared[B];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jm = 0.5f*(1.0f+(1.0f-k)*U_shared[B])*(1.0f-ph_shared[B]*ph_shared[B])*dpsi_shared[B];
float UB = hi*Dl_tilde*0.5f*(2.0f - ph_shared[place] - ph_shared[B])*(U_shared[place]-U_shared[B]) + 0.5f*(jat + jat_jm)*nz;
// flux divergence + source, then explicit Euler step with variable tau_U
float rhs_U = ( (UR-UL) + (UT-UB) ) * hi + cP.sqrt2 * jat;
float tau_U = (1.0f+cP.k) - (1.0f-cP.k)*ph_shared[place];
U_new[C] = U_shared[place] + cP.dt * ( rhs_U / tau_U );
}
}
}
// U equation
// Reference (non-tiled) explicit finite-volume update of the dimensionless
// solute field U, one thread per grid point, reading straight from global
// memory. Kept alongside rhs_U_shared_mem presumably for debugging or
// performance comparison — TODO confirm it is still exercised.
// Inputs:  U (current field), ph (phase field), dpsi (d(psi)/dt from the
//          psi kernel), fnx/fny (grid size including ghost layers).
// Output:  U_new. Ghost cells (i==0, i==fnx-1, j==0, j==fny-1) are skipped
//          here and filled by set_BC.
__global__ void
rhs_U_ori(float* U, float* U_new, float* ph, float* dpsi, int fnx, int fny ){
int C = blockIdx.x * blockDim.x + threadIdx.x;
// recover 2-D coordinates (row-major, row length fnx)
int j=C/fnx;
int i=C-j*fnx;
// if the points are at boundary, return
if ( (i>0) && (i<fnx-1) && (j>0) && (j<fny-1) ) {
// find the indices of the 8 neighbors for center
int R=C+1;
int L=C-1;
int T=C+fnx;
int B=C-fnx;
// cache frequently used constants from constant memory into registers
float hi = cP.hi;
float Dl_tilde = cP.Dl_tilde;
float k = cP.k;
float nx,nz;
float eps = cP.eps;
// =============================================================
// 1. ANISOTROPIC DIFFUSION
// =============================================================
// these ps's are defined on cell centers
// ph averaged to the four cell corners (i±1/2, j±1/2)
float phipjp=( ph[C] + ph[R] + ph[T] + ph[T+1] ) * 0.25f;
float phipjm=( ph[C] + ph[R] + ph[B] + ph[B+1] ) * 0.25f;
float phimjp=( ph[C] + ph[L] + ph[T-1] + ph[T] ) * 0.25f;
float phimjm=( ph[C] + ph[L] + ph[B-1] + ph[B] ) * 0.25f;
// nodal source term built from U, ph and dpsi (presumably the
// anti-trapping current magnitude — TODO confirm against the model paper)
float jat = 0.5f*(1.0f+(1.0f-k)*U[C])*(1.0f-ph[C]*ph[C])*dpsi[C];
/*# ============================
# right edge flux (i+1/2, j)
# ============================*/
float phx = ph[R]-ph[C];
float phz = phipjp - phipjm;
float phn2 = phx*phx + phz*phz;
// unit normal component; zeroed when the gradient is numerically flat
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_ip = 0.5f*(1.0f+(1.0f-k)*U[R])*(1.0f-ph[R]*ph[R])*dpsi[R];
float UR = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[R])*(U[R]-U[C]) + 0.5f*(jat + jat_ip)*nx;
/* ============================
# left edge flux (i-1/2, j)
# ============================*/
phx = ph[C]-ph[L];
phz = phimjp - phimjm;
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nx = phx / sqrtf(phn2);}
else {nx = 0.0f;}
float jat_im = 0.5f*(1.0f+(1.0f-k)*U[L])*(1.0f-ph[L]*ph[L])*dpsi[L];
float UL = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[L])*(U[C]-U[L]) + 0.5f*(jat + jat_im)*nx;
/*# ============================
# top edge flux (i, j+1/2)
# ============================*/
phx = phipjp - phimjp;
phz = ph[T]-ph[C];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jp = 0.5f*(1.0f+(1.0f-k)*U[T])*(1.0f-ph[T]*ph[T])*dpsi[T];
float UT = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[T])*(U[T]-U[C]) + 0.5f*(jat + jat_jp)*nz;
/*# ============================
# bottom edge flux (i, j-1/2)
# ============================*/
phx = phipjm - phimjm;
phz = ph[C]-ph[B];
phn2 = phx*phx + phz*phz;
if (phn2 > eps) {nz = phz / sqrtf(phn2);}
else {nz = 0.0f;}
float jat_jm = 0.5f*(1.0f+(1.0f-k)*U[B])*(1.0f-ph[B]*ph[B])*dpsi[B];
float UB = hi*Dl_tilde*0.5f*(2.0f - ph[C] - ph[B])*(U[C]-U[B]) + 0.5f*(jat + jat_jm)*nz;
// flux divergence + source, then explicit Euler step with variable tau_U
float rhs_U = ( (UR-UL) + (UT-UB) ) * hi + cP.sqrt2 * jat;
float tau_U = (1.0f+cP.k) - (1.0f-cP.k)*ph[C];
U_new[C] = U[C] + cP.dt * ( rhs_U / tau_U );
}
}
// Host codes for PF computing
// Host-side driver: allocates device buffers, uploads the initial psi/phi/U
// fields and the x/y coordinate arrays, copies the read-only parameter
// struct into constant memory (cP), runs params.Mt time steps (two per loop
// iteration, ping-ponging between *_old and *_new buffers), then copies the
// final psi/phi/U back into the caller's host arrays.
// Bug fix: the second argument of cudaMemcpyToSymbol was the mojibake token
// "¶ms" (HTML-entity-corrupted "&params"), which does not compile.
void setup(GlobalConstants params, int fnx, int fny, float* x, float* y, float* phi, float* psi,float* U){
printCudaInfo();
// device buffers; two copies of each evolving field for the ping-pong scheme
float* x_device;
float* y_device;
float* psi_old;
float* psi_new;
float* U_old;
float* U_new;
float* phi_old;
float* phi_new;
float* dpsi;
// allocate x, y, phi, psi, U related buffers (full grid incl. ghost layers)
int length = fnx*fny;
cudaMalloc((void **)&x_device, sizeof(float) * fnx);
cudaMalloc((void **)&y_device, sizeof(float) * fny);
cudaMalloc((void **)&phi_old, sizeof(float) * length);
cudaMalloc((void **)&psi_old, sizeof(float) * length);
cudaMalloc((void **)&U_old, sizeof(float) * length);
cudaMalloc((void **)&phi_new, sizeof(float) * length);
cudaMalloc((void **)&psi_new, sizeof(float) * length);
cudaMalloc((void **)&U_new, sizeof(float) * length);
cudaMalloc((void **)&dpsi, sizeof(float) * length);
// upload initial conditions
cudaMemcpy(x_device, x, sizeof(float) * fnx, cudaMemcpyHostToDevice);
cudaMemcpy(y_device, y, sizeof(float) * fny, cudaMemcpyHostToDevice);
cudaMemcpy(psi_old, psi, sizeof(float) * length, cudaMemcpyHostToDevice);
cudaMemcpy(phi_old, phi, sizeof(float) * length, cudaMemcpyHostToDevice);
cudaMemcpy(U_old, U, sizeof(float) * length, cudaMemcpyHostToDevice);
// pass all the read-only params into constant memory
// (fixed: was the garbled token "¶ms")
cudaMemcpyToSymbol(cP, &params, sizeof(GlobalConstants));
int blocksize_1d = 128;
int blocksize_2d = 128; // seems reduce the block size makes it a little faster, but around 128 is okay.
int num_block_2d = (fnx*fny+blocksize_2d-1)/blocksize_2d;
int num_block_1d = (fnx+fny+blocksize_1d-1)/blocksize_1d;
printf("nx: %d and ny: %d\n", fnx, fny);
printf("block size %d, # blocks %d\n", blocksize_2d, num_block_2d);
initialize<<< num_block_2d, blocksize_2d >>>(psi_old, phi_old, U_old, psi_new, phi_new, U_new, x_device, y_device, fnx, fny);
set_BC<<< num_block_1d, blocksize_1d >>>(psi_new, phi_new, U_new, dpsi, fnx, fny);
set_BC<<< num_block_1d, blocksize_1d >>>(psi_old, phi_old, U_old, dpsi, fnx, fny);
cudaDeviceSynchronize();
double startTime = CycleTimer::currentSeconds();
// shrink the per-block tile: the halo cells are loaded but not written,
// so each block only produces (BLOCK_DIM - halo) interior results per axis
int real_per_block_x = BLOCK_DIM_X - HALO_LEFT - HALO_RIGHT;
int real_per_block_y = BLOCK_DIM_Y - HALO_TOP - HALO_BOTTOM;
int num_block_x = (fnx - 2 + real_per_block_x - 1) / real_per_block_x;
int num_block_y = (fny - 2 + real_per_block_y - 1) / real_per_block_y;
printf("block_x: %d and block_y: %d\n", real_per_block_x, real_per_block_y);
printf("block_x: %d and block_y: %d\n", num_block_x, num_block_y);
int num_block_2d_s = num_block_x * num_block_y; //each one take one block with (32-2)+ (32-2) ture block within (fnx-2), (fny-2)
int blocksize_2d_s = BLOCK_DIM_X * BLOCK_DIM_Y; // 32*32: as we have to write 32*32 data region into shared memory
// each iteration advances two time steps, swapping old/new buffer roles
for (int kt=0; kt<params.Mt/2; kt++){
rhs_psi_shared_mem<<< num_block_2d_s, blocksize_2d_s >>>(psi_old, phi_old, U_old, psi_new, phi_new, y_device, dpsi, fnx, fny, 2*kt, num_block_x, num_block_y);
set_BC<<< num_block_1d, blocksize_1d >>>(psi_new, phi_new, U_old, dpsi, fnx, fny);
rhs_U_shared_mem<<< num_block_2d_s, blocksize_2d_s >>>(U_old, U_new, phi_new, dpsi, fnx, fny, num_block_x, num_block_y);
rhs_psi_shared_mem<<< num_block_2d_s, blocksize_2d_s >>>(psi_new, phi_new, U_new, psi_old, phi_old, y_device, dpsi, fnx, fny, 2*kt+1, num_block_x, num_block_y);
set_BC<<< num_block_1d, blocksize_1d >>>(psi_old, phi_old, U_new, dpsi, fnx, fny);
rhs_U_shared_mem<<< num_block_2d_s, blocksize_2d_s >>>(U_new, U_old, phi_old, dpsi, fnx, fny, num_block_x, num_block_y);
}
cudaDeviceSynchronize();
// surface any asynchronous launch/execution error instead of silently
// copying back garbage
cudaError_t loopErr = cudaGetLastError();
if (loopErr != cudaSuccess) {
printf("CUDA error in setup(): %s\n", cudaGetErrorString(loopErr));
}
double endTime = CycleTimer::currentSeconds();
printf("time for %d iterations: %f s\n", params.Mt, endTime-startTime);
// after an even number of steps the final fields live in the *_old buffers
cudaMemcpy(psi, psi_old, length * sizeof(float),cudaMemcpyDeviceToHost);
cudaMemcpy(phi, phi_old, length * sizeof(float),cudaMemcpyDeviceToHost);
cudaMemcpy(U, U_old, length * sizeof(float),cudaMemcpyDeviceToHost);
cudaFree(x_device); cudaFree(y_device);
cudaFree(psi_old); cudaFree(psi_new);
cudaFree(phi_old); cudaFree(phi_new);
cudaFree(U_old); cudaFree(U_new);
cudaFree(dpsi);
}
/*
void time_marching(GlobalConstants params, int fnx, int fny){
// initialize or load
int blocksize_1d = 256;
int blocksize_2d = 512;
int num_block_2d = (fnx*fny+blocksize_2d-1)/blocksize_2d;
int num_block_1d = (fnx+fny+blocksize_1d-1)/blocksize_1d;
initialize<<< num_block_2d, blocksize_2d >>>(ps_old, ph_old, U_old, ps_new, ph_new, U_new, x_device, y_device, fnx, fny);
for (int kt=0; kt<params.Mt/2; kt++){
rhs_psi<<< num_block_2d, blocksize_2d >>>(psi_old, phi_old, U_old, psi_new, phi_new, y_device, dpsi, fnx, fny, 2*kt );
set_BC<<< num_block_1d, blocksize_1d >>>(psi_new, phi_new, U_old, dpsi, fnx, fny);
rhs_U<<< num_block_2d, blocksize_2d >>>(U_old, U_new, phi_new, dpsi);
rhs_psi<<< num_block_2d, blocksize_2d >>>(psi_new, phi_new, U_new, psi_old, phi_old, y_device, dpsi, fnx, fny, 2*kt+1 );
set_BC<<< num_block_1d, blocksize_1d >>>(psi_old, phi_old, U_new, dpsi, fnx, fny);
rhs_U<<< num_block_2d, blocksize_2d >>>(U_new, U_old, phi_old, dpsi);
}
}*/
// Prints a short summary of every CUDA device visible to this process:
// name, SM count, global memory size, and compute capability.
void printCudaInfo()
{
    // for fun, just print out some stats on the machine
    int deviceCount = 0;
    cudaError_t err = cudaGetDeviceCount(&deviceCount);
    printf("---------------------------------------------------------\n");
    if (err != cudaSuccess)
    {
        // BUGFIX: the original fetched err and then ignored it, so a missing
        // driver/device silently printed "Found 0 CUDA devices".
        printf("cudaGetDeviceCount failed: %s\n", cudaGetErrorString(err));
    }
    printf("Found %d CUDA devices\n", deviceCount);
    for (int i=0; i<deviceCount; i++)
    {
        cudaDeviceProp deviceProps;
        if (cudaGetDeviceProperties(&deviceProps, i) != cudaSuccess)
            continue;  // skip devices we cannot query instead of printing garbage
        printf("Device %d: %s\n", i, deviceProps.name);
        printf(" SMs: %d\n", deviceProps.multiProcessorCount);
        printf(" Global mem: %.0f MB\n",
               static_cast<float>(deviceProps.totalGlobalMem) / (1024 * 1024));
        printf(" CUDA Cap: %d.%d\n", deviceProps.major, deviceProps.minor);
    }
    printf("---------------------------------------------------------\n");
}
|
db6e68ab722e9ece9a478515cc813e814369b387.hip | // !!! This is a file automatically generated by hipify!!!
#include "PSOCuda.cuh"
#include <stdexcept>
#include <algorithm>
#include "hip/hip_runtime.h"
#include "device_launch_parameters.h"
#include <thrust/functional.h>
#include <thrust/extrema.h>
#define NUM_DIMENSIONS 2
extern "C" __device__ __device_builtin__ void __syncthreads();
extern "C" __device__ __device_builtin__ float fminf(float x, float y);
extern "C" __device__ __device_builtin__ float fmaxf(float x, float y);
extern "C" __device__ __device_builtin__ unsigned int __uAtomicInc(unsigned int *address, unsigned int val);
extern "C" __device__ __device_builtin__ void __threadfence_system(void);
#define Rand(s, min, max) (hiprand_uniform(&s)*(max - min) + min)
__constant__ float _c_minPosition[NUM_DIMENSIONS];
__constant__ float _c_maxPosition[NUM_DIMENSIONS];
// Rosenbrock "banana" objective at a 2-D point:
//   f(x, y) = 100 * (y - x^2)^2 + (1 - x)^2
// Global minimum f = 0 at (1, 1).  `position` must hold at least 2 floats.
__forceinline__ __device__ float EvalBanana(float *position)
{
    const float px = position[0];
    const float py = position[1];
    const float quad = py - px * px;  // (y - x^2) term
    const float lin = 1 - px;         // (1 - x) term
    return 100 * quad * quad + lin * lin;
}
// Software barrier across thread blocks, used between PSO phases.
// Thread 0 of each block bumps its slot in itCount[] (wrapping after
// max + 1) and then spins until every block's counter shows iteration
// `it` complete; the remaining threads wait on __syncthreads().
// NOTE(review): only safe when the whole grid is simultaneously resident
// on the device; otherwise the spin can deadlock.
__forceinline__ __device__ void WaitForIncBlocks(unsigned int *itCount, int it, unsigned int max)
{
    if (threadIdx.x == 0)
    {
        __threadfence_system();
        __uAtomicInc(&itCount[blockIdx.x], max + 1);
        __threadfence_system();
        int cont = 1;
        while (cont)
        {
            cont = 0;
            for (int i = 0; i < gridDim.x; ++i)
            {
                // BUGFIX: the original tested itCount[blockIdx.x] — this
                // block's own slot — so the loop variable i was unused and
                // the barrier never actually waited for the other blocks.
                if (itCount[i] != it + 1)
                {
                    cont = 1;
                    break;
                }
            }
        }
    }
    __syncthreads();
}
// One-time PSO initialisation.  One thread per particle: seed an RNG state,
// draw a random position/velocity inside the search box held in constant
// memory, evaluate the objective, then min-reduce within the block to find
// the block's best particle.
// Data layout: positions/velocities/bestPositions are [particle][dimension],
// row-major.  Requires blockDim.x to be a power of two and <= 1024 (the host
// rounds the thread count up to a power of two before launching).
__global__ void k_InitPSO(
int numParticles,
float *_positions,
float *_velocities,
float *_bestPositions,
float *_bestFitness,
float *_bestGlobalPosition,
float *_bestGlobalFitness,
hiprandState_t *_s)
{
    __shared__ float positions[1024 * NUM_DIMENSIONS];
    __shared__ float velocities[1024 * NUM_DIMENSIONS];
    __shared__ float bestFitness[1024];
    __shared__ int ptrs[1024];
    int idx = threadIdx.x;
    int gidx = blockDim.x * blockIdx.x + idx;
    if (gidx >= numParticles)
        bestFitness[idx] = FLT_MAX;  // pad tail slots so the min-reduction ignores them
    __syncthreads();
    ptrs[idx] = idx;
    // BUGFIX: this guard originally tested the block-local index
    // (idx < numParticles), letting tail threads of the last block create
    // phantom particles that could win the block reduction below.
    if (gidx < numParticles)
    {
        // BUGFIX: seed and index the RNG by the global particle id.  The
        // original used threadIdx.x and _s[idx], so every block shared (and
        // raced on) the first blockDim.x states with identical sequences.
        hiprand_init(gidx, 0, 0, &_s[gidx]);
        int ptr_s = idx * NUM_DIMENSIONS; // this particle's slot in shared memory
        // Per-dimension search bounds.  BUGFIX: minY read _c_minPosition[0]
        // and maxX read _c_maxPosition[1]; x is dimension 0, y is dimension 1.
        float minX = _c_minPosition[0];
        float minY = _c_minPosition[1];
        float maxX = _c_maxPosition[0];
        float maxY = _c_maxPosition[1];
        positions[ptr_s] = hiprand_uniform(&_s[gidx])*(maxX - minX) + minX;
        positions[ptr_s + 1] = hiprand_uniform(&_s[gidx])*(maxY - minY) + minY;
        velocities[ptr_s] = hiprand_uniform(&_s[gidx])*(maxX - minX) + minX;
        velocities[ptr_s + 1] = hiprand_uniform(&_s[gidx])*(maxY - minY) + minY;
        // Initial personal best is the starting point itself.
        bestFitness[idx] = EvalBanana(positions + ptr_s);
    }
    __syncthreads();
    // Index-swapping min-reduction: ptrs[0] ends up at the block's best
    // particle.  BUGFIX: stride starts at blockDim.x/2 instead of a
    // hard-coded 1024/2, which read uninitialised ptrs[]/bestFitness[]
    // slots whenever the block was launched with fewer than 1024 threads.
    for (int s = blockDim.x / 2; s > 0; s /= 2)
    {
        if (idx < s)
        {
            if (bestFitness[ptrs[idx]] > bestFitness[ptrs[idx + s]])
            {
                int tmp = ptrs[idx + s];
                ptrs[idx + s] = ptrs[idx];
                ptrs[idx] = tmp;
            }
        }
        __syncthreads();
    }
    if (gidx < numParticles)
    {
        // Persist this particle's state to global memory.
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _positions[gidx * NUM_DIMENSIONS + d] = positions[idx * NUM_DIMENSIONS + d];
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _bestPositions[gidx * NUM_DIMENSIONS + d] = positions[idx * NUM_DIMENSIONS + d];
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _velocities[gidx * NUM_DIMENSIONS + d] = velocities[idx * NUM_DIMENSIONS + d];
        _bestFitness[gidx] = bestFitness[idx];
        // Threads 0..NUM_DIMENSIONS-1 publish the block-best position/fitness.
        // NOTE(review): if the last block holds a single valid particle,
        // thread 1 is excluded by the gidx guard and dimension 1 of the
        // block best is not written — confirm numParticles % blockDim != 1.
        if (idx < NUM_DIMENSIONS)
            _bestGlobalPosition[blockIdx.x * NUM_DIMENSIONS + idx] = positions[ptrs[0] * NUM_DIMENSIONS + idx];
        if (idx == 0)
            _bestGlobalFitness[blockIdx.x] = bestFitness[ptrs[0]];
    }
}
// One PSO update step, one thread per particle.  Per block: stage particle
// state in shared memory, apply the velocity/position update (clamped to the
// search box in constant memory), refresh personal bests, min-reduce to the
// block's best, and merge it into this block's slot of the global-best
// arrays.  Assumes blockDim.x is a power of two (the host rounds up) <= 1024.
extern "C" __global__ void k_IterateMultiBlock(
int numParticles,
float *_positions,
float *_velocities,
float *_bestPositions,
float *_bestFitness,
float *_bestGlobalPosition,
float *_bestGlobalFitness,
hiprandState_t *_s)
{
    __shared__ int ptrs[1024];
    __shared__ float positions[1024 * NUM_DIMENSIONS];
    __shared__ float bestFitness[1024];
    __shared__ float bestPositions[1024 * NUM_DIMENSIONS];
    float bestGlobalFitness;  // per-thread local; only thread 0's copy is used
    int p = threadIdx.x;      // block-local particle slot
    int block = blockIdx.x;
    int gp = blockDim.x * block + p;  // global particle index
    if (gp < numParticles)
    {
        // Stage this particle's state into shared memory.
        for (int i = 0; i < NUM_DIMENSIONS; ++i)
            positions[p * NUM_DIMENSIONS + i] = _positions[gp * NUM_DIMENSIONS + i];
        for (int i = 0; i < NUM_DIMENSIONS; ++i)
            bestPositions[p * NUM_DIMENSIONS + i] = _bestPositions[gp * NUM_DIMENSIONS + i];
        bestFitness[p] = _bestFitness[gp];
    }
    if (p == 0)
        // NOTE(review): reads slot 0, which k_minimum leaves holding the
        // overall winner, so every block compares against the global best
        // but writes its own slot [block] at the end — confirm intended.
        bestGlobalFitness = _bestGlobalFitness[0];
    else if (gp >= numParticles)
        bestFitness[p] = FLT_MAX;  // pad so the min-reduction ignores this slot
    __syncthreads();
    if (gp < numParticles)
    {
        for (int j = 0; j < NUM_DIMENSIONS; ++j)
        {
            // NOTE(review): RNG states indexed by the block-local id p, so
            // different blocks share (and race on) the same states; indexing
            // by gp would give each particle a private stream — confirm.
            float r1 = hiprand_uniform(&_s[p]);
            float r2 = hiprand_uniform(&_s[p]);
            // Standard PSO velocity update: inertia + cognitive + social terms.
            float newVelocity = (W * _velocities[gp * NUM_DIMENSIONS + j]) +
            (C1 * r1 * (bestPositions[p * NUM_DIMENSIONS + j] - positions[p * NUM_DIMENSIONS + j])) +
            (C2 * r2 * (_bestGlobalPosition[block * NUM_DIMENSIONS + j] - positions[p * NUM_DIMENSIONS + j]));
            // Clamp velocity and position to the per-dimension search box.
            newVelocity = fmaxf(_c_minPosition[j], fminf(_c_maxPosition[j], newVelocity));
            _velocities[gp * NUM_DIMENSIONS + j] = newVelocity;
            float newPosition = positions[p * NUM_DIMENSIONS + j] + newVelocity;
            newPosition = fmaxf(_c_minPosition[j], fminf(_c_maxPosition[j], newPosition));
            positions[p * NUM_DIMENSIONS + j] = newPosition;
        }
        // Refresh the personal best if the new point improves on it.
        float newFitness = EvalBanana(&positions[p * NUM_DIMENSIONS]);
        if (newFitness < bestFitness[p])
        {
            bestFitness[p] = newFitness;
            for (int j = 0; j < NUM_DIMENSIONS; ++j)
            {
                bestPositions[p * NUM_DIMENSIONS + j] = positions[p * NUM_DIMENSIONS + j];
            }
        }
    }
    __syncthreads();
    // Index-swapping min-reduction: ptrs[0] ends at the block's best particle.
    ptrs[p] = p;
    __syncthreads();
    for (int s = blockDim.x / 2; s > 0; s /= 2)
    {
        if (p < s)
        {
            if (bestFitness[ptrs[p]] > bestFitness[ptrs[p + s]])
            {
                int tmp = ptrs[p + s];
                ptrs[p + s] = ptrs[p];
                ptrs[p] = tmp;
            }
        }
        __syncthreads();
    }
    if (p == 0)
    {
        // Thread 0 merges the block best into the running global best.
        if (bestFitness[ptrs[0]] < bestGlobalFitness)
        {
            bestGlobalFitness = bestFitness[ptrs[0]];
            for (int j = 0; j < NUM_DIMENSIONS; ++j)
            {
                _bestGlobalPosition[block * NUM_DIMENSIONS + j] = positions[ptrs[0] * NUM_DIMENSIONS + j];
            }
        }
    }
    __syncthreads();
    if (gp < numParticles)
    {
        // Write the updated particle state back to global memory.
        int ptr_g = gp * NUM_DIMENSIONS;
        int ptr_s = p * NUM_DIMENSIONS;
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _positions[ptr_g + d] = positions[ptr_s + d];
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _bestPositions[ptr_g + d] = bestPositions[ptr_s + d];
        _bestFitness[gp] = bestFitness[p];
    }
    if (p == 0)
        _bestGlobalFitness[block] = bestGlobalFitness;
}
// Collapses the per-block bests (_numBlocks entries) to a single winner:
// afterwards _fitness[0] holds the minimum fitness and
// _position[0..NUM_DIMENSIONS-1] the matching position.
// Launched as a single block whose size is a power of two >= _numBlocks.
__global__ void k_minimum(int _numBlocks, float *_position, float *_fitness)
{
    __shared__ float fitness[1024];
    __shared__ int ptrs[1024];
    int idx = threadIdx.x;
    ptrs[idx] = idx;
    if (idx >= _numBlocks)
        fitness[idx] = FLT_MAX;  // pad unused slots so the reduction ignores them
    __syncthreads();
    if (idx < _numBlocks)
        fitness[idx] = _fitness[idx];
    __syncthreads();
    // Index-swapping min-reduction; ptrs[0] ends at the smallest fitness.
    for (int s = blockDim.x / 2; s > 0; s /= 2)
    {
        if (idx < s)
        {
            if (fitness[ptrs[idx]] > fitness[ptrs[idx + s]])
            {
                int tmp = ptrs[idx + s];
                ptrs[idx + s] = ptrs[idx];
                ptrs[idx] = tmp;
            }
        }
        __syncthreads();
    }
    // Compact the winner into slot 0.  In-place is safe here: threads
    // idx < NUM_DIMENSIONS write slots 0..NUM_DIMENSIONS-1 while reading
    // slot ptrs[0]*NUM_DIMENSIONS+idx, which is either the same slot
    // (ptrs[0] == 0) or lies strictly beyond the written range.
    if (idx < NUM_DIMENSIONS)
        _position[idx] = _position[ptrs[0] * NUM_DIMENSIONS + idx];
    if (idx == 0)
        _fitness[0] = _fitness[ptrs[0]];
}
// Builds a GPU-backed PSO solver for the NUM_DIMENSIONS-dimensional problem:
// mirrors the host-side vectors into device (thrust) vectors, derives the
// launch geometry, and uploads the per-dimension search bounds into constant
// memory for fast broadcast reads inside the kernels.
PSOCuda::PSOCuda(int numParticles, float *minPositions, float *maxPositions)
:
PSOBase(numParticles, NUM_DIMENSIONS, minPositions, maxPositions),
_d_positions(_positions.size()),
_d_velocities(_velocities.size()),
_d_minPositions(_minPositions),
_d_maxPositions(_maxPositions),
_d_bestPositions(_bestPositions.size()),
_d_bestFitness(_bestFitness.size()),
_d_state(numParticles)
{
    CalculateGeometry();
    // Kernels are shared-memory heavy (several 1024-entry arrays per block).
    hipDeviceSetCacheConfig(hipFuncCachePreferShared);
    // One global-best slot per block; k_minimum later collapses them to slot 0.
    _d_bestGlobalPosition.resize(NUM_DIMENSIONS * _numBlocks);
    _d_bestGlobalFitness.resize(_numBlocks);
    _bestGlobalPosition.resize(NUM_DIMENSIONS * _numBlocks);
    _bestGlobalFitness.resize(_numBlocks);
    hipMemcpyToSymbol(_c_minPosition, _minPositions.data(), _minPositions.size() * sizeof(float));
    hipMemcpyToSymbol(_c_maxPosition, _maxPositions.data(), _maxPositions.size() * sizeof(float));
}
// Device-side swarm initialisation: k_InitPSO seeds every particle and
// computes each block's best, then a single-block k_minimum pass collapses
// the per-block bests so slot 0 holds the best found so far.  Finishes by
// mirroring device state back to the host vectors.
void PSOCuda::Init()
{
    // Round launch sizes up to the next power of two: the in-kernel
    // index-swap reductions halve their stride each step and require it.
    int threadNumber = pow(2, ceil(log(_numThreads)/log(2)));
    int blockNumber = pow(2, ceil(log(_numBlocks)/log(2)));
    hipLaunchKernelGGL(( k_InitPSO), dim3(_numBlocks), dim3(threadNumber), 0, 0, _numParticles,
    raw_pointer_cast(_d_positions.data()),
    raw_pointer_cast(_d_velocities.data()),
    raw_pointer_cast(_d_bestPositions.data()),
    raw_pointer_cast(_d_bestFitness.data()),
    raw_pointer_cast(_d_bestGlobalPosition.data()),
    raw_pointer_cast(_d_bestGlobalFitness.data()),
    raw_pointer_cast(_d_state.data()));
    hipDeviceSynchronize();
    // One block over the per-block bests; writes the winner to slot 0.
    hipLaunchKernelGGL(( k_minimum), dim3(1), dim3(blockNumber), 0, 0, _numBlocks,
    raw_pointer_cast(_d_bestGlobalPosition.data()),
    raw_pointer_cast(_d_bestGlobalFitness.data()));
    UpdateHost();
}
// Advances the swarm n PSO steps.  Each step runs the per-particle update
// kernel followed by k_minimum to refresh the global best in slot 0; host
// state is synchronised once at the end, not per step.
void PSOCuda::Iterate(int n)
{
    // Same power-of-two rounding as Init(); required by the reductions.
    int threadNumber = pow(2, ceil(log(_numThreads)/log(2)));
    int blockNumber = pow(2, ceil(log(_numBlocks)/log(2)));
    for (int i = 0; i < n; ++i)
    {
        hipLaunchKernelGGL(( k_IterateMultiBlock), dim3(_numBlocks), dim3(threadNumber), 0, 0, _numParticles,
        raw_pointer_cast(_d_positions.data()),
        raw_pointer_cast(_d_velocities.data()),
        raw_pointer_cast(_d_bestPositions.data()),
        raw_pointer_cast(_d_bestFitness.data()),
        raw_pointer_cast(_d_bestGlobalPosition.data()),
        raw_pointer_cast(_d_bestGlobalFitness.data()),
        raw_pointer_cast(_d_state.data()));
        hipDeviceSynchronize();
        hipLaunchKernelGGL(( k_minimum), dim3(1), dim3(blockNumber), 0, 0, _numBlocks,
        raw_pointer_cast(_d_bestGlobalPosition.data()),
        raw_pointer_cast(_d_bestGlobalFitness.data()));
    }
    UpdateHost();
}
// Mirrors every device-side (thrust device_vector) buffer back into its
// host-side counterpart; each assignment performs a device-to-host copy.
void PSOCuda::UpdateHost()
{
    _positions = _d_positions;
    _velocities = _d_velocities;
    _minPositions = _d_minPositions;
    _maxPositions = _d_maxPositions;
    _bestPositions = _d_bestPositions;
    _bestFitness = _d_bestFitness;
    _bestGlobalPosition = _d_bestGlobalPosition;
    _bestGlobalFitness = _d_bestGlobalFitness;
}
void PSOCuda::CalculateGeometry()
{
int numDevices;
hipGetDeviceCount(&numDevices);
if (numDevices < 1)
throw std::exception("Nenhum dispositivo cuda");
hipDeviceProp_t devProp;
hipGetDeviceProperties(&devProp, 0);
int maxThreads = devProp.maxThreadsPerBlock;
int maxBlocks = devProp.multiProcessorCount;
//if (maxThreads * maxBlocks < _numParticles)
// throw std::exception("_maxThreads * _maxBlocks < _numParticles");
_numThreads = _numParticles / maxBlocks;
_numThreads = ::min(((_numThreads + 191)/192)*192, maxThreads);
_numBlocks = (_numParticles + _numThreads - 1) / _numThreads;
} | db6e68ab722e9ece9a478515cc813e814369b387.cu | #include "PSOCuda.cuh"
#include <stdexcept>
#include <algorithm>
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <thrust/functional.h>
#include <thrust/extrema.h>
#define NUM_DIMENSIONS 2
extern "C" __device__ __device_builtin__ void __syncthreads();
extern "C" __device__ __device_builtin__ float fminf(float x, float y);
extern "C" __device__ __device_builtin__ float fmaxf(float x, float y);
extern "C" __device__ __device_builtin__ unsigned int __uAtomicInc(unsigned int *address, unsigned int val);
extern "C" __device__ __device_builtin__ void __threadfence_system(void);
#define Rand(s, min, max) (curand_uniform(&s)*(max - min) + min)
__constant__ float _c_minPosition[NUM_DIMENSIONS];
__constant__ float _c_maxPosition[NUM_DIMENSIONS];
__forceinline__ __device__ float EvalBanana(float *position)
{
float x = position[0];
float y = position[1];
float a = y - x * x;
float b = 1 - x;
return 100 * a*a + b*b;
}
// Software barrier across thread blocks, used between PSO phases.
// Thread 0 of each block bumps its slot in itCount[] (wrapping after
// max + 1) and then spins until every block's counter shows iteration
// `it` complete; the remaining threads wait on __syncthreads().
// NOTE(review): only safe when the whole grid is simultaneously resident
// on the device; otherwise the spin can deadlock.
__forceinline__ __device__ void WaitForIncBlocks(unsigned int *itCount, int it, unsigned int max)
{
    if (threadIdx.x == 0)
    {
        __threadfence_system();
        __uAtomicInc(&itCount[blockIdx.x], max + 1);
        __threadfence_system();
        int cont = 1;
        while (cont)
        {
            cont = 0;
            for (int i = 0; i < gridDim.x; ++i)
            {
                // BUGFIX: the original tested itCount[blockIdx.x] — this
                // block's own slot — so the loop variable i was unused and
                // the barrier never actually waited for the other blocks.
                if (itCount[i] != it + 1)
                {
                    cont = 1;
                    break;
                }
            }
        }
    }
    __syncthreads();
}
// One-time PSO initialisation.  One thread per particle: seed an RNG state,
// draw a random position/velocity inside the search box held in constant
// memory, evaluate the objective, then min-reduce within the block to find
// the block's best particle.
// Data layout: positions/velocities/bestPositions are [particle][dimension],
// row-major.  Requires blockDim.x to be a power of two and <= 1024 (the host
// rounds the thread count up to a power of two before launching).
__global__ void k_InitPSO(
int numParticles,
float *_positions,
float *_velocities,
float *_bestPositions,
float *_bestFitness,
float *_bestGlobalPosition,
float *_bestGlobalFitness,
curandState *_s)
{
    __shared__ float positions[1024 * NUM_DIMENSIONS];
    __shared__ float velocities[1024 * NUM_DIMENSIONS];
    __shared__ float bestFitness[1024];
    __shared__ int ptrs[1024];
    int idx = threadIdx.x;
    int gidx = blockDim.x * blockIdx.x + idx;
    if (gidx >= numParticles)
        bestFitness[idx] = FLT_MAX;  // pad tail slots so the min-reduction ignores them
    __syncthreads();
    ptrs[idx] = idx;
    // BUGFIX: this guard originally tested the block-local index
    // (idx < numParticles), letting tail threads of the last block create
    // phantom particles that could win the block reduction below.
    if (gidx < numParticles)
    {
        // BUGFIX: seed and index the RNG by the global particle id.  The
        // original used threadIdx.x and _s[idx], so every block shared (and
        // raced on) the first blockDim.x states with identical sequences.
        curand_init(gidx, 0, 0, &_s[gidx]);
        int ptr_s = idx * NUM_DIMENSIONS; // this particle's slot in shared memory
        // Per-dimension search bounds.  BUGFIX: minY read _c_minPosition[0]
        // and maxX read _c_maxPosition[1]; x is dimension 0, y is dimension 1.
        float minX = _c_minPosition[0];
        float minY = _c_minPosition[1];
        float maxX = _c_maxPosition[0];
        float maxY = _c_maxPosition[1];
        positions[ptr_s] = curand_uniform(&_s[gidx])*(maxX - minX) + minX;
        positions[ptr_s + 1] = curand_uniform(&_s[gidx])*(maxY - minY) + minY;
        velocities[ptr_s] = curand_uniform(&_s[gidx])*(maxX - minX) + minX;
        velocities[ptr_s + 1] = curand_uniform(&_s[gidx])*(maxY - minY) + minY;
        // Initial personal best is the starting point itself.
        bestFitness[idx] = EvalBanana(positions + ptr_s);
    }
    __syncthreads();
    // Index-swapping min-reduction: ptrs[0] ends up at the block's best
    // particle.  BUGFIX: stride starts at blockDim.x/2 instead of a
    // hard-coded 1024/2, which read uninitialised ptrs[]/bestFitness[]
    // slots whenever the block was launched with fewer than 1024 threads.
    for (int s = blockDim.x / 2; s > 0; s /= 2)
    {
        if (idx < s)
        {
            if (bestFitness[ptrs[idx]] > bestFitness[ptrs[idx + s]])
            {
                int tmp = ptrs[idx + s];
                ptrs[idx + s] = ptrs[idx];
                ptrs[idx] = tmp;
            }
        }
        __syncthreads();
    }
    if (gidx < numParticles)
    {
        // Persist this particle's state to global memory.
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _positions[gidx * NUM_DIMENSIONS + d] = positions[idx * NUM_DIMENSIONS + d];
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _bestPositions[gidx * NUM_DIMENSIONS + d] = positions[idx * NUM_DIMENSIONS + d];
        for (int d = 0; d < NUM_DIMENSIONS; ++d)
            _velocities[gidx * NUM_DIMENSIONS + d] = velocities[idx * NUM_DIMENSIONS + d];
        _bestFitness[gidx] = bestFitness[idx];
        // Threads 0..NUM_DIMENSIONS-1 publish the block-best position/fitness.
        // NOTE(review): if the last block holds a single valid particle,
        // thread 1 is excluded by the gidx guard and dimension 1 of the
        // block best is not written — confirm numParticles % blockDim != 1.
        if (idx < NUM_DIMENSIONS)
            _bestGlobalPosition[blockIdx.x * NUM_DIMENSIONS + idx] = positions[ptrs[0] * NUM_DIMENSIONS + idx];
        if (idx == 0)
            _bestGlobalFitness[blockIdx.x] = bestFitness[ptrs[0]];
    }
}
extern "C" __global__ void k_IterateMultiBlock(
int numParticles,
float *_positions,
float *_velocities,
float *_bestPositions,
float *_bestFitness,
float *_bestGlobalPosition,
float *_bestGlobalFitness,
curandState *_s)
{
__shared__ int ptrs[1024];
__shared__ float positions[1024 * NUM_DIMENSIONS];
__shared__ float bestFitness[1024];
__shared__ float bestPositions[1024 * NUM_DIMENSIONS];
float bestGlobalFitness;
int p = threadIdx.x;
int block = blockIdx.x;
int gp = blockDim.x * block + p;
if (gp < numParticles)
{
for (int i = 0; i < NUM_DIMENSIONS; ++i)
positions[p * NUM_DIMENSIONS + i] = _positions[gp * NUM_DIMENSIONS + i];
for (int i = 0; i < NUM_DIMENSIONS; ++i)
bestPositions[p * NUM_DIMENSIONS + i] = _bestPositions[gp * NUM_DIMENSIONS + i];
bestFitness[p] = _bestFitness[gp];
}
if (p == 0)
bestGlobalFitness = _bestGlobalFitness[0];
else if (gp >= numParticles)
bestFitness[p] = FLT_MAX;
__syncthreads();
if (gp < numParticles)
{
for (int j = 0; j < NUM_DIMENSIONS; ++j)
{
float r1 = curand_uniform(&_s[p]);
float r2 = curand_uniform(&_s[p]);
float newVelocity = (W * _velocities[gp * NUM_DIMENSIONS + j]) +
(C1 * r1 * (bestPositions[p * NUM_DIMENSIONS + j] - positions[p * NUM_DIMENSIONS + j])) +
(C2 * r2 * (_bestGlobalPosition[block * NUM_DIMENSIONS + j] - positions[p * NUM_DIMENSIONS + j]));
newVelocity = fmaxf(_c_minPosition[j], fminf(_c_maxPosition[j], newVelocity));
_velocities[gp * NUM_DIMENSIONS + j] = newVelocity;
float newPosition = positions[p * NUM_DIMENSIONS + j] + newVelocity;
newPosition = fmaxf(_c_minPosition[j], fminf(_c_maxPosition[j], newPosition));
positions[p * NUM_DIMENSIONS + j] = newPosition;
}
float newFitness = EvalBanana(&positions[p * NUM_DIMENSIONS]);
if (newFitness < bestFitness[p])
{
bestFitness[p] = newFitness;
for (int j = 0; j < NUM_DIMENSIONS; ++j)
{
bestPositions[p * NUM_DIMENSIONS + j] = positions[p * NUM_DIMENSIONS + j];
}
}
}
__syncthreads();
// Descobre a melhor
ptrs[p] = p;
__syncthreads();
for (int s = blockDim.x / 2; s > 0; s /= 2)
{
if (p < s)
{
if (bestFitness[ptrs[p]] > bestFitness[ptrs[p + s]])
{
int tmp = ptrs[p + s];
ptrs[p + s] = ptrs[p];
ptrs[p] = tmp;
}
}
__syncthreads();
}
if (p == 0)
{
if (bestFitness[ptrs[0]] < bestGlobalFitness)
{
bestGlobalFitness = bestFitness[ptrs[0]];
for (int j = 0; j < NUM_DIMENSIONS; ++j)
{
_bestGlobalPosition[block * NUM_DIMENSIONS + j] = positions[ptrs[0] * NUM_DIMENSIONS + j];
}
}
}
__syncthreads();
if (gp < numParticles)
{
int ptr_g = gp * NUM_DIMENSIONS;
int ptr_s = p * NUM_DIMENSIONS;
for (int d = 0; d < NUM_DIMENSIONS; ++d)
_positions[ptr_g + d] = positions[ptr_s + d];
for (int d = 0; d < NUM_DIMENSIONS; ++d)
_bestPositions[ptr_g + d] = bestPositions[ptr_s + d];
_bestFitness[gp] = bestFitness[p];
}
if (p == 0)
_bestGlobalFitness[block] = bestGlobalFitness;
}
__global__ void k_minimum(int _numBlocks, float *_position, float *_fitness)
{
__shared__ float fitness[1024];
__shared__ int ptrs[1024];
int idx = threadIdx.x;
ptrs[idx] = idx;
if (idx >= _numBlocks)
fitness[idx] = FLT_MAX;
__syncthreads();
if (idx < _numBlocks)
fitness[idx] = _fitness[idx];
__syncthreads();
for (int s = blockDim.x / 2; s > 0; s /= 2)
{
if (idx < s)
{
if (fitness[ptrs[idx]] > fitness[ptrs[idx + s]])
{
int tmp = ptrs[idx + s];
ptrs[idx + s] = ptrs[idx];
ptrs[idx] = tmp;
}
}
__syncthreads();
}
if (idx < NUM_DIMENSIONS)
_position[idx] = _position[ptrs[0] * NUM_DIMENSIONS + idx];
if (idx == 0)
_fitness[0] = _fitness[ptrs[0]];
}
PSOCuda::PSOCuda(int numParticles, float *minPositions, float *maxPositions)
:
PSOBase(numParticles, NUM_DIMENSIONS, minPositions, maxPositions),
_d_positions(_positions.size()),
_d_velocities(_velocities.size()),
_d_minPositions(_minPositions),
_d_maxPositions(_maxPositions),
_d_bestPositions(_bestPositions.size()),
_d_bestFitness(_bestFitness.size()),
_d_state(numParticles)
{
CalculateGeometry();
cudaDeviceSetCacheConfig(cudaFuncCachePreferShared);
_d_bestGlobalPosition.resize(NUM_DIMENSIONS * _numBlocks);
_d_bestGlobalFitness.resize(_numBlocks);
_bestGlobalPosition.resize(NUM_DIMENSIONS * _numBlocks);
_bestGlobalFitness.resize(_numBlocks);
cudaMemcpyToSymbol(_c_minPosition, _minPositions.data(), _minPositions.size() * sizeof(float));
cudaMemcpyToSymbol(_c_maxPosition, _maxPositions.data(), _maxPositions.size() * sizeof(float));
}
void PSOCuda::Init()
{
int threadNumber = pow(2, ceil(log(_numThreads)/log(2)));
int blockNumber = pow(2, ceil(log(_numBlocks)/log(2)));
k_InitPSO<<<_numBlocks, threadNumber>>>(_numParticles,
raw_pointer_cast(_d_positions.data()),
raw_pointer_cast(_d_velocities.data()),
raw_pointer_cast(_d_bestPositions.data()),
raw_pointer_cast(_d_bestFitness.data()),
raw_pointer_cast(_d_bestGlobalPosition.data()),
raw_pointer_cast(_d_bestGlobalFitness.data()),
raw_pointer_cast(_d_state.data()));
cudaDeviceSynchronize();
k_minimum<<<1, blockNumber>>>(_numBlocks,
raw_pointer_cast(_d_bestGlobalPosition.data()),
raw_pointer_cast(_d_bestGlobalFitness.data()));
UpdateHost();
}
void PSOCuda::Iterate(int n)
{
int threadNumber = pow(2, ceil(log(_numThreads)/log(2)));
int blockNumber = pow(2, ceil(log(_numBlocks)/log(2)));
for (int i = 0; i < n; ++i)
{
k_IterateMultiBlock<<<_numBlocks, threadNumber>>>(_numParticles,
raw_pointer_cast(_d_positions.data()),
raw_pointer_cast(_d_velocities.data()),
raw_pointer_cast(_d_bestPositions.data()),
raw_pointer_cast(_d_bestFitness.data()),
raw_pointer_cast(_d_bestGlobalPosition.data()),
raw_pointer_cast(_d_bestGlobalFitness.data()),
raw_pointer_cast(_d_state.data()));
cudaDeviceSynchronize();
k_minimum<<<1, blockNumber>>>(_numBlocks,
raw_pointer_cast(_d_bestGlobalPosition.data()),
raw_pointer_cast(_d_bestGlobalFitness.data()));
}
UpdateHost();
}
void PSOCuda::UpdateHost()
{
_positions = _d_positions;
_velocities = _d_velocities;
_minPositions = _d_minPositions;
_maxPositions = _d_maxPositions;
_bestPositions = _d_bestPositions;
_bestFitness = _d_bestFitness;
_bestGlobalPosition = _d_bestGlobalPosition;
_bestGlobalFitness = _d_bestGlobalFitness;
}
void PSOCuda::CalculateGeometry()
{
int numDevices;
cudaGetDeviceCount(&numDevices);
if (numDevices < 1)
throw std::exception("Nenhum dispositivo cuda");
cudaDeviceProp devProp;
cudaGetDeviceProperties(&devProp, 0);
int maxThreads = devProp.maxThreadsPerBlock;
int maxBlocks = devProp.multiProcessorCount;
//if (maxThreads * maxBlocks < _numParticles)
// throw std::exception("_maxThreads * _maxBlocks < _numParticles");
_numThreads = _numParticles / maxBlocks;
_numThreads = std::min(((_numThreads + 191)/192)*192, maxThreads);
_numBlocks = (_numParticles + _numThreads - 1) / _numThreads;
} |
f7bcca15fcbeca056f462b5153bed7e1533dee46.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <cmath>
#include "caffe2/core/context_gpu.h"
#include "caffe2/operators/elementwise_op.h"
namespace caffe2 {
// Element-wise arc-sine kernel: Y[i] = asin(X[i]) for i in [0, N).
// CUDA_1D_KERNEL_LOOP is caffe2's grid-stride loop macro, so any launch
// geometry covers all N elements.
// NOTE(review): asin is only defined for inputs in [-1, 1]; values outside
// that range yield NaN — presumably guaranteed by the caller, confirm.
template <typename T>
__global__ void AsinKernel(const int N, const T* X, T* Y) {
  CUDA_1D_KERNEL_LOOP(i, N) {
    Y[i] = asin(X[i]);
  }
}
// Backward pass for Asin: dX[i] = dY[i] / sqrt(1 - X[i]^2), i.e. the
// analytic derivative d/dx asin(x) = 1 / sqrt(1 - x^2) scaled by the
// incoming gradient.
// NOTE(review): diverges as |X[i]| approaches 1 and produces Inf/NaN at
// |X[i]| >= 1 — confirm inputs stay strictly inside (-1, 1).
template <typename T>
__global__ void AsinGradientKernel(const int N, const T* X, const T* dY, T* dX) {
  CUDA_1D_KERNEL_LOOP(i, N) {
    dX[i] = dY[i] / sqrt(1 - X[i] * X[i]);
  }
}
// Host-side functor plugged into caffe2's UnaryElementwiseOp: launches
// AsinKernel over n elements on the operator's CUDA stream, with the
// standard caffe2 block/thread geometry.
struct AsinCUDAFunctor {
  template <typename T>
  inline void
  operator()(const int n, const T* x, T* y, CUDAContext* device_context) {
    hipLaunchKernelGGL(( AsinKernel<T>)
    , dim3(CAFFE_GET_BLOCKS(n)),
    dim3(CAFFE_CUDA_NUM_THREADS),
    0,
    device_context->cuda_stream(), n, x, y);
    return;
  }
};
// Host-side functor plugged into caffe2's BinaryElementwiseOp: launches
// AsinGradientKernel (inputs: forward input x and upstream gradient dy,
// output: dx) on the operator's CUDA stream.
struct AsinGradientCUDAFunctor {
  template <typename T>
  inline void Run(
  const int n,
  const T* x,
  const T* dy,
  T* dx,
  CUDAContext* device_context) {
    hipLaunchKernelGGL(( AsinGradientKernel<T>)
    , dim3(CAFFE_GET_BLOCKS(n)),
    dim3(CAFFE_CUDA_NUM_THREADS),
    0,
    device_context->cuda_stream(), n, x, dy, dx);
    return;
  }
};
REGISTER_CUDA_OPERATOR(
Asin,
UnaryElementwiseOp<TensorTypes<float>, CUDAContext, AsinCUDAFunctor>);
REGISTER_CUDA_OPERATOR(
AsinGradient,
BinaryElementwiseOp<
TensorTypes<float>,
CUDAContext,
WithoutBroadcast<AsinGradientCUDAFunctor>>);
} // namespace caffe2
| f7bcca15fcbeca056f462b5153bed7e1533dee46.cu | #include <cmath>
#include "caffe2/core/context_gpu.h"
#include "caffe2/operators/elementwise_op.h"
namespace caffe2 {
template <typename T>
__global__ void AsinKernel(const int N, const T* X, T* Y) {
CUDA_1D_KERNEL_LOOP(i, N) {
Y[i] = asin(X[i]);
}
}
template <typename T>
__global__ void AsinGradientKernel(const int N, const T* X, const T* dY, T* dX) {
CUDA_1D_KERNEL_LOOP(i, N) {
dX[i] = dY[i] / sqrt(1 - X[i] * X[i]);
}
}
struct AsinCUDAFunctor {
template <typename T>
inline void
operator()(const int n, const T* x, T* y, CUDAContext* device_context) {
AsinKernel<T>
<<<CAFFE_GET_BLOCKS(n),
CAFFE_CUDA_NUM_THREADS,
0,
device_context->cuda_stream()>>>(n, x, y);
return;
}
};
struct AsinGradientCUDAFunctor {
template <typename T>
inline void Run(
const int n,
const T* x,
const T* dy,
T* dx,
CUDAContext* device_context) {
AsinGradientKernel<T>
<<<CAFFE_GET_BLOCKS(n),
CAFFE_CUDA_NUM_THREADS,
0,
device_context->cuda_stream()>>>(n, x, dy, dx);
return;
}
};
REGISTER_CUDA_OPERATOR(
Asin,
UnaryElementwiseOp<TensorTypes<float>, CUDAContext, AsinCUDAFunctor>);
REGISTER_CUDA_OPERATOR(
AsinGradient,
BinaryElementwiseOp<
TensorTypes<float>,
CUDAContext,
WithoutBroadcast<AsinGradientCUDAFunctor>>);
} // namespace caffe2
|
494a18de977d7ffe6413595c3e53372b1286f5d6.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
//
// cuda_initial2.cu
// Cuda GMRES
//
// Created by Tim Ioannidis on 2/18/12.
// Copyright 2012 Chemeng NTUA. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "cuda_config.h"
#include "cuda_methods.h"
//kernel initialization-> cuda_initial
//sets y=value
// Initialises the GMRES work vectors on the device: y (length n1) becomes
// the first canonical basis vector e1 (y[0] = 1, all other entries 0) and
// x (length n2) is zeroed.  Both loops are grid-stride, so any launch
// geometry covers the full vectors.
__global__ void cuda_initial2_kernel(int n1,double *y,int n2, double *x )
{
    const int stride = blockDim.x * gridDim.x;
    // y = e1
    for (int tid = threadIdx.x + blockIdx.x * blockDim.x; tid < n1; tid += stride) {
        y[tid] = (tid == 0) ? 1 : 0;
    }
    // x = 0
    for (int tid = threadIdx.x + blockIdx.x * blockDim.x; tid < n2; tid += stride) {
        x[tid] = 0;
    }
}
| 494a18de977d7ffe6413595c3e53372b1286f5d6.cu | //
// cuda_initial2.cu
// Cuda GMRES
//
// Created by Tim Ioannidis on 2/18/12.
// Copyright 2012 Chemeng NTUA. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "cuda_config.h"
#include "cuda_methods.h"
//kernel initialization-> cuda_initial
//sets y=value
__global__ void cuda_initial2_kernel(int n1,double *y,int n2, double *x )
{
int global_tid=0;
//orismos indexing
global_tid = threadIdx.x + blockIdx.x * blockDim.x;
while (global_tid < n1) {
y[global_tid] = 0 ;
if (global_tid==0) {
y[global_tid]=1;
}
global_tid += blockDim.x * gridDim.x;
}
global_tid = threadIdx.x + blockIdx.x * blockDim.x;
while (global_tid < n2) {
x[global_tid] = 0 ;
global_tid += blockDim.x * gridDim.x;
}
}
|
371c606e01ef95ac8eb4580f1e30c635d131be1c.hip | // !!! This is a file automatically generated by hipify!!!
#define EIGEN_USE_GPU
#include <hip/hip_runtime.h>
#include <stdio.h>
// Element-wise increment: out[i] = in[i] + 1 for every i in [0, N).
// Grid-stride loop, so the result is correct for any launch geometry.
__global__ void AddOneKernel(const int* in, const int N, int* out) {
  const int stride = blockDim.x * gridDim.x;
  for (int idx = blockIdx.x * blockDim.x + threadIdx.x; idx < N; idx += stride) {
    out[idx] = in[idx] + 1;
  }
}
void AddOneKernelLauncher(const int* in, const int N, int* out) {
hipLaunchKernelGGL(( AddOneKernel), dim3(32), dim3(256), 0, 0, in, N, out);
hipError_t cudaerr = hipDeviceSynchronize();
if (cudaerr != hipSuccess)
printf("kernel launch failed with error \"%s\".\n", hipGetErrorString(cudaerr));
} | 371c606e01ef95ac8eb4580f1e30c635d131be1c.cu | #define EIGEN_USE_GPU
#include <cuda.h>
#include <stdio.h>
__global__ void AddOneKernel(const int* in, const int N, int* out) {
for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < N;
i += blockDim.x * gridDim.x) {
out[i] = in[i] + 1;
}
}
void AddOneKernelLauncher(const int* in, const int N, int* out) {
AddOneKernel<<<32, 256>>>(in, N, out);
cudaError_t cudaerr = cudaDeviceSynchronize();
if (cudaerr != cudaSuccess)
printf("kernel launch failed with error \"%s\".\n", cudaGetErrorString(cudaerr));
} |
495fff6ce9c0d4a48752f9fdda921f271d84d3da.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <iostream>
#define kDataLen 1000
// Child kernel launched from device code (dynamic parallelism): adds 1 to
// the single element z[i] chosen by the launching parent thread.
__global__ void child(float *z, int i) {
z[i] += 1.f;
}
// Per-element kernel: accumulates z[i] += x[i] + y[i] and then launches a
// one-thread child grid to add 1 more (requires dynamic-parallelism
// support).  One thread per element, no tail guard — the launch geometry
// must cover the arrays exactly.
// NOTE(review): accumulates into whatever z already holds; the caller must
// zero z first — confirm the host does so before launching.
__global__ void parent(float* x, float* y, float *z) {
int i = blockDim.x * blockIdx.x + threadIdx.x;
z[i] += y[i] + x[i];
hipLaunchKernelGGL(( child), dim3(1), dim3(1), 0, 0, z, i);
}
// Dynamic-parallelism demo: parent<<<>>> accumulates z = x + y and each
// thread's child<<<1,1>>> adds 1, so the expected result per element is
// host_a[i] + host_b[i] + 1.  Verifies against a CPU reference and prints
// PASSED/FAILED.
int main(int argc, char* argv[]) {
    float host_a[kDataLen];
    float host_b[kDataLen];
    float host_c[kDataLen];
    for (int i=0; i < kDataLen; i++) {
        host_a[i] = i;
        host_b[i] = 2*i;
    }
    // Copy input data to device.
    float* device_a;
    float* device_b;
    float* device_c;
    hipMalloc(&device_a, kDataLen * sizeof(float));
    hipMalloc(&device_b, kDataLen * sizeof(float));
    hipMalloc(&device_c, kDataLen * sizeof(float));
    hipMemcpy(device_a, host_a, kDataLen * sizeof(float),
    hipMemcpyHostToDevice);
    hipMemcpy(device_b, host_b, kDataLen * sizeof(float),
    hipMemcpyHostToDevice);
    // BUGFIX: both kernels accumulate into z with "+=", but device_c was
    // never initialised, so the sums started from uninitialised garbage.
    hipMemset(device_c, 0, kDataLen * sizeof(float));
    // Launch the kernel.
    hipLaunchKernelGGL(( parent), dim3(5), dim3(kDataLen/5), 0, 0, device_a, device_b, device_c);
    // Copy output data to host.
    hipDeviceSynchronize();
    hipMemcpy(host_c, device_c, kDataLen * sizeof(float),
    hipMemcpyDeviceToHost);
    bool passed = true;
    // Compare every element against the CPU reference a + b + 1.
    for (int i = 0; i < kDataLen; ++i) {
        std::cout << "y[" << i << "] = " << host_c[i];
        float expected = host_a[i] + host_b[i] + 1;
        std::cout << " Expected: " << expected << std::endl;
        if(host_c[i] != expected) {
            passed = false;
        }
    }
    if(passed)
        std::cout << "PASSED" << std::endl;
    else
        std::cout << "FAILED" << std::endl;
    // Release device buffers before tearing down the context.
    hipFree(device_a);
    hipFree(device_b);
    hipFree(device_c);
    hipDeviceReset();
    return 0;
}
| 495fff6ce9c0d4a48752f9fdda921f271d84d3da.cu | #include <iostream>
#define kDataLen 1000
__global__ void child(float *z, int i) {
z[i] += 1.f;
}
__global__ void parent(float* x, float* y, float *z) {
int i = blockDim.x * blockIdx.x + threadIdx.x;
z[i] += y[i] + x[i];
child<<<1, 1>>>(z, i);
}
// Dynamic-parallelism demo: parent<<<>>> accumulates z = x + y and each
// thread's child<<<1,1>>> adds 1, so the expected result per element is
// host_a[i] + host_b[i] + 1.  Verifies against a CPU reference and prints
// PASSED/FAILED.
int main(int argc, char* argv[]) {
  float host_a[kDataLen];
  float host_b[kDataLen];
  float host_c[kDataLen];
  for (int i=0; i < kDataLen; i++) {
    host_a[i] = i;
    host_b[i] = 2*i;
  }
  // Copy input data to device.
  float* device_a;
  float* device_b;
  float* device_c;
  cudaMalloc(&device_a, kDataLen * sizeof(float));
  cudaMalloc(&device_b, kDataLen * sizeof(float));
  cudaMalloc(&device_c, kDataLen * sizeof(float));
  cudaMemcpy(device_a, host_a, kDataLen * sizeof(float),
             cudaMemcpyHostToDevice);
  cudaMemcpy(device_b, host_b, kDataLen * sizeof(float),
             cudaMemcpyHostToDevice);
  // BUGFIX: both kernels accumulate into z with "+=", but device_c was
  // never initialised, so the sums started from uninitialised garbage.
  cudaMemset(device_c, 0, kDataLen * sizeof(float));
  // Launch the kernel.
  parent<<<5, kDataLen/5>>>(device_a, device_b, device_c);
  // Copy output data to host.
  cudaDeviceSynchronize();
  cudaMemcpy(host_c, device_c, kDataLen * sizeof(float),
             cudaMemcpyDeviceToHost);
  bool passed = true;
  // Compare every element against the CPU reference a + b + 1.
  for (int i = 0; i < kDataLen; ++i) {
    std::cout << "y[" << i << "] = " << host_c[i];
    float expected = host_a[i] + host_b[i] + 1;
    std::cout << " Expected: " << expected << std::endl;
    if(host_c[i] != expected) {
      passed = false;
    }
  }
  if(passed)
    std::cout << "PASSED" << std::endl;
  else
    std::cout << "FAILED" << std::endl;
  // Release device buffers before tearing down the context.
  cudaFree(device_a);
  cudaFree(device_b);
  cudaFree(device_c);
  cudaDeviceReset();
  return 0;
}
|
3f99d1a5ca3dc345e0e826dca76e5f7b537f81f9.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* Copyright (c) 2017 by Contributors
* \file bilinear_sampler.cu
* \brief
* \author Xu Dong
*/
#include "./bilinear_sampler-inl.h"
#include <algorithm>
#include "../common/cuda_utils.h"
#if MXNET_USE_CUDNN == 1 && CUDNN_MAJOR >= 5
#include "./cudnn_bilinear_sampler-inl.h"
#endif // MXNET_USE_CUDNN && CUDNN_MAJOR
namespace mshadow {
namespace cuda {
// True when `value` lies inside the closed interval [lowerBound, upperBound].
template<typename DType>
__device__ bool between(DType value, int lowerBound, int upperBound) {
return !(value < lowerBound) && !(value > upperBound);
}
// Forward pass of bilinear sampling: one thread per output element, using a
// 2-D grid-stride loop so any launch configuration covers the whole output.
// Layouts (flattened NCHW): data is (n, i_c, i_h, i_w), out is
// (o_n, o_c, o_h, o_w).  grid stores two planes of o_h*o_w values per sample:
// plane 0 = x coordinates, plane 1 = y coordinates (note the o_h*o_w offset
// below), both normalized to [-1, 1].
template<typename DType>
__global__ void BilinearSamplerForwardKernel(const int i_c, const int i_h,
const int i_w, const DType* data,
const DType* grid, const int o_n,
const int o_c, const int o_h,
const int o_w, DType* out) {
for (int index = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
index < o_n * o_c * o_h * o_w;
index += blockDim.x * gridDim.x * gridDim.y) {
// (n, c, h, w) is the element in out
int w = index % o_w;
int h = (index / o_w) % o_h;
int c = (index / o_w / o_h) % o_c;
int n = index / o_w / o_h / o_c;
index_t out_index = n * o_c * o_h * o_w + c * o_h * o_w + h * o_w + w;
index_t grid_index = n * o_h * o_w * 2 + h * o_w + w;
// map normalized coordinates [-1, 1] onto pixel coordinates [0, size-1]
DType y_real = (*(grid + grid_index + o_h * o_w) + 1) * (i_h - 1) / 2;
DType x_real = (*(grid + grid_index) + 1) * (i_w - 1) / 2;
int top_left_y = static_cast<int>(floor(y_real));
int top_left_x = static_cast<int>(floor(x_real));
// interpolation weight of the top-left corner along each axis
DType top_left_y_w = 1.0 - (y_real - top_left_y);
DType top_left_x_w = 1.0 - (x_real - top_left_x);
int data_index = n * i_c * i_h * i_w + c * i_h * i_w + top_left_y * i_w + top_left_x;
// corners that fall outside the input image contribute 0 (zero padding)
DType top_left_v = 0;
DType top_right_v = 0;
DType bottom_left_v = 0;
DType bottom_right_v = 0;
if (between(top_left_x, 0, i_w-1) && between(top_left_y, 0, i_h-1))
top_left_v = *(data + data_index);
if (between(top_left_x + 1, 0, i_w-1) && between(top_left_y, 0, i_h-1))
top_right_v = *(data + data_index + 1);
if (between(top_left_x, 0, i_w-1) && between(top_left_y + 1, 0, i_h-1))
bottom_left_v = *(data + data_index + i_w);
if (between(top_left_x+1, 0, i_w-1) && between(top_left_y + 1, 0, i_h-1))
bottom_right_v = *(data + data_index + i_w + 1);
// bilinear blend of the four neighbouring input pixels
*(out+out_index) = top_left_v * top_left_y_w * top_left_x_w +
top_right_v * top_left_y_w * (1.0 - top_left_x_w) +
bottom_left_v * (1.0 - top_left_y_w) * top_left_x_w +
bottom_right_v * (1.0 - top_left_y_w) * (1.0 - top_left_x_w);
}
}
// Backward pass of bilinear sampling.  One loop iteration per output
// *location* (n, h, w); the channel loop runs inside so the interpolation
// weights are computed once per location.  Gradients w.r.t. the input image
// are scattered with atomicAdd because several output locations can sample
// the same input pixel; gradients w.r.t. the grid are accumulated with +=.
// NOTE(review): both g_input and grad_grid are accumulated into, so the
// caller is expected to have zeroed them (or to want add-to semantics) --
// confirm against the calling operator.
template<typename DType>
__global__ void BilinearSamplerBackwardKernel(const int i_c, const int i_h,
const int i_w, const DType* grad,
const DType* data, const int o_n,
const int o_c, const int o_h,
const int o_w, DType* g_input,
const DType* grid_src,
DType* grad_grid) {
for (int index = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
index < o_n * o_h * o_w;
index += blockDim.x * gridDim.x * gridDim.y) {
// (n, h, w) is the spatial location handled by this iteration
int w = index % o_w;
int h = (index / o_w) % o_h;
int n = index / o_w / o_h;
// accumulated gradient w.r.t. the top-left interpolation weights
DType top_left_y_gw = 0.0;
DType top_left_x_gw = 0.0;
index_t grid_src_index = n * o_h * o_w * 2 + h * o_w + w;
// map normalized grid coordinates [-1, 1] onto pixel coordinates [0, size-1]
DType y_real = (*(grid_src + grid_src_index + o_h * o_w) + 1) * (i_h - 1) / 2;
DType x_real = (*(grid_src + grid_src_index) + 1) * (i_w - 1) / 2;
int top_left_y = static_cast<int>(floor(y_real));
int top_left_x = static_cast<int>(floor(x_real));
DType top_left_y_w = 1.0 - (y_real - top_left_y);
DType top_left_x_w = 1.0 - (x_real - top_left_x);
for (index_t c = 0; c < o_c; ++c) {
index_t grad_index = n * o_c * o_h * o_w + c * o_h * o_w + h * o_w + w;
int data_index = n * i_c * i_h * i_w + c * i_h * i_w + top_left_y * i_w + top_left_x;
// values of the 4 neighbouring input pixels (0 if outside the image)
DType top_left_v = 0;
DType top_right_v = 0;
DType bottom_left_v = 0;
DType bottom_right_v = 0;
// scatter input gradient; out-of-range corners receive no gradient
if (between(top_left_x, 0, i_w-1) && between(top_left_y, 0, i_h-1)) {
atomicAdd(&g_input[data_index], *(grad + grad_index) * top_left_y_w * top_left_x_w);
top_left_v = *(data + data_index);
}
if (between(top_left_x+1, 0, i_w-1) && between(top_left_y, 0, i_h-1)) {
atomicAdd(&g_input[data_index + 1], *(grad + grad_index) * top_left_y_w
* (1.0 - top_left_x_w));
top_right_v = *(data + data_index + 1);
}
if (between(top_left_x, 0, i_w-1) && between(top_left_y+1, 0, i_h-1)) {
atomicAdd(&g_input[data_index+ i_w], *(grad + grad_index) * (1.0 - top_left_y_w)
* top_left_x_w);
bottom_left_v = *(data + data_index + i_w);
}
if (between(top_left_x+1, 0, i_w-1) && between(top_left_y+1, 0, i_h-1)) {
atomicAdd(&g_input[data_index+ i_w + 1], *(grad + grad_index) * (1.0 - top_left_y_w)
* (1.0 - top_left_x_w));
bottom_right_v = *(data + data_index + i_w + 1);
}
// gradient w.r.t. the top-left weights; the minus sign turns the weight
// gradient into the gradient w.r.t. the sampling coordinate
top_left_y_gw -= *(grad + grad_index) * (top_right_v - bottom_right_v +
(top_left_v - top_right_v - bottom_left_v + bottom_right_v)
* top_left_x_w);
top_left_x_gw -= *(grad + grad_index) * (bottom_left_v - bottom_right_v +
(top_left_v - top_right_v - bottom_left_v + bottom_right_v)
* top_left_y_w);
}
// grid gradient: (i_h-1)/2 and (i_w-1)/2 are the chain-rule factors of
// the [-1,1] -> pixel-coordinate mapping above
*(grad_grid + grid_src_index + o_h * o_w) += top_left_y_gw * (i_h - 1) / 2;
*(grad_grid + grid_src_index) += top_left_x_gw * (i_w - 1) / 2;
}
}
} // namespace cuda
// Host-side launcher for the forward kernel: sizes a (possibly 2-D) grid so
// there is roughly one thread per output element, validates the launch
// configuration, and launches on the tensor's stream.
template<typename DType>
inline void BilinearSamplerForward(const Tensor<gpu, 4, DType> &output,
const Tensor<gpu, 4, DType> &input,
const Tensor<gpu, 4, DType> &grid_src) {
DType *out = output.dptr_;
const DType *data = input.dptr_;
const DType *grid = grid_src.dptr_;
int o_n = output.size(0), o_c = output.size(1), o_h = output.size(2), o_w = output.size(3);
int i_c = input.size(1), i_h = input.size(2), i_w = input.size(3);
using namespace cuda;
// number of blocks needed to cover every output element
const int max_block = (output.shape_.Size() + kMaxThreadsPerBlock - 1) / kMaxThreadsPerBlock;
// spill into the grid's y dimension when x alone cannot hold all blocks
const int grid_dim_x = (max_block > kMaxGridDim) ? kMaxGridDim : max_block;
const int grid_dim_y =
(max_block > kMaxGridDim) ? (max_block + kMaxGridDim - 1) / kMaxGridDim : 1;
dim3 num_blocks(grid_dim_x, grid_dim_y);
dim3 threads_per_block(kMaxThreadsPerBlock);
CheckLaunchParam(num_blocks, threads_per_block, "bilinear sampler forward");
hipStream_t stream = Stream<gpu>::GetStream(output.stream_);
cuda::BilinearSamplerForwardKernel<DType> << <num_blocks, threads_per_block, 0, stream >> >(
i_c, i_h, i_w, data, grid, o_n, o_c, o_h, o_w, out);
// post kernel check (peek keeps the error available to later callers)
hipError_t err = hipPeekAtLastError();
CHECK_EQ(err, hipSuccess) << hipGetErrorString(err);
}
// Host-side launcher for the backward kernel.  The grid is sized for one
// thread per output *location* (shape size divided by o_c) because the
// backward kernel iterates over channels internally.
template<typename DType>
inline void BilinearSamplerBackward(const Tensor<gpu, 4, DType> &input_grad,
const Tensor<gpu, 4, DType> &ggrid,
const Tensor<gpu, 4, DType> &output_grad,
const Tensor<gpu, 4, DType> &input_data,
const Tensor<gpu, 4, DType> &grid) {
DType *g_input = input_grad.dptr_;
DType *grad_grid = ggrid.dptr_;
const DType *grid_src = grid.dptr_;
const DType *grad = output_grad.dptr_;
const DType *data = input_data.dptr_;
int o_n = output_grad.size(0), o_c = output_grad.size(1),
o_h = output_grad.size(2), o_w = output_grad.size(3);
int i_c = input_data.size(1), i_h = input_data.size(2), i_w = input_data.size(3);
using namespace cuda;
// blocks needed to cover n*h*w locations (channels handled inside the kernel)
const int max_block = (output_grad.shape_.Size() / o_c + kMaxThreadsPerBlock - 1)
/ kMaxThreadsPerBlock;
// spill into the grid's y dimension when x alone cannot hold all blocks
const int grid_dim_x = (max_block > kMaxGridDim) ? kMaxGridDim : max_block;
const int grid_dim_y =
(max_block > kMaxGridDim) ? (max_block + kMaxGridDim - 1) / kMaxGridDim : 1;
dim3 num_blocks(grid_dim_x, grid_dim_y);
dim3 threads_per_block(kMaxThreadsPerBlock);
CheckLaunchParam(num_blocks, threads_per_block, "bilinear sampler backward");
hipStream_t stream = Stream<gpu>::GetStream(input_grad.stream_);
cuda::BilinearSamplerBackwardKernel<DType> << <num_blocks, threads_per_block, 0, stream >> >(
i_c, i_h, i_w, grad, data, o_n, o_c, o_h, o_w, g_input, grid_src, grad_grid);
// post kernel check (peek keeps the error available to later callers)
hipError_t err = hipPeekAtLastError();
CHECK_EQ(err, hipSuccess) << hipGetErrorString(err);
}
} // namespace mshadow
namespace mxnet {
namespace op {
// Factory for the GPU BilinearSampler operator: dispatches on the real dtype
// and, when built with cuDNN >= 5, prefers the cuDNN implementation unless
// the user explicitly set cudnn_off.
template<>
Operator* CreateOp<gpu>(BilinearSamplerParam param, int dtype) {
Operator *op = NULL;
#if MXNET_USE_CUDNN == 1 && CUDNN_MAJOR >= 5
MSHADOW_REAL_TYPE_SWITCH(dtype, DType, {
if (param.cudnn_off.has_value() && param.cudnn_off.value()) {
op = new BilinearSamplerOp<gpu, DType>(param);
} else {
op = new CuDNNBilinearSamplerOp<DType>(param);
}
})
#else
MSHADOW_REAL_TYPE_SWITCH(dtype, DType, {
op = new BilinearSamplerOp<gpu, DType>(param);
})
#endif // MXNET_USE_CUDNN && CUDNN_MAJOR
return op;
}
} // namespace op
} // namespace mxnet
| 3f99d1a5ca3dc345e0e826dca76e5f7b537f81f9.cu | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* Copyright (c) 2017 by Contributors
* \file bilinear_sampler.cu
* \brief
* \author Xu Dong
*/
#include "./bilinear_sampler-inl.h"
#include <algorithm>
#include "../common/cuda_utils.h"
#if MXNET_USE_CUDNN == 1 && CUDNN_MAJOR >= 5
#include "./cudnn_bilinear_sampler-inl.h"
#endif // MXNET_USE_CUDNN && CUDNN_MAJOR
namespace mshadow {
namespace cuda {
// True when `value` lies inside the closed interval [lowerBound, upperBound].
template<typename DType>
__device__ bool between(DType value, int lowerBound, int upperBound) {
return !(value < lowerBound) && !(value > upperBound);
}
// Forward pass of bilinear sampling: one thread per output element, using a
// 2-D grid-stride loop so any launch configuration covers the whole output.
// Layouts (flattened NCHW): data is (n, i_c, i_h, i_w), out is
// (o_n, o_c, o_h, o_w).  grid stores two planes of o_h*o_w values per sample:
// plane 0 = x coordinates, plane 1 = y coordinates (note the o_h*o_w offset
// below), both normalized to [-1, 1].
template<typename DType>
__global__ void BilinearSamplerForwardKernel(const int i_c, const int i_h,
const int i_w, const DType* data,
const DType* grid, const int o_n,
const int o_c, const int o_h,
const int o_w, DType* out) {
for (int index = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
index < o_n * o_c * o_h * o_w;
index += blockDim.x * gridDim.x * gridDim.y) {
// (n, c, h, w) is the element in out
int w = index % o_w;
int h = (index / o_w) % o_h;
int c = (index / o_w / o_h) % o_c;
int n = index / o_w / o_h / o_c;
index_t out_index = n * o_c * o_h * o_w + c * o_h * o_w + h * o_w + w;
index_t grid_index = n * o_h * o_w * 2 + h * o_w + w;
// map normalized coordinates [-1, 1] onto pixel coordinates [0, size-1]
DType y_real = (*(grid + grid_index + o_h * o_w) + 1) * (i_h - 1) / 2;
DType x_real = (*(grid + grid_index) + 1) * (i_w - 1) / 2;
int top_left_y = static_cast<int>(floor(y_real));
int top_left_x = static_cast<int>(floor(x_real));
// interpolation weight of the top-left corner along each axis
DType top_left_y_w = 1.0 - (y_real - top_left_y);
DType top_left_x_w = 1.0 - (x_real - top_left_x);
int data_index = n * i_c * i_h * i_w + c * i_h * i_w + top_left_y * i_w + top_left_x;
// corners that fall outside the input image contribute 0 (zero padding)
DType top_left_v = 0;
DType top_right_v = 0;
DType bottom_left_v = 0;
DType bottom_right_v = 0;
if (between(top_left_x, 0, i_w-1) && between(top_left_y, 0, i_h-1))
top_left_v = *(data + data_index);
if (between(top_left_x + 1, 0, i_w-1) && between(top_left_y, 0, i_h-1))
top_right_v = *(data + data_index + 1);
if (between(top_left_x, 0, i_w-1) && between(top_left_y + 1, 0, i_h-1))
bottom_left_v = *(data + data_index + i_w);
if (between(top_left_x+1, 0, i_w-1) && between(top_left_y + 1, 0, i_h-1))
bottom_right_v = *(data + data_index + i_w + 1);
// bilinear blend of the four neighbouring input pixels
*(out+out_index) = top_left_v * top_left_y_w * top_left_x_w +
top_right_v * top_left_y_w * (1.0 - top_left_x_w) +
bottom_left_v * (1.0 - top_left_y_w) * top_left_x_w +
bottom_right_v * (1.0 - top_left_y_w) * (1.0 - top_left_x_w);
}
}
// Backward pass of bilinear sampling.  One loop iteration per output
// *location* (n, h, w); the channel loop runs inside so the interpolation
// weights are computed once per location.  Gradients w.r.t. the input image
// are scattered with atomicAdd because several output locations can sample
// the same input pixel; gradients w.r.t. the grid are accumulated with +=.
// NOTE(review): both g_input and grad_grid are accumulated into, so the
// caller is expected to have zeroed them (or to want add-to semantics) --
// confirm against the calling operator.
template<typename DType>
__global__ void BilinearSamplerBackwardKernel(const int i_c, const int i_h,
const int i_w, const DType* grad,
const DType* data, const int o_n,
const int o_c, const int o_h,
const int o_w, DType* g_input,
const DType* grid_src,
DType* grad_grid) {
for (int index = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
index < o_n * o_h * o_w;
index += blockDim.x * gridDim.x * gridDim.y) {
// (n, h, w) is the spatial location handled by this iteration
int w = index % o_w;
int h = (index / o_w) % o_h;
int n = index / o_w / o_h;
// accumulated gradient w.r.t. the top-left interpolation weights
DType top_left_y_gw = 0.0;
DType top_left_x_gw = 0.0;
index_t grid_src_index = n * o_h * o_w * 2 + h * o_w + w;
// map normalized grid coordinates [-1, 1] onto pixel coordinates [0, size-1]
DType y_real = (*(grid_src + grid_src_index + o_h * o_w) + 1) * (i_h - 1) / 2;
DType x_real = (*(grid_src + grid_src_index) + 1) * (i_w - 1) / 2;
int top_left_y = static_cast<int>(floor(y_real));
int top_left_x = static_cast<int>(floor(x_real));
DType top_left_y_w = 1.0 - (y_real - top_left_y);
DType top_left_x_w = 1.0 - (x_real - top_left_x);
for (index_t c = 0; c < o_c; ++c) {
index_t grad_index = n * o_c * o_h * o_w + c * o_h * o_w + h * o_w + w;
int data_index = n * i_c * i_h * i_w + c * i_h * i_w + top_left_y * i_w + top_left_x;
// values of the 4 neighbouring input pixels (0 if outside the image)
DType top_left_v = 0;
DType top_right_v = 0;
DType bottom_left_v = 0;
DType bottom_right_v = 0;
// scatter input gradient; out-of-range corners receive no gradient
if (between(top_left_x, 0, i_w-1) && between(top_left_y, 0, i_h-1)) {
atomicAdd(&g_input[data_index], *(grad + grad_index) * top_left_y_w * top_left_x_w);
top_left_v = *(data + data_index);
}
if (between(top_left_x+1, 0, i_w-1) && between(top_left_y, 0, i_h-1)) {
atomicAdd(&g_input[data_index + 1], *(grad + grad_index) * top_left_y_w
* (1.0 - top_left_x_w));
top_right_v = *(data + data_index + 1);
}
if (between(top_left_x, 0, i_w-1) && between(top_left_y+1, 0, i_h-1)) {
atomicAdd(&g_input[data_index+ i_w], *(grad + grad_index) * (1.0 - top_left_y_w)
* top_left_x_w);
bottom_left_v = *(data + data_index + i_w);
}
if (between(top_left_x+1, 0, i_w-1) && between(top_left_y+1, 0, i_h-1)) {
atomicAdd(&g_input[data_index+ i_w + 1], *(grad + grad_index) * (1.0 - top_left_y_w)
* (1.0 - top_left_x_w));
bottom_right_v = *(data + data_index + i_w + 1);
}
// gradient w.r.t. the top-left weights; the minus sign turns the weight
// gradient into the gradient w.r.t. the sampling coordinate
top_left_y_gw -= *(grad + grad_index) * (top_right_v - bottom_right_v +
(top_left_v - top_right_v - bottom_left_v + bottom_right_v)
* top_left_x_w);
top_left_x_gw -= *(grad + grad_index) * (bottom_left_v - bottom_right_v +
(top_left_v - top_right_v - bottom_left_v + bottom_right_v)
* top_left_y_w);
}
// grid gradient: (i_h-1)/2 and (i_w-1)/2 are the chain-rule factors of
// the [-1,1] -> pixel-coordinate mapping above
*(grad_grid + grid_src_index + o_h * o_w) += top_left_y_gw * (i_h - 1) / 2;
*(grad_grid + grid_src_index) += top_left_x_gw * (i_w - 1) / 2;
}
}
} // namespace cuda
// Host-side launcher for the forward kernel: sizes a (possibly 2-D) grid so
// there is roughly one thread per output element, validates the launch
// configuration, and launches on the tensor's stream.
template<typename DType>
inline void BilinearSamplerForward(const Tensor<gpu, 4, DType> &output,
const Tensor<gpu, 4, DType> &input,
const Tensor<gpu, 4, DType> &grid_src) {
DType *out = output.dptr_;
const DType *data = input.dptr_;
const DType *grid = grid_src.dptr_;
int o_n = output.size(0), o_c = output.size(1), o_h = output.size(2), o_w = output.size(3);
int i_c = input.size(1), i_h = input.size(2), i_w = input.size(3);
using namespace cuda;
// number of blocks needed to cover every output element
const int max_block = (output.shape_.Size() + kMaxThreadsPerBlock - 1) / kMaxThreadsPerBlock;
// spill into the grid's y dimension when x alone cannot hold all blocks
const int grid_dim_x = (max_block > kMaxGridDim) ? kMaxGridDim : max_block;
const int grid_dim_y =
(max_block > kMaxGridDim) ? (max_block + kMaxGridDim - 1) / kMaxGridDim : 1;
dim3 num_blocks(grid_dim_x, grid_dim_y);
dim3 threads_per_block(kMaxThreadsPerBlock);
CheckLaunchParam(num_blocks, threads_per_block, "bilinear sampler forward");
cudaStream_t stream = Stream<gpu>::GetStream(output.stream_);
cuda::BilinearSamplerForwardKernel<DType> << <num_blocks, threads_per_block, 0, stream >> >(
i_c, i_h, i_w, data, grid, o_n, o_c, o_h, o_w, out);
// post kernel check (peek keeps the error available to later callers)
cudaError err = cudaPeekAtLastError();
CHECK_EQ(err, cudaSuccess) << cudaGetErrorString(err);
}
// Host-side launcher for the backward kernel.  The grid is sized for one
// thread per output *location* (shape size divided by o_c) because the
// backward kernel iterates over channels internally.
template<typename DType>
inline void BilinearSamplerBackward(const Tensor<gpu, 4, DType> &input_grad,
const Tensor<gpu, 4, DType> &ggrid,
const Tensor<gpu, 4, DType> &output_grad,
const Tensor<gpu, 4, DType> &input_data,
const Tensor<gpu, 4, DType> &grid) {
DType *g_input = input_grad.dptr_;
DType *grad_grid = ggrid.dptr_;
const DType *grid_src = grid.dptr_;
const DType *grad = output_grad.dptr_;
const DType *data = input_data.dptr_;
int o_n = output_grad.size(0), o_c = output_grad.size(1),
o_h = output_grad.size(2), o_w = output_grad.size(3);
int i_c = input_data.size(1), i_h = input_data.size(2), i_w = input_data.size(3);
using namespace cuda;
// blocks needed to cover n*h*w locations (channels handled inside the kernel)
const int max_block = (output_grad.shape_.Size() / o_c + kMaxThreadsPerBlock - 1)
/ kMaxThreadsPerBlock;
// spill into the grid's y dimension when x alone cannot hold all blocks
const int grid_dim_x = (max_block > kMaxGridDim) ? kMaxGridDim : max_block;
const int grid_dim_y =
(max_block > kMaxGridDim) ? (max_block + kMaxGridDim - 1) / kMaxGridDim : 1;
dim3 num_blocks(grid_dim_x, grid_dim_y);
dim3 threads_per_block(kMaxThreadsPerBlock);
CheckLaunchParam(num_blocks, threads_per_block, "bilinear sampler backward");
cudaStream_t stream = Stream<gpu>::GetStream(input_grad.stream_);
cuda::BilinearSamplerBackwardKernel<DType> << <num_blocks, threads_per_block, 0, stream >> >(
i_c, i_h, i_w, grad, data, o_n, o_c, o_h, o_w, g_input, grid_src, grad_grid);
// post kernel check (peek keeps the error available to later callers)
cudaError err = cudaPeekAtLastError();
CHECK_EQ(err, cudaSuccess) << cudaGetErrorString(err);
}
} // namespace mshadow
namespace mxnet {
namespace op {
// Factory for the GPU BilinearSampler operator: dispatches on the real dtype
// and, when built with cuDNN >= 5, prefers the cuDNN implementation unless
// the user explicitly set cudnn_off.
template<>
Operator* CreateOp<gpu>(BilinearSamplerParam param, int dtype) {
Operator *op = NULL;
#if MXNET_USE_CUDNN == 1 && CUDNN_MAJOR >= 5
MSHADOW_REAL_TYPE_SWITCH(dtype, DType, {
if (param.cudnn_off.has_value() && param.cudnn_off.value()) {
op = new BilinearSamplerOp<gpu, DType>(param);
} else {
op = new CuDNNBilinearSamplerOp<DType>(param);
}
})
#else
MSHADOW_REAL_TYPE_SWITCH(dtype, DType, {
op = new BilinearSamplerOp<gpu, DType>(param);
})
#endif // MXNET_USE_CUDNN && CUDNN_MAJOR
return op;
}
} // namespace op
} // namespace mxnet
|
ccfa8b5097656751830de63cf08bc0d10a135f13.hip | // !!! This is a file automatically generated by hipify!!!
#include <iostream>
#include <hip/hip_runtime.h>
#include "util.hpp"
// CPU reference implementation: returns the inner product
// sum_i x[i]*y[i] of two length-n vectors (0 for n <= 0).
double dot_host(const double *x, const double* y, int n) {
double acc = 0;
int i = 0;
while (i < n) {
acc += x[i] * y[i];
++i;
}
return acc;
}
// Element-wise stage of the dot product: result[i] = x[i] * y[i] for i < n.
// The products are summed afterwards by reduce_array().  Launch with at
// least n total threads; out-of-range threads do nothing.
template <int THREADS>
__global__
void dot_gpu_kernel(const double *x, const double* y, double *result, int n) {
int ind = threadIdx.x + blockIdx.x*blockDim.x;
if (ind < n) {
// Bug fix: a dot product needs the product here, not the sum
// (the original stored x[ind] + y[ind]).
result[ind] = x[ind] * y[ind];
}
}
// One pairwise-reduction step: result[i] = x[2i] + x[2i+1], with an odd
// trailing element copied through unchanged, so after this step the first
// ceil(n/2) entries of `result` hold partial sums of the n entries of `x`.
// Safe for any thread count: threads whose pair lies past the end of the
// array do nothing (the original read and wrote out of bounds for them).
__global__
void reduce_array(double *x, double *result, int n){
int ind_res = (threadIdx.x + blockIdx.x*blockDim.x);
int ind_x = 2*ind_res;
if (ind_x + 1 < n) {
result[ind_res] = x[ind_x] + x[ind_x+1];
} else if (ind_x < n) {
// odd element left over at the end of the array
result[ind_res] = x[ind_x];
}
}
// Dot product of device vectors x and y of length n (requires n <= 1024,
// since both kernels are launched with a single block).
// Fixes over the original version:
//  - the halving used n = n/2+1, which never reaches 1 for n >= 2
//    (infinite launch loop);
//  - the ping-pong buffers were never swapped, so every reduction step
//    re-read stale data and the final answer came from the unreduced array;
//  - reduce_array was handed the post-step length instead of the current one;
//  - the managed buffers were leaked on every call.
double dot_gpu(const double *x, const double* y, int n) {
if (n <= 0) {
return 0.0;
}
double* src = malloc_managed<double>(n);
double* dst = malloc_managed<double>((n + 1) / 2);
// stage 1: element-wise products
hipLaunchKernelGGL(( dot_gpu_kernel<1024>), dim3(1),dim3(n), 0, 0, x, y, src, n);
// stage 2: repeated pairwise reduction until a single value remains
while (n > 1) {
int m = (n + 1) / 2;  // number of partial sums produced by this step
hipLaunchKernelGGL(( reduce_array), dim3(1),dim3(m), 0, 0, src, dst, n);
double* tmp = src; src = dst; dst = tmp;
n = m;
}
hipDeviceSynchronize();
double result = src[0];
hipFree(src);
hipFree(dst);
return result;
}
//==================================================
// Single-block dot-product kernel: thread i loads x[i]*y[i] into shared
// memory, then a tree reduction folds the buffer down to buf[0], which
// thread 0 stores to *result.
// Assumes a single block with blockDim.x == THREADS, THREADS a power of two
// <= 1024 (buf is hard-coded to 1024), and n <= THREADS -- only threadIdx.x
// is used, so elements past the block are never read.
template <int THREADS>
__global__
void dot_gpu_kernel_solution(const double *x, const double* y, double *result, int n) {
__shared__ double buf[1024];
int i = threadIdx.x;
buf[i] = 0;
if (i<n){
buf[i] = x[i]*y[i];
}
int m = THREADS/2;
while(m) {
// barrier at the top of each halving step: all writes from the previous
// step must be visible before buf[i+m] is read
__syncthreads();
if (i<m) {
buf[i] += buf[i+m];
}
m = m/2;
}
if (i==0) {
*result = buf[0];
}
}
// Launches the single-block reduction kernel; valid only for n <= 1024.
// The managed result cell is function-static, so it is allocated once and
// reused across calls (not thread-safe; allocated for the process lifetime).
double dot_gpu_solution(const double *x, const double *y, int n){
static double* result = malloc_managed<double>(1);
hipLaunchKernelGGL(( dot_gpu_kernel_solution<1024>), dim3(1),dim3(1024), 0, 0, x,y,result,n);
// block until the kernel has written *result before reading it on the host
hipDeviceSynchronize();
return *result;
}
// solution for arbitrary number of threads and blocks
// Forward declaration: the multi-block kernel is defined just below.
template <int THREADS>
__global__ void dot_gpu_kernel_solution_arbitrary(const double *x, const double* y, double *result, int n);
// Dot product of device vectors x and y for arbitrary n, one atomicAdd per
// block.  Fixes over the original: `*results` and `<blockdim>` were typos
// that did not compile, and the launch now uses the multi-block kernel --
// the single-block dot_gpu_kernel_solution would have made every block
// overwrite *result with a partial sum of the first 1024 elements.
double dot_gpu_solution_arbitrary(const double *x, const double *y, int n){
static double* result = malloc_managed<double>(1);
*result = 0;  // the kernel accumulates with atomicAdd, so start from zero
constexpr int block_dim = 1024;
hipLaunchKernelGGL(( dot_gpu_kernel_solution_arbitrary<block_dim>), dim3((n+block_dim-1)/block_dim), dim3(block_dim), 0, 0, x,y,result,n);
hipDeviceSynchronize();
return *result;
}
// Multi-block dot-product kernel: each block reduces its tile of x[i]*y[i]
// products in shared memory, then lane 0 adds the block's partial sum into
// *result with atomicAdd.  *result must be zeroed before launch.
// Requires blockDim.x == THREADS with THREADS a power of two; atomicAdd on
// double needs compute capability 6.0+ (or the HIP equivalent).
template <int THREADS>
__global__
void dot_gpu_kernel_solution_arbitrary(const double *x, const double* y, double *result, int n) {
__shared__ double buf[THREADS];  // was hard-coded to 1024; follow the template parameter
int gid = threadIdx.x + blockIdx.x*blockDim.x;
int i = threadIdx.x;
buf[i] = 0;
if (gid < n){
buf[i] = x[gid]*y[gid];  // fix: the original read x[gidi], which does not compile
}
int m = THREADS/2;
while(m) {
// all writes of the previous halving step must be visible before reading
__syncthreads();
if (i < m) {
buf[i] += buf[i+m];
}
m = m/2;
}
if (i == 0) {
atomicAdd(result, buf[0]);
}
}
//==================================================
// Driver: fills x with 2.0 and y with random digits on the host, copies both
// to the device, and compares the GPU dot product against the CPU reference.
int main(int argc, char** argv) {
// problem size is 2^pow, with pow taken from argv[1] (default 4)
size_t pow = read_arg(argc, argv, 1, 4);
size_t n = (1 << pow);
auto size_in_bytes = n * sizeof(double);
std::cout << "dot product CUDA of length n = " << n
<< " : " << size_in_bytes*1e-9 << "MB\n";
auto x_h = malloc_host<double>(n, 2.);
auto y_h = malloc_host<double>(n);
for(auto i=0; i<n; ++i) {
y_h[i] = rand()%10;
}
auto x_d = malloc_device<double>(n);
auto y_d = malloc_device<double>(n);
// copy initial conditions to device
copy_to_device<double>(x_h, x_d, n);
copy_to_device<double>(y_h, y_d, n);
// NOTE(review): dot_gpu_solution launches a single 1024-thread block, so for
// pow > 10 only the first 1024 elements are reduced -- confirm intended.
auto result = dot_gpu_solution(x_d, y_d, n);
auto expected = dot_host(x_h, y_h, n);
printf("expected %f got %f\n", (float)expected, (float)result);
// host/device buffers are deliberately not freed (short-lived process)
return 0;
}
| ccfa8b5097656751830de63cf08bc0d10a135f13.cu | #include <iostream>
#include <cuda.h>
#include "util.hpp"
// CPU reference implementation: returns the inner product
// sum_i x[i]*y[i] of two length-n vectors (0 for n <= 0).
double dot_host(const double *x, const double* y, int n) {
double acc = 0;
int i = 0;
while (i < n) {
acc += x[i] * y[i];
++i;
}
return acc;
}
// Element-wise stage of the dot product: result[i] = x[i] * y[i] for i < n.
// The products are summed afterwards by reduce_array().  Launch with at
// least n total threads; out-of-range threads do nothing.
template <int THREADS>
__global__
void dot_gpu_kernel(const double *x, const double* y, double *result, int n) {
int ind = threadIdx.x + blockIdx.x*blockDim.x;
if (ind < n) {
// Bug fix: a dot product needs the product here, not the sum
// (the original stored x[ind] + y[ind]).
result[ind] = x[ind] * y[ind];
}
}
// One pairwise-reduction step: result[i] = x[2i] + x[2i+1], with an odd
// trailing element copied through unchanged, so after this step the first
// ceil(n/2) entries of `result` hold partial sums of the n entries of `x`.
// Safe for any thread count: threads whose pair lies past the end of the
// array do nothing (the original read and wrote out of bounds for them).
__global__
void reduce_array(double *x, double *result, int n){
int ind_res = (threadIdx.x + blockIdx.x*blockDim.x);
int ind_x = 2*ind_res;
if (ind_x + 1 < n) {
result[ind_res] = x[ind_x] + x[ind_x+1];
} else if (ind_x < n) {
// odd element left over at the end of the array
result[ind_res] = x[ind_x];
}
}
// Dot product of device vectors x and y of length n (requires n <= 1024,
// since both kernels are launched with a single block).
// Fixes over the original version:
//  - the halving used n = n/2+1, which never reaches 1 for n >= 2
//    (infinite launch loop);
//  - the ping-pong buffers were never swapped, so every reduction step
//    re-read stale data and the final answer came from the unreduced array;
//  - reduce_array was handed the post-step length instead of the current one;
//  - the managed buffers were leaked on every call.
double dot_gpu(const double *x, const double* y, int n) {
if (n <= 0) {
return 0.0;
}
double* src = malloc_managed<double>(n);
double* dst = malloc_managed<double>((n + 1) / 2);
// stage 1: element-wise products
dot_gpu_kernel<1024><<<1,n>>>(x, y, src, n);
// stage 2: repeated pairwise reduction until a single value remains
while (n > 1) {
int m = (n + 1) / 2;  // number of partial sums produced by this step
reduce_array<<<1,m>>>(src, dst, n);
double* tmp = src; src = dst; dst = tmp;
n = m;
}
cudaDeviceSynchronize();
double result = src[0];
cudaFree(src);
cudaFree(dst);
return result;
}
//==================================================
// Single-block dot-product kernel: thread i loads x[i]*y[i] into shared
// memory, then a tree reduction folds the buffer down to buf[0], which
// thread 0 stores to *result.
// Assumes a single block with blockDim.x == THREADS, THREADS a power of two
// <= 1024 (buf is hard-coded to 1024), and n <= THREADS -- only threadIdx.x
// is used, so elements past the block are never read.
template <int THREADS>
__global__
void dot_gpu_kernel_solution(const double *x, const double* y, double *result, int n) {
__shared__ double buf[1024];
int i = threadIdx.x;
buf[i] = 0;
if (i<n){
buf[i] = x[i]*y[i];
}
int m = THREADS/2;
while(m) {
// barrier at the top of each halving step: all writes from the previous
// step must be visible before buf[i+m] is read
__syncthreads();
if (i<m) {
buf[i] += buf[i+m];
}
m = m/2;
}
if (i==0) {
*result = buf[0];
}
}
// Launches the single-block reduction kernel; valid only for n <= 1024.
// The managed result cell is function-static, so it is allocated once and
// reused across calls (not thread-safe; allocated for the process lifetime).
double dot_gpu_solution(const double *x, const double *y, int n){
static double* result = malloc_managed<double>(1);
dot_gpu_kernel_solution<1024><<<1,1024>>>(x,y,result,n);
// block until the kernel has written *result before reading it on the host
cudaDeviceSynchronize();
return *result;
}
// solution for arbitrary number of threads and blocks
// Forward declaration: the multi-block kernel is defined just below.
template <int THREADS>
__global__ void dot_gpu_kernel_solution_arbitrary(const double *x, const double* y, double *result, int n);
// Dot product of device vectors x and y for arbitrary n, one atomicAdd per
// block.  Fixes over the original: `*results` and `<blockdim>` were typos
// that did not compile, and the launch now uses the multi-block kernel --
// the single-block dot_gpu_kernel_solution would have made every block
// overwrite *result with a partial sum of the first 1024 elements.
double dot_gpu_solution_arbitrary(const double *x, const double *y, int n){
static double* result = malloc_managed<double>(1);
*result = 0;  // the kernel accumulates with atomicAdd, so start from zero
constexpr int block_dim = 1024;
dot_gpu_kernel_solution_arbitrary<block_dim><<<(n+block_dim-1)/block_dim, block_dim>>>(x,y,result,n);
cudaDeviceSynchronize();
return *result;
}
// Multi-block dot-product kernel: each block reduces its tile of x[i]*y[i]
// products in shared memory, then lane 0 adds the block's partial sum into
// *result with atomicAdd.  *result must be zeroed before launch.
// Requires blockDim.x == THREADS with THREADS a power of two; atomicAdd on
// double needs compute capability 6.0+.
template <int THREADS>
__global__
void dot_gpu_kernel_solution_arbitrary(const double *x, const double* y, double *result, int n) {
__shared__ double buf[THREADS];  // was hard-coded to 1024; follow the template parameter
int gid = threadIdx.x + blockIdx.x*blockDim.x;
int i = threadIdx.x;
buf[i] = 0;
if (gid < n){
buf[i] = x[gid]*y[gid];  // fix: the original read x[gidi], which does not compile
}
int m = THREADS/2;
while(m) {
// all writes of the previous halving step must be visible before reading
__syncthreads();
if (i < m) {
buf[i] += buf[i+m];
}
m = m/2;
}
if (i == 0) {
atomicAdd(result, buf[0]);
}
}
//==================================================
// Driver: fills x with 2.0 and y with random digits on the host, copies both
// to the device, and compares the GPU dot product against the CPU reference.
int main(int argc, char** argv) {
// problem size is 2^pow, with pow taken from argv[1] (default 4)
size_t pow = read_arg(argc, argv, 1, 4);
size_t n = (1 << pow);
auto size_in_bytes = n * sizeof(double);
std::cout << "dot product CUDA of length n = " << n
<< " : " << size_in_bytes*1e-9 << "MB\n";
auto x_h = malloc_host<double>(n, 2.);
auto y_h = malloc_host<double>(n);
for(auto i=0; i<n; ++i) {
y_h[i] = rand()%10;
}
auto x_d = malloc_device<double>(n);
auto y_d = malloc_device<double>(n);
// copy initial conditions to device
copy_to_device<double>(x_h, x_d, n);
copy_to_device<double>(y_h, y_d, n);
// NOTE(review): dot_gpu_solution launches a single 1024-thread block, so for
// pow > 10 only the first 1024 elements are reduced -- confirm intended.
auto result = dot_gpu_solution(x_d, y_d, n);
auto expected = dot_host(x_h, y_h, n);
printf("expected %f got %f\n", (float)expected, (float)result);
// host/device buffers are deliberately not freed (short-lived process)
return 0;
}
|
7d3b2d87089ea3eba0393e3f52365e30f86d4dd5.hip | // !!! This is a file automatically generated by hipify!!!
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <getopt.h>
#include <hiprand/hiprand_kernel.h>
#include <stdlib.h>
#include <hip/hip_runtime.h>
#include <sys/time.h>
#include "orthogonalize.cu"
#include<chrono>
#include<iostream>
using namespace std;
using namespace std::chrono;
int blocks_[20][2] = {{8,8},{16,16},{24,24},{32,32},{1,64},{1,128},{1,192},{1,256},{1,320},{1,384},{1,448},{1,512},{1,576},{1,640},{1,704},{1,768},{1,832},{1,896},{1,960},{1,1024}};
int matrices_[7][2] = {{240,240},{496,496},{784,784},{1016,1016},{1232,1232},{1680,1680},{2024,2024}};
// Auto-generated benchmark harness for the `orthogonalize` kernel: for each
// matrix shape and each of 20 launch configurations it warms up, then times
// 1000 launches and prints [usecs,(BLOCKX,BLOCKY),(XSIZE,YSIZE)].
int main(int argc, char **argv) {
hipSetDevice(0);
// Guard against a missing argument (the original dereferenced argv[1] blindly).
if (argc < 2) { cout << "usage: bench <matrix_count>" << endl; return 1; }
char* p;int matrix_len=strtol(argv[1], &p, 10);
for(int matrix_looper=0;matrix_looper<matrix_len;matrix_looper++){
for(int block_looper=0;block_looper<20;block_looper++){
int XSIZE=matrices_[matrix_looper][0],YSIZE=matrices_[matrix_looper][1],BLOCKX=blocks_[block_looper][0],BLOCKY=blocks_[block_looper][1];
float *eigvec = NULL;
// Bug fix: the original allocated XSIZE*YSIZE *bytes* (a quarter of the
// elements the kernel may touch); allocate XSIZE*YSIZE elements instead.
hipMalloc(&eigvec, XSIZE*YSIZE*sizeof(float));
float *Qi_gdof = NULL;
hipMalloc(&Qi_gdof, XSIZE*YSIZE*sizeof(float));
int cdof = 1;
int *blocksizes = NULL;
hipMalloc(&blocksizes, XSIZE*YSIZE*sizeof(int));
int *blocknums = NULL;
hipMalloc(&blocknums, XSIZE*YSIZE*sizeof(int));
int largestblock = 1;
// round the matrix shape up to a multiple of the block shape
int iXSIZE= XSIZE;
int iYSIZE= YSIZE;
while(iXSIZE%BLOCKX!=0)
{
iXSIZE++;
}
while(iYSIZE%BLOCKY!=0)
{
iYSIZE++;
}
dim3 gridBlock(iXSIZE/BLOCKX, iYSIZE/BLOCKY);
dim3 threadBlock(BLOCKX, BLOCKY);
// context warm-up plus one untimed launch
hipFree(0);hipLaunchKernelGGL((
orthogonalize), dim3(gridBlock),dim3(threadBlock), 0, 0, eigvec,Qi_gdof,cdof,blocksizes,blocknums,largestblock);
hipDeviceSynchronize();
for (int loop_counter = 0; loop_counter < 10; ++loop_counter) {hipLaunchKernelGGL((
orthogonalize), dim3(gridBlock),dim3(threadBlock), 0, 0, eigvec,Qi_gdof,cdof,blocksizes,blocknums,largestblock);
}
auto start = steady_clock::now();
for (int loop_counter = 0; loop_counter < 1000; loop_counter++) {hipLaunchKernelGGL((
orthogonalize), dim3(gridBlock),dim3(threadBlock), 0, 0, eigvec,Qi_gdof,cdof,blocksizes,blocknums,largestblock);
}
// Bug fix: kernel launches are asynchronous, so without this sync the
// loop above only measured launch overhead, not kernel execution.
hipDeviceSynchronize();
auto end = steady_clock::now();
auto usecs = duration_cast<duration<float, microseconds::period> >(end - start);
cout <<'['<<usecs.count()<<','<<'('<<BLOCKX<<','<<BLOCKY<<')' << ','<<'('<<XSIZE<<','<<YSIZE<<')'<<']' << endl;
// release the per-configuration buffers (the original leaked them)
hipFree(eigvec);
hipFree(Qi_gdof);
hipFree(blocksizes);
hipFree(blocknums);
}
}} | 7d3b2d87089ea3eba0393e3f52365e30f86d4dd5.cu | #include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <getopt.h>
#include <curand_kernel.h>
#include <stdlib.h>
#include <cuda.h>
#include <sys/time.h>
#include "orthogonalize.cu"
#include<chrono>
#include<iostream>
using namespace std;
using namespace std::chrono;
int blocks_[20][2] = {{8,8},{16,16},{24,24},{32,32},{1,64},{1,128},{1,192},{1,256},{1,320},{1,384},{1,448},{1,512},{1,576},{1,640},{1,704},{1,768},{1,832},{1,896},{1,960},{1,1024}};
int matrices_[7][2] = {{240,240},{496,496},{784,784},{1016,1016},{1232,1232},{1680,1680},{2024,2024}};
// Auto-generated benchmark harness for the `orthogonalize` kernel: for each
// matrix shape and each of 20 launch configurations it warms up, then times
// 1000 launches and prints [usecs,(BLOCKX,BLOCKY),(XSIZE,YSIZE)].
int main(int argc, char **argv) {
cudaSetDevice(0);
// Guard against a missing argument (the original dereferenced argv[1] blindly).
if (argc < 2) { cout << "usage: bench <matrix_count>" << endl; return 1; }
char* p;int matrix_len=strtol(argv[1], &p, 10);
for(int matrix_looper=0;matrix_looper<matrix_len;matrix_looper++){
for(int block_looper=0;block_looper<20;block_looper++){
int XSIZE=matrices_[matrix_looper][0],YSIZE=matrices_[matrix_looper][1],BLOCKX=blocks_[block_looper][0],BLOCKY=blocks_[block_looper][1];
float *eigvec = NULL;
// Bug fix: the original allocated XSIZE*YSIZE *bytes* (a quarter of the
// elements the kernel may touch); allocate XSIZE*YSIZE elements instead.
cudaMalloc(&eigvec, XSIZE*YSIZE*sizeof(float));
float *Qi_gdof = NULL;
cudaMalloc(&Qi_gdof, XSIZE*YSIZE*sizeof(float));
int cdof = 1;
int *blocksizes = NULL;
cudaMalloc(&blocksizes, XSIZE*YSIZE*sizeof(int));
int *blocknums = NULL;
cudaMalloc(&blocknums, XSIZE*YSIZE*sizeof(int));
int largestblock = 1;
// round the matrix shape up to a multiple of the block shape
int iXSIZE= XSIZE;
int iYSIZE= YSIZE;
while(iXSIZE%BLOCKX!=0)
{
iXSIZE++;
}
while(iYSIZE%BLOCKY!=0)
{
iYSIZE++;
}
dim3 gridBlock(iXSIZE/BLOCKX, iYSIZE/BLOCKY);
dim3 threadBlock(BLOCKX, BLOCKY);
// context warm-up plus one untimed launch
cudaFree(0);
orthogonalize<<<gridBlock,threadBlock>>>(eigvec,Qi_gdof,cdof,blocksizes,blocknums,largestblock);
cudaDeviceSynchronize();
for (int loop_counter = 0; loop_counter < 10; ++loop_counter) {
orthogonalize<<<gridBlock,threadBlock>>>(eigvec,Qi_gdof,cdof,blocksizes,blocknums,largestblock);
}
auto start = steady_clock::now();
for (int loop_counter = 0; loop_counter < 1000; loop_counter++) {
orthogonalize<<<gridBlock,threadBlock>>>(eigvec,Qi_gdof,cdof,blocksizes,blocknums,largestblock);
}
// Bug fix: kernel launches are asynchronous, so without this sync the
// loop above only measured launch overhead, not kernel execution.
cudaDeviceSynchronize();
auto end = steady_clock::now();
auto usecs = duration_cast<duration<float, microseconds::period> >(end - start);
cout <<'['<<usecs.count()<<','<<'('<<BLOCKX<<','<<BLOCKY<<')' << ','<<'('<<XSIZE<<','<<YSIZE<<')'<<']' << endl;
// release the per-configuration buffers (the original leaked them)
cudaFree(eigvec);
cudaFree(Qi_gdof);
cudaFree(blocksizes);
cudaFree(blocknums);
}
}} |
a550cf5d07a25404787b10c5d9b16b5756843cda.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include<cuda_runtime.h>
#include<device_launch_parameters.h>
#include<stdio.h>
#include<stdlib.h>
#include<string.h>
// Row-power kernel: thread t (one thread per row) rewrites every element of
// its row as a^(t+1) -- row 0 is copied unchanged, row 1 squared
// element-wise, row 2 cubed, and so on.  *d_m is the row length.
__global__ void q1(int* d_a,int* d_r,int *d_m)
{
int row = threadIdx.x;
int width = *d_m;
for (int col = 0; col < width; col++)
{
int idx = row * width + col;
int v = d_a[idx];
int acc = v;
for (int j = 0; j < row; j++)
acc *= v;
d_r[idx] = acc;
}
}
// Reads an m x n matrix from stdin, launches one thread per row group of m
// elements (thread t raises its m elements to the power t+1 via kernel q1),
// and prints the result with a line break every m values.
// NOTE(review): scanf return values are unchecked and neither the host nor
// the device allocations are freed before exit -- acceptable for a short
// teaching program, but worth confirming.
int main(void)
{
int *a,*r,m,n,i;
int *d_a,*d_r,*d_m;
printf("Enter m,n : ");
scanf("%d %d",&m,&n);
a = (int*)malloc(m*n*sizeof(int));
r = (int*)malloc(m*n*sizeof(int));
printf("Enter matrix:\n");
for(i=0;i<m*n;i++)
{
scanf("%d",&a[i]);
}
hipMalloc((void **)&d_a,(m*n)*sizeof(int));
hipMalloc((void **)&d_r,(m*n)*sizeof(int));
hipMalloc((void **)&d_m,sizeof(int));
hipMemcpy(d_a,a,(m*n)*sizeof(int),hipMemcpyHostToDevice);
// copying r is unnecessary (the kernel overwrites every element) but harmless
hipMemcpy(d_r,r,(m*n)*sizeof(int),hipMemcpyHostToDevice);
hipMemcpy(d_m,&m,sizeof(int),hipMemcpyHostToDevice);
// n threads, each handling one row of m elements
hipLaunchKernelGGL((
q1), dim3(1),dim3(n), 0, 0, d_a,d_r,d_m);
hipError_t error = hipGetLastError();
if(error!= hipSuccess)
{
printf("%s\n",hipGetErrorString(error));
}
// blocking copy also synchronizes with the kernel before reading results
hipMemcpy(r,d_r,(m*n)*sizeof(int),hipMemcpyDeviceToHost);
printf("Result matrix :\n");
for(i=0;i<m*n;i++)
{
printf("%d\t",r[i]);
if((i+1)%m==0)
printf("\n");
}
} | a550cf5d07a25404787b10c5d9b16b5756843cda.cu | #include<cuda_runtime.h>
#include<device_launch_parameters.h>
#include<stdio.h>
#include<stdlib.h>
#include<string.h>
// Row-power kernel: thread t (one thread per row) rewrites every element of
// its row as a^(t+1) -- row 0 is copied unchanged, row 1 squared
// element-wise, row 2 cubed, and so on.  *d_m is the row length.
__global__ void q1(int* d_a,int* d_r,int *d_m)
{
int row = threadIdx.x;
int width = *d_m;
for (int col = 0; col < width; col++)
{
int idx = row * width + col;
int v = d_a[idx];
int acc = v;
for (int j = 0; j < row; j++)
acc *= v;
d_r[idx] = acc;
}
}
// Reads an m x n matrix from stdin, launches one thread per row group of m
// elements (thread t raises its m elements to the power t+1 via kernel q1),
// and prints the result with a line break every m values.
// NOTE(review): scanf return values are unchecked and neither the host nor
// the device allocations are freed before exit -- acceptable for a short
// teaching program, but worth confirming.
int main(void)
{
int *a,*r,m,n,i;
int *d_a,*d_r,*d_m;
printf("Enter m,n : ");
scanf("%d %d",&m,&n);
a = (int*)malloc(m*n*sizeof(int));
r = (int*)malloc(m*n*sizeof(int));
printf("Enter matrix:\n");
for(i=0;i<m*n;i++)
{
scanf("%d",&a[i]);
}
cudaMalloc((void **)&d_a,(m*n)*sizeof(int));
cudaMalloc((void **)&d_r,(m*n)*sizeof(int));
cudaMalloc((void **)&d_m,sizeof(int));
cudaMemcpy(d_a,a,(m*n)*sizeof(int),cudaMemcpyHostToDevice);
// copying r is unnecessary (the kernel overwrites every element) but harmless
cudaMemcpy(d_r,r,(m*n)*sizeof(int),cudaMemcpyHostToDevice);
cudaMemcpy(d_m,&m,sizeof(int),cudaMemcpyHostToDevice);
// n threads, each handling one row of m elements
q1<<<1,n>>>(d_a,d_r,d_m);
cudaError_t error = cudaGetLastError();
if(error!= cudaSuccess)
{
printf("%s\n",cudaGetErrorString(error));
}
// blocking copy also synchronizes with the kernel before reading results
cudaMemcpy(r,d_r,(m*n)*sizeof(int),cudaMemcpyDeviceToHost);
printf("Result matrix :\n");
for(i=0;i<m*n;i++)
{
printf("%d\t",r[i]);
if((i+1)%m==0)
printf("\n");
}
} |
97482bc638d28dd94adc2fc8f2db5ecf7bb2fbc9.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include <time.h>
hipError_t addWithCuda(int *c, const int *a, const int *b, unsigned int size);
hipError_t addWithCudaDouble(double* c, double* a, double* b, unsigned int size);
hipError_t addWithCudaFloat(float* c, float* a, float* b, unsigned int size);
hipError_t addWithCudaFloatTimes(float* c, float* a, float* b, unsigned int size, int laps);
hipError_t addWithCudaFloatTimesBlocks(float* c, float* a, float* b, unsigned int size, int laps);
// Element-wise integer vector addition: c[i] = a[i] + b[i].
// One thread per element; expects a single-block launch of `size` threads.
__global__ void addKernel(int *c, const int *a, const int *b)
{
	int i = threadIdx.x;
	c[i] = a[i] + b[i];
}
// NOTE(review): despite the "add" name this kernel computes the element-wise
// PRODUCT c[i] = a[i] * b[i].  Single-block launch, one thread per element.
__global__ void addKernelDouble(double* c, double* a, double* b)
{
	int i = threadIdx.x;
	c[i] = a[i] * b[i];
}
// Element-wise product (c[i] = a[i] * b[i]) that also prints each thread's
// thread/block coordinates for debugging.  Single-block launch assumed.
// NOTE(review): the printf shows "+" between the operands, but the operation
// performed is multiplication -- the message is misleading.
__global__ void addKernelDoubleShowIndex(double* c, double* a, double* b)
{
	int i = threadIdx.x;
	c[i] = a[i] * b[i];
	printf("H-> [X:%d Y:%d Z:%d] B->[X:%d Y:%d Z:%d] {%1f + %1f}={%1f}\n", threadIdx.x, threadIdx.y, threadIdx.z
		, blockIdx.x, blockIdx.y, blockIdx.z
		, a[i], b[i], c[i]);
}
// Multi-block variant of addKernelDoubleShowIndex: uses the flat global index
// blockIdx.x * blockDim.x + threadIdx.x, so grid*block must equal the element
// count (no bounds guard).  Computes c[i] = a[i] * b[i] and prints the thread
// and block coordinates ("+" in the message is misleading; the op is "*").
__global__ void addKernelDoubleShowIndexBlocks(double* c, double* a, double* b)
{
	int i = blockIdx.x * blockDim.x + threadIdx.x;
	c[i] = a[i] * b[i];
	printf("H-> [X:%d Y:%d Z:%d] B->[X:%d Y:%d Z:%d] {%1f + %1f}={%1f}\n", threadIdx.x, threadIdx.y, threadIdx.z
		, blockIdx.x, blockIdx.y, blockIdx.z
		, a[i], b[i], c[i]);
}
// Single-block N x N matrix multiply: thread (x, y) computes one output
// element c[x*N + y] as the dot product of row x of `a` and column y of `b`.
// Only valid when N*N threads fit in one block (N <= 32 for a 1024-thread
// block limit); there is no bounds guard.
__global__ void multMatrixKernelFloat(float* c, float* a, float* b, int N) {
	int i = threadIdx.x;
	int j = threadIdx.y;
	float sum = 0;
	for (int k = 0; k < N; k++)
		sum += a[i * N + k] * b[k * N + j];
	c[i * N + j] = sum;
	//printf("A-> [X:%d Y:%d] [P:%d] = %f\n", i, j, (i * N + j), b[(i*N+j)]);
}
// Multi-block N x N matrix multiply: global 2-D index (i, j) selects the
// output element.  Requires gridDim * blockDim == N in both dimensions
// (no bounds guard for a ragged tail).
__global__ void multMatrixKernelFloatBlocks(float* c, float* a, float* b, int N) {
	int i = blockDim.x * blockIdx.x + threadIdx.x;
	int j = blockDim.y * blockIdx.y + threadIdx.y;
	float sum = 0;
	for (int k = 0; k < N; k++)
		sum += a[i * N + k] * b[k * N + j];
	c[i * N + j] = sum;
	//printf("A-> [X:%d Y:%d] [P:%d] = %f\n", i, j, (i * N + j), b[(i*N+j)]);
}
// Like multMatrixKernelFloatBlocks, but offsets BOTH block coordinates by
// `blockAdjust`, so a half-sized grid launched repeatedly with different
// offsets covers diagonal tile groups of the output (see
// addWithCudaFloatTimesBlocks).  No bounds guard.
__global__ void multMatrixKernelFloatBlocks2(float* c, float* a, float* b, int N, int blockAdjust) {
	int i = blockDim.x * (blockIdx.x + blockAdjust) + threadIdx.x;
	int j = blockDim.y * (blockIdx.y + blockAdjust) + threadIdx.y;
	float sum = 0;
	for (int k = 0; k < N; k++)
		sum += a[i * N + k] * b[k * N + j];
	c[i * N + j] = sum;
	//printf("A-> [X:%d Y:%d] [P:%d] = %f\n", i, j, (i * N + j), b[(i*N+j)]);
}
// CPU reference implementation: C = A * B for square N x N row-major
// matrices.  Classic triple loop; used to validate the GPU kernels.
void MatrixMultiplication(float* A, float* B, float* C, int N)
{
	for (int row = 0; row < N; ++row) {
		for (int col = 0; col < N; ++col) {
			float acc = 0.0f;
			for (int k = 0; k < N; ++k) {
				acc += A[row * N + k] * B[k * N + col];
			}
			C[row * N + col] = acc;
		}
	}
}
// Fill two N x N row-major test matrices: a[i][j] = i + j, b[i][j] = i - j.
void generateMatrix(float* a, float*b, int N) {
	for (int row = 0; row < N; ++row) {
		for (int col = 0; col < N; ++col) {
			int idx = row * N + col;
			a[idx] = (float)(row + col);
			b[idx] = (float)(row - col);
		}
	}
}
// Print an N x N row-major matrix to stdout, one row per line, values
// separated by " | " (a newline is emitted before each row).
void showMatrix(float* a, int N){
	for (int row = 0; row < N; ++row) {
		printf("\n");
		for (int col = 0; col < N; ++col) {
			printf(" | %f", a[row * N + col]);
		}
	}
}
// Print the main diagonal of an N x N row-major matrix to stdout.
void showDiagonalMatrix(float* a, int N) {
	for (int idx = 0; idx < N; ++idx) {
		printf(" | %f", a[idx * (N + 1)]);  // a[idx + idx*N]
	}
}
// Practica 8 driver: builds two 1024 x 1024 matrices, multiplies them on the
// GPU (timed, via addWithCudaFloatTimesBlocks) and prints the diagonal of the
// result.
int main()
{
	printf("Practica 8");
	printf("\n-----------------------------------------------------------------------------------------------");
	int laps = 1;        // number of timed repetitions of the kernel launch
	const int N = 1024;  // matrix dimension (N x N)
	float* a = new float[N * N];
	float* b = new float[N * N];
	float* c = new float[N * N];
	generateMatrix(a, b, N);
	//Add vectors in parallel.
	hipError_t cudaStatus1 = addWithCudaFloatTimesBlocks(c, a, b, N, laps);
	if (cudaStatus1 != hipSuccess) {
		fprintf(stderr, "addWithCuda failed!");
		return 1;  // NOTE(review): a/b/c leak on this early return
	}
	// hipDeviceReset must be called before exiting in order for profiling and
	// tracing tools such as Nsight and Visual Profiler to show complete traces.
	cudaStatus1 = hipDeviceReset();
	if (cudaStatus1 != hipSuccess) {
		fprintf(stderr, "hipDeviceReset failed!");
		return 1;  // NOTE(review): a/b/c leak on this early return
	}
	showDiagonalMatrix(c, N);
	delete[] a;
	delete[] b;
	delete[] c;
	return 0;
}
// Helper function for using CUDA to add vectors in parallel.
// Allocates device buffers for a, b and c (`size` ints each), copies the
// inputs to the GPU, launches addKernel on one block of `size` threads
// (so size must fit in a single block), and copies the sums back into c.
// Returns hipSuccess on success or the first HIP error encountered; all
// cleanup funnels through the Error label, so device buffers are freed on
// both success and failure paths.
hipError_t addWithCuda(int *c, const int *a, const int *b, unsigned int size)
{
	int *dev_a = 0;
	int *dev_b = 0;
	int *dev_c = 0;
	hipError_t cudaStatus;
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = hipSetDevice(0);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for three vectors (two input, one output) .
	cudaStatus = hipMalloc((void**)&dev_c, size * sizeof(int));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_a, size * sizeof(int));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_b, size * sizeof(int));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	// Copy input vectors from host memory to GPU buffers.
	cudaStatus = hipMemcpy(dev_a, a, size * sizeof(int), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	cudaStatus = hipMemcpy(dev_b, b, size * sizeof(int), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	// Launch a kernel on the GPU with one thread for each element.
	hipLaunchKernelGGL(( addKernel), dim3(1), dim3(size), 0, 0, dev_c, dev_a, dev_b);
	// Check for any errors launching the kernel
	cudaStatus = hipGetLastError();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", hipGetErrorString(cudaStatus));
		goto Error;
	}
	// hipDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = hipDeviceSynchronize();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy output vector from GPU buffer to host memory.
	cudaStatus = hipMemcpy(c, dev_c, size * sizeof(int), hipMemcpyDeviceToHost);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
Error:
	hipFree(dev_c);
	hipFree(dev_a);
	hipFree(dev_b);
	return cudaStatus;
}
// Element-wise combine of two double vectors on the GPU.  Despite the "add"
// in the name, the launched kernel (addKernelDoubleShowIndexBlocks) computes
// c[i] = a[i] * b[i] and prints per-thread index information.
// The launch uses 10 blocks x 10 threads, so callers must pass size == 100.
// Returns hipSuccess on success or the first HIP error encountered; device
// buffers are always released through the Error label.
// FIX: the kernel-launch line was a malformed hipify remnant
// ("hipLaunchKernelGGL(( ...) , dim3(10), 10 >> > (...)") that did not
// compile; it is now a well-formed hipLaunchKernelGGL call.
hipError_t addWithCudaDouble(double* c, double* a, double* b, unsigned int size)
{
	double* dev_a = 0;
	double* dev_b = 0;
	double* dev_c = 0;
	hipError_t cudaStatus;
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = hipSetDevice(0);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for three vectors (two input, one output) .
	cudaStatus = hipMalloc((void**)&dev_c, size * sizeof(double));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_a, size * sizeof(double));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_b, size * sizeof(double));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	// Copy input vectors from host memory to GPU buffers.
	cudaStatus = hipMemcpy(dev_a, a, size * sizeof(double), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	cudaStatus = hipMemcpy(dev_b, b, size * sizeof(double), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	// Launch a kernel on the GPU with one thread for each element
	// (10 blocks x 10 threads == 100 elements).
	hipLaunchKernelGGL(addKernelDoubleShowIndexBlocks, dim3(10), dim3(10), 0, 0, dev_c, dev_a, dev_b);
	// Check for any errors launching the kernel
	cudaStatus = hipGetLastError();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", hipGetErrorString(cudaStatus));
		goto Error;
	}
	// hipDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = hipDeviceSynchronize();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy output vector from GPU buffer to host memory.
	cudaStatus = hipMemcpy(c, dev_c, size * sizeof(double), hipMemcpyDeviceToHost);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
Error:
	hipFree(dev_c);
	hipFree(dev_a);
	hipFree(dev_b);
	return cudaStatus;
}
// Multiplies two size x size float matrices on the GPU with the single-block
// kernel multMatrixKernelFloat and copies the product into c.
// The launch uses one block of size x size threads, so this only works for
// size <= 32 on hardware with a 1024-thread block limit.
// Returns hipSuccess on success; device buffers are released via Error.
// FIXES: the kernel-launch line was a malformed hipify remnant that did not
// compile; the dim3 declarations are hoisted above the first goto so no
// error-path jump skips an initialised declaration (ill-formed C++).
hipError_t addWithCudaFloat(float* c, float* a, float* b, unsigned int size)
{
	float* dev_a = 0;
	float* dev_b = 0;
	float* dev_c = 0;
	hipError_t cudaStatus;
	// Launch geometry: one block covering the whole matrix.
	dim3 threadsPerBlock(size, size);
	dim3 numBlocks(size / threadsPerBlock.x, size / threadsPerBlock.y);  // == (1, 1)
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = hipSetDevice(0);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for the two inputs and the output.
	cudaStatus = hipMalloc((void**)&dev_c, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_a, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_b, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	// Copy input matrices from host memory to GPU buffers.
	cudaStatus = hipMemcpy(dev_a, a, size * size * sizeof(float), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	cudaStatus = hipMemcpy(dev_b, b, size * size * sizeof(float), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	// Launch a kernel on the GPU with one thread for each output element.
	hipLaunchKernelGGL(multMatrixKernelFloat, numBlocks, threadsPerBlock, 0, 0, dev_c, dev_a, dev_b, size);
	// Check for any errors launching the kernel
	cudaStatus = hipGetLastError();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", hipGetErrorString(cudaStatus));
		goto Error;
	}
	// hipDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = hipDeviceSynchronize();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy the product matrix back to host memory.
	cudaStatus = hipMemcpy(c, dev_c, size * size * sizeof(float), hipMemcpyDeviceToHost);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
Error:
	hipFree(dev_c);
	hipFree(dev_a);
	hipFree(dev_b);
	return cudaStatus;
}
// Times `laps` back-to-back launches of the single-block matrix-multiply
// kernel multMatrixKernelFloat (size x size threads in one block, so
// size <= 32) and prints the average seconds per launch, then copies the
// product matrix into c.
// Returns hipSuccess on success; device buffers are released via Error.
// FIXES: the timing events were created but never destroyed (leak); the
// timing/geometry declarations are hoisted above the first goto so no
// error-path jump skips an initialised declaration (ill-formed C++).
hipError_t addWithCudaFloatTimes(float* c, float* a, float* b, unsigned int size, int laps)
{
	float* dev_a = 0;
	float* dev_b = 0;
	float* dev_c = 0;
	hipError_t cudaStatus;
	// Timing state and launch geometry, declared before any goto.
	float milliseconds = 0;
	hipEvent_t start, stop;
	dim3 threadsPerBlock(size, size);
	dim3 numBlocks(size / threadsPerBlock.x, size / threadsPerBlock.y);  // == (1, 1)
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = hipSetDevice(0);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for the two inputs and the output.
	cudaStatus = hipMalloc((void**)&dev_c, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_a, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_b, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	// Copy input matrices from host memory to GPU buffers.
	cudaStatus = hipMemcpy(dev_a, a, size * size * sizeof(float), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	cudaStatus = hipMemcpy(dev_b, b, size * size * sizeof(float), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	// Time `laps` consecutive launches with HIP events.
	hipEventCreate(&start);
	hipEventCreate(&stop);
	hipEventRecord(start);
	for (int i = 0; i < laps; i++) {
		hipLaunchKernelGGL(( multMatrixKernelFloat) , dim3(numBlocks), dim3(threadsPerBlock) , 0, 0, dev_c, dev_a, dev_b, size);
	}
	hipEventRecord(stop);
	hipEventSynchronize(stop);
	hipEventElapsedTime(&milliseconds, start, stop);
	// Release the events now that the measurement is read (previously leaked).
	hipEventDestroy(start);
	hipEventDestroy(stop);
	// Average seconds per launch.
	printf("\nTiempo: %f\n\n", (milliseconds/(float)laps)/(float)1000);
	// Check for any errors launching the kernel
	cudaStatus = hipGetLastError();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", hipGetErrorString(cudaStatus));
		goto Error;
	}
	// hipDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = hipDeviceSynchronize();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy the product matrix back to host memory.
	cudaStatus = hipMemcpy(c, dev_c, size * size * sizeof(float), hipMemcpyDeviceToHost);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
Error:
	hipFree(dev_c);
	hipFree(dev_a);
	hipFree(dev_b);
	return cudaStatus;
}
// Times `laps` iterations of a multi-block matrix multiply that covers the
// output in passes of multMatrixKernelFloatBlocks2 (32x32-thread blocks,
// half-sized grid shifted by blockAdjust) and prints the average seconds per
// lap, then copies the result into c.
// Returns hipSuccess on success; device buffers are released via Error.
// FIXES: the timing events were created but never destroyed (leak); the
// timing/geometry declarations are hoisted above the first goto so no
// error-path jump skips an initialised declaration (ill-formed C++).
hipError_t addWithCudaFloatTimesBlocks(float* c, float* a, float* b, unsigned int size, int laps)
{
	float* dev_a = 0;
	float* dev_b = 0;
	float* dev_c = 0;
	int threads = 32;
	hipError_t cudaStatus;
	// Timing state and launch geometry, declared before any goto.
	float milliseconds = 0;
	hipEvent_t start, stop;
	dim3 threadsPerBlock(threads, threads, 1);
	dim3 numBlocks(size / threadsPerBlock.x /2, size/ threadsPerBlock.y/2 , 1);
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = hipSetDevice(0);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for the two inputs and the output.
	cudaStatus = hipMalloc((void**)&dev_c, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_a, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	cudaStatus = hipMalloc((void**)&dev_b, size * size * sizeof(float));
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMalloc failed!");
		goto Error;
	}
	// Copy input matrices from host memory to GPU buffers.
	cudaStatus = hipMemcpy(dev_a, a, size * size * sizeof(float), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	cudaStatus = hipMemcpy(dev_b, b, size * size * sizeof(float), hipMemcpyHostToDevice);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
	// Time `laps` iterations with HIP events.
	hipEventCreate(&start);
	hipEventCreate(&stop);
	hipEventRecord(start);
	for (int i = 0; i < laps; i++) {
		//multMatrixKernelFloatBlocks <<< numBlocks, threadsPerBlock >> > (dev_c, dev_a, dev_b, size);
		// NOTE(review): blockAdjust shifts BOTH block coordinates, so with a
		// half-sized grid the passes e = 0 and e = 16 cover only the diagonal
		// quadrants of the output.  main() only prints the diagonal, so this
		// appears intentional for the practica -- confirm before reusing.
		for (int e = 0; e < 32 ; e+=16 ) {
			hipLaunchKernelGGL(( multMatrixKernelFloatBlocks2) , dim3(numBlocks), dim3(threadsPerBlock) , 0, 0, dev_c, dev_a, dev_b, size, e);
		}
	}
	hipEventRecord(stop);
	hipEventSynchronize(stop);
	hipEventElapsedTime(&milliseconds, start, stop);
	// Release the events now that the measurement is read (previously leaked).
	hipEventDestroy(start);
	hipEventDestroy(stop);
	// Average seconds per lap.
	printf("\nTiempo: %f\n\n", (milliseconds / (float)laps)/(float)1000);
	// Check for any errors launching the kernel
	cudaStatus = hipGetLastError();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", hipGetErrorString(cudaStatus));
		goto Error;
	}
	// hipDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = hipDeviceSynchronize();
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy the result matrix back to host memory.
	cudaStatus = hipMemcpy(c, dev_c, size * size * sizeof(float), hipMemcpyDeviceToHost);
	if (cudaStatus != hipSuccess) {
		fprintf(stderr, "hipMemcpy failed!");
		goto Error;
	}
Error:
	hipFree(dev_c);
	hipFree(dev_a);
	hipFree(dev_b);
	return cudaStatus;
}
//------------PRACTICA-2------------------------------------------------------------------
/*
int main()
{
const int length = 100;
double a1[length];
double b1[length];
double c1[length];
for (int i = 0; i < length; i++) {
a1[i] = double(i);
b1[i] = double(i) * (double)2;
}
printf("Practica 2");
printf("\n-----------------------------------------------------------------------------------------------\n");
// Add vectors in parallel.
hipError_t cudaStatus1 = addWithCudaDouble(c1, a1, b1, length);
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// hipDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = hipDeviceReset();
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "hipDeviceReset failed!");
return 1;
}
double escalar = 0;
for (int i = 0; i < length; i++) {
escalar += c1[i];
}
//printf("Practica 2");
//printf("\n-----------------------------------------------------------------------------------------------");
printf("\nEscalar(A1,B1): %1f", escalar);
printf("\n\n");
return 0;
}*/
//------------PRACTICA-3------------------------------------------------------------------
/*
int main()
{
const int N = 3;
float* a;
float* b;
float* c;
a = new float[N * N];
b = new float[N * N];
c = new float[N * N];
for (int i = 0; i < N; i++)
for (int j = 0; j < N; j++) {
a[i * N + j] = i + j;
b[i * N + j] = i - j;
}
MatrixMultiplication(a, b, c, N);
//showDiagonalMatrix(c, N);
showMatrix(a, N);
printf("\n");
showMatrix(b, N);
printf("\n");
showMatrix(c, N);
printf("\n");
//printf("Escalar(A1,B1): %1f", escalar);
delete[] a;
delete[] b;
delete[] c;
return 0;
}
*/
//------------PRACTICA-4------------------------------------------------------------------
/*
int main()
{
clock_t start, stop;
const int N = 32;
float* a;
float* b;
float* c;
a = new float[N * N];
b = new float[N * N];
c = new float[N * N];
generateMatrix(a, b, N);
start = clock();
for (int i = 0; i < 1; i++) MatrixMultiplication(a, b, c, N);
stop = clock();
printf("Practica 4");
printf("\n-----------------------------------------------------------------------------------------------\n");
printf("Tiempo secuencial: %f segundos\n",
(float)(stop - start) / CLOCKS_PER_SEC / 1);
getchar();
delete[] a;
delete[] b;
delete[] c;
return 0;
}
*/
//------------PRACTICA-5------------------------------------------------------------------
/*
int main()
{
const int N = 3;
float a[N * N];
float b[N * N];
float c[N * N];
generateMatrix(a, b, N);
//Add vectors in parallel.
hipError_t cudaStatus1 = addWithCudaFloat(c, a, b, N);
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// hipDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = hipDeviceReset();
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "hipDeviceReset failed!");
return 1;
}
showMatrix(c, N);
return 0;
}
*/
//------------PRACTICA-6------------------------------------------------------------------
/*
int main()
{
int laps = 5;
const int N = 32;
float a[N * N];
float b[N * N];
float c[N * N];
generateMatrix(a, b, N);
printf("Practica 6");
printf("\n-----------------------------------------------------------------------------------------------\n");
//Add vectors in parallel.
hipError_t cudaStatus1 = addWithCudaFloatTimes(c, a, b, N, laps);
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// hipDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = hipDeviceReset();
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "hipDeviceReset failed!");
return 1;
}
showMatrix(c, N);
//getchar();
return 0;
}
*/
//------------PRACTICA-7------------------------------------------------------------------
/*
int main()
{
int laps = 10000;
const int N = 3;
float a[N * N];
float b[N * N];
float c[N * N];
generateMatrix(a, b, N);
//Add vectors in parallel.
hipError_t cudaStatus1 = addWithCudaFloatTimes(c, a, b, N, laps);
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// hipDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = hipDeviceReset();
if (cudaStatus1 != hipSuccess) {
fprintf(stderr, "hipDeviceReset failed!");
return 1;
}
showDiagonalMatrix(c, N);
//getchar();
return 0;
}
*/ | 97482bc638d28dd94adc2fc8f2db5ecf7bb2fbc9.cu |
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include <time.h>
cudaError_t addWithCuda(int *c, const int *a, const int *b, unsigned int size);
cudaError_t addWithCudaDouble(double* c, double* a, double* b, unsigned int size);
cudaError_t addWithCudaFloat(float* c, float* a, float* b, unsigned int size);
cudaError_t addWithCudaFloatTimes(float* c, float* a, float* b, unsigned int size, int laps);
cudaError_t addWithCudaFloatTimesBlocks(float* c, float* a, float* b, unsigned int size, int laps);
// Element-wise integer vector addition: c[i] = a[i] + b[i].
// One thread per element; expects a single-block launch of `size` threads.
__global__ void addKernel(int *c, const int *a, const int *b)
{
	int i = threadIdx.x;
	c[i] = a[i] + b[i];
}
// NOTE(review): despite the "add" name this kernel computes the element-wise
// PRODUCT c[i] = a[i] * b[i].  Single-block launch, one thread per element.
__global__ void addKernelDouble(double* c, double* a, double* b)
{
	int i = threadIdx.x;
	c[i] = a[i] * b[i];
}
// Element-wise product (c[i] = a[i] * b[i]) that also prints each thread's
// thread/block coordinates for debugging.  Single-block launch assumed.
// NOTE(review): the printf shows "+" between the operands, but the operation
// performed is multiplication -- the message is misleading.
__global__ void addKernelDoubleShowIndex(double* c, double* a, double* b)
{
	int i = threadIdx.x;
	c[i] = a[i] * b[i];
	printf("H-> [X:%d Y:%d Z:%d] B->[X:%d Y:%d Z:%d] {%1f + %1f}={%1f}\n", threadIdx.x, threadIdx.y, threadIdx.z
		, blockIdx.x, blockIdx.y, blockIdx.z
		, a[i], b[i], c[i]);
}
// Multi-block variant of addKernelDoubleShowIndex: uses the flat global index
// blockIdx.x * blockDim.x + threadIdx.x, so grid*block must equal the element
// count (no bounds guard).  Computes c[i] = a[i] * b[i] and prints the thread
// and block coordinates ("+" in the message is misleading; the op is "*").
__global__ void addKernelDoubleShowIndexBlocks(double* c, double* a, double* b)
{
	int i = blockIdx.x * blockDim.x + threadIdx.x;
	c[i] = a[i] * b[i];
	printf("H-> [X:%d Y:%d Z:%d] B->[X:%d Y:%d Z:%d] {%1f + %1f}={%1f}\n", threadIdx.x, threadIdx.y, threadIdx.z
		, blockIdx.x, blockIdx.y, blockIdx.z
		, a[i], b[i], c[i]);
}
// Single-block N x N matrix multiply: thread (x, y) computes one output
// element c[x*N + y] as the dot product of row x of `a` and column y of `b`.
// Only valid when N*N threads fit in one block (N <= 32 for a 1024-thread
// block limit); there is no bounds guard.
__global__ void multMatrixKernelFloat(float* c, float* a, float* b, int N) {
	int i = threadIdx.x;
	int j = threadIdx.y;
	float sum = 0;
	for (int k = 0; k < N; k++)
		sum += a[i * N + k] * b[k * N + j];
	c[i * N + j] = sum;
	//printf("A-> [X:%d Y:%d] [P:%d] = %f\n", i, j, (i * N + j), b[(i*N+j)]);
}
// Multi-block N x N matrix multiply: global 2-D index (i, j) selects the
// output element.  Requires gridDim * blockDim == N in both dimensions
// (no bounds guard for a ragged tail).
__global__ void multMatrixKernelFloatBlocks(float* c, float* a, float* b, int N) {
	int i = blockDim.x * blockIdx.x + threadIdx.x;
	int j = blockDim.y * blockIdx.y + threadIdx.y;
	float sum = 0;
	for (int k = 0; k < N; k++)
		sum += a[i * N + k] * b[k * N + j];
	c[i * N + j] = sum;
	//printf("A-> [X:%d Y:%d] [P:%d] = %f\n", i, j, (i * N + j), b[(i*N+j)]);
}
// Like multMatrixKernelFloatBlocks, but offsets BOTH block coordinates by
// `blockAdjust`, so a half-sized grid launched repeatedly with different
// offsets covers diagonal tile groups of the output (see
// addWithCudaFloatTimesBlocks).  No bounds guard.
__global__ void multMatrixKernelFloatBlocks2(float* c, float* a, float* b, int N, int blockAdjust) {
	int i = blockDim.x * (blockIdx.x + blockAdjust) + threadIdx.x;
	int j = blockDim.y * (blockIdx.y + blockAdjust) + threadIdx.y;
	float sum = 0;
	for (int k = 0; k < N; k++)
		sum += a[i * N + k] * b[k * N + j];
	c[i * N + j] = sum;
	//printf("A-> [X:%d Y:%d] [P:%d] = %f\n", i, j, (i * N + j), b[(i*N+j)]);
}
// CPU reference implementation of C = A x B for square N x N row-major
// matrices; used to validate the GPU kernels.
void MatrixMultiplication(float* A, float* B, float* C, int N)
{
	for (int i = 0; i < N; i++) {
		const float* rowA = A + i * N;
		float* rowC = C + i * N;
		for (int j = 0; j < N; j++) {
			float dot = 0.0f;
			for (int k = 0; k < N; k++)
				dot += rowA[k] * B[k * N + j];
			rowC[j] = dot;
		}
	}
}
// Initialise test matrices (row-major, N x N): a[i][j] = i + j, b[i][j] = i - j.
void generateMatrix(float* a, float*b, int N) {
	int idx = 0;
	for (int i = 0; i < N; i++) {
		for (int j = 0; j < N; j++, idx++) {
			a[idx] = (float)(i + j);
			b[idx] = (float)(i - j);
		}
	}
}
// Print an N x N row-major matrix to stdout; a newline precedes each row and
// every value is prefixed with " | ".
void showMatrix(float* a, int N){
	for (int i = 0; i < N * N; i++) {
		const char* fmt = (i % N == 0) ? "\n | %f" : " | %f";
		printf(fmt, a[i]);
	}
}
// Print the main diagonal of an N x N row-major matrix to stdout.
void showDiagonalMatrix(float* a, int N) {
	for (int i = 0; i < N; i++)
		printf(" | %f", a[i * N + i]);
}
// Practica 8 driver: builds two 1024 x 1024 matrices, multiplies them on the
// GPU (timed, via addWithCudaFloatTimesBlocks) and prints the diagonal of the
// result.
int main()
{
	printf("Practica 8");
	printf("\n-----------------------------------------------------------------------------------------------");
	int laps = 1;        // number of timed repetitions of the kernel launch
	const int N = 1024;  // matrix dimension (N x N)
	float* a = new float[N * N];
	float* b = new float[N * N];
	float* c = new float[N * N];
	generateMatrix(a, b, N);
	//Add vectors in parallel.
	cudaError_t cudaStatus1 = addWithCudaFloatTimesBlocks(c, a, b, N, laps);
	if (cudaStatus1 != cudaSuccess) {
		fprintf(stderr, "addWithCuda failed!");
		return 1;  // NOTE(review): a/b/c leak on this early return
	}
	// cudaDeviceReset must be called before exiting in order for profiling and
	// tracing tools such as Nsight and Visual Profiler to show complete traces.
	cudaStatus1 = cudaDeviceReset();
	if (cudaStatus1 != cudaSuccess) {
		fprintf(stderr, "cudaDeviceReset failed!");
		return 1;  // NOTE(review): a/b/c leak on this early return
	}
	showDiagonalMatrix(c, N);
	delete[] a;
	delete[] b;
	delete[] c;
	return 0;
}
// Helper function for using CUDA to add vectors in parallel.
// Allocates device buffers for a, b and c (`size` ints each), copies the
// inputs to the GPU, launches addKernel on one block of `size` threads
// (so size must fit in a single block), and copies the sums back into c.
// Returns cudaSuccess on success or the first CUDA error encountered; all
// cleanup funnels through the Error label, so device buffers are freed on
// both success and failure paths.
cudaError_t addWithCuda(int *c, const int *a, const int *b, unsigned int size)
{
	int *dev_a = 0;
	int *dev_b = 0;
	int *dev_c = 0;
	cudaError_t cudaStatus;
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = cudaSetDevice(0);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for three vectors (two input, one output) .
	cudaStatus = cudaMalloc((void**)&dev_c, size * sizeof(int));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	cudaStatus = cudaMalloc((void**)&dev_a, size * sizeof(int));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	cudaStatus = cudaMalloc((void**)&dev_b, size * sizeof(int));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	// Copy input vectors from host memory to GPU buffers.
	cudaStatus = cudaMemcpy(dev_a, a, size * sizeof(int), cudaMemcpyHostToDevice);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
	cudaStatus = cudaMemcpy(dev_b, b, size * sizeof(int), cudaMemcpyHostToDevice);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
	// Launch a kernel on the GPU with one thread for each element.
	addKernel<<<1, size>>>(dev_c, dev_a, dev_b);
	// Check for any errors launching the kernel
	cudaStatus = cudaGetLastError();
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", cudaGetErrorString(cudaStatus));
		goto Error;
	}
	// cudaDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = cudaDeviceSynchronize();
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy output vector from GPU buffer to host memory.
	cudaStatus = cudaMemcpy(c, dev_c, size * sizeof(int), cudaMemcpyDeviceToHost);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
Error:
	cudaFree(dev_c);
	cudaFree(dev_a);
	cudaFree(dev_b);
	return cudaStatus;
}
// Element-wise combine of two double vectors on the GPU.  Despite the "add"
// in the name, the launched kernel (addKernelDoubleShowIndexBlocks) computes
// c[i] = a[i] * b[i] and prints per-thread index information.
// The launch uses 10 blocks x 10 threads, so callers must pass size == 100.
// Returns cudaSuccess on success or the first CUDA error encountered; device
// buffers are always released through the Error label.
// FIX: the launch delimiter was mis-tokenised as ">> >" (not the CUDA
// execution-configuration token ">>>"), which does not parse; corrected.
cudaError_t addWithCudaDouble(double* c, double* a, double* b, unsigned int size)
{
	double* dev_a = 0;
	double* dev_b = 0;
	double* dev_c = 0;
	cudaError_t cudaStatus;
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = cudaSetDevice(0);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for three vectors (two input, one output) .
	cudaStatus = cudaMalloc((void**)&dev_c, size * sizeof(double));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	cudaStatus = cudaMalloc((void**)&dev_a, size * sizeof(double));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	cudaStatus = cudaMalloc((void**)&dev_b, size * sizeof(double));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	// Copy input vectors from host memory to GPU buffers.
	cudaStatus = cudaMemcpy(dev_a, a, size * sizeof(double), cudaMemcpyHostToDevice);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
	cudaStatus = cudaMemcpy(dev_b, b, size * sizeof(double), cudaMemcpyHostToDevice);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
	// Launch a kernel on the GPU with one thread for each element
	// (10 blocks x 10 threads == 100 elements).
	addKernelDoubleShowIndexBlocks<<<10, 10>>>(dev_c, dev_a, dev_b);
	// Check for any errors launching the kernel
	cudaStatus = cudaGetLastError();
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", cudaGetErrorString(cudaStatus));
		goto Error;
	}
	// cudaDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = cudaDeviceSynchronize();
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy output vector from GPU buffer to host memory.
	cudaStatus = cudaMemcpy(c, dev_c, size * sizeof(double), cudaMemcpyDeviceToHost);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
Error:
	cudaFree(dev_c);
	cudaFree(dev_a);
	cudaFree(dev_b);
	return cudaStatus;
}
// Multiplies two size x size float matrices on the GPU with the single-block
// kernel multMatrixKernelFloat and copies the product into c.
// The launch uses one block of size x size threads (numBlocks evaluates to
// (1, 1)), so this only works for size <= 32 on hardware with a 1024-thread
// block limit.  Returns cudaSuccess on success; device buffers are released
// via the Error label.
// NOTE(review): the error-path gotos jump over the dim3 declarations below,
// which have initialisers -- some host compilers reject this; confirm the
// intended toolchain accepts it.
cudaError_t addWithCudaFloat(float* c, float* a, float* b, unsigned int size)
{
	float* dev_a = 0;
	float* dev_b = 0;
	float* dev_c = 0;
	cudaError_t cudaStatus;
	// Choose which GPU to run on, change this on a multi-GPU system.
	cudaStatus = cudaSetDevice(0);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaSetDevice failed! Do you have a CUDA-capable GPU installed?");
		goto Error;
	}
	// Allocate GPU buffers for three vectors (two input, one output) .
	cudaStatus = cudaMalloc((void**)&dev_c, size * size * sizeof(float));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	cudaStatus = cudaMalloc((void**)&dev_a, size * size * sizeof(float));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	cudaStatus = cudaMalloc((void**)&dev_b, size * size * sizeof(float));
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed!");
		goto Error;
	}
	// Copy input vectors from host memory to GPU buffers.
	cudaStatus = cudaMemcpy(dev_a, a, size * size * sizeof(float), cudaMemcpyHostToDevice);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
	cudaStatus = cudaMemcpy(dev_b, b, size * size * sizeof(float), cudaMemcpyHostToDevice);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
	// Launch a kernel on the GPU with one thread for each element.
	dim3 threadsPerBlock(size, size);
	dim3 numBlocks(size / threadsPerBlock.x, size / threadsPerBlock.y);
	multMatrixKernelFloat <<< numBlocks, threadsPerBlock >>> (dev_c, dev_a, dev_b, size);
	// Check for any errors launching the kernel
	cudaStatus = cudaGetLastError();
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "addKernel launch failed: %s\n", cudaGetErrorString(cudaStatus));
		goto Error;
	}
	// cudaDeviceSynchronize waits for the kernel to finish, and returns
	// any errors encountered during the launch.
	cudaStatus = cudaDeviceSynchronize();
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
		goto Error;
	}
	// Copy output vector from GPU buffer to host memory.
	cudaStatus = cudaMemcpy(c, dev_c, size * size * sizeof(float), cudaMemcpyDeviceToHost);
	if (cudaStatus != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy failed!");
		goto Error;
	}
Error:
	cudaFree(dev_c);
	cudaFree(dev_a);
	cudaFree(dev_b);
	return cudaStatus;
}
// Multiplies two size x size float matrices on the GPU `laps` times and prints
// the mean per-launch kernel time in seconds ("Tiempo").
//   c      host output buffer (size*size floats)
//   a, b   host input buffers (size*size floats)
//   size   matrix edge length; it is also used as the block edge, so the
//          launch is only valid while size*size <= 1024 (max threads/block)
//   laps   number of timed launches; must be > 0 (it divides the total time)
// Returns cudaSuccess or the first CUDA error encountered.  Device buffers
// and both timing events are released on every exit path.
cudaError_t addWithCudaFloatTimes(float* c, float* a, float* b, unsigned int size, int laps)
{
    float* dev_a = 0;
    float* dev_b = 0;
    float* dev_c = 0;
    // Everything is declared ahead of the first goto so no error path jumps
    // over an initialization, and the cleanup label sees valid handles.
    float milliseconds = 0;
    cudaEvent_t start = 0, stop = 0;
    dim3 threadsPerBlock;
    dim3 numBlocks;
    cudaError_t cudaStatus;

    // Choose which GPU to run on, change this on a multi-GPU system.
    cudaStatus = cudaSetDevice(0);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaSetDevice failed! Do you have a CUDA-capable GPU installed?");
        goto Error;
    }
    // Allocate GPU buffers for the two input matrices and the output matrix.
    cudaStatus = cudaMalloc((void**)&dev_c, size * size * sizeof(float));
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMalloc failed!");
        goto Error;
    }
    cudaStatus = cudaMalloc((void**)&dev_a, size * size * sizeof(float));
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMalloc failed!");
        goto Error;
    }
    cudaStatus = cudaMalloc((void**)&dev_b, size * size * sizeof(float));
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMalloc failed!");
        goto Error;
    }
    // Copy input matrices from host memory to GPU buffers.
    cudaStatus = cudaMemcpy(dev_a, a, size * size * sizeof(float), cudaMemcpyHostToDevice);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMemcpy failed!");
        goto Error;
    }
    cudaStatus = cudaMemcpy(dev_b, b, size * size * sizeof(float), cudaMemcpyHostToDevice);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMemcpy failed!");
        goto Error;
    }
    // CUDA events bracket the launch loop to time kernel execution; they are
    // destroyed at the Error label on every path (they were previously leaked).
    cudaStatus = cudaEventCreate(&start);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaEventCreate failed!");
        goto Error;
    }
    cudaStatus = cudaEventCreate(&stop);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaEventCreate failed!");
        goto Error;
    }
    // One thread per element: a single size x size block per matrix tile.
    threadsPerBlock = dim3(size, size);
    numBlocks = dim3(size / threadsPerBlock.x, size / threadsPerBlock.y);
    cudaEventRecord(start);
    for (int i = 0; i < laps; i++) {
        multMatrixKernelFloat <<< numBlocks, threadsPerBlock >>> (dev_c, dev_a, dev_b, size);
    }
    cudaEventRecord(stop);
    cudaEventSynchronize(stop);
    cudaEventElapsedTime(&milliseconds, start, stop);
    // Mean time per launch, converted from milliseconds to seconds.
    printf("\nTiempo: %f\n\n", (milliseconds/(float)laps)/(float)1000);
    // Check for any errors launching the kernel.
    cudaStatus = cudaGetLastError();
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "addKernel launch failed: %s\n", cudaGetErrorString(cudaStatus));
        goto Error;
    }
    // cudaDeviceSynchronize waits for the kernel to finish, and returns
    // any errors encountered during the execution.
    cudaStatus = cudaDeviceSynchronize();
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
        goto Error;
    }
    // Copy the output matrix from the GPU buffer back to host memory.
    cudaStatus = cudaMemcpy(c, dev_c, size * size * sizeof(float), cudaMemcpyDeviceToHost);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMemcpy failed!");
        goto Error;
    }
Error:
    // Single cleanup path; cudaFree tolerates null pointers, and the events
    // are only destroyed if they were actually created.
    if (start) cudaEventDestroy(start);
    if (stop) cudaEventDestroy(stop);
    cudaFree(dev_c);
    cudaFree(dev_a);
    cudaFree(dev_b);
    return cudaStatus;
}
// Multiplies two size x size float matrices on the GPU `laps` times using a
// 32x32-thread blocked kernel, timing the launches with CUDA events and
// printing the mean per-lap time in seconds ("Tiempo").
//   c      host output buffer (size*size floats)
//   a, b   host input buffers (size*size floats)
//   size   matrix edge length; the grid uses size/64 blocks per axis, so size
//          should be a positive multiple of 64 or zero blocks are launched
//   laps   number of timed passes; must be > 0 (it divides the total time)
// Returns cudaSuccess or the first CUDA error encountered.  Device buffers
// and both timing events are released on every exit path.
cudaError_t addWithCudaFloatTimesBlocks(float* c, float* a, float* b, unsigned int size, int laps)
{
    float* dev_a = 0;
    float* dev_b = 0;
    float* dev_c = 0;
    int threads = 32;
    // Everything is declared ahead of the first goto so no error path jumps
    // over an initialization, and the cleanup label sees valid handles.
    float milliseconds = 0;
    cudaEvent_t start = 0, stop = 0;
    dim3 threadsPerBlock;
    dim3 numBlocks;
    cudaError_t cudaStatus;

    // Choose which GPU to run on, change this on a multi-GPU system.
    cudaStatus = cudaSetDevice(0);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaSetDevice failed! Do you have a CUDA-capable GPU installed?");
        goto Error;
    }
    // Allocate GPU buffers for the two input matrices and the output matrix.
    cudaStatus = cudaMalloc((void**)&dev_c, size * size * sizeof(float));
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMalloc failed!");
        goto Error;
    }
    cudaStatus = cudaMalloc((void**)&dev_a, size * size * sizeof(float));
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMalloc failed!");
        goto Error;
    }
    cudaStatus = cudaMalloc((void**)&dev_b, size * size * sizeof(float));
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMalloc failed!");
        goto Error;
    }
    // Copy input matrices from host memory to GPU buffers.
    cudaStatus = cudaMemcpy(dev_a, a, size * size * sizeof(float), cudaMemcpyHostToDevice);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMemcpy failed!");
        goto Error;
    }
    cudaStatus = cudaMemcpy(dev_b, b, size * size * sizeof(float), cudaMemcpyHostToDevice);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMemcpy failed!");
        goto Error;
    }
    // CUDA events bracket the launch loop to time kernel execution; they are
    // destroyed at the Error label on every path (they were previously leaked).
    cudaStatus = cudaEventCreate(&start);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaEventCreate failed!");
        goto Error;
    }
    cudaStatus = cudaEventCreate(&stop);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaEventCreate failed!");
        goto Error;
    }
    // 32x32 threads per block, size/64 blocks per axis; presumably each block
    // handles a 2x2 group of 32x32 tiles selected by the kernel's `e` offset
    // (TODO confirm against multMatrixKernelFloatBlocks2).
    threadsPerBlock = dim3(threads, threads, 1);
    numBlocks = dim3(size / threadsPerBlock.x / 2, size / threadsPerBlock.y / 2, 1);
    cudaEventRecord(start);
    for (int i = 0; i < laps; i++) {
        // Two launches per lap: sub-tile offsets e = 0 and e = 16.
        for (int e = 0; e < 32; e += 16) {
            multMatrixKernelFloatBlocks2 <<< numBlocks, threadsPerBlock >>> (dev_c, dev_a, dev_b, size, e);
        }
    }
    cudaEventRecord(stop);
    cudaEventSynchronize(stop);
    cudaEventElapsedTime(&milliseconds, start, stop);
    // Mean time per lap, converted from milliseconds to seconds.
    printf("\nTiempo: %f\n\n", (milliseconds / (float)laps)/(float)1000);
    // Check for any errors launching the kernel.
    cudaStatus = cudaGetLastError();
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "addKernel launch failed: %s\n", cudaGetErrorString(cudaStatus));
        goto Error;
    }
    // cudaDeviceSynchronize waits for the kernel to finish, and returns
    // any errors encountered during the execution.
    cudaStatus = cudaDeviceSynchronize();
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
        goto Error;
    }
    // Copy the output matrix from the GPU buffer back to host memory.
    cudaStatus = cudaMemcpy(c, dev_c, size * size * sizeof(float), cudaMemcpyDeviceToHost);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaMemcpy failed!");
        goto Error;
    }
Error:
    // Single cleanup path; cudaFree tolerates null pointers, and the events
    // are only destroyed if they were actually created.
    if (start) cudaEventDestroy(start);
    if (stop) cudaEventDestroy(stop);
    cudaFree(dev_c);
    cudaFree(dev_a);
    cudaFree(dev_b);
    return cudaStatus;
}
//------------PRACTICA-2------------------------------------------------------------------
/*
int main()
{
const int length = 100;
double a1[length];
double b1[length];
double c1[length];
for (int i = 0; i < length; i++) {
a1[i] = double(i);
b1[i] = double(i) * (double)2;
}
printf("Practica 2");
printf("\n-----------------------------------------------------------------------------------------------\n");
// Add vectors in parallel.
cudaError_t cudaStatus1 = addWithCudaDouble(c1, a1, b1, length);
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// cudaDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = cudaDeviceReset();
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "cudaDeviceReset failed!");
return 1;
}
double escalar = 0;
for (int i = 0; i < length; i++) {
escalar += c1[i];
}
//printf("Practica 2");
//printf("\n-----------------------------------------------------------------------------------------------");
printf("\nEscalar(A1,B1): %1f", escalar);
printf("\n\n");
return 0;
}*/
//------------PRACTICA-3------------------------------------------------------------------
/*
int main()
{
const int N = 3;
float* a;
float* b;
float* c;
a = new float[N * N];
b = new float[N * N];
c = new float[N * N];
for (int i = 0; i < N; i++)
for (int j = 0; j < N; j++) {
a[i * N + j] = i + j;
b[i * N + j] = i - j;
}
MatrixMultiplication(a, b, c, N);
//showDiagonalMatrix(c, N);
showMatrix(a, N);
printf("\n");
showMatrix(b, N);
printf("\n");
showMatrix(c, N);
printf("\n");
//printf("Escalar(A1,B1): %1f", escalar);
delete[] a;
delete[] b;
delete[] c;
return 0;
}
*/
//------------PRACTICA-4------------------------------------------------------------------
/*
int main()
{
clock_t start, stop;
const int N = 32;
float* a;
float* b;
float* c;
a = new float[N * N];
b = new float[N * N];
c = new float[N * N];
generateMatrix(a, b, N);
start = clock();
for (int i = 0; i < 1; i++) MatrixMultiplication(a, b, c, N);
stop = clock();
printf("Practica 4");
printf("\n-----------------------------------------------------------------------------------------------\n");
printf("Tiempo secuencial: %f segundos\n",
(float)(stop - start) / CLOCKS_PER_SEC / 1);
getchar();
delete[] a;
delete[] b;
delete[] c;
return 0;
}
*/
//------------PRACTICA-5------------------------------------------------------------------
/*
int main()
{
const int N = 3;
float a[N * N];
float b[N * N];
float c[N * N];
generateMatrix(a, b, N);
//Add vectors in parallel.
cudaError_t cudaStatus1 = addWithCudaFloat(c, a, b, N);
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// cudaDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = cudaDeviceReset();
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "cudaDeviceReset failed!");
return 1;
}
showMatrix(c, N);
return 0;
}
*/
//------------PRACTICA-6------------------------------------------------------------------
/*
int main()
{
int laps = 5;
const int N = 32;
float a[N * N];
float b[N * N];
float c[N * N];
generateMatrix(a, b, N);
printf("Practica 6");
printf("\n-----------------------------------------------------------------------------------------------\n");
//Add vectors in parallel.
cudaError_t cudaStatus1 = addWithCudaFloatTimes(c, a, b, N, laps);
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// cudaDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = cudaDeviceReset();
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "cudaDeviceReset failed!");
return 1;
}
showMatrix(c, N);
//getchar();
return 0;
}
*/
//------------PRACTICA-7------------------------------------------------------------------
/*
int main()
{
int laps = 10000;
const int N = 3;
float a[N * N];
float b[N * N];
float c[N * N];
generateMatrix(a, b, N);
//Add vectors in parallel.
cudaError_t cudaStatus1 = addWithCudaFloatTimes(c, a, b, N, laps);
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "addWithCuda failed!");
return 1;
}
// cudaDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus1 = cudaDeviceReset();
if (cudaStatus1 != cudaSuccess) {
fprintf(stderr, "cudaDeviceReset failed!");
return 1;
}
showDiagonalMatrix(c, N);
//getchar();
return 0;
}
*/ |
656b713e5b24482fe8149ad853f155520b4c65c2.hip | // !!! This is a file automatically generated by hipify!!!
#include <stdio.h>
#include <string.h>
#include <stdarg.h>
#ifdef UNIX
#include <stdint.h>
#include <unistd.h>
#endif
#include "mex.h"
#include "mpi.h"
// CUDA
#include "hip/hip_runtime.h"
#include "hip/hip_runtime.h"
#include "rocblas.h"
#include "cudaCommon.h"
#include "cudaFluidStep.h"
#include "fluidMethod.h"
/* THIS FUNCTION:
directionalMaxFinder has three different behaviors depending on how it is called.
m = directionalMaxFinder(array) will calculate the global maximum of array
c = directionalMaxFinder(a1, a2, direct) will find the max of |a1(r)+a2(r)| in the
'direct' direction (1=X, 2=Y, 3=Z)
c = directionalMaxFinder(rho, c_s, px, py, pz) will specifically calculate the x direction
CFL limiting speed, max(|px/rho| + c_s)
*/
template <int simulationDimension, geometryType_t shape, FluidMethods algo>
__global__ void cukern_CFLtimestep(double *fluid, double *cs, double *out, int nx, int ntotal, int64_t slabpitch);
#define BLOCKDIM 8
#define GLOBAL_BLOCKDIM 128
__constant__ __device__ double geoParams[5];
#define GEO_DX geoParams[0]
#define GEO_DY geoParams[1]
#define GEO_DZ geoParams[2]
#define GEO_RIN geoParams[3]
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[]) {
// Form of call: tau = cflTimestep(FluidManager, soundspeed gpu array, GeometryManager)
MGArray fluid[5];
GeometryParams geom;
MGArray sndspeed;
// At least 2 arguments expected
// Input and result
if((nlhs != 1) || (nrhs != 4))
mexErrMsgTxt("Call must be tau = cflTimestep(FluidManager, soundspeed gpu array, GeometryManager, cfd_method);");
CHECK_CUDA_ERROR("entering directionalMaxFinder");
int i;
int sub[6];
int worked;
worked = MGA_accessFluidCanister(prhs[0], 0, &fluid[0]);
if(CHECK_IMOGEN_ERROR(worked) != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }
worked = MGA_accessMatlabArrays(prhs, 1, 1, &sndspeed);
if(CHECK_IMOGEN_ERROR(worked) != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }
geom = accessMatlabGeometryClass(prhs[2]);
int meth = (int)*mxGetPr(prhs[3]);
double geoarray[5];
geoarray[0] = geom.h[0];
geoarray[1] = geom.h[1];
geoarray[2] = geom.h[2];
geoarray[3] = geom.Rinner;
const mxArray *gdr = mxGetProperty(prhs[2], 0, "globalDomainRez");
double *globrez = mxGetPr(gdr);
dim3 blocksize, gridsize;
blocksize.x = GLOBAL_BLOCKDIM; blocksize.y = blocksize.z = 1;
// Launches enough blocks to fully occupy the GPU
gridsize.x = 128;
gridsize.y = gridsize.z =1;
// Allocate enough pinned memory to hold results
double *blkA[fluid->nGPUs];
int hblockElements = gridsize.x;
int spacedim = 0;
if(globrez[1] > 1) spacedim = 1;
if(globrez[2] > 1) spacedim = 2;
int gt = 0;
if(geom.shape == CYLINDRICAL) gt = 1;
int ctype = spacedim + 3*(gt + 2*(meth-1)); // value in 0..17
int numBlocks[fluid->nGPUs];
for(i = 0; i < fluid->nGPUs; i++) {
hipSetDevice(fluid->deviceID[i]);
CHECK_CUDA_ERROR("hipSetDevice()");
hipHostMalloc((void **)&blkA[i], hblockElements * sizeof(double));
CHECK_CUDA_ERROR("CFL malloc doubles");
hipMemcpyToSymbol(geoParams, &geoarray[0], 5*sizeof(double), 0, hipMemcpyHostToDevice);
if(CHECK_CUDA_ERROR("cfl const memcpy") != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }
calcPartitionExtent(&fluid[0], i, &sub[0]);
gridsize.x = ROUNDUPTO(fluid[0].partNumel[i], blocksize.x) / blocksize.x;
if(gridsize.x > 128) gridsize.x = 128;
numBlocks[i] = gridsize.x;
switch(ctype) {
case 0: hipLaunchKernelGGL(( cukern_CFLtimestep<1, SQUARE, METHOD_HLL >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 1: hipLaunchKernelGGL(( cukern_CFLtimestep<2, SQUARE, METHOD_HLL >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 2: hipLaunchKernelGGL(( cukern_CFLtimestep<3, SQUARE, METHOD_HLL >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 3: hipLaunchKernelGGL(( cukern_CFLtimestep<1, CYLINDRICAL, METHOD_HLL >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 4: hipLaunchKernelGGL(( cukern_CFLtimestep<2, CYLINDRICAL, METHOD_HLL >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 5: hipLaunchKernelGGL(( cukern_CFLtimestep<3, CYLINDRICAL, METHOD_HLL >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 6: hipLaunchKernelGGL(( cukern_CFLtimestep<1, SQUARE, METHOD_HLL >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 7: hipLaunchKernelGGL(( cukern_CFLtimestep<2, SQUARE, METHOD_HLLC >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 8: hipLaunchKernelGGL(( cukern_CFLtimestep<3, SQUARE, METHOD_HLLC >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 9: hipLaunchKernelGGL(( cukern_CFLtimestep<1, CYLINDRICAL, METHOD_HLLC >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 10:hipLaunchKernelGGL(( cukern_CFLtimestep<2, CYLINDRICAL, METHOD_HLLC >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 11:hipLaunchKernelGGL(( cukern_CFLtimestep<3, CYLINDRICAL, METHOD_HLLC >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 12:hipLaunchKernelGGL(( cukern_CFLtimestep<1, SQUARE, METHOD_HLLC >), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 13:hipLaunchKernelGGL(( cukern_CFLtimestep<2, SQUARE, METHOD_XINJIN>), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 14:hipLaunchKernelGGL(( cukern_CFLtimestep<3, SQUARE, METHOD_XINJIN>), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 15:hipLaunchKernelGGL(( cukern_CFLtimestep<1, CYLINDRICAL, METHOD_XINJIN>), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 16:hipLaunchKernelGGL(( cukern_CFLtimestep<2, CYLINDRICAL, METHOD_XINJIN>), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
case 17:hipLaunchKernelGGL(( cukern_CFLtimestep<3, CYLINDRICAL, METHOD_XINJIN>), dim3(gridsize), dim3(blocksize), 0, 0, fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
default:
DROP_MEX_ERROR("cflTimestep was passed a 4th argument (method) which was not 1 (hll), 2 (hllc) or 3 (xin/jin).");
break;
}
CHECK_CUDA_LAUNCH_ERROR(blocksize, gridsize, &fluid[0], i, "CFL max finder for Riemann solvers");
}
double tmin = 1e38;
int j;
for(i = 0; i < fluid->nGPUs; i++) {
hipSetDevice(fluid->deviceID[i]);
hipDeviceSynchronize();
for(j = 0; j < numBlocks[i]; j++) {
tmin = (tmin < blkA[i][j]) ? tmin : blkA[i][j];
}
hipHostFree(blkA[i]);
if(CHECK_CUDA_ERROR("freeing blkA") != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }
}
double trueMin;
MPI_Allreduce((void *)&tmin, (void *)&trueMin, 1, MPI_DOUBLE, MPI_MIN, MPI_COMM_WORLD);
mwSize outputDims[2];
outputDims[0] = 1;
outputDims[1] = 1;
plhs[0] = mxCreateNumericArray (2, outputDims, mxDOUBLE_CLASS, mxREAL);
double *timeStep = mxGetPr(plhs[0]);
if((meth == 1) || (meth == 2)) { // HLL or HLLC
trueMin = trueMin / 2;
// Waves cannot be let to go more than HALF a cell: otherwise in principle two waves could collide,
// and emit a fast-moving signal that might propagate back to the fluxing interface.
// The method is, however, likely to be stable up to a twice this timestep. But likely is NOT good enough.
}
#ifndef USE_SSPRK
trueMin = trueMin / 2;
// If we are using explicit midpoint, the timestep must be halved again to remain TVD
#endif
timeStep[0] = trueMin;
}
// Magnitude of the momentum vector stored in the fluid slab array at `base`:
// slab 2 holds p_x (or p_r), slab 3 p_y, slab 4 p_z, with `pitch` doubles
// between consecutive slabs.  For dimension == 1 this is just |p_x|.
template <int dimension>
__device__ __inline__ double getMagnitudeMomentum(double *base, int64_t pitch)
{
	double px = base[2*pitch]; // first momentum component (x or radial)
	if(dimension == 1) return fabs(px);

	double py = base[3*pitch];
	double magsq = px*px + py*py;
	if(dimension > 2) {
		double pz = base[4*pitch];
		magsq += pz*pz;
	}
	return sqrt(magsq);
}
// 3 dims x 2 shapes x 3 algorithms = 18 kernels total
// Grid-stride min-reduction: each thread computes the local CFL bound
// dt = (cell width)/(max signal speed) for every cell it visits, keeps the
// running minimum in a register, then the block min-reduces through shared
// memory and writes one partial minimum per block to out[blockIdx.x].  The
// host side reduces across blocks, GPUs and MPI ranks afterwards.
//
//   fluid     - base of the slab array: slab 0 = mass density, slabs 2..4 =
//               momentum components (as read by getMagnitudeMomentum);
//               adjacent slabs are `slabpitch` doubles apart
//   cs        - per-cell sound speed, indexed like a single slab
//   out       - receives gridDim.x per-block partial minima
//   nx        - x extent of the partition; (x % nx) recovers the radial cell
//               index used for the cylindrical r*dtheta transverse spacing
//   ntotal    - number of cells in this partition
//   slabpitch - slab-to-slab stride in doubles (callers pass byte pitch / 8)
// Cell spacings dx/dy/dz and the inner radius come from __constant__ geoParams.
template <int simulationDimension, geometryType_t shape, FluidMethods algo>
__global__ void cukern_CFLtimestep(double *fluid, double *cs, double *out, int nx, int ntotal, int64_t slabpitch)
{
	unsigned int tix = threadIdx.x;
	int x = blockIdx.x * blockDim.x + tix; // address
	int blockhop = blockDim.x * gridDim.x; // stepsize
	__shared__ double dtLimit[GLOBAL_BLOCKDIM];
	double u, v, w;
	double localTmin = 1e37;
	// Seed the shared slot BEFORE the early-out below, so a thread that exits
	// still leaves a harmless sentinel for the block reduction to read.
	dtLimit[tix] = 1e37;
	// NOTE(review): when ntotal is not a multiple of the thread count, part of
	// a block returns here while the rest later calls __syncthreads(); that is
	// formally undefined although it works on current hardware - confirm.
	if(x >= ntotal) return; // This is unlikely but we may get a stupid-small resolution
	fluid += x; // compute base offset
	cs += x;
	if((algo == METHOD_HLL) || (algo == METHOD_HLLC)) {
		if(shape == SQUARE) { // compute h once
			// v = smallest cell dimension; loop-invariant on a uniform grid
			v = GEO_DX;
			if(simulationDimension > 1) { if(GEO_DY < v) v = GEO_DY; }
			if(simulationDimension > 2) { if(GEO_DZ < v) v = GEO_DZ; }
		}
		if(shape == CYLINDRICAL) { // Compute what we can compute just once
			// only dx vs dz here; r*dtheta varies per cell and is folded in below
			v = GEO_DX;
			if(simulationDimension == 3) v = (v < GEO_DZ) ? v : GEO_DZ;
		}
	}
	while(x < ntotal) {
		if((algo == METHOD_HLL) || (algo == METHOD_HLLC)) {
			// get max signal speed
			u = getMagnitudeMomentum<simulationDimension>(fluid, slabpitch) / fluid[0] + cs[0]; // |v| + c
			// Identify local constraint on dt < dx / c_signal
			if(shape == SQUARE) {
				u = v / u;
			}
			if(shape == CYLINDRICAL) {
				w = (GEO_RIN + (x % nx) *GEO_DX)*GEO_DY; // r dtheta changes with r...
				w = (w < v) ? w : v;
				u = w / u;
			}
		}
		if(algo == METHOD_XINJIN) {
			double rho = fluid[0];
			// Xin/Jin constrains each direction separately: dt_i = h_i / (|v_i| + c)
			u = GEO_DX / ( fabs(fluid[2*slabpitch])/rho + cs[0] );
			if(simulationDimension > 1) {
				if(shape == SQUARE) {
					v = GEO_DY / ( fabs(fluid[3*slabpitch])/rho + cs[0] );
				}
				if(shape == CYLINDRICAL){
					// transverse spacing is r*dtheta at this cell's radius
					v = (GEO_RIN + (x % nx)*GEO_DX)*GEO_DY / ( fabs(fluid[3*slabpitch])/rho + cs[0] );
				}
				u = (u < v) ? u : v;
			}
			if(simulationDimension > 2) {
				v = GEO_DZ / ( fabs(fluid[4*slabpitch])/rho + cs[0] );
				u = (u < v) ? u : v;
			}
		}
		// Each thread keeps running track of minimum dt
		localTmin = (u < localTmin) ? u : localTmin;
		fluid += blockhop;
		cs += blockhop;
		x += blockhop; // skip the first block since we've already done it.
	}
	// Publish this thread's minimum and tree-reduce the block down to 32
	// entries; retired threads just return (their slots hold valid values).
	dtLimit[tix] = localTmin;
	__syncthreads();
	x = GLOBAL_BLOCKDIM / 2;
	while(x > 16) {
		if(tix >= x) return;
		__syncthreads();
		if(dtLimit[tix+x] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+x]; }
		x=x/2;
	}
	if(tix >= 16) return;
	// We have one halfwarp (16 threads) remaining, proceed synchronously
	// cuda-memcheck --racecheck whines bitterly about this but because of warp synchronicity
	// there is no RAW problem.
	// NOTE(review): this assumes implicit warp-lockstep execution; on NVIDIA
	// Volta+ (independent thread scheduling) this tail would need __syncwarp()
	// or volatile shared accesses - verify the target architectures.
	if(dtLimit[tix+16] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+16]; } if(tix >= 8) return;
	//__syncthreads();
	if(dtLimit[tix+8] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+8 ]; } if(tix >= 4) return;
	//__syncthreads();
	if(dtLimit[tix+4] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+4 ]; } if(tix >= 2) return;
	//__syncthreads();
	if(dtLimit[tix+2] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+2 ]; } if(tix) return;
	//__syncthreads();
	// Thread 0 writes the block's minimum to the per-block output slot.
	out[blockIdx.x] = (dtLimit[1] < dtLimit[0]) ? dtLimit[1] : dtLimit[0];
}
| 656b713e5b24482fe8149ad853f155520b4c65c2.cu | #include <stdio.h>
#include <string.h>
#include <stdarg.h>
#ifdef UNIX
#include <stdint.h>
#include <unistd.h>
#endif
#include "mex.h"
#include "mpi.h"
// CUDA
#include "cuda.h"
#include "cuda_runtime.h"
#include "cublas.h"
#include "cudaCommon.h"
#include "cudaFluidStep.h"
#include "fluidMethod.h"
/* THIS FUNCTION:
directionalMaxFinder has three different behaviors depending on how it is called.
m = directionalMaxFinder(array) will calculate the global maximum of array
c = directionalMaxFinder(a1, a2, direct) will find the max of |a1(r)+a2(r)| in the
'direct' direction (1=X, 2=Y, 3=Z)
c = directionalMaxFinder(rho, c_s, px, py, pz) will specifically calculate the x direction
CFL limiting speed, max(|px/rho| + c_s)
*/
template <int simulationDimension, geometryType_t shape, FluidMethods algo>
__global__ void cukern_CFLtimestep(double *fluid, double *cs, double *out, int nx, int ntotal, int64_t slabpitch);
#define BLOCKDIM 8
#define GLOBAL_BLOCKDIM 128
__constant__ __device__ double geoParams[5];
#define GEO_DX geoParams[0]
#define GEO_DY geoParams[1]
#define GEO_DZ geoParams[2]
#define GEO_RIN geoParams[3]
// MEX entry point.  Form of call:
//   tau = cflTimestep(FluidManager, soundspeed gpu array, GeometryManager, cfd_method)
// Computes the CFL-limited timestep for the given fluid state: a per-GPU
// reduction kernel produces per-block minima of (cell width)/(signal speed),
// the host reduces across blocks and GPUs, and MPI_Allreduce reduces across
// ranks.  The result is returned as a 1x1 double, halved once for HLL/HLLC
// (half-cell wave travel) and once more unless USE_SSPRK is defined.
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[]) {
	MGArray fluid[5];
	GeometryParams geom;
	MGArray sndspeed;

	// Exactly one output and four inputs are required.
	if((nlhs != 1) || (nrhs != 4))
		mexErrMsgTxt("Call must be tau = cflTimestep(FluidManager, soundspeed gpu array, GeometryManager, cfd_method);");

	CHECK_CUDA_ERROR("entering directionalMaxFinder");

	int i;
	int sub[6];
	int worked;

	worked = MGA_accessFluidCanister(prhs[0], 0, &fluid[0]);
	if(CHECK_IMOGEN_ERROR(worked) != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }
	worked = MGA_accessMatlabArrays(prhs, 1, 1, &sndspeed);
	if(CHECK_IMOGEN_ERROR(worked) != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }

	geom = accessMatlabGeometryClass(prhs[2]);
	// cfd_method: 1 = HLL, 2 = HLLC, 3 = Xin/Jin relaxation.
	int meth = (int)*mxGetPr(prhs[3]);

	// Pack grid geometry for upload into __constant__ geoParams[5].
	double geoarray[5];
	geoarray[0] = geom.h[0];
	geoarray[1] = geom.h[1];
	geoarray[2] = geom.h[2];
	geoarray[3] = geom.Rinner;
	geoarray[4] = 0; // spare slot; zeroed so the 5-double upload below copies initialized memory

	const mxArray *gdr = mxGetProperty(prhs[2], 0, "globalDomainRez");
	double *globrez = mxGetPr(gdr);

	dim3 blocksize, gridsize;
	blocksize.x = GLOBAL_BLOCKDIM; blocksize.y = blocksize.z = 1;
	// Launch enough blocks to fully occupy the GPU (capped at 128 below).
	gridsize.x = 128;
	gridsize.y = gridsize.z = 1;

	// Pinned host buffers receive one partial minimum per block, per GPU.
	double *blkA[fluid->nGPUs];
	int hblockElements = gridsize.x;

	// Dimensionality of the global domain: 0 -> 1D, 1 -> 2D, 2 -> 3D.
	int spacedim = 0;
	if(globrez[1] > 1) spacedim = 1;
	if(globrez[2] > 1) spacedim = 2;
	int gt = 0;
	if(geom.shape == CYLINDRICAL) gt = 1;

	// Select one of the 18 kernel instantiations.  ctype = spacedim + 3*gt +
	// 6*(meth-1): 0..5 are HLL, 6..11 HLLC, 12..17 Xin/Jin, and within each
	// method the order is (1D,2D,3D square), then (1D,2D,3D cylindrical).
	int ctype = spacedim + 3*(gt + 2*(meth-1)); // value in 0..17

	int numBlocks[fluid->nGPUs];
	for(i = 0; i < fluid->nGPUs; i++) {
		cudaSetDevice(fluid->deviceID[i]);
		CHECK_CUDA_ERROR("cudaSetDevice()");
		cudaMallocHost((void **)&blkA[i], hblockElements * sizeof(double));
		CHECK_CUDA_ERROR("CFL malloc doubles");
		cudaMemcpyToSymbol(geoParams, &geoarray[0], 5*sizeof(double), 0, cudaMemcpyHostToDevice);
		if(CHECK_CUDA_ERROR("cfl const memcpy") != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }

		calcPartitionExtent(&fluid[0], i, &sub[0]);

		// One thread per cell, rounded up, capped at 128 blocks; the kernel's
		// grid-stride loop covers any remainder.
		gridsize.x = ROUNDUPTO(fluid[0].partNumel[i], blocksize.x) / blocksize.x;
		if(gridsize.x > 128) gridsize.x = 128;
		numBlocks[i] = gridsize.x;

		switch(ctype) {
		// meth == 1: HLL
		case 0: cukern_CFLtimestep<1, SQUARE,      METHOD_HLL   ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 1: cukern_CFLtimestep<2, SQUARE,      METHOD_HLL   ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 2: cukern_CFLtimestep<3, SQUARE,      METHOD_HLL   ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 3: cukern_CFLtimestep<1, CYLINDRICAL, METHOD_HLL   ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 4: cukern_CFLtimestep<2, CYLINDRICAL, METHOD_HLL   ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 5: cukern_CFLtimestep<3, CYLINDRICAL, METHOD_HLL   ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		// meth == 2: HLLC (case 6 formerly instantiated METHOD_HLL by mistake)
		case 6: cukern_CFLtimestep<1, SQUARE,      METHOD_HLLC  ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 7: cukern_CFLtimestep<2, SQUARE,      METHOD_HLLC  ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 8: cukern_CFLtimestep<3, SQUARE,      METHOD_HLLC  ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 9: cukern_CFLtimestep<1, CYLINDRICAL, METHOD_HLLC  ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 10: cukern_CFLtimestep<2, CYLINDRICAL, METHOD_HLLC  ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 11: cukern_CFLtimestep<3, CYLINDRICAL, METHOD_HLLC  ><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		// meth == 3: Xin/Jin (case 12 formerly instantiated METHOD_HLLC by mistake)
		case 12: cukern_CFLtimestep<1, SQUARE,      METHOD_XINJIN><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 13: cukern_CFLtimestep<2, SQUARE,      METHOD_XINJIN><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 14: cukern_CFLtimestep<3, SQUARE,      METHOD_XINJIN><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 15: cukern_CFLtimestep<1, CYLINDRICAL, METHOD_XINJIN><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 16: cukern_CFLtimestep<2, CYLINDRICAL, METHOD_XINJIN><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		case 17: cukern_CFLtimestep<3, CYLINDRICAL, METHOD_XINJIN><<<gridsize, blocksize>>>(fluid[0].devicePtr[i], sndspeed.devicePtr[i], blkA[i], sub[3], fluid[0].partNumel[i], fluid[0].slabPitch[i] / 8); break;
		default:
			DROP_MEX_ERROR("cflTimestep was passed a 4th argument (method) which was not 1 (hll), 2 (hllc) or 3 (xin/jin).");
			break;
		}

		CHECK_CUDA_LAUNCH_ERROR(blocksize, gridsize, &fluid[0], i, "CFL max finder for Riemann solvers");
	}

	// Host-side reduction over every block of every GPU.
	double tmin = 1e38;
	int j;
	for(i = 0; i < fluid->nGPUs; i++) {
		cudaSetDevice(fluid->deviceID[i]);
		cudaDeviceSynchronize();
		for(j = 0; j < numBlocks[i]; j++) {
			tmin = (tmin < blkA[i][j]) ? tmin : blkA[i][j];
		}
		cudaFreeHost(blkA[i]);
		if(CHECK_CUDA_ERROR("freeing blkA") != SUCCESSFUL) { mexErrMsgTxt("Dumping"); }
	}

	// Reduce over all MPI ranks so every rank agrees on the global timestep.
	double trueMin;
	MPI_Allreduce((void *)&tmin, (void *)&trueMin, 1, MPI_DOUBLE, MPI_MIN, MPI_COMM_WORLD);

	mwSize outputDims[2];
	outputDims[0] = 1;
	outputDims[1] = 1;
	plhs[0] = mxCreateNumericArray (2, outputDims, mxDOUBLE_CLASS, mxREAL);
	double *timeStep = mxGetPr(plhs[0]);

	if((meth == 1) || (meth == 2)) { // HLL or HLLC
		trueMin = trueMin / 2;
		// Waves cannot be let to go more than HALF a cell: otherwise in principle two waves could collide,
		// and emit a fast-moving signal that might propagate back to the fluxing interface.
		// The method is, however, likely to be stable up to twice this timestep. But likely is NOT good enough.
	}

#ifndef USE_SSPRK
	trueMin = trueMin / 2;
	// If we are using explicit midpoint, the timestep must be halved again to remain TVD
#endif

	timeStep[0] = trueMin;
}
// Magnitude of the momentum vector for one cell of the fluid slab array.
// Slab 2 holds the first momentum component (x or radial), slab 3 the second,
// slab 4 the third; slabs are `pitch` doubles apart.  1D reduces to |p|.
template <int dimension>
__device__ __inline__ double getMagnitudeMomentum(double *base, int64_t pitch)
{
	const double p1 = base[2*pitch];
	if(dimension == 1) {
		return fabs(p1);
	}
	const double p2 = base[3*pitch];
	double sumsq = p1*p1 + p2*p2;
	if(dimension > 2) {
		const double p3 = base[4*pitch];
		sumsq += p3*p3;
	}
	return sqrt(sumsq);
}
// 3 dims x 2 shapes x 3 algorithms = 18 kernels total
//
// Per-cell CFL timestep limiter.  Each thread walks the arrays with a
// grid-stride loop, computing a local dt bound (cell size / signal speed)
// for every cell it visits, then the block reduces its per-thread minima in
// shared memory and writes one minimum per block to out[blockIdx.x].
// The host side reduces across blocks afterwards.
// GEO_DX/GEO_DY/GEO_DZ/GEO_RIN and GLOBAL_BLOCKDIM are project-defined
// constants (declared elsewhere in this file).
template <int simulationDimension, geometryType_t shape, FluidMethods algo>
__global__ void cukern_CFLtimestep(double *fluid, double *cs, double *out, int nx, int ntotal, int64_t slabpitch)
{
unsigned int tix = threadIdx.x;
int x = blockIdx.x * blockDim.x + tix; // address
int blockhop = blockDim.x * gridDim.x; // stepsize
__shared__ double dtLimit[GLOBAL_BLOCKDIM];
double u, v, w;
double localTmin = 1e37;
// Seed the shared slot first so threads that bail out below still leave a
// harmless sentinel for the reduction to read.
dtLimit[tix] = 1e37;
// NOTE(review): threads that return here never reach the __syncthreads()
// calls below; that is divergent-barrier territory unless whole blocks
// exit together (ntotal < gridDim.x*blockDim.x only at tiny resolutions)
// — confirm the launch config guarantees this.
if(x >= ntotal) return; // This is unlikely but we may get a stupid-small resolution
fluid += x; // compute base offset
cs += x;
if((algo == METHOD_HLL) || (algo == METHOD_HLLC)) {
if(shape == SQUARE) { // compute h once
v = GEO_DX;
if(simulationDimension > 1) { if(GEO_DY < v) v = GEO_DY; }
if(simulationDimension > 2) { if(GEO_DZ < v) v = GEO_DZ; }
}
if(shape == CYLINDRICAL) { // Compute what we can compute just once
v = GEO_DX;
if(simulationDimension == 3) v = (v < GEO_DZ) ? v : GEO_DZ;
}
}
// Grid-stride loop: each thread accumulates the min dt over its cells.
while(x < ntotal) {
if((algo == METHOD_HLL) || (algo == METHOD_HLLC)) {
// get max signal speed
u = getMagnitudeMomentum<simulationDimension>(fluid, slabpitch) / fluid[0] + cs[0]; // |v| + c
// Identify local constraint on dt < dx / c_signal
if(shape == SQUARE) {
u = v / u;
}
if(shape == CYLINDRICAL) {
w = (GEO_RIN + (x % nx) *GEO_DX)*GEO_DY; // r dtheta changes with r...
w = (w < v) ? w : v;
u = w / u;
}
}
if(algo == METHOD_XINJIN) {
double rho = fluid[0];
// get max signal speed; Xin/Jin limits each direction independently
// using that direction's momentum component.
u = GEO_DX / ( fabs(fluid[2*slabpitch])/rho + cs[0] );
if(simulationDimension > 1) {
if(shape == SQUARE) {
v = GEO_DY / ( fabs(fluid[3*slabpitch])/rho + cs[0] );
}
if(shape == CYLINDRICAL){
v = (GEO_RIN + (x % nx)*GEO_DX)*GEO_DY / ( fabs(fluid[3*slabpitch])/rho + cs[0] );
}
u = (u < v) ? u : v;
}
if(simulationDimension > 2) {
v = GEO_DZ / ( fabs(fluid[4*slabpitch])/rho + cs[0] );
u = (u < v) ? u : v;
}
}
// Each thread keeps running track of minimum dt
localTmin = (u < localTmin) ? u : localTmin;
fluid += blockhop;
cs += blockhop;
x += blockhop; // skip the first block since we've already done it.
}
dtLimit[tix] = localTmin;
__syncthreads();
// Tree reduction over shared memory down to 32 surviving entries.
x = GLOBAL_BLOCKDIM / 2;
while(x > 16) {
if(tix >= x) return;
__syncthreads();
if(dtLimit[tix+x] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+x]; }
x=x/2;
}
if(tix >= 16) return;
// We have one halfwarp (16 threads) remaining, proceed synchronously
// cuda-memcheck --racecheck whines bitterly about this but because of warp synchronicity
// there is no RAW problem.
// NOTE(review): implicit warp synchrony no longer holds under Volta+
// independent thread scheduling; a __syncwarp() between steps (or the
// commented-out __syncthreads()) would make this robust — confirm target SM.
if(dtLimit[tix+16] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+16]; } if(tix >= 8) return;
//__syncthreads();
if(dtLimit[tix+8] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+8 ]; } if(tix >= 4) return;
//__syncthreads();
if(dtLimit[tix+4] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+4 ]; } if(tix >= 2) return;
//__syncthreads();
if(dtLimit[tix+2] < dtLimit[tix]) { dtLimit[tix] = dtLimit[tix+2 ]; } if(tix) return;
//__syncthreads();
// Thread 0 writes this block's minimum; host reduces across blocks.
out[blockIdx.x] = (dtLimit[1] < dtLimit[0]) ? dtLimit[1] : dtLimit[0];
}
|
059759c26fa730f6dd706c69f873ba195d129675.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <iostream>
#include <math.h>
#include <stdio.h>
// Kernel function to add the elements of two arrays
// Element-wise vector add: y[i] = x[i] + y[i] for every i < n.
// Uses a grid-stride loop so any launch configuration covers all n elements.
__global__
void add(int n, float *x, float *y)
{
int start = blockIdx.x * blockDim.x + threadIdx.x;
int step = blockDim.x * gridDim.x;
for (int i = start; i < n; i += step) {
y[i] = x[i] + y[i];
printf("i=%d,blockIdx.x=%d\n",i,blockIdx.x);
}
}
// Driver: allocate two 1M-element vectors in managed memory, add them on
// the GPU, and verify every element equals 3.0f.
// Returns 0 on success, non-zero if a CUDA/HIP API call fails.
int main(void)
{
int N = 1<<20;
float *x, *y;
// Allocate Unified Memory accessible from CPU or GPU.  The original code
// ignored the return codes; a failed allocation would be dereferenced as
// a null pointer in the init loop below.
hipError_t err = hipMallocManaged(&x, N*sizeof(float));
if (err != hipSuccess) {
printf("hipMallocManaged(x) failed: %s\n", hipGetErrorString(err));
return 1;
}
err = hipMallocManaged(&y, N*sizeof(float));
if (err != hipSuccess) {
printf("hipMallocManaged(y) failed: %s\n", hipGetErrorString(err));
hipFree(x);
return 1;
}
// initialize x and y arrays on the host
for (int i = 0; i < N; i++) {
x[i] = 1.0f;
y[i] = 2.0f;
}
// Run kernel on 1M elements on the GPU (ceil-div so the grid covers all N)
int blockSize = 256;
int numBlocks = (N + blockSize - 1) / blockSize;
hipLaunchKernelGGL(( add), dim3(numBlocks), dim3(blockSize), 0, 0, N, x, y);
// Catch launch-configuration errors, then wait for the GPU to finish
// before touching the managed memory on the host.
err = hipGetLastError();
if (err != hipSuccess) {
printf("kernel launch failed: %s\n", hipGetErrorString(err));
}
hipDeviceSynchronize();
// Check for errors (all values should be 3.0f)
float maxError = 0.0f;
for (int i = 0; i < N; i++)
maxError = fmax(maxError, fabs(y[i]-3.0f));
std::cout << "Max error: " << maxError << std::endl;
// Free memory
hipFree(x);
hipFree(y);
return 0;
} | 059759c26fa730f6dd706c69f873ba195d129675.cu | #include <iostream>
#include <math.h>
#include <stdio.h>
// Kernel function to add the elements of two arrays
// Grid-stride element-wise add: for each i < n, y[i] = x[i] + y[i].
__global__
void add(int n, float *x, float *y)
{
int hop = blockDim.x * gridDim.x;
int i = blockIdx.x * blockDim.x + threadIdx.x;
while (i < n)
{
y[i] = x[i] + y[i];
printf("i=%d,blockIdx.x=%d\n",i,blockIdx.x);
i += hop;
}
}
// Driver: add two 1M-element vectors in CUDA unified memory and report the
// maximum deviation from the expected value 3.0f.
int main(void)
{
int N = 1<<20;
float *x, *y;
// Allocate Unified Memory – accessible from CPU or GPU
// NOTE(review): return codes are ignored; a failed allocation leaves x/y
// null and the init loop below would crash — consider checking cudaError_t.
cudaMallocManaged(&x, N*sizeof(float));
cudaMallocManaged(&y, N*sizeof(float));
// initialize x and y arrays on the host
for (int i = 0; i < N; i++) {
x[i] = 1.0f;
y[i] = 2.0f;
}
// Run kernel on 1M elements on the GPU (ceil-div so the grid covers all N)
int blockSize = 256;
int numBlocks = (N + blockSize - 1) / blockSize;
add<<<numBlocks, blockSize>>>(N, x, y);
// Wait for GPU to finish before accessing on host
cudaDeviceSynchronize();
// Check for errors (all values should be 3.0f)
float maxError = 0.0f;
for (int i = 0; i < N; i++)
maxError = fmax(maxError, fabs(y[i]-3.0f));
std::cout << "Max error: " << maxError << std::endl;
// Free memory
cudaFree(x);
cudaFree(y);
return 0;
} |
7b3e7120e91fbfdc3a7fcd4ee70fcd0fdc40c915.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <hip/hip_fp16.h>
#include <device_launch_parameters.h>
#include "caffe/common.hpp"
#include "caffe/util/gpu_math_functions.cuh"
#include "caffe/util/math_functions.hpp"
namespace caffe {
///////////////////////////////////// ASUM REDUCTION ///////////////////////////////////
// Block-level sum reduction in shared memory.  Each thread deposits my_sum
// at sdata[tid]; after the barrier, the live half of the block folds the
// upper half in at each step until 64 entries remain, then the last 32
// threads finish warp-synchronously through the volatile pointer.
// tassign/tsum_replace are project helpers (defined elsewhere) that assign /
// accumulate through the volatile slot.
// NOTE(review): the tid < 32 tail reads sdata[tid + 32..1], so BlockSize
// must be >= 64 (the host asserts >= 128); it also relies on implicit warp
// lockstep, which Volta+ independent thread scheduling no longer guarantees
// without __syncwarp() — confirm target architectures.
template<unsigned int BlockSize, typename TR>
__device__ void asum_reduce_block(volatile TR *sdata, TR my_sum, unsigned int tid) {
volatile TR* st = sdata + tid;
tassign(st, my_sum);
__syncthreads();
// do reduction in shared mem
if (BlockSize >= 512) {
if (tid < 256) {
tsum_replace(st, sdata[tid + 256]);
}
__syncthreads();
}
if (BlockSize >= 256) {
if (tid < 128) {
tsum_replace(st, sdata[tid + 128]);
}
__syncthreads();
}
if (BlockSize >= 128) {
if (tid < 64) {
tsum_replace(st, sdata[tid + 64]);
}
__syncthreads();
}
if (tid < 32) {
for (int i = 32; i > 0; i >>= 1) {
tsum_replace(st, sdata[tid + i]);
}
}
}
// Global variable used by amax_reduce_kernel to count how many blocks have finished
__device__ unsigned int asum_blocks_count_f = 0;
__device__ unsigned int asum_blocks_count_d = 0;
__device__ unsigned int asum_blocks_count_h = 0;
template<typename T>
__device__ __inline__
unsigned int* asum_blocks_count_ptr();
template<>
__device__ __inline__
unsigned int* asum_blocks_count_ptr<float>() {
return &asum_blocks_count_f;
}
template<>
__device__ __inline__
unsigned int* asum_blocks_count_ptr<double>() {
return &asum_blocks_count_d;
}
template<>
__device__ __inline__
unsigned int* asum_blocks_count_ptr<__half2>() {
return &asum_blocks_count_h;
}
template<typename T>
hipError_t set_asum_blocks_count(unsigned int cnt);
template<>
hipError_t set_asum_blocks_count<float>(unsigned int cnt) {
return hipMemcpyToSymbolAsync(asum_blocks_count_f, &cnt, sizeof(unsigned int), 0,
hipMemcpyHostToDevice, Caffe::thread_stream());
}
template<>
hipError_t set_asum_blocks_count<double>(unsigned int cnt) {
return hipMemcpyToSymbolAsync(asum_blocks_count_d, &cnt, sizeof(unsigned int), 0,
hipMemcpyHostToDevice, Caffe::thread_stream());
}
template<>
hipError_t set_asum_blocks_count<__half2>(unsigned int cnt) {
return hipMemcpyToSymbolAsync(asum_blocks_count_h, &cnt, sizeof(unsigned int), 0,
hipMemcpyHostToDevice, Caffe::thread_stream());
}
// Device-side reset of the per-type block counter.  Called by the last
// block of asum_reduce_kernel so the next reduction starts from zero.
template<typename T>
__device__ __inline__
void reset_asum_blocks_count();
// Every specialization must be __device__: they are invoked from device
// code and write __device__ globals.  The float specialization was missing
// the qualifiers that its double/__half2 siblings carry — restored here for
// correctness and consistency.
template<>
__device__ __inline__
void reset_asum_blocks_count<float>() {
asum_blocks_count_f = 0;
}
template<>
__device__ __inline__
void reset_asum_blocks_count<double>() {
asum_blocks_count_d = 0;
}
template<>
__device__ __inline__
void reset_asum_blocks_count<__half2>() {
asum_blocks_count_h = 0;
}
template<unsigned int BlockSize, bool IsPow2, typename T, typename TR>
__device__ void asum_reduce_blocks(const T *in, TR *out, unsigned int n) {
struct __dyn_shmem__<n_bytes<sizeof(TR)>> asum_blocks_shmem;
TR* partial_asum = reinterpret_cast<TR*>(asum_blocks_shmem.getPtr());
// first level of reduction:
// reading from global memory, writing to shared memory
unsigned int tid = threadIdx.x;
unsigned int i = blockIdx.x * BlockSize * 2 + threadIdx.x;
unsigned int gridSize = BlockSize * 2 * gridDim.x;
T t1, t2;
TR my_sum = tzero<TR>();
// We reduce multiple elements per thread. The number is determined by the
// number of active thread blocks (via gridDim). More blocks will result
// in a larger gridSize and therefore fewer elements per thread.
while (i < n) {
t1 = tabs(in[i]);
if (IsPow2 || i + BlockSize < n) {
t2 = tabs(in[i + BlockSize]);
tsum_replace(&my_sum, tsum<T, TR>(t1, t2));
} else {
tsum_replace(&my_sum, t1);
}
i += gridSize;
}
// do reduction in shared mem
asum_reduce_block<BlockSize>(partial_asum, my_sum, tid);
// write result for this block to global mem
if (tid == 0) {
out[blockIdx.x] = partial_asum[0];
}
}
template<unsigned int BlockSize, bool IsPow2, typename T, typename TR>
__global__ void asum_reduce_kernel(unsigned int n, const T *in, TR *out) {
asum_reduce_blocks<BlockSize, IsPow2>(in, out, n);
if (gridDim.x > 1) {
const unsigned int tid = threadIdx.x;
struct __dyn_shmem__<n_bytes<sizeof(TR)>> asum_reduce_shmem;
TR* partial_asum = reinterpret_cast<TR*>(asum_reduce_shmem.getPtr());
__shared__ bool last_asum_reduce_block;
// wait until all outstanding memory instructions in this thread are finished
__threadfence();
// Thread 0 takes a ticket
if (tid == 0) {
unsigned int ticket = atomicInc(asum_blocks_count_ptr<T>(), gridDim.x);
last_asum_reduce_block = (ticket == gridDim.x - 1);
}
__syncthreads();
// The last block sums the results of all other blocks
if (last_asum_reduce_block) {
int i = tid;
TR my_sum = tzero<TR>();
while (i < gridDim.x) {
tsum_replace(&my_sum, out[i]);
i += BlockSize;
}
asum_reduce_block<BlockSize>(partial_asum, my_sum, tid);
if (tid == 0) {
out[0] = partial_asum[0];
// reset blocks count so that next run succeeds
reset_asum_blocks_count<T>();
}
}
}
}
template<typename T, typename TR>
void gpu_asum_t(const int n, const T* x, TR* sum) {
hipStream_t stream = Caffe::thread_stream();
const bool po2 = is_pow2(n);
// See kernel for details
CHECK_LE(CAFFE_CUDA_NUM_THREADS_HALF, 512);
CHECK_GE(CAFFE_CUDA_NUM_THREADS_HALF, 128);
const int threadsPerCta = CAFFE_CUDA_NUM_THREADS_HALF;
const int nbrCtas = CAFFE_GET_BLOCKS_HALF(n);
const int reduction_size_sum = (nbrCtas + 1) * sizeof(TR);
TR* dev_ptr_sum = reinterpret_cast<TR*>(GPUMemory::pinned_buffer(reduction_size_sum));
if (po2 && n > CAFFE_CUDA_NUM_THREADS_HALF) {
// NOLINT_NEXT_LINE(whitespace/operators)
hipLaunchKernelGGL(( asum_reduce_kernel<CAFFE_CUDA_NUM_THREADS_HALF, true>), dim3(nbrCtas), dim3(threadsPerCta),
threadsPerCta * sizeof(TR) + sizeof(bool), stream,
(unsigned int)n, x, dev_ptr_sum);
} else {
// NOLINT_NEXT_LINE(whitespace/operators)
hipLaunchKernelGGL(( asum_reduce_kernel<CAFFE_CUDA_NUM_THREADS_HALF, false>), dim3(nbrCtas), dim3(threadsPerCta),
threadsPerCta * sizeof(TR) + sizeof(bool), stream,
(unsigned int)n, x, dev_ptr_sum);
}
CUDA_POST_KERNEL_CHECK;
CUDA_CHECK(hipStreamSynchronize(stream));
*sum = dev_ptr_sum[0];
}
// Computes sum = sum(|x[i]|) over n device elements of type Dtype,
// returning the result in host memory as Mtype.
// The function-local static runs set_asum_blocks_count exactly once per
// (Dtype, Mtype) instantiation to zero the device-side ticket counter
// before the first reduction; C++11 magic statics make that one-time
// initialization thread-safe.
template<typename Dtype, typename Mtype>
void caffe_gpu_asum(const int n, const Dtype* x, Mtype* sum) {
static hipError_t status = set_asum_blocks_count<Dtype>(0U); // needed just 1 time
CUDA_CHECK(status);
gpu_asum_t(n, x, sum);
}
template<>
void caffe_gpu_asum<float16, float>(const int n, const float16* x, float* sum) {
// For odd counts we allocate extra element to speed up kernels.
// We have to keep it clean.
hipStream_t stream = Caffe::thread_stream();
if (n & 1) {
clean_last_element(const_cast<float16*>(x) + n, stream);
}
const int n2 = even(n) / 2;
static hipError_t status = set_asum_blocks_count<__half2>(0U); // needed just 1 time
CUDA_CHECK(status);
gpu_asum_t(n2, reinterpret_cast<const __half2*>(x), sum);
}
template<>
void caffe_gpu_asum<float16, double>(const int n, const float16* x, double* sum) {
float sf;
caffe_gpu_asum(n, x, &sf);
*sum = sf;
}
template<>
void caffe_gpu_asum<float16, float16>(const int n, const float16* x, float16* sum) {
float sf;
caffe_gpu_asum(n, x, &sf);
*sum = sf;
}
} // namespace caffe
| 7b3e7120e91fbfdc3a7fcd4ee70fcd0fdc40c915.cu | #include <cuda_fp16.h>
#include <device_launch_parameters.h>
#include "caffe/common.hpp"
#include "caffe/util/gpu_math_functions.cuh"
#include "caffe/util/math_functions.hpp"
namespace caffe {
///////////////////////////////////// ASUM REDUCTION ///////////////////////////////////
template<unsigned int BlockSize, typename TR>
__device__ void asum_reduce_block(volatile TR *sdata, TR my_sum, unsigned int tid) {
volatile TR* st = sdata + tid;
tassign(st, my_sum);
__syncthreads();
// do reduction in shared mem
if (BlockSize >= 512) {
if (tid < 256) {
tsum_replace(st, sdata[tid + 256]);
}
__syncthreads();
}
if (BlockSize >= 256) {
if (tid < 128) {
tsum_replace(st, sdata[tid + 128]);
}
__syncthreads();
}
if (BlockSize >= 128) {
if (tid < 64) {
tsum_replace(st, sdata[tid + 64]);
}
__syncthreads();
}
if (tid < 32) {
for (int i = 32; i > 0; i >>= 1) {
tsum_replace(st, sdata[tid + i]);
}
}
}
// Global variable used by amax_reduce_kernel to count how many blocks have finished
__device__ unsigned int asum_blocks_count_f = 0;
__device__ unsigned int asum_blocks_count_d = 0;
__device__ unsigned int asum_blocks_count_h = 0;
template<typename T>
__device__ __inline__
unsigned int* asum_blocks_count_ptr();
template<>
__device__ __inline__
unsigned int* asum_blocks_count_ptr<float>() {
return &asum_blocks_count_f;
}
template<>
__device__ __inline__
unsigned int* asum_blocks_count_ptr<double>() {
return &asum_blocks_count_d;
}
template<>
__device__ __inline__
unsigned int* asum_blocks_count_ptr<__half2>() {
return &asum_blocks_count_h;
}
template<typename T>
cudaError_t set_asum_blocks_count(unsigned int cnt);
template<>
cudaError_t set_asum_blocks_count<float>(unsigned int cnt) {
return cudaMemcpyToSymbolAsync(asum_blocks_count_f, &cnt, sizeof(unsigned int), 0,
cudaMemcpyHostToDevice, Caffe::thread_stream());
}
template<>
cudaError_t set_asum_blocks_count<double>(unsigned int cnt) {
return cudaMemcpyToSymbolAsync(asum_blocks_count_d, &cnt, sizeof(unsigned int), 0,
cudaMemcpyHostToDevice, Caffe::thread_stream());
}
template<>
cudaError_t set_asum_blocks_count<__half2>(unsigned int cnt) {
return cudaMemcpyToSymbolAsync(asum_blocks_count_h, &cnt, sizeof(unsigned int), 0,
cudaMemcpyHostToDevice, Caffe::thread_stream());
}
// Device-side reset of the per-type block counter.  Called by the last
// block of asum_reduce_kernel so the next reduction starts from zero.
template<typename T>
__device__ __inline__
void reset_asum_blocks_count();
// Every specialization must be __device__: they are invoked from device
// code and write __device__ globals.  The float specialization was missing
// the qualifiers that its double/__half2 siblings carry — restored here for
// correctness and consistency.
template<>
__device__ __inline__
void reset_asum_blocks_count<float>() {
asum_blocks_count_f = 0;
}
template<>
__device__ __inline__
void reset_asum_blocks_count<double>() {
asum_blocks_count_d = 0;
}
template<>
__device__ __inline__
void reset_asum_blocks_count<__half2>() {
asum_blocks_count_h = 0;
}
template<unsigned int BlockSize, bool IsPow2, typename T, typename TR>
__device__ void asum_reduce_blocks(const T *in, TR *out, unsigned int n) {
struct __dyn_shmem__<n_bytes<sizeof(TR)>> asum_blocks_shmem;
TR* partial_asum = reinterpret_cast<TR*>(asum_blocks_shmem.getPtr());
// first level of reduction:
// reading from global memory, writing to shared memory
unsigned int tid = threadIdx.x;
unsigned int i = blockIdx.x * BlockSize * 2 + threadIdx.x;
unsigned int gridSize = BlockSize * 2 * gridDim.x;
T t1, t2;
TR my_sum = tzero<TR>();
// We reduce multiple elements per thread. The number is determined by the
// number of active thread blocks (via gridDim). More blocks will result
// in a larger gridSize and therefore fewer elements per thread.
while (i < n) {
t1 = tabs(in[i]);
if (IsPow2 || i + BlockSize < n) {
t2 = tabs(in[i + BlockSize]);
tsum_replace(&my_sum, tsum<T, TR>(t1, t2));
} else {
tsum_replace(&my_sum, t1);
}
i += gridSize;
}
// do reduction in shared mem
asum_reduce_block<BlockSize>(partial_asum, my_sum, tid);
// write result for this block to global mem
if (tid == 0) {
out[blockIdx.x] = partial_asum[0];
}
}
template<unsigned int BlockSize, bool IsPow2, typename T, typename TR>
__global__ void asum_reduce_kernel(unsigned int n, const T *in, TR *out) {
asum_reduce_blocks<BlockSize, IsPow2>(in, out, n);
if (gridDim.x > 1) {
const unsigned int tid = threadIdx.x;
struct __dyn_shmem__<n_bytes<sizeof(TR)>> asum_reduce_shmem;
TR* partial_asum = reinterpret_cast<TR*>(asum_reduce_shmem.getPtr());
__shared__ bool last_asum_reduce_block;
// wait until all outstanding memory instructions in this thread are finished
__threadfence();
// Thread 0 takes a ticket
if (tid == 0) {
unsigned int ticket = atomicInc(asum_blocks_count_ptr<T>(), gridDim.x);
last_asum_reduce_block = (ticket == gridDim.x - 1);
}
__syncthreads();
// The last block sums the results of all other blocks
if (last_asum_reduce_block) {
int i = tid;
TR my_sum = tzero<TR>();
while (i < gridDim.x) {
tsum_replace(&my_sum, out[i]);
i += BlockSize;
}
asum_reduce_block<BlockSize>(partial_asum, my_sum, tid);
if (tid == 0) {
out[0] = partial_asum[0];
// reset blocks count so that next run succeeds
reset_asum_blocks_count<T>();
}
}
}
}
template<typename T, typename TR>
void gpu_asum_t(const int n, const T* x, TR* sum) {
cudaStream_t stream = Caffe::thread_stream();
const bool po2 = is_pow2(n);
// See kernel for details
CHECK_LE(CAFFE_CUDA_NUM_THREADS_HALF, 512);
CHECK_GE(CAFFE_CUDA_NUM_THREADS_HALF, 128);
const int threadsPerCta = CAFFE_CUDA_NUM_THREADS_HALF;
const int nbrCtas = CAFFE_GET_BLOCKS_HALF(n);
const int reduction_size_sum = (nbrCtas + 1) * sizeof(TR);
TR* dev_ptr_sum = reinterpret_cast<TR*>(GPUMemory::pinned_buffer(reduction_size_sum));
if (po2 && n > CAFFE_CUDA_NUM_THREADS_HALF) {
// NOLINT_NEXT_LINE(whitespace/operators)
asum_reduce_kernel<CAFFE_CUDA_NUM_THREADS_HALF, true><<<nbrCtas, threadsPerCta,
threadsPerCta * sizeof(TR) + sizeof(bool), stream>>>
((unsigned int)n, x, dev_ptr_sum);
} else {
// NOLINT_NEXT_LINE(whitespace/operators)
asum_reduce_kernel<CAFFE_CUDA_NUM_THREADS_HALF, false><<<nbrCtas, threadsPerCta,
threadsPerCta * sizeof(TR) + sizeof(bool), stream>>>
((unsigned int)n, x, dev_ptr_sum);
}
CUDA_POST_KERNEL_CHECK;
CUDA_CHECK(cudaStreamSynchronize(stream));
*sum = dev_ptr_sum[0];
}
template<typename Dtype, typename Mtype>
void caffe_gpu_asum(const int n, const Dtype* x, Mtype* sum) {
static cudaError_t status = set_asum_blocks_count<Dtype>(0U); // needed just 1 time
CUDA_CHECK(status);
gpu_asum_t(n, x, sum);
}
template<>
void caffe_gpu_asum<float16, float>(const int n, const float16* x, float* sum) {
// For odd counts we allocate extra element to speed up kernels.
// We have to keep it clean.
cudaStream_t stream = Caffe::thread_stream();
if (n & 1) {
clean_last_element(const_cast<float16*>(x) + n, stream);
}
const int n2 = even(n) / 2;
static cudaError_t status = set_asum_blocks_count<__half2>(0U); // needed just 1 time
CUDA_CHECK(status);
gpu_asum_t(n2, reinterpret_cast<const __half2*>(x), sum);
}
template<>
void caffe_gpu_asum<float16, double>(const int n, const float16* x, double* sum) {
float sf;
caffe_gpu_asum(n, x, &sf);
*sum = sf;
}
template<>
void caffe_gpu_asum<float16, float16>(const int n, const float16* x, float16* sum) {
float sf;
caffe_gpu_asum(n, x, &sf);
*sum = sf;
}
} // namespace caffe
|
91ec4170d864420a701a2e9bbaf67357a1ed923a.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include "repeat.h"
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test1 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
while (sharedvar != tid);
sharedvar++;
out[0] = (t1 + t2);
}
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test2 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
label0: if (sharedvar != tid) goto label0;
else sharedvar++;
out[0] = (t1 + t2);
}
// Divergence probe: each thread spins (via the goto) until the shared
// counter reaches its tid, then increments it, serializing the threads one
// by one.  Unlike test1/test2 this version guards the back-edge with
// "t1 > 0", which is what keeps it from hanging — presumably the extra
// data-dependent branch changes the reconvergence code the compiler emits;
// the exact control-flow shape IS the thing under test, so do not
// restructure this kernel.
__global__ void kdiverge2_test3 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
label0: if (sharedvar != tid)
{
if (t1 > 0) goto label0;
}
else
sharedvar++;
// Write depends on t1/t2 so the compiler cannot dead-code the kernel.
out[0] = (t1 + t2);
}
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test4 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
while (sharedvar != tid);
atomicAdd((unsigned int *)&sharedvar, 1);
out[0] = (t1 + t2);
}
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test5 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
while (sharedvar != tid);
atomicAdd((unsigned int *)&sharedvar, 1);
out[0] = (t1 + t2);
}
// Runs the (non-hanging) divergence test kernel and reports PASS/fail.
// Fixes over the original: the second allocation-failure path now prints
// the error string and frees d_ts (it used to leak), and " PASS." is only
// printed when both post-kernel checks actually succeeded.
void measure_diverge2()
{
unsigned int ts[1024]; // ts, output from kernel. Two elements used per thread.
unsigned int *d_ts;
unsigned int *d_out; // Unused memory for storing output
dim3 Db = dim3(32 * 2);
dim3 Dg = dim3(1,1,1);
// Allocate device array.
hipError_t errorcode;
if (hipSuccess != (errorcode = hipMalloc((void**)&d_ts, sizeof(ts))))
{
printf ("hipMalloc failed %s:%d\n", __FILE__, __LINE__);
printf (" %s\n", hipGetErrorString(errorcode));
return;
}
if (hipSuccess != (errorcode = hipMalloc((void**)&d_out, 4)))
{
printf ("hipMalloc failed %s:%d\n", __FILE__, __LINE__);
printf (" %s\n", hipGetErrorString(errorcode));
hipFree(d_ts); // don't leak the first allocation
return;
}
errorcode = hipGetLastError();
printf ("Running divergence tests ...\n");
Db.x = 32;
// Not runnning kdiverge2_test1, 2, 4, and 5 because they hang the system.
printf("kdiverge2_test3: ");
hipLaunchKernelGGL(( kdiverge2_test3) , dim3(Dg), dim3(Db), 0, 0, d_ts, d_out, 4, 6, 100);
hipDeviceSynchronize();
bool passed = true;
if (hipSuccess != (errorcode = hipGetLastError()))
{ printf("failed. %s\n", hipGetErrorString(errorcode)); passed = false; }
hipMemcpy(ts, d_ts, sizeof(ts), hipMemcpyDeviceToHost);
if (hipSuccess != (errorcode = hipGetLastError()))
{ printf("failed. %s\n", hipGetErrorString(errorcode)); passed = false; }
if (passed) printf(" PASS.\n");
printf("\n");
hipFree(d_ts);
hipFree(d_out);
}
| 91ec4170d864420a701a2e9bbaf67357a1ed923a.cu | #include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include "repeat.h"
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test1 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
while (sharedvar != tid);
sharedvar++;
out[0] = (t1 + t2);
}
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test2 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
label0: if (sharedvar != tid) goto label0;
else sharedvar++;
out[0] = (t1 + t2);
}
__global__ void kdiverge2_test3 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
label0: if (sharedvar != tid)
{
if (t1 > 0) goto label0;
}
else
sharedvar++;
out[0] = (t1 + t2);
}
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test4 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ volatile unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
while (sharedvar != tid);
atomicAdd((unsigned int *)&sharedvar, 1);
out[0] = (t1 + t2);
}
/* THIS KERNEL HANGS */
__global__ void kdiverge2_test5 (unsigned int *ts, unsigned int* out, int p1, int p2, int its)
{
int t1 = p1;
int t2 = p2;
unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ unsigned int sharedvar;
sharedvar = 0;
__syncthreads();
while (sharedvar != tid);
atomicAdd((unsigned int *)&sharedvar, 1);
out[0] = (t1 + t2);
}
void measure_diverge2()
{
unsigned int ts[1024]; // ts, output from kernel. Two elements used per thread.
unsigned int *d_ts;
unsigned int *d_out; // Unused memory for storing output
dim3 Db = dim3(32 * 2);
dim3 Dg = dim3(1,1,1);
// Allocate device array.
cudaError_t errorcode;
if (cudaSuccess != (errorcode = cudaMalloc((void**)&d_ts, sizeof(ts))))
{
printf ("cudaMalloc failed %s:%d\n", __FILE__, __LINE__);
printf (" %s\n", cudaGetErrorString(errorcode));
return;
}
if (cudaSuccess != (errorcode = cudaMalloc((void**)&d_out, 4)))
{
printf ("cudaMalloc failed %s:%d\n", __FILE__, __LINE__);
return;
}
errorcode = cudaGetLastError();
printf ("Running divergence tests ...\n");
Db.x = 32;
// Not runnning kdiverge2_test1, 2, 4, and 5 because they hang the system.
printf("kdiverge2_test3: ");
kdiverge2_test3 <<<Dg, Db>>>(d_ts, d_out, 4, 6, 100);
cudaThreadSynchronize();
if (cudaSuccess != (errorcode = cudaGetLastError()))
printf("failed. %s\n", cudaGetErrorString(errorcode));
cudaMemcpy(ts, d_ts, sizeof(ts), cudaMemcpyDeviceToHost);
if (cudaSuccess != (errorcode = cudaGetLastError()))
printf("failed. %s\n", cudaGetErrorString(errorcode));
printf(" PASS.\n");
printf("\n");
cudaFree(d_ts);
cudaFree(d_out);
}
|
9bc868f8efc55746f0ef98d3b342056e06114f67.hip | // !!! This is a file automatically generated by hipify!!!
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <getopt.h>
#include <hiprand/hiprand_kernel.h>
#include <stdlib.h>
#include <hip/hip_runtime.h>
#include <sys/time.h>
#include "subMat.cu"
#include<chrono>
#include<iostream>
using namespace std;
using namespace std::chrono;
int blocks_[20][2] = {{8,8},{16,16},{24,24},{32,32},{1,64},{1,128},{1,192},{1,256},{1,320},{1,384},{1,448},{1,512},{1,576},{1,640},{1,704},{1,768},{1,832},{1,896},{1,960},{1,1024}};
int matrices_[7][2] = {{240,240},{496,496},{784,784},{1016,1016},{1232,1232},{1680,1680},{2024,2024}};
// Benchmark driver: times the subMat kernel over 7 matrix sizes x 20 block
// shapes.  argv[1] = how many of the matrix sizes to run.
// Fixes over the original: the three device buffers were allocated with
// XSIZE*YSIZE *bytes* (a 4x under-allocation for float data the kernel
// writes N floats into), were never freed (leaking every iteration), and
// the timed loop stopped the clock without synchronizing, so it measured
// only asynchronous launch overhead.
int main(int argc, char **argv) {
hipSetDevice(0);
char* p;int matrix_len=strtol(argv[1], &p, 10);
for(int matrix_looper=0;matrix_looper<matrix_len;matrix_looper++){
for(int block_looper=0;block_looper<20;block_looper++){
int XSIZE=matrices_[matrix_looper][0],YSIZE=matrices_[matrix_looper][1],BLOCKX=blocks_[block_looper][0],BLOCKY=blocks_[block_looper][1];
// Size allocations in bytes: N floats per array.
float *a = NULL;
hipMalloc(&a, XSIZE*YSIZE*sizeof(float));
float *b = NULL;
hipMalloc(&b, XSIZE*YSIZE*sizeof(float));
float *sub = NULL;
hipMalloc(&sub, XSIZE*YSIZE*sizeof(float));
int N = XSIZE*YSIZE;
// Round the launch shape up so the grid covers the whole matrix.
int iXSIZE= XSIZE;
int iYSIZE= YSIZE;
while(iXSIZE%BLOCKX!=0)
{
iXSIZE++;
}
while(iYSIZE%BLOCKY!=0)
{
iYSIZE++;
}
dim3 gridBlock(iXSIZE/BLOCKX, iYSIZE/BLOCKY);
dim3 threadBlock(BLOCKX, BLOCKY);
hipFree(0);hipLaunchKernelGGL((
subMat), dim3(gridBlock),dim3(threadBlock), 0, 0, a,b,sub,N);
hipDeviceSynchronize();
// Warm-up launches so the timed loop is not skewed by one-time costs.
for (int loop_counter = 0; loop_counter < 10; ++loop_counter) {hipLaunchKernelGGL((
subMat), dim3(gridBlock),dim3(threadBlock), 0, 0, a,b,sub,N);
}
auto start = steady_clock::now();
for (int loop_counter = 0; loop_counter < 1000; loop_counter++) {hipLaunchKernelGGL((
subMat), dim3(gridBlock),dim3(threadBlock), 0, 0, a,b,sub,N);
}
// Launches are asynchronous: wait for them to finish before stopping
// the clock, otherwise only queueing overhead is measured.
hipDeviceSynchronize();
auto end = steady_clock::now();
auto usecs = duration_cast<duration<float, microseconds::period> >(end - start);
cout <<'['<<usecs.count()<<','<<'('<<BLOCKX<<','<<BLOCKY<<')' << ','<<'('<<XSIZE<<','<<YSIZE<<')'<<']' << endl;
// Release this configuration's buffers.
hipFree(a);
hipFree(b);
hipFree(sub);
}
}} | 9bc868f8efc55746f0ef98d3b342056e06114f67.cu | #include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <getopt.h>
#include <curand_kernel.h>
#include <stdlib.h>
#include <cuda.h>
#include <sys/time.h>
#include "subMat.cu"
#include<chrono>
#include<iostream>
using namespace std;
using namespace std::chrono;
int blocks_[20][2] = {{8,8},{16,16},{24,24},{32,32},{1,64},{1,128},{1,192},{1,256},{1,320},{1,384},{1,448},{1,512},{1,576},{1,640},{1,704},{1,768},{1,832},{1,896},{1,960},{1,1024}};
int matrices_[7][2] = {{240,240},{496,496},{784,784},{1016,1016},{1232,1232},{1680,1680},{2024,2024}};
// Benchmark driver: times the subMat kernel over 7 matrix sizes x 20 block
// shapes.  argv[1] = how many of the matrix sizes to run.
// Fixes over the original: the three device buffers were allocated with
// XSIZE*YSIZE *bytes* (a 4x under-allocation for float data the kernel
// writes N floats into), were never freed (leaking every iteration), and
// the timed loop stopped the clock without synchronizing, so it measured
// only asynchronous launch overhead.
int main(int argc, char **argv) {
cudaSetDevice(0);
char* p;int matrix_len=strtol(argv[1], &p, 10);
for(int matrix_looper=0;matrix_looper<matrix_len;matrix_looper++){
for(int block_looper=0;block_looper<20;block_looper++){
int XSIZE=matrices_[matrix_looper][0],YSIZE=matrices_[matrix_looper][1],BLOCKX=blocks_[block_looper][0],BLOCKY=blocks_[block_looper][1];
// Size allocations in bytes: N floats per array.
float *a = NULL;
cudaMalloc(&a, XSIZE*YSIZE*sizeof(float));
float *b = NULL;
cudaMalloc(&b, XSIZE*YSIZE*sizeof(float));
float *sub = NULL;
cudaMalloc(&sub, XSIZE*YSIZE*sizeof(float));
int N = XSIZE*YSIZE;
// Round the launch shape up so the grid covers the whole matrix.
int iXSIZE= XSIZE;
int iYSIZE= YSIZE;
while(iXSIZE%BLOCKX!=0)
{
iXSIZE++;
}
while(iYSIZE%BLOCKY!=0)
{
iYSIZE++;
}
dim3 gridBlock(iXSIZE/BLOCKX, iYSIZE/BLOCKY);
dim3 threadBlock(BLOCKX, BLOCKY);
cudaFree(0);
subMat<<<gridBlock,threadBlock>>>(a,b,sub,N);
cudaDeviceSynchronize();
// Warm-up launches so the timed loop is not skewed by one-time costs.
for (int loop_counter = 0; loop_counter < 10; ++loop_counter) {
subMat<<<gridBlock,threadBlock>>>(a,b,sub,N);
}
auto start = steady_clock::now();
for (int loop_counter = 0; loop_counter < 1000; loop_counter++) {
subMat<<<gridBlock,threadBlock>>>(a,b,sub,N);
}
// Launches are asynchronous: wait for them to finish before stopping
// the clock, otherwise only queueing overhead is measured.
cudaDeviceSynchronize();
auto end = steady_clock::now();
auto usecs = duration_cast<duration<float, microseconds::period> >(end - start);
cout <<'['<<usecs.count()<<','<<'('<<BLOCKX<<','<<BLOCKY<<')' << ','<<'('<<XSIZE<<','<<YSIZE<<')'<<']' << endl;
// Release this configuration's buffers.
cudaFree(a);
cudaFree(b);
cudaFree(sub);
}
}}
df5b605a2b865433447c867a2bd2535fae141dbb.hip | // !!! This is a file automatically generated by hipify!!!
/**
https://blog.csdn.net/Bruce_0712/article/details/64928442
hipDeviceSynchronize()CPUGPUCUDAkernel
hipDeviceSynchronize()cudaDeviceSynchronize()CUDA
hipStreamSynchronize()stream IDCPUGPUstream IDCUDAstreamCUDA
warp __ syncthreads
(1)warpshared/global ,
globalsharedwarp
(2)blockwarpshared/global ,
__syncthreads(), __threadfence()
(3)gridblockshared/gloabl
__threadfence*()
//
[vec_add.cu:59] GPU
allocated 76.29 MB on GPU
GPU cost time=0.109505s last_num=4e+07
CPU cost time=2.46585s last_num=4e+07
*/
#include <iostream>
#include <stdio.h>
#include <hip/hip_runtime.h>
#include <cmath>
#include <ctime>
#define mycout cout<<"["<<__FILE__<<":"<<__LINE__<<"] "
#define use_warmup 1
using namespace std;
typedef float FLOAT;
// GPU(GPU)
void warmup();
// CPU
// __host__ void vec_add_host(FLOAT *x,FLOAT* y,FLOAT *z,int N);
// or host__host__
void vec_add_host(FLOAT *x,FLOAT *y,FLOAT *z,int N);
// GPU
// GPU kernel: element-wise vector add, z[tid] = x[tid] + y[tid].
__global__ void vec_add(FLOAT *x,FLOAT *y,FLOAT *z,int N)
{
// Flat global thread id for a 2D grid of 1D blocks.  How the id reduces
// for other launch shapes:
/**
 * <<<(256,256),256>>> 2D grid, 1D block: tid=threadIdx.x+blockDim.x*blockIdx.x+blockDim.x*gridDim.x*blockIdx.y
 * <<<256,256>>>       1D grid, 1D block: tid=threadIdx.x+blockDim.x*blockIdx.x
 * <<<1,256>>>         1 block:           tid=threadIdx.x
 * <<<256,1>>>         1 thread/block:    tid=blockDim.x*blockIdx.x
 */
int tid=threadIdx.x+blockDim.x*blockIdx.x+blockDim.x*gridDim.x*blockIdx.y;
if (tid<N) z[tid]=x[tid]+y[tid]; // bounds-guarded add of the two vectors
// __syncthreads(); // not needed: no shared memory, no inter-thread dependence
}
// Benchmark: add two 20M-element vectors on the GPU and on the CPU and
// compare wall-clock times.  (Original comments were garbled non-English
// text; rewritten in English.)
int main(int argc, char *argv[])
{
mycout<<"GPU "<<endl;
int N = 20000000;
int nbytes = N * sizeof(FLOAT);
/* 1D block */
int bs = 256;
/* 2D grid: s x s blocks, sized to cover N elements */
// int s = ceil(sqrt((N + bs - 1.) / bs));
int s = ceil(sqrt(1.0*N / bs));
dim3 grid = dim3(s, s);
// d* = device (GPU) buffers, h* = host (CPU) buffers
FLOAT *dx = NULL, *hx = NULL;
FLOAT *dy = NULL, *hy = NULL;
FLOAT *dz = NULL, *hz = NULL;
int itr = 30;
int i;
// double th, td;
/**====== 1. allocate and initialize host memory ==========*/
/* alllocate CPU mem */
hx = (FLOAT *) malloc(nbytes);
hy = (FLOAT *) malloc(nbytes);
hz = (FLOAT *) malloc(nbytes);
if (hx == NULL || hy == NULL || hz == NULL) {
// printf("couldn't allocate CPU memory\n");
mycout<<"couldn't allocate CPU memory"<<endl;
return -2;
}
/* initialize inputs on the CPU */
// fill the arrays 'hx' and 'hy' on the CPU
for (int i=0; i<N; i++) {
hx[i] = i;
hy[i] = i ;
}
/* warm up GPU */
#if use_warmup
warmup(); // amortize one-time GPU start-up cost before timing
#endif // use_warmup
/**====== 2. allocate device memory ======*/
/* allocate GPU mem */
hipMalloc((void **)&dx, nbytes);
hipMalloc((void **)&dy, nbytes);
hipMalloc((void **)&dz, nbytes);
if (dx == NULL || dy == NULL || dz == NULL) {
// printf("couldn't allocate GPU memory\n");
mycout<<"couldn't allocate GPU memory"<<endl;
return -1;
}
printf("allocated %.2f MB on GPU\n", nbytes / (1024.f * 1024.f));
/**====== 3. copy inputs host -> device ======*/
/** copy data to GPU */
hipMemcpy(dx, hx, nbytes, hipMemcpyHostToDevice);
hipMemcpy(dy, hy, nbytes, hipMemcpyHostToDevice);
/* warm up GPU */
#if use_warmup
warmup(); // second warm-up after the copies, right before the timed run
#endif // use_warmup
/**====== 4. timed GPU run ======*/
/* call GPU */
// drain any pending work so the timer starts clean
hipDeviceSynchronize(); // blocks the host until the device is idle
clock_t start = clock();
for (i = 0; i < itr; i++)hipLaunchKernelGGL(( vec_add), dim3(grid), dim3(bs), 0, 0, dx, dy, dz, N);
// launches are asynchronous: wait for completion before stopping the clock
hipDeviceSynchronize(); // blocks the host until all itr launches finish
/**====== 5. copy result device -> host and time the CPU version ======*/
hipMemcpy(hz,dz, nbytes, hipMemcpyDeviceToHost);
cout<<"GPU cost time="<<(double)(clock()-start)/CLOCKS_PER_SEC <<"s\t"<<
"last_num="<<hz[N-1]<<endl;
// CPU reference timing over the same itr iterations
start = clock();
for (i = 0; i < itr; i++) vec_add_host(hx, hy, hz, N);
cout<<"CPU cost time="<<(double)(clock()-start)/CLOCKS_PER_SEC <<"s\t"<<
"last_num="<<hz[N-1]<<endl;
// release device and host buffers
hipFree(dx);
hipFree(dy);
hipFree(dz);
free(hx);
free(hy);
free(hz);
return 0;
}
/* warm up GPU */
// Dummy kernel used only to warm up the GPU; its results are intentionally unused.
__global__ void warmup_knl()
{
int i, j;
i = 1;
j = 2;
i = i + j;
}
// Launches the dummy kernel a few times so later timed kernels do not pay
// one-time device initialization costs.
void warmup()
{
int i;
for (i = 0; i < 8; i++) {
hipLaunchKernelGGL(( warmup_knl), dim3(1), dim3(256), 0, 0, );
}
}
// CPU reference implementation: element-wise vector addition z = x + y.
void vec_add_host(FLOAT *x,FLOAT *y,FLOAT *z,int N)
{
    int i = 0;
    while (i < N) {
        z[i] = x[i] + y[i];
        ++i;
    }
}
| df5b605a2b865433447c867a2bd2535fae141dbb.cu | /**
https://blog.csdn.net/Bruce_0712/article/details/64928442
cudaDeviceSynchronize():该方法将停止CPU端线程的执行,直到GPU端完成之前CUDA的任务,包括kernel函数、数据拷贝等。
cudaThreadSynchronize():该方法的作用和cudaDeviceSynchronize()基本相同,但它不是一个被推荐的方法,也许在后期版本的CUDA中会被删除。
cudaStreamSynchronize():这个方法接受一个stream ID,它将阻止CPU执行直到GPU端完成相应stream ID的所有CUDA任务,但其它stream中的CUDA任务可能执行完也可能没有执行完。
跨warp进行同步,您需要使用 __ syncthreads()
(1)在同一个warp内的线程读写shared/global 不需同步,
读写global和shared是立刻对本warp内的其他线程立刻可见的。
(2)在同一个block内的不同warp内线程读写shared/global 需同步,
这种读写必须使用__syncthreads(), 或者__threadfence()来实现不同的读写可见效果。
(3)在同一个grid内的不同block内的线程读写shared/gloabl 需要同步
这种读写必须使用__threadfence*()来实现一定的读写可见效果。
// 执行结果
[vec_add.cu:59] GPU 实现向量的加法
allocated 76.29 MB on GPU
GPU cost time=0.109505s last_num=4e+07
CPU cost time=2.46585s last_num=4e+07
*/
#include <iostream>
#include <stdio.h>
#include <cuda.h>
#include <cmath>
#include <ctime>
#define mycout cout<<"["<<__FILE__<<":"<<__LINE__<<"] "
#define use_warmup 1
using namespace std;
typedef float FLOAT;
// GPU预热(可以提升GPU计算速度)
void warmup();
// CPU 向量加法
// __host__ void vec_add_host(FLOAT *x,FLOAT* y,FLOAT *z,int N);
// or host函数__host__可以省略
void vec_add_host(FLOAT *x,FLOAT *y,FLOAT *z,int N);
// GPU 函数
// GPU kernel: element-wise vector addition z[i] = x[i] + y[i], one thread per element.
__global__ void vec_add(FLOAT *x,FLOAT *y,FLOAT *z,int N)
{
// Derive a unique global thread id (many threads run this concurrently; the id tells them apart).
/**
* Thread-id formulas for the different launch configurations:
* <<<(256,256),256>>> 2-D grid, 1-D block: tid=threadIdx.x+blockDim.x*blockIdx.x+blockDim.x*gridDim.x*blockIdx.y
* <<<256,256>>> 1-D grid, 1-D block: tid=threadIdx.x+blockDim.x*blockIdx.x
* <<<1,256>>> 1-D grid, 1-D block: tid=threadIdx.x
* <<<256,1>>> 1-D grid, 1-D block: tid=blockDim.x*blockIdx.x
*/
int tid=threadIdx.x+blockDim.x*blockIdx.x+blockDim.x*gridDim.x*blockIdx.y;
if (tid<N) z[tid]=x[tid]+y[tid]; // bounds guard: more threads are launched than elements
// __syncthreads(); // not needed here: each thread touches only its own element
}
// Driver: allocates host/device vectors, runs the GPU add `itr` times, then the
// CPU reference, printing wall-clock timings for each.
int main(int argc, char *argv[])
{
mycout<<"GPU 实现向量的加法"<<endl;
int N = 20000000;
int nbytes = N * sizeof(FLOAT);
/* 1D block */
int bs = 256;
/* 2D grid: s*s blocks, enough that s*s*bs >= N */
// int s = ceil(sqrt((N + bs - 1.) / bs));
int s = ceil(sqrt(1.0*N / bs));
dim3 grid = dim3(s, s);
// d* are device (GPU) pointers, h* are host (CPU) pointers
FLOAT *dx = NULL, *hx = NULL;
FLOAT *dy = NULL, *hy = NULL;
FLOAT *dz = NULL, *hz = NULL;
int itr = 30;
int i;
// double th, td;
/**====== 1. Allocate and fill host (CPU) buffers ==========*/
/* alllocate CPU mem */
hx = (FLOAT *) malloc(nbytes);
hy = (FLOAT *) malloc(nbytes);
hz = (FLOAT *) malloc(nbytes);
if (hx == NULL || hy == NULL || hz == NULL) {
// printf("couldn't allocate CPU memory\n");
mycout<<"couldn't allocate CPU memory"<<endl;
return -2;
}
/* fill the host inputs */
// fill the arrays 'hx' and 'hy' on the CPU
for (int i=0; i<N; i++) {
hx[i] = i;
hy[i] = i ;
}
/* warm up GPU */
#if use_warmup
warmup(); // warm-up launches so the timed run excludes startup overhead
#endif // use_warmup
/**====== 2. Allocate device (GPU) buffers ======*/
/* allocate GPU mem */
cudaMalloc((void **)&dx, nbytes);
cudaMalloc((void **)&dy, nbytes);
cudaMalloc((void **)&dz, nbytes);
if (dx == NULL || dy == NULL || dz == NULL) {
// printf("couldn't allocate GPU memory\n");
mycout<<"couldn't allocate GPU memory"<<endl;
return -1;
}
printf("allocated %.2f MB on GPU\n", nbytes / (1024.f * 1024.f));
/**====== 3. Copy inputs host -> device ======*/
/** copy data to GPU */
cudaMemcpy(dx, hx, nbytes, cudaMemcpyHostToDevice);
cudaMemcpy(dy, hy, nbytes, cudaMemcpyHostToDevice);
/* warm up GPU */
#if use_warmup
warmup(); // warm up again after the copies
#endif // use_warmup
/**====== 4. Timed GPU computation ======*/
/* call GPU */
// cudaThreadSynchronize(); // make sure the copies above have finished
cudaDeviceSynchronize(); // cudaThreadSynchronize() is deprecated; use this barrier instead
clock_t start = clock();
for (i = 0; i < itr; i++) vec_add<<<grid, bs>>>(dx, dy, dz, N);
// cudaThreadSynchronize(); // wait for all threads to finish
cudaDeviceSynchronize(); // launches are asynchronous; barrier before stopping the clock
/**====== 5. Copy result device -> host ======*/
cudaMemcpy(hz,dz, nbytes, cudaMemcpyDeviceToHost);
cout<<"GPU cost time="<<(double)(clock()-start)/CLOCKS_PER_SEC <<"s\t"<<
"last_num="<<hz[N-1]<<endl;
// CPU reference timing over the same number of iterations
start = clock();
for (i = 0; i < itr; i++) vec_add_host(hx, hy, hz, N);
cout<<"CPU cost time="<<(double)(clock()-start)/CLOCKS_PER_SEC <<"s\t"<<
"last_num="<<hz[N-1]<<endl;
// release device then host memory
cudaFree(dx);
cudaFree(dy);
cudaFree(dz);
free(hx);
free(hy);
free(hz);
return 0;
}
/* warm up GPU */
// Dummy kernel used only to warm up the GPU; its results are intentionally unused.
__global__ void warmup_knl()
{
int i, j;
i = 1;
j = 2;
i = i + j;
}
// Launches the dummy kernel a few times so later timed kernels do not pay
// one-time device initialization costs.
void warmup()
{
int i;
for (i = 0; i < 8; i++) {
warmup_knl<<<1, 256>>>();
}
}
// CPU reference implementation: element-wise vector addition z = x + y.
void vec_add_host(FLOAT *x,FLOAT *y,FLOAT *z,int N)
{
    int i = 0;
    while (i < N) {
        z[i] = x[i] + y[i];
        ++i;
    }
}
|
9879b59aa4ea79e28a059d948754d7719a06461d.hip | // !!! This is a file automatically generated by hipify!!!
/* ******************************************************************************
*
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
//
// @author raver119@gmail.com
// @author Yurii Shyrma (iuriish@yahoo.com)
//
#include <exceptions/cuda_exception.h>
#include <rocblas.h>
#include <ops/specials_cuda.h>
#include <system/op_boilerplate.h>
#include <types/float16.h>
#include <ops/declarable/helpers/batched_gemm.h>
#include <helpers/PointersManager.h>
namespace sd {
namespace ops {
namespace helpers {
//////////////////////////////////////////////////////////////////////////////
// bsxMXK x bSxKxN = bSxMxN
// Batched GEMM on GPU via hipBLAS: for each batch index i computes
// C[i] = alpha * op(A[i]) * op(B[i]) + beta * C[i], where op() is selected by
// transA/transB. The value 112 matches the CBLAS CblasTrans code — TODO confirm
// against the caller's convention. M, N, K are the GEMM dimensions and
// lda/ldb/ldc the leading dimensions. Non-contiguous inputs are copied to
// temporary contiguous buffers first; results are copied back at the end.
ND4J_LOCAL void bgemm(const std::vector<NDArray*>& vA, const std::vector<NDArray*>& vB, std::vector<NDArray*>& vC, const NDArray* alphas, const NDArray* betas, int transA, int transB, int M, int N, int K, const int lda, const int ldb, const int ldc) {
const auto bS = vA.size(); // batch size
std::vector<NDArray*> pA(bS), pB(bS), pC(bS);
// every temporary NDArray created below is tracked here and deleted at the end
std::vector<NDArray*> toDelete;
for(int i = 0; i < bS; ++i) {
// ews() != 1: buffer is not unit-strided — make a contiguous 'f'-ordered copy
if(vA[i]->ews() != 1) {
pA[i] = new NDArray(vA[i]->dup('f'));
toDelete.emplace_back(pA[i]);
}
else
pA[i] = vA[i];
if(vB[i]->ews() != 1) {
pB[i] = new NDArray(vB[i]->dup('f'));
toDelete.emplace_back(pB[i]);
}
else
pB[i] = vB[i];
if(vC[i]->ews() != 1) {
pC[i] = new NDArray(vC[i]->dup('f'));
toDelete.emplace_back(pC[i]);
}
else
pC[i] = vC[i];
// BLAS gemm produces column-major ('f') output; if C is not 'f'-ordered,
// compute C^T = B^T * A^T instead by swapping A/B and permuting all three
if(pC[i]->ordering() != 'f') {
auto temp = pA[i];
pA[i] = new NDArray(pB[i]->permute({1,0}));
pB[i] = new NDArray(temp ->permute({1,0}));
pC[i] = new NDArray(pC[i]->permute({1,0}));
toDelete.push_back(pA[i]);
toDelete.push_back(pB[i]);
toDelete.push_back(pC[i]);
// refresh the GEMM dimensions for the transposed problem
M = pA[i]->sizeAt(0);
K = pA[i]->sizeAt(1);
N = pB[i]->sizeAt(1);
}
NDArray::prepareSpecialUse ({pC[i]}, {pA[i], pB[i]});
NDArray::registerSpecialUse({pC[i]}, {pA[i], pB[i]});
}
NDArray::prepareSpecialUse ({}, {alphas, betas});
NDArray::registerSpecialUse({}, {alphas, betas});
// gather the per-batch device buffer pointers
std::vector<void*> pAbuffs(bS), pBbuffs(bS), pCbuffs(bS);
for(int i = 0; i < bS; ++i) {
pAbuffs[i] = pA[i]->specialBuffer();
pBbuffs[i] = pB[i]->specialBuffer();
pCbuffs[i] = pC[i]->specialBuffer();
}
sd::LaunchContext* context = vA[0]->getContext();
PointersManager manager(context, "helpers::bgemm cuda");
// the batched BLAS APIs need the pointer arrays themselves in device memory
const void** aBuffers = reinterpret_cast<const void**>(manager.replicatePointer(pAbuffs.data(), bS * sizeof(void*)));
const void** bBuffers = reinterpret_cast<const void**>(manager.replicatePointer(pBbuffs.data(), bS * sizeof(void*)));
void** cBuffers = reinterpret_cast<void**>(manager.replicatePointer(pCbuffs.data(), bS * sizeof(void*)));
// const auto aOrder = pA->ordering();
// const auto bOrder = pB->ordering();
// const bool transA = aOrder != 'f';
// const bool transB = bOrder != 'f';
const hipblasOperation_t transAblas = transA == 112 ? HIPBLAS_OP_T : HIPBLAS_OP_N;
const hipblasOperation_t transBblas = transB == 112 ? HIPBLAS_OP_T : HIPBLAS_OP_N;
// const int lda = aOrder == 'f' ? M : K;
// const int ldb = bOrder == 'f' ? K : N;
// const int ldc = M; // cOrder == 'f' ? M : N;
const auto aType = pA[0]->dataType();
const auto bType = pB[0]->dataType();
const auto cType = pC[0]->dataType();
// serialize access to the shared per-device BLAS handle
std::lock_guard<std::mutex> lock(*LaunchContext::deviceMutex());
auto handle = reinterpret_cast<hipblasHandle_t*>(context->getCublasHandle());
auto stream = context->getCudaStream();
auto status = hipblasSetStream(*handle, *stream);
if (status != HIPBLAS_STATUS_SUCCESS)
throw cuda_exception::build("MmulHelper::mmulMxM cuda failed !", status);
const bool AB(aType == bType), AC(aType == cType), ABC(AB && AC);
// choose appropriate cuda gemm api depending on data types
if(ABC && aType == DataType::DOUBLE) {
double alpha = alphas->e<double>(0);
double beta = betas->e<double>(0);
status = hipblasDgemmBatched(*handle, transAblas, transBblas, M, N, K, &alpha, (const double**)aBuffers, lda, (const double**)bBuffers, ldb, &beta, (double**)cBuffers, ldc, bS);
}
else if(ABC && aType == DataType::FLOAT32) {
float alpha = alphas->e<float>(0);
float beta = betas->e<float>(0);
status = hipblasSgemmBatched(*handle, transAblas, transBblas, M, N, K, &alpha, (const float**)aBuffers, lda, (const float**)bBuffers, ldb, &beta, (float**)cBuffers, ldc, bS);
}
else if(ABC && aType == DataType::HALF) {
// scalars are fetched as float and implicitly narrowed to half
__half alpha = alphas->e<float>(0);
__half beta = betas->e<float>(0);
status = hipblasHgemmBatched(*handle, transAblas, transBblas, M, N, K, &alpha, (const __half**)aBuffers, lda, (const __half**)bBuffers, ldb, &beta, (__half**)cBuffers, ldc, bS);
}
else if(AB && aType == DataType::INT8 && cType == DataType::FLOAT32) {
// mixed-precision path: int8 inputs, float32 output/accumulation
float alpha = alphas->e<float>(0);
float beta = betas->e<float>(0);
status = hipblasGemmBatchedEx(*handle, transAblas, transBblas, M, N, K, &alpha, aBuffers, HIP_R_8I, lda, bBuffers, HIP_R_8I, ldb, &beta, cBuffers, HIP_R_32F, ldc, bS, HIP_R_32F, HIPBLAS_GEMM_DEFAULT);
}
else if(AB && aType == DataType::HALF && cType == DataType::FLOAT32) {
// mixed-precision path: half inputs, float32 output/accumulation
float alpha = alphas->e<float>(0);
float beta = betas->e<float>(0);
status = hipblasGemmBatchedEx(*handle, transAblas, transBblas, M, N, K, &alpha, aBuffers, HIP_R_16F, lda, bBuffers, HIP_R_16F, ldb, &beta, cBuffers, HIP_R_32F, ldc, bS, HIP_R_32F, HIPBLAS_GEMM_DEFAULT);
}
else
throw std::runtime_error("batched gemm cuda: this mode is not implemented yet !");
if (status != HIPBLAS_STATUS_SUCCESS)
throw cuda_exception::build("MmulHelper::mmulMxM cuda failed !", status);
// block until the batched gemm has finished on this stream
auto cudaResult = hipStreamSynchronize(*stream);
if (cudaResult != 0)
throw cuda_exception::build("MmulHelper::mmulMxM cuda failed !", cudaResult);
// copy results from the temporary contiguous buffers back into the callers' arrays
for(int i = 0; i < bS; ++i)
if(vC[i]->ews() != 1)
vC[i]->assign(pC[i]);
// delete in reverse creation order (permuted views were created after the dups)
for(int i = toDelete.size() - 1; i >= 0; --i)
delete toDelete[i];
}
}
}
}
| 9879b59aa4ea79e28a059d948754d7719a06461d.cu | /* ******************************************************************************
*
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
//
// @author raver119@gmail.com
// @author Yurii Shyrma (iuriish@yahoo.com)
//
#include <exceptions/cuda_exception.h>
#include <cublas_v2.h>
#include <ops/specials_cuda.h>
#include <system/op_boilerplate.h>
#include <types/float16.h>
#include <ops/declarable/helpers/batched_gemm.h>
#include <helpers/PointersManager.h>
namespace sd {
namespace ops {
namespace helpers {
//////////////////////////////////////////////////////////////////////////////
// bsxMXK x bSxKxN = bSxMxN
// Batched GEMM on GPU via cuBLAS: for each batch index i computes
// C[i] = alpha * op(A[i]) * op(B[i]) + beta * C[i], where op() is selected by
// transA/transB. The value 112 matches the CBLAS CblasTrans code — TODO confirm
// against the caller's convention. M, N, K are the GEMM dimensions and
// lda/ldb/ldc the leading dimensions. Non-contiguous inputs are copied to
// temporary contiguous buffers first; results are copied back at the end.
ND4J_LOCAL void bgemm(const std::vector<NDArray*>& vA, const std::vector<NDArray*>& vB, std::vector<NDArray*>& vC, const NDArray* alphas, const NDArray* betas, int transA, int transB, int M, int N, int K, const int lda, const int ldb, const int ldc) {
const auto bS = vA.size(); // batch size
std::vector<NDArray*> pA(bS), pB(bS), pC(bS);
// every temporary NDArray created below is tracked here and deleted at the end
std::vector<NDArray*> toDelete;
for(int i = 0; i < bS; ++i) {
// ews() != 1: buffer is not unit-strided — make a contiguous 'f'-ordered copy
if(vA[i]->ews() != 1) {
pA[i] = new NDArray(vA[i]->dup('f'));
toDelete.emplace_back(pA[i]);
}
else
pA[i] = vA[i];
if(vB[i]->ews() != 1) {
pB[i] = new NDArray(vB[i]->dup('f'));
toDelete.emplace_back(pB[i]);
}
else
pB[i] = vB[i];
if(vC[i]->ews() != 1) {
pC[i] = new NDArray(vC[i]->dup('f'));
toDelete.emplace_back(pC[i]);
}
else
pC[i] = vC[i];
// cuBLAS gemm produces column-major ('f') output; if C is not 'f'-ordered,
// compute C^T = B^T * A^T instead by swapping A/B and permuting all three
if(pC[i]->ordering() != 'f') {
auto temp = pA[i];
pA[i] = new NDArray(pB[i]->permute({1,0}));
pB[i] = new NDArray(temp ->permute({1,0}));
pC[i] = new NDArray(pC[i]->permute({1,0}));
toDelete.push_back(pA[i]);
toDelete.push_back(pB[i]);
toDelete.push_back(pC[i]);
// refresh the GEMM dimensions for the transposed problem
M = pA[i]->sizeAt(0);
K = pA[i]->sizeAt(1);
N = pB[i]->sizeAt(1);
}
NDArray::prepareSpecialUse ({pC[i]}, {pA[i], pB[i]});
NDArray::registerSpecialUse({pC[i]}, {pA[i], pB[i]});
}
NDArray::prepareSpecialUse ({}, {alphas, betas});
NDArray::registerSpecialUse({}, {alphas, betas});
// gather the per-batch device buffer pointers
std::vector<void*> pAbuffs(bS), pBbuffs(bS), pCbuffs(bS);
for(int i = 0; i < bS; ++i) {
pAbuffs[i] = pA[i]->specialBuffer();
pBbuffs[i] = pB[i]->specialBuffer();
pCbuffs[i] = pC[i]->specialBuffer();
}
sd::LaunchContext* context = vA[0]->getContext();
PointersManager manager(context, "helpers::bgemm cuda");
// the batched cuBLAS APIs need the pointer arrays themselves in device memory
const void** aBuffers = reinterpret_cast<const void**>(manager.replicatePointer(pAbuffs.data(), bS * sizeof(void*)));
const void** bBuffers = reinterpret_cast<const void**>(manager.replicatePointer(pBbuffs.data(), bS * sizeof(void*)));
void** cBuffers = reinterpret_cast<void**>(manager.replicatePointer(pCbuffs.data(), bS * sizeof(void*)));
// const auto aOrder = pA->ordering();
// const auto bOrder = pB->ordering();
// const bool transA = aOrder != 'f';
// const bool transB = bOrder != 'f';
const cublasOperation_t transAblas = transA == 112 ? CUBLAS_OP_T : CUBLAS_OP_N;
const cublasOperation_t transBblas = transB == 112 ? CUBLAS_OP_T : CUBLAS_OP_N;
// const int lda = aOrder == 'f' ? M : K;
// const int ldb = bOrder == 'f' ? K : N;
// const int ldc = M; // cOrder == 'f' ? M : N;
const auto aType = pA[0]->dataType();
const auto bType = pB[0]->dataType();
const auto cType = pC[0]->dataType();
// serialize access to the shared per-device cuBLAS handle
std::lock_guard<std::mutex> lock(*LaunchContext::deviceMutex());
auto handle = reinterpret_cast<cublasHandle_t*>(context->getCublasHandle());
auto stream = context->getCudaStream();
auto status = cublasSetStream_v2(*handle, *stream);
if (status != CUBLAS_STATUS_SUCCESS)
throw cuda_exception::build("MmulHelper::mmulMxM cuda failed !", status);
const bool AB(aType == bType), AC(aType == cType), ABC(AB && AC);
// choose appropriate cuda gemm api depending on data types
if(ABC && aType == DataType::DOUBLE) {
double alpha = alphas->e<double>(0);
double beta = betas->e<double>(0);
status = cublasDgemmBatched(*handle, transAblas, transBblas, M, N, K, &alpha, (const double**)aBuffers, lda, (const double**)bBuffers, ldb, &beta, (double**)cBuffers, ldc, bS);
}
else if(ABC && aType == DataType::FLOAT32) {
float alpha = alphas->e<float>(0);
float beta = betas->e<float>(0);
status = cublasSgemmBatched(*handle, transAblas, transBblas, M, N, K, &alpha, (const float**)aBuffers, lda, (const float**)bBuffers, ldb, &beta, (float**)cBuffers, ldc, bS);
}
else if(ABC && aType == DataType::HALF) {
// scalars are fetched as float and implicitly narrowed to half
__half alpha = alphas->e<float>(0);
__half beta = betas->e<float>(0);
status = cublasHgemmBatched(*handle, transAblas, transBblas, M, N, K, &alpha, (const __half**)aBuffers, lda, (const __half**)bBuffers, ldb, &beta, (__half**)cBuffers, ldc, bS);
}
else if(AB && aType == DataType::INT8 && cType == DataType::FLOAT32) {
// mixed-precision path: int8 inputs, float32 output/accumulation
float alpha = alphas->e<float>(0);
float beta = betas->e<float>(0);
status = cublasGemmBatchedEx(*handle, transAblas, transBblas, M, N, K, &alpha, aBuffers, CUDA_R_8I, lda, bBuffers, CUDA_R_8I, ldb, &beta, cBuffers, CUDA_R_32F, ldc, bS, CUDA_R_32F, CUBLAS_GEMM_DEFAULT);
}
else if(AB && aType == DataType::HALF && cType == DataType::FLOAT32) {
// mixed-precision path: half inputs, float32 output/accumulation
float alpha = alphas->e<float>(0);
float beta = betas->e<float>(0);
status = cublasGemmBatchedEx(*handle, transAblas, transBblas, M, N, K, &alpha, aBuffers, CUDA_R_16F, lda, bBuffers, CUDA_R_16F, ldb, &beta, cBuffers, CUDA_R_32F, ldc, bS, CUDA_R_32F, CUBLAS_GEMM_DEFAULT);
}
else
throw std::runtime_error("batched gemm cuda: this mode is not implemented yet !");
if (status != CUBLAS_STATUS_SUCCESS)
throw cuda_exception::build("MmulHelper::mmulMxM cuda failed !", status);
// block until the batched gemm has finished on this stream
auto cudaResult = cudaStreamSynchronize(*stream);
if (cudaResult != 0)
throw cuda_exception::build("MmulHelper::mmulMxM cuda failed !", cudaResult);
// copy results from the temporary contiguous buffers back into the callers' arrays
for(int i = 0; i < bS; ++i)
if(vC[i]->ews() != 1)
vC[i]->assign(pC[i]);
// delete in reverse creation order (permuted views were created after the dups)
for(int i = toDelete.size() - 1; i >= 0; --i)
delete toDelete[i];
}
}
}
}
|
a85af5da407424cffb733b9b3e8caf4c62f971ad.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
Author : H.M.Gamaarachchi
C file for adding of 2 matrices
*/
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include <stdlib.h>
#include "myhelpers.h"
#define BLOCK 16
//adding kernel
// Kernel: element-wise addition of two width x height matrices, dev_c = dev_a + dev_b.
// One thread per element over a 2-D grid; threads outside the matrix do nothing.
__global__ void cuda_add(float *dev_c,float *dev_a,float *dev_b,int width,int height){
    const int col = blockDim.x * blockIdx.x + threadIdx.x;
    const int row = blockDim.y * blockIdx.y + threadIdx.y;
    if (col >= width || row >= height)
        return;
    const int idx = row * width + col;
    dev_c[idx] = dev_a[idx] + dev_b[idx];
}
//addition abstraction
hipError_t add(float *c,float *a,float *b,int width,int height){
float *dev_a=0;
float *dev_b=0;
float *dev_c=0;
hipError_t cudastatus;
//memory allocation
cudastatus=hipMalloc((void**)&dev_a,width*height*sizeof(float));
if (cudastatus!=hipSuccess)
return cudastatus;
cudastatus=hipMalloc((void**)&dev_b,width*height*sizeof(float));
if (cudastatus!=hipSuccess)
return cudastatus;
cudastatus=hipMalloc((void**)&dev_c,width*height*sizeof(float));
if (cudastatus!=hipSuccess)
return cudastatus;
//copying
cudastatus=hipMemcpy(dev_a,a,sizeof(float)*width*height,hipMemcpyHostToDevice);
if(cudastatus!=hipSuccess)
return cudastatus;
cudastatus=hipMemcpy(dev_b,b,sizeof(float)*width*height,hipMemcpyHostToDevice);
if(cudastatus!=hipSuccess)
return cudastatus;
cudastatus=hipMemcpy(dev_c,c,sizeof(float)*width*height,hipMemcpyHostToDevice);
if(cudastatus!=hipSuccess)
return cudastatus;
dim3 grid(ceil(width/(float)BLOCK),ceil(height/(float)BLOCK));
dim3 block(BLOCK,BLOCK);
//Time
hipEvent_t start,stop;
float elapsedtime;
hipEventCreate(&start);
hipEventRecord(start,0);
//function
hipLaunchKernelGGL(( cuda_add), dim3(grid),dim3(block), 0, 0, dev_c,dev_a,dev_b,width,height);
checkCudaError(hipGetLastError());
hipDeviceSynchronize();
checkCudaError(hipGetLastError());
//Time
hipEventCreate(&stop);
hipEventRecord(stop,0);
hipEventSynchronize(stop);
hipEventElapsedTime(&elapsedtime,start,stop);
printf("Time spent for calculation in CUDA : %.10f s\n",elapsedtime/(float)1000);
cudastatus=hipGetLastError();
if (cudastatus!=hipSuccess)
return cudastatus;
//copyback
cudastatus=hipMemcpy(c,dev_c,width*height*sizeof(float),hipMemcpyDeviceToHost);
hipFree(dev_a);
hipFree(dev_b);
hipFree(dev_c);
return cudastatus;
}
// Reads two row-major float matrices from text files, adds them on the GPU,
// and writes the result to an output file.
// Usage: ./add file1.txt file2.txt rows cols ans.txt
// Fixes over the original: the argument-count guard was off by one (argv[5]
// is used, so 6 entries including the program name are required; the old
// `argc<5` check let `argc==5` dereference a NULL argv slot), and the host
// buffers are now freed before exit.
int main(int argc, char *argv[]){
    int width,height;
    //checking args and getting args: argv[1..5] are consumed below, so argc must be >= 6
    if(argc<6){
        fprintf(stderr,"Please enter all args eg: ./add file1.txt file2.txt rows cols ans.txt");
        exit(1);
    }
    //char matf1[]=argv[1];
    width=atoi(argv[4]);
    height=atoi(argv[3]);
    //allocating host buffers (isMemoryFull aborts on allocation failure)
    float *mat1=(float *)malloc(width*height*sizeof(float));
    isMemoryFull(mat1);
    float *mat2=(float *)malloc(width*height*sizeof(float));
    isMemoryFull(mat2);
    float *ans=(float *)malloc(width*height*sizeof(float));
    isMemoryFull(ans);
    //reading the two input matrices (isFileOK aborts on open failure)
    int i,j;
    FILE *fp;
    fp=fopen(argv[1],"r");
    isFileOK(fp);
    for (i=0;i<width*height;i++){
        fscanf(fp,"%f",&mat1[i]);
    }
    fclose(fp);
    fp=fopen(argv[2],"r");
    isFileOK(fp);
    for (i=0;i<width*height;i++){
        fscanf(fp,"%f",&mat2[i]);
    }
    fclose(fp);
    //add on the GPU; also time the whole call including transfers
    clock_t start=clock();
    hipError_t status=add(ans,mat1,mat2,width,height);
    checkCudaError(status);
    clock_t stop=clock();
    double cputime=(double)((stop-start)/(float)CLOCKS_PER_SEC);
    printf("Time for calculation with memory transfer overhead : %1.10f s\n",cputime);
    //writing the result matrix, one row per line
    fp=fopen(argv[5],"w");
    isFileOK(fp);
    for (i=0;i<height;i++){
        for (j=0;j<width;j++){
            fprintf(fp,"%f ",ans[width*i+j]);
        }
        fprintf(fp,"\n");
    }
    fclose(fp);
    //release host buffers
    free(mat1);
    free(mat2);
    free(ans);
    return 0;
}
| a85af5da407424cffb733b9b3e8caf4c62f971ad.cu | /*
Author : H.M.Gamaarachchi
C file for adding of 2 matrices
*/
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include <stdlib.h>
#include "myhelpers.h"
#define BLOCK 16
//adding kernel
// Kernel: element-wise addition of two width x height matrices, dev_c = dev_a + dev_b.
// One thread per element over a 2-D grid; threads outside the matrix do nothing.
__global__ void cuda_add(float *dev_c,float *dev_a,float *dev_b,int width,int height){
    const int col = blockDim.x * blockIdx.x + threadIdx.x;
    const int row = blockDim.y * blockIdx.y + threadIdx.y;
    if (col >= width || row >= height)
        return;
    const int idx = row * width + col;
    dev_c[idx] = dev_a[idx] + dev_b[idx];
}
//addition abstraction
cudaError_t add(float *c,float *a,float *b,int width,int height){
float *dev_a=0;
float *dev_b=0;
float *dev_c=0;
cudaError_t cudastatus;
//memory allocation
cudastatus=cudaMalloc((void**)&dev_a,width*height*sizeof(float));
if (cudastatus!=cudaSuccess)
return cudastatus;
cudastatus=cudaMalloc((void**)&dev_b,width*height*sizeof(float));
if (cudastatus!=cudaSuccess)
return cudastatus;
cudastatus=cudaMalloc((void**)&dev_c,width*height*sizeof(float));
if (cudastatus!=cudaSuccess)
return cudastatus;
//copying
cudastatus=cudaMemcpy(dev_a,a,sizeof(float)*width*height,cudaMemcpyHostToDevice);
if(cudastatus!=cudaSuccess)
return cudastatus;
cudastatus=cudaMemcpy(dev_b,b,sizeof(float)*width*height,cudaMemcpyHostToDevice);
if(cudastatus!=cudaSuccess)
return cudastatus;
cudastatus=cudaMemcpy(dev_c,c,sizeof(float)*width*height,cudaMemcpyHostToDevice);
if(cudastatus!=cudaSuccess)
return cudastatus;
dim3 grid(ceil(width/(float)BLOCK),ceil(height/(float)BLOCK));
dim3 block(BLOCK,BLOCK);
//Time
cudaEvent_t start,stop;
float elapsedtime;
cudaEventCreate(&start);
cudaEventRecord(start,0);
//function
cuda_add<<<grid,block>>>(dev_c,dev_a,dev_b,width,height);
checkCudaError(cudaGetLastError());
cudaDeviceSynchronize();
checkCudaError(cudaGetLastError());
//Time
cudaEventCreate(&stop);
cudaEventRecord(stop,0);
cudaEventSynchronize(stop);
cudaEventElapsedTime(&elapsedtime,start,stop);
printf("Time spent for calculation in CUDA : %.10f s\n",elapsedtime/(float)1000);
cudastatus=cudaGetLastError();
if (cudastatus!=cudaSuccess)
return cudastatus;
//copyback
cudastatus=cudaMemcpy(c,dev_c,width*height*sizeof(float),cudaMemcpyDeviceToHost);
cudaFree(dev_a);
cudaFree(dev_b);
cudaFree(dev_c);
return cudastatus;
}
// Reads two row-major float matrices from text files, adds them on the GPU,
// and writes the result to an output file.
// Usage: ./add file1.txt file2.txt rows cols ans.txt
// Fixes over the original: the argument-count guard was off by one (argv[5]
// is used, so 6 entries including the program name are required; the old
// `argc<5` check let `argc==5` dereference a NULL argv slot), and the host
// buffers are now freed before exit.
int main(int argc, char *argv[]){
    int width,height;
    //checking args and getting args: argv[1..5] are consumed below, so argc must be >= 6
    if(argc<6){
        fprintf(stderr,"Please enter all args eg: ./add file1.txt file2.txt rows cols ans.txt");
        exit(1);
    }
    //char matf1[]=argv[1];
    width=atoi(argv[4]);
    height=atoi(argv[3]);
    //allocating host buffers (isMemoryFull aborts on allocation failure)
    float *mat1=(float *)malloc(width*height*sizeof(float));
    isMemoryFull(mat1);
    float *mat2=(float *)malloc(width*height*sizeof(float));
    isMemoryFull(mat2);
    float *ans=(float *)malloc(width*height*sizeof(float));
    isMemoryFull(ans);
    //reading the two input matrices (isFileOK aborts on open failure)
    int i,j;
    FILE *fp;
    fp=fopen(argv[1],"r");
    isFileOK(fp);
    for (i=0;i<width*height;i++){
        fscanf(fp,"%f",&mat1[i]);
    }
    fclose(fp);
    fp=fopen(argv[2],"r");
    isFileOK(fp);
    for (i=0;i<width*height;i++){
        fscanf(fp,"%f",&mat2[i]);
    }
    fclose(fp);
    //add on the GPU; also time the whole call including transfers
    clock_t start=clock();
    cudaError_t status=add(ans,mat1,mat2,width,height);
    checkCudaError(status);
    clock_t stop=clock();
    double cputime=(double)((stop-start)/(float)CLOCKS_PER_SEC);
    printf("Time for calculation with memory transfer overhead : %1.10f s\n",cputime);
    //writing the result matrix, one row per line
    fp=fopen(argv[5],"w");
    isFileOK(fp);
    for (i=0;i<height;i++){
        for (j=0;j<width;j++){
            fprintf(fp,"%f ",ans[width*i+j]);
        }
        fprintf(fp,"\n");
    }
    fclose(fp);
    //release host buffers
    free(mat1);
    free(mat2);
    free(ans);
    return 0;
}
|
57af7f11066580f5526ce9c7090a0fc120820663.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/box_coder_op.h"
#include "paddle/fluid/platform/cuda_helper.h"
namespace paddle {
namespace operators {
// Encodes target boxes relative to prior (anchor) boxes in center-size form.
// Boxes are stored as [xmin, ymin, xmax, ymax] tuples of length `len` (len==4
// for the offsets used below). One thread per (target row, prior col) pair over
// a flattened row*col index; the output offsets are normalized by the prior
// box variances.
template <typename T>
__global__ void EncodeCenterSizeKernel(const T* prior_box_data,
const T* prior_box_var_data,
const T* target_box_data, const int row,
const int col, const int len,
T* output) {
const int idx = threadIdx.x + blockIdx.x * blockDim.x;
if (idx < row * col) {
// row indexes target boxes, col indexes prior boxes
const int row_idx = idx / col;
const int col_idx = idx % col;
// prior box width/height/center from its corner coordinates
T prior_box_width =
prior_box_data[col_idx * len + 2] - prior_box_data[col_idx * len];
T prior_box_height =
prior_box_data[col_idx * len + 3] - prior_box_data[col_idx * len + 1];
T prior_box_center_x =
(prior_box_data[col_idx * len + 2] + prior_box_data[col_idx * len]) / 2;
T prior_box_center_y = (prior_box_data[col_idx * len + 3] +
prior_box_data[col_idx * len + 1]) /
2;
// target box center and size from its corner coordinates
T target_box_center_x =
(target_box_data[row_idx * len + 2] + target_box_data[row_idx * len]) /
2;
T target_box_center_y = (target_box_data[row_idx * len + 3] +
target_box_data[row_idx * len + 1]) /
2;
T target_box_width =
target_box_data[row_idx * len + 2] - target_box_data[row_idx * len];
T target_box_height =
target_box_data[row_idx * len + 3] - target_box_data[row_idx * len + 1];
// center offsets scaled by prior size and variance
output[idx * len] = (target_box_center_x - prior_box_center_x) /
prior_box_width / prior_box_var_data[col_idx * len];
output[idx * len + 1] = (target_box_center_y - prior_box_center_y) /
prior_box_height /
prior_box_var_data[col_idx * len + 1];
// log-space size ratios scaled by variance
output[idx * len + 2] = log(fabs(target_box_width / prior_box_width)) /
prior_box_var_data[col_idx * len + 2];
output[idx * len + 3] = log(fabs(target_box_height / prior_box_height)) /
prior_box_var_data[col_idx * len + 3];
}
}
// Inverse of EncodeCenterSizeKernel: decodes predicted center-size offsets back
// into [xmin, ymin, xmax, ymax] corner coordinates using the prior boxes and
// their variances. One thread per (target row, prior col) pair.
template <typename T>
__global__ void DecodeCenterSizeKernel(const T* prior_box_data,
const T* prior_box_var_data,
const T* target_box_data, const int row,
const int col, const int len,
T* output) {
const int idx = threadIdx.x + blockIdx.x * blockDim.x;
if (idx < row * col) {
// row indexes encoded target offsets, col indexes prior boxes
const int row_idx = idx / col;
const int col_idx = idx % col;
// prior box width/height/center from its corner coordinates
T prior_box_width =
prior_box_data[col_idx * len + 2] - prior_box_data[col_idx * len];
T prior_box_height =
prior_box_data[col_idx * len + 3] - prior_box_data[col_idx * len + 1];
T prior_box_center_x =
(prior_box_data[col_idx * len + 2] + prior_box_data[col_idx * len]) / 2;
T prior_box_center_y = (prior_box_data[col_idx * len + 3] +
prior_box_data[col_idx * len + 1]) /
2;
// undo the log-space size encoding
T target_box_width = exp(prior_box_var_data[col_idx * len + 2] *
target_box_data[row_idx * len + 2]) *
prior_box_width;
T target_box_height = exp(prior_box_var_data[col_idx * len + 3] *
target_box_data[row_idx * len + 3]) *
prior_box_height;
// undo the variance-scaled center-offset encoding
T target_box_center_x = prior_box_var_data[col_idx * len] *
target_box_data[row_idx * len] *
prior_box_width +
prior_box_center_x;
T target_box_center_y = prior_box_var_data[col_idx * len + 1] *
target_box_data[row_idx * len + 1] *
prior_box_height +
prior_box_center_y;
// convert center/size back to corner coordinates
output[idx * len] = target_box_center_x - target_box_width / 2;
output[idx * len + 1] = target_box_center_y - target_box_height / 2;
output[idx * len + 2] = target_box_center_x + target_box_width / 2;
output[idx * len + 3] = target_box_center_y + target_box_height / 2;
}
}
// GPU kernel wrapper for the box_coder op: dispatches to the encode or decode
// kernel depending on the "code_type" attribute. Inputs: PriorBox (col x len),
// PriorBoxVar (col x len), TargetBox (row x len); output is row x col x len.
template <typename T>
class BoxCoderCUDAKernel : public framework::OpKernel<T> {
public:
void Compute(const framework::ExecutionContext& context) const override {
PADDLE_ENFORCE(platform::is_gpu_place(context.GetPlace()),
"This kernel only runs on GPU device.");
auto* prior_box = context.Input<framework::Tensor>("PriorBox");
auto* prior_box_var = context.Input<framework::Tensor>("PriorBoxVar");
auto* target_box = context.Input<framework::LoDTensor>("TargetBox");
auto* output_box = context.Output<framework::Tensor>("OutputBox");
if (target_box->lod().size()) {
PADDLE_ENFORCE_EQ(target_box->lod().size(), 1,
"Only support 1 level of LoD.");
}
// row = number of target boxes, col = number of priors, len = box width (4)
auto row = target_box->dims()[0];
auto col = prior_box->dims()[0];
auto len = prior_box->dims()[1];
// one thread per (row, col) pair, 512 threads per block
int block = 512;
int grid = (row * col + block - 1) / block;
auto& device_ctx = context.cuda_device_context();
const T* prior_box_data = prior_box->data<T>();
const T* prior_box_var_data = prior_box_var->data<T>();
const T* target_box_data = target_box->data<T>();
output_box->mutable_data<T>({row, col, len}, context.GetPlace());
T* output = output_box->data<T>();
auto code_type = GetBoxCodeType(context.Attr<std::string>("code_type"));
if (code_type == BoxCodeType::kEncodeCenterSize) {
hipLaunchKernelGGL(( EncodeCenterSizeKernel<T>), dim3(grid), dim3(block), 0, device_ctx.stream(),
prior_box_data, prior_box_var_data, target_box_data, row, col, len,
output);
} else if (code_type == BoxCodeType::kDecodeCenterSize) {
hipLaunchKernelGGL(( DecodeCenterSizeKernel<T>), dim3(grid), dim3(block), 0, device_ctx.stream(),
prior_box_data, prior_box_var_data, target_box_data, row, col, len,
output);
}
}
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
REGISTER_OP_CUDA_KERNEL(box_coder, ops::BoxCoderCUDAKernel<float>,
ops::BoxCoderCUDAKernel<double>);
| 57af7f11066580f5526ce9c7090a0fc120820663.cu | /* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/box_coder_op.h"
#include "paddle/fluid/platform/cuda_helper.h"
namespace paddle {
namespace operators {
template <typename T>
__global__ void EncodeCenterSizeKernel(const T* prior_box_data,
const T* prior_box_var_data,
const T* target_box_data, const int row,
const int col, const int len,
T* output) {
const int idx = threadIdx.x + blockIdx.x * blockDim.x;
if (idx < row * col) {
const int row_idx = idx / col;
const int col_idx = idx % col;
T prior_box_width =
prior_box_data[col_idx * len + 2] - prior_box_data[col_idx * len];
T prior_box_height =
prior_box_data[col_idx * len + 3] - prior_box_data[col_idx * len + 1];
T prior_box_center_x =
(prior_box_data[col_idx * len + 2] + prior_box_data[col_idx * len]) / 2;
T prior_box_center_y = (prior_box_data[col_idx * len + 3] +
prior_box_data[col_idx * len + 1]) /
2;
T target_box_center_x =
(target_box_data[row_idx * len + 2] + target_box_data[row_idx * len]) /
2;
T target_box_center_y = (target_box_data[row_idx * len + 3] +
target_box_data[row_idx * len + 1]) /
2;
T target_box_width =
target_box_data[row_idx * len + 2] - target_box_data[row_idx * len];
T target_box_height =
target_box_data[row_idx * len + 3] - target_box_data[row_idx * len + 1];
output[idx * len] = (target_box_center_x - prior_box_center_x) /
prior_box_width / prior_box_var_data[col_idx * len];
output[idx * len + 1] = (target_box_center_y - prior_box_center_y) /
prior_box_height /
prior_box_var_data[col_idx * len + 1];
output[idx * len + 2] = log(fabs(target_box_width / prior_box_width)) /
prior_box_var_data[col_idx * len + 2];
output[idx * len + 3] = log(fabs(target_box_height / prior_box_height)) /
prior_box_var_data[col_idx * len + 3];
}
}
template <typename T>
__global__ void DecodeCenterSizeKernel(const T* prior_box_data,
const T* prior_box_var_data,
const T* target_box_data, const int row,
const int col, const int len,
T* output) {
const int idx = threadIdx.x + blockIdx.x * blockDim.x;
if (idx < row * col) {
const int row_idx = idx / col;
const int col_idx = idx % col;
T prior_box_width =
prior_box_data[col_idx * len + 2] - prior_box_data[col_idx * len];
T prior_box_height =
prior_box_data[col_idx * len + 3] - prior_box_data[col_idx * len + 1];
T prior_box_center_x =
(prior_box_data[col_idx * len + 2] + prior_box_data[col_idx * len]) / 2;
T prior_box_center_y = (prior_box_data[col_idx * len + 3] +
prior_box_data[col_idx * len + 1]) /
2;
T target_box_width = exp(prior_box_var_data[col_idx * len + 2] *
target_box_data[row_idx * len + 2]) *
prior_box_width;
T target_box_height = exp(prior_box_var_data[col_idx * len + 3] *
target_box_data[row_idx * len + 3]) *
prior_box_height;
T target_box_center_x = prior_box_var_data[col_idx * len] *
target_box_data[row_idx * len] *
prior_box_width +
prior_box_center_x;
T target_box_center_y = prior_box_var_data[col_idx * len + 1] *
target_box_data[row_idx * len + 1] *
prior_box_height +
prior_box_center_y;
output[idx * len] = target_box_center_x - target_box_width / 2;
output[idx * len + 1] = target_box_center_y - target_box_height / 2;
output[idx * len + 2] = target_box_center_x + target_box_width / 2;
output[idx * len + 3] = target_box_center_y + target_box_height / 2;
}
}
template <typename T>
class BoxCoderCUDAKernel : public framework::OpKernel<T> {
public:
void Compute(const framework::ExecutionContext& context) const override {
PADDLE_ENFORCE(platform::is_gpu_place(context.GetPlace()),
"This kernel only runs on GPU device.");
auto* prior_box = context.Input<framework::Tensor>("PriorBox");
auto* prior_box_var = context.Input<framework::Tensor>("PriorBoxVar");
auto* target_box = context.Input<framework::LoDTensor>("TargetBox");
auto* output_box = context.Output<framework::Tensor>("OutputBox");
if (target_box->lod().size()) {
PADDLE_ENFORCE_EQ(target_box->lod().size(), 1,
"Only support 1 level of LoD.");
}
auto row = target_box->dims()[0];
auto col = prior_box->dims()[0];
auto len = prior_box->dims()[1];
int block = 512;
int grid = (row * col + block - 1) / block;
auto& device_ctx = context.cuda_device_context();
const T* prior_box_data = prior_box->data<T>();
const T* prior_box_var_data = prior_box_var->data<T>();
const T* target_box_data = target_box->data<T>();
output_box->mutable_data<T>({row, col, len}, context.GetPlace());
T* output = output_box->data<T>();
auto code_type = GetBoxCodeType(context.Attr<std::string>("code_type"));
if (code_type == BoxCodeType::kEncodeCenterSize) {
EncodeCenterSizeKernel<T><<<grid, block, 0, device_ctx.stream()>>>(
prior_box_data, prior_box_var_data, target_box_data, row, col, len,
output);
} else if (code_type == BoxCodeType::kDecodeCenterSize) {
DecodeCenterSizeKernel<T><<<grid, block, 0, device_ctx.stream()>>>(
prior_box_data, prior_box_var_data, target_box_data, row, col, len,
output);
}
}
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
REGISTER_OP_CUDA_KERNEL(box_coder, ops::BoxCoderCUDAKernel<float>,
ops::BoxCoderCUDAKernel<double>);
|
01917370e46f93704ad344560dfd9bbc8cb483de.hip | // !!! This is a file automatically generated by hipify!!!
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include "common.h"
#include "cuda_common.cuh"
#include "hip/hip_runtime.h"
#include "hip/hip_runtime.h"
#include "device_launch_parameters.h"
#define BLOCK_SIZE 1024
template<unsigned int iblock_size>
__global__ void reduction_gmem_benchmark(int * input,int * temp, int size)
{
int tid = threadIdx.x;
int * i_data = input + blockDim.x * blockIdx.x;
//manual unrolling depending on block size
if (iblock_size >= 1024 && tid < 512)
i_data[tid] += i_data[tid + 512];
__syncthreads();
if (iblock_size >= 512 && tid < 256)
i_data[tid] += i_data[tid + 256];
__syncthreads();
if (iblock_size >= 256 && tid < 128)
i_data[tid] += i_data[tid + 128];
__syncthreads();
if (iblock_size >= 128 && tid < 64)
i_data[tid] += i_data[tid + 64];
__syncthreads();
//unrolling warp
if (tid < 32)
{
volatile int * vsmem = i_data;
vsmem[tid] += vsmem[tid + 32];
vsmem[tid] += vsmem[tid + 16];
vsmem[tid] += vsmem[tid + 8];
vsmem[tid] += vsmem[tid + 4];
vsmem[tid] += vsmem[tid + 2];
vsmem[tid] += vsmem[tid + 1];
}
if (tid == 0)
{
temp[blockIdx.x] = i_data[0];
}
}
template<unsigned int iblock_size>
__global__ void reduction_smem(int * input, int * temp, int size)
{
__shared__ int smem[BLOCK_SIZE];
int tid = threadIdx.x;
int * i_data = input + blockDim.x * blockIdx.x;
smem[tid] = i_data[tid];
__syncthreads();
//manual unrolling depending on block size
if (iblock_size >= 1024 && tid < 512)
i_data[tid] += i_data[tid + 512];
__syncthreads();
if (iblock_size >= 512 && tid < 256)
i_data[tid] += i_data[tid + 256];
__syncthreads();
if (iblock_size >= 256 && tid < 128)
i_data[tid] += i_data[tid + 128];
__syncthreads();
if (iblock_size >= 128 && tid < 64)
i_data[tid] += i_data[tid + 64];
__syncthreads();
//unrolling warp
if (tid < 32)
{
volatile int * vsmem = smem;
vsmem[tid] += vsmem[tid + 32];
vsmem[tid] += vsmem[tid + 16];
vsmem[tid] += vsmem[tid + 8];
vsmem[tid] += vsmem[tid + 4];
vsmem[tid] += vsmem[tid + 2];
vsmem[tid] += vsmem[tid + 1];
}
if (tid == 0)
{
temp[blockIdx.x] = i_data[0];
}
}
//int main(int argc, char ** argv)
//{
// printf("Running parallel reduction with complete unrolling kernel \n");
//
// int kernel_index = 0;
//
// if (argc >1)
// {
// kernel_index = 1;
// }
//
// int size = 1 << 22;
// int byte_size = size * sizeof(int);
// int block_size = BLOCK_SIZE;
//
// int * h_input, *h_ref;
// h_input = (int*)malloc(byte_size);
//
// initialize(h_input, size);
//
// int cpu_result = reduction_cpu(h_input, size);
//
// dim3 block(block_size);
// dim3 grid((size / block_size));
//
// int temp_array_byte_size = sizeof(int)* grid.x;
//
// h_ref = (int*)malloc(temp_array_byte_size);
//
// int * d_input, *d_temp;
// gpuErrchk(hipMalloc((void**)&d_input, byte_size));
// gpuErrchk(hipMalloc((void**)&d_temp, temp_array_byte_size));
//
// gpuErrchk(hipMemset(d_temp, 0, temp_array_byte_size));
// gpuErrchk(hipMemcpy(d_input, h_input, byte_size,
// hipMemcpyHostToDevice));
// if (kernel_index == 0)
// {
// printf("Kernel launch parameters || grid : %d, block : %d \n", grid.x, block.x);
//
// switch (block_size)
// {
// case 1024:
// reduction_gmem_benchmark <1024> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 512:
// reduction_gmem_benchmark <512> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 256:
// reduction_gmem_benchmark <256> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 128:
// reduction_gmem_benchmark <128> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 64:
// reduction_gmem_benchmark <64> << < grid, block >> > (d_input, d_temp, size);
// break;
// }
// }
// else
// {
// printf("Kernel launch parameters || grid : %d, block : %d \n", grid.x, block.x);
//
// switch (block_size)
// {
// case 1024:
// reduction_smem <1024> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 512:
// reduction_smem <512> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 256:
// reduction_smem <256> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 128:
// reduction_smem <128> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 64:
// reduction_smem <64> << < grid, block >> > (d_input, d_temp, size);
// break;
// }
// }
//
// gpuErrchk(hipDeviceSynchronize());
// gpuErrchk(hipMemcpy(h_ref, d_temp, temp_array_byte_size, hipMemcpyDeviceToHost));
//
// int gpu_result = 0;
// for (int i = 0; i < grid.x; i++)
// {
// gpu_result += h_ref[i];
// }
//
// compare_results(gpu_result, cpu_result);
//
// gpuErrchk(hipFree(d_input));
// gpuErrchk(hipFree(d_temp));
// free(h_input);
// free(h_ref);
//
// gpuErrchk(hipDeviceReset());
// return 0;
//} | 01917370e46f93704ad344560dfd9bbc8cb483de.cu | #include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include "common.h"
#include "cuda_common.cuh"
#include "cuda.h"
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#define BLOCK_SIZE 1024
template<unsigned int iblock_size>
__global__ void reduction_gmem_benchmark(int * input,int * temp, int size)
{
int tid = threadIdx.x;
int * i_data = input + blockDim.x * blockIdx.x;
//manual unrolling depending on block size
if (iblock_size >= 1024 && tid < 512)
i_data[tid] += i_data[tid + 512];
__syncthreads();
if (iblock_size >= 512 && tid < 256)
i_data[tid] += i_data[tid + 256];
__syncthreads();
if (iblock_size >= 256 && tid < 128)
i_data[tid] += i_data[tid + 128];
__syncthreads();
if (iblock_size >= 128 && tid < 64)
i_data[tid] += i_data[tid + 64];
__syncthreads();
//unrolling warp
if (tid < 32)
{
volatile int * vsmem = i_data;
vsmem[tid] += vsmem[tid + 32];
vsmem[tid] += vsmem[tid + 16];
vsmem[tid] += vsmem[tid + 8];
vsmem[tid] += vsmem[tid + 4];
vsmem[tid] += vsmem[tid + 2];
vsmem[tid] += vsmem[tid + 1];
}
if (tid == 0)
{
temp[blockIdx.x] = i_data[0];
}
}
template<unsigned int iblock_size>
__global__ void reduction_smem(int * input, int * temp, int size)
{
__shared__ int smem[BLOCK_SIZE];
int tid = threadIdx.x;
int * i_data = input + blockDim.x * blockIdx.x;
smem[tid] = i_data[tid];
__syncthreads();
//manual unrolling depending on block size
if (iblock_size >= 1024 && tid < 512)
i_data[tid] += i_data[tid + 512];
__syncthreads();
if (iblock_size >= 512 && tid < 256)
i_data[tid] += i_data[tid + 256];
__syncthreads();
if (iblock_size >= 256 && tid < 128)
i_data[tid] += i_data[tid + 128];
__syncthreads();
if (iblock_size >= 128 && tid < 64)
i_data[tid] += i_data[tid + 64];
__syncthreads();
//unrolling warp
if (tid < 32)
{
volatile int * vsmem = smem;
vsmem[tid] += vsmem[tid + 32];
vsmem[tid] += vsmem[tid + 16];
vsmem[tid] += vsmem[tid + 8];
vsmem[tid] += vsmem[tid + 4];
vsmem[tid] += vsmem[tid + 2];
vsmem[tid] += vsmem[tid + 1];
}
if (tid == 0)
{
temp[blockIdx.x] = i_data[0];
}
}
//int main(int argc, char ** argv)
//{
// printf("Running parallel reduction with complete unrolling kernel \n");
//
// int kernel_index = 0;
//
// if (argc >1)
// {
// kernel_index = 1;
// }
//
// int size = 1 << 22;
// int byte_size = size * sizeof(int);
// int block_size = BLOCK_SIZE;
//
// int * h_input, *h_ref;
// h_input = (int*)malloc(byte_size);
//
// initialize(h_input, size);
//
// int cpu_result = reduction_cpu(h_input, size);
//
// dim3 block(block_size);
// dim3 grid((size / block_size));
//
// int temp_array_byte_size = sizeof(int)* grid.x;
//
// h_ref = (int*)malloc(temp_array_byte_size);
//
// int * d_input, *d_temp;
// gpuErrchk(cudaMalloc((void**)&d_input, byte_size));
// gpuErrchk(cudaMalloc((void**)&d_temp, temp_array_byte_size));
//
// gpuErrchk(cudaMemset(d_temp, 0, temp_array_byte_size));
// gpuErrchk(cudaMemcpy(d_input, h_input, byte_size,
// cudaMemcpyHostToDevice));
// if (kernel_index == 0)
// {
// printf("Kernel launch parameters || grid : %d, block : %d \n", grid.x, block.x);
//
// switch (block_size)
// {
// case 1024:
// reduction_gmem_benchmark <1024> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 512:
// reduction_gmem_benchmark <512> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 256:
// reduction_gmem_benchmark <256> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 128:
// reduction_gmem_benchmark <128> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 64:
// reduction_gmem_benchmark <64> << < grid, block >> > (d_input, d_temp, size);
// break;
// }
// }
// else
// {
// printf("Kernel launch parameters || grid : %d, block : %d \n", grid.x, block.x);
//
// switch (block_size)
// {
// case 1024:
// reduction_smem <1024> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 512:
// reduction_smem <512> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 256:
// reduction_smem <256> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 128:
// reduction_smem <128> << < grid, block >> > (d_input, d_temp, size);
// break;
// case 64:
// reduction_smem <64> << < grid, block >> > (d_input, d_temp, size);
// break;
// }
// }
//
// gpuErrchk(cudaDeviceSynchronize());
// gpuErrchk(cudaMemcpy(h_ref, d_temp, temp_array_byte_size, cudaMemcpyDeviceToHost));
//
// int gpu_result = 0;
// for (int i = 0; i < grid.x; i++)
// {
// gpu_result += h_ref[i];
// }
//
// compare_results(gpu_result, cpu_result);
//
// gpuErrchk(cudaFree(d_input));
// gpuErrchk(cudaFree(d_temp));
// free(h_input);
// free(h_ref);
//
// gpuErrchk(cudaDeviceReset());
// return 0;
//} |
f2037ac6967abec7904a01560706a3ce94f47128.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#ifdef WITH_CUDA
#include "core/context_cuda.h"
#include "utils/op_kernel.h"
namespace dragon {
namespace kernel {
/*! Argmax <T = float32, Device = CUDA> */
template <typename T>
__global__ void _Argmax(
const int count,
const int axis_dim,
const int inner_dim,
const T neg_bound,
const T* x,
int64_t* indices) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int max_idx = -1; T max_val = neg_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val > max_val) {
max_val = val;
max_idx = j;
}
}
indices[idx] = max_idx;
}
}
template <typename T>
__global__ void _Argmax_v2(
const int count,
const int axis_dim,
const int inner_dim,
const T neg_bound,
const T* x,
int64_t* indices,
T* values) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int max_idx = -1; T max_val = neg_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val > max_val) {
max_val = val;
max_idx = j;
}
}
indices[idx] = max_idx;
values[idx] = max_val;
}
}
template<> void Argmax<float, CUDAContext>(
const int count,
const int axis_dim,
const int inner_dim,
const int top_k,
const float* x,
int64_t* indices,
float* values,
CUDAContext* ctx) {
CHECK_EQ(top_k, 1) << "top_k > 1 is not supported with CUDA";
if (values == nullptr) {
_Argmax<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
-FLT_MAX, x, indices);
} else {
_Argmax_v2<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
-FLT_MAX, x, indices, values);
}
}
/*! Argmin <T = float32, Device = CUDA> */
template <typename T>
__global__ void _Argmin(
const int count,
const int axis_dim,
const int inner_dim,
const T pos_bound,
const T* x,
int64_t* indices) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int min_idx = -1; T min_val = pos_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val < min_val) {
min_val = val;
min_idx = j;
}
}
indices[idx] = min_idx;
}
}
template <typename T>
__global__ void _Argmin_v2(
const int count,
const int axis_dim,
const int inner_dim,
const T pos_bound,
const T* x,
int64_t* indices,
T* values) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int min_idx = -1; T min_val = pos_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val < min_val) {
min_val = val;
min_idx = j;
}
}
indices[idx] = min_idx;
values[idx] = min_val;
}
}
template<> void Argmin<float, CUDAContext>(
const int count,
const int axis_dim,
const int inner_dim,
const int top_k,
const float* x,
int64_t* indices,
float* values,
CUDAContext* ctx) {
CHECK_EQ(top_k, 1) << "top_k > 1 is not supported with CUDA";
if (values == nullptr) {
_Argmin<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
FLT_MAX, x, indices);
} else {
_Argmin_v2<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
FLT_MAX, x, indices, values);
}
}
} // namespace kernel
} // namepsace dragon
#endif // WITH_CUDA | f2037ac6967abec7904a01560706a3ce94f47128.cu | #ifdef WITH_CUDA
#include "core/context_cuda.h"
#include "utils/op_kernel.h"
namespace dragon {
namespace kernel {
/*! Argmax <T = float32, Device = CUDA> */
template <typename T>
__global__ void _Argmax(
const int count,
const int axis_dim,
const int inner_dim,
const T neg_bound,
const T* x,
int64_t* indices) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int max_idx = -1; T max_val = neg_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val > max_val) {
max_val = val;
max_idx = j;
}
}
indices[idx] = max_idx;
}
}
template <typename T>
__global__ void _Argmax_v2(
const int count,
const int axis_dim,
const int inner_dim,
const T neg_bound,
const T* x,
int64_t* indices,
T* values) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int max_idx = -1; T max_val = neg_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val > max_val) {
max_val = val;
max_idx = j;
}
}
indices[idx] = max_idx;
values[idx] = max_val;
}
}
template<> void Argmax<float, CUDAContext>(
const int count,
const int axis_dim,
const int inner_dim,
const int top_k,
const float* x,
int64_t* indices,
float* values,
CUDAContext* ctx) {
CHECK_EQ(top_k, 1) << "top_k > 1 is not supported with CUDA";
if (values == nullptr) {
_Argmax<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
-FLT_MAX, x, indices);
} else {
_Argmax_v2<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
-FLT_MAX, x, indices, values);
}
}
/*! Argmin <T = float32, Device = CUDA> */
template <typename T>
__global__ void _Argmin(
const int count,
const int axis_dim,
const int inner_dim,
const T pos_bound,
const T* x,
int64_t* indices) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int min_idx = -1; T min_val = pos_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val < min_val) {
min_val = val;
min_idx = j;
}
}
indices[idx] = min_idx;
}
}
template <typename T>
__global__ void _Argmin_v2(
const int count,
const int axis_dim,
const int inner_dim,
const T pos_bound,
const T* x,
int64_t* indices,
T* values) {
CUDA_1D_KERNEL_LOOP(idx, count) {
const int oix = idx / inner_dim;
const int iix = idx % inner_dim;
int min_idx = -1; T min_val = pos_bound;
for (int j = 0; j < axis_dim; ++j) {
const T val = x[(oix * axis_dim + j)
* inner_dim + iix];
if (val < min_val) {
min_val = val;
min_idx = j;
}
}
indices[idx] = min_idx;
values[idx] = min_val;
}
}
template<> void Argmin<float, CUDAContext>(
const int count,
const int axis_dim,
const int inner_dim,
const int top_k,
const float* x,
int64_t* indices,
float* values,
CUDAContext* ctx) {
CHECK_EQ(top_k, 1) << "top_k > 1 is not supported with CUDA";
if (values == nullptr) {
_Argmin<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
FLT_MAX, x, indices);
} else {
_Argmin_v2<float>
<< < CUDA_BLOCKS(count), CUDA_THREADS,
0, ctx->cuda_stream() >> >
(count, axis_dim, inner_dim,
FLT_MAX, x, indices, values);
}
}
} // namespace kernel
} // namepsace dragon
#endif // WITH_CUDA |
6efc6cced7ef5e2103bce3e250a0db04ff6ad608.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "includes.h"
#define N 10
__global__ void add(int *a, int *b, int *c)
{
int tid = blockIdx.x;
if (tid < N)
c[tid] = a[tid] + b[tid];
} | 6efc6cced7ef5e2103bce3e250a0db04ff6ad608.cu | #include "includes.h"
#define N 10
__global__ void add(int *a, int *b, int *c)
{
int tid = blockIdx.x;
if (tid < N)
c[tid] = a[tid] + b[tid];
} |
3e9f738cbb98500e9a3783a19bfaf39cc069c376.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include "timer.h"
#include "check.h"
#define SOFTENING 1e-9f
/*
* Each body contains x, y, and z coordinate positions,
* as well as velocities in the x, y, and z directions.
*/
typedef struct { float x, y, z, vx, vy, vz; } Body;
/*
* Do not modify this function. A constraint of this exercise is
* that it remain a host function.
*/
void randomizeBodies(float *data, int n) {
for (int i = 0; i < n; i++) {
data[i] = 2.0f * (rand() / (float)RAND_MAX) - 1.0f;
}
}
/*
* This function calculates the gravitational impact of all bodies in the system
* on all others, but does not update their positions.
*/
void bodyForce(Body *p, float dt, int n) {
for (int i = 0; i < n; ++i) {
float Fx = 0.0f; float Fy = 0.0f; float Fz = 0.0f;
for (int j = 0; j < n; j++) {
float dx = p[j].x - p[i].x;
float dy = p[j].y - p[i].y;
float dz = p[j].z - p[i].z;
float distSqr = dx*dx + dy*dy + dz*dz + SOFTENING;
float invDist = rsqrtf(distSqr);
float invDist3 = invDist * invDist * invDist;
Fx += dx * invDist3; Fy += dy * invDist3; Fz += dz * invDist3;
}
p[i].vx += dt*Fx; p[i].vy += dt*Fy; p[i].vz += dt*Fz;
}
}
__global__ void calcBodyForceKernel_adv(Body *p, float dt, int n, int tPerSeg)
{
int bi = blockIdx.x * (blockDim.x / tPerSeg);
int i = threadIdx.x / tPerSeg;
int j = threadIdx.x % tPerSeg;
__shared__ float bodyVel[1024 * 3];
bodyVel[3 * i + 0] = 0;
bodyVel[3 * i + 1] = 0;
bodyVel[3 * i + 2] = 0;
__syncthreads();
float Fx = 0.0f;
float Fy = 0.0f;
float Fz = 0.0f;
for (; j < n; j += tPerSeg)
{
float dx = p[j].x - p[bi + i].x;
float dy = p[j].y - p[bi + i].y;
float dz = p[j].z - p[bi + i].z;
float distSqr = dx * dx + dy * dy + dz * dz + SOFTENING;
float invDist = rsqrtf(distSqr);
float invDist3 = invDist * invDist * invDist;
Fx += dx * invDist3; Fy += dy * invDist3; Fz += dz * invDist3;
}
atomicAdd(&(bodyVel[3 * i + 0]), dt * Fx);
atomicAdd(&(bodyVel[3 * i + 1]), dt * Fy);
atomicAdd(&(bodyVel[3 * i + 2]), dt * Fz);
__syncthreads();
p[bi + i].vx += bodyVel[3 * i + 0];
p[bi + i].vy += bodyVel[3 * i + 1];
p[bi + i].vz += bodyVel[3 * i + 2];
}
__global__ void ApplyVel(Body *p, float dt, int n)
{
int index = blockIdx.x * blockDim.x + threadIdx.x;
int gStride = gridDim.x * blockDim.x;
for (int i = index; i < n; i += gStride)
{
p[i].x += p[i].vx * dt;
p[i].y += p[i].vy * dt;
p[i].z += p[i].vz * dt;
}
}
int main(const int argc, const char** argv) {
/*
* Do not change the value for `nBodies` here. If you would like to modify it,
* pass values into the command line.
*/
int nBodies = 2<<11;
int salt = 0;
if (argc > 1) nBodies = 2<<atoi(argv[1]);
/*
* This salt is for assessment reasons. Tampering with it will result in automatic failure.
*/
if (argc > 2) salt = atoi(argv[2]);
const float dt = 0.01f; // time step
const int nIters = 10; // simulation iterations
int deviceId;
hipGetDevice(&deviceId);
int bytes = nBodies * sizeof(Body);
float *buf;
float *buf_device;
hipMalloc(&buf_device, bytes);
hipHostMalloc(&buf, bytes);
Body *p = (Body*) buf_device;
int tPerSeg = 16;
int threads = 512;
int blocks = (nBodies / threads) * tPerSeg;
/*
* As a constraint of this exercise, `randomizeBodies` must remain a host function.
*/
randomizeBodies(buf, 6 * nBodies); // Init pos / vel data
double totalTime = 0.0;
// Copy the buffers
hipMemcpy(
buf_device,
buf,
bytes,
hipMemcpyHostToDevice);
/*
* This simulation will run for 10 cycles of time, calculating gravitational
* interaction amongst bodies, and adjusting their positions to reflect.
*/
StartTimer();
/*******************************************************************/
// Do not modify these 2 lines of code.
for (int iter = 0; iter < nIters; iter++) {
//StartTimer();
/*******************************************************************/
/*
* You will likely wish to refactor the work being done in `bodyForce`,
* as well as the work to integrate the positions.
*/
// Calculate force
hipLaunchKernelGGL(( calcBodyForceKernel_adv), dim3(blocks), dim3(threads), 0, 0, p, dt, nBodies, tPerSeg);
/*
* This position integration cannot occur until this round of `bodyForce` has completed.
* Also, the next round of `bodyForce` cannot begin until the integration is complete.
*/
// Apply vel for segments
hipLaunchKernelGGL(( ApplyVel), dim3(blocks), dim3(threads), 0, 0, p, dt, nBodies);
// hipDeviceSynchronize();
/*******************************************************************/
// Do not modify the code in this section.
// const double tElapsed = GetTimer() / 1000.0;
// totalTime += tElapsed;
}
// Collect results
hipDeviceSynchronize();
const double tElapsed = GetTimer() / 1000.0;
totalTime += tElapsed;
// Copy the buffers
hipMemcpy(
buf,
buf_device,
bytes,
hipMemcpyDeviceToHost);
hipDeviceSynchronize();
double avgTime = totalTime / (double)(nIters);
float billionsOfOpsPerSecond = 1e-9 * nBodies * nBodies / avgTime;
#ifdef ASSESS
checkPerformance(buf, billionsOfOpsPerSecond, salt);
#else
checkAccuracy(buf, nBodies);
printf("%d Bodies: average %0.3f Billion Interactions / second\n", nBodies, billionsOfOpsPerSecond);
salt += 1;
#endif
/*******************************************************************/
/*
* Feel free to modify code below.
*/
hipHostFree(buf);
hipFree(buf_device);
}
| 3e9f738cbb98500e9a3783a19bfaf39cc069c376.cu | #include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include "timer.h"
#include "check.h"
#define SOFTENING 1e-9f
/*
* Each body contains x, y, and z coordinate positions,
* as well as velocities in the x, y, and z directions.
*/
typedef struct { float x, y, z, vx, vy, vz; } Body;
/*
* Do not modify this function. A constraint of this exercise is
* that it remain a host function.
*/
void randomizeBodies(float *data, int n) {
for (int i = 0; i < n; i++) {
data[i] = 2.0f * (rand() / (float)RAND_MAX) - 1.0f;
}
}
/*
* This function calculates the gravitational impact of all bodies in the system
* on all others, but does not update their positions.
*/
void bodyForce(Body *p, float dt, int n) {
for (int i = 0; i < n; ++i) {
float Fx = 0.0f; float Fy = 0.0f; float Fz = 0.0f;
for (int j = 0; j < n; j++) {
float dx = p[j].x - p[i].x;
float dy = p[j].y - p[i].y;
float dz = p[j].z - p[i].z;
float distSqr = dx*dx + dy*dy + dz*dz + SOFTENING;
float invDist = rsqrtf(distSqr);
float invDist3 = invDist * invDist * invDist;
Fx += dx * invDist3; Fy += dy * invDist3; Fz += dz * invDist3;
}
p[i].vx += dt*Fx; p[i].vy += dt*Fy; p[i].vz += dt*Fz;
}
}
__global__ void calcBodyForceKernel_adv(Body *p, float dt, int n, int tPerSeg)
{
int bi = blockIdx.x * (blockDim.x / tPerSeg);
int i = threadIdx.x / tPerSeg;
int j = threadIdx.x % tPerSeg;
__shared__ float bodyVel[1024 * 3];
bodyVel[3 * i + 0] = 0;
bodyVel[3 * i + 1] = 0;
bodyVel[3 * i + 2] = 0;
__syncthreads();
float Fx = 0.0f;
float Fy = 0.0f;
float Fz = 0.0f;
for (; j < n; j += tPerSeg)
{
float dx = p[j].x - p[bi + i].x;
float dy = p[j].y - p[bi + i].y;
float dz = p[j].z - p[bi + i].z;
float distSqr = dx * dx + dy * dy + dz * dz + SOFTENING;
float invDist = rsqrtf(distSqr);
float invDist3 = invDist * invDist * invDist;
Fx += dx * invDist3; Fy += dy * invDist3; Fz += dz * invDist3;
}
atomicAdd(&(bodyVel[3 * i + 0]), dt * Fx);
atomicAdd(&(bodyVel[3 * i + 1]), dt * Fy);
atomicAdd(&(bodyVel[3 * i + 2]), dt * Fz);
__syncthreads();
p[bi + i].vx += bodyVel[3 * i + 0];
p[bi + i].vy += bodyVel[3 * i + 1];
p[bi + i].vz += bodyVel[3 * i + 2];
}
__global__ void ApplyVel(Body *p, float dt, int n)
{
int index = blockIdx.x * blockDim.x + threadIdx.x;
int gStride = gridDim.x * blockDim.x;
for (int i = index; i < n; i += gStride)
{
p[i].x += p[i].vx * dt;
p[i].y += p[i].vy * dt;
p[i].z += p[i].vz * dt;
}
}
int main(const int argc, const char** argv) {
/*
* Do not change the value for `nBodies` here. If you would like to modify it,
* pass values into the command line.
*/
int nBodies = 2<<11;
int salt = 0;
if (argc > 1) nBodies = 2<<atoi(argv[1]);
/*
* This salt is for assessment reasons. Tampering with it will result in automatic failure.
*/
if (argc > 2) salt = atoi(argv[2]);
const float dt = 0.01f; // time step
const int nIters = 10; // simulation iterations
int deviceId;
cudaGetDevice(&deviceId);
int bytes = nBodies * sizeof(Body);
float *buf;
float *buf_device;
cudaMalloc(&buf_device, bytes);
cudaMallocHost(&buf, bytes);
Body *p = (Body*) buf_device;
int tPerSeg = 16;
int threads = 512;
int blocks = (nBodies / threads) * tPerSeg;
/*
* As a constraint of this exercise, `randomizeBodies` must remain a host function.
*/
randomizeBodies(buf, 6 * nBodies); // Init pos / vel data
double totalTime = 0.0;
// Copy the buffers
cudaMemcpy(
buf_device,
buf,
bytes,
cudaMemcpyHostToDevice);
/*
* This simulation will run for 10 cycles of time, calculating gravitational
* interaction amongst bodies, and adjusting their positions to reflect.
*/
StartTimer();
/*******************************************************************/
// Do not modify these 2 lines of code.
for (int iter = 0; iter < nIters; iter++) {
//StartTimer();
/*******************************************************************/
/*
* You will likely wish to refactor the work being done in `bodyForce`,
* as well as the work to integrate the positions.
*/
// Calculate force
calcBodyForceKernel_adv<<<blocks, threads, 0>>>(p, dt, nBodies, tPerSeg);
/*
* This position integration cannot occur until this round of `bodyForce` has completed.
* Also, the next round of `bodyForce` cannot begin until the integration is complete.
*/
// Apply vel for segments
ApplyVel<<<blocks, threads>>>(p, dt, nBodies);
// cudaDeviceSynchronize();
/*******************************************************************/
// Do not modify the code in this section.
// const double tElapsed = GetTimer() / 1000.0;
// totalTime += tElapsed;
}
// Collect results
cudaDeviceSynchronize();
const double tElapsed = GetTimer() / 1000.0;
totalTime += tElapsed;
// Copy the buffers
cudaMemcpy(
buf,
buf_device,
bytes,
cudaMemcpyDeviceToHost);
cudaDeviceSynchronize();
double avgTime = totalTime / (double)(nIters);
float billionsOfOpsPerSecond = 1e-9 * nBodies * nBodies / avgTime;
#ifdef ASSESS
checkPerformance(buf, billionsOfOpsPerSecond, salt);
#else
checkAccuracy(buf, nBodies);
printf("%d Bodies: average %0.3f Billion Interactions / second\n", nBodies, billionsOfOpsPerSecond);
salt += 1;
#endif
/*******************************************************************/
/*
* Feel free to modify code below.
*/
cudaFreeHost(buf);
cudaFree(buf_device);
}
|
b999e6561f0ca888b3295f2f73baa4a3ed8e173e.hip | // !!! This is a file automatically generated by hipify!!!
#include "flamegpu/gpu/CUDAEnsemble.h"
#include <algorithm>
#include <cstdlib>
#include <memory>
#include <thread>
#include <set>
#include <queue>
#include <mutex>
#include <condition_variable>
#include "flamegpu/version.h"
#include "flamegpu/model/ModelDescription.h"
#include "flamegpu/sim/RunPlanVector.h"
#include "flamegpu/util/detail/compute_capability.cuh"
#include "flamegpu/util/detail/SteadyClockTimer.h"
#include "flamegpu/gpu/CUDASimulation.h"
#include "flamegpu/io/StateWriterFactory.h"
#include "flamegpu/util/detail/filesystem.h"
#include "flamegpu/sim/LoggingConfig.h"
#include "flamegpu/sim/SimRunner.h"
#include "flamegpu/sim/LogFrame.h"
#include "flamegpu/sim/SimLogger.h"
namespace flamegpu {
// Construct a CUDAEnsemble from a model description.
// A clone of the underlying model is taken, so later changes to _model do not
// affect this ensemble. argc/argv are forwarded to initialise() for command
// line parsing (which may exit the process on invalid arguments).
CUDAEnsemble::CUDAEnsemble(const ModelDescription& _model, int argc, const char** argv)
: model(_model.model->clone()) {
initialise(argc, argv);
}
// Explicitly-defined destructor; all members clean up via their own destructors.
CUDAEnsemble::~CUDAEnsemble() {
// Nothing to do
}
// Execute every RunPlan in `plans` across the configured CUDA devices.
// Work is distributed to a pool of SimRunner worker threads
// (config.concurrent_runs per usable device); results are collected into
// run_logs and, when an output directory/format are both configured, exported
// to file by a background SimLogger thread. Blocks until all runs and log
// export have finished. Throws exception::InvalidArgument for plan/model
// mismatches or unusable output directories.
void CUDAEnsemble::simulate(const RunPlanVector &plans) {
// Validate that RunPlan model matches CUDAEnsemble model
if (*plans.environment != this->model->environment->properties) {
THROW exception::InvalidArgument("RunPlan is for a different ModelDescription, in CUDAEnsemble::simulate()");
}
// Validate/init output directories
if (!config.out_directory.empty()) {
// Validate out format is right
config.out_format = io::StateWriterFactory::detectSupportedFileExt(config.out_format);
if (config.out_format.empty()) {
THROW exception::InvalidArgument("The out_directory config option also requires the out_format options to be set to a suitable type (e.g. 'json', 'xml'), in CUDAEnsemble::simulate()");
}
// Create any missing directories
try {
util::detail::filesystem::recursive_create_dir(config.out_directory);
} catch (const std::exception &e) {
THROW exception::InvalidArgument("Unable to use output directory '%s', in CUDAEnsemble::simulate(): %s", config.out_directory.c_str(), e.what());
}
// Pre-create any per-plan output subdirectories up-front, so worker threads
// never have to create them mid-run.
for (const auto &p : plans) {
const auto subdir = p.getOutputSubdirectory();
if (!subdir.empty()) {
path sub_path = config.out_directory;
try {
sub_path.append(subdir);
util::detail::filesystem::recursive_create_dir(sub_path);
} catch (const std::exception &e) {
THROW exception::InvalidArgument("Unable to use output subdirectory '%s', in CUDAEnsemble::simulate(): %s", sub_path.generic_string().c_str(), e.what());
}
}
}
}
// Purge run logs, and resize ready for new runs
// Resize means we can setup logs during execution out of order, without risk of list being reallocated
run_logs.clear();
run_logs.resize(plans.size());
// Workout how many devices and runner we will be executing
int ct = -1;
gpuErrchk(hipGetDeviceCount(&ct));
// Use the explicitly configured device set if given, otherwise all devices.
std::set<int> devices;
if (config.devices.size()) {
devices = config.devices;
} else {
for (int i = 0; i < ct; ++i) {
devices.emplace(i);
}
}
// Check that each device is capable, and init cuda context
// (hipFree(nullptr) forces lazy context creation to happen now, not mid-run).
for (auto d = devices.begin(); d != devices.end(); ++d) {
if (!util::detail::compute_capability::checkComputeCapability(*d)) {
fprintf(stderr, "FLAMEGPU2 has not been built with an appropriate compute capability for device %d, this device will not be used.\n", *d);
// NOTE(review): if the erased element is the first in the set, decrementing
// the iterator returned by erase() (i.e. decrementing begin()) is undefined
// behaviour — worth confirming/fixing upstream.
d = devices.erase(d);
--d;
} else {
gpuErrchk(hipSetDevice(*d));
gpuErrchk(hipFree(nullptr));
}
}
// Return to device 0 (or check original device first?)
gpuErrchk(hipSetDevice(0));
// Init runners, devices * concurrent runs
// err_ct counts failed runs; next_run is the shared counter from which the
// runner threads atomically claim plan indices.
std::atomic<unsigned int> err_ct = {0};
std::atomic<unsigned int> next_run = {0};
const size_t TOTAL_RUNNERS = devices.size() * config.concurrent_runs;
// Raw malloc + placement-new (below) so each SimRunner slot is constructed
// exactly once; construction appears to start the worker thread.
SimRunner *runners = static_cast<SimRunner *>(malloc(sizeof(SimRunner) * TOTAL_RUNNERS));
// Log Time (We can't use CUDA events here, due to device resets)
auto ensemble_timer = util::detail::SteadyClockTimer();
ensemble_timer.start();
// Reset the elapsed time.
ensemble_elapsed_time = 0.;
// Logging thread-safety items
std::queue<unsigned int> log_export_queue;
std::mutex log_export_queue_mutex;
std::condition_variable log_export_queue_cdn;
// Init with placement new
{
if (!config.quiet)
printf("\rCUDAEnsemble progress: %u/%u", 0, static_cast<unsigned int>(plans.size()));
unsigned int i = 0;
for (auto &d : devices) {
for (unsigned int j = 0; j < config.concurrent_runs; ++j) {
new (&runners[i++]) SimRunner(model, err_ct, next_run, plans, step_log_config, exit_log_config, d, j, !config.quiet, run_logs, log_export_queue, log_export_queue_mutex, log_export_queue_cdn);
}
}
}
// Init log worker
SimLogger *log_worker = nullptr;
if (!config.out_directory.empty() && !config.out_format.empty()) {
log_worker = new SimLogger(run_logs, plans, config.out_directory, config.out_format, log_export_queue, log_export_queue_mutex, log_export_queue_cdn);
} else if (!config.out_directory.empty() ^ !config.out_format.empty()) {
// Exactly one of the two options is set; file logging is skipped with a warning.
fprintf(stderr, "Warning: Only 1 of out_directory and out_format is set, both must be set for logging to commence to file.\n");
}
// Wait for all runners to exit
for (unsigned int i = 0; i < TOTAL_RUNNERS; ++i) {
runners[i].thread.join();
// Explicit destructor call pairs with the placement-new above.
runners[i].~SimRunner();
}
// Notify logger to exit
if (log_worker) {
{
std::lock_guard<std::mutex> lck(log_export_queue_mutex);
// UINT_MAX is the sentinel that tells the logger thread to shut down.
log_export_queue.push(UINT_MAX);
}
log_export_queue_cdn.notify_one();
log_worker->thread.join();
delete log_worker;
log_worker = nullptr;
}
// Record and store the elapsed time
ensemble_timer.stop();
ensemble_elapsed_time = ensemble_timer.getElapsedSeconds();
// Ensemble has finished, print summary
if (!config.quiet) {
printf("\rCUDAEnsemble completed %u runs successfully!\n", static_cast<unsigned int>(plans.size() - err_ct));
if (err_ct)
printf("There were a total of %u errors.\n", err_ct.load());
}
if (config.timing) {
printf("Ensemble time elapsed: %fs\n", ensemble_elapsed_time);
}
// Free memory
free(runners);
}
// Parse command line arguments into config; exits the process with
// EXIT_FAILURE when arguments are invalid or --help was printed.
void CUDAEnsemble::initialise(int argc, const char** argv) {
if (!checkArgs(argc, argv)) {
exit(EXIT_FAILURE);
}
/* Disabled as this is printed prior to quiet being accessible
// If verbose, output the flamegpu version.
if (!config.quiet) {
fprintf(stdout, "FLAME GPU %s\n", flamegpu::VERSION_FULL);
}
*/
}
// Parse optional command line arguments into config.
// Returns non-zero (true) on success; 0 (false) when --help was printed or an
// argument was invalid (the caller then exits).
// NOTE(review): declared as int but returns bool literals throughout.
int CUDAEnsemble::checkArgs(int argc, const char** argv) {
// Parse optional args
int i = 1;
for (; i < argc; i++) {
// Get arg as lowercase
std::string arg(argv[i]);
std::transform(arg.begin(), arg.end(), arg.begin(), [](unsigned char c) { return std::use_facet< std::ctype<char>>(std::locale()).tolower(c); });
// -h/--help. Print the help output and exit.
if (arg.compare("--help") == 0 || arg.compare("-h") == 0) {
printHelp(argv[0]);
return false;
}
// --concurrent <runs>, Number of concurrent simulations to run per device
if (arg.compare("--concurrent") == 0 || arg.compare("-c") == 0) {
if (i + 1 >= argc) {
fprintf(stderr, "%s requires a trailing argument\n", arg.c_str());
return false;
}
// strtoul base 0: accepts decimal, 0x hex and leading-0 octal forms.
config.concurrent_runs = static_cast<unsigned int>(strtoul(argv[++i], nullptr, 0));
continue;
}
// --devices <string>, comma separated list of uints
if (arg.compare("--devices") == 0 || arg.compare("-d") == 0) {
if (i + 1 >= argc) {
fprintf(stderr, "%s requires a trailing argument\n", arg.c_str());
return false;
}
// Split and parse string
std::string device_string = argv[++i];
device_string += ","; // Append comma, to catch final item
int max_id = 0; // Catch max device so we can validate it exists
size_t pos;
while ((pos = device_string.find(",")) != std::string::npos) {
const unsigned int id = static_cast<unsigned int>(strtoul(device_string.substr(0, pos).c_str(), nullptr, 0));
// strtoul returns 0 on parse failure, so a parsed 0 is only accepted when
// the token literally begins "0," — anything else yielding 0 is an error.
if (id == 0 && (device_string.length() < 2 || (device_string[0] != '0' || device_string[1] != ','))) {
fprintf(stderr, "'%s' is not a valid device index.\n", device_string.substr(0, pos).c_str());
printHelp(argv[0]);
return false;
}
max_id = static_cast<int>(id) > max_id ? id : max_id;
config.devices.emplace(id);
device_string.erase(0, pos + 1);
}
// Validate the highest requested device id against the device count.
int ct = -1;
gpuErrchk(hipGetDeviceCount(&ct));
if (max_id >= ct) {
fprintf(stderr, "Device id %u exceeds available CUDA devices %d\n", max_id, ct);
printHelp(argv[0]);
return false;
}
continue;
}
// -o/--out <directory> <filetype>, consumes TWO trailing arguments.
if (arg.compare("--out") == 0 || arg.compare("-o") == 0) {
if (i + 2 >= argc) {
fprintf(stderr, "%s requires two trailing arguments\n", arg.c_str());
return false;
}
// Validate output directory is valid (and recursively create it if necessary)
try {
path out_directory = argv[++i];
util::detail::filesystem::recursive_create_dir(out_directory);
config.out_directory = out_directory.generic_string();
} catch (const std::exception &e) {
// Catch any exceptions, probably std::filesystem::filesystem_error, but other implementation defined errors also possible
fprintf(stderr, "Unable to use '%s' as output directory:\n%s\n", argv[i], e.what());
return false;
}
// Validate output format is available in io module
config.out_format = io::StateWriterFactory::detectSupportedFileExt(argv[++i]);
if (config.out_format.empty()) {
fprintf(stderr, "'%s' is not a supported output file type.\n", argv[i]);
return false;
}
continue;
}
// -q/--quiet, Don't report progress to console.
if (arg.compare("--quiet") == 0 || arg.compare("-q") == 0) {
config.quiet = true;
continue;
}
// -t/--timing, Output timing information to stdout
if (arg.compare("--timing") == 0 || arg.compare("-t") == 0) {
config.timing = true;
continue;
}
// Anything unrecognised is a hard failure.
fprintf(stderr, "Unexpected argument: %s\n", arg.c_str());
printHelp(argv[0]);
return false;
}
return true;
}
// Print command line usage for CUDAEnsemble to stdout.
// `executable` is argv[0], echoed in the usage line.
void CUDAEnsemble::printHelp(const char *executable) {
    printf("FLAME GPU %s\n", flamegpu::VERSION_FULL);
    printf("Usage: %s [optional arguments]\n", executable);
    printf("Optional Arguments:\n");
    const char *line_fmt = "%-18s %s\n";
    // Flag / description pairs, emitted in declaration order.
    // An empty flag string produces a continuation line for the previous option.
    const char *options[][2] = {
        {"-h, --help", "show this help message and exit"},
        {"-d, --devices <device ids>", "Comma separated list of device ids to be used"},
        {"", "By default, all available devices will be used."},
        {"-c, --concurrent <runs>", "Number of concurrent simulations to run per device"},
        {"", "By default, 4 will be used."},
        {"-o, --out <directory> <filetype>", "Directory and filetype for ensemble outputs"},
        {"-q, --quiet", "Don't print progress information to console"},
        {"-t, --timing", "Output timing information to stdout"},
    };
    for (const auto &opt : options) {
        printf(line_fmt, opt[0], opt[1]);
    }
}
// Configure per-step logging for subsequent simulate() calls.
// Throws exception::InvalidArgument if stepConfig was built for a different model.
void CUDAEnsemble::setStepLog(const StepLoggingConfig &stepConfig) {
// Validate ModelDescription matches
if (*stepConfig.model != *model) {
THROW exception::InvalidArgument("Model descriptions attached to LoggingConfig and CUDAEnsemble do not match, in CUDAEnsemble::setStepLog()\n");
}
// Set internal config
// A copy is stored, so the caller's config may be modified/destroyed afterwards.
step_log_config = std::make_shared<StepLoggingConfig>(stepConfig);
}
// Configure exit (end-of-run) logging for subsequent simulate() calls.
// Throws exception::InvalidArgument if exitConfig was built for a different model.
void CUDAEnsemble::setExitLog(const LoggingConfig &exitConfig) {
// Validate ModelDescription matches
if (*exitConfig.model != *model) {
THROW exception::InvalidArgument("Model descriptions attached to LoggingConfig and CUDAEnsemble do not match, in CUDAEnsemble::setExitLog()\n");
}
// Set internal config
// A copy is stored, so the caller's config may be modified/destroyed afterwards.
exit_log_config = std::make_shared<LoggingConfig>(exitConfig);
}
// Access the logs collected by the most recent simulate() call.
// The reference remains owned by this CUDAEnsemble and is invalidated by the
// next simulate() call, which clears and resizes run_logs.
const std::vector<RunLog> &CUDAEnsemble::getLogs() {
return run_logs;
}
} // namespace flamegpu
| b999e6561f0ca888b3295f2f73baa4a3ed8e173e.cu | #include "flamegpu/gpu/CUDAEnsemble.h"
#include <algorithm>
#include <cstdlib>
#include <memory>
#include <thread>
#include <set>
#include <queue>
#include <mutex>
#include <condition_variable>
#include "flamegpu/version.h"
#include "flamegpu/model/ModelDescription.h"
#include "flamegpu/sim/RunPlanVector.h"
#include "flamegpu/util/detail/compute_capability.cuh"
#include "flamegpu/util/detail/SteadyClockTimer.h"
#include "flamegpu/gpu/CUDASimulation.h"
#include "flamegpu/io/StateWriterFactory.h"
#include "flamegpu/util/detail/filesystem.h"
#include "flamegpu/sim/LoggingConfig.h"
#include "flamegpu/sim/SimRunner.h"
#include "flamegpu/sim/LogFrame.h"
#include "flamegpu/sim/SimLogger.h"
namespace flamegpu {
// Construct a CUDAEnsemble from a model description.
// A clone of the underlying model is taken, so later changes to _model do not
// affect this ensemble. argc/argv are forwarded to initialise() for command
// line parsing (which may exit the process on invalid arguments).
CUDAEnsemble::CUDAEnsemble(const ModelDescription& _model, int argc, const char** argv)
: model(_model.model->clone()) {
initialise(argc, argv);
}
// Explicitly-defined destructor; all members clean up via their own destructors.
CUDAEnsemble::~CUDAEnsemble() {
// Nothing to do
}
// Execute every RunPlan in `plans` across the configured CUDA devices.
// Work is distributed to a pool of SimRunner worker threads
// (config.concurrent_runs per usable device); results are collected into
// run_logs and, when an output directory/format are both configured, exported
// to file by a background SimLogger thread. Blocks until all runs and log
// export have finished. Throws exception::InvalidArgument for plan/model
// mismatches or unusable output directories.
void CUDAEnsemble::simulate(const RunPlanVector &plans) {
// Validate that RunPlan model matches CUDAEnsemble model
if (*plans.environment != this->model->environment->properties) {
THROW exception::InvalidArgument("RunPlan is for a different ModelDescription, in CUDAEnsemble::simulate()");
}
// Validate/init output directories
if (!config.out_directory.empty()) {
// Validate out format is right
config.out_format = io::StateWriterFactory::detectSupportedFileExt(config.out_format);
if (config.out_format.empty()) {
THROW exception::InvalidArgument("The out_directory config option also requires the out_format options to be set to a suitable type (e.g. 'json', 'xml'), in CUDAEnsemble::simulate()");
}
// Create any missing directories
try {
util::detail::filesystem::recursive_create_dir(config.out_directory);
} catch (const std::exception &e) {
THROW exception::InvalidArgument("Unable to use output directory '%s', in CUDAEnsemble::simulate(): %s", config.out_directory.c_str(), e.what());
}
// Pre-create any per-plan output subdirectories up-front, so worker threads
// never have to create them mid-run.
for (const auto &p : plans) {
const auto subdir = p.getOutputSubdirectory();
if (!subdir.empty()) {
path sub_path = config.out_directory;
try {
sub_path.append(subdir);
util::detail::filesystem::recursive_create_dir(sub_path);
} catch (const std::exception &e) {
THROW exception::InvalidArgument("Unable to use output subdirectory '%s', in CUDAEnsemble::simulate(): %s", sub_path.generic_string().c_str(), e.what());
}
}
}
}
// Purge run logs, and resize ready for new runs
// Resize means we can setup logs during execution out of order, without risk of list being reallocated
run_logs.clear();
run_logs.resize(plans.size());
// Workout how many devices and runner we will be executing
int ct = -1;
gpuErrchk(cudaGetDeviceCount(&ct));
// Use the explicitly configured device set if given, otherwise all devices.
std::set<int> devices;
if (config.devices.size()) {
devices = config.devices;
} else {
for (int i = 0; i < ct; ++i) {
devices.emplace(i);
}
}
// Check that each device is capable, and init cuda context
// (cudaFree(nullptr) forces lazy context creation to happen now, not mid-run).
for (auto d = devices.begin(); d != devices.end(); ++d) {
if (!util::detail::compute_capability::checkComputeCapability(*d)) {
fprintf(stderr, "FLAMEGPU2 has not been built with an appropriate compute capability for device %d, this device will not be used.\n", *d);
// NOTE(review): if the erased element is the first in the set, decrementing
// the iterator returned by erase() (i.e. decrementing begin()) is undefined
// behaviour — worth confirming/fixing upstream.
d = devices.erase(d);
--d;
} else {
gpuErrchk(cudaSetDevice(*d));
gpuErrchk(cudaFree(nullptr));
}
}
// Return to device 0 (or check original device first?)
gpuErrchk(cudaSetDevice(0));
// Init runners, devices * concurrent runs
// err_ct counts failed runs; next_run is the shared counter from which the
// runner threads atomically claim plan indices.
std::atomic<unsigned int> err_ct = {0};
std::atomic<unsigned int> next_run = {0};
const size_t TOTAL_RUNNERS = devices.size() * config.concurrent_runs;
// Raw malloc + placement-new (below) so each SimRunner slot is constructed
// exactly once; construction appears to start the worker thread.
SimRunner *runners = static_cast<SimRunner *>(malloc(sizeof(SimRunner) * TOTAL_RUNNERS));
// Log Time (We can't use CUDA events here, due to device resets)
auto ensemble_timer = util::detail::SteadyClockTimer();
ensemble_timer.start();
// Reset the elapsed time.
ensemble_elapsed_time = 0.;
// Logging thread-safety items
std::queue<unsigned int> log_export_queue;
std::mutex log_export_queue_mutex;
std::condition_variable log_export_queue_cdn;
// Init with placement new
{
if (!config.quiet)
printf("\rCUDAEnsemble progress: %u/%u", 0, static_cast<unsigned int>(plans.size()));
unsigned int i = 0;
for (auto &d : devices) {
for (unsigned int j = 0; j < config.concurrent_runs; ++j) {
new (&runners[i++]) SimRunner(model, err_ct, next_run, plans, step_log_config, exit_log_config, d, j, !config.quiet, run_logs, log_export_queue, log_export_queue_mutex, log_export_queue_cdn);
}
}
}
// Init log worker
SimLogger *log_worker = nullptr;
if (!config.out_directory.empty() && !config.out_format.empty()) {
log_worker = new SimLogger(run_logs, plans, config.out_directory, config.out_format, log_export_queue, log_export_queue_mutex, log_export_queue_cdn);
} else if (!config.out_directory.empty() ^ !config.out_format.empty()) {
// Exactly one of the two options is set; file logging is skipped with a warning.
fprintf(stderr, "Warning: Only 1 of out_directory and out_format is set, both must be set for logging to commence to file.\n");
}
// Wait for all runners to exit
for (unsigned int i = 0; i < TOTAL_RUNNERS; ++i) {
runners[i].thread.join();
// Explicit destructor call pairs with the placement-new above.
runners[i].~SimRunner();
}
// Notify logger to exit
if (log_worker) {
{
std::lock_guard<std::mutex> lck(log_export_queue_mutex);
// UINT_MAX is the sentinel that tells the logger thread to shut down.
log_export_queue.push(UINT_MAX);
}
log_export_queue_cdn.notify_one();
log_worker->thread.join();
delete log_worker;
log_worker = nullptr;
}
// Record and store the elapsed time
ensemble_timer.stop();
ensemble_elapsed_time = ensemble_timer.getElapsedSeconds();
// Ensemble has finished, print summary
if (!config.quiet) {
printf("\rCUDAEnsemble completed %u runs successfully!\n", static_cast<unsigned int>(plans.size() - err_ct));
if (err_ct)
printf("There were a total of %u errors.\n", err_ct.load());
}
if (config.timing) {
printf("Ensemble time elapsed: %fs\n", ensemble_elapsed_time);
}
// Free memory
free(runners);
}
// Parse command line arguments into config; exits the process with
// EXIT_FAILURE when arguments are invalid or --help was printed.
void CUDAEnsemble::initialise(int argc, const char** argv) {
if (!checkArgs(argc, argv)) {
exit(EXIT_FAILURE);
}
/* Disabled as this is printed prior to quiet being accessible
// If verbose, output the flamegpu version.
if (!config.quiet) {
fprintf(stdout, "FLAME GPU %s\n", flamegpu::VERSION_FULL);
}
*/
}
// Parse optional command line arguments into config.
// Returns non-zero (true) on success; 0 (false) when --help was printed or an
// argument was invalid (the caller then exits).
// NOTE(review): declared as int but returns bool literals throughout.
int CUDAEnsemble::checkArgs(int argc, const char** argv) {
// Parse optional args
int i = 1;
for (; i < argc; i++) {
// Get arg as lowercase
std::string arg(argv[i]);
std::transform(arg.begin(), arg.end(), arg.begin(), [](unsigned char c) { return std::use_facet< std::ctype<char>>(std::locale()).tolower(c); });
// -h/--help. Print the help output and exit.
if (arg.compare("--help") == 0 || arg.compare("-h") == 0) {
printHelp(argv[0]);
return false;
}
// --concurrent <runs>, Number of concurrent simulations to run per device
if (arg.compare("--concurrent") == 0 || arg.compare("-c") == 0) {
if (i + 1 >= argc) {
fprintf(stderr, "%s requires a trailing argument\n", arg.c_str());
return false;
}
// strtoul base 0: accepts decimal, 0x hex and leading-0 octal forms.
config.concurrent_runs = static_cast<unsigned int>(strtoul(argv[++i], nullptr, 0));
continue;
}
// --devices <string>, comma separated list of uints
if (arg.compare("--devices") == 0 || arg.compare("-d") == 0) {
if (i + 1 >= argc) {
fprintf(stderr, "%s requires a trailing argument\n", arg.c_str());
return false;
}
// Split and parse string
std::string device_string = argv[++i];
device_string += ","; // Append comma, to catch final item
int max_id = 0; // Catch max device so we can validate it exists
size_t pos;
while ((pos = device_string.find(",")) != std::string::npos) {
const unsigned int id = static_cast<unsigned int>(strtoul(device_string.substr(0, pos).c_str(), nullptr, 0));
// strtoul returns 0 on parse failure, so a parsed 0 is only accepted when
// the token literally begins "0," — anything else yielding 0 is an error.
if (id == 0 && (device_string.length() < 2 || (device_string[0] != '0' || device_string[1] != ','))) {
fprintf(stderr, "'%s' is not a valid device index.\n", device_string.substr(0, pos).c_str());
printHelp(argv[0]);
return false;
}
max_id = static_cast<int>(id) > max_id ? id : max_id;
config.devices.emplace(id);
device_string.erase(0, pos + 1);
}
// Validate the highest requested device id against the device count.
int ct = -1;
gpuErrchk(cudaGetDeviceCount(&ct));
if (max_id >= ct) {
fprintf(stderr, "Device id %u exceeds available CUDA devices %d\n", max_id, ct);
printHelp(argv[0]);
return false;
}
continue;
}
// -o/--out <directory> <filetype>, consumes TWO trailing arguments.
if (arg.compare("--out") == 0 || arg.compare("-o") == 0) {
if (i + 2 >= argc) {
fprintf(stderr, "%s requires two trailing arguments\n", arg.c_str());
return false;
}
// Validate output directory is valid (and recursively create it if necessary)
try {
path out_directory = argv[++i];
util::detail::filesystem::recursive_create_dir(out_directory);
config.out_directory = out_directory.generic_string();
} catch (const std::exception &e) {
// Catch any exceptions, probably std::filesystem::filesystem_error, but other implementation defined errors also possible
fprintf(stderr, "Unable to use '%s' as output directory:\n%s\n", argv[i], e.what());
return false;
}
// Validate output format is available in io module
config.out_format = io::StateWriterFactory::detectSupportedFileExt(argv[++i]);
if (config.out_format.empty()) {
fprintf(stderr, "'%s' is not a supported output file type.\n", argv[i]);
return false;
}
continue;
}
// -q/--quiet, Don't report progress to console.
if (arg.compare("--quiet") == 0 || arg.compare("-q") == 0) {
config.quiet = true;
continue;
}
// -t/--timing, Output timing information to stdout
if (arg.compare("--timing") == 0 || arg.compare("-t") == 0) {
config.timing = true;
continue;
}
// Anything unrecognised is a hard failure.
fprintf(stderr, "Unexpected argument: %s\n", arg.c_str());
printHelp(argv[0]);
return false;
}
return true;
}
// Print command line usage for CUDAEnsemble to stdout.
// `executable` is argv[0], echoed in the usage line.
void CUDAEnsemble::printHelp(const char *executable) {
    printf("FLAME GPU %s\n", flamegpu::VERSION_FULL);
    printf("Usage: %s [optional arguments]\n", executable);
    printf("Optional Arguments:\n");
    const char *line_fmt = "%-18s %s\n";
    // Flag / description pairs, emitted in declaration order.
    // An empty flag string produces a continuation line for the previous option.
    const char *options[][2] = {
        {"-h, --help", "show this help message and exit"},
        {"-d, --devices <device ids>", "Comma separated list of device ids to be used"},
        {"", "By default, all available devices will be used."},
        {"-c, --concurrent <runs>", "Number of concurrent simulations to run per device"},
        {"", "By default, 4 will be used."},
        {"-o, --out <directory> <filetype>", "Directory and filetype for ensemble outputs"},
        {"-q, --quiet", "Don't print progress information to console"},
        {"-t, --timing", "Output timing information to stdout"},
    };
    for (const auto &opt : options) {
        printf(line_fmt, opt[0], opt[1]);
    }
}
// Configure per-step logging for subsequent simulate() calls.
// Throws exception::InvalidArgument if stepConfig was built for a different model.
void CUDAEnsemble::setStepLog(const StepLoggingConfig &stepConfig) {
// Validate ModelDescription matches
if (*stepConfig.model != *model) {
THROW exception::InvalidArgument("Model descriptions attached to LoggingConfig and CUDAEnsemble do not match, in CUDAEnsemble::setStepLog()\n");
}
// Set internal config
// A copy is stored, so the caller's config may be modified/destroyed afterwards.
step_log_config = std::make_shared<StepLoggingConfig>(stepConfig);
}
// Configure exit (end-of-run) logging for subsequent simulate() calls.
// Throws exception::InvalidArgument if exitConfig was built for a different model.
void CUDAEnsemble::setExitLog(const LoggingConfig &exitConfig) {
// Validate ModelDescription matches
if (*exitConfig.model != *model) {
THROW exception::InvalidArgument("Model descriptions attached to LoggingConfig and CUDAEnsemble do not match, in CUDAEnsemble::setExitLog()\n");
}
// Set internal config
// A copy is stored, so the caller's config may be modified/destroyed afterwards.
exit_log_config = std::make_shared<LoggingConfig>(exitConfig);
}
// Access the logs collected by the most recent simulate() call.
// The reference remains owned by this CUDAEnsemble and is invalidated by the
// next simulate() call, which clears and resizes run_logs.
const std::vector<RunLog> &CUDAEnsemble::getLogs() {
return run_logs;
}
} // namespace flamegpu
|
13aa6c394c0c38927448331c7d6a4abe68084328.hip | // !!! This is a file automatically generated by hipify!!!
// High level matrix multiplication on GPU using CUDA with Thrust, CURAND and
// CUBLAS C(m,n) = A(m,k) * B(k,n)
#include <rocblas.h>
#include <hiprand/hiprand.h>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <thrust/device_vector.h>
#include <thrust/host_vector.h>
// Fill the array A(nr_rows_A, nr_cols_A) with random numbers on GPU
void GPU_fill_rand(float* A, int nr_rows_A, int nr_cols_A) {
// Create a pseudo-random number generator
hiprandGenerator_t prng;
hiprandCreateGenerator(&prng, HIPRAND_RNG_PSEUDO_DEFAULT);
// Set the seed for the random number generator using the system clock
hiprandSetPseudoRandomGeneratorSeed(prng, (unsigned long long)clock());
// Fill the array with random numbers on the device
hiprandGenerateUniform(prng, A, nr_rows_A * nr_cols_A);
}
// Multiply the arrays A and B on GPU and save the result in C
// C(m,n) = A(m,k) * B(k,n)
void gpu_blas_mmul(const float* A,
const float* B,
float* C,
const int m,
const int k,
const int n) {
int lda = m, ldb = k, ldc = m;
const float alf = 1;
const float bet = 0;
const float* alpha = &alf;
const float* beta = &bet;
// Create a handle for CUBLAS
hipblasHandle_t handle;
hipblasCreate(&handle);
hipEvent_t start, stop;
hipEventCreate(&start);
hipEventCreate(&stop);
hipEventRecord(start);
// Do the actual multiplication
hipblasSgemm(handle, HIPBLAS_OP_N, HIPBLAS_OP_N, m, n, k, alpha, A, lda, B, ldb,
beta, C, ldc);
hipEventRecord(stop);
hipEventSynchronize(stop);
float msecs = 0;
hipEventElapsedTime(&msecs, start, stop);
std::cout << "cublasSGEMM Elapsed Time on GPU: " << msecs << " ms.\n";
float numOps = 2 * 3 * 3;
std::cout << "Efficiency of the program: " << numOps / (msecs * 1000)
<< " GFLOPS.\n\n";
// Destroy the handle
hipblasDestroy(handle);
}
// Print matrix A(nr_rows_A, nr_cols_A) storage in column-major format
// Print matrix A(nr_rows_A, nr_cols_A) stored in column-major order, one row
// per line. Element (row, col) lives at A[col * nr_rows_A + row]; every
// element access on a device_vector triggers an implicit device-to-host
// transfer, so this is for small debug dumps only.
void print_matrix(const thrust::device_vector<float>& A,
                  int nr_rows_A,
                  int nr_cols_A) {
  for (int row = 0; row < nr_rows_A; ++row) {
    for (int col = 0; col < nr_cols_A; ++col) {
      std::cout << A[col * nr_rows_A + row] << " ";
    }
    std::cout << std::endl;
  }
  std::cout << std::endl;
}
// Demo driver: build two random 3x3 device matrices, multiply them with
// the BLAS SGEMM wrapper, and print inputs and result (column-major).
int main() {
// Allocate 3 arrays on CPU
// (dimension bookkeeping only; the data lives in the device_vectors below)
int nr_rows_A, nr_cols_A, nr_rows_B, nr_cols_B, nr_rows_C, nr_cols_C;
// for simplicity we are going to use square arrays
nr_rows_A = nr_cols_A = nr_rows_B = nr_cols_B = nr_rows_C = nr_cols_C = 3;
thrust::device_vector<float> d_A(nr_rows_A * nr_cols_A),
d_B(nr_rows_B * nr_cols_B), d_C(nr_rows_C * nr_cols_C);
// Fill the arrays A and B on GPU with random numbers
GPU_fill_rand(thrust::raw_pointer_cast(&d_A[0]), nr_rows_A, nr_cols_A);
GPU_fill_rand(thrust::raw_pointer_cast(&d_B[0]), nr_rows_B, nr_cols_B);
// Optionally we can print the data
std::cout << "A =" << std::endl;
print_matrix(d_A, nr_rows_A, nr_cols_A);
std::cout << "B =" << std::endl;
print_matrix(d_B, nr_rows_B, nr_cols_B);
// Multiply A and B on GPU
// (d_C is overwritten entirely since the GEMM uses beta = 0)
gpu_blas_mmul(
thrust::raw_pointer_cast(&d_A[0]), thrust::raw_pointer_cast(&d_B[0]),
thrust::raw_pointer_cast(&d_C[0]), nr_rows_A, nr_cols_A, nr_cols_B);
// Print the result
std::cout << "C =" << std::endl;
print_matrix(d_C, nr_rows_C, nr_cols_C);
return 0;
} | 13aa6c394c0c38927448331c7d6a4abe68084328.cu | // High level matrix multiplication on GPU using CUDA with Thrust, CURAND and
// CUBLAS C(m,n) = A(m,k) * B(k,n)
#include <cublas_v2.h>
#include <curand.h>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <thrust/device_vector.h>
#include <thrust/host_vector.h>
// Fill the array A(nr_rows_A, nr_cols_A) with random numbers on GPU
// Fill the array A(nr_rows_A, nr_cols_A) with random numbers on GPU.
// A is a device pointer; the values are uniformly distributed floats.
// A fresh generator is created each call and seeded from the host clock,
// so results are not reproducible across runs.
void GPU_fill_rand(float* A, int nr_rows_A, int nr_cols_A) {
  // Create a pseudo-random number generator
  curandGenerator_t prng;
  curandCreateGenerator(&prng, CURAND_RNG_PSEUDO_DEFAULT);
  // Set the seed for the random number generator using the system clock
  curandSetPseudoRandomGeneratorSeed(prng, (unsigned long long)clock());
  // Fill the array with random numbers on the device
  curandGenerateUniform(prng, A, nr_rows_A * nr_cols_A);
  // BUG FIX: the generator was previously leaked on every call; release it.
  curandDestroyGenerator(prng);
}
// Multiply the arrays A and B on GPU and save the result in C
// C(m,n) = A(m,k) * B(k,n)
// Multiply the arrays A and B on GPU and save the result in C
// C(m,n) = A(m,k) * B(k,n)
// A, B, C are device pointers to dense, column-major matrices (cuBLAS
// convention). The GEMM is timed with CUDA events and GFLOP/s is reported.
void gpu_blas_mmul(const float* A,
                   const float* B,
                   float* C,
                   const int m,
                   const int k,
                   const int n) {
  // Leading dimensions for column-major storage.
  int lda = m, ldb = k, ldc = m;
  const float alf = 1;
  const float bet = 0;
  const float* alpha = &alf;
  const float* beta = &bet;
  // Create a handle for CUBLAS
  cublasHandle_t handle;
  cublasCreate(&handle);
  cudaEvent_t start, stop;
  cudaEventCreate(&start);
  cudaEventCreate(&stop);
  cudaEventRecord(start);
  // Do the actual multiplication: C = 1*A*B + 0*C
  cublasSgemm(handle, CUBLAS_OP_N, CUBLAS_OP_N, m, n, k, alpha, A, lda, B, ldb,
              beta, C, ldc);
  cudaEventRecord(stop);
  cudaEventSynchronize(stop);
  float msecs = 0;
  cudaEventElapsedTime(&msecs, start, stop);
  std::cout << "cublasSGEMM Elapsed Time on GPU: " << msecs << " ms.\n";
  // BUG FIX: the FLOP count was hard-coded as 2*3*3, wrong for any size other
  // than the 3x3 demo (and even there it dropped the k factor). A GEMM
  // performs 2*m*n*k floating point operations (one multiply + one add).
  // BUG FIX: GFLOP/s = ops / (seconds * 1e9) = ops / (msecs * 1e6); the old
  // denominator (msecs * 1000) mis-scaled the reported figure by 1000x.
  const double numOps = 2.0 * m * n * k;
  std::cout << "Efficiency of the program: " << numOps / (msecs * 1e6)
            << " GFLOPS.\n\n";
  // Release the timing events (previously leaked) and the handle.
  cudaEventDestroy(start);
  cudaEventDestroy(stop);
  // Destroy the handle
  cublasDestroy(handle);
}
// Print matrix A(nr_rows_A, nr_cols_A) storage in column-major format
// Print matrix A(nr_rows_A, nr_cols_A) stored in column-major order, one row
// per line. Element (row, col) lives at A[col * nr_rows_A + row]; every
// element access on a device_vector triggers an implicit device-to-host
// transfer, so this is for small debug dumps only.
void print_matrix(const thrust::device_vector<float>& A,
                  int nr_rows_A,
                  int nr_cols_A) {
  for (int row = 0; row < nr_rows_A; ++row) {
    for (int col = 0; col < nr_cols_A; ++col) {
      std::cout << A[col * nr_rows_A + row] << " ";
    }
    std::cout << std::endl;
  }
  std::cout << std::endl;
}
// Demo driver: build two random 3x3 device matrices, multiply them with
// the cuBLAS SGEMM wrapper, and print inputs and result (column-major).
int main() {
// Allocate 3 arrays on CPU
// (dimension bookkeeping only; the data lives in the device_vectors below)
int nr_rows_A, nr_cols_A, nr_rows_B, nr_cols_B, nr_rows_C, nr_cols_C;
// for simplicity we are going to use square arrays
nr_rows_A = nr_cols_A = nr_rows_B = nr_cols_B = nr_rows_C = nr_cols_C = 3;
thrust::device_vector<float> d_A(nr_rows_A * nr_cols_A),
d_B(nr_rows_B * nr_cols_B), d_C(nr_rows_C * nr_cols_C);
// Fill the arrays A and B on GPU with random numbers
GPU_fill_rand(thrust::raw_pointer_cast(&d_A[0]), nr_rows_A, nr_cols_A);
GPU_fill_rand(thrust::raw_pointer_cast(&d_B[0]), nr_rows_B, nr_cols_B);
// Optionally we can print the data
std::cout << "A =" << std::endl;
print_matrix(d_A, nr_rows_A, nr_cols_A);
std::cout << "B =" << std::endl;
print_matrix(d_B, nr_rows_B, nr_cols_B);
// Multiply A and B on GPU
// (d_C is overwritten entirely since the GEMM uses beta = 0)
gpu_blas_mmul(
thrust::raw_pointer_cast(&d_A[0]), thrust::raw_pointer_cast(&d_B[0]),
thrust::raw_pointer_cast(&d_C[0]), nr_rows_A, nr_cols_A, nr_cols_B);
// Print the result
std::cout << "C =" << std::endl;
print_matrix(d_C, nr_rows_C, nr_cols_C);
return 0;
} |
2bd47b1599a9379d30ab397544eabf32c4604e67.hip | // !!! This is a file automatically generated by hipify!!!
#include <algorithm>
#include <fstream>
#include <cmath>
#include <map>
#include <stdio.h>
#include <stdlib.h>
#include <cusp/io/matrix_market.h>
#include <cusp/csr_matrix.h>
#include <cusp/multiply.h>
#include <cusp/blas.h>
#include <timer.h>
#include "hipsparse.h"
// -----------------------------------------------------------------------------
// Macro to obtain a random number between two specified values
// -----------------------------------------------------------------------------
#define RAND(L,H) ((L) + ((H)-(L)) * (float)rand()/(float)RAND_MAX)
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
#ifdef WIN32
# define ISNAN(A) (_isnan(A))
#else
# define ISNAN(A) (isnan(A))
#endif
// -----------------------------------------------------------------------------
// Typedefs
// -----------------------------------------------------------------------------
typedef double REAL;
typedef double PREC_REAL;
typedef typename cusp::csr_matrix<int, REAL, cusp::device_memory> Matrix;
typedef typename cusp::array1d<REAL, cusp::device_memory> Vector;
typedef typename cusp::array1d<REAL, cusp::host_memory> VectorH;
typedef typename cusp::array1d<PREC_REAL, cusp::device_memory> PrecVector;
// -----------------------------------------------------------------------------
using std::cout;
using std::cerr;
using std::cin;
using std::endl;
using std::string;
using std::vector;
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const float *values,
hipsparseHandle_t& handle, hipsparseMatDescr_t& descrA,
const float *x, float *y);
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const double *values,
hipsparseHandle_t& handle, hipsparseMatDescr_t& descrA,
const double *x, double *y);
// -----------------------------------------------------------------------------
// MAIN
// -----------------------------------------------------------------------------
// Benchmark driver: load a MatrixMarket file into a device CSR matrix, then
// time sparse CSR SpMV (b = A*x) over repeated runs and print the average.
// Usage: driver_cusparse_csr <matrix_file>
int main(int argc, char** argv)
{
// Set up the problem to be solved.
string fileMat;
if (argc < 2) {
cerr << "Usage: ./driver_cusparse_csr ${MATRIX_FILE_NAME}" << endl;
exit(-1);
}
fileMat = argv[1];
cout << fileMat << endl;
// Get matrix and rhs.
// b receives the SpMV result; x is filled with host-side random values in
// [1, 5] and copied to the device by the `x = x_h` assignment.
Matrix A;
Vector b;
Vector x;
cusp::io::read_matrix_market_file(A, fileMat);
b.resize(A.num_rows);
{
VectorH x_h(A.num_rows);
for (int i = 0; i < A.num_rows; i++)
x_h[i] = RAND(2,10) / 2;
x = x_h;
}
// General (non-triangular) zero-based CSR descriptor.
// NOTE(review): fill mode / diag type are presumably only consulted for
// triangular ops and set here defensively — confirm against the library docs.
hipsparseHandle_t handle;
hipsparseMatDescr_t descrA;
hipsparseCreate(&handle);
hipsparseCreateMatDescr(&descrA);
hipsparseSetMatType(descrA, HIPSPARSE_MATRIX_TYPE_GENERAL);
hipsparseSetMatDiagType(descrA, HIPSPARSE_DIAG_TYPE_NON_UNIT);
hipsparseSetMatIndexBase(descrA, HIPSPARSE_INDEX_BASE_ZERO);
hipsparseSetMatFillMode(descrA, HIPSPARSE_FILL_MODE_LOWER);
CUDATimer timer;
int counter = 0;
double elapsed = 0.0;
// 10 timed runs; the first iteration is treated as warm-up and excluded
// from the average (counter/elapsed only accumulate for i > 0).
for (int i = 0; i < 10; i++) {
timer.Start();
spmv(A.num_rows, A.num_entries, thrust::raw_pointer_cast(&A.row_offsets[0]), thrust::raw_pointer_cast(&A.column_indices[0]),
thrust::raw_pointer_cast(&A.values[0]), handle, descrA, thrust::raw_pointer_cast(&x[0]), thrust::raw_pointer_cast(&b[0]));
timer.Stop();
if (i > 0) {
counter ++;
elapsed += timer.getElapsed();
}
}
// Average over the 9 non-warm-up iterations.
elapsed /= counter;
cout << "cuSparse CSR: " << elapsed << endl;
hipsparseDestroyMatDescr(descrA);
hipsparseDestroy(handle);
return 0;
}
// Single-precision CSR SpMV: y = 1.0f * A * x + 0.0f * y for an n x n matrix
// with nnz nonzeros, using the descriptor configured by the caller.
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const float *values,
          hipsparseHandle_t& handle, hipsparseMatDescr_t& descrA,
          const float *x, float *y)
{
    const float alpha = 1.f;
    const float beta = 0.f;
    hipsparseScsrmv(handle, HIPSPARSE_OPERATION_NON_TRANSPOSE, n, n, nnz,
                    &alpha, descrA, values, row_offsets, column_indices,
                    x, &beta, y);
}
// Double-precision CSR SpMV: y = 1.0 * A * x + 0.0 * y for an n x n matrix
// with nnz nonzeros, using the descriptor configured by the caller.
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const double *values,
          hipsparseHandle_t& handle, hipsparseMatDescr_t& descrA,
          const double *x, double *y)
{
    const double alpha = 1.0;
    const double beta = 0.0;
    hipsparseDcsrmv(handle, HIPSPARSE_OPERATION_NON_TRANSPOSE, n, n, nnz,
                    &alpha, descrA, values, row_offsets, column_indices,
                    x, &beta, y);
}
| 2bd47b1599a9379d30ab397544eabf32c4604e67.cu | #include <algorithm>
#include <fstream>
#include <cmath>
#include <map>
#include <stdio.h>
#include <stdlib.h>
#include <cusp/io/matrix_market.h>
#include <cusp/csr_matrix.h>
#include <cusp/multiply.h>
#include <cusp/blas.h>
#include <timer.h>
#include "cusparse.h"
// -----------------------------------------------------------------------------
// Macro to obtain a random number between two specified values
// -----------------------------------------------------------------------------
#define RAND(L,H) ((L) + ((H)-(L)) * (float)rand()/(float)RAND_MAX)
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
#ifdef WIN32
# define ISNAN(A) (_isnan(A))
#else
# define ISNAN(A) (isnan(A))
#endif
// -----------------------------------------------------------------------------
// Typedefs
// -----------------------------------------------------------------------------
typedef double REAL;
typedef double PREC_REAL;
typedef typename cusp::csr_matrix<int, REAL, cusp::device_memory> Matrix;
typedef typename cusp::array1d<REAL, cusp::device_memory> Vector;
typedef typename cusp::array1d<REAL, cusp::host_memory> VectorH;
typedef typename cusp::array1d<PREC_REAL, cusp::device_memory> PrecVector;
// -----------------------------------------------------------------------------
using std::cout;
using std::cerr;
using std::cin;
using std::endl;
using std::string;
using std::vector;
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const float *values,
cusparseHandle_t& handle, cusparseMatDescr_t& descrA,
const float *x, float *y);
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const double *values,
cusparseHandle_t& handle, cusparseMatDescr_t& descrA,
const double *x, double *y);
// -----------------------------------------------------------------------------
// MAIN
// -----------------------------------------------------------------------------
// Benchmark driver: times cuSPARSE CSR SpMV (b = A * x) for a matrix read
// from a Matrix Market file. Usage: ./driver_cusparse_csr <matrix.mtx>.
// Prints the mean time of 9 runs (first of 10 iterations discarded as warm-up).
int main(int argc, char** argv)
{
// Set up the problem to be solved.
string fileMat;
if (argc < 2) {
cerr << "Usage: ./driver_cusparse_csr ${MATRIX_FILE_NAME}" << endl;
exit(-1);
}
fileMat = argv[1];
cout << fileMat << endl;
// Get matrix and rhs.
Matrix A;
Vector b;
Vector x;
cusp::io::read_matrix_market_file(A, fileMat);
b.resize(A.num_rows);
// Fill x on the host with random integers (RAND(2,10) / 2 uses integer
// division), then transfer to the device via cusp's assignment.
{
VectorH x_h(A.num_rows);
for (int i = 0; i < A.num_rows; i++)
x_h[i] = RAND(2,10) / 2;
x = x_h;
}
// General (non-symmetric), zero-based CSR descriptor.
// NOTE(review): the return codes of all cusparse* calls below are ignored;
// a failed create makes every subsequent call fail silently — consider
// checking them.
cusparseHandle_t handle;
cusparseMatDescr_t descrA;
cusparseCreate(&handle);
cusparseCreateMatDescr(&descrA);
cusparseSetMatType(descrA, CUSPARSE_MATRIX_TYPE_GENERAL);
cusparseSetMatDiagType(descrA, CUSPARSE_DIAG_TYPE_NON_UNIT);
cusparseSetMatIndexBase(descrA, CUSPARSE_INDEX_BASE_ZERO);
cusparseSetMatFillMode(descrA, CUSPARSE_FILL_MODE_LOWER);
// Run SpMV 10 times; iteration 0 is a warm-up and excluded from the average.
CUDATimer timer;
int counter = 0;
double elapsed = 0.0;
for (int i = 0; i < 10; i++) {
timer.Start();
spmv(A.num_rows, A.num_entries, thrust::raw_pointer_cast(&A.row_offsets[0]), thrust::raw_pointer_cast(&A.column_indices[0]),
thrust::raw_pointer_cast(&A.values[0]), handle, descrA, thrust::raw_pointer_cast(&x[0]), thrust::raw_pointer_cast(&b[0]));
timer.Stop();
if (i > 0) {
counter ++;
elapsed += timer.getElapsed();
}
}
elapsed /= counter;
cout << "cuSparse CSR: " << elapsed << endl;
cusparseDestroyMatDescr(descrA);
cusparseDestroy(handle);
return 0;
}
// Single-precision CSR SpMV: y = 1.0f * A * x + 0.0f * y for an n x n matrix
// with nnz nonzeros, using the descriptor configured by the caller.
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const float *values,
          cusparseHandle_t& handle, cusparseMatDescr_t& descrA,
          const float *x, float *y)
{
    const float alpha = 1.f;
    const float beta = 0.f;
    cusparseScsrmv(handle, CUSPARSE_OPERATION_NON_TRANSPOSE, n, n, nnz,
                   &alpha, descrA, values, row_offsets, column_indices,
                   x, &beta, y);
}
// Double-precision CSR SpMV: y = 1.0 * A * x + 0.0 * y for an n x n matrix
// with nnz nonzeros, using the descriptor configured by the caller.
void spmv(int n, int nnz, const int *row_offsets, const int *column_indices, const double *values,
          cusparseHandle_t& handle, cusparseMatDescr_t& descrA,
          const double *x, double *y)
{
    const double alpha = 1.0;
    const double beta = 0.0;
    cusparseDcsrmv(handle, CUSPARSE_OPERATION_NON_TRANSPOSE, n, n, nnz,
                   &alpha, descrA, values, row_offsets, column_indices,
                   x, &beta, y);
}
|
ba4be4fc9ccbbd5d35a5e55d246d94287cafd507.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include "String.h"
#define N 10
// Element-wise vector add: c[i] = a[i] + b[i] for i in [0, N).
// Generalized from `tid = blockIdx.x` to the standard flat 1-D index so the
// kernel is correct for any launch configuration; with the original
// <<<N, 1>>> launch blockDim.x == 1 and threadIdx.x == 0, so the index
// reduces to blockIdx.x and behavior is unchanged.
__global__ void add(int *a, int *b, int * c)
{
    int tid = blockIdx.x * blockDim.x + threadIdx.x;
    if (tid < N)  // guard the grid tail
        c[tid] = a[tid] + b[tid];
}
// Demo driver:
//  1) adds two N-element vectors on the GPU and prints the result;
//  2) (disabled under #if 0) enumerates devices and prints their names;
//  3) selects the device closest to compute capability 1.3.
// NOTE(review): return codes of the hip* calls are not checked.
int main()
{
    int a[N], b[N], c[N];
    int *dev_a, *dev_b, *dev_c;
    hipMalloc((void**)&dev_a, N*sizeof(int));
    hipMalloc((void**)&dev_b, N*sizeof(int));
    hipMalloc((void**)&dev_c, N*sizeof(int));
    for(int i = 0; i < N; ++i)
    {
        a[i] = -i;
        b[i] = i*i;
    }
    hipMemcpy(dev_a, a, N*sizeof(int), hipMemcpyHostToDevice);
    hipMemcpy(dev_b, b, N*sizeof(int), hipMemcpyHostToDevice);
    // BUG FIX: the original also copied the uninitialized host array `c` into
    // dev_c here. dev_c is a pure output of the kernel, so that copy read
    // indeterminate host memory to no effect; it has been removed.
    // Launch N blocks of 1 thread each; the kernel guards tid < N.
    hipLaunchKernelGGL(( add), dim3(N), dim3(1), 0, 0, dev_a, dev_b, dev_c);
    hipMemcpy(c, dev_c, N*sizeof(int), hipMemcpyDeviceToHost);
    for(int i = 0; i < N; ++i)
    {
        printf("%d + %d = %d\n", a[i], b[i], c[i]);
    }
    hipFree(dev_a);
    hipFree(dev_b);
    hipFree(dev_c);
    // Second demo (disabled): list all devices.
    #if 0
    int count = 0;
    hipDeviceProp_t prop;
    memset(&prop, 0x00, sizeof(hipDeviceProp_t));
    hipGetDeviceCount(&count);
    printf("Device Count is %d\n", count);
    for(int i = 0; i <count; ++i)
    {
        hipGetDeviceProperties(&prop, i);
        printf("Information for Device %d\n", i);
        printf("Name:%s\n", prop.name);
    }
    #endif
    // Third demo: in a multi-GPU environment, choose the device closest to
    // compute capability 1.3 and make it current.
    hipDeviceProp_t prop;
    int dev;
    hipGetDevice(&dev);
    printf("ID of current CUDA device: %d\n", dev);
    memset(&prop, 0x00, sizeof(hipDeviceProp_t));
    prop.major = 1;
    prop.minor = 3;
    hipChooseDevice(&dev, &prop);
    printf("ID of CUDA device closest to revision 1.3: %d\n", dev);
    hipSetDevice(dev);
    return 0;
}
| ba4be4fc9ccbbd5d35a5e55d246d94287cafd507.cu | #include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include "String.h"
#define N 10
// Element-wise vector add: c[i] = a[i] + b[i] for i in [0, N).
// Generalized from `tid = blockIdx.x` to the standard flat 1-D index so the
// kernel is correct for any launch configuration; with the original
// <<<N, 1>>> launch blockDim.x == 1 and threadIdx.x == 0, so the index
// reduces to blockIdx.x and behavior is unchanged.
__global__ void add(int *a, int *b, int * c)
{
    int tid = blockIdx.x * blockDim.x + threadIdx.x;
    if (tid < N)  // guard the grid tail
        c[tid] = a[tid] + b[tid];
}
// Demo driver:
//  1) adds two N-element vectors on the GPU and prints the result;
//  2) (disabled under #if 0) enumerates devices and prints their names;
//  3) selects the device closest to compute capability 1.3.
// NOTE(review): return codes of the cuda* calls are not checked.
int main()
{
    int a[N], b[N], c[N];
    int *dev_a, *dev_b, *dev_c;
    cudaMalloc((void**)&dev_a, N*sizeof(int));
    cudaMalloc((void**)&dev_b, N*sizeof(int));
    cudaMalloc((void**)&dev_c, N*sizeof(int));
    for(int i = 0; i < N; ++i)
    {
        a[i] = -i;
        b[i] = i*i;
    }
    cudaMemcpy(dev_a, a, N*sizeof(int), cudaMemcpyHostToDevice);
    cudaMemcpy(dev_b, b, N*sizeof(int), cudaMemcpyHostToDevice);
    // BUG FIX: the original also copied the uninitialized host array `c` into
    // dev_c here. dev_c is a pure output of the kernel, so that copy read
    // indeterminate host memory to no effect; it has been removed.
    // N is the number of thread blocks used when launching the kernel,
    // one thread per block; the kernel guards tid < N.
    add<<<N, 1>>>(dev_a, dev_b, dev_c);
    cudaMemcpy(c, dev_c, N*sizeof(int), cudaMemcpyDeviceToHost);
    for(int i = 0; i < N; ++i)
    {
        printf("%d + %d = %d\n", a[i], b[i], c[i]);
    }
    cudaFree(dev_a);
    cudaFree(dev_b);
    cudaFree(dev_c);
    // Second demo (disabled): list all devices.
    #if 0
    int count = 0;
    cudaDeviceProp prop;
    memset(&prop, 0x00, sizeof(cudaDeviceProp));
    cudaGetDeviceCount(&count);
    printf("Device Count is %d\n", count);
    for(int i = 0; i <count; ++i)
    {
        cudaGetDeviceProperties(&prop, i);
        printf("Information for Device %d\n", i);
        printf("Name:%s\n", prop.name);
    }
    #endif
    // Third demo: in a multi-GPU environment, choose the device closest to
    // compute capability 1.3 and make it current.
    cudaDeviceProp prop;
    int dev;
    cudaGetDevice(&dev);
    printf("ID of current CUDA device: %d\n", dev);
    memset(&prop, 0x00, sizeof(cudaDeviceProp));
    prop.major = 1;
    prop.minor = 3;
    cudaChooseDevice(&dev, &prop);
    printf("ID of CUDA device closest to revision 1.3: %d\n", dev);
    cudaSetDevice(dev);
    return 0;
}
|
716cdfd4a444fb94380fcf68734f40d8b172d163.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include <cstdio>
#include <cstdlib>
#include <vector>
// Counting ("bucket") sort of key[0..n) in place. Keys must lie in
// [0, range) where `bucket` holds `range` zero-initialized counters.
// Phase 1: each thread atomically histograms its key into `bucket`.
// Phase 2: thread i accumulates bucket prefix sums until they exceed i;
// the bucket index k reached at that point is the value for sorted slot i.
// NOTE(review): __syncthreads() only synchronizes within one block, so
// phase 2 may read incomplete counts if more than one block is launched.
// Also, threads with i >= n return before the barrier (divergent barrier).
// With n = 50 and m = 64 in main() a single block is launched and this
// happens to work — confirm intended launch sizes or split into two kernels.
__global__ void bucketSort(int *key, int *bucket, int n) {
int i = blockIdx.x * blockDim.x + threadIdx.x;
if (i >= n) return;
atomicAdd(&bucket[key[i]], 1);
__syncthreads();
for (int j=0, k=0; j <= i; k++) {
key[i] = k;
j += bucket[k];
}
}
// Driver: fill `key` with n random values in [0, range), print the unsorted
// contents, run the bucketSort kernel, then print the sorted result.
// NOTE(review): the managed allocations are never freed (process exit
// reclaims them) — preserved as-is.
int main() {
    const int n = 50;   // number of keys
    const int m = 64;   // threads per block
    int range = 5;      // keys lie in [0, range)
    int *key = nullptr;
    hipMallocManaged(&key, n * sizeof(int));
    for (int i = 0; i < n; i++) {
        key[i] = rand() % range;
        printf("%d ", key[i]);
    }
    printf("\n");
    // Zero-initialized histogram: one counter per possible key value.
    int *bucket = nullptr;
    hipMallocManaged(&bucket, range * sizeof(int));
    for (int i = 0; i < range; i++)
        bucket[i] = 0;
    hipLaunchKernelGGL(( bucketSort), dim3((n + m - 1) / m), dim3(m), 0, 0, key, bucket, n);
    hipDeviceSynchronize();
    for (int i = 0; i < n; i++)
        printf("%d ", key[i]);
    printf("\n");
}
| 716cdfd4a444fb94380fcf68734f40d8b172d163.cu | #include <cstdio>
#include <cstdlib>
#include <vector>
// Counting ("bucket") sort of key[0..n) in place. Keys must lie in
// [0, range) where `bucket` holds `range` zero-initialized counters.
// Phase 1: each thread atomically histograms its key into `bucket`.
// Phase 2: thread i accumulates bucket prefix sums until they exceed i;
// the bucket index k reached at that point is the value for sorted slot i.
// NOTE(review): __syncthreads() only synchronizes within one block, so
// phase 2 may read incomplete counts if more than one block is launched.
// Also, threads with i >= n return before the barrier (divergent barrier).
// With n = 50 and m = 64 in main() a single block is launched and this
// happens to work — confirm intended launch sizes or split into two kernels.
__global__ void bucketSort(int *key, int *bucket, int n) {
int i = blockIdx.x * blockDim.x + threadIdx.x;
if (i >= n) return;
atomicAdd(&bucket[key[i]], 1);
__syncthreads();
for (int j=0, k=0; j <= i; k++) {
key[i] = k;
j += bucket[k];
}
}
// Driver: fill `key` with n random values in [0, range), print the unsorted
// contents, run the bucketSort kernel, then print the sorted result.
// NOTE(review): the managed allocations are never freed (process exit
// reclaims them) — preserved as-is.
int main() {
    const int n = 50;   // number of keys
    const int m = 64;   // threads per block
    int range = 5;      // keys lie in [0, range)
    int *key = nullptr;
    cudaMallocManaged(&key, n * sizeof(int));
    for (int i = 0; i < n; i++) {
        key[i] = rand() % range;
        printf("%d ", key[i]);
    }
    printf("\n");
    // Zero-initialized histogram: one counter per possible key value.
    int *bucket = nullptr;
    cudaMallocManaged(&bucket, range * sizeof(int));
    for (int i = 0; i < range; i++)
        bucket[i] = 0;
    bucketSort<<<(n + m - 1) / m, m>>>(key, bucket, n);
    cudaDeviceSynchronize();
    for (int i = 0; i < n; i++)
        printf("%d ", key[i]);
    printf("\n");
}
|
1032022c65546390d0eed7d27ef99437a521ab40.hip | // !!! This is a file automatically generated by hipify!!!
// Copyright (c) 2017-2018 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef QUERY_MEMORY_HPP
#define QUERY_MEMORY_HPP
#include "query/memory.hpp"
#include <hip/hip_runtime.h>
#include <cstdlib>
#include <cstring>
#include <string>
#include "memutils/memory.h"
#include "query/utils.hpp"
// MemoryError represents a memory error.
// Exception thrown by checkMemoryError() when a memutils call reports a
// failure; the message is the error string carried in
// CGoCallResHandle::pStrErr.
class MemoryError : public AlgorithmError{
public:
explicit MemoryError(const std::string &message) : AlgorithmError(message) {
}
};
// Converts a C-style result handle into a C++ exception: throws MemoryError
// with the error string when res.pStrErr is non-null; otherwise a no-op.
// NOTE(review): if pStrErr is heap-allocated by the callee it is leaked here
// after being copied into the exception — confirm the memutils ownership
// contract.
void checkMemoryError(CGoCallResHandle res) {
if (res.pStrErr != nullptr) {
throw MemoryError(res.pStrErr);
}
}
namespace ares {
// Thin wrappers over the C memutils API: each forwards to the global ::
// function of the same name and turns an error string in the returned handle
// into a MemoryError via checkMemoryError().
// Allocates `size` bytes of device memory into *devPtr.
void deviceMalloc(void **devPtr, size_t size) {
checkMemoryError(::deviceMalloc(devPtr, size));
}
// Frees device memory previously obtained from deviceMalloc().
void deviceFree(void *devPtr) {
checkMemoryError(::deviceFree(devPtr));
}
// Sets `count` bytes at devPtr to `value` (presumably byte-wise like
// hipMemset — confirm memutils contract).
void deviceMemset(void *devPtr, int value, size_t count) {
checkMemoryError(::deviceMemset(devPtr, value, count));
}
// Asynchronous host-to-device copy of `count` bytes, ordered on `stream`.
void asyncCopyHostToDevice(void* dst, const void* src, size_t count,
hipStream_t stream) {
checkMemoryError(::asyncCopyHostToDevice(dst, src, count, stream));
}
} // namespace ares
#endif // QUERY_MEMORY_HPP
| 1032022c65546390d0eed7d27ef99437a521ab40.cu | // Copyright (c) 2017-2018 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef QUERY_MEMORY_HPP
#define QUERY_MEMORY_HPP
#include "query/memory.hpp"
#include <cuda_runtime.h>
#include <cstdlib>
#include <cstring>
#include <string>
#include "memutils/memory.h"
#include "query/utils.hpp"
// MemoryError represents a memory error.
// Exception thrown by checkMemoryError() when a memutils call reports a
// failure; the message is the error string carried in
// CGoCallResHandle::pStrErr.
class MemoryError : public AlgorithmError{
public:
explicit MemoryError(const std::string &message) : AlgorithmError(message) {
}
};
// Converts a C-style result handle into a C++ exception: throws MemoryError
// with the error string when res.pStrErr is non-null; otherwise a no-op.
// NOTE(review): if pStrErr is heap-allocated by the callee it is leaked here
// after being copied into the exception — confirm the memutils ownership
// contract.
void checkMemoryError(CGoCallResHandle res) {
if (res.pStrErr != nullptr) {
throw MemoryError(res.pStrErr);
}
}
namespace ares {
// Thin wrappers over the C memutils API: each forwards to the global ::
// function of the same name and turns an error string in the returned handle
// into a MemoryError via checkMemoryError().
// Allocates `size` bytes of device memory into *devPtr.
void deviceMalloc(void **devPtr, size_t size) {
checkMemoryError(::deviceMalloc(devPtr, size));
}
// Frees device memory previously obtained from deviceMalloc().
void deviceFree(void *devPtr) {
checkMemoryError(::deviceFree(devPtr));
}
// Sets `count` bytes at devPtr to `value` (presumably byte-wise like
// cudaMemset — confirm memutils contract).
void deviceMemset(void *devPtr, int value, size_t count) {
checkMemoryError(::deviceMemset(devPtr, value, count));
}
// Asynchronous host-to-device copy of `count` bytes, ordered on `stream`.
void asyncCopyHostToDevice(void* dst, const void* src, size_t count,
cudaStream_t stream) {
checkMemoryError(::asyncCopyHostToDevice(dst, src, count, stream));
}
} // namespace ares
#endif // QUERY_MEMORY_HPP
|
252fe142e93ad01d4745877d5af9e9d04c73a478.hip | // !!! This is a file automatically generated by hipify!!!
/*
* Copyright 2011-2016 Maxim Milakov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "sparse_1x1_layer_updater_cuda.h"
#include <hip/hip_runtime.h>
#include <boost/format.hpp>
#include "util_cuda.h"
#include "cudnn_util.h"
#include "neural_network_cusparse_exception.h"
#include "neural_network_cublas_exception.h"
#include "neural_network_cuda_exception.h"
#include "neural_network_cudnn_exception.h"
#include "../sparse_convolution_layer.h"
#include "int_fastdiv.h"
namespace nnforge
{
namespace cuda
{
#define OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE 4
// Backward data pass for the sparse 1x1 layer: scatters
// input_errors += W^T * output_errors, where W is stored in CSR form
// (row_ptrs / column_indices / weights, one row per output feature map) and
// both error buffers are laid out feature-map-major
// (feature_map * entry_count + entry; here entry_count is the caller's
// entry_count * elems_per_feature_map).
// Grid mapping: y -> CSR row (output feature map); each warp along x handles
// up to OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE nonzeros of that row; z selects
// a range of 32 * entry32_block_size entries processed by the warp.
// atomicAdd is required because multiple output rows can reference the same
// input feature map.
__global__ void sparse_1x1_backprop_upd_kernel(
const float * __restrict output_errors,
float * __restrict input_errors,
const float * __restrict weights,
const int * __restrict column_indices,
const int * __restrict row_ptrs,
int output_feature_map_count,
int entry_count,
int entry32_block_size)
{
int row_id = blockIdx.y * blockDim.y + threadIdx.y;
if (row_id >= output_feature_map_count)
return;
// [start, end) range of this row's nonzeros in the CSR arrays.
int start_column_index = __load_nc(row_ptrs + row_id);
int end_column_index = __load_nc(row_ptrs + row_id + 1);
int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
// Each warp (thread_id_x >> 5) owns a contiguous group of up to
// OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE nonzeros.
int base_column_index_offset = (thread_id_x >> 5) * OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE;
int base_nnz_index = start_column_index + base_column_index_offset;
if (base_nnz_index >= end_column_index)
return;
int max_valid_lane = min(OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE, end_column_index - base_nnz_index);
bool valid[OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE];
int column_ids[OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE];
float w[OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE; ++i)
{
// Out-of-range slots are clamped to the row's last nonzero so the loads
// stay in bounds; valid[] suppresses their stores below.
valid[i] = (i < max_valid_lane);
int index = valid[i] ? base_nnz_index + i : (end_column_index - 1);
column_ids[i] = __load_nc(column_indices + index);
w[i] = __load_nc(weights + index);
}
int base_entry_id = ((blockIdx.z * blockDim.z + threadIdx.z) << 5) * entry32_block_size;
if (base_entry_id >= entry_count)
return;
// Each of the 32 lanes walks a strided slice of the entry range.
int lane_id = thread_id_x & 31;
int current_entry_id = base_entry_id + lane_id;
const float * base_output_errors = output_errors + row_id * entry_count;
for(int j = 0; j < entry32_block_size; ++j, current_entry_id += 32)
{
if (current_entry_id < entry_count)
{
float output_error = __load_nc(base_output_errors + current_entry_id);
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE; ++i)
if (valid[i])
atomicAdd(input_errors + column_ids[i] * entry_count + current_entry_id, output_error * w[i]);
}
}
}
#define OUTPUT_ELEM_COUNT_BLOCK_SIZE 4
// Weight-gradient pass for the sparse 1x1 layer: for each CSR nonzero (r, c)
// accumulates gradient_weights[nnz] += sum_e output_errors[r][e] *
// input_neurons[c][e] over the entry range handled by this warp. Buffers are
// feature-map-major as in the backprop kernel. Grid mapping matches
// sparse_1x1_backprop_upd_kernel: y -> CSR row, warps along x -> groups of up
// to OUTPUT_ELEM_COUNT_BLOCK_SIZE nonzeros, z -> entry ranges.
// single_entry_pass == true means exactly one z-slice covers all entries, so
// the final store can be a plain += instead of atomicAdd.
template<bool single_entry_pass>
__global__ void sparse_1x1_update_weights_kernel(
const float * __restrict output_errors,
const float * __restrict input_neurons,
float * __restrict gradient_weights,
const int * __restrict column_indices,
const int * __restrict row_ptrs,
int output_feature_map_count,
int entry_count,
int entry32_block_size)
{
int row_id = blockIdx.y * blockDim.y + threadIdx.y;
if (row_id >= output_feature_map_count)
return;
// [start, end) range of this row's nonzeros in the CSR arrays.
int start_column_index = __load_nc(row_ptrs + row_id);
int end_column_index = __load_nc(row_ptrs + row_id + 1);
int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
int base_column_index_offset = (thread_id_x >> 5) * OUTPUT_ELEM_COUNT_BLOCK_SIZE;
int base_nnz_index = start_column_index + base_column_index_offset;
if (base_nnz_index >= end_column_index)
return;
// Out-of-range slots are clamped to the row's last nonzero; valid[] (via
// max_valid_lane) suppresses their final store.
int max_valid_lane = min(OUTPUT_ELEM_COUNT_BLOCK_SIZE, end_column_index - base_nnz_index);
bool valid[OUTPUT_ELEM_COUNT_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
valid[i] = (i < max_valid_lane);
int column_ids[OUTPUT_ELEM_COUNT_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
column_ids[i] = __load_nc(column_indices + (valid[i] ? base_nnz_index + i : (end_column_index - 1)));
int base_entry_id = ((blockIdx.z * blockDim.z + threadIdx.z) << 5) * entry32_block_size;
if (base_entry_id >= entry_count)
return;
int lane_id = thread_id_x & 31;
int current_entry_id = base_entry_id + lane_id;
// Per-lane partial dot products, one accumulator per nonzero in the group.
float sums[OUTPUT_ELEM_COUNT_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
sums[i] = 0.0F;
const float * base_output_errors = output_errors + row_id * entry_count;
for(int j = 0; j < entry32_block_size; ++j, current_entry_id += 32)
{
if (current_entry_id < entry_count)
{
float output_error = __load_nc(base_output_errors + current_entry_id);
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
sums[i] += __load_nc(input_neurons + column_ids[i] * entry_count + current_entry_id) * output_error;
}
}
// Butterfly reduction: after this every lane holds the warp-wide total for
// each accumulator. NOTE(review): mask-less __shfl_xor is the HIP API; on
// CUDA Volta+ the __shfl_xor_sync form with an explicit mask is required.
#pragma unroll
for(int tx = 16; tx > 0; tx >>= 1)
{
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
sums[i] += __shfl_xor(sums[i], tx);
}
// Rotate results so lane i holds total i in sums[0]; lanes 0..max_valid_lane-1
// then each store one gradient element.
#pragma unroll
for(int i = 1; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
if (lane_id == i)
sums[0] = sums[i];
if (lane_id < max_valid_lane)
{
if (single_entry_pass)
{
gradient_weights[base_nnz_index + lane_id] += sums[0];
}
else
{
atomicAdd(gradient_weights + base_nnz_index + lane_id, sums[0]);
}
}
}
/*
template<int DIMENSION_COUNT, bool add_update_to_destination>
__global__ void sparse_1x1_convert_to_strided(
float * __restrict strided_nchw_values,
const float * __restrict packed_cnhw_values,
array_by_val<int_fastdiv, DIMENSION_COUNT> input_compacted_sizes,
array_by_val<int, DIMENSION_COUNT> strides,
array_by_val<int, DIMENSION_COUNT> input_sizes,
int neuron_count_per_feature_map,
int feature_map_count,
int entry_count)
{
int neuron_id = blockIdx.x * blockDim.x + threadIdx.x;
int feature_map_id = blockIdx.y * blockDim.y + threadIdx.y;
int entry_id = blockIdx.z * blockDim.z + threadIdx.z;
if ((neuron_id < neuron_count_per_feature_map) && (feature_map_id < feature_map_count) && (entry_id < entry_count))
{
int xyz[DIMENSION_COUNT];
int remainder = neuron_id;
#pragma unroll
for(int i = 0; i < DIMENSION_COUNT - 1; ++i)
{
int new_remainder = remainder / input_compacted_sizes[i];
xyz[i] = remainder - input_compacted_sizes[i] * new_remainder;
remainder = new_remainder;
}
xyz[DIMENSION_COUNT - 1] = remainder;
int compacted_offset = (feature_map_id * entry_count + entry_id) * neuron_count_per_feature_map + neuron_id;
int strided_offset = entry_id * feature_map_count + feature_map_id;
#pragma unroll
for(int i = DIMENSION_COUNT - 1; i >= 0; --i)
strided_offset = strided_offset * input_sizes[i] + xyz[i] * strides[i];
if (add_update_to_destination)
strided_nchw_values[strided_offset] += packed_cnhw_values[compacted_offset];
else
strided_nchw_values[strided_offset] = packed_cnhw_values[compacted_offset];
}
}
*/
// Creates the five cuDNN tensor descriptors used by the forward/backward
// passes. The zero-initializers for output_data_desc and bias_desc are
// immediately overwritten by the creates below.
// NOTE(review): if a later cudnnCreateTensorDescriptor throws via
// cudnn_safe_call, the descriptors already created are leaked (the destructor
// will not run for a partially constructed object).
sparse_1x1_layer_updater_cuda::sparse_1x1_layer_updater_cuda()
: output_data_desc(0)
, bias_desc(0)
{
cudnn_safe_call(cudnnCreateTensorDescriptor(&input_strided_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&input_converted_NHWC_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&input_converted_CNHW_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&output_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&bias_desc));
}
// Releases the tensor descriptors created in the constructor. Return codes
// are deliberately ignored: destructors must not throw.
sparse_1x1_layer_updater_cuda::~sparse_1x1_layer_updater_cuda()
{
cudnnDestroyTensorDescriptor(input_strided_data_desc);
cudnnDestroyTensorDescriptor(input_converted_NHWC_data_desc);
cudnnDestroyTensorDescriptor(input_converted_CNHW_data_desc);
cudnnDestroyTensorDescriptor(output_data_desc);
cudnnDestroyTensorDescriptor(bias_desc);
}
// Forward pass of the sparse 1x1 convolution:
//  1) repack the (possibly strided) NCHW input into packed CNHW layout in
//     temporary_working_per_entry_buffer (transpose for unit stride, cuDNN
//     AddTensor with stride tricks otherwise);
//  2) multiply by the CSR weight matrix with hipSPARSE csrmm2, writing the
//     feature-map-major product into the second half of the work buffer;
//  3) transpose the product back to NCHW into output_buffer;
//  4) optionally add the bias via cuDNN.
// All work is enqueued on stream_id.
// BUG FIX: the temporary hipSPARSE matrix descriptor created for the csrmm2
// call was never destroyed, leaking one descriptor per forward call; it is
// now released right after use.
void sparse_1x1_layer_updater_cuda::enqueue_forward_propagation(
    hipStream_t stream_id,
    cuda_linear_buffer_device::ptr output_buffer,
    const std::vector<cuda_linear_buffer_device::const_ptr>& schema_data,
    const std::vector<cuda_linear_buffer_device::const_ptr>& data,
    const std::vector<cuda_linear_buffer_device::const_ptr>& data_custom,
    const std::vector<cuda_linear_buffer_device::const_ptr>& input_buffers,
    const std::vector<cuda_linear_buffer_device::const_ptr>& persistent_working_data,
    cuda_linear_buffer_device::ptr temporary_working_fixed_buffer,
    cuda_linear_buffer_device::ptr temporary_working_per_entry_buffer,
    cuda_linear_buffer_device::ptr temporary_fixed_buffer,
    cuda_linear_buffer_device::ptr temporary_per_entry_buffer,
    unsigned int entry_count)
{
    // Convert input data strided NCHW to packed CNHW format
    if (unit_stride)
    {
        cuda_util::transpose23(
            *cuda_config,
            *input_buffers[0],
            *temporary_working_per_entry_buffer,
            input_elem_count_per_feature_map_list[0],
            input_configuration_specific_list[0].feature_map_count,
            entry_count,
            stream_id);
    }
    else
    {
        // Non-unit stride: express the gather as a cuDNN AddTensor between a
        // strided source descriptor and a packed CNHW destination descriptor.
        std::vector<unsigned int> input_converted_CNHW_strides = input_converted_CNHW_strides_base;
        input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 2] = input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 1] * entry_count;
        cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
        cudnn_util::set_tensor_descriptor(
            input_strided_data_desc,
            input_strided_config,
            entry_count,
            input_strides);
        cudnn_util::set_tensor_descriptor(
            input_converted_CNHW_data_desc,
            input_strided_config,
            entry_count,
            input_converted_CNHW_strides);
        float alpha = 1.0F;
        float beta = 0.0F;
        cudnn_safe_call(cudnnAddTensor(
            cuda_config->get_cudnn_handle(),
            &alpha,
            input_strided_data_desc,
            *input_buffers[0],
            &beta,
            input_converted_CNHW_data_desc,
            *temporary_working_per_entry_buffer));
    }
    // Sparse weight multiply: (fm_out x fm_in CSR) * (fm_in x entries)^T.
    {
        cusparse_safe_call(hipsparseSetStream(cuda_config->get_cusparse_handle(), stream_id));
        float alpha = 1.0F;
        float beta = 0.0F;
        hipsparseMatDescr_t mat_descr;
        cusparse_safe_call(hipsparseCreateMatDescr(&mat_descr));
        cusparse_safe_call(hipsparseScsrmm2(
            cuda_config->get_cusparse_handle(),
            HIPSPARSE_OPERATION_NON_TRANSPOSE,
            HIPSPARSE_OPERATION_TRANSPOSE,
            output_configuration_specific.feature_map_count,
            entry_count * output_elem_count_per_feature_map,
            input_strided_config.feature_map_count,
            feature_map_connection_count,
            &alpha,
            mat_descr,
            *data[0],
            *data_custom[1],
            *data_custom[0],
            *temporary_working_per_entry_buffer,
            entry_count * output_elem_count_per_feature_map,
            &beta,
            ((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
            output_configuration_specific.feature_map_count));
        // Release the descriptor created above (was leaked previously).
        cusparse_safe_call(hipsparseDestroyMatDescr(mat_descr));
    }
    // Convert output from NHWC to NCHW
    {
        cuda_util::transpose(
            *cuda_config,
            ((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
            *output_buffer,
            output_configuration_specific.feature_map_count,
            output_elem_count_per_feature_map,
            entry_count,
            stream_id);
    }
    // Add bias
    if (bias)
    {
        cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
        cudnn_util::set_tensor_descriptor(
            output_data_desc,
            output_configuration_specific,
            entry_count);
        float alpha = 1.0F;
        float beta = 1.0F;
        cudnn_safe_call(cudnnAddTensor(
            cuda_config->get_cudnn_handle(),
            &alpha,
            bias_desc,
            *data[1],
            &beta,
            output_data_desc,
            *output_buffer));
    }
}
// Backward data pass: input_errors (+)= W^T * output_errors.
//  1) transpose output errors NCHW -> packed CNHW into the second half of the
//     work buffer;
//  2) zero the first half, then scatter W^T * errors into it with the
//     backprop kernel (atomicAdd accumulation);
//  3) convert CNHW back to (strided) NCHW into input_errors_buffer, either
//     overwriting or accumulating per add_update_to_destination.
// All work is enqueued on stream_id.
void sparse_1x1_layer_updater_cuda::enqueue_backward_data_propagation(
hipStream_t stream_id,
unsigned int input_index,
cuda_linear_buffer_device::ptr input_errors_buffer,
cuda_linear_buffer_device::const_ptr output_errors_buffer,
const std::vector<cuda_linear_buffer_device::const_ptr>& schema_data,
const std::vector<cuda_linear_buffer_device::const_ptr>& data,
const std::vector<cuda_linear_buffer_device::const_ptr>& data_custom,
const std::vector<cuda_linear_buffer_device::const_ptr>& input_neurons_buffers,
cuda_linear_buffer_device::const_ptr output_neurons_buffer,
const std::vector<cuda_linear_buffer_device::const_ptr>& persistent_working_data,
cuda_linear_buffer_device::ptr temporary_working_fixed_buffer,
cuda_linear_buffer_device::ptr temporary_working_per_entry_buffer,
cuda_linear_buffer_device::const_ptr temporary_fixed_buffer,
cuda_linear_buffer_device::const_ptr temporary_per_entry_buffer,
bool add_update_to_destination,
unsigned int entry_count)
{
// Convert output errors from NCHW to CNHW
{
cuda_util::transpose23(
*cuda_config,
*output_errors_buffer,
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
output_elem_count_per_feature_map,
output_configuration_specific.feature_map_count,
entry_count,
stream_id);
}
// Zero the CNHW input-error scratch area, then scatter-accumulate into it.
{
cuda_util::set_with_value(
*cuda_config,
*temporary_working_per_entry_buffer,
0.0F,
input_converted_elem_count_per_entry_aligned * entry_count,
stream_id);
std::pair<int, int> entry32_block_size_and_count = get_entry32_backprop_block_size_and_count(entry_count * output_elem_count_per_feature_map);
std::pair<dim3, dim3> kernel_dims = cuda_util::get_grid_and_threadblock_sizes_sequential_access(
*cuda_config,
32 * ((max_column_index_count_per_row + OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE - 1) / OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE),
output_configuration_specific.feature_map_count,
entry32_block_size_and_count.second,
32);
hipLaunchKernelGGL(( sparse_1x1_backprop_upd_kernel), dim3(kernel_dims.first), dim3(kernel_dims.second), 0, stream_id,
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
*temporary_working_per_entry_buffer,
*data[0],
*data_custom[0],
*data_custom[1],
output_configuration_specific.feature_map_count,
entry_count * output_elem_count_per_feature_map,
entry32_block_size_and_count.first);
}
// Convert input errors from CNHW to NCHW
{
// Non-unit stride leaves gaps the CNHW->NCHW copy does not touch, so the
// destination must be cleared first when overwriting.
if ((!add_update_to_destination) && (!unit_stride))
{
cuda_util::set_with_value(
*cuda_config,
*input_errors_buffer,
0.0F,
input_elem_count_per_entry_list[0] * entry_count,
stream_id);
}
if (unit_stride)
{
cuda_util::transpose23(
*cuda_config,
*temporary_working_per_entry_buffer,
*input_errors_buffer,
input_elem_count_per_feature_map_list[0],
entry_count,
input_configuration_specific_list[0].feature_map_count,
stream_id,
add_update_to_destination);
}
else
{
std::vector<unsigned int> input_converted_CNHW_strides = input_converted_CNHW_strides_base;
input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 2] = input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 1] * entry_count;
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
input_strided_data_desc,
input_strided_config,
entry_count,
input_strides);
cudnn_util::set_tensor_descriptor(
input_converted_CNHW_data_desc,
input_strided_config,
entry_count,
input_converted_CNHW_strides);
float alpha = 1.0F;
// beta selects accumulate (1) vs overwrite (0) in the destination.
float beta = add_update_to_destination ? 1.0F : 0.0F;
cudnn_safe_call(cudnnAddTensor(
cuda_config->get_cudnn_handle(),
&alpha,
input_converted_CNHW_data_desc,
*temporary_working_per_entry_buffer,
&beta,
input_strided_data_desc,
*input_errors_buffer));
}
}
}
// Backward weights pass: accumulates the weight gradient
// gradient[0] += output_errors * input_neurons^T restricted to the CSR
// sparsity pattern, and the bias gradient via cuDNN.
//  1) repack input neurons NCHW -> packed CNHW into the work buffer;
//  2) transpose output errors NCHW -> CNHW into the second half;
//  3) run the update-weights kernel; the single_entry_pass template flag is
//     chosen so that plain += is used when one entry32 slice covers all
//     entries, atomicAdd otherwise.
// All work is enqueued on stream_id.
void sparse_1x1_layer_updater_cuda::enqueue_backward_weights_propagation(
hipStream_t stream_id,
const std::vector<cuda_linear_buffer_device::const_ptr>& schema_data,
const std::vector<cuda_linear_buffer_device::ptr>& gradient,
const std::vector<cuda_linear_buffer_device::const_ptr>& data_custom,
const std::vector<cuda_linear_buffer_device::const_ptr>& input_neurons_buffers,
cuda_linear_buffer_device::const_ptr output_errors_buffer,
const std::vector<cuda_linear_buffer_device::const_ptr>& persistent_working_data,
cuda_linear_buffer_device::ptr temporary_working_fixed_buffer,
cuda_linear_buffer_device::ptr temporary_working_per_entry_buffer,
cuda_linear_buffer_device::const_ptr temporary_fixed_buffer,
cuda_linear_buffer_device::const_ptr temporary_per_entry_buffer,
unsigned int entry_count)
{
// Convert input data to packed CNHW format
if (unit_stride)
{
cuda_util::transpose(
*cuda_config,
*input_neurons_buffers[0],
*temporary_working_per_entry_buffer,
input_elem_count_per_feature_map_list[0],
input_configuration_specific_list[0].feature_map_count,
entry_count,
stream_id);
}
else
{
// Non-unit stride: gather via cuDNN AddTensor between strided source and
// packed CNHW destination descriptors (same trick as the forward pass).
std::vector<unsigned int> input_converted_CNHW_strides = input_converted_CNHW_strides_base;
input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 2] = input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 1] * entry_count;
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
input_strided_data_desc,
input_strided_config,
entry_count,
input_strides);
cudnn_util::set_tensor_descriptor(
input_converted_CNHW_data_desc,
input_strided_config,
entry_count,
input_converted_CNHW_strides);
float alpha = 1.0F;
float beta = 0.0F;
cudnn_safe_call(cudnnAddTensor(
cuda_config->get_cudnn_handle(),
&alpha,
input_strided_data_desc,
*input_neurons_buffers[0],
&beta,
input_converted_CNHW_data_desc,
*temporary_working_per_entry_buffer));
}
// Convert output from NCHW to CNHW
{
cuda_util::transpose23(
*cuda_config,
*output_errors_buffer,
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
output_elem_count_per_feature_map,
output_configuration_specific.feature_map_count,
entry_count,
stream_id);
}
// Accumulate the sparse weight gradient.
{
std::pair<int, int> entry32_block_size_and_count = get_entry32_update_block_size_and_count(output_elem_count_per_feature_map * entry_count);
std::pair<dim3, dim3> kernel_dims = cuda_util::get_grid_and_threadblock_sizes_sequential_access(
*cuda_config,
32 * ((max_column_index_count_per_row + OUTPUT_ELEM_COUNT_BLOCK_SIZE - 1) / OUTPUT_ELEM_COUNT_BLOCK_SIZE),
output_configuration_specific.feature_map_count,
entry32_block_size_and_count.second,
32);
if (entry32_block_size_and_count.second > 1)
{
hipLaunchKernelGGL(( sparse_1x1_update_weights_kernel<false>), dim3(kernel_dims.first), dim3(kernel_dims.second), 0, stream_id,
((const float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
*temporary_working_per_entry_buffer,
*gradient[0],
*data_custom[0],
*data_custom[1],
output_configuration_specific.feature_map_count,
output_elem_count_per_feature_map * entry_count,
entry32_block_size_and_count.first);
}
else
{
hipLaunchKernelGGL(( sparse_1x1_update_weights_kernel<true>), dim3(kernel_dims.first), dim3(kernel_dims.second), 0, stream_id,
((const float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
*temporary_working_per_entry_buffer,
*gradient[0],
*data_custom[0],
*data_custom[1],
output_configuration_specific.feature_map_count,
output_elem_count_per_feature_map * entry_count,
entry32_block_size_and_count.first);
}
}
// Update biases
if (bias)
{
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
output_data_desc,
output_configuration_specific,
entry_count);
float alpha = 1.0F;
// beta = 1 accumulates into the existing bias gradient.
float beta = 1.0F;
cudnn_safe_call(cudnnConvolutionBackwardBias(
cuda_config->get_cudnn_handle(),
&alpha,
output_data_desc,
*output_errors_buffer,
&beta,
bias_desc,
*gradient[1]));
}
}
void sparse_1x1_layer_updater_cuda::updater_configured()
{
nnforge_shared_ptr<const sparse_convolution_layer> layer_derived = nnforge_dynamic_pointer_cast<const sparse_convolution_layer>(layer_schema);
feature_map_connection_count = layer_derived->feature_map_connection_count;
bias = layer_derived->bias;
unit_stride = (layer_derived->strides == std::vector<unsigned int>(layer_derived->strides.size(), 1));
cudnn_util::set_tensor_bias_descriptor(
bias_desc,
output_configuration_specific.feature_map_count,
static_cast<unsigned int>(output_configuration_specific.dimension_sizes.size()));
int input_data_single_update_32block_entry_size = input_configuration_specific_list[0].feature_map_count * 32 * sizeof(float);
max_entry32_update_block_size = ::max(1, cuda_config->l2_cache_size / 2 / input_data_single_update_32block_entry_size);
int input_data_single_backprop_32block_entry_size = input_configuration_specific_list[0].feature_map_count * 32 * sizeof(float);
max_entry32_backprop_block_size = ::max(1, cuda_config->l2_cache_size / 2 / input_data_single_backprop_32block_entry_size);
const std::vector<unsigned int>& strides = layer_derived->strides;
input_strided_config.feature_map_count = input_configuration_specific_list[0].feature_map_count;
input_strided_config.dimension_sizes = output_configuration_specific.dimension_sizes;
input_strides.resize(strides.size() + 1);
unsigned int dim_size = 1;
for(int i = 0; i < strides.size(); ++i)
{
*(input_strides.begin() + i) = strides[i] * dim_size;
dim_size *= input_configuration_specific_list[0].dimension_sizes[i];
}
input_strides[strides.size()] = dim_size;
input_converted_NHWC_strides.resize(strides.size() + 2);
input_converted_NHWC_strides[strides.size()] = 1;
dim_size = input_strided_config.feature_map_count;
for(int i = 0; i < strides.size(); ++i)
{
input_converted_NHWC_strides[i] = dim_size;
dim_size *= output_configuration_specific.dimension_sizes[i];
}
input_converted_NHWC_strides.back() = dim_size;
input_converted_CNHW_strides_base.resize(strides.size() + 2);
dim_size = 1;
for(int i = 0; i < strides.size(); ++i)
{
input_converted_CNHW_strides_base[i] = dim_size;
dim_size *= output_configuration_specific.dimension_sizes[i];
}
input_converted_CNHW_strides_base[strides.size() + 1] = dim_size;
input_converted_elem_count_per_entry_aligned = (input_strided_config.get_neuron_count() + 4 - 1) / 4 * 4;
output_elem_count_per_entry_aligned = (output_configuration_specific.get_neuron_count() + 4 - 1) / 4 * 4;
}
void sparse_1x1_layer_updater_cuda::notify_data_custom(layer_data_custom::const_ptr host_data_custom)
{
max_column_index_count_per_row = 0;
const std::vector<int>& row_indices = host_data_custom->at(1);
for(int i = 0; i < row_indices.size() - 1; ++i)
max_column_index_count_per_row = ::max(max_column_index_count_per_row, row_indices[i + 1] - row_indices[i]);
}
size_t sparse_1x1_layer_updater_cuda::get_temporary_working_per_entry_buffer_size(const layer_action& action) const
{
if ((action.get_action_type() == layer_action::forward) || (action.get_action_type() == layer_action::backward_data) || (action.get_action_type() == layer_action::backward_weights))
return (input_converted_elem_count_per_entry_aligned * sizeof(float)) + (output_elem_count_per_entry_aligned * sizeof(float));
else
return layer_updater_cuda::get_temporary_working_per_entry_buffer_size(action);
}
std::pair<int, int> sparse_1x1_layer_updater_cuda::get_entry32_update_block_size_and_count(unsigned int entry_count) const
{
int candidate_block_size = (entry_count + 32 - 1) / 32;
if (candidate_block_size <= max_entry32_update_block_size)
return std::make_pair(candidate_block_size, 1);
int candidate_block_count2 = (candidate_block_size + max_entry32_update_block_size - 1) / max_entry32_update_block_size;
int candidate_block_size2 = (candidate_block_size + candidate_block_count2 - 1) / candidate_block_count2;
return std::make_pair(candidate_block_size2, candidate_block_count2);
}
std::pair<int, int> sparse_1x1_layer_updater_cuda::get_entry32_backprop_block_size_and_count(unsigned int entry_count) const
{
int candidate_block_size = (entry_count + 32 - 1) / 32;
if (candidate_block_size <= max_entry32_backprop_block_size)
return std::make_pair(candidate_block_size, 1);
int candidate_block_count2 = (candidate_block_size + max_entry32_backprop_block_size - 1) / max_entry32_backprop_block_size;
int candidate_block_size2 = (candidate_block_size + candidate_block_count2 - 1) / candidate_block_count2;
return std::make_pair(candidate_block_size2, candidate_block_count2);
}
bool sparse_1x1_layer_updater_cuda::is_backward_data_dependent_on_input_buffer(unsigned int action_input_index, unsigned int data_input_index) const
{
return false;
}
bool sparse_1x1_layer_updater_cuda::is_backward_data_dependent_on_output_buffer(unsigned int action_input_index) const
{
return false;
}
bool sparse_1x1_layer_updater_cuda::is_backward_weights_dependent_on_input_buffer(unsigned int data_input_index) const
{
return true;
}
}
}
| 252fe142e93ad01d4745877d5af9e9d04c73a478.cu | /*
* Copyright 2011-2016 Maxim Milakov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "sparse_1x1_layer_updater_cuda.h"
#include <cuda_runtime.h>
#include <boost/format.hpp>
#include "util_cuda.h"
#include "cudnn_util.h"
#include "neural_network_cusparse_exception.h"
#include "neural_network_cublas_exception.h"
#include "neural_network_cuda_exception.h"
#include "neural_network_cudnn_exception.h"
#include "../sparse_convolution_layer.h"
#include "int_fastdiv.h"
namespace nnforge
{
namespace cuda
{
#define OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE 4
__global__ void sparse_1x1_backprop_upd_kernel(
const float * __restrict output_errors,
float * __restrict input_errors,
const float * __restrict weights,
const int * __restrict column_indices,
const int * __restrict row_ptrs,
int output_feature_map_count,
int entry_count,
int entry32_block_size)
{
int row_id = blockIdx.y * blockDim.y + threadIdx.y;
if (row_id >= output_feature_map_count)
return;
int start_column_index = __load_nc(row_ptrs + row_id);
int end_column_index = __load_nc(row_ptrs + row_id + 1);
int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
int base_column_index_offset = (thread_id_x >> 5) * OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE;
int base_nnz_index = start_column_index + base_column_index_offset;
if (base_nnz_index >= end_column_index)
return;
int max_valid_lane = min(OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE, end_column_index - base_nnz_index);
bool valid[OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE];
int column_ids[OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE];
float w[OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE; ++i)
{
valid[i] = (i < max_valid_lane);
int index = valid[i] ? base_nnz_index + i : (end_column_index - 1);
column_ids[i] = __load_nc(column_indices + index);
w[i] = __load_nc(weights + index);
}
int base_entry_id = ((blockIdx.z * blockDim.z + threadIdx.z) << 5) * entry32_block_size;
if (base_entry_id >= entry_count)
return;
int lane_id = thread_id_x & 31;
int current_entry_id = base_entry_id + lane_id;
const float * base_output_errors = output_errors + row_id * entry_count;
for(int j = 0; j < entry32_block_size; ++j, current_entry_id += 32)
{
if (current_entry_id < entry_count)
{
float output_error = __load_nc(base_output_errors + current_entry_id);
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE; ++i)
if (valid[i])
atomicAdd(input_errors + column_ids[i] * entry_count + current_entry_id, output_error * w[i]);
}
}
}
#define OUTPUT_ELEM_COUNT_BLOCK_SIZE 4
template<bool single_entry_pass>
__global__ void sparse_1x1_update_weights_kernel(
const float * __restrict output_errors,
const float * __restrict input_neurons,
float * __restrict gradient_weights,
const int * __restrict column_indices,
const int * __restrict row_ptrs,
int output_feature_map_count,
int entry_count,
int entry32_block_size)
{
int row_id = blockIdx.y * blockDim.y + threadIdx.y;
if (row_id >= output_feature_map_count)
return;
int start_column_index = __load_nc(row_ptrs + row_id);
int end_column_index = __load_nc(row_ptrs + row_id + 1);
int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
int base_column_index_offset = (thread_id_x >> 5) * OUTPUT_ELEM_COUNT_BLOCK_SIZE;
int base_nnz_index = start_column_index + base_column_index_offset;
if (base_nnz_index >= end_column_index)
return;
int max_valid_lane = min(OUTPUT_ELEM_COUNT_BLOCK_SIZE, end_column_index - base_nnz_index);
bool valid[OUTPUT_ELEM_COUNT_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
valid[i] = (i < max_valid_lane);
int column_ids[OUTPUT_ELEM_COUNT_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
column_ids[i] = __load_nc(column_indices + (valid[i] ? base_nnz_index + i : (end_column_index - 1)));
int base_entry_id = ((blockIdx.z * blockDim.z + threadIdx.z) << 5) * entry32_block_size;
if (base_entry_id >= entry_count)
return;
int lane_id = thread_id_x & 31;
int current_entry_id = base_entry_id + lane_id;
float sums[OUTPUT_ELEM_COUNT_BLOCK_SIZE];
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
sums[i] = 0.0F;
const float * base_output_errors = output_errors + row_id * entry_count;
for(int j = 0; j < entry32_block_size; ++j, current_entry_id += 32)
{
if (current_entry_id < entry_count)
{
float output_error = __load_nc(base_output_errors + current_entry_id);
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
sums[i] += __load_nc(input_neurons + column_ids[i] * entry_count + current_entry_id) * output_error;
}
}
#pragma unroll
for(int tx = 16; tx > 0; tx >>= 1)
{
#pragma unroll
for(int i = 0; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
sums[i] += __shfl_xor(sums[i], tx);
}
#pragma unroll
for(int i = 1; i < OUTPUT_ELEM_COUNT_BLOCK_SIZE; ++i)
if (lane_id == i)
sums[0] = sums[i];
if (lane_id < max_valid_lane)
{
if (single_entry_pass)
{
gradient_weights[base_nnz_index + lane_id] += sums[0];
}
else
{
atomicAdd(gradient_weights + base_nnz_index + lane_id, sums[0]);
}
}
}
/*
template<int DIMENSION_COUNT, bool add_update_to_destination>
__global__ void sparse_1x1_convert_to_strided(
float * __restrict strided_nchw_values,
const float * __restrict packed_cnhw_values,
array_by_val<int_fastdiv, DIMENSION_COUNT> input_compacted_sizes,
array_by_val<int, DIMENSION_COUNT> strides,
array_by_val<int, DIMENSION_COUNT> input_sizes,
int neuron_count_per_feature_map,
int feature_map_count,
int entry_count)
{
int neuron_id = blockIdx.x * blockDim.x + threadIdx.x;
int feature_map_id = blockIdx.y * blockDim.y + threadIdx.y;
int entry_id = blockIdx.z * blockDim.z + threadIdx.z;
if ((neuron_id < neuron_count_per_feature_map) && (feature_map_id < feature_map_count) && (entry_id < entry_count))
{
int xyz[DIMENSION_COUNT];
int remainder = neuron_id;
#pragma unroll
for(int i = 0; i < DIMENSION_COUNT - 1; ++i)
{
int new_remainder = remainder / input_compacted_sizes[i];
xyz[i] = remainder - input_compacted_sizes[i] * new_remainder;
remainder = new_remainder;
}
xyz[DIMENSION_COUNT - 1] = remainder;
int compacted_offset = (feature_map_id * entry_count + entry_id) * neuron_count_per_feature_map + neuron_id;
int strided_offset = entry_id * feature_map_count + feature_map_id;
#pragma unroll
for(int i = DIMENSION_COUNT - 1; i >= 0; --i)
strided_offset = strided_offset * input_sizes[i] + xyz[i] * strides[i];
if (add_update_to_destination)
strided_nchw_values[strided_offset] += packed_cnhw_values[compacted_offset];
else
strided_nchw_values[strided_offset] = packed_cnhw_values[compacted_offset];
}
}
*/
sparse_1x1_layer_updater_cuda::sparse_1x1_layer_updater_cuda()
: output_data_desc(0)
, bias_desc(0)
{
cudnn_safe_call(cudnnCreateTensorDescriptor(&input_strided_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&input_converted_NHWC_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&input_converted_CNHW_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&output_data_desc));
cudnn_safe_call(cudnnCreateTensorDescriptor(&bias_desc));
}
sparse_1x1_layer_updater_cuda::~sparse_1x1_layer_updater_cuda()
{
cudnnDestroyTensorDescriptor(input_strided_data_desc);
cudnnDestroyTensorDescriptor(input_converted_NHWC_data_desc);
cudnnDestroyTensorDescriptor(input_converted_CNHW_data_desc);
cudnnDestroyTensorDescriptor(output_data_desc);
cudnnDestroyTensorDescriptor(bias_desc);
}
void sparse_1x1_layer_updater_cuda::enqueue_forward_propagation(
cudaStream_t stream_id,
cuda_linear_buffer_device::ptr output_buffer,
const std::vector<cuda_linear_buffer_device::const_ptr>& schema_data,
const std::vector<cuda_linear_buffer_device::const_ptr>& data,
const std::vector<cuda_linear_buffer_device::const_ptr>& data_custom,
const std::vector<cuda_linear_buffer_device::const_ptr>& input_buffers,
const std::vector<cuda_linear_buffer_device::const_ptr>& persistent_working_data,
cuda_linear_buffer_device::ptr temporary_working_fixed_buffer,
cuda_linear_buffer_device::ptr temporary_working_per_entry_buffer,
cuda_linear_buffer_device::ptr temporary_fixed_buffer,
cuda_linear_buffer_device::ptr temporary_per_entry_buffer,
unsigned int entry_count)
{
// Convert input data strided NCHW to packed CNHW format
if (unit_stride)
{
cuda_util::transpose23(
*cuda_config,
*input_buffers[0],
*temporary_working_per_entry_buffer,
input_elem_count_per_feature_map_list[0],
input_configuration_specific_list[0].feature_map_count,
entry_count,
stream_id);
}
else
{
std::vector<unsigned int> input_converted_CNHW_strides = input_converted_CNHW_strides_base;
input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 2] = input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 1] * entry_count;
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
input_strided_data_desc,
input_strided_config,
entry_count,
input_strides);
cudnn_util::set_tensor_descriptor(
input_converted_CNHW_data_desc,
input_strided_config,
entry_count,
input_converted_CNHW_strides);
float alpha = 1.0F;
float beta = 0.0F;
cudnn_safe_call(cudnnAddTensor(
cuda_config->get_cudnn_handle(),
&alpha,
input_strided_data_desc,
*input_buffers[0],
&beta,
input_converted_CNHW_data_desc,
*temporary_working_per_entry_buffer));
}
{
cusparse_safe_call(cusparseSetStream(cuda_config->get_cusparse_handle(), stream_id));
float alpha = 1.0F;
float beta = 0.0F;
cusparseMatDescr_t mat_descr;
cusparse_safe_call(cusparseCreateMatDescr(&mat_descr));
cusparse_safe_call(cusparseScsrmm2(
cuda_config->get_cusparse_handle(),
CUSPARSE_OPERATION_NON_TRANSPOSE,
CUSPARSE_OPERATION_TRANSPOSE,
output_configuration_specific.feature_map_count,
entry_count * output_elem_count_per_feature_map,
input_strided_config.feature_map_count,
feature_map_connection_count,
&alpha,
mat_descr,
*data[0],
*data_custom[1],
*data_custom[0],
*temporary_working_per_entry_buffer,
entry_count * output_elem_count_per_feature_map,
&beta,
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
output_configuration_specific.feature_map_count));
}
// Convert output from NHWC to NCHW
{
cuda_util::transpose(
*cuda_config,
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
*output_buffer,
output_configuration_specific.feature_map_count,
output_elem_count_per_feature_map,
entry_count,
stream_id);
}
// Add bias
if (bias)
{
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
output_data_desc,
output_configuration_specific,
entry_count);
float alpha = 1.0F;
float beta = 1.0F;
cudnn_safe_call(cudnnAddTensor(
cuda_config->get_cudnn_handle(),
&alpha,
bias_desc,
*data[1],
&beta,
output_data_desc,
*output_buffer));
}
}
void sparse_1x1_layer_updater_cuda::enqueue_backward_data_propagation(
cudaStream_t stream_id,
unsigned int input_index,
cuda_linear_buffer_device::ptr input_errors_buffer,
cuda_linear_buffer_device::const_ptr output_errors_buffer,
const std::vector<cuda_linear_buffer_device::const_ptr>& schema_data,
const std::vector<cuda_linear_buffer_device::const_ptr>& data,
const std::vector<cuda_linear_buffer_device::const_ptr>& data_custom,
const std::vector<cuda_linear_buffer_device::const_ptr>& input_neurons_buffers,
cuda_linear_buffer_device::const_ptr output_neurons_buffer,
const std::vector<cuda_linear_buffer_device::const_ptr>& persistent_working_data,
cuda_linear_buffer_device::ptr temporary_working_fixed_buffer,
cuda_linear_buffer_device::ptr temporary_working_per_entry_buffer,
cuda_linear_buffer_device::const_ptr temporary_fixed_buffer,
cuda_linear_buffer_device::const_ptr temporary_per_entry_buffer,
bool add_update_to_destination,
unsigned int entry_count)
{
// Convert output errors from NCHW to CNHW
{
cuda_util::transpose23(
*cuda_config,
*output_errors_buffer,
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
output_elem_count_per_feature_map,
output_configuration_specific.feature_map_count,
entry_count,
stream_id);
}
{
cuda_util::set_with_value(
*cuda_config,
*temporary_working_per_entry_buffer,
0.0F,
input_converted_elem_count_per_entry_aligned * entry_count,
stream_id);
std::pair<int, int> entry32_block_size_and_count = get_entry32_backprop_block_size_and_count(entry_count * output_elem_count_per_feature_map);
std::pair<dim3, dim3> kernel_dims = cuda_util::get_grid_and_threadblock_sizes_sequential_access(
*cuda_config,
32 * ((max_column_index_count_per_row + OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE - 1) / OUTPUT_ELEM_COUNT_BACKPROP_BLOCK_SIZE),
output_configuration_specific.feature_map_count,
entry32_block_size_and_count.second,
32);
sparse_1x1_backprop_upd_kernel<<<kernel_dims.first, kernel_dims.second, 0, stream_id>>>(
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
*temporary_working_per_entry_buffer,
*data[0],
*data_custom[0],
*data_custom[1],
output_configuration_specific.feature_map_count,
entry_count * output_elem_count_per_feature_map,
entry32_block_size_and_count.first);
}
// Convert input errors from CNHW to NCHW
{
if ((!add_update_to_destination) && (!unit_stride))
{
cuda_util::set_with_value(
*cuda_config,
*input_errors_buffer,
0.0F,
input_elem_count_per_entry_list[0] * entry_count,
stream_id);
}
if (unit_stride)
{
cuda_util::transpose23(
*cuda_config,
*temporary_working_per_entry_buffer,
*input_errors_buffer,
input_elem_count_per_feature_map_list[0],
entry_count,
input_configuration_specific_list[0].feature_map_count,
stream_id,
add_update_to_destination);
}
else
{
std::vector<unsigned int> input_converted_CNHW_strides = input_converted_CNHW_strides_base;
input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 2] = input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 1] * entry_count;
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
input_strided_data_desc,
input_strided_config,
entry_count,
input_strides);
cudnn_util::set_tensor_descriptor(
input_converted_CNHW_data_desc,
input_strided_config,
entry_count,
input_converted_CNHW_strides);
float alpha = 1.0F;
float beta = add_update_to_destination ? 1.0F : 0.0F;
cudnn_safe_call(cudnnAddTensor(
cuda_config->get_cudnn_handle(),
&alpha,
input_converted_CNHW_data_desc,
*temporary_working_per_entry_buffer,
&beta,
input_strided_data_desc,
*input_errors_buffer));
}
}
}
void sparse_1x1_layer_updater_cuda::enqueue_backward_weights_propagation(
cudaStream_t stream_id,
const std::vector<cuda_linear_buffer_device::const_ptr>& schema_data,
const std::vector<cuda_linear_buffer_device::ptr>& gradient,
const std::vector<cuda_linear_buffer_device::const_ptr>& data_custom,
const std::vector<cuda_linear_buffer_device::const_ptr>& input_neurons_buffers,
cuda_linear_buffer_device::const_ptr output_errors_buffer,
const std::vector<cuda_linear_buffer_device::const_ptr>& persistent_working_data,
cuda_linear_buffer_device::ptr temporary_working_fixed_buffer,
cuda_linear_buffer_device::ptr temporary_working_per_entry_buffer,
cuda_linear_buffer_device::const_ptr temporary_fixed_buffer,
cuda_linear_buffer_device::const_ptr temporary_per_entry_buffer,
unsigned int entry_count)
{
// Convert input data to packed CNHW format
if (unit_stride)
{
cuda_util::transpose(
*cuda_config,
*input_neurons_buffers[0],
*temporary_working_per_entry_buffer,
input_elem_count_per_feature_map_list[0],
input_configuration_specific_list[0].feature_map_count,
entry_count,
stream_id);
}
else
{
std::vector<unsigned int> input_converted_CNHW_strides = input_converted_CNHW_strides_base;
input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 2] = input_converted_CNHW_strides[input_converted_CNHW_strides.size() - 1] * entry_count;
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
input_strided_data_desc,
input_strided_config,
entry_count,
input_strides);
cudnn_util::set_tensor_descriptor(
input_converted_CNHW_data_desc,
input_strided_config,
entry_count,
input_converted_CNHW_strides);
float alpha = 1.0F;
float beta = 0.0F;
cudnn_safe_call(cudnnAddTensor(
cuda_config->get_cudnn_handle(),
&alpha,
input_strided_data_desc,
*input_neurons_buffers[0],
&beta,
input_converted_CNHW_data_desc,
*temporary_working_per_entry_buffer));
}
// Convert output from NCHW to CNHW
{
cuda_util::transpose23(
*cuda_config,
*output_errors_buffer,
((float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
output_elem_count_per_feature_map,
output_configuration_specific.feature_map_count,
entry_count,
stream_id);
}
{
std::pair<int, int> entry32_block_size_and_count = get_entry32_update_block_size_and_count(output_elem_count_per_feature_map * entry_count);
std::pair<dim3, dim3> kernel_dims = cuda_util::get_grid_and_threadblock_sizes_sequential_access(
*cuda_config,
32 * ((max_column_index_count_per_row + OUTPUT_ELEM_COUNT_BLOCK_SIZE - 1) / OUTPUT_ELEM_COUNT_BLOCK_SIZE),
output_configuration_specific.feature_map_count,
entry32_block_size_and_count.second,
32);
if (entry32_block_size_and_count.second > 1)
{
sparse_1x1_update_weights_kernel<false><<<kernel_dims.first, kernel_dims.second, 0, stream_id>>>(
((const float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
*temporary_working_per_entry_buffer,
*gradient[0],
*data_custom[0],
*data_custom[1],
output_configuration_specific.feature_map_count,
output_elem_count_per_feature_map * entry_count,
entry32_block_size_and_count.first);
}
else
{
sparse_1x1_update_weights_kernel<true><<<kernel_dims.first, kernel_dims.second, 0, stream_id>>>(
((const float *)*temporary_working_per_entry_buffer) + input_converted_elem_count_per_entry_aligned * entry_count,
*temporary_working_per_entry_buffer,
*gradient[0],
*data_custom[0],
*data_custom[1],
output_configuration_specific.feature_map_count,
output_elem_count_per_feature_map * entry_count,
entry32_block_size_and_count.first);
}
}
// Update biases
if (bias)
{
cudnn_safe_call(cudnnSetStream(cuda_config->get_cudnn_handle(), stream_id));
cudnn_util::set_tensor_descriptor(
output_data_desc,
output_configuration_specific,
entry_count);
float alpha = 1.0F;
float beta = 1.0F;
cudnn_safe_call(cudnnConvolutionBackwardBias(
cuda_config->get_cudnn_handle(),
&alpha,
output_data_desc,
*output_errors_buffer,
&beta,
bias_desc,
*gradient[1]));
}
}
void sparse_1x1_layer_updater_cuda::updater_configured()
{
nnforge_shared_ptr<const sparse_convolution_layer> layer_derived = nnforge_dynamic_pointer_cast<const sparse_convolution_layer>(layer_schema);
feature_map_connection_count = layer_derived->feature_map_connection_count;
bias = layer_derived->bias;
unit_stride = (layer_derived->strides == std::vector<unsigned int>(layer_derived->strides.size(), 1));
cudnn_util::set_tensor_bias_descriptor(
bias_desc,
output_configuration_specific.feature_map_count,
static_cast<unsigned int>(output_configuration_specific.dimension_sizes.size()));
int input_data_single_update_32block_entry_size = input_configuration_specific_list[0].feature_map_count * 32 * sizeof(float);
max_entry32_update_block_size = std::max(1, cuda_config->l2_cache_size / 2 / input_data_single_update_32block_entry_size);
int input_data_single_backprop_32block_entry_size = input_configuration_specific_list[0].feature_map_count * 32 * sizeof(float);
max_entry32_backprop_block_size = std::max(1, cuda_config->l2_cache_size / 2 / input_data_single_backprop_32block_entry_size);
const std::vector<unsigned int>& strides = layer_derived->strides;
input_strided_config.feature_map_count = input_configuration_specific_list[0].feature_map_count;
input_strided_config.dimension_sizes = output_configuration_specific.dimension_sizes;
input_strides.resize(strides.size() + 1);
unsigned int dim_size = 1;
for(int i = 0; i < strides.size(); ++i)
{
*(input_strides.begin() + i) = strides[i] * dim_size;
dim_size *= input_configuration_specific_list[0].dimension_sizes[i];
}
input_strides[strides.size()] = dim_size;
input_converted_NHWC_strides.resize(strides.size() + 2);
input_converted_NHWC_strides[strides.size()] = 1;
dim_size = input_strided_config.feature_map_count;
for(int i = 0; i < strides.size(); ++i)
{
input_converted_NHWC_strides[i] = dim_size;
dim_size *= output_configuration_specific.dimension_sizes[i];
}
input_converted_NHWC_strides.back() = dim_size;
input_converted_CNHW_strides_base.resize(strides.size() + 2);
dim_size = 1;
for(int i = 0; i < strides.size(); ++i)
{
input_converted_CNHW_strides_base[i] = dim_size;
dim_size *= output_configuration_specific.dimension_sizes[i];
}
input_converted_CNHW_strides_base[strides.size() + 1] = dim_size;
input_converted_elem_count_per_entry_aligned = (input_strided_config.get_neuron_count() + 4 - 1) / 4 * 4;
output_elem_count_per_entry_aligned = (output_configuration_specific.get_neuron_count() + 4 - 1) / 4 * 4;
}
void sparse_1x1_layer_updater_cuda::notify_data_custom(layer_data_custom::const_ptr host_data_custom)
{
max_column_index_count_per_row = 0;
const std::vector<int>& row_indices = host_data_custom->at(1);
for(int i = 0; i < row_indices.size() - 1; ++i)
max_column_index_count_per_row = std::max(max_column_index_count_per_row, row_indices[i + 1] - row_indices[i]);
}
size_t sparse_1x1_layer_updater_cuda::get_temporary_working_per_entry_buffer_size(const layer_action& action) const
{
if ((action.get_action_type() == layer_action::forward) || (action.get_action_type() == layer_action::backward_data) || (action.get_action_type() == layer_action::backward_weights))
return (input_converted_elem_count_per_entry_aligned * sizeof(float)) + (output_elem_count_per_entry_aligned * sizeof(float));
else
return layer_updater_cuda::get_temporary_working_per_entry_buffer_size(action);
}
std::pair<int, int> sparse_1x1_layer_updater_cuda::get_entry32_update_block_size_and_count(unsigned int entry_count) const
{
int candidate_block_size = (entry_count + 32 - 1) / 32;
if (candidate_block_size <= max_entry32_update_block_size)
return std::make_pair(candidate_block_size, 1);
int candidate_block_count2 = (candidate_block_size + max_entry32_update_block_size - 1) / max_entry32_update_block_size;
int candidate_block_size2 = (candidate_block_size + candidate_block_count2 - 1) / candidate_block_count2;
return std::make_pair(candidate_block_size2, candidate_block_count2);
}
std::pair<int, int> sparse_1x1_layer_updater_cuda::get_entry32_backprop_block_size_and_count(unsigned int entry_count) const
{
int candidate_block_size = (entry_count + 32 - 1) / 32;
if (candidate_block_size <= max_entry32_backprop_block_size)
return std::make_pair(candidate_block_size, 1);
int candidate_block_count2 = (candidate_block_size + max_entry32_backprop_block_size - 1) / max_entry32_backprop_block_size;
int candidate_block_size2 = (candidate_block_size + candidate_block_count2 - 1) / candidate_block_count2;
return std::make_pair(candidate_block_size2, candidate_block_count2);
}
bool sparse_1x1_layer_updater_cuda::is_backward_data_dependent_on_input_buffer(unsigned int action_input_index, unsigned int data_input_index) const
{
return false;
}
bool sparse_1x1_layer_updater_cuda::is_backward_data_dependent_on_output_buffer(unsigned int action_input_index) const
{
return false;
}
bool sparse_1x1_layer_updater_cuda::is_backward_weights_dependent_on_input_buffer(unsigned int data_input_index) const
{
return true;
}
}
}
|
a35aad5ed68024bdb2a28c10387b0cded98e57dc.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "common_cuda_helper.hpp"
#include "trt_cuda_helper.cuh"
#include "trt_plugin_helper.hpp"
using mmcv::TensorDesc;
template <class scalar_t>
__global__ void copy_permute_kernel(scalar_t *dst, const scalar_t *src, int n,
TensorDesc ts_src_stride,
TensorDesc ts_dst_stride,
TensorDesc ts_permute) {
const int src_dim = ts_src_stride.dim;
int *src_stride = &(ts_src_stride.stride[0]);
int *dst_stride = &(ts_dst_stride.stride[0]);
int *permute = &(ts_permute.shape[0]);
CUDA_1D_KERNEL_LOOP(index, n) {
size_t dst_index = index;
size_t src_index = 0;
for (int i = 0; i < src_dim; ++i) {
int dim_index = dst_index / dst_stride[i];
dst_index = dst_index % dst_stride[i];
src_index += dim_index * src_stride[permute[i]];
}
dst[index] = src[src_index];
}
}
template <class scalar_t>
void memcpyPermute(scalar_t *dst, const scalar_t *src, int *src_size,
int *permute, int src_dim, hipStream_t stream) {
size_t copy_size = 1;
TensorDesc ts_permute;
memcpy(&(ts_permute.shape[0]), permute, src_dim * sizeof(int));
TensorDesc ts_src_stride;
TensorDesc ts_dst_stride;
ts_src_stride.dim = src_dim;
ts_dst_stride.dim = src_dim;
int *src_stride = &(ts_src_stride.stride[0]);
int *dst_stride = &(ts_dst_stride.stride[0]);
int *dst_size = &(ts_dst_stride.shape[0]);
src_stride[src_dim - 1] = 1;
dst_stride[src_dim - 1] = 1;
for (int i = src_dim - 1; i >= 0; --i) {
dst_size[i] = src_size[permute[i]];
if (i < src_dim - 1) {
src_stride[i] = src_stride[i + 1] * src_size[i + 1];
}
}
for (int i = src_dim - 1; i >= 0; --i) {
copy_size *= dst_size[i];
if (i < src_dim - 1) {
dst_stride[i] = dst_stride[i + 1] * dst_size[i + 1];
}
}
hipLaunchKernelGGL(( copy_permute_kernel<scalar_t>)
, dim3(GET_BLOCKS(copy_size)), dim3(THREADS_PER_BLOCK), 0, stream,
dst, src, copy_size, ts_src_stride, ts_dst_stride, ts_permute);
}
template void memcpyPermute<float>(float *dst, const float *src, int *src_size,
int *permute, int src_dim,
hipStream_t stream);
| a35aad5ed68024bdb2a28c10387b0cded98e57dc.cu | #include "common_cuda_helper.hpp"
#include "trt_cuda_helper.cuh"
#include "trt_plugin_helper.hpp"
using mmcv::TensorDesc;
template <class scalar_t>
__global__ void copy_permute_kernel(scalar_t *dst, const scalar_t *src, int n,
TensorDesc ts_src_stride,
TensorDesc ts_dst_stride,
TensorDesc ts_permute) {
const int src_dim = ts_src_stride.dim;
int *src_stride = &(ts_src_stride.stride[0]);
int *dst_stride = &(ts_dst_stride.stride[0]);
int *permute = &(ts_permute.shape[0]);
CUDA_1D_KERNEL_LOOP(index, n) {
size_t dst_index = index;
size_t src_index = 0;
for (int i = 0; i < src_dim; ++i) {
int dim_index = dst_index / dst_stride[i];
dst_index = dst_index % dst_stride[i];
src_index += dim_index * src_stride[permute[i]];
}
dst[index] = src[src_index];
}
}
template <class scalar_t>
void memcpyPermute(scalar_t *dst, const scalar_t *src, int *src_size,
int *permute, int src_dim, cudaStream_t stream) {
size_t copy_size = 1;
TensorDesc ts_permute;
memcpy(&(ts_permute.shape[0]), permute, src_dim * sizeof(int));
TensorDesc ts_src_stride;
TensorDesc ts_dst_stride;
ts_src_stride.dim = src_dim;
ts_dst_stride.dim = src_dim;
int *src_stride = &(ts_src_stride.stride[0]);
int *dst_stride = &(ts_dst_stride.stride[0]);
int *dst_size = &(ts_dst_stride.shape[0]);
src_stride[src_dim - 1] = 1;
dst_stride[src_dim - 1] = 1;
for (int i = src_dim - 1; i >= 0; --i) {
dst_size[i] = src_size[permute[i]];
if (i < src_dim - 1) {
src_stride[i] = src_stride[i + 1] * src_size[i + 1];
}
}
for (int i = src_dim - 1; i >= 0; --i) {
copy_size *= dst_size[i];
if (i < src_dim - 1) {
dst_stride[i] = dst_stride[i + 1] * dst_size[i + 1];
}
}
copy_permute_kernel<scalar_t>
<<<GET_BLOCKS(copy_size), THREADS_PER_BLOCK, 0, stream>>>(
dst, src, copy_size, ts_src_stride, ts_dst_stride, ts_permute);
}
template void memcpyPermute<float>(float *dst, const float *src, int *src_size,
int *permute, int src_dim,
cudaStream_t stream);
|
9d9bbcfabd377ade76eb0a9bd4ae4244c639f0eb.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
#include "includes.h"
__global__ void hello_cuda()
{
printf("Hello CUDA world \n");
} | 9d9bbcfabd377ade76eb0a9bd4ae4244c639f0eb.cu | #include "includes.h"
__global__ void hello_cuda()
{
printf("Hello CUDA world \n");
} |
a607092efb2a4e32ba1ca98b294876695cc0c051.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
//# FIR_Filter.cu
//# Copyright (C) 2013 ASTRON (Netherlands Institute for Radio Astronomy)
//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
//#
//# This file is part of the LOFAR software suite.
//# The LOFAR software suite is free software: you can redistribute it and/or
//# modify it under the terms of the GNU General Public License as published
//# by the Free Software Foundation, either version 3 of the License, or
//# (at your option) any later version.
//#
//# The LOFAR software suite is distributed in the hope that it will be useful,
//# but WITHOUT ANY WARRANTY; without even the implied warranty of
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//# GNU General Public License for more details.
//#
//# You should have received a copy of the GNU General Public License along
//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
//#
//# $Id: FIR_Filter.cu 27000 2013-10-17 09:11:13Z loose $
#include "IntToFloat.cuh"
#include <stdio.h>
#if !(NR_STABS >= 1)
#error Precondition violated: NR_STABS >= 1
#endif
#if !(NR_TAPS == 16)
#error Precondition violated: NR_TAPS == 16
#endif
#if !(NR_SUBBANDS > 0)
#error Precondition violated: NR_SUBBANDS > 0
#endif
#if !(NR_SAMPLES_PER_CHANNEL > 0 && NR_SAMPLES_PER_CHANNEL % NR_TAPS == 0)
#error Precondition violated: NR_SAMPLES_PER_CHANNEL > 0 && NR_SAMPLES_PER_CHANNEL % NR_TAPS == 0
#endif
#if NR_BITS_PER_SAMPLE == 16
typedef signed short SampleType;
#elif NR_BITS_PER_SAMPLE == 8
typedef signed char SampleType;
#else
#error Precondition violated: NR_BITS_PER_SAMPLE == 8 || NR_BITS_PER_SAMPLE == 16
#endif
#if NR_CHANNELS == 1
#warning TODO: NR_CHANNELS == 1 is not (yet) supported
#elif !(NR_CHANNELS > 0 && NR_CHANNELS % 16 == 0)
#error Precondition violated: NR_CHANNELS > 0 && NR_CHANNELS % 16 == 0
#endif
#if !(NR_POLARIZATIONS == 2)
#error Precondition violated: NR_POLARIZATIONS == 2
#endif
#if !(COMPLEX == 2)
#error Precondition violated: COMPLEX == 2
#endif
//# NR_STABS means #stations (correlator) or #TABs (beamformer).
typedef SampleType (*SampledDataType)[NR_STABS][NR_SAMPLES_PER_CHANNEL][NR_CHANNELS][NR_POLARIZATIONS * COMPLEX];
typedef SampleType (*HistoryDataType)[NR_SUBBANDS][NR_STABS][NR_TAPS - 1][NR_CHANNELS][NR_POLARIZATIONS * COMPLEX];
typedef float (*FilteredDataType)[NR_STABS][NR_POLARIZATIONS][NR_SAMPLES_PER_CHANNEL][NR_CHANNELS][COMPLEX];
typedef const float (*WeightsType)[NR_CHANNELS][NR_TAPS];
/*!
* Applies the Finite Input Response filter defined by the weightsPtr array
* to the sampledDataPtr array. Output is written into the filteredDataPtr
* array. The filter works on complex numbers. The weights are real values only.
*
* Input values are first converted to (complex) float.
* The kernel also reorders the polarization dimension and expects the weights
* per channel in reverse order. If an FFT is applied afterwards, the weights
* of the odd channels are often supplied negated to get the resulting channels
* in increasing order of frequency.
*
* \param[out] filteredDataPtr 4D output array of floats
* \param[in] sampledDataPtr 4D input array of signed chars or shorts
* \param[in] weightsPtr 2D per-channel FIR filter coefficient array of floats (considering float16 as a dim)
* \param[in] historyDataPtr 5D input array of history input samples needed to initialize the FIR filter
* \param[in] subbandIdx index of the subband to process
*
* Pre-processor input symbols (some are tied to the execution configuration)
* Symbol | Valid Values | Description
* ----------------------- | --------------------------- | -----------
* NR_STABS | >= 1 | number of antenna fields (correlator), or number of tight array beams (tabs) (beamformer)
* NR_TAPS | 16 | number of FIR filtering coefficients
* NR_SAMPLES_PER_CHANNEL | multiple of NR_TAPS and > 0 | number of input samples per channel
* NR_BITS_PER_SAMPLE | 8 or 16 | number of bits of signed integral value type of sampledDataPtr (TODO: support 4)
* NR_CHANNELS | multiple of 16 and > 0 | number of frequency channels per subband
* NR_POLARIZATIONS | 2 | number of polarizations
* COMPLEX | 2 | size of complex in number of floats/doubles
*
* Execution configuration: (TODO: enforce using __attribute__ reqd_work_group_size)
* - Work dim == 2 (can be 1 iff NR_STABS == 1)
* + Inner dim: the channel, pol, real/imag the thread processes
* + Outer dim: the station the thread processes
* - Work group size: must divide global size, no other kernel restrictions
* - Global size: (NR_CHANNELS * NR_POLARIZATIONS * 2, NR_STABS)
*
* TODO: convert complex dim to fcomplex (=float2 in math.cl) in device code and to complex<float> in host code.
*/
extern "C" {
__global__ void FIR_filter( void *filteredDataPtr,
const void *sampledDataPtr,
const void *weightsPtr,
void *historyDataPtr,
unsigned subbandIdx)
{
SampledDataType sampledData = (SampledDataType) sampledDataPtr;
FilteredDataType filteredData = (FilteredDataType) filteredDataPtr;
WeightsType weightsData = (WeightsType) weightsPtr;
HistoryDataType historyData = (HistoryDataType) historyDataPtr;
unsigned cpr = blockIdx.x*blockDim.x+threadIdx.x;
#if 0
//# Straight index calc for NR_CHANNELS == 1
uint pol_ri = cpr & 3;
uint channel = cpr >> 2;
uint ri = cpr & 1;
uint pol = pol_ri >> 1;
#else
unsigned ri = cpr & 1; // index (real/imag) in output data
unsigned channel = (cpr >> 1) % NR_CHANNELS; // index in input & output data
unsigned pol = (cpr >> 1) / NR_CHANNELS; // index (polarization) in output data
unsigned pol_ri = (pol << 1) | ri; // index (polarization & real/imag) in input data
#endif
unsigned station = blockIdx.y;
//# const float16 weights = (*weightsData)[channel];
const float weights_s0 = (*weightsData)[channel][0];
const float weights_s1 = (*weightsData)[channel][1];
const float weights_s2 = (*weightsData)[channel][2];
const float weights_s3 = (*weightsData)[channel][3];
const float weights_s4 = (*weightsData)[channel][4];
const float weights_s5 = (*weightsData)[channel][5];
const float weights_s6 = (*weightsData)[channel][6];
const float weights_s7 = (*weightsData)[channel][7];
const float weights_s8 = (*weightsData)[channel][8];
const float weights_s9 = (*weightsData)[channel][9];
const float weights_sA = (*weightsData)[channel][10];
const float weights_sB = (*weightsData)[channel][11];
const float weights_sC = (*weightsData)[channel][12];
const float weights_sD = (*weightsData)[channel][13];
const float weights_sE = (*weightsData)[channel][14];
const float weights_sF = (*weightsData)[channel][15];
//# float16 delayLine;
float delayLine_s0, delayLine_s1, delayLine_s2, delayLine_s3,
delayLine_s4, delayLine_s5, delayLine_s6, delayLine_s7,
delayLine_s8, delayLine_s9, delayLine_sA, delayLine_sB,
delayLine_sC, delayLine_sD, delayLine_sE, delayLine_sF;
delayLine_s0 = convertIntToFloat((*historyData)[subbandIdx][station][0][channel][pol_ri]);
delayLine_s1 = convertIntToFloat((*historyData)[subbandIdx][station][1][channel][pol_ri]);
delayLine_s2 = convertIntToFloat((*historyData)[subbandIdx][station][2][channel][pol_ri]);
delayLine_s3 = convertIntToFloat((*historyData)[subbandIdx][station][3][channel][pol_ri]);
delayLine_s4 = convertIntToFloat((*historyData)[subbandIdx][station][4][channel][pol_ri]);
delayLine_s5 = convertIntToFloat((*historyData)[subbandIdx][station][5][channel][pol_ri]);
delayLine_s6 = convertIntToFloat((*historyData)[subbandIdx][station][6][channel][pol_ri]);
delayLine_s7 = convertIntToFloat((*historyData)[subbandIdx][station][7][channel][pol_ri]);
delayLine_s8 = convertIntToFloat((*historyData)[subbandIdx][station][8][channel][pol_ri]);
delayLine_s9 = convertIntToFloat((*historyData)[subbandIdx][station][9][channel][pol_ri]);
delayLine_sA = convertIntToFloat((*historyData)[subbandIdx][station][10][channel][pol_ri]);
delayLine_sB = convertIntToFloat((*historyData)[subbandIdx][station][11][channel][pol_ri]);
delayLine_sC = convertIntToFloat((*historyData)[subbandIdx][station][12][channel][pol_ri]);
delayLine_sD = convertIntToFloat((*historyData)[subbandIdx][station][13][channel][pol_ri]);
delayLine_sE = convertIntToFloat((*historyData)[subbandIdx][station][14][channel][pol_ri]);
float sum_s0, sum_s1, sum_s2, sum_s3,
sum_s4, sum_s5, sum_s6, sum_s7,
sum_s8, sum_s9, sum_sA, sum_sB,
sum_sC, sum_sD, sum_sE, sum_sF;
for (unsigned time = 0; time < NR_SAMPLES_PER_CHANNEL; time += NR_TAPS)
{
delayLine_sF = convertIntToFloat((*sampledData)[station][time + 0][channel][pol_ri]);
sum_s0 = weights_sF * delayLine_s0;
delayLine_s0 = convertIntToFloat((*sampledData)[station][time + 1][channel][pol_ri]);
sum_s0 += weights_sE * delayLine_s1;
sum_s0 += weights_sD * delayLine_s2;
sum_s0 += weights_sC * delayLine_s3;
sum_s0 += weights_sB * delayLine_s4;
sum_s0 += weights_sA * delayLine_s5;
sum_s0 += weights_s9 * delayLine_s6;
sum_s0 += weights_s8 * delayLine_s7;
sum_s0 += weights_s7 * delayLine_s8;
sum_s0 += weights_s6 * delayLine_s9;
sum_s0 += weights_s5 * delayLine_sA;
sum_s0 += weights_s4 * delayLine_sB;
sum_s0 += weights_s3 * delayLine_sC;
sum_s0 += weights_s2 * delayLine_sD;
sum_s0 += weights_s1 * delayLine_sE;
sum_s0 += weights_s0 * delayLine_sF;
(*filteredData)[station][pol][time + 0][channel][ri] = sum_s0;
sum_s1 = weights_sF * delayLine_s1;
delayLine_s1 = convertIntToFloat((*sampledData)[station][time + 2][channel][pol_ri]);
sum_s1 += weights_sE * delayLine_s2;
sum_s1 += weights_sD * delayLine_s3;
sum_s1 += weights_sC * delayLine_s4;
sum_s1 += weights_sB * delayLine_s5;
sum_s1 += weights_sA * delayLine_s6;
sum_s1 += weights_s9 * delayLine_s7;
sum_s1 += weights_s8 * delayLine_s8;
sum_s1 += weights_s7 * delayLine_s9;
sum_s1 += weights_s6 * delayLine_sA;
sum_s1 += weights_s5 * delayLine_sB;
sum_s1 += weights_s4 * delayLine_sC;
sum_s1 += weights_s3 * delayLine_sD;
sum_s1 += weights_s2 * delayLine_sE;
sum_s1 += weights_s1 * delayLine_sF;
sum_s1 += weights_s0 * delayLine_s0;
(*filteredData)[station][pol][time + 1][channel][ri] = sum_s1;
sum_s2 = weights_sF * delayLine_s2;
delayLine_s2 = convertIntToFloat((*sampledData)[station][time + 3][channel][pol_ri]);
sum_s2 += weights_sE * delayLine_s3;
sum_s2 += weights_sD * delayLine_s4;
sum_s2 += weights_sC * delayLine_s5;
sum_s2 += weights_sB * delayLine_s6;
sum_s2 += weights_sA * delayLine_s7;
sum_s2 += weights_s9 * delayLine_s8;
sum_s2 += weights_s8 * delayLine_s9;
sum_s2 += weights_s7 * delayLine_sA;
sum_s2 += weights_s6 * delayLine_sB;
sum_s2 += weights_s5 * delayLine_sC;
sum_s2 += weights_s4 * delayLine_sD;
sum_s2 += weights_s3 * delayLine_sE;
sum_s2 += weights_s2 * delayLine_sF;
sum_s2 += weights_s1 * delayLine_s0;
sum_s2 += weights_s0 * delayLine_s1;
(*filteredData)[station][pol][time + 2][channel][ri] = sum_s2;
sum_s3 = weights_sF * delayLine_s3;
delayLine_s3 = convertIntToFloat((*sampledData)[station][time + 4][channel][pol_ri]);
sum_s3 += weights_sE * delayLine_s4;
sum_s3 += weights_sD * delayLine_s5;
sum_s3 += weights_sC * delayLine_s6;
sum_s3 += weights_sB * delayLine_s7;
sum_s3 += weights_sA * delayLine_s8;
sum_s3 += weights_s9 * delayLine_s9;
sum_s3 += weights_s8 * delayLine_sA;
sum_s3 += weights_s7 * delayLine_sB;
sum_s3 += weights_s6 * delayLine_sC;
sum_s3 += weights_s5 * delayLine_sD;
sum_s3 += weights_s4 * delayLine_sE;
sum_s3 += weights_s3 * delayLine_sF;
sum_s3 += weights_s2 * delayLine_s0;
sum_s3 += weights_s1 * delayLine_s1;
sum_s3 += weights_s0 * delayLine_s2;
(*filteredData)[station][pol][time + 3][channel][ri] = sum_s3;
sum_s4 = weights_sF * delayLine_s4;
delayLine_s4 = convertIntToFloat((*sampledData)[station][time + 5][channel][pol_ri]);
sum_s4 += weights_sE * delayLine_s5;
sum_s4 += weights_sD * delayLine_s6;
sum_s4 += weights_sC * delayLine_s7;
sum_s4 += weights_sB * delayLine_s8;
sum_s4 += weights_sA * delayLine_s9;
sum_s4 += weights_s9 * delayLine_sA;
sum_s4 += weights_s8 * delayLine_sB;
sum_s4 += weights_s7 * delayLine_sC;
sum_s4 += weights_s6 * delayLine_sD;
sum_s4 += weights_s5 * delayLine_sE;
sum_s4 += weights_s4 * delayLine_sF;
sum_s4 += weights_s3 * delayLine_s0;
sum_s4 += weights_s2 * delayLine_s1;
sum_s4 += weights_s1 * delayLine_s2;
sum_s4 += weights_s0 * delayLine_s3;
(*filteredData)[station][pol][time + 4][channel][ri] = sum_s4;
sum_s5 = weights_sF * delayLine_s5;
delayLine_s5 = convertIntToFloat((*sampledData)[station][time + 6][channel][pol_ri]);
sum_s5 += weights_sE * delayLine_s6;
sum_s5 += weights_sD * delayLine_s7;
sum_s5 += weights_sC * delayLine_s8;
sum_s5 += weights_sB * delayLine_s9;
sum_s5 += weights_sA * delayLine_sA;
sum_s5 += weights_s9 * delayLine_sB;
sum_s5 += weights_s8 * delayLine_sC;
sum_s5 += weights_s7 * delayLine_sD;
sum_s5 += weights_s6 * delayLine_sE;
sum_s5 += weights_s5 * delayLine_sF;
sum_s5 += weights_s4 * delayLine_s0;
sum_s5 += weights_s3 * delayLine_s1;
sum_s5 += weights_s2 * delayLine_s2;
sum_s5 += weights_s1 * delayLine_s3;
sum_s5 += weights_s0 * delayLine_s4;
(*filteredData)[station][pol][time + 5][channel][ri] = sum_s5;
sum_s6 = weights_sF * delayLine_s6;
delayLine_s6 = convertIntToFloat((*sampledData)[station][time + 7][channel][pol_ri]);
sum_s6 += weights_sE * delayLine_s7;
sum_s6 += weights_sD * delayLine_s8;
sum_s6 += weights_sC * delayLine_s9;
sum_s6 += weights_sB * delayLine_sA;
sum_s6 += weights_sA * delayLine_sB;
sum_s6 += weights_s9 * delayLine_sC;
sum_s6 += weights_s8 * delayLine_sD;
sum_s6 += weights_s7 * delayLine_sE;
sum_s6 += weights_s6 * delayLine_sF;
sum_s6 += weights_s5 * delayLine_s0;
sum_s6 += weights_s4 * delayLine_s1;
sum_s6 += weights_s3 * delayLine_s2;
sum_s6 += weights_s2 * delayLine_s3;
sum_s6 += weights_s1 * delayLine_s4;
sum_s6 += weights_s0 * delayLine_s5;
(*filteredData)[station][pol][time + 6][channel][ri] = sum_s6;
sum_s7 = weights_sF * delayLine_s7;
delayLine_s7 = convertIntToFloat((*sampledData)[station][time + 8][channel][pol_ri]);
sum_s7 += weights_sE * delayLine_s8;
sum_s7 += weights_sD * delayLine_s9;
sum_s7 += weights_sC * delayLine_sA;
sum_s7 += weights_sB * delayLine_sB;
sum_s7 += weights_sA * delayLine_sC;
sum_s7 += weights_s9 * delayLine_sD;
sum_s7 += weights_s8 * delayLine_sE;
sum_s7 += weights_s7 * delayLine_sF;
sum_s7 += weights_s6 * delayLine_s0;
sum_s7 += weights_s5 * delayLine_s1;
sum_s7 += weights_s4 * delayLine_s2;
sum_s7 += weights_s3 * delayLine_s3;
sum_s7 += weights_s2 * delayLine_s4;
sum_s7 += weights_s1 * delayLine_s5;
sum_s7 += weights_s0 * delayLine_s6;
(*filteredData)[station][pol][time + 7][channel][ri] = sum_s7;
sum_s8 = weights_sF * delayLine_s8;
delayLine_s8 = convertIntToFloat((*sampledData)[station][time + 9][channel][pol_ri]);
sum_s8 += weights_sE * delayLine_s9;
sum_s8 += weights_sD * delayLine_sA;
sum_s8 += weights_sC * delayLine_sB;
sum_s8 += weights_sB * delayLine_sC;
sum_s8 += weights_sA * delayLine_sD;
sum_s8 += weights_s9 * delayLine_sE;
sum_s8 += weights_s8 * delayLine_sF;
sum_s8 += weights_s7 * delayLine_s0;
sum_s8 += weights_s6 * delayLine_s1;
sum_s8 += weights_s5 * delayLine_s2;
sum_s8 += weights_s4 * delayLine_s3;
sum_s8 += weights_s3 * delayLine_s4;
sum_s8 += weights_s2 * delayLine_s5;
sum_s8 += weights_s1 * delayLine_s6;
sum_s8 += weights_s0 * delayLine_s7;
(*filteredData)[station][pol][time + 8][channel][ri] = sum_s8;
sum_s9 = weights_sF * delayLine_s9;
delayLine_s9 = convertIntToFloat((*sampledData)[station][time + 10][channel][pol_ri]);
sum_s9 += weights_sE * delayLine_sA;
sum_s9 += weights_sD * delayLine_sB;
sum_s9 += weights_sC * delayLine_sC;
sum_s9 += weights_sB * delayLine_sD;
sum_s9 += weights_sA * delayLine_sE;
sum_s9 += weights_s9 * delayLine_sF;
sum_s9 += weights_s8 * delayLine_s0;
sum_s9 += weights_s7 * delayLine_s1;
sum_s9 += weights_s6 * delayLine_s2;
sum_s9 += weights_s5 * delayLine_s3;
sum_s9 += weights_s4 * delayLine_s4;
sum_s9 += weights_s3 * delayLine_s5;
sum_s9 += weights_s2 * delayLine_s6;
sum_s9 += weights_s1 * delayLine_s7;
sum_s9 += weights_s0 * delayLine_s8;
(*filteredData)[station][pol][time + 9][channel][ri] = sum_s9;
sum_sA = weights_sF * delayLine_sA;
delayLine_sA = convertIntToFloat((*sampledData)[station][time + 11][channel][pol_ri]);
sum_sA += weights_sE * delayLine_sB;
sum_sA += weights_sD * delayLine_sC;
sum_sA += weights_sC * delayLine_sD;
sum_sA += weights_sB * delayLine_sE;
sum_sA += weights_sA * delayLine_sF;
sum_sA += weights_s9 * delayLine_s0;
sum_sA += weights_s8 * delayLine_s1;
sum_sA += weights_s7 * delayLine_s2;
sum_sA += weights_s6 * delayLine_s3;
sum_sA += weights_s5 * delayLine_s4;
sum_sA += weights_s4 * delayLine_s5;
sum_sA += weights_s3 * delayLine_s6;
sum_sA += weights_s2 * delayLine_s7;
sum_sA += weights_s1 * delayLine_s8;
sum_sA += weights_s0 * delayLine_s9;
(*filteredData)[station][pol][time + 10][channel][ri] = sum_sA;
sum_sB = weights_sF * delayLine_sB;
delayLine_sB = convertIntToFloat((*sampledData)[station][time + 12][channel][pol_ri]);
sum_sB += weights_sE * delayLine_sC;
sum_sB += weights_sD * delayLine_sD;
sum_sB += weights_sC * delayLine_sE;
sum_sB += weights_sB * delayLine_sF;
sum_sB += weights_sA * delayLine_s0;
sum_sB += weights_s9 * delayLine_s1;
sum_sB += weights_s8 * delayLine_s2;
sum_sB += weights_s7 * delayLine_s3;
sum_sB += weights_s6 * delayLine_s4;
sum_sB += weights_s5 * delayLine_s5;
sum_sB += weights_s4 * delayLine_s6;
sum_sB += weights_s3 * delayLine_s7;
sum_sB += weights_s2 * delayLine_s8;
sum_sB += weights_s1 * delayLine_s9;
sum_sB += weights_s0 * delayLine_sA;
(*filteredData)[station][pol][time + 11][channel][ri] = sum_sB;
sum_sC = weights_sF * delayLine_sC;
delayLine_sC = convertIntToFloat((*sampledData)[station][time + 13][channel][pol_ri]);
sum_sC += weights_sE * delayLine_sD;
sum_sC += weights_sD * delayLine_sE;
sum_sC += weights_sC * delayLine_sF;
sum_sC += weights_sB * delayLine_s0;
sum_sC += weights_sA * delayLine_s1;
sum_sC += weights_s9 * delayLine_s2;
sum_sC += weights_s8 * delayLine_s3;
sum_sC += weights_s7 * delayLine_s4;
sum_sC += weights_s6 * delayLine_s5;
sum_sC += weights_s5 * delayLine_s6;
sum_sC += weights_s4 * delayLine_s7;
sum_sC += weights_s3 * delayLine_s8;
sum_sC += weights_s2 * delayLine_s9;
sum_sC += weights_s1 * delayLine_sA;
sum_sC += weights_s0 * delayLine_sB;
(*filteredData)[station][pol][time + 12][channel][ri] = sum_sC;
sum_sD = weights_sF * delayLine_sD;
delayLine_sD = convertIntToFloat((*sampledData)[station][time + 14][channel][pol_ri]);
sum_sD += weights_sE * delayLine_sE;
sum_sD += weights_sD * delayLine_sF;
sum_sD += weights_sC * delayLine_s0;
sum_sD += weights_sB * delayLine_s1;
sum_sD += weights_sA * delayLine_s2;
sum_sD += weights_s9 * delayLine_s3;
sum_sD += weights_s8 * delayLine_s4;
sum_sD += weights_s7 * delayLine_s5;
sum_sD += weights_s6 * delayLine_s6;
sum_sD += weights_s5 * delayLine_s7;
sum_sD += weights_s4 * delayLine_s8;
sum_sD += weights_s3 * delayLine_s9;
sum_sD += weights_s2 * delayLine_sA;
sum_sD += weights_s1 * delayLine_sB;
sum_sD += weights_s0 * delayLine_sC;
(*filteredData)[station][pol][time + 13][channel][ri] = sum_sD;
sum_sE = weights_sF * delayLine_sE;
delayLine_sE = convertIntToFloat((*sampledData)[station][time + 15][channel][pol_ri]);
sum_sE += weights_sE * delayLine_sF;
sum_sE += weights_sD * delayLine_s0;
sum_sE += weights_sC * delayLine_s1;
sum_sE += weights_sB * delayLine_s2;
sum_sE += weights_sA * delayLine_s3;
sum_sE += weights_s9 * delayLine_s4;
sum_sE += weights_s8 * delayLine_s5;
sum_sE += weights_s7 * delayLine_s6;
sum_sE += weights_s6 * delayLine_s7;
sum_sE += weights_s5 * delayLine_s8;
sum_sE += weights_s4 * delayLine_s9;
sum_sE += weights_s3 * delayLine_sA;
sum_sE += weights_s2 * delayLine_sB;
sum_sE += weights_s1 * delayLine_sC;
sum_sE += weights_s0 * delayLine_sD;
(*filteredData)[station][pol][time + 14][channel][ri] = sum_sE;
sum_sF = weights_sF * delayLine_sF;
sum_sF += weights_sE * delayLine_s0;
sum_sF += weights_sD * delayLine_s1;
sum_sF += weights_sC * delayLine_s2;
sum_sF += weights_sB * delayLine_s3;
sum_sF += weights_sA * delayLine_s4;
sum_sF += weights_s9 * delayLine_s5;
sum_sF += weights_s8 * delayLine_s6;
sum_sF += weights_s7 * delayLine_s7;
sum_sF += weights_s6 * delayLine_s8;
sum_sF += weights_s5 * delayLine_s9;
sum_sF += weights_s4 * delayLine_sA;
sum_sF += weights_s3 * delayLine_sB;
sum_sF += weights_s2 * delayLine_sC;
sum_sF += weights_s1 * delayLine_sD;
sum_sF += weights_s0 * delayLine_sE;
(*filteredData)[station][pol][time + 15][channel][ri] = sum_sF;
}
for (unsigned time = 0; time < NR_TAPS - 1; time++)
{
(*historyData)[subbandIdx][station][time][channel][pol_ri] =
(*sampledData)[station][NR_SAMPLES_PER_CHANNEL - (NR_TAPS - 1) + time][channel][pol_ri];
}
}
}
| a607092efb2a4e32ba1ca98b294876695cc0c051.cu | //# FIR_Filter.cu
//# Copyright (C) 2013 ASTRON (Netherlands Institute for Radio Astronomy)
//# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
//#
//# This file is part of the LOFAR software suite.
//# The LOFAR software suite is free software: you can redistribute it and/or
//# modify it under the terms of the GNU General Public License as published
//# by the Free Software Foundation, either version 3 of the License, or
//# (at your option) any later version.
//#
//# The LOFAR software suite is distributed in the hope that it will be useful,
//# but WITHOUT ANY WARRANTY; without even the implied warranty of
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//# GNU General Public License for more details.
//#
//# You should have received a copy of the GNU General Public License along
//# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
//#
//# $Id: FIR_Filter.cu 27000 2013-10-17 09:11:13Z loose $
#include "IntToFloat.cuh"
#include <stdio.h>
#if !(NR_STABS >= 1)
#error Precondition violated: NR_STABS >= 1
#endif
#if !(NR_TAPS == 16)
#error Precondition violated: NR_TAPS == 16
#endif
#if !(NR_SUBBANDS > 0)
#error Precondition violated: NR_SUBBANDS > 0
#endif
#if !(NR_SAMPLES_PER_CHANNEL > 0 && NR_SAMPLES_PER_CHANNEL % NR_TAPS == 0)
#error Precondition violated: NR_SAMPLES_PER_CHANNEL > 0 && NR_SAMPLES_PER_CHANNEL % NR_TAPS == 0
#endif
#if NR_BITS_PER_SAMPLE == 16
typedef signed short SampleType;
#elif NR_BITS_PER_SAMPLE == 8
typedef signed char SampleType;
#else
#error Precondition violated: NR_BITS_PER_SAMPLE == 8 || NR_BITS_PER_SAMPLE == 16
#endif
#if NR_CHANNELS == 1
#warning TODO: NR_CHANNELS == 1 is not (yet) supported
#elif !(NR_CHANNELS > 0 && NR_CHANNELS % 16 == 0)
#error Precondition violated: NR_CHANNELS > 0 && NR_CHANNELS % 16 == 0
#endif
#if !(NR_POLARIZATIONS == 2)
#error Precondition violated: NR_POLARIZATIONS == 2
#endif
#if !(COMPLEX == 2)
#error Precondition violated: COMPLEX == 2
#endif
//# NR_STABS means #stations (correlator) or #TABs (beamformer).
typedef SampleType (*SampledDataType)[NR_STABS][NR_SAMPLES_PER_CHANNEL][NR_CHANNELS][NR_POLARIZATIONS * COMPLEX];
typedef SampleType (*HistoryDataType)[NR_SUBBANDS][NR_STABS][NR_TAPS - 1][NR_CHANNELS][NR_POLARIZATIONS * COMPLEX];
typedef float (*FilteredDataType)[NR_STABS][NR_POLARIZATIONS][NR_SAMPLES_PER_CHANNEL][NR_CHANNELS][COMPLEX];
typedef const float (*WeightsType)[NR_CHANNELS][NR_TAPS];
/*!
* Applies the Finite Input Response filter defined by the weightsPtr array
* to the sampledDataPtr array. Output is written into the filteredDataPtr
* array. The filter works on complex numbers. The weights are real values only.
*
* Input values are first converted to (complex) float.
* The kernel also reorders the polarization dimension and expects the weights
* per channel in reverse order. If an FFT is applied afterwards, the weights
* of the odd channels are often supplied negated to get the resulting channels
* in increasing order of frequency.
*
* \param[out] filteredDataPtr 4D output array of floats
* \param[in] sampledDataPtr 4D input array of signed chars or shorts
* \param[in] weightsPtr 2D per-channel FIR filter coefficient array of floats (considering float16 as a dim)
* \param[in] historyDataPtr 5D input array of history input samples needed to initialize the FIR filter
* \param[in] subbandIdx index of the subband to process
*
* Pre-processor input symbols (some are tied to the execution configuration)
* Symbol | Valid Values | Description
* ----------------------- | --------------------------- | -----------
* NR_STABS | >= 1 | number of antenna fields (correlator), or number of tight array beams (tabs) (beamformer)
* NR_TAPS | 16 | number of FIR filtering coefficients
* NR_SAMPLES_PER_CHANNEL | multiple of NR_TAPS and > 0 | number of input samples per channel
* NR_BITS_PER_SAMPLE | 8 or 16 | number of bits of signed integral value type of sampledDataPtr (TODO: support 4)
* NR_CHANNELS | multiple of 16 and > 0 | number of frequency channels per subband
* NR_POLARIZATIONS | 2 | number of polarizations
* COMPLEX | 2 | size of complex in number of floats/doubles
*
* Execution configuration: (TODO: enforce using __attribute__ reqd_work_group_size)
* - Work dim == 2 (can be 1 iff NR_STABS == 1)
* + Inner dim: the channel, pol, real/imag the thread processes
* + Outer dim: the station the thread processes
* - Work group size: must divide global size, no other kernel restrictions
* - Global size: (NR_CHANNELS * NR_POLARIZATIONS * 2, NR_STABS)
*
* TODO: convert complex dim to fcomplex (=float2 in math.cl) in device code and to complex<float> in host code.
*/
extern "C" {
__global__ void FIR_filter( void *filteredDataPtr,
const void *sampledDataPtr,
const void *weightsPtr,
void *historyDataPtr,
unsigned subbandIdx)
{
SampledDataType sampledData = (SampledDataType) sampledDataPtr;
FilteredDataType filteredData = (FilteredDataType) filteredDataPtr;
WeightsType weightsData = (WeightsType) weightsPtr;
HistoryDataType historyData = (HistoryDataType) historyDataPtr;
unsigned cpr = blockIdx.x*blockDim.x+threadIdx.x;
#if 0
//# Straight index calc for NR_CHANNELS == 1
uint pol_ri = cpr & 3;
uint channel = cpr >> 2;
uint ri = cpr & 1;
uint pol = pol_ri >> 1;
#else
unsigned ri = cpr & 1; // index (real/imag) in output data
unsigned channel = (cpr >> 1) % NR_CHANNELS; // index in input & output data
unsigned pol = (cpr >> 1) / NR_CHANNELS; // index (polarization) in output data
unsigned pol_ri = (pol << 1) | ri; // index (polarization & real/imag) in input data
#endif
unsigned station = blockIdx.y;
//# const float16 weights = (*weightsData)[channel];
const float weights_s0 = (*weightsData)[channel][0];
const float weights_s1 = (*weightsData)[channel][1];
const float weights_s2 = (*weightsData)[channel][2];
const float weights_s3 = (*weightsData)[channel][3];
const float weights_s4 = (*weightsData)[channel][4];
const float weights_s5 = (*weightsData)[channel][5];
const float weights_s6 = (*weightsData)[channel][6];
const float weights_s7 = (*weightsData)[channel][7];
const float weights_s8 = (*weightsData)[channel][8];
const float weights_s9 = (*weightsData)[channel][9];
const float weights_sA = (*weightsData)[channel][10];
const float weights_sB = (*weightsData)[channel][11];
const float weights_sC = (*weightsData)[channel][12];
const float weights_sD = (*weightsData)[channel][13];
const float weights_sE = (*weightsData)[channel][14];
const float weights_sF = (*weightsData)[channel][15];
//# float16 delayLine;
float delayLine_s0, delayLine_s1, delayLine_s2, delayLine_s3,
delayLine_s4, delayLine_s5, delayLine_s6, delayLine_s7,
delayLine_s8, delayLine_s9, delayLine_sA, delayLine_sB,
delayLine_sC, delayLine_sD, delayLine_sE, delayLine_sF;
delayLine_s0 = convertIntToFloat((*historyData)[subbandIdx][station][0][channel][pol_ri]);
delayLine_s1 = convertIntToFloat((*historyData)[subbandIdx][station][1][channel][pol_ri]);
delayLine_s2 = convertIntToFloat((*historyData)[subbandIdx][station][2][channel][pol_ri]);
delayLine_s3 = convertIntToFloat((*historyData)[subbandIdx][station][3][channel][pol_ri]);
delayLine_s4 = convertIntToFloat((*historyData)[subbandIdx][station][4][channel][pol_ri]);
delayLine_s5 = convertIntToFloat((*historyData)[subbandIdx][station][5][channel][pol_ri]);
delayLine_s6 = convertIntToFloat((*historyData)[subbandIdx][station][6][channel][pol_ri]);
delayLine_s7 = convertIntToFloat((*historyData)[subbandIdx][station][7][channel][pol_ri]);
delayLine_s8 = convertIntToFloat((*historyData)[subbandIdx][station][8][channel][pol_ri]);
delayLine_s9 = convertIntToFloat((*historyData)[subbandIdx][station][9][channel][pol_ri]);
delayLine_sA = convertIntToFloat((*historyData)[subbandIdx][station][10][channel][pol_ri]);
delayLine_sB = convertIntToFloat((*historyData)[subbandIdx][station][11][channel][pol_ri]);
delayLine_sC = convertIntToFloat((*historyData)[subbandIdx][station][12][channel][pol_ri]);
delayLine_sD = convertIntToFloat((*historyData)[subbandIdx][station][13][channel][pol_ri]);
delayLine_sE = convertIntToFloat((*historyData)[subbandIdx][station][14][channel][pol_ri]);
float sum_s0, sum_s1, sum_s2, sum_s3,
sum_s4, sum_s5, sum_s6, sum_s7,
sum_s8, sum_s9, sum_sA, sum_sB,
sum_sC, sum_sD, sum_sE, sum_sF;
for (unsigned time = 0; time < NR_SAMPLES_PER_CHANNEL; time += NR_TAPS)
{
delayLine_sF = convertIntToFloat((*sampledData)[station][time + 0][channel][pol_ri]);
sum_s0 = weights_sF * delayLine_s0;
delayLine_s0 = convertIntToFloat((*sampledData)[station][time + 1][channel][pol_ri]);
sum_s0 += weights_sE * delayLine_s1;
sum_s0 += weights_sD * delayLine_s2;
sum_s0 += weights_sC * delayLine_s3;
sum_s0 += weights_sB * delayLine_s4;
sum_s0 += weights_sA * delayLine_s5;
sum_s0 += weights_s9 * delayLine_s6;
sum_s0 += weights_s8 * delayLine_s7;
sum_s0 += weights_s7 * delayLine_s8;
sum_s0 += weights_s6 * delayLine_s9;
sum_s0 += weights_s5 * delayLine_sA;
sum_s0 += weights_s4 * delayLine_sB;
sum_s0 += weights_s3 * delayLine_sC;
sum_s0 += weights_s2 * delayLine_sD;
sum_s0 += weights_s1 * delayLine_sE;
sum_s0 += weights_s0 * delayLine_sF;
(*filteredData)[station][pol][time + 0][channel][ri] = sum_s0;
sum_s1 = weights_sF * delayLine_s1;
delayLine_s1 = convertIntToFloat((*sampledData)[station][time + 2][channel][pol_ri]);
sum_s1 += weights_sE * delayLine_s2;
sum_s1 += weights_sD * delayLine_s3;
sum_s1 += weights_sC * delayLine_s4;
sum_s1 += weights_sB * delayLine_s5;
sum_s1 += weights_sA * delayLine_s6;
sum_s1 += weights_s9 * delayLine_s7;
sum_s1 += weights_s8 * delayLine_s8;
sum_s1 += weights_s7 * delayLine_s9;
sum_s1 += weights_s6 * delayLine_sA;
sum_s1 += weights_s5 * delayLine_sB;
sum_s1 += weights_s4 * delayLine_sC;
sum_s1 += weights_s3 * delayLine_sD;
sum_s1 += weights_s2 * delayLine_sE;
sum_s1 += weights_s1 * delayLine_sF;
sum_s1 += weights_s0 * delayLine_s0;
(*filteredData)[station][pol][time + 1][channel][ri] = sum_s1;
sum_s2 = weights_sF * delayLine_s2;
delayLine_s2 = convertIntToFloat((*sampledData)[station][time + 3][channel][pol_ri]);
sum_s2 += weights_sE * delayLine_s3;
sum_s2 += weights_sD * delayLine_s4;
sum_s2 += weights_sC * delayLine_s5;
sum_s2 += weights_sB * delayLine_s6;
sum_s2 += weights_sA * delayLine_s7;
sum_s2 += weights_s9 * delayLine_s8;
sum_s2 += weights_s8 * delayLine_s9;
sum_s2 += weights_s7 * delayLine_sA;
sum_s2 += weights_s6 * delayLine_sB;
sum_s2 += weights_s5 * delayLine_sC;
sum_s2 += weights_s4 * delayLine_sD;
sum_s2 += weights_s3 * delayLine_sE;
sum_s2 += weights_s2 * delayLine_sF;
sum_s2 += weights_s1 * delayLine_s0;
sum_s2 += weights_s0 * delayLine_s1;
(*filteredData)[station][pol][time + 2][channel][ri] = sum_s2;
sum_s3 = weights_sF * delayLine_s3;
delayLine_s3 = convertIntToFloat((*sampledData)[station][time + 4][channel][pol_ri]);
sum_s3 += weights_sE * delayLine_s4;
sum_s3 += weights_sD * delayLine_s5;
sum_s3 += weights_sC * delayLine_s6;
sum_s3 += weights_sB * delayLine_s7;
sum_s3 += weights_sA * delayLine_s8;
sum_s3 += weights_s9 * delayLine_s9;
sum_s3 += weights_s8 * delayLine_sA;
sum_s3 += weights_s7 * delayLine_sB;
sum_s3 += weights_s6 * delayLine_sC;
sum_s3 += weights_s5 * delayLine_sD;
sum_s3 += weights_s4 * delayLine_sE;
sum_s3 += weights_s3 * delayLine_sF;
sum_s3 += weights_s2 * delayLine_s0;
sum_s3 += weights_s1 * delayLine_s1;
sum_s3 += weights_s0 * delayLine_s2;
(*filteredData)[station][pol][time + 3][channel][ri] = sum_s3;
sum_s4 = weights_sF * delayLine_s4;
delayLine_s4 = convertIntToFloat((*sampledData)[station][time + 5][channel][pol_ri]);
sum_s4 += weights_sE * delayLine_s5;
sum_s4 += weights_sD * delayLine_s6;
sum_s4 += weights_sC * delayLine_s7;
sum_s4 += weights_sB * delayLine_s8;
sum_s4 += weights_sA * delayLine_s9;
sum_s4 += weights_s9 * delayLine_sA;
sum_s4 += weights_s8 * delayLine_sB;
sum_s4 += weights_s7 * delayLine_sC;
sum_s4 += weights_s6 * delayLine_sD;
sum_s4 += weights_s5 * delayLine_sE;
sum_s4 += weights_s4 * delayLine_sF;
sum_s4 += weights_s3 * delayLine_s0;
sum_s4 += weights_s2 * delayLine_s1;
sum_s4 += weights_s1 * delayLine_s2;
sum_s4 += weights_s0 * delayLine_s3;
(*filteredData)[station][pol][time + 4][channel][ri] = sum_s4;
sum_s5 = weights_sF * delayLine_s5;
delayLine_s5 = convertIntToFloat((*sampledData)[station][time + 6][channel][pol_ri]);
sum_s5 += weights_sE * delayLine_s6;
sum_s5 += weights_sD * delayLine_s7;
sum_s5 += weights_sC * delayLine_s8;
sum_s5 += weights_sB * delayLine_s9;
sum_s5 += weights_sA * delayLine_sA;
sum_s5 += weights_s9 * delayLine_sB;
sum_s5 += weights_s8 * delayLine_sC;
sum_s5 += weights_s7 * delayLine_sD;
sum_s5 += weights_s6 * delayLine_sE;
sum_s5 += weights_s5 * delayLine_sF;
sum_s5 += weights_s4 * delayLine_s0;
sum_s5 += weights_s3 * delayLine_s1;
sum_s5 += weights_s2 * delayLine_s2;
sum_s5 += weights_s1 * delayLine_s3;
sum_s5 += weights_s0 * delayLine_s4;
(*filteredData)[station][pol][time + 5][channel][ri] = sum_s5;
sum_s6 = weights_sF * delayLine_s6;
delayLine_s6 = convertIntToFloat((*sampledData)[station][time + 7][channel][pol_ri]);
sum_s6 += weights_sE * delayLine_s7;
sum_s6 += weights_sD * delayLine_s8;
sum_s6 += weights_sC * delayLine_s9;
sum_s6 += weights_sB * delayLine_sA;
sum_s6 += weights_sA * delayLine_sB;
sum_s6 += weights_s9 * delayLine_sC;
sum_s6 += weights_s8 * delayLine_sD;
sum_s6 += weights_s7 * delayLine_sE;
sum_s6 += weights_s6 * delayLine_sF;
sum_s6 += weights_s5 * delayLine_s0;
sum_s6 += weights_s4 * delayLine_s1;
sum_s6 += weights_s3 * delayLine_s2;
sum_s6 += weights_s2 * delayLine_s3;
sum_s6 += weights_s1 * delayLine_s4;
sum_s6 += weights_s0 * delayLine_s5;
(*filteredData)[station][pol][time + 6][channel][ri] = sum_s6;
sum_s7 = weights_sF * delayLine_s7;
delayLine_s7 = convertIntToFloat((*sampledData)[station][time + 8][channel][pol_ri]);
sum_s7 += weights_sE * delayLine_s8;
sum_s7 += weights_sD * delayLine_s9;
sum_s7 += weights_sC * delayLine_sA;
sum_s7 += weights_sB * delayLine_sB;
sum_s7 += weights_sA * delayLine_sC;
sum_s7 += weights_s9 * delayLine_sD;
sum_s7 += weights_s8 * delayLine_sE;
sum_s7 += weights_s7 * delayLine_sF;
sum_s7 += weights_s6 * delayLine_s0;
sum_s7 += weights_s5 * delayLine_s1;
sum_s7 += weights_s4 * delayLine_s2;
sum_s7 += weights_s3 * delayLine_s3;
sum_s7 += weights_s2 * delayLine_s4;
sum_s7 += weights_s1 * delayLine_s5;
sum_s7 += weights_s0 * delayLine_s6;
(*filteredData)[station][pol][time + 7][channel][ri] = sum_s7;
sum_s8 = weights_sF * delayLine_s8;
delayLine_s8 = convertIntToFloat((*sampledData)[station][time + 9][channel][pol_ri]);
sum_s8 += weights_sE * delayLine_s9;
sum_s8 += weights_sD * delayLine_sA;
sum_s8 += weights_sC * delayLine_sB;
sum_s8 += weights_sB * delayLine_sC;
sum_s8 += weights_sA * delayLine_sD;
sum_s8 += weights_s9 * delayLine_sE;
sum_s8 += weights_s8 * delayLine_sF;
sum_s8 += weights_s7 * delayLine_s0;
sum_s8 += weights_s6 * delayLine_s1;
sum_s8 += weights_s5 * delayLine_s2;
sum_s8 += weights_s4 * delayLine_s3;
sum_s8 += weights_s3 * delayLine_s4;
sum_s8 += weights_s2 * delayLine_s5;
sum_s8 += weights_s1 * delayLine_s6;
sum_s8 += weights_s0 * delayLine_s7;
(*filteredData)[station][pol][time + 8][channel][ri] = sum_s8;
sum_s9 = weights_sF * delayLine_s9;
delayLine_s9 = convertIntToFloat((*sampledData)[station][time + 10][channel][pol_ri]);
sum_s9 += weights_sE * delayLine_sA;
sum_s9 += weights_sD * delayLine_sB;
sum_s9 += weights_sC * delayLine_sC;
sum_s9 += weights_sB * delayLine_sD;
sum_s9 += weights_sA * delayLine_sE;
sum_s9 += weights_s9 * delayLine_sF;
sum_s9 += weights_s8 * delayLine_s0;
sum_s9 += weights_s7 * delayLine_s1;
sum_s9 += weights_s6 * delayLine_s2;
sum_s9 += weights_s5 * delayLine_s3;
sum_s9 += weights_s4 * delayLine_s4;
sum_s9 += weights_s3 * delayLine_s5;
sum_s9 += weights_s2 * delayLine_s6;
sum_s9 += weights_s1 * delayLine_s7;
sum_s9 += weights_s0 * delayLine_s8;
(*filteredData)[station][pol][time + 9][channel][ri] = sum_s9;
sum_sA = weights_sF * delayLine_sA;
delayLine_sA = convertIntToFloat((*sampledData)[station][time + 11][channel][pol_ri]);
sum_sA += weights_sE * delayLine_sB;
sum_sA += weights_sD * delayLine_sC;
sum_sA += weights_sC * delayLine_sD;
sum_sA += weights_sB * delayLine_sE;
sum_sA += weights_sA * delayLine_sF;
sum_sA += weights_s9 * delayLine_s0;
sum_sA += weights_s8 * delayLine_s1;
sum_sA += weights_s7 * delayLine_s2;
sum_sA += weights_s6 * delayLine_s3;
sum_sA += weights_s5 * delayLine_s4;
sum_sA += weights_s4 * delayLine_s5;
sum_sA += weights_s3 * delayLine_s6;
sum_sA += weights_s2 * delayLine_s7;
sum_sA += weights_s1 * delayLine_s8;
sum_sA += weights_s0 * delayLine_s9;
(*filteredData)[station][pol][time + 10][channel][ri] = sum_sA;
sum_sB = weights_sF * delayLine_sB;
delayLine_sB = convertIntToFloat((*sampledData)[station][time + 12][channel][pol_ri]);
sum_sB += weights_sE * delayLine_sC;
sum_sB += weights_sD * delayLine_sD;
sum_sB += weights_sC * delayLine_sE;
sum_sB += weights_sB * delayLine_sF;
sum_sB += weights_sA * delayLine_s0;
sum_sB += weights_s9 * delayLine_s1;
sum_sB += weights_s8 * delayLine_s2;
sum_sB += weights_s7 * delayLine_s3;
sum_sB += weights_s6 * delayLine_s4;
sum_sB += weights_s5 * delayLine_s5;
sum_sB += weights_s4 * delayLine_s6;
sum_sB += weights_s3 * delayLine_s7;
sum_sB += weights_s2 * delayLine_s8;
sum_sB += weights_s1 * delayLine_s9;
sum_sB += weights_s0 * delayLine_sA;
(*filteredData)[station][pol][time + 11][channel][ri] = sum_sB;
sum_sC = weights_sF * delayLine_sC;
delayLine_sC = convertIntToFloat((*sampledData)[station][time + 13][channel][pol_ri]);
sum_sC += weights_sE * delayLine_sD;
sum_sC += weights_sD * delayLine_sE;
sum_sC += weights_sC * delayLine_sF;
sum_sC += weights_sB * delayLine_s0;
sum_sC += weights_sA * delayLine_s1;
sum_sC += weights_s9 * delayLine_s2;
sum_sC += weights_s8 * delayLine_s3;
sum_sC += weights_s7 * delayLine_s4;
sum_sC += weights_s6 * delayLine_s5;
sum_sC += weights_s5 * delayLine_s6;
sum_sC += weights_s4 * delayLine_s7;
sum_sC += weights_s3 * delayLine_s8;
sum_sC += weights_s2 * delayLine_s9;
sum_sC += weights_s1 * delayLine_sA;
sum_sC += weights_s0 * delayLine_sB;
(*filteredData)[station][pol][time + 12][channel][ri] = sum_sC;
sum_sD = weights_sF * delayLine_sD;
delayLine_sD = convertIntToFloat((*sampledData)[station][time + 14][channel][pol_ri]);
sum_sD += weights_sE * delayLine_sE;
sum_sD += weights_sD * delayLine_sF;
sum_sD += weights_sC * delayLine_s0;
sum_sD += weights_sB * delayLine_s1;
sum_sD += weights_sA * delayLine_s2;
sum_sD += weights_s9 * delayLine_s3;
sum_sD += weights_s8 * delayLine_s4;
sum_sD += weights_s7 * delayLine_s5;
sum_sD += weights_s6 * delayLine_s6;
sum_sD += weights_s5 * delayLine_s7;
sum_sD += weights_s4 * delayLine_s8;
sum_sD += weights_s3 * delayLine_s9;
sum_sD += weights_s2 * delayLine_sA;
sum_sD += weights_s1 * delayLine_sB;
sum_sD += weights_s0 * delayLine_sC;
(*filteredData)[station][pol][time + 13][channel][ri] = sum_sD;
sum_sE = weights_sF * delayLine_sE;
delayLine_sE = convertIntToFloat((*sampledData)[station][time + 15][channel][pol_ri]);
sum_sE += weights_sE * delayLine_sF;
sum_sE += weights_sD * delayLine_s0;
sum_sE += weights_sC * delayLine_s1;
sum_sE += weights_sB * delayLine_s2;
sum_sE += weights_sA * delayLine_s3;
sum_sE += weights_s9 * delayLine_s4;
sum_sE += weights_s8 * delayLine_s5;
sum_sE += weights_s7 * delayLine_s6;
sum_sE += weights_s6 * delayLine_s7;
sum_sE += weights_s5 * delayLine_s8;
sum_sE += weights_s4 * delayLine_s9;
sum_sE += weights_s3 * delayLine_sA;
sum_sE += weights_s2 * delayLine_sB;
sum_sE += weights_s1 * delayLine_sC;
sum_sE += weights_s0 * delayLine_sD;
(*filteredData)[station][pol][time + 14][channel][ri] = sum_sE;
sum_sF = weights_sF * delayLine_sF;
sum_sF += weights_sE * delayLine_s0;
sum_sF += weights_sD * delayLine_s1;
sum_sF += weights_sC * delayLine_s2;
sum_sF += weights_sB * delayLine_s3;
sum_sF += weights_sA * delayLine_s4;
sum_sF += weights_s9 * delayLine_s5;
sum_sF += weights_s8 * delayLine_s6;
sum_sF += weights_s7 * delayLine_s7;
sum_sF += weights_s6 * delayLine_s8;
sum_sF += weights_s5 * delayLine_s9;
sum_sF += weights_s4 * delayLine_sA;
sum_sF += weights_s3 * delayLine_sB;
sum_sF += weights_s2 * delayLine_sC;
sum_sF += weights_s1 * delayLine_sD;
sum_sF += weights_s0 * delayLine_sE;
(*filteredData)[station][pol][time + 15][channel][ri] = sum_sF;
}
for (unsigned time = 0; time < NR_TAPS - 1; time++)
{
(*historyData)[subbandIdx][station][time][channel][pol_ri] =
(*sampledData)[station][NR_SAMPLES_PER_CHANNEL - (NR_TAPS - 1) + time][channel][pol_ri];
}
}
}
|
72fc36bd61780ad73983866614a5fcaabc588681.hip | // !!! This is a file automatically generated by hipify!!!
#include "hip/hip_runtime.h"
/*
* mcmc_kernel_mv.cu
*
* Created on: 24-Feb-2009
* Author: alee
*/
#include "temper.ch"
#include "matrix.ch"
#include "matrix.h"
#include <stdio.h>
#include "sharedmem.cuh"
#include "test_functions.h"
#include "rng.h"
#include <ctime>
#include <time.h>
#include <sys/timeb.h>
#include <sys/time.h>
__constant__ float args_p[NUM_AP];
//__constant__ int lik_per_thread[2];
template<int D>
__global__ void FUNC( metropolis_rw_gpu, TYPE)(int N, float* d_array_init, float* d_array_step,
float* d_array_uniform, float* d_array_out, int log) {
    // One random-walk Metropolis chain per thread; sample slots are covered
    // in grid-stride fashion.  Each thread builds its proposal in its own
    // D-float slice of dynamic shared memory.
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    const int stride = blockDim.x * gridDim.x;

    SharedMemory<float> shm;
    float* proposal = shm.getPointer() + D * threadIdx.x;   // D floats per thread
    float* current = d_vector_get(d_array_init, D, tid);    // chain state, updated in place

    for (int i = tid; i < N; i += stride) {
        // Propose y = x + w using a pre-generated Gaussian increment w.
        float* innovation = d_vector_get(d_array_step, D, i);
        d_vector_add(current, innovation, proposal, D);

        // Symmetric proposal (Metropolis): q(y,x) = q(x,y), so the Hastings
        // correction cancels and only the target ratio remains.
        float accept;
        if (log != 0) {
            accept = expf(LOG_TARGET<D> (proposal, args_p) - LOG_TARGET<D> (current, args_p));
        } else {
            accept = TARGET<D> (proposal, args_p) / TARGET<D> (current, args_p);
        }

        if (d_array_uniform[i] < accept) {
            d_vector_set(current, proposal, D);             // accept: x <- y
        }
        // Record the (possibly unchanged) state for this sample slot.
        d_vector_set(d_vector_get(d_array_out, D, i), current, D);
    }
}
// Host driver for the untempered random-walk Metropolis sampler.
//
// N           : total number of samples to draw (across all chains)
// d_array_init: device array of initial chain states (nb*nt chains, D floats each)
// sigma       : proposal standard deviation
// d_array_out : device array receiving N states of D floats
// h_args_p    : host parameter block copied into constant memory for TARGET/LOG_TARGET
// log         : nonzero => use LOG_TARGET, else TARGET
// nb, nt      : blocks / threads-per-block for all launches
template <int D>
void FUNC( metropolis_rw, TYPE)(int N, float* d_array_init, float sigma, float* d_array_out,
float* h_args_p, int log, int nb, int nt) {
    // Target parameters live in constant memory, read by TARGET/LOG_TARGET.
    hipMemcpyToSymbol(args_p, h_args_p, NUM_AP * sizeof(float));

    // Pre-generate all randomness on the device: N uniforms for the
    // accept/reject tests and N*D standard normals for the proposal steps.
    float* d_array_uniform;
    hipMalloc((void **) &d_array_uniform, N * sizeof(float));
    populate_rand_d(d_array_uniform, N);

    float* d_array_step;
    hipMalloc((void **) &d_array_step, N * D * sizeof(float));
    populate_randn_d(d_array_step, N * D);
    if (sigma != 1.0) {
        // Scale the unit normals to the requested proposal std. deviation.
        multiply(N * D, d_array_step, d_array_step, sigma, nb, nt);
    }

    // BUGFIX: hipify mangled the original <<<...>>> launch into an invalid
    // FUNC(..., hipLaunchKernelGGL(...)) nesting; this is the correct HIP
    // launch.  Dynamic shared memory: D floats per thread for proposals.
    hipLaunchKernelGGL(HIP_KERNEL_NAME(FUNC(metropolis_rw_gpu, TYPE)<D>),
                       dim3(nb), dim3(nt), D * nt * sizeof(float), 0,
                       N, d_array_init, d_array_step, d_array_uniform, d_array_out, log);

    hipFree(d_array_uniform);
    hipFree(d_array_step);
}
template <int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_step, TYPE)(float* d_array_init, float* d_array_step,
float* d_array_uniform, float* d_temps, float* d_array_out, int log) {
    // One tempered chain per thread: propose one random-walk move and write
    // the surviving state (old or new) for this chain into d_array_out.
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;

    SharedMemory<float> shm;
    float* proposal = shm.getPointer() + D * threadIdx.x;   // per-thread scratch

    const float temp = d_temps[tid];                        // this chain's temperature
    float* current = d_vector_get(d_array_init, D, tid);
    float* innovation = d_vector_get(d_array_step, D, tid);
    d_vector_add(current, innovation, proposal, D);

    // Symmetric proposal, so the acceptance ratio is the ratio of the
    // tempered targets pi(.)^temp.
    float accept;
    if (log == 0) {
        accept = temper(TARGET<D> (proposal, args_p), temp)
                / temper(TARGET<D> (current, args_p), temp);
    } else {
        accept = expf(LOG_TARGET<D> (proposal, args_p) * temp
                - LOG_TARGET<D> (current, args_p) * temp);
    }

    float* winner = (d_array_uniform[tid] < accept) ? proposal : current;
    d_vector_set(d_vector_get(d_array_out, D, tid), winner, D);
}
// Parallel-tempering mutation step with a cooperatively evaluated target:
// each chain owns `reduction_size` threads (reduced to
// reduction_size/(nbr_data/2048) threads when nbr_data > 8192), whose partial
// log-densities over the data are summed via an unrolled shared-memory tree
// reduction.  The cached density of the current state (in `densities`) is
// reused for the accept/reject test, so the current state's target is never
// re-evaluated.
// NOTE(review): the acceptance uses only log-densities regardless of `log`
// (the non-log path appears unsupported here) -- confirm against callers.
// NOTE(review): the reduction assumes the per-chain thread count is a power
// of two, and the final warp phase relies on legacy warp-synchronous
// behavior (volatile shared memory, no __syncwarp) -- unsafe on GPUs with
// independent thread scheduling; verify target architecture.
template <int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_step2, TYPE)(float* d_array_init, float* d_array_step,
float* d_array_uniform, float* d_temps, float* d_array_out, int log, float* densities, float* d_args_p) {
//int lpt=10;//*lik_per_thread;
const int tid = blockDim.x * blockIdx.x + threadIdx.x;
// args_p[0] holds the number of data points (see host-side setup).
unsigned int nbr_data=(int)args_p[0];
//unsigned int lpt=(int)args_p[nbr_data+5];
//unsigned int nl = nbr_data/lpt;
SharedMemory<float> smem;
unsigned int chain_index, data_index;
float* log_densities_shared;
float* y;
float* args_shared;
// Shared-memory layout: [blockDim.x partial log-densities][one D-float
// proposal per chain in this block][(large-data case only) 2048+6 floats of
// staged parameters].
if (nbr_data<=8192){
chain_index= tid / reduction_size;
data_index = tid % reduction_size;
//unsigned int ind = threadIdx.x;
//unsigned int ceil=1;
//unsigned int extra=0;
//do{
// ceil *= 2;
//} while (ceil<(nbr_data/lpt));
//extra=ceil-(nbr_data/lpt);
log_densities_shared = smem.getPointer();
y = log_densities_shared + blockDim.x + D * ( threadIdx.x / reduction_size );
}
else{
chain_index= tid / (reduction_size/(nbr_data/2048));
data_index = tid % (reduction_size/(nbr_data/2048));
//unsigned int ind = threadIdx.x;
//unsigned int ceil=1;
//unsigned int extra=0;
//do{
// ceil *= 2;
//} while (ceil<(nbr_data/lpt));
//extra=ceil-(nbr_data/lpt);
log_densities_shared = smem.getPointer();
y = log_densities_shared + blockDim.x + D * ( threadIdx.x / (reduction_size/(nbr_data/2048)) );
args_shared = log_densities_shared + blockDim.x + D * ( blockDim.x / (reduction_size/(nbr_data/2048)) );
}
/////////////////////////
//shared memory for data
// for (int k=threadIdx.x;k<nbr_data+6;k=k+blockDim.x)
// args_shared[k] = d_args_p[k];
//__syncthreads();
////////////////////////
float* w;
float* x;
float t, ratio;
// Only the leader thread of each chain builds the proposal y = x + w; the
// other threads of the chain read y after the barrier below.
if ( data_index == 0 ){
t = d_temps[chain_index];
x = d_vector_get(d_array_init, D, chain_index);
w = d_vector_get(d_array_step, D, chain_index);
d_vector_add(x, w, y, D);
//for (int a=(ind+nbr_data/lpt+extra*(ind/(nbr_data/lpt)));a<(ind+nbr_data/lpt+extra*(ind/(nbr_data/lpt))+extra);a++){
//log_densities_shared[a]=0.0;
//}
}
__syncthreads();
float mySum;
if (nbr_data<=8192){
// Small data: every thread evaluates its share directly from constant mem.
mySum = LOG_TARGET<D> (y, args_p);
log_densities_shared[threadIdx.x]=mySum;
}
else{
// Large data: stream the data through shared memory in 2048-float tiles,
// accumulating the per-thread partial log-density across tiles.
if (threadIdx.x==0){
args_shared[0] = 2048;
args_shared[2049] = d_args_p[nbr_data+1];
args_shared[2050] = d_args_p[nbr_data+2];
args_shared[2051] = d_args_p[nbr_data+3];
args_shared[2052] = d_args_p[nbr_data+4];
args_shared[2053] = d_args_p[nbr_data+5];
}
log_densities_shared[threadIdx.x]=0;
__syncthreads();
for (int loads=0;loads<nbr_data/2048;loads++){
for (int k=threadIdx.x;k<2048;k=k+blockDim.x){
args_shared[k+1] = d_args_p[loads*2048+k+1];
}
__syncthreads();
mySum = LOG_TARGET<D> (y, args_shared);
log_densities_shared[threadIdx.x]+=mySum;
}
mySum=log_densities_shared[threadIdx.x];
//mySum = LOG_TARGET<D> (y, d_args_p);
//log_densities_shared[threadIdx.x]=mySum;
} //mySum = LOG_TARGET<D> (y, args_shared); //shared memory for data
__syncthreads();
if (nbr_data<=8192){
// do reduction in shared mem - completely unrolled
if (reduction_size >= 512)
{
if (data_index < 256)
{
log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 256];
}
__syncthreads();
}
if (reduction_size >= 256)
{
if (data_index < 128)
{
log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 128];
}
__syncthreads();
}
if (reduction_size >= 128)
{
if (data_index < 64)
{
log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 64];
}
__syncthreads();
}
if (data_index < 32)
{
// now that we are using warp-synchronous programming (below)
// we need to declare our shared memory volatile so that the compiler
// doesn't reorder stores to it and induce incorrect behavior.
volatile float *smem = log_densities_shared;
if (reduction_size >= 64)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 32];
}
if (reduction_size >= 32)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 16];
}
if (reduction_size >= 16)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 8];
}
if (reduction_size >= 8)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 4];
}
if (reduction_size >= 4)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 2];
}
if (reduction_size >= 2)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 1];
}
}
}
else{
// do reduction in shared mem - completely unrolled
// Same tree reduction as above, but over the reduced per-chain thread
// count reduction_size/(nbr_data/2048).
if ( (reduction_size/(nbr_data/2048)) >= 512)
{
if (data_index < 256)
{
log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 256];
}
__syncthreads();
}
if ((reduction_size/(nbr_data/2048)) >= 256)
{
if (data_index < 128)
{
log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 128];
}
__syncthreads();
}
if ((reduction_size/(nbr_data/2048)) >= 128)
{
if (data_index < 64)
{
log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 64];
}
__syncthreads();
}
if (data_index < 32)
{
// now that we are using warp-synchronous programming (below)
// we need to declare our shared memory volatile so that the compiler
// doesn't reorder stores to it and induce incorrect behavior.
volatile float *smem = log_densities_shared;
if ((reduction_size/(nbr_data/2048)) >= 64)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 32];
}
if ((reduction_size/(nbr_data/2048)) >= 32)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 16];
}
if ((reduction_size/(nbr_data/2048)) >= 16)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 8];
}
if ((reduction_size/(nbr_data/2048)) >= 8)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 4];
}
if ((reduction_size/(nbr_data/2048)) >= 4)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 2];
}
if ((reduction_size/(nbr_data/2048)) >= 2)
{
smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 1];
}
}
}
/*
//reduction
for (unsigned int stride=reduction_size>>1; stride>0; stride >>=1){
__syncthreads();
if (data_index < stride)
log_densities_shared[threadIdx.x] += log_densities_shared[threadIdx.x+stride];
}
//reduction
for (unsigned int stride=1; stride<ceil; stride*=2){
__syncthreads();
if (data_index % (2*stride) == 0)
log_densities_shared[ind+extra*(ind/(nbr_data/lpt))] += log_densities_shared[ind+extra*(ind/(nbr_data/lpt))+stride];
}
*/
//__syncthreads();
// Leader thread performs accept/reject using the reduced log-density of the
// proposal and the cached log-density of the current state, then writes the
// surviving state and (on acceptance) refreshes the cache.
if (data_index == 0){
//densities[chain_index]=-200000000000000000000000000.0;
//for (int a=threadIdx.x+1;a<(threadIdx.x+(reduction_size/(nbr_data/2048)));a++){
//log_densities_shared[threadIdx.x] += log_densities_shared[a];
//}
ratio = expf(log_densities_shared[threadIdx.x] * t - densities[chain_index] * t);
if (d_array_uniform[chain_index] < ratio) {
densities[chain_index] = log_densities_shared[threadIdx.x];
d_vector_set(d_vector_get(d_array_out, D, chain_index), y, D);
} else {
d_vector_set(d_vector_get(d_array_out, D, chain_index), x, D);
}
}
}
// One thread per chain: seed the per-chain density cache (`densities`) from
// the initial states so metropolis_rwpop_step2 never re-evaluates the
// current state.  reduction_size is unused here.
template <int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_init, TYPE)(float* d_array_init, int log, float* densities) {
const int tid = blockDim.x * blockIdx.x + threadIdx.x;
float* x = d_vector_get(d_array_init, D, tid);
if (log == 0) {
densities[tid] = TARGET<D> (x, args_p);
} else {
// NOTE(review): a huge negative sentinel is stored instead of the real
// log-density (see commented-out call), which forces every chain's first
// proposal to be accepted -- presumably intentional warm-up; confirm.
densities[tid] = -100000000000.0;//LOG_TARGET<D> (x, args_p);
}
}
template<int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_exchange, TYPE)(float* d_array_values, int type, float* d_temps,
float* d_array_uniform, int log) {
    // Parallel-tempering exchange move: threads whose index parity matches
    // `type` propose swapping their state with the next chain (wrap-around).
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    const int nChains = blockDim.x * gridDim.x;

    if (tid % 2 != type) {
        return;  // passive partner in this round
    }

    const int partner = (tid + 1) % nChains;
    float* mine = d_vector_get(d_array_values, D, tid);
    float* theirs = d_vector_get(d_array_values, D, partner);
    const float myTemp = d_temps[tid];
    const float partnerTemp = d_temps[partner];

    // Swap acceptance ratio:
    //   pi(theirs)^myTemp * pi(mine)^partnerTemp
    //   / (pi(mine)^myTemp * pi(theirs)^partnerTemp)
    float ratio;
    if (log) {
        float ldTheirs = LOG_TARGET<D> (theirs, args_p);
        float ldMine = LOG_TARGET<D> (mine, args_p);
        ratio = expf(ldTheirs * (myTemp - partnerTemp) + ldMine * (partnerTemp - myTemp));
    } else {
        ratio = temper(TARGET<D> (theirs, args_p), myTemp)
                / temper(TARGET<D> (theirs, args_p), partnerTemp)
                * temper(TARGET<D> (mine, args_p), partnerTemp)
                / temper(TARGET<D> (mine, args_p), myTemp);
    }

    if (d_array_uniform[tid] < ratio) {
        d_vector_swap(mine, theirs, D);
    }
}
template<int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_exchange2, TYPE)(float* d_array_values, int type, float* d_temps,
float* d_array_uniform, int log, float* densities) {
    // Exchange move variant that reuses cached per-chain densities instead of
    // re-evaluating the target; swaps both the states and the cached values.
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    const int nChains = blockDim.x * gridDim.x;

    if (tid % 2 != type) {
        return;  // passive partner in this round
    }

    const int partner = (tid + 1) % nChains;
    float* mine = d_vector_get(d_array_values, D, tid);
    float* theirs = d_vector_get(d_array_values, D, partner);
    const float myTemp = d_temps[tid];
    const float partnerTemp = d_temps[partner];
    const float densTheirs = densities[partner];
    const float densMine = densities[tid];

    float ratio;
    if (log) {
        // Cached values are log-densities.
        ratio = expf(densTheirs * (myTemp - partnerTemp) + densMine * (partnerTemp - myTemp));
    } else {
        // Cached values are plain densities.
        ratio = temper(densTheirs, myTemp - partnerTemp) * temper(densMine, partnerTemp - myTemp);
    }

    if (d_array_uniform[tid] < ratio) {
        densities[tid] = densTheirs;
        densities[partner] = densMine;
        d_vector_swap(mine, theirs, D);
    }
}
// Host driver for the population (parallel-tempering) random-walk Metropolis
// sampler: alternates a per-chain mutation step with an exchange step between
// adjacent chains, for N / (nb*nt) sweeps.
//
// N            : total number of samples across all chains
// d_array_init : initial chain states (nb*nt chains of D floats)
// sigma        : proposal standard deviation
// h_args_p     : target parameters, copied to constant memory
// d_temps      : per-chain temperatures (device)
// d_array_out  : device array receiving all swept states
// log          : nonzero => work with LOG_TARGET
// nb, nt       : launch configuration; nb*nt == number of chains
template<int D, int reduction_size>
void FUNC( metropolis_rwpop, TYPE)(int N, float* d_array_init, float sigma, float* h_args_p,
float* d_temps, float* d_array_out, int log, int nb, int nt) {
    hipMemcpyToSymbol(args_p, h_args_p, NUM_AP * sizeof(float));

    int tt = nb * nt;         // number of chains
    int numSteps = N / tt;    // sweeps: each sweep advances every chain once

    // Pre-draw which parity of chains initiates each sweep's exchange move.
    int* array_types = (int*) malloc(numSteps * sizeof(int));
    populate_randIK(array_types, numSteps, 2);

    float* d_array_step;
    hipMalloc((void **) &d_array_step, N * D * sizeof(float));
    populate_randn_d(d_array_step, N * D);
    if (sigma != 1.0) {
        multiply(N * D, d_array_step, d_array_step, sigma, nb, nt);
    }

    float* d_array_uniform1;
    float* d_array_uniform2;
    hipMalloc((void **) &d_array_uniform1, N * sizeof(float));
    hipMalloc((void **) &d_array_uniform2, N * sizeof(float));
    populate_rand_d(d_array_uniform1, N);
    populate_rand_d(d_array_uniform2, N);

    float* du1 = d_array_uniform1;
    float* du2 = d_array_uniform2;
    float* ds = d_array_step;
    for (int i = 0; i < numSteps; i++) {
        // BUGFIX: hipify mangled both <<<...>>> launches in this loop into
        // invalid FUNC(..., hipLaunchKernelGGL(...)) nestings; these are the
        // correct HIP launches.  Mutation first (D floats of shared memory
        // per thread for the proposal)...
        hipLaunchKernelGGL(HIP_KERNEL_NAME(FUNC(metropolis_rwpop_step, TYPE)<D, 0>),
                           dim3(nb), dim3(nt), D * nt * sizeof(float), 0,
                           d_array_init, ds, du1, d_temps, d_array_out, log);
        hipDeviceSynchronize();
        // ...then the exchange move between adjacent chains.
        hipLaunchKernelGGL(HIP_KERNEL_NAME(FUNC(metropolis_rwpop_exchange, TYPE)<D, 0>),
                           dim3(nb), dim3(nt), 0, 0,
                           d_array_out, array_types[i], d_temps, du2, log);
        hipDeviceSynchronize();
        // Next sweep starts from this sweep's output; advance all cursors.
        d_array_init = d_array_out;
        ds += tt * D;
        du1 += tt;
        d_array_out += tt * D;
        du2 += tt;
    }
    hipFree(d_array_uniform1);
    hipFree(d_array_uniform2);
    hipFree(d_array_step);
    free(array_types);
}
// Host driver for the "marginal" parallel-tempering sampler: the
// (log-)likelihood of each proposal is computed cooperatively by
// `reduction_size` threads per chain (metropolis_rwpop_step2), with per-chain
// densities cached on the device so exchange moves never re-evaluate the
// target.  Only the last chain's state is copied out per sweep.  Prints
// coarse timing to stdout.
//
// h_args_p[0] holds the data count; the block is h_args_p[0..nbr_data+5].
template<int D, int reduction_size>
void FUNC( metropolis_rwpop_marginal, TYPE)(int N, float* d_array_init, float sigma, float* h_args_p,
float* d_temps, float* d_array_out, int log, int nb, int nt) {
    struct timeval tv1, tv2, tv4, tv5, tv6;
    gettimeofday(&tv5, NULL);
    int nbr_data = (int) h_args_p[0];

    // Full parameter block (data + 6 scalars) goes to global memory; as much
    // as fits (8192 + 6 floats) is mirrored into constant memory.
    float* d_args_p;
    hipMalloc((void **) &d_args_p, (nbr_data + 6) * sizeof(float));
    hipMemcpy(d_args_p, h_args_p, (nbr_data + 6) * sizeof(float), hipMemcpyHostToDevice);
    if (nbr_data <= 8192) {
        hipMemcpyToSymbol(args_p, h_args_p, (nbr_data + 6) * sizeof(float));
    } else {
        hipMemcpyToSymbol(args_p, h_args_p, (8192 + 6) * sizeof(float));
    }

    int tt = nb * nt;         // number of chains
    int numSteps = N / tt;    // sweeps
    int* array_types = (int*) malloc(numSteps * sizeof(int));
    populate_randIK(array_types, numSteps, 2);

    float* d_array_step;
    hipMalloc((void **) &d_array_step, N * D * sizeof(float));
    populate_randn_d(d_array_step, N * D);
    if (sigma != 1.0) {
        multiply(N * D, d_array_step, d_array_step, sigma, nb, nt);
    }

    float* d_array_uniform1;
    float* d_array_uniform2;
    hipMalloc((void **) &d_array_uniform1, N * sizeof(float));
    hipMalloc((void **) &d_array_uniform2, N * sizeof(float));
    populate_rand_d(d_array_uniform1, N);
    populate_rand_d(d_array_uniform2, N);
    float* du1 = d_array_uniform1;
    float* du2 = d_array_uniform2;
    float* ds = d_array_step;

    float* d_array_temp;
    hipMalloc((void**) &d_array_temp, tt * D * sizeof(float));
    float* densities;
    hipMalloc((void**) &densities, tt * sizeof(float));

    // Seed the per-chain density cache.  (BUGFIX: hipify mangled all four
    // <<<...>>> launches in this function into invalid
    // FUNC(..., hipLaunchKernelGGL(...)) nestings; these are the correct HIP
    // forms.)
    hipLaunchKernelGGL(HIP_KERNEL_NAME(FUNC(metropolis_rwpop_init, TYPE)<D, 0>),
                       dim3(nb), dim3(nt), 0, 0, d_array_init, log, densities);
    hipDeviceSynchronize();
    gettimeofday(&tv6, NULL);
    printf("Init time = %f seconds\n",
           (double) (tv6.tv_usec - tv5.tv_usec) / 1000000 +
           (double) (tv6.tv_sec - tv5.tv_sec));

    double sum1 = 0, sum2 = 0, sum3 = 0;
    for (int i = 0; i < numSteps; i++) {
        gettimeofday(&tv1, NULL);
        if (nbr_data <= 8192) {
            // reduction_size threads per chain; shared memory holds the
            // partial log-densities plus one D-float proposal per chain.
            hipLaunchKernelGGL(HIP_KERNEL_NAME(FUNC(metropolis_rwpop_step2, TYPE)<D, reduction_size>),
                               dim3(nb), dim3(nt * reduction_size),
                               (nt * reduction_size + nt * D) * sizeof(float), 0,
                               d_array_init, ds + i * tt * D, du1 + i * tt, d_temps,
                               d_array_temp, log, densities, d_args_p);
        } else {
            // Large data: fewer threads per chain, plus 2048(+6) extra floats
            // of shared memory to stage the data in tiles.
            hipLaunchKernelGGL(HIP_KERNEL_NAME(FUNC(metropolis_rwpop_step2, TYPE)<D, reduction_size>),
                               dim3(nb), dim3((nt * reduction_size) / (nbr_data / 2048)),
                               ((nt * reduction_size) / (nbr_data / 2048) + nt * D + 2048) * sizeof(float), 0,
                               d_array_init, ds + i * tt * D, du1 + i * tt, d_temps,
                               d_array_temp, log, densities, d_args_p);
        }
        hipDeviceSynchronize();
        gettimeofday(&tv2, NULL);
        sum1 = sum1 + ((double) (tv2.tv_usec - tv1.tv_usec) / 1000000 + (double) (tv2.tv_sec - tv1.tv_sec));

        gettimeofday(&tv1, NULL);
        hipLaunchKernelGGL(HIP_KERNEL_NAME(FUNC(metropolis_rwpop_exchange2, TYPE)<D, 0>),
                           dim3(nb), dim3(nt), 0, 0,
                           d_array_temp, array_types[i], d_temps, du2 + i * tt, log, densities);
        gettimeofday(&tv2, NULL);
        sum2 = sum2 + ((double) (tv2.tv_usec - tv1.tv_usec) / 1000000 + (double) (tv2.tv_sec - tv1.tv_sec));

        gettimeofday(&tv1, NULL);
        hipMemcpy(d_array_init, d_array_temp, tt * D * sizeof(float), hipMemcpyDeviceToDevice);
        // Record only the last chain's state for this sweep.
        hipMemcpy(vector_get(d_array_out, D, i), vector_get(d_array_temp, D, tt - 1), D
                * sizeof(float), hipMemcpyDeviceToDevice);
        gettimeofday(&tv2, NULL);
        sum3 = sum3 + ((double) (tv2.tv_usec - tv1.tv_usec) / 1000000 + (double) (tv2.tv_sec - tv1.tv_sec));
    }
    printf("Update time total = %f seconds\n", sum1);
    printf("Exchange time total = %f seconds\n", sum2);
    printf("MemCopy time total = %f seconds\n", sum3);

    hipFree(d_array_uniform1);
    hipFree(d_array_uniform2);
    hipFree(d_array_step);
    hipFree(d_array_temp);
    hipFree(densities);
    hipFree(d_args_p);   // BUGFIX: was leaked in the original
    free(array_types);
    gettimeofday(&tv4, NULL);
    printf("FUNC time = %f seconds\n",
           (double) (tv4.tv_usec - tv5.tv_usec) / 1000000 +
           (double) (tv4.tv_sec - tv5.tv_sec));
}
| 72fc36bd61780ad73983866614a5fcaabc588681.cu | /*
* mcmc_kernel_mv.cu
*
* Created on: 24-Feb-2009
* Author: alee
*/
#include "temper.ch"
#include "matrix.ch"
#include "matrix.h"
#include <stdio.h>
#include "sharedmem.cuh"
#include "test_functions.h"
#include "rng.h"
#include <ctime>
#include <time.h>
#include <sys/timeb.h>
#include <sys/time.h>
__constant__ float args_p[NUM_AP];
//__constant__ int lik_per_thread[2];
// One random-walk Metropolis chain per thread; sample slots are covered in
// grid-stride fashion.  Each thread builds its proposal in its own D-float
// slice of dynamic shared memory; args_p is the target's parameter block in
// constant memory.
template<int D>
__global__ void FUNC( metropolis_rw_gpu, TYPE)(int N, float* d_array_init, float* d_array_step,
float* d_array_uniform, float* d_array_out, int log) {
const int tid = blockDim.x * blockIdx.x + threadIdx.x;
// Total threads in the grid = stride of the sample loop below.
const int nt = blockDim.x * gridDim.x;
int j;
float* x;
float* w;
float ratio;
SharedMemory<float> smem;
float* sdata = smem.getPointer();
// Per-thread proposal buffer: D consecutive floats of shared memory.
float* y = sdata + D * threadIdx.x;
x = d_vector_get(d_array_init, D, tid);
for (j = tid; j < N; j += nt) {
// Propose y = x + w from a pre-generated Gaussian increment.
w = d_vector_get(d_array_step, D, j);
d_vector_add(x, w, y, D);
// Metropolis so q(y,x) = q(x,y)
if (log == 0) {
ratio = TARGET<D> (y, args_p) / TARGET<D> (x, args_p);
} else {
ratio = expf(LOG_TARGET<D> (y, args_p) - LOG_TARGET<D> (x, args_p));
}
// Accept with probability min(1, ratio) via a pre-generated uniform.
if (d_array_uniform[j] < ratio) {
d_vector_set(x, y, D);
}
// Record the (possibly unchanged) state for this sample slot.
d_vector_set(d_vector_get(d_array_out, D, j), x, D);
}
}
// Host driver for the untempered random-walk Metropolis sampler: copies the
// target parameters to constant memory, pre-generates all device-side
// randomness (N uniforms for accept/reject, N*D scaled normals for the
// steps), launches the kernel with D floats of shared memory per thread, and
// frees the scratch arrays.
template <int D>
void FUNC( metropolis_rw, TYPE)(int N, float* d_array_init, float sigma, float* d_array_out,
float* h_args_p, int log, int nb, int nt) {
cudaMemcpyToSymbol(args_p, h_args_p, NUM_AP * sizeof(float));
float* d_array_uniform;
cudaMalloc((void **) &d_array_uniform, N * sizeof(float));
populate_rand_d(d_array_uniform, N);
float* d_array_step;
cudaMalloc((void **) &d_array_step, N * D * sizeof(float));
populate_randn_d(d_array_step, N * D);
if (sigma != 1.0) {
// Scale unit normals to the requested proposal std. deviation.
multiply(N * D, d_array_step, d_array_step, sigma, nb, nt);
}
FUNC(metropolis_rw_gpu, TYPE) < D> <<<nb,nt,D*nt*sizeof(float)>>>(N, d_array_init, d_array_step, d_array_uniform, d_array_out, log);
cudaFree(d_array_uniform);
cudaFree(d_array_step);
}
// One tempered random-walk Metropolis update per chain: one thread owns one
// chain of the parallel-tempering population and works at its own
// temperature d_temps[tid].  Needs D floats of shared scratch per thread.
// The reduction_size template parameter is unused here; it keeps the
// instantiation pattern uniform with the data-parallel variant below.
template <int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_step, TYPE)(float* d_array_init, float* d_array_step,
        float* d_array_uniform, float* d_temps, float* d_array_out, int log) {
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    SharedMemory<float> smem;
    // Per-thread shared-memory scratch for the proposed point.
    float* proposal = smem.getPointer() + D * threadIdx.x;
    const float temp = d_temps[tid];
    float* current = d_vector_get(d_array_init, D, tid);
    float* step = d_vector_get(d_array_step, D, tid);
    d_vector_add(current, step, proposal, D);
    // Metropolis: the random-walk proposal is symmetric, q(y,x) = q(x,y).
    float accept;
    if (log == 0) {
        accept = temper(TARGET<D> (proposal, args_p), temp) / temper(TARGET<D> (current, args_p), temp);
    } else {
        accept = expf(LOG_TARGET<D> (proposal, args_p) * temp - LOG_TARGET<D> (current, args_p) * temp);
    }
    // Write either the accepted proposal or the unchanged state.
    if (d_array_uniform[tid] < accept) {
        d_vector_set(d_vector_get(d_array_out, D, tid), proposal, D);
    } else {
        d_vector_set(d_vector_get(d_array_out, D, tid), current, D);
    }
}
// Data-parallel tempered MH update: reduction_size threads cooperate on each
// chain, evaluating the per-datum log-likelihood terms in parallel and
// summing them with a shared-memory tree reduction; thread 0 of each chain
// group ("data_index == 0") then performs the accept/reject step against the
// cached density in densities[chain_index] and writes the new state.
//
// Two launch shapes (must match the host driver metropolis_rwpop_marginal):
//  - nbr_data <= 8192: one thread per datum per chain; the data live in
//    constant memory (args_p).  Shared-memory layout:
//      [blockDim.x partial sums][one D-vector proposal per chain group].
//  - nbr_data >  8192: each thread accumulates nbr_data/2048 data points;
//    the data are streamed from d_args_p through a 2048-float shared tile.
//    Shared-memory layout:
//      [blockDim.x partial sums][proposals][2048+6 staged args].
//
// Fix vs. previous revision: the tile-streaming loop had no barrier between
// reading args_shared (inside LOG_TARGET) and overwriting it with the next
// tile, which is a cross-warp shared-memory race; a __syncthreads() now
// closes each iteration (trip count is uniform, so the barrier is safe).
//
// NOTE(review): the final warp-level reduction relies on implicit warp
// synchronicity through a volatile pointer (pre-Volta idiom); on Volta+
// hardware this should be rewritten with __syncwarp()/__shfl_down_sync.
template <int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_step2, TYPE)(float* d_array_init, float* d_array_step,
        float* d_array_uniform, float* d_temps, float* d_array_out, int log, float* densities, float* d_args_p) {
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    // args_p[0] always carries the data count, even in the truncated copy
    // uploaded when the full data set does not fit in constant memory.
    unsigned int nbr_data=(int)args_p[0];
    SharedMemory<float> smem;
    unsigned int chain_index, data_index;
    float* log_densities_shared;
    float* y;
    float* args_shared;
    if (nbr_data<=8192){
        // One thread per datum: reduction_size threads form one chain group.
        chain_index= tid / reduction_size;
        data_index = tid % reduction_size;
        log_densities_shared = smem.getPointer();
        // Proposal vector shared by this thread's chain group.
        y = log_densities_shared + blockDim.x + D * ( threadIdx.x / reduction_size );
    }
    else{
        // Tiled path: each thread covers nbr_data/2048 data points, so a
        // chain group is reduction_size/(nbr_data/2048) threads wide.
        chain_index= tid / (reduction_size/(nbr_data/2048));
        data_index = tid % (reduction_size/(nbr_data/2048));
        log_densities_shared = smem.getPointer();
        y = log_densities_shared + blockDim.x + D * ( threadIdx.x / (reduction_size/(nbr_data/2048)) );
        // Staging area for one 2048-datum tile plus the scalar parameters.
        args_shared = log_densities_shared + blockDim.x + D * ( blockDim.x / (reduction_size/(nbr_data/2048)) );
    }
    float* w;
    float* x;
    float t, ratio;
    // The group leader forms the proposal y = x + w in shared memory; the
    // barrier below publishes it to the rest of the group.
    if ( data_index == 0 ){
        t = d_temps[chain_index];
        x = d_vector_get(d_array_init, D, chain_index);
        w = d_vector_get(d_array_step, D, chain_index);
        d_vector_add(x, w, y, D);
    }
    __syncthreads();
    float mySum;
    if (nbr_data<=8192){
        // Each thread evaluates its own datum's log-likelihood term.
        mySum = LOG_TARGET<D> (y, args_p);
        log_densities_shared[threadIdx.x]=mySum;
    }
    else{
        // Stage the trailing scalar parameters once; slot 0 advertises a
        // tile size of 2048 data points to LOG_TARGET.
        if (threadIdx.x==0){
            args_shared[0] = 2048;
            args_shared[2049] = d_args_p[nbr_data+1];
            args_shared[2050] = d_args_p[nbr_data+2];
            args_shared[2051] = d_args_p[nbr_data+3];
            args_shared[2052] = d_args_p[nbr_data+4];
            args_shared[2053] = d_args_p[nbr_data+5];
        }
        log_densities_shared[threadIdx.x]=0;
        __syncthreads();
        // Stream the data through shared memory one 2048-float tile at a
        // time, accumulating this thread's share of the log-likelihood.
        for (int loads=0;loads<nbr_data/2048;loads++){
            for (int k=threadIdx.x;k<2048;k=k+blockDim.x){
                args_shared[k+1] = d_args_p[loads*2048+k+1];
            }
            __syncthreads();
            mySum = LOG_TARGET<D> (y, args_shared);
            log_densities_shared[threadIdx.x]+=mySum;
            // Barrier added (bug fix): without it the next iteration's tile
            // load can overwrite args_shared while other warps are still
            // reading it inside LOG_TARGET.
            __syncthreads();
        }
        mySum=log_densities_shared[threadIdx.x];
    }
    __syncthreads();
    if (nbr_data<=8192){
        // Tree reduction over the chain group's partial sums, completely
        // unrolled for the compile-time group width reduction_size.
        if (reduction_size >= 512)
        {
            if (data_index < 256)
            {
                log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 256];
            }
            __syncthreads();
        }
        if (reduction_size >= 256)
        {
            if (data_index < 128)
            {
                log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 128];
            }
            __syncthreads();
        }
        if (reduction_size >= 128)
        {
            if (data_index < 64)
            {
                log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 64];
            }
            __syncthreads();
        }
        if (data_index < 32)
        {
            // Warp-synchronous tail: the volatile pointer stops the
            // compiler from caching shared-memory values in registers.
            volatile float *smem = log_densities_shared;
            if (reduction_size >= 64)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 32];
            }
            if (reduction_size >= 32)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 16];
            }
            if (reduction_size >= 16)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 8];
            }
            if (reduction_size >= 8)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 4];
            }
            if (reduction_size >= 4)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 2];
            }
            if (reduction_size >= 2)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 1];
            }
        }
    }
    else{
        // Same unrolled reduction, but over the narrower tiled-path group
        // width reduction_size/(nbr_data/2048).
        if ( (reduction_size/(nbr_data/2048)) >= 512)
        {
            if (data_index < 256)
            {
                log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 256];
            }
            __syncthreads();
        }
        if ((reduction_size/(nbr_data/2048)) >= 256)
        {
            if (data_index < 128)
            {
                log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 128];
            }
            __syncthreads();
        }
        if ((reduction_size/(nbr_data/2048)) >= 128)
        {
            if (data_index < 64)
            {
                log_densities_shared[threadIdx.x] = mySum = mySum + log_densities_shared[threadIdx.x + 64];
            }
            __syncthreads();
        }
        if (data_index < 32)
        {
            // Warp-synchronous tail (see note in the kernel header).
            volatile float *smem = log_densities_shared;
            if ((reduction_size/(nbr_data/2048)) >= 64)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 32];
            }
            if ((reduction_size/(nbr_data/2048)) >= 32)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 16];
            }
            if ((reduction_size/(nbr_data/2048)) >= 16)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 8];
            }
            if ((reduction_size/(nbr_data/2048)) >= 8)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 4];
            }
            if ((reduction_size/(nbr_data/2048)) >= 4)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 2];
            }
            if ((reduction_size/(nbr_data/2048)) >= 2)
            {
                smem[threadIdx.x] = mySum = mySum + smem[threadIdx.x + 1];
            }
        }
    }
    if (data_index == 0){
        // Group leader: MH accept/reject against the cached log-density.
        // On acceptance the cache is refreshed and the proposal is written
        // out; otherwise the old state x is copied forward.
        ratio = expf(log_densities_shared[threadIdx.x] * t - densities[chain_index] * t);
        if (d_array_uniform[chain_index] < ratio) {
            densities[chain_index] = log_densities_shared[threadIdx.x];
            d_vector_set(d_vector_get(d_array_out, D, chain_index), y, D);
        } else {
            d_vector_set(d_vector_get(d_array_out, D, chain_index), x, D);
        }
    }
}
// Seed the cached per-chain density values used by the cached-density
// samplers.  In log mode the cache is deliberately set to a huge negative
// constant (instead of evaluating LOG_TARGET on the initial state) so that
// the first proposed move is effectively always accepted.
template <int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_init, TYPE)(float* d_array_init, int log, float* densities) {
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    float* state = d_vector_get(d_array_init, D, tid);
    if (log == 0) {
        densities[tid] = TARGET<D> (state, args_p);
    } else {
        densities[tid] = -100000000000.0;//LOG_TARGET<D> (state, args_p);
    }
}
// Parallel-tempering exchange move: chains of the chosen parity (type is 0
// or 1) each propose to swap states with their right neighbour, wrapping at
// the end of the population, so no two proposed swaps share a chain.  The
// target densities are recomputed here from the chain states.
template<int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_exchange, TYPE)(float* d_array_values, int type, float* d_temps,
        float* d_array_uniform, int log) {
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    const int tt = blockDim.x * gridDim.x;
    // Only every second chain initiates a swap.
    if (tid % 2 != type) {
        return;
    }
    const int partner = (tid + 1) % tt;
    float* mine = d_vector_get(d_array_values, D, tid);
    float* theirs = d_vector_get(d_array_values, D, partner);
    const float t = d_temps[tid];
    const float t2 = d_temps[partner];
    float ratio;
    if (log) {
        float ld_theirs = LOG_TARGET<D> (theirs, args_p);
        float ld_mine = LOG_TARGET<D> (mine, args_p);
        ratio = expf(ld_theirs * (t - t2) + ld_mine * (t2 - t));
    } else {
        // Standard tempered-swap ratio pi_t(y) pi_t2(x) / (pi_t2(y) pi_t(x)).
        ratio = temper(TARGET<D> (theirs, args_p), t) / temper(TARGET<D> (theirs, args_p), t2) * temper(
                TARGET<D> (mine, args_p), t2) / temper(TARGET<D> (mine, args_p), t);
    }
    if (d_array_uniform[tid] < ratio) {
        d_vector_swap(mine, theirs, D);
    }
}
// Parallel-tempering exchange using the cached per-chain densities, which
// avoids re-evaluating the target.  On acceptance both the chain states and
// the cached densities are swapped.
// NOTE(review): the non-log branch computes temper(ty, t - t2) *
// temper(tx, t2 - t); this equals the explicit four-factor ratio used in
// the non-cached exchange kernel provided temper(d, t) behaves like d^t —
// confirm against temper.ch.
template<int D, int reduction_size>
__global__ void FUNC( metropolis_rwpop_exchange2, TYPE)(float* d_array_values, int type, float* d_temps,
        float* d_array_uniform, int log, float* densities) {
    const int tid = blockDim.x * blockIdx.x + threadIdx.x;
    const int tt = blockDim.x * gridDim.x;
    // Only chains of the chosen parity initiate a swap.
    if (tid % 2 != type) {
        return;
    }
    const int partner = (tid + 1) % tt;
    float* mine = d_vector_get(d_array_values, D, tid);
    float* theirs = d_vector_get(d_array_values, D, partner);
    const float t = d_temps[tid];
    const float t2 = d_temps[partner];
    const float ty = densities[partner];
    const float tx = densities[tid];
    float ratio;
    if (log) {
        ratio = expf(ty * (t - t2) + tx * (t2 - t));
    } else {
        ratio = temper(ty, t - t2) * temper(tx, t2 - t);
    }
    if (d_array_uniform[tid] < ratio) {
        // Swap the cached densities along with the chain states.
        densities[tid] = ty;
        densities[partner] = tx;
        d_vector_swap(mine, theirs, D);
    }
}
// Parallel-tempering random-walk Metropolis driver: tt = nb*nt chains, one
// thread per chain, alternating a tempered MH update with an even/odd
// neighbour-exchange move for N/tt sweeps.  All random numbers are drawn up
// front; d_array_out receives every chain's state after every sweep (the
// buffers advance by tt*D per sweep).  reduction_size is unused here and
// kept only for a uniform instantiation pattern.
// Fix: deprecated cudaThreadSynchronize() replaced by cudaDeviceSynchronize().
template<int D, int reduction_size>
void FUNC( metropolis_rwpop, TYPE)(int N, float* d_array_init, float sigma, float* h_args_p,
        float* d_temps, float* d_array_out, int log, int nb, int nt) {
    cudaMemcpyToSymbol(args_p, h_args_p, NUM_AP * sizeof(float));
    int tt = nb * nt;          // population size (number of chains)
    int numSteps = N / tt;     // number of MH + exchange sweeps
    // Random even/odd pairing choice for each sweep's exchange move.
    int* array_types = (int*) malloc(numSteps * sizeof(int));
    populate_randIK(array_types, numSteps, 2);
    float* d_array_step;
    cudaMalloc((void **) &d_array_step, N * D * sizeof(float));
    populate_randn_d(d_array_step, N * D);
    if (sigma != 1.0) {
        // Rescale the unit-variance Gaussian steps.
        multiply(N * D, d_array_step, d_array_step, sigma, nb, nt);
    }
    // Separate uniform streams for the MH accept and the exchange accept.
    float* d_array_uniform1;
    float* d_array_uniform2;
    cudaMalloc((void **) &d_array_uniform1, N * sizeof(float));
    cudaMalloc((void **) &d_array_uniform2, N * sizeof(float));
    populate_rand_d(d_array_uniform1, N);
    populate_rand_d(d_array_uniform2, N);
    float* du1 = d_array_uniform1;
    float* du2 = d_array_uniform2;
    float* ds = d_array_step;
    for (int i = 0; i < numSteps; i++) {
        // Tempered MH update for every chain (D floats of shared scratch
        // per thread).
        FUNC(metropolis_rwpop_step, TYPE)<D, 0><<<nb,nt,D*nt*sizeof(float)>>>(d_array_init, ds, du1, d_temps, d_array_out, log);
        cudaDeviceSynchronize();
        // Exchange move between neighbouring temperatures.
        FUNC(metropolis_rwpop_exchange, TYPE)<D, 0><<<nb,nt>>>(d_array_out, array_types[i], d_temps, du2, log);
        cudaDeviceSynchronize();
        // This sweep's output becomes the next sweep's input; advance all
        // per-sweep buffers.
        d_array_init = d_array_out;
        ds += tt * D;
        du1 += tt;
        d_array_out += tt * D;
        du2 += tt;
    }
    cudaFree(d_array_uniform1);
    cudaFree(d_array_uniform2);
    cudaFree(d_array_step);
    free(array_types);
}
// Data-parallel ("marginal") parallel-tempering driver: each MH update is
// performed by metropolis_rwpop_step2, where reduction_size threads
// cooperate per chain to sum the per-datum log-likelihood.  Only the last
// chain's state (slot tt-1) is recorded per sweep into d_array_out.
// Per-phase wall-clock timings are printed.
//
// Fixes vs. previous revision:
//  - d_args_p was leaked (cudaMalloc with no matching cudaFree);
//  - deprecated cudaThreadSynchronize() replaced by cudaDeviceSynchronize();
//  - the exchange kernel is now synchronized before its timestamp, so
//    "Exchange time" measures execution rather than just the async launch;
//  - unused locals (dens, samp, tv3) removed.
template<int D, int reduction_size>
void FUNC( metropolis_rwpop_marginal, TYPE)(int N, float* d_array_init, float sigma, float* h_args_p,
        float* d_temps, float* d_array_out, int log, int nb, int nt) {
    struct timeval tv1, tv2, tv4, tv5, tv6;
    gettimeofday(&tv5, NULL);
    // h_args_p[0] holds the number of data points; the data follow, then
    // five scalar parameters.
    int nbr_data = (int) h_args_p[0];
    // Full parameter block in global memory — needed by the tiled kernel
    // path when the data exceed the constant-memory budget.
    float* d_args_p;
    cudaMalloc((void **) &d_args_p, (nbr_data + 6) * sizeof(float));
    cudaMemcpy(d_args_p, h_args_p, (nbr_data + 6) * sizeof(float), cudaMemcpyHostToDevice);
    // Constant memory holds at most 8192 data points here; larger data
    // sets upload only a truncated copy (the kernel then streams tiles
    // from d_args_p instead).
    if (nbr_data <= 8192) {
        cudaMemcpyToSymbol(args_p, h_args_p, (nbr_data + 6) * sizeof(float));
    } else {
        cudaMemcpyToSymbol(args_p, h_args_p, (8192 + 6) * sizeof(float));
    }
    int tt = nb * nt;          // number of chains
    int numSteps = N / tt;     // number of sweeps
    // Random even/odd pairing for each sweep's exchange move.
    int* array_types = (int*) malloc(numSteps * sizeof(int));
    populate_randIK(array_types, numSteps, 2);
    float* d_array_step;
    cudaMalloc((void **) &d_array_step, N * D * sizeof(float));
    populate_randn_d(d_array_step, N * D);
    if (sigma != 1.0) {
        multiply(N * D, d_array_step, d_array_step, sigma, nb, nt);
    }
    // Separate uniform streams for MH acceptance and exchange acceptance.
    float* d_array_uniform1;
    float* d_array_uniform2;
    cudaMalloc((void **) &d_array_uniform1, N * sizeof(float));
    cudaMalloc((void **) &d_array_uniform2, N * sizeof(float));
    populate_rand_d(d_array_uniform1, N);
    populate_rand_d(d_array_uniform2, N);
    float* du1 = d_array_uniform1;
    float* du2 = d_array_uniform2;
    float* ds = d_array_step;
    // Double buffer for the chain states during one sweep.
    float* d_array_temp;
    cudaMalloc((void**) &d_array_temp, tt * D * sizeof(float));
    // Cached log-densities, one per chain.
    float* densities;
    cudaMalloc((void**) &densities, tt * sizeof(float));
    FUNC( metropolis_rwpop_init, TYPE)<D, 0><<<nb,nt>>>(d_array_init, log, densities);
    cudaDeviceSynchronize();
    gettimeofday(&tv6, NULL);
    printf ("Init time = %f seconds\n",
            (double) (tv6.tv_usec - tv5.tv_usec) / 1000000 +
            (double) (tv6.tv_sec - tv5.tv_sec));
    double sum1 = 0, sum2 = 0, sum3 = 0;
    for (int i = 0; i < numSteps; i++) {
        // --- tempered MH update (one of two launch shapes) ---
        gettimeofday(&tv1, NULL);
        if (nbr_data <= 8192) {
            // One thread per datum per chain; shared memory holds the
            // per-thread partial sums plus one proposal vector per chain.
            FUNC(metropolis_rwpop_step2, TYPE)<D,reduction_size><<<nb,nt*reduction_size,(nt*reduction_size+nt*D)*sizeof(float)>>>(d_array_init, ds + i * tt * D, du1 + i * tt, d_temps, d_array_temp, log, densities, d_args_p);
        } else {
            // Tiled path: each thread accumulates nbr_data/2048 points and
            // 2048+6 extra floats of shared memory stage each data tile.
            FUNC(metropolis_rwpop_step2, TYPE)<D,reduction_size><<<nb,(nt*reduction_size)/(nbr_data/2048),((nt*reduction_size)/(nbr_data/2048)+nt*D+2048)*sizeof(float)>>>(d_array_init, ds + i * tt * D, du1 + i * tt, d_temps, d_array_temp, log, densities, d_args_p);
        }
        cudaDeviceSynchronize();
        gettimeofday(&tv2, NULL);
        sum1 = sum1 + ((double) (tv2.tv_usec - tv1.tv_usec) / 1000000 + (double) (tv2.tv_sec - tv1.tv_sec));
        // --- exchange move ---
        gettimeofday(&tv1, NULL);
        FUNC(metropolis_rwpop_exchange2, TYPE)<D, 0><<<nb,nt>>>(d_array_temp, array_types[i], d_temps,
                du2 + i * tt, log, densities);
        // Synchronize so the timestamp below reflects kernel execution,
        // not just the asynchronous launch.
        cudaDeviceSynchronize();
        gettimeofday(&tv2, NULL);
        sum2 = sum2 + ((double) (tv2.tv_usec - tv1.tv_usec) / 1000000 + (double) (tv2.tv_sec - tv1.tv_sec));
        // --- copy back / record output ---
        gettimeofday(&tv1, NULL);
        cudaMemcpy(d_array_init, d_array_temp, tt * D * sizeof(float), cudaMemcpyDeviceToDevice);
        // Keep only the last chain's state for this sweep.
        cudaMemcpy(vector_get(d_array_out, D, i), vector_get(d_array_temp, D, tt - 1), D
                * sizeof(float), cudaMemcpyDeviceToDevice);
        gettimeofday(&tv2, NULL);
        sum3 = sum3 + ((double) (tv2.tv_usec - tv1.tv_usec) / 1000000 + (double) (tv2.tv_sec - tv1.tv_sec));
    }
    printf ("Update time total = %f seconds\n", sum1);
    printf ("Exchange time total = %f seconds\n", sum2);
    printf ("MemCopy time total = %f seconds\n", sum3);
    cudaFree(d_array_uniform1);
    cudaFree(d_array_uniform2);
    cudaFree(d_array_step);
    cudaFree(d_array_temp);
    cudaFree(densities);
    cudaFree(d_args_p);   // was leaked in the previous revision
    free(array_types);
    gettimeofday(&tv4, NULL);
    printf ("FUNC time = %f seconds\n",
            (double) (tv4.tv_usec - tv5.tv_usec) / 1000000 +
            (double) (tv4.tv_sec - tv5.tv_sec));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.