serial_no int64 1 24.2k | cuda_source stringlengths 11 9.01M |
|---|---|
11,201 | #include <thrust/host_vector.h>
#include <thrust/device_vector.h>
#include <thrust/generate.h>
#include <thrust/sort.h>
#include <stdio.h>
int main()
{
    const int n = 10;
    // Fill a host vector with pseudo-random values and show the raw data.
    thrust::host_vector<int> host(n);
    thrust::generate(host.begin(), host.end(), rand);
    for (int i = 0; i < n; ++i)
        printf("%d\n", host[i]);
    // Copy to the device, sort there, and bring the result back.
    thrust::device_vector<int> device = host;
    thrust::sort(device.begin(), device.end());
    host = device;
    printf("\n");
    // Show the sorted data.
    for (int i = 0; i < n; ++i)
        printf("%d\n", host[i]);
} |
11,202 | #define TILE_DIM 1024
#include <limits>
// For each row of a row-major numRows x numColumns matrix, writes the column
// index of the row's minimum value into result[row]. One thread block
// cooperates on a row; blocks advance over rows with a grid stride.
// Expects blockDim.x == TILE_DIM (shared arrays are sized TILE_DIM).
template<typename T>
__device__ void argminColumn(const T* matrix, int* result, const int numRows, const int numColumns) {
    __shared__ T partsVals[TILE_DIM];
    __shared__ int partsArgs[TILE_DIM];
    int index = threadIdx.x;
    int partLength = (numColumns + TILE_DIM - 1) / TILE_DIM;
    int limit = numColumns < TILE_DIM ? numColumns : TILE_DIM;
    // FIX: the whole block walks rows together, so the stride must be the
    // number of blocks (gridDim.x). The original used blockDim.x, which
    // skips rows whenever gridDim.x < blockDim.x.
    for (int row = blockIdx.x; row < numRows; row += gridDim.x) {
        // Phase 1: each thread scans a TILE_DIM-strided subset of columns.
        T min = std::numeric_limits<T>::max();
        int argmin = -1;
        for (int i = 0; i < partLength; i++) {
            int columnIndex = i * TILE_DIM + index;
            if (columnIndex < numColumns) {
                T value = matrix[row * numColumns + columnIndex];
                if (value < min) {
                    min = value;
                    argmin = columnIndex;
                }
            }
        }
        partsVals[index] = min;
        partsArgs[index] = argmin;
        // Phase 2: tree reduction over the first `limit` shared slots.
        for (int d = 1; d < limit; d <<= 1) {
            __syncthreads();
            if (index % (d << 1) == 0) {
                int valueIndex = index + d;
                if (valueIndex < limit) {
                    T value = partsVals[valueIndex];
                    int arg = partsArgs[valueIndex];
                    if (value < min) {
                        min = value;
                        partsVals[index] = min;
                        argmin = arg;
                        partsArgs[index] = argmin;
                    }
                }
            }
        }
        if (index == 0) {
            result[row] = argmin;
        }
        // FIX: barrier before the next iteration overwrites partsVals /
        // partsArgs while lagging threads may still be reading them above.
        __syncthreads();
    }
} |
11,203 | #include <iostream>
#include <fstream>
#include <vector>
#include <cstdlib>
#include <time.h>
#include <math.h>
#include <stdio.h>
#include <sys/time.h>
#define GPU_CHECKERROR( err ) (gpuCheckError( err, __FILE__, __LINE__ ))
using namespace std;
// Print (but do not abort on) a failed CUDA API call, with file/line context.
static void gpuCheckError(cudaError_t err, const char *file, int line)
{
    if (err == cudaSuccess)
        return;
    cout<<endl<<cudaGetErrorString(err)<<" in "<<file<< " at"<<line<<".";
}
// Select the CUDA device with the largest threads-per-block limit, make it
// the current device, and return that limit.
int getMaxThreadsPerBlock()
{
    int deviceCount;
    GPU_CHECKERROR( cudaGetDeviceCount(&deviceCount) );
    int bestThreads = 0;
    int bestDevice = 0;
    cudaDeviceProp props;
    for (int dev = 0; dev < deviceCount; dev++)
    {
        GPU_CHECKERROR( cudaGetDeviceProperties(&props, dev) );
        if (props.maxThreadsPerBlock > bestThreads)
        {
            bestDevice = dev;
            bestThreads = props.maxThreadsPerBlock;
        }
    }
    GPU_CHECKERROR( cudaSetDevice (bestDevice) );
    return bestThreads;
}
// Greatest common divisor via Euclid's algorithm; gcd(a, 0) == a.
unsigned int gcd(unsigned int a, unsigned int b)
{
    while (b != 0)
    {
        unsigned int r = a % b;
        a = b;
        b = r;
    }
    return a;
}
// One thread per pair: atomically increments *dev_count when
// gcd(dev_A[i], dev_B[i]) == 1. Inputs are read-only here.
__global__ void parallelCoprimeCountAtomicAdd(int *dev_A, int *dev_B, unsigned int *dev_count, unsigned int size)
{
    const unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < size)
    {
        int a = dev_A[i];
        int b = dev_B[i];
        // Euclid's algorithm, entirely in registers.
        while (b != 0)
        {
            const int r = a % b;
            a = b;
            b = r;
        }
        if (a == 1)
            atomicAdd(dev_count, 1);
    }
}
// In-place tree sum of each block's segment of dev_A; the block's partial
// sum ends up at dev_A[blockDim.x * blockIdx.x].
// FIX: the original returned early for index >= size, so threads of the
// last (partial) block skipped __syncthreads() — a divergent barrier.
// All threads now stay in the loop; the guard below keeps them from
// touching memory past `size`.
__global__ void reduceAdd(int *dev_A, unsigned int size)
{
    unsigned int tid = threadIdx.x;
    unsigned int index = blockDim.x * blockIdx.x + tid;
    for (unsigned int s = blockDim.x/2; s > 0; s /= 2)
    {
        // (index + s) < size implies index < size, so both accesses are safe.
        if (tid < s && (index + s) < size)
            dev_A[index] += dev_A[index + s];
        __syncthreads();
    }
}
// Replaces dev_A[i] with 1 when the pair (dev_A[i], dev_B[i]) is co-prime
// and 0 otherwise, so a later reduction can count the co-prime pairs.
// Note: dev_A is overwritten.
__global__ void parallelCoprimeCount(int *dev_A, int *dev_B, unsigned int size)
{
    const unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i >= size)
        return;
    int a = dev_A[i];
    int b = dev_B[i];
    while (b != 0)
    {
        const int r = a % b;
        a = b;
        b = r;
    }
    dev_A[i] = (a == 1) ? 1 : 0;
}
// Counts co-prime pairs (A[i], B[i]) three ways: serially on the host,
// on the GPU with atomicAdd, and on the GPU with a flag + block reduction.
// Usage: prog <size>          -> random pairs
//        prog <size> <file>   -> pairs read (interleaved) from <file>
int main (int argc, char *argv[])
{
    int *A, *B;
    unsigned int count = 0;
    unsigned int size = 0;
    timeval t0, t1, t2;
    if (argc < 2 || argc > 3)
        return -1;
    if (argc == 2)
    {
        srand(time(NULL));
        size = strtol(argv[1], NULL, 10);
        if (size < 1)
        {
            cout<<endl<<"Size of the arrays should be greater than 0."<<endl;
            return -1;
        }
        A = new int[size];
        B = new int[size];
        for (int i = 0; i < size; i++)
        {
            A[i] = rand()%10000000 + 1;
            B[i] = rand()%10000000 + 1;
        }
    }
    else if (argc == 3)
    {
        const char *filename = argv[2];
        std::ifstream input_file(filename);
        if (!input_file.is_open())
        {
            cout<<endl<<"Error opening file!! Exiting!!";
            return -1;
        }
        int x;
        bool a = true;
        // First pass: count values to size the arrays (two values per pair).
        while(input_file >> x)
            size++;
        size /= 2;
        if (size < 1)
        {
            cout<<endl<<"Size of the arrays should be greater than 0."<<endl;
            return -1;
        }
        A = new int[size];
        B = new int[size];
        input_file.clear();
        input_file.seekg(0, input_file.beg);
        unsigned int i = 0;
        // Second pass: alternate values into A and B.
        while(input_file >> x)
        {
            if (a)
                A[i] = x;
            else
                B[i++] = x;
            a = !a;
        }
        input_file.close();
    }
    int *device_A;
    GPU_CHECKERROR(cudaMalloc((void**) &device_A, size * sizeof(int)));
    GPU_CHECKERROR( cudaMemcpy((void*) device_A,
        (void*) A, size * sizeof(int), cudaMemcpyHostToDevice) );
    int *device_B;
    GPU_CHECKERROR( cudaMalloc((void**) &device_B, size * sizeof(int)) );
    GPU_CHECKERROR( cudaMemcpy((void*) device_B,
        (void*) B, size * sizeof(int), cudaMemcpyHostToDevice) );
    unsigned int *device_count;
    GPU_CHECKERROR( cudaMalloc((void**) &device_count, sizeof(unsigned int)) );
    GPU_CHECKERROR( cudaMemset((void*) device_count, 0, sizeof(unsigned int)) );
    // --- Serial baseline ---
    cout<<endl<<"Beginning Serial Version...";
    gettimeofday(&t0, NULL);
    for (unsigned int i = 0; i < size; i++)
        if (gcd(A[i], B[i]) == 1)
            count++;
    gettimeofday(&t1, NULL);
    float timdiff1 = (1000000.0*(t1.tv_sec - t0.tv_sec)
        + (t1.tv_usec - t0.tv_usec)) / 1000000.0;
    cout<<endl<<"Serial Version ended in "<<timdiff1<<" s";
    cout<<endl<<"Serial Version says "<<count<<" pairs are co-prime.";
    int threadsPerBlock = getMaxThreadsPerBlock();
    unsigned int numBlocks = ceil((double)size/(double)threadsPerBlock);
    // --- GPU version 1: one atomicAdd per co-prime pair ---
    cout<<endl<<"\nBeginning Parallel Version using AtomicAdd...";
    gettimeofday(&t0, NULL);
    parallelCoprimeCountAtomicAdd<<<numBlocks, threadsPerBlock>>>(device_A, device_B, device_count, size);
    GPU_CHECKERROR( cudaDeviceSynchronize() );
    unsigned int parallel_count = 0;
    // BUGFIX: the destination was mangled by an HTML-entity corruption
    // ("&para" rendered as a pilcrow); restored to &parallel_count.
    GPU_CHECKERROR( cudaMemcpy((void *) &parallel_count, (void *) device_count, sizeof(unsigned int), cudaMemcpyDeviceToHost) );
    gettimeofday(&t2, NULL);
    float timdiff2 = (1000000.0*(t2.tv_sec - t0.tv_sec)
        + (t2.tv_usec - t0.tv_usec)) / 1000000.0;
    cout<<endl<<"Parallel Version with AtomicAdd ended in "<<timdiff2<<" s";
    cout<<endl<<"Parallel Version with AtomicAdd says "<<parallel_count<<" pairs are co-prime."<<endl;
    // --- GPU version 2: mark co-prime pairs, then reduce per block ---
    cout<<endl<<"Beginning Parallel Version using Reduce...";
    gettimeofday(&t0, NULL);
    parallelCoprimeCount<<<numBlocks, threadsPerBlock>>>(device_A, device_B, size);
    GPU_CHECKERROR( cudaDeviceSynchronize() );
    parallel_count = 0;
    // Note: the device copy of A is destroyed by the in-place reduction; a
    // dedicated output array could be used if the input must be preserved.
    reduceAdd<<<numBlocks, threadsPerBlock>>>(device_A, size);
    GPU_CHECKERROR( cudaDeviceSynchronize() );
    GPU_CHECKERROR( cudaMemcpy((void*) A,
        (void*) device_A, size * sizeof(int), cudaMemcpyDeviceToHost) );
    // Each block left its partial sum at the first index of its segment.
    for (unsigned int i = 0; i < size; i += threadsPerBlock)
    {
        parallel_count += A[i];
    }
    gettimeofday(&t2, NULL);
    float timdiff3 = (1000000.0*(t2.tv_sec - t0.tv_sec)
        + (t2.tv_usec - t0.tv_usec)) / 1000000.0;
    cout<<endl<<"Parallel Version with Reduce ended in "<<timdiff3<<" s";
    cout<<endl<<"Parallel Version with Reduce says "<<parallel_count<<" pairs are co-prime."<<endl;
    GPU_CHECKERROR( cudaFree(device_A) );
    GPU_CHECKERROR( cudaFree(device_B) );
    GPU_CHECKERROR( cudaFree(device_count) );
    delete[] A;
    delete[] B;
    return 0;
}
|
11,204 | // SGD update with momentum and weight decay, one thread per weight:
//   MO[i] = mo*MO[i] - (lr*LR) * ((wd*WD)*w[i] + dzdw[i]/ts)
//   w[i] += MO[i]
// `len` is the element count (passed as float and compared against the id).
__global__ void SGD(const float mo, float *MO,
                    const float lr, const float LR,
                    const float wd, const float WD,
                    float *w, const float *dzdw,
                    const float ts, const float len) {
    const int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < len) {
        const float step = (wd*WD)*w[i] + dzdw[i]/ts;
        MO[i] = mo*MO[i] - (lr*LR)*step;
        w[i] += MO[i];
    }
} |
11,205 | #include "includes.h"
#define L2HYS_EPSILON 0.01f
#define L2HYS_EPSILONHYS 1.0f
#define L2HYS_CLIP 0.2f
#define data_h2y 30
//long h_windowx=Imagewidth/Windowx;
//long h_windowy=ImageHeight/Windowy;
//dim3 blocks(h_windowx,h_windowy);//h_windowx=ImageWidth/Windowx,h_windowy=ImageHeight/Windowy
//dim3 threads(Windowx,Windowy);//each thread block computes the feature values of one cell (translated from GBK-encoded Chinese)
//dim3 block(18,7);//a cell is split into 18 angular directions, 7 cells per direction (translated from GBK-encoded Chinese)
// Regroups per-cell histogram bins into a per-block output layout
// (HOG-style features). The original comments were GBK-encoded Chinese;
// translation (to be confirmed against the launch configuration): launch is
// expected as dim3(18,7) — 18 angular directions, 7 cells per direction.
__global__ void countblock(float *in ,float *out)
{
    //if(in+70*blockIdx.x+(blockIdx.y+threadIdx.x)*10!=NULL)
    //{
    // Input stride: 70 floats per direction = 7 cells x 10 bins per cell.
    float *ptr_in=in+70*blockIdx.x+(blockIdx.y+threadIdx.x)*10;
    // Output stride: 120 per direction = 4 blocks x 3 cells x 10 bins.
    float *ptr_out=out+120*blockIdx.x+30*blockIdx.y+10*threadIdx.x;
    // threadIdx.y selects which bin this thread copies.
    ptr_out[threadIdx.y]=ptr_in[threadIdx.y];
    ////}
} |
11,206 | #include "includes.h"
// Scales each element of `first` by the square root of the matching element
// of `second`. No bounds guard: the launch must cover exactly the vector
// length (grid*block == N).
__global__ void CrossVector(float *first , float *second) {
    const int idx = threadIdx.x + blockIdx.x * blockDim.x;
    first[idx] = first[idx] * sqrtf(second[idx]);
} |
11,207 | #include <cuda.h>
#include <vector>
#include <cstdio>
#include <cstdlib>
// Element-wise x[i] += y[i] over n ints (completes the TODO stub: without
// this body the program below always prints FAILED).
__global__ void kernel(int* x, int* y, int n) {
    // Flat global thread id over a 1-D grid of 1-D blocks.
    size_t tid = (size_t)blockIdx.x * blockDim.x + threadIdx.x;
    // Guard: the grid is rounded up, so trailing threads must not touch x/y.
    if (tid < (size_t)n) {
        x[tid] += y[tid];
    }
}
int main(int argc, char** argv) {
    const size_t n = 1000;
    // Two host vectors of ones; after x += y every element should be 2.
    std::vector<int> host_x(n, 1);
    std::vector<int> host_y(n, 1);
    // Device buffers and uploads.
    int* d_x;
    cudaMalloc(&d_x, sizeof(int)*n);
    int* d_y;
    cudaMalloc(&d_y, sizeof(int)*n);
    cudaMemcpy(d_x, host_x.data(), sizeof(int)*n, cudaMemcpyHostToDevice);
    cudaMemcpy(d_y, host_y.data(), sizeof(int)*n, cudaMemcpyHostToDevice);
    cudaDeviceSynchronize();
    // One thread per element; grid rounded up (ceil-division).
    const size_t block_size = 256;
    dim3 block(block_size);
    dim3 grid((n + block_size - 1) / block_size);
    kernel<<<grid, block>>>(d_x, d_y, n);
    cudaMemcpy(host_x.data(), d_x, sizeof(int)*n, cudaMemcpyDeviceToHost);
    cudaDeviceSynchronize();
    // Verify.
    bool all_twos = true;
    for (size_t i = 0; i < host_x.size(); ++i) {
        if (host_x[i] != 2) {
            all_twos = false;
            break;
        }
    }
    printf(all_twos ? "OK!\n" : "FAILED.\n");
    cudaFree(d_x);
    cudaFree(d_y);
    return 0;
}
|
11,208 | #ifndef __lcl_constants
#define __lcl_constants
//#include <thrust/device_vector.h>
#include <cuda.h>
#include <vector>
#include <cufft.h>
#include <cufftXt.h>
#include <stdio.h>
// Switchable precision for most host/device math in this header.
typedef float decimal;
// Batched per-microphone FFT input; micData[b] holds batch b's samples.
struct MicData {
    cufftDoubleComplex** micData;
    int* waveLengths;        // per-batch length — presumably sample counts; verify against producer
    int numberOfBatches;
};
// Flat array of distances (size owned by the producer).
struct Distances {
    decimal* distances;
};
// 2-D point.
struct Coordinate {
    decimal x;
    decimal y;
};
// One detected frequency component and its offset.
struct FftResult {
    decimal frequency;
    decimal offset;
};
// A batch of FFT results plus its element count.
struct FftBatch {
    FftResult* fftResults;
    unsigned int size;
};
// A matched pair of wave indices and their relative offset.
struct WavePair {
    int waveIdx1;
    int waveIdx2;
    decimal offset;
};
// All wave pairs matched between two FFT batches.
struct WavePairContainer{
    int firstFFT;            // index of the first batch
    int secondFFT;           // index of the second batch
    WavePair* wavePairArray;
    int wavePairCount;
    //thrust::device_vector<WavePair> wavePairArray;
};
// Host-side match data: one bool matrix (widths[i] x heights[i]) per pair.
struct WaveMatches {
    std::vector<bool*> matches;
    std::vector<unsigned int> widths;
    std::vector<unsigned int> heights;
    std::vector<int> widthBatches;
    std::vector<int> heightBatches;
};
// Device-side mirror of WaveMatches: raw arrays plus element counts.
struct GpuWaveMatches {
    bool** matches;
    unsigned int matchesCount;
    unsigned int* widths;
    unsigned int widthsCount;
    unsigned int* heights;
    unsigned int heightsCount;
    int* widthBatches;
    unsigned int widthBatchesCount;
    int* heightBatches;
    unsigned int heightBatchesCount;
};
// Deep-copies a device-resident GpuWaveMatches into host memory.
// NOTE(review): h_gpuWaveMatches is passed by value, so the malloc below
// never reaches the caller; the signature should take GpuWaveMatches**
// (kept as-is here to preserve the existing interface).
void GpuWaveMatchesToHost(GpuWaveMatches* h_gpuWaveMatches, GpuWaveMatches* d_gpuWaveMatches)
{
    h_gpuWaveMatches = (GpuWaveMatches*)malloc(sizeof(GpuWaveMatches));
    cudaMemcpy(h_gpuWaveMatches, d_gpuWaveMatches, sizeof(GpuWaveMatches), cudaMemcpyDeviceToHost);
    printf("gwmth 1\r\n"); fflush(NULL);
    // After the struct copy, each pointer field still points at device
    // memory. FIX: copy FROM the device pointer the field holds
    // (h->widths), not from the address of the host-side field (&h->widths)
    // as the original did.
    unsigned int* tempWidths = (unsigned int*)malloc( sizeof(unsigned int) * h_gpuWaveMatches->widthsCount);
    cudaMemcpy(tempWidths, h_gpuWaveMatches->widths, sizeof(unsigned int) * h_gpuWaveMatches->widthsCount, cudaMemcpyDeviceToHost);
    h_gpuWaveMatches->widths = tempWidths;
    printf("gwmth 2\r\n"); fflush(NULL);
    unsigned int* tempHeights = (unsigned int*)malloc( sizeof(unsigned int) * h_gpuWaveMatches->heightsCount);
    cudaMemcpy(tempHeights, h_gpuWaveMatches->heights, sizeof(unsigned int) * h_gpuWaveMatches->heightsCount, cudaMemcpyDeviceToHost);
    printf("gwmth 3\r\n"); fflush(NULL);
    h_gpuWaveMatches->heights = tempHeights;
    int* tempWidthBatches = (int*)malloc( sizeof(int) * h_gpuWaveMatches->widthBatchesCount);
    cudaMemcpy(tempWidthBatches, h_gpuWaveMatches->widthBatches, sizeof(int) * h_gpuWaveMatches->widthBatchesCount, cudaMemcpyDeviceToHost);
    h_gpuWaveMatches->widthBatches = tempWidthBatches;
    printf("gwmth 4\r\n"); fflush(NULL);
    int* tempHeightBatches = (int*)malloc( sizeof(int) * h_gpuWaveMatches->heightBatchesCount);
    cudaMemcpy(tempHeightBatches, h_gpuWaveMatches->heightBatches, sizeof(int) * h_gpuWaveMatches->heightBatchesCount, cudaMemcpyDeviceToHost);
    h_gpuWaveMatches->heightBatches = tempHeightBatches;
    // FIX: the original memcpy'd sizeof(bool*)*count bytes over the struct
    // field itself (&h->matches), overflowing the struct. Bring the device
    // array of matrix pointers into a host array first, then deep-copy each
    // matrix.
    bool** tempMatchPtrs = (bool**)malloc( sizeof(bool*) * h_gpuWaveMatches->matchesCount);
    cudaMemcpy(tempMatchPtrs, h_gpuWaveMatches->matches, sizeof(bool*) * h_gpuWaveMatches->matchesCount, cudaMemcpyDeviceToHost);
    h_gpuWaveMatches->matches = tempMatchPtrs;
    for (unsigned int i = 0; i < h_gpuWaveMatches->matchesCount; i++){
        printf("widths: %i , heights: %i\r\n", h_gpuWaveMatches->widths[i], h_gpuWaveMatches->heights[i]);
        bool* tempMatches = (bool*)malloc( sizeof(bool) * h_gpuWaveMatches->widths[i] * h_gpuWaveMatches->heights[i] );
        printf("brianIs");
        cudaMemcpy(tempMatches, h_gpuWaveMatches->matches[i], sizeof(bool) * h_gpuWaveMatches->widths[i] * h_gpuWaveMatches->heights[i], cudaMemcpyDeviceToHost);
        printf("notReal");
        h_gpuWaveMatches->matches[i] = tempMatches;
        printf("its a thing here\r\n");fflush(NULL);
    }
}
// Frees a device-resident GpuWaveMatches and every buffer it points to.
void freeGpuWaveMatches(GpuWaveMatches* gpuMatches)
{
    GpuWaveMatches* h_gpuMatches = (GpuWaveMatches*)malloc(sizeof(GpuWaveMatches));
    cudaMemcpy(h_gpuMatches, gpuMatches, sizeof(GpuWaveMatches), cudaMemcpyDeviceToHost);
    // FIX: h_gpuMatches->matches is a device pointer; the original indexed
    // it from host code (invalid) and freed field addresses (&h->x) instead
    // of the device pointers the fields hold. Copy the pointer array to the
    // host first, then free each matrix, the array, and the scalar fields.
    bool** matchPtrs = (bool**)malloc(sizeof(bool*) * h_gpuMatches->matchesCount);
    cudaMemcpy(matchPtrs, h_gpuMatches->matches, sizeof(bool*) * h_gpuMatches->matchesCount, cudaMemcpyDeviceToHost);
    for (unsigned int i = 0; i < h_gpuMatches->matchesCount; i++)
    {
        cudaFree(matchPtrs[i]);
    }
    free(matchPtrs);
    cudaFree(h_gpuMatches->matches);
    printf("stuff n things\r\n"); fflush(NULL);
    cudaFree(h_gpuMatches->widths);
    cudaFree(h_gpuMatches->heights);
    cudaFree(h_gpuMatches->widthBatches);
    cudaFree(h_gpuMatches->heightBatches);
    free(h_gpuMatches);
    cudaFree(gpuMatches);
}
// Deep-copies a host WaveMatches into a newly allocated device-resident
// GpuWaveMatches.
// NOTE(review): gpuMatches is passed by value, so the cudaMalloc below never
// reaches the caller; the signature should take GpuWaveMatches** (kept as-is
// here to preserve the existing interface).
void WaveMatchesToGpu(const WaveMatches& matches, GpuWaveMatches* gpuMatches)
{
    //allocate memory for the GpuWaveMatches struct
    cudaMalloc(&gpuMatches, sizeof(GpuWaveMatches));
    GpuWaveMatches* h_gpuMatches = (GpuWaveMatches*)malloc(sizeof(GpuWaveMatches));
    // Copy each match matrix to the device, collecting the device pointers
    // in a host staging array, then upload the pointer array itself.
    bool** gpuMatchesArray;
    cudaMalloc(&gpuMatchesArray, sizeof(bool*) * matches.matches.size());
    bool** h_gpuMatchesArray = (bool**)malloc(sizeof(bool*) * matches.matches.size());
    for (unsigned int i = 0; i < matches.matches.size(); i++)
    {
        bool* gpuMatchMatrix;
        cudaMalloc(&gpuMatchMatrix, sizeof(bool) * matches.widths[i] * matches.heights[i]);
        cudaMemcpy(gpuMatchMatrix, matches.matches[i], sizeof(bool) * matches.widths[i] * matches.heights[i], cudaMemcpyHostToDevice);
        h_gpuMatchesArray[i] = gpuMatchMatrix;
    }
    cudaMemcpy(gpuMatchesArray, h_gpuMatchesArray, sizeof(bool*) * matches.matches.size(), cudaMemcpyHostToDevice);
    h_gpuMatches->matches = gpuMatchesArray;
    h_gpuMatches->matchesCount = matches.matches.size();
    //copy the stored widths
    unsigned int* gpuWidths;
    cudaMalloc(&gpuWidths, sizeof(unsigned int) * matches.widths.size());
    cudaMemcpy(gpuWidths, &matches.widths[0], sizeof(unsigned int) * matches.widths.size(), cudaMemcpyHostToDevice);
    h_gpuMatches->widths = gpuWidths;
    h_gpuMatches->widthsCount = matches.widths.size();
    //copy the stored heights
    unsigned int* gpuHeights;
    cudaMalloc(&gpuHeights, sizeof(unsigned int) * matches.heights.size());
    cudaMemcpy(gpuHeights, &matches.heights[0], sizeof(unsigned int) * matches.heights.size(), cudaMemcpyHostToDevice);
    h_gpuMatches->heights = gpuHeights;
    h_gpuMatches->heightsCount = matches.heights.size();
    //copy stored widthBatches
    int* gpuWidthBatches;
    cudaMalloc(&gpuWidthBatches, sizeof(int) * matches.widthBatches.size());
    cudaMemcpy(gpuWidthBatches, &matches.widthBatches[0], sizeof(int) * matches.widthBatches.size(), cudaMemcpyHostToDevice);
    h_gpuMatches->widthBatches = gpuWidthBatches;
    h_gpuMatches->widthBatchesCount = matches.widthBatches.size();
    //copy stored heightBatches
    int* gpuHeightBatches;
    cudaMalloc(&gpuHeightBatches, sizeof(int) * matches.heightBatches.size());
    cudaMemcpy(gpuHeightBatches, &matches.heightBatches[0], sizeof(int) * matches.heightBatches.size(), cudaMemcpyHostToDevice);
    h_gpuMatches->heightBatches = gpuHeightBatches;
    // Upload the filled-in struct itself.
    cudaMemcpy(gpuMatches, h_gpuMatches, sizeof(GpuWaveMatches), cudaMemcpyHostToDevice);
    // FIX: release host staging memory (previously leaked; was a TODO).
    free(h_gpuMatchesArray);
    free(h_gpuMatches);
}
#endif
|
11,209 | #include "includes.h"
// C[i] = A[i+offset] + B[i+offset]; intended only to warm up the device
// before timed kernels. One thread per output element.
__global__ void warmup(float *A, float *B, float *C, const int n, int offset)
{
    const unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    const unsigned int src = i + offset;
    if (src < n)
        C[i] = A[src] + B[src];
} |
11,210 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
//__global__ void Mat_add(double A[], double B[], double C[], double alpha[], int m, int n) {
// C = A - alpha[0] * B over an m x n row-major matrix. Expected launch is
// <<<m, n>>>: one block per row, one thread per column.
__global__ void Mat_add(double *A, double *B, double *C, double *alpha, int m, int n) {
    if (blockIdx.x >= m || threadIdx.x >= n)
        return;
    const int idx = blockDim.x * blockIdx.x + threadIdx.x;
    C[idx] = A[idx] - alpha[0] * B[idx];
}
/* Host code */
/* Host code: times C = A - alpha*B on the GPU for an m x n matrix given on
 * the command line. */
int main(int argc, char* argv[]) {
    int m, n;
    double *h_A, *h_B, *h_C, *h_alpha;
    double *d_A, *d_B, *d_C, *d_alpha;
    size_t size;
    /* Get size of matrices */
    if (argc != 3) {
        fprintf(stderr, "usage: %s <row count> <col count>\n", argv[0]);
        exit(0);
    }
    m = strtol(argv[1], NULL, 10);
    n = strtol(argv[2], NULL, 10);
    printf("m = %d, n = %d\n", m, n);
    size = m*n*sizeof(double);
    h_A = (double*)malloc(size);
    h_B = (double*)malloc(size);
    h_C = (double*)malloc(size);
    /* NOTE(review): only alpha[0] is ever read by the kernel, but a full
     * m*n buffer is allocated to preserve existing behavior. */
    h_alpha = (double*)malloc(size);
    for (int i = 0; i < m; i++)
        for (int j = 0; j < n; j++) {
            h_A[i*n+j] = 2.0f;
            h_B[i*n+j] = 4.0f;
            h_C[i*n+j] = 0.0f;
            h_alpha[i*n+j] = 6.0f;
        }
    /* Allocate matrices in device memory */
    cudaMalloc(&d_A , size);
    cudaMalloc(&d_B , size);
    cudaMalloc(&d_C , size);
    cudaMalloc(&d_alpha, size);
    /* Copy matrices from host memory to device memory */
    cudaMemcpy(d_A , h_A , size, cudaMemcpyHostToDevice);
    cudaMemcpy(d_B , h_B , size, cudaMemcpyHostToDevice);
    cudaMemcpy(d_alpha, h_alpha, size, cudaMemcpyHostToDevice);
    /* Invoke kernel using m thread blocks of n threads each (n must not
     * exceed the device's max threads per block) and time it with events. */
    float milli = 0;
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start,0);
    Mat_add<<<m, n>>>(d_A, d_B, d_C, d_alpha, m, n);
    cudaEventRecord(stop,0);
    cudaEventSynchronize(stop);
    cudaEventElapsedTime(&milli,start,stop);
    /* Wait for the kernel to complete. FIX: cudaThreadSynchronize() is
     * deprecated; use cudaDeviceSynchronize(). */
    cudaDeviceSynchronize();
    /* Copy result from device memory to host memory */
    cudaMemcpy(h_C, d_C, size, cudaMemcpyDeviceToHost);
    printf("%f ms\n", milli);
    /* FIX: destroy the timing events (previously leaked). */
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    /* Free device memory */
    cudaFree(d_A);
    cudaFree(d_B);
    cudaFree(d_C);
    cudaFree(d_alpha);
    /* Free host memory */
    free(h_A);
    free(h_B);
    free(h_C);
    free(h_alpha);
    return 0;
} /* main */
|
11,211 | #include <iostream>
#include <iomanip>
#define HANDLE_ERROR(err) \
do { if (err != cudaSuccess) { printf("ERROR: %s\n", cudaGetErrorString(err)); exit(0);} } while (0)
// Element-wise res = v1 - v2 using a grid-stride loop, so any launch
// configuration covers a vector of any size.
__global__ void diffVector(double *v1, double *v2, double *res, int size)
{
    const int stride = gridDim.x * blockDim.x;
    for (int i = threadIdx.x + blockIdx.x * blockDim.x; i < size; i += stride) {
        res[i] = v1[i] - v2[i];
    }
}
int main()
{
    // Reads a size, then two size-element vectors from stdin; subtracts
    // them on the GPU and prints the difference in scientific notation.
    std::ios_base::sync_with_stdio(false);
    int size = 0;
    std::cin >> size;
    double *vec1 = new double[size];
    double *vec2 = new double[size];
    double *res = new double[size];
    for (int i = 0; i < size; ++i) {
        std::cin >> vec1[i];
    }
    for (int i = 0; i < size; ++i) {
        std::cin >> vec2[i];
    }
    double *dev1, *dev2, *devRes;
    HANDLE_ERROR(cudaMalloc((void **) &dev1, sizeof(double) * size));
    HANDLE_ERROR(cudaMalloc((void **) &dev2, sizeof(double) * size));
    HANDLE_ERROR(cudaMalloc((void **) &devRes, sizeof(double) * size));
    HANDLE_ERROR(cudaMemcpy(dev1, vec1, sizeof(double) * size, cudaMemcpyHostToDevice));
    HANDLE_ERROR(cudaMemcpy(dev2, vec2, sizeof(double) * size, cudaMemcpyHostToDevice));
    // Fixed 256x256 launch; the kernel's grid-stride loop covers any size.
    diffVector<<<256, 256>>>(dev1, dev2, devRes, size);
    HANDLE_ERROR(cudaGetLastError());
    // The blocking device-to-host copy also synchronizes with the kernel.
    HANDLE_ERROR(cudaMemcpy(res, devRes, sizeof(double) * size, cudaMemcpyDeviceToHost));
    std::cout.precision(10);
    std::cout.setf(std::ios::scientific);
    for (int i = 0; i < size; ++i) {
        std::cout << res[i] << ' ';
    }
    std::cout << '\n';
    HANDLE_ERROR(cudaFree(dev1));
    HANDLE_ERROR(cudaFree(dev2));
    HANDLE_ERROR(cudaFree(devRes));
    delete[] vec1;
    delete[] vec2;
    delete[] res;
}
|
11,212 | #include <stdio.h>
#include <time.h>
#include <cuda_runtime.h>
// CPU reference: C = A + B over an ny x nx row-major matrix.
void
sumMatrixOnHost(float *A, float *B, float *C, const int nx, const int ny)
{
    for (int row = 0; row < ny; row++)
    {
        const int base = row * nx;
        for (int col = 0; col < nx; col++)
        {
            C[base + col] = A[base + col] + B[base + col];
        }
    }
}
// One thread per element of an ny x nx row-major matrix (2-D launch):
// MatC = MatA + MatB. Out-of-range threads emit a debug trace.
__global__ void
sumMatrixOnGPU(float *MatA, float *MatB, float *MatC, int nx, int ny)
{
    unsigned int col = threadIdx.x + (blockIdx.x * blockDim.x);
    unsigned int row = threadIdx.y + (blockIdx.y * blockDim.y);
    if (col < nx && row < ny) {
        unsigned int idx = col + (row * nx);
        MatC[idx] = MatA[idx] + MatB[idx];
    } else {
        printf("WHAT: ix >= nx || iy >= ny\n");
    }
}
// Print the selected device's name and make it the current device.
void devConfig(const int devId) {
    cudaDeviceProp props;
    cudaGetDeviceProperties(&props, devId);
    printf("Using Device %d: %s\n", devId, props.name);
    cudaSetDevice(devId);
}
// Zero-fill the first nxy elements of the matrix buffer.
void
initalData(float *matrix, const int nxy)
{
    int i = 0;
    while (i < nxy) {
        matrix[i++] = 0;
    }
}
// Driver: sums two large matrices on host and GPU and compares the results.
// Completes all the TODO steps from the original skeleton.
int
main(int argc, char *argv[])
{
    devConfig(0);
    // 2^14 x 2^14 floats == 1 GiB per buffer.
    int nx = 1 << 14;
    int ny = 1 << 14;
    int nxy = nx * ny;
    size_t nBytes = (size_t)nxy * sizeof(float);
    printf("Matrix size: nx %d ny %d\n", nx, ny);
    float *h_A, *h_B, *hostRef, *gpuRef;
    h_A = (float *)malloc(nBytes);
    h_B = (float *)malloc(nBytes);
    hostRef = (float *)malloc(nBytes);
    gpuRef = (float *)malloc(nBytes);
    clock_t iStart = clock();
    initalData(h_A, nxy);
    initalData(h_B, nxy);
    clock_t iEnd = clock();
    printf("init elapsed %f s\n", (double)(iEnd - iStart) / CLOCKS_PER_SEC);
    memset(hostRef, 0, nBytes);
    memset(gpuRef, 0, nBytes);
    iStart = clock();
    sumMatrixOnHost(h_A, h_B, hostRef, nx, ny);
    iEnd = clock();
    printf("sumMatrixOnHost elapsed %f s\n", (double)(iEnd - iStart) / CLOCKS_PER_SEC);
    // Allocate device global memory.
    float *d_A, *d_B, *d_C;
    cudaMalloc((void **)&d_A, nBytes);
    cudaMalloc((void **)&d_B, nBytes);
    cudaMalloc((void **)&d_C, nBytes);
    // Transfer input data to the device.
    cudaMemcpy(d_A, h_A, nBytes, cudaMemcpyHostToDevice);
    cudaMemcpy(d_B, h_B, nBytes, cudaMemcpyHostToDevice);
    // One thread per element: 32x32 blocks, grid rounded up.
    dim3 block(32, 32);
    dim3 grid((nx + block.x - 1) / block.x, (ny + block.y - 1) / block.y);
    iStart = clock();
    sumMatrixOnGPU<<<grid, block>>>(d_A, d_B, d_C, nx, ny);
    cudaDeviceSynchronize();
    iEnd = clock();
    printf("sumMatrixOnGPU <<<(%d,%d),(%d,%d)>>> elapsed %f s\n",
           grid.x, grid.y, block.x, block.y,
           (double)(iEnd - iStart) / CLOCKS_PER_SEC);
    // Copy back the computed GPU data.
    cudaMemcpy(gpuRef, d_C, nBytes, cudaMemcpyDeviceToHost);
    // Compare host and GPU results with a small absolute tolerance.
    int match = 1;
    for (int i = 0; i < nxy; i++)
    {
        float diff = hostRef[i] - gpuRef[i];
        if (diff > 1.0e-5f || diff < -1.0e-5f)
        {
            match = 0;
            break;
        }
    }
    printf(match ? "Host and GPU results match.\n"
                 : "Host and GPU results DO NOT match.\n");
    // Host and device clean up.
    cudaFree(d_A);
    cudaFree(d_B);
    cudaFree(d_C);
    free(h_A);
    free(h_B);
    free(hostRef);
    free(gpuRef);
    return (0);
}
|
11,213 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <cuda_runtime.h>
#define MAX_ARRAY_SIZE 1000000
/*
 * Read comma-separated integers from ./inp.txt into a heap buffer.
 * Stores the element count in *size and returns the buffer (caller frees).
 * Exits with an error message if the file cannot be opened.
 */
int * read_data(int * size)
{
    FILE * fptr = fopen("./inp.txt", "r");
    if (!fptr) {
        printf("!! Error in opening data file \n");
        exit(1);
    }
    int cur_array_size = MAX_ARRAY_SIZE;
    int * buffer = (int *)malloc(cur_array_size * sizeof(int));
    int i = 0;
    // FIX: bound the index by the buffer capacity — the original wrote past
    // MAX_ARRAY_SIZE elements for oversized files. (The feof() loop is also
    // replaced by checking fscanf's return directly.)
    while (i < cur_array_size && fscanf(fptr, "%d,", &buffer[i]) == 1) {
        ++i;
    }
    fclose(fptr);
    *size = i;
    return buffer;
}
/*
 * Smallest power of two that is >= val (val == 0 yields 1).
 */
int round_up_pow2(int val) {
    if (val == 0) return 1;
    int pow2 = 1;
    while (pow2 < val) {
        pow2 *= 2;
    }
    return pow2;
}
/*
 * Threads-per-block heuristic: roughly sqrt(size), rounded up to a power of
 * two, so a full reduction over the array finishes in two kernel passes.
 * Assumes size <= 1,000,000, keeping the result <= 1024 (the device's
 * per-block thread limit).
 */
int calc_num_thread(int size) {
    const int approx = (int)sqrt((double)size);
    return round_up_pow2(approx);
}
/*
 * GPU kernel for part a: reduction, getting the min value in a sub-array.
 * One block reduces blockDim.x elements; the block minimum is written to
 * d_out[blockIdx.x]. blockDim.x must be a power of two.
 */
__global__ void shmem_reduce_kernel(int * d_out, const int * d_in, const int size)
{
    // sdata is allocated in the kernel call: 3rd arg to <<<b, t, shmem>>>
    extern __shared__ int sdata[];
    int myId = threadIdx.x + blockDim.x * blockIdx.x;
    int tid = threadIdx.x;
    // FIX: threads past `size` re-load d_in[0] instead of reading out of
    // bounds (the original read d_in[myId] unguarded). A duplicated value
    // cannot change the minimum, and every sdata slot is now initialized,
    // so the halving loop below needs no extra size guard. Assumes size >= 1.
    sdata[tid] = d_in[myId < size ? myId : 0];
    __syncthreads(); // make sure entire block is loaded!
    // Tree reduction in shared memory, keeping the smaller value.
    for (unsigned int s = blockDim.x / 2; s > 0; s >>= 1)
    {
        if (tid < s)
        {
            if (sdata[tid] > sdata[tid + s])
                sdata[tid] = sdata[tid + s];
        }
        __syncthreads(); // all compares at one stage done before the next
    }
    // only thread 0 writes result for this block back to global mem
    if (tid == 0)
    {
        d_out[blockIdx.x] = sdata[0];
    }
}
/*
 * Two-pass minimum reduction of d_in (the kernel keeps the smaller value,
 * despite the historical "reduce/add" naming). Assumes size is not greater
 * than maxThreadsPerBlock^2, so two passes always suffice.
 */
void reduce(int * d_out, int * d_intermediate, int * d_in, int size)
{
    const int maxThreadsPerBlock = calc_num_thread(size);
    int threads = maxThreadsPerBlock;
    int blocks = (size + maxThreadsPerBlock - 1) / maxThreadsPerBlock;
    shmem_reduce_kernel<<<blocks, threads, threads * sizeof(int)>>>(d_intermediate, d_in, size);
    // Second pass: one block reduces the per-block partial minima. The
    // thread count must be a power of two for the halving loop.
    // FIX: size the shared memory by the padded (power-of-two) thread count;
    // the original passed `threads * sizeof(int)` while launching
    // round_up_pow2(threads) threads, so the padded threads wrote past the
    // end of the shared array.
    int threads2 = round_up_pow2(blocks);
    shmem_reduce_kernel<<<1, threads2, threads2 * sizeof(int)>>>(d_out, d_intermediate, blocks);
}
/*
 * GPU kernel for part b: d_out[i] = last decimal digit of d_in[i], computed
 * in parallel (one thread per element; sign follows C's % rule for
 * negative inputs).
 */
__global__ void last_digit_kernel(int * d_out, const int * d_in, const int size)
{
    const int gid = threadIdx.x + blockDim.x * blockIdx.x;
    if (gid >= size)
        return;
    d_out[gid] = d_in[gid] % 10;
}
int main(void)
{
    // Part a: two-pass reduction to find the minimum of ./inp.txt,
    //         written to ./q1a.txt.
    // Part b: last decimal digit of every element, written to ./q1b.txt.
    int deviceCount;
    cudaGetDeviceCount(&deviceCount);
    if (deviceCount == 0) {
        printf("!! Error: no devices supporting CUDA.\n");
        exit(EXIT_FAILURE);
    }
    int dev = 0;
    cudaSetDevice(dev);
    // data array on host
    int array_size = 0;
    int * h_in = read_data(&array_size);
    int array_byte = array_size * sizeof(int);
    /*
     * part a
     */
    // declare GPU memory pointers
    int * d_in, * d_intermediate, * d_out;
    // allocate GPU memory
    cudaMalloc((void **) &d_in, array_byte);
    cudaMalloc((void **) &d_intermediate, array_byte);
    cudaMalloc((void **) &d_out, sizeof(int));
    // transfer the input array to the GPU
    cudaMemcpy(d_in, h_in, array_byte, cudaMemcpyHostToDevice);
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    // launch and time the two-pass min reduction
    cudaEventRecord(start, 0);
    reduce(d_out, d_intermediate, d_in, array_size);
    cudaEventRecord(stop, 0);
    cudaEventSynchronize(stop);
    float elapsedTime;
    cudaEventElapsedTime(&elapsedTime, start, stop);
    // copy back the min from GPU (blocking copy also synchronizes)
    int h_out;
    cudaMemcpy(&h_out, d_out, sizeof(int), cudaMemcpyDeviceToHost);
    // output the result into file
    FILE * fptr_a = fopen("./q1a.txt", "w");
    if (!fptr_a) {
        printf("!! Error in opening output file \n");
        exit(1);
    }
    fprintf(fptr_a, "%d", h_out);
    fclose(fptr_a);
    // free GPU memory allocation
    // reuse d_in for the input array of part b
    // reuse d_intermediate for the output array of part b
    cudaFree(d_out);
    /*
     * part b
     */
    d_out = d_intermediate;
    int numThreadPerBlock = calc_num_thread(array_size);
    int numBlock = (array_size + numThreadPerBlock - 1) / numThreadPerBlock;
    // launch the kernel
    cudaEventRecord(start, 0);
    last_digit_kernel<<<numBlock, numThreadPerBlock>>>(d_out, d_in, array_size);
    cudaEventRecord(stop, 0);
    cudaEventSynchronize(stop);
    cudaEventElapsedTime(&elapsedTime, start, stop);
    // copy back the result array from GPU
    int * h_out_array = (int *)malloc(array_byte);
    cudaMemcpy(h_out_array, d_out, array_byte, cudaMemcpyDeviceToHost);
    // output the result array into file, comma-separated
    FILE * fptr_b = fopen("./q1b.txt", "w");
    if (!fptr_b) {
        printf("!! Error in opening output file \n");
        exit(1);
    }
    for (int i = 0; i < array_size; ++i) {
        fprintf(fptr_b, "%d", h_out_array[i]);
        if (i < array_size - 1)
            fprintf(fptr_b, ", ");
    }
    fclose(fptr_b);
    // free CPU memory allocation
    free(h_in);
    free(h_out_array);
    // free GPU memory allocation
    cudaFree(d_in);
    cudaFree(d_intermediate);
    return 0;
}
|
11,214 |
#include <cstdio>
// One neural-network layer whose parameters and activations live in GPU
// global memory; host-side staging happens only in the constructor and the
// (de)serialization helpers.
class Layer {
    public:
    int M, N, O;       // M, N size the weight matrix (N x M) and bias (N);
                       // O sizes the activation buffers — exact roles
                       // presumably input width / neuron count / output
                       // length; confirm against the kernels that use them
    float *output;     // device buffer, O floats: layer activations
    float *preact;     // device buffer, O floats: pre-activation values
    float *bias;       // device buffer, N floats
    float *weight;     // device buffer, M*N floats
    float *d_output;   // device buffer, O floats: gradient w.r.t. output
    float *d_preact;   // device buffer, O floats: gradient w.r.t. preact
    float *d_weight;   // device buffer, M*N floats: gradient w.r.t. weight
    Layer(int M, int N, int O);
    ~Layer();
    void setOutput(float *data);   // upload one sample into `output`
    void clear();                  // zero forward buffers
    void bp_clear();               // zero gradient buffers
    void save_to_file(FILE*);
    void read_from_file(FILE*);
};
// Constructor: allocates all device buffers and initializes bias/weights
// with uniform random values in (-0.5, 0.5].
Layer::Layer(int M, int N, int O)
{
    this->M = M;
    this->N = N;
    this->O = O;
    // FIX: host staging buffers are heap-allocated. The original used
    // variable-length stack arrays (float h_weight[N][M]) — non-standard
    // C++ that overflows the stack for large layers.
    float *h_bias = new float[N];
    float *h_weight = new float[(size_t)N * M];
    output = NULL;
    preact = NULL;
    bias = NULL;
    weight = NULL;
    for (int i = 0; i < N; ++i) {
        h_bias[i] = 0.5f - float(rand()) / float(RAND_MAX);
        /*h_bias[i] = 0.0f;*/
        for (int j = 0; j < M; ++j) {
            h_weight[(size_t)i * M + j] = 0.5f - float(rand()) / float(RAND_MAX);
            /*h_weight[i][j] = 0.05f;*/
        }
    }
    cudaMalloc(&output, sizeof(float) * O);
    cudaMalloc(&preact, sizeof(float) * O);
    cudaMalloc(&bias, sizeof(float) * N);
    cudaMalloc(&weight, sizeof(float) * M * N);
    cudaMalloc(&d_output, sizeof(float) * O);
    cudaMalloc(&d_preact, sizeof(float) * O);
    cudaMalloc(&d_weight, sizeof(float) * M * N);
    cudaMemcpy(bias, h_bias, sizeof(float) * N, cudaMemcpyHostToDevice);
    cudaMemcpy(weight, h_weight, sizeof(float) * M * N, cudaMemcpyHostToDevice);
    delete[] h_bias;
    delete[] h_weight;
}
// Destructor: releases every device buffer allocated in the constructor.
Layer::~Layer()
{
    cudaFree(output);
    cudaFree(preact);
    cudaFree(bias);
    cudaFree(weight);
    cudaFree(d_output);
    cudaFree(d_preact);
    cudaFree(d_weight);
}
// Send data one row from dataset to the GPU: uploads O floats from host
// `data` into the device `output` buffer.
void Layer::setOutput(float *data)
{
    cudaMemcpy(output, data, sizeof(float) * O, cudaMemcpyHostToDevice);
}
// Reset GPU memory between iterations: zero the forward-pass buffers
// (activations and pre-activations).
void Layer::clear()
{
    cudaMemset(output, 0x00, sizeof(float) * O);
    cudaMemset(preact, 0x00, sizeof(float) * O);
}
// Zero the backward-pass (gradient) buffers between iterations.
void Layer::bp_clear()
{
    cudaMemset(d_output, 0x00, sizeof(float) * O);
    cudaMemset(d_preact, 0x00, sizeof(float) * O);
    cudaMemset(d_weight, 0x00, sizeof(float) * M * N);
}
// Serializes the layer (M, N, bias, weights) to `model`.
// FIX: the staging buffer is sized to the larger blob instead of a fixed
// 100000-byte stack array, which overflowed once M*N*sizeof(float)
// exceeded it.
void Layer::save_to_file(FILE *model)
{
    size_t biasBytes = sizeof(float) * N;
    size_t weightBytes = sizeof(float) * M * N;
    char *buffer = new char[biasBytes > weightBytes ? biasBytes : weightBytes];
    fwrite((char*)&M, sizeof(int), 1, model);
    fwrite((char*)&N, sizeof(int), 1, model);
    cudaMemcpy(buffer, (char*)bias, biasBytes, cudaMemcpyDeviceToHost);
    fwrite(buffer, biasBytes, 1, model);
    cudaMemcpy(buffer, (char*)weight, weightBytes, cudaMemcpyDeviceToHost);
    fwrite(buffer, weightBytes, 1, model);
    delete[] buffer;
}
// Deserializes the layer (M, N, bias, weights) from `model`; the device
// buffers must already be large enough for the stored M and N.
// FIX: the staging buffer is sized from the (just-read) dimensions instead
// of a fixed 100000-byte stack array, which overflowed for large layers.
void Layer::read_from_file(FILE *model)
{
    fread((char*)&M, sizeof(int), 1, model);
    fread((char*)&N, sizeof(int), 1, model);
    size_t biasBytes = sizeof(float) * N;
    size_t weightBytes = sizeof(float) * M * N;
    char *buffer = new char[biasBytes > weightBytes ? biasBytes : weightBytes];
    fread(buffer, biasBytes, 1, model);
    cudaMemcpy(bias, (float*)buffer, biasBytes, cudaMemcpyHostToDevice);
    fread(buffer, weightBytes, 1, model);
    cudaMemcpy(weight, (float*)buffer, weightBytes, cudaMemcpyHostToDevice);
    delete[] buffer;
}
|
11,215 | #include <stdio.h>
// Square matrix multiply executed cooperatively by one warp.
// `input` layout (as floats): [0] = matrix width W (stored as a float),
// then A (W*W values), then B (W*W values); the product C is written
// starting at A + 2*W*W. Rows are distributed round-robin over the 32
// lanes of the calling warp.
__device__ void MatrixMultiply(void *input)
{
    const int laneCount = 32;
    const int lane = threadIdx.x % laneCount;
    float *data = (float*)input;
    const int width = data[0];          // float -> int, as in the original
    float *A = data + 1;
    float *B = A + width * width;
    float *C = A + 2 * width * width;
    for (unsigned int row = lane; row < width; row += 32)
    {
        for (unsigned int col = 0; col < width; col++) {
            float acc = 0;
            for (unsigned int k = 0; k < width; k++) {
                acc += A[row * width + k] * B[k * width + col];
            }
            C[row * width + col] = acc;
        }
    }
}
|
11,216 | // EPIMORPH library file
// seed width transformation functions fot the seed_wca seed
__device__ float wt_id(float w){
	// Identity seed-width transform: passes the width through unchanged.
	// FULL, LIVE
	return w;
}
__device__ float wt_inv(float w){
	// Inverted transform: mirrors the width about 0.5 (w -> 1 - w).
	// FULL, LIVE
	float mirrored = 1.0f - w;
	return mirrored;
}
__device__ float wt_circular(float w){
	// Circular (quarter-arc) transform: sqrt(1 - (1-w)^2).
	// FULL, LIVE
	float d = 1.0f - w;
	return sqrtf(1.0f - d * d);
}
__device__ float wt_inv_circular(float w){
	// Inverted circular transform: 1 - sqrt(1 - (1-w)^2).
	// FULL, LIVE
	float d = 1.0f - w;
	return 1.0f - sqrtf(1.0f - d * d);
}
|
11,217 | //
// Peaks.cu
//
//
// Created by Hemant Sharma on 2015/07/04.
//
#include <stdio.h>
#include <sys/time.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <unistd.h>
#define deg2rad 0.0174532925199433
#define rad2deg 57.2957795130823
#define MAX_LINE_LENGTH 10240
#define MAX_N_RINGS 5000
#define MAX_N_EVALS 3000
#define MAX_N_OVERLAPS 20
#define nOverlapsMaxPerImage 20000
#define CalcNorm3(x,y,z) sqrt((x)*(x) + (y)*(y) + (z)*(z))
#define CalcNorm2(x,y) sqrt((x)*(x) + (y)*(y))
#define CHECK(call){ \
const cudaError_t error = call; \
if (error != cudaSuccess){ \
printf("Error: %s:%d, ", __FILE__, __LINE__); \
printf("code:%d, reason: %s\n", error, cudaGetErrorString(error)); \
exit(-10*error); \
} \
}
typedef uint16_t pixelvalue;
// Degree-argument wrappers around the standard radian trigonometric
// functions (and degree-result wrappers around their inverses).
static inline double sind(double x){ return sin(x * deg2rad); }
static inline double cosd(double x){ return cos(x * deg2rad); }
static inline double tand(double x){ return tan(x * deg2rad); }
static inline double asind(double x){ return rad2deg * asin(x); }
static inline double acosd(double x){ return rad2deg * acos(x); }
static inline double atand(double x){ return rad2deg * atan(x); }
//BEGIN NLDRMD FUNCTION scratch space: 3n+(n+1)*(n+1)
// Nelder-Mead simplex minimisation (classic ASA047 "nelmin" routine adapted
// for device code) with box constraints: every candidate vertex is clamped
// componentwise into [lb, ub] before evaluation.
//
// fn      : objective function, called as fn(n, x, data)
// n       : number of variables
// start   : in: initial guess; also used as workspace across restarts
// xmin    : out: best point found
// lb, ub  : per-variable lower / upper bounds
// scratch : caller-provided workspace of at least 3n + (n+1)*(n+1) doubles
//           (see the "scratch space" note above this function)
// ynewlo  : out: objective value at xmin
// reqmin  : convergence tolerance on the variance of the simplex values
// step    : initial step sizes that shape the starting simplex
// konvge  : convergence is tested every konvge iterations
// kcount  : maximum number of objective evaluations
// icount  : out: objective evaluations actually used
// numres  : out: number of restarts performed
// ifault  : out: 0 = converged, 1 = invalid input, 2 = kcount exceeded
// data_t  : opaque pointer forwarded untouched to fn
__device__ void nelmin ( double fn ( int n_fun, double *x, void *data ),
int n, double *start, double *xmin,
double *lb, double *ub, double *scratch, double *ynewlo,
double reqmin, double *step, int konvge, int kcount,
int *icount, int *numres, int *ifault, void *data_t)
{
double ccoeff = 0.5;
double del;
double dn;
double dnn;
double ecoeff = 2.0;
double eps = 0.001;
int i;
int ihi;
int ilo;
int j;
int jcount;
int l;
int nn;
double *p;
double *p2star;
double *pbar;
double *pstar;
double rcoeff = 1.0;
double rq;
double x;
double *y;
double y2star;
double ylo;
double ystar;
double z;
/*
Check the input parameters.
*/
if ( reqmin <= 0.0 )
{
*ifault = 1;
return;
}
if ( n < 1 )
{
*ifault = 1;
return;
}
if ( konvge < 1 )
{
*ifault = 1;
return;
}
// Carve the caller's scratch buffer into the simplex matrix p (n x (n+1),
// column-major: vertex j lives at p[. + j*n]), three candidate points, and
// the vector y of objective values at the n+1 vertices.
p = scratch;
pstar = p + n*(n+1);
p2star = pstar + n;
pbar = p2star + n;
y = pbar + n;
*icount = 0;
*numres = 0;
jcount = konvge;
dn = ( double ) ( n );
nn = n + 1;
dnn = ( double ) ( nn );
del = 1.0;
rq = reqmin * dn;
/*
Initial or restarted loop.
*/
for ( ; ; )
{
for ( i = 0; i < n; i++ )
{
p[i+n*n] = start[i];
}
y[n] = fn ( n, start, data_t );
*icount = *icount + 1;
for ( j = 0; j < n; j++ )
{
x = start[j];
start[j] = start[j] + step[j] * del;
if (start[j] < lb[j]) start[j] = lb[j]; // Constraints
if (start[j] > ub[j]) start[j] = ub[j]; // Constraints
for ( i = 0; i < n; i++ )
{
p[i+j*n] = start[i];
}
y[j] = fn ( n, start, data_t );
*icount = *icount + 1;
start[j] = x;
}
/*
The simplex construction is complete.
Find highest and lowest Y values. YNEWLO = Y(IHI) indicates
the vertex of the simplex to be replaced.
*/
ylo = y[0];
ilo = 0;
for ( i = 1; i < nn; i++ )
{
if ( y[i] < ylo )
{
ylo = y[i];
ilo = i;
}
}
/*
Inner loop.
*/
for ( ; ; )
{
if ( kcount <= *icount )
{
break;
}
*ynewlo = y[0];
ihi = 0;
for ( i = 1; i < nn; i++ )
{
if ( *ynewlo < y[i] )
{
*ynewlo = y[i];
ihi = i;
}
}
/*
Calculate PBAR, the centroid of the simplex vertices
excepting the vertex with Y value YNEWLO.
*/
for ( i = 0; i < n; i++ )
{
z = 0.0;
for ( j = 0; j < nn; j++ )
{
z = z + p[i+j*n];
}
z = z - p[i+ihi*n];
pbar[i] = z / dn;
}
/*
Reflection through the centroid.
*/
for ( i = 0; i < n; i++ )
{
pstar[i] = pbar[i] + rcoeff * ( pbar[i] - p[i+ihi*n] );
if (pstar[i] < lb[i]) pstar[i] = lb[i]; // Constraints
if (pstar[i] > ub[i]) pstar[i] = ub[i]; // Constraints
}
ystar = fn ( n, pstar, data_t );
*icount = *icount + 1;
/*
Successful reflection, so extension.
*/
if ( ystar < ylo )
{
for ( i = 0; i < n; i++ )
{
p2star[i] = pbar[i] + ecoeff * ( pstar[i] - pbar[i] );
if (p2star[i] < lb[i]) p2star[i] = lb[i]; // Constraints
if (p2star[i] > ub[i]) p2star[i] = ub[i]; // Constraints
}
y2star = fn ( n, p2star, data_t );
*icount = *icount + 1;
/*
Check extension.
*/
if ( ystar < y2star )
{
for ( i = 0; i < n; i++ )
{
p[i+ihi*n] = pstar[i];
}
y[ihi] = ystar;
}
/*
Retain extension or contraction.
*/
else
{
for ( i = 0; i < n; i++ )
{
p[i+ihi*n] = p2star[i];
}
y[ihi] = y2star;
}
}
/*
No extension.
*/
else
{
l = 0;
for ( i = 0; i < nn; i++ )
{
if ( ystar < y[i] )
{
l = l + 1;
}
}
if ( 1 < l )
{
for ( i = 0; i < n; i++ )
{
p[i+ihi*n] = pstar[i];
}
y[ihi] = ystar;
}
/*
Contraction on the Y(IHI) side of the centroid.
*/
else if ( l == 0 )
{
for ( i = 0; i < n; i++ )
{
p2star[i] = pbar[i] + ccoeff * ( p[i+ihi*n] - pbar[i] );
if (p2star[i] < lb[i]) p2star[i] = lb[i]; // Constraints
if (p2star[i] > ub[i]) p2star[i] = ub[i]; // Constraints
}
y2star = fn ( n, p2star, data_t );
*icount = *icount + 1;
/*
Contract the whole simplex.
*/
if ( y[ihi] < y2star )
{
for ( j = 0; j < nn; j++ )
{
for ( i = 0; i < n; i++ )
{
p[i+j*n] = ( p[i+j*n] + p[i+ilo*n] ) * 0.5;
xmin[i] = p[i+j*n];
if (xmin[i] < lb[i]) xmin[i] = lb[i]; // Constraints
if (xmin[i] > ub[i]) xmin[i] = ub[i]; // Constraints
}
y[j] = fn ( n, xmin, data_t );
*icount = *icount + 1;
}
ylo = y[0];
ilo = 0;
for ( i = 1; i < nn; i++ )
{
if ( y[i] < ylo )
{
ylo = y[i];
ilo = i;
}
}
continue;
}
/*
Retain contraction.
*/
else
{
for ( i = 0; i < n; i++ )
{
p[i+ihi*n] = p2star[i];
}
y[ihi] = y2star;
}
}
/*
Contraction on the reflection side of the centroid.
*/
else if ( l == 1 )
{
for ( i = 0; i < n; i++ )
{
p2star[i] = pbar[i] + ccoeff * ( pstar[i] - pbar[i] );
if (p2star[i] < lb[i]) p2star[i] = lb[i]; // Constraints
if (p2star[i] > ub[i]) p2star[i] = ub[i]; // Constraints
}
y2star = fn ( n, p2star, data_t );
*icount = *icount + 1;
/*
Retain reflection?
*/
if ( y2star <= ystar )
{
for ( i = 0; i < n; i++ )
{
p[i+ihi*n] = p2star[i];
}
y[ihi] = y2star;
}
else
{
for ( i = 0; i < n; i++ )
{
p[i+ihi*n] = pstar[i];
}
y[ihi] = ystar;
}
}
}
/*
Check if YLO improved.
*/
if ( y[ihi] < ylo )
{
ylo = y[ihi];
ilo = ihi;
}
jcount = jcount - 1;
if ( 0 < jcount )
{
continue;
}
/*
Check to see if minimum reached.
*/
if ( *icount <= kcount )
{
jcount = konvge;
z = 0.0;
for ( i = 0; i < nn; i++ )
{
z = z + y[i];
}
x = z / dnn;
z = 0.0;
for ( i = 0; i < nn; i++ )
{
z = z + pow ( y[i] - x, 2 );
}
// Converged when the variance of the vertex values drops below reqmin*n.
if ( z <= rq )
{
break;
}
}
}
/*
Factorial tests to check that YNEWLO is a local minimum.
*/
for ( i = 0; i < n; i++ )
{
xmin[i] = p[i+ilo*n];
}
*ynewlo = y[ilo];
if ( kcount < *icount )
{
*ifault = 2;
break;
}
*ifault = 0;
for ( i = 0; i < n; i++ )
{
del = step[i] * eps;
xmin[i] = xmin[i] + del;
if (xmin[i] < lb[i]) xmin[i] = lb[i]; // Constraints
if (xmin[i] > ub[i]) xmin[i] = ub[i]; // Constraints
z = fn ( n, xmin, data_t );
*icount = *icount + 1;
if ( z < *ynewlo )
{
*ifault = 2;
break;
}
xmin[i] = xmin[i] - del - del;
if (xmin[i] < lb[i]) xmin[i] = lb[i]; // Constraints
if (xmin[i] > ub[i]) xmin[i] = ub[i]; // Constraints
z = fn ( n, xmin, data_t );
*icount = *icount + 1;
if ( z < *ynewlo )
{
*ifault = 2;
break;
}
xmin[i] = xmin[i] + del;
}
if ( *ifault == 0 )
{
break;
}
/*
Restart the procedure.
*/
for ( i = 0; i < n; i++ )
{
start[i] = xmin[i];
}
del = eps;
*numres = *numres + 1;
}
return;
}
//END NLDRMD FUNCTION
// Allocate an nrows x ncols matrix of pixelvalue as an array of row pointers.
// Returns NULL on allocation failure. Fix: the original leaked the row-pointer
// array and any already-allocated rows when a later row malloc failed.
static inline pixelvalue** allocMatrixPX(int nrows, int ncols)
{
	pixelvalue** arr;
	int i;
	arr = (pixelvalue **) malloc(nrows * sizeof(*arr));
	if (arr == NULL ) {
		return NULL;
	}
	for ( i = 0 ; i < nrows ; i++) {
		arr[i] = (pixelvalue *) malloc(ncols * sizeof(*arr[i]));
		if (arr[i] == NULL ) {
			// Release everything allocated so far before reporting failure.
			while (--i >= 0) free(arr[i]);
			free(arr);
			return NULL;
		}
	}
	return arr;
}
// Allocate an nrows x ncols matrix of int as an array of row pointers.
// Returns NULL on allocation failure. Fix: the original leaked the row-pointer
// array and any already-allocated rows when a later row malloc failed.
static inline int** allocMatrixInt(int nrows, int ncols)
{
	int** arr;
	int i;
	arr = (int **) malloc(nrows * sizeof(*arr));
	if (arr == NULL ) {
		return NULL;
	}
	for ( i = 0 ; i < nrows ; i++) {
		arr[i] = (int *) malloc(ncols * sizeof(*arr[i]));
		if (arr[i] == NULL ) {
			// Release everything allocated so far before reporting failure.
			while (--i >= 0) free(arr[i]);
			free(arr);
			return NULL;
		}
	}
	return arr;
}
// Free a matrix allocated by allocMatrixPX (row pointers, then the array).
// Fix: tolerate a NULL matrix so callers can pass a failed allocation safely.
static inline void FreeMemMatrixPx(pixelvalue **mat,int nrows)
{
	int r;
	if (mat == NULL) return;
	for ( r = 0 ; r < nrows ; r++) {
		free(mat[r]);
	}
	free(mat);
}
// Apply NrTransOpt geometric transforms (0 = identity, 1 = flip left/right,
// 2 = flip top/bottom, 3 = transpose) in sequence to the square NrPixels x
// NrPixels image, in place.
// Fixes: when NrTransOpt == 0 the original copied the never-written
// ImageTemp2 (uninitialized memory) back into Image; the final copy now reads
// ImageTemp1, which always holds the latest result. Allocation failures are
// now checked, and unused locals were removed.
static inline void DoImageTransformations (int NrTransOpt, int TransOpt[10], pixelvalue *Image, int NrPixels)
{
	int k,l,m;
	pixelvalue **ImageTemp1, **ImageTemp2;
	ImageTemp1 = allocMatrixPX(NrPixels,NrPixels);
	ImageTemp2 = allocMatrixPX(NrPixels,NrPixels);
	if (ImageTemp1 == NULL || ImageTemp2 == NULL){
		// Out of memory: leave Image untouched.
		if (ImageTemp1 != NULL) FreeMemMatrixPx(ImageTemp1,NrPixels);
		if (ImageTemp2 != NULL) FreeMemMatrixPx(ImageTemp2,NrPixels);
		return;
	}
	for (k=0;k<NrPixels;k++) {
		for (l=0;l<NrPixels;l++) {
			ImageTemp1[k][l] = Image[(NrPixels*k)+l];
		}
	}
	// ImageTemp1 always holds the current image; each transform writes into
	// ImageTemp2 and the result is copied back into ImageTemp1.
	for (k=0;k<NrTransOpt;k++) {
		if (TransOpt[k] == 1){
			for (l=0;l<NrPixels;l++) for (m=0;m<NrPixels;m++) ImageTemp2[l][m] = ImageTemp1[l][NrPixels-m-1]; //Inverting Y.
		} else if (TransOpt[k] == 2){
			for (l=0;l<NrPixels;l++) for (m=0;m<NrPixels;m++) ImageTemp2[l][m] = ImageTemp1[NrPixels-l-1][m]; //Inverting Z.
		} else if (TransOpt[k] == 3){
			for (l=0;l<NrPixels;l++) for (m=0;m<NrPixels;m++) ImageTemp2[l][m] = ImageTemp1[m][l];
		} else if (TransOpt[k] == 0){
			for (l=0;l<NrPixels;l++) for (m=0;m<NrPixels;m++) ImageTemp2[l][m] = ImageTemp1[l][m];
		}
		for (l=0;l<NrPixels;l++) for (m=0;m<NrPixels;m++) ImageTemp1[l][m] = ImageTemp2[l][m];
	}
	for (k=0;k<NrPixels;k++) for (l=0;l<NrPixels;l++) Image[(NrPixels*k)+l] = ImageTemp1[k][l];
	FreeMemMatrixPx(ImageTemp1,NrPixels);
	FreeMemMatrixPx(ImageTemp2,NrPixels);
}
// Out-of-place transpose of an n x n row-major matrix: y = x^T.
static inline void Transposer (double *x, int n, double *y)
{
	int row, col;
	for (row = 0; row < n; row++){
		for (col = 0; col < n; col++){
			y[row*n + col] = x[col*n + row];
		}
	}
}
// 8-connected neighbourhood offsets (the 4 edge neighbours first, then the
// 4 diagonals); indexed in lock-step by DepthFirstSearch and FindRegionalMaxima.
const int dx[] = {+1, 0, -1, 0, +1, -1, +1, -1};
const int dy[] = { 0, +1, 0, -1, +1, +1, -1, -1};
// Recursive 8-connected flood fill starting at (x, y): labels every reachable
// foreground pixel (BoolImage == 1) with current_label in ConnectedComponents
// and appends its flattened index (x*NrPixels + y) to Positions[current_label],
// with PositionTrackers[current_label] counting the pixels stored so far.
// NOTE(review): recursion depth grows with region size; a very large connected
// region could overflow the stack — confirm expected region sizes stay modest.
static inline void DepthFirstSearch(int x, int y, int current_label, int NrPixels, int **BoolImage, int **ConnectedComponents,int **Positions, int *PositionTrackers)
{
if (x < 0 || x == NrPixels) return; // outside the image
if (y < 0 || y == NrPixels) return;
if ((ConnectedComponents[x][y]!=0)||(BoolImage[x][y]==0)) return; // visited or background
ConnectedComponents[x][y] = current_label;
Positions[current_label][PositionTrackers[current_label]] = (x*NrPixels) + y;
PositionTrackers[current_label] += 1;
int direction;
// Recurse into all 8 neighbours (dx/dy tables above).
for (direction=0;direction<8;++direction){
DepthFirstSearch(x + dx[direction], y + dy[direction], current_label, NrPixels, BoolImage, ConnectedComponents,Positions,PositionTrackers);
}
}
// Label all 8-connected foreground regions of BoolImage via flood fill.
// ConnectedComponents receives a label per pixel (0 = background), Positions /
// PositionTrackers receive per-label pixel lists. Returns the number of
// components found (labels run 1..count).
static inline int FindConnectedComponents(int **BoolImage, int NrPixels, int **ConnectedComponents, int **Positions, int *PositionTrackers){
	int row, col;
	// Clear any stale labels before filling.
	for (row = 0; row < NrPixels; row++){
		for (col = 0; col < NrPixels; col++){
			ConnectedComponents[row][col] = 0;
		}
	}
	int component = 0;
	for (row = 0; row < NrPixels; ++row) {
		for (col = 0; col < NrPixels; ++col) {
			if (ConnectedComponents[row][col] == 0 && BoolImage[row][col] == 1){
				component++;
				DepthFirstSearch(row, col, component, NrPixels, BoolImage, ConnectedComponents, Positions, PositionTrackers);
			}
		}
	}
	return component;
}
// Find regional maxima within one connected region: a pixel is a maximum if
// no 8-neighbour inside the region has a strictly larger intensity. Fills
// MaximaPositions / MaximaValues and returns the number of maxima (at least 1:
// if none is found the region's middle pixel is used as a fallback).
// *IsSaturated is set to 1 if ANY pixel exceeds IntSat.
// Fix: the original overwrote *IsSaturated on every pixel, so the flag only
// reflected the last pixel visited; it is now sticky for the whole region.
// Unused locals (l, zMatch) were removed.
static inline int FindRegionalMaxima(double *z,int **PixelPositions,
int NrPixelsThisRegion,int **MaximaPositions,double *MaximaValues,
int *IsSaturated, double IntSat)
{
	int nPeaks = 0;
	int i,j,k;
	double zThis;
	int xThis, yThis;
	int xNext, yNext;
	int isRegionalMax;
	*IsSaturated = 0;
	for (i=0;i<NrPixelsThisRegion;i++){
		isRegionalMax = 1;
		zThis = z[i];
		if (zThis > IntSat) {
			*IsSaturated = 1; // sticky: never cleared once set
		}
		xThis = PixelPositions[i][0];
		yThis = PixelPositions[i][1];
		for (j=0;j<8;j++){
			xNext = xThis + dx[j];
			yNext = yThis + dy[j];
			for (k=0;k<NrPixelsThisRegion;k++){
				if (xNext == PixelPositions[k][0] && yNext == PixelPositions[k][1] && z[k] > (zThis)){
					isRegionalMax = 0;
				}
			}
		}
		if (isRegionalMax == 1){
			MaximaPositions[nPeaks][0] = xThis;
			MaximaPositions[nPeaks][1] = yThis;
			MaximaValues[nPeaks] = zThis;
			nPeaks++;
		}
	}
	if (nPeaks==0){
		// Degenerate region (e.g. a plateau): fall back to the middle pixel.
		MaximaPositions[nPeaks][0] = PixelPositions[NrPixelsThisRegion/2][0];
		MaximaPositions[nPeaks][1] = PixelPositions[NrPixelsThisRegion/2][1];
		MaximaValues[nPeaks] = z[NrPixelsThisRegion/2];
		nPeaks=1;
	}
	return nPeaks;
}
// Payload threaded through nelmin's opaque data pointer to problem_function.
struct func_data{
int NrPixels; // number of pixels in the region being fitted
double *RsEtasZ; // per-pixel triplets: (R, Eta, measured intensity)
double *results; // per-region results buffer (assigned in Fit2DPeaks; not read by problem_function)
};
// Convert NrElements polar detector coordinates (R, Eta in degrees) to
// Cartesian (Y, Z): Y = -R*sin(Eta), Z = R*cos(Eta).
__device__ void YZ4mREta(int NrElements, double *R, double *Eta, double *Y, double *Z){
	for (int idx = 0; idx < NrElements; idx++){
		double etaRad = Eta[idx] * deg2rad;
		Y[idx] = -R[idx] * sin(etaRad);
		Z[idx] = R[idx] * cos(etaRad);
	}
}
// Eta angle (degrees) of the point (y, z), measured from the +z axis, with
// the sign convention that y > 0 gives a negative angle.
// Fix: the original computed acos(0/0) = NaN at the origin; return 0 there.
__device__ double CalcEtaAngle(double y, double z){
	double r = sqrt(y*y + z*z);
	if (r == 0) return 0; // degenerate point at the beam center
	double alph = rad2deg*acos(z/r);
	if (y>0) alph = -alph;
	return alph;
}
// One thread per pixel (1D grid, bounds-checked against NrPixels): evaluate
// the summed pseudo-Voigt model of all nPeaks peaks at pixel i and store it
// in result[i]. x packs 8 parameters per peak after the shared background:
// p[1]=Imax, p[2]=R, p[3]=Eta, p[4]=mu, p[5]/p[6]=Gaussian/Lorentzian R
// widths, p[7]/p[8]=Gaussian/Lorentzian Eta widths.
__global__ void CalcOnePixel(const double *x, double *REtaZ, int nPeaks, int NrPixels, double *result){
	int i = blockIdx.x * blockDim.x + threadIdx.x;
	if (i >= NrPixels) return;
	double R = REtaZ[i*3];
	double Eta = REtaZ[i*3+1];
	double acc = 0;
	int j;
	for (j=0;j<nPeaks;j++){
		const double *p = x + 8*j;
		double dR = R - p[2];
		double dEta = Eta - p[3];
		// Lorentzian and Gaussian components with separate R and Eta widths.
		double L = 1/((((dR*dR)/((p[6])*(p[6])))+1)*(((dEta*dEta)/((p[8])*(p[8])))+1));
		double G = exp(-(0.5*((dR*dR)/(p[5]*p[5])))-(0.5*((dEta*dEta)/(p[7]*p[7]))));
		acc += p[1]*((p[4]*L) + ((1-p[4])*G));
	}
	result[i] = acc;
}
// Objective minimised by nelmin: sum of squared differences between the
// modelled pseudo-Voigt intensity and the measured intensity stored in
// RsEtasZ. x layout: x[0] = background, then 8 parameters per peak
// (Imax, R, Eta, mu, sigmaG_R, sigmaL_R, sigmaG_Eta, sigmaL_Eta).
// Fix: the second factor of the squared residual indexed REtaZ[j+3+2]
// instead of REtaZ[j*3+2], so the "square" multiplied two different
// measurements; both factors now use the same element.
// NOTE(review): the loop nesting (peaks outer, pixels inner) sums each peak's
// intensity over ALL pixels before comparing against a single measured value;
// a per-pixel residual (pixels outer, peaks inner) would be the conventional
// least-squares form — confirm the intended objective before relying on this.
__device__ double problem_function(
int n,
double *x,
void* f_data_trial)
{
struct func_data *f_data = (struct func_data *) f_data_trial;
int NrPixels = f_data->NrPixels;
double *REtaZ;
REtaZ = &(f_data->RsEtasZ[0]);
int nPeaks = (n-1)/8;
double TotalDifferenceIntensity=0, CalcIntensity, L, G, IntPeaks, BG = x[0];
int NrPixelsThisRegion = NrPixels;
for (int j=0;j<nPeaks;j++){
IntPeaks = 0;
for (int i=0;i<NrPixelsThisRegion;i++){
L = 1/(((((REtaZ[i*3]-x[(8*j)+2])*(REtaZ[i*3]-x[(8*j)+2]))/((x[(8*j)+6])*(x[(8*j)+6])))+1)*((((REtaZ[i*3+1]-x[(8*j)+3])*(REtaZ[i*3+1]-x[(8*j)+3]))/((x[(8*j)+8])*(x[(8*j)+8])))+1));
G = exp(-(0.5*(((REtaZ[i*3]-x[(8*j)+2])*(REtaZ[i*3]-x[(8*j)+2]))/(x[(8*j)+5]*x[(8*j)+5])))-(0.5*(((REtaZ[i*3+1]-x[(8*j)+3])*(REtaZ[i*3+1]-x[(8*j)+3]))/(x[(8*j)+7]*x[(8*j)+7]))));
IntPeaks += x[(8*j)+1]*((x[(8*j)+4]*L) + ((1-x[(8*j)+4])*G));
}
CalcIntensity = BG + IntPeaks;
TotalDifferenceIntensity += (CalcIntensity - REtaZ[j*3+2])*(CalcIntensity - REtaZ[j*3+2]);
}
return TotalDifferenceIntensity;
}
// One thread per connected region (1D grid, bounds-checked against
// ExtraInfo[0] = number of regions): fit nPeaks pseudo-Voigt peaks to the
// region's pixels with the constrained Nelder-Mead solver and write the
// fitted parameters into ReturnMatrix (9 doubles per peak; [8] is an error
// flag, 1 = fit failed). PkPx packs (nPeaks, nPixels) per region;
// PosnPeaks / PosnPixels are per-region offsets into the packed peak and
// pixel arrays; ExtraInfo = {nRegions, Ycen, Zcen, maxEvals}. All workspace
// arrays (x, xl, xu, xstep, xout, scratch, REtaInt) are pre-allocated on the
// host and sliced per region here.
__global__ void Fit2DPeaks (int *PkPx, double *yzInt, double *MaximaInfo,
double *ReturnMatrix, int *PosnPeaks, int *PosnPixels, double *ExtraInfo,
double *ThreshInfo, double *xDevice, double *xlDevice, double *xuDevice, double *REtaIntDevice,
double *resultsmat, double *scratch, double *xStepArr, double *xoutDevice){
int RegNr = blockIdx.x * blockDim.x + threadIdx.x;
if (RegNr >= (int)ExtraInfo[0]) return;
int nPeaks = PkPx[RegNr*2];
int NrPixelsThisRegion = PkPx[RegNr*2+1];
double Thresh = ThreshInfo[RegNr];
double *scratchArr;
// Parameter count: shared background + 8 parameters per peak.
int n = 1 + (8*nPeaks);
double *yzIntThis, *MaximaInfoThis, *ReturnMatrixThis, *resultsThis;
resultsThis = resultsmat + PosnPixels[RegNr];
yzIntThis = yzInt+ PosnPixels[RegNr]*3;
MaximaInfoThis = MaximaInfo + PosnPeaks[RegNr]*3;
ReturnMatrixThis = ReturnMatrix + PosnPeaks[RegNr]*9;
double *x,*xl,*xu, *RetaInt, *REtaZ, *xstep, *xout;
// Slice this region's portion of the packed parameter / workspace arrays.
int Posxlu = PosnPeaks[RegNr] * 8 + RegNr;
int Posreta = PosnPixels[RegNr]*3;
scratchArr = scratch + ((Posxlu + RegNr)*(Posxlu+RegNr) + 3*Posxlu);
x = xDevice + Posxlu;
xout = xoutDevice + Posxlu;
xl = xlDevice + Posxlu;
xu = xuDevice + Posxlu;
RetaInt = REtaIntDevice + Posreta;
REtaZ = RetaInt;
xstep = xStepArr + Posxlu;
// Background: start at half the threshold, bounded by [0, Thresh].
x[0] = Thresh/2;
xl[0] = 0;
xu[0] = Thresh;
int i;
// Convert each pixel's (y, z) to polar (R, Eta) about the beam center.
for (i=0;i<NrPixelsThisRegion;i++){
RetaInt[i*3] = CalcNorm2(yzIntThis[i*3]-ExtraInfo[1],yzIntThis[i*3+1]-ExtraInfo[2]);
RetaInt[i*3+1] = CalcEtaAngle(yzIntThis[i*3]-ExtraInfo[1],yzIntThis[i*3+1]-ExtraInfo[2]);
RetaInt[i*3+2] = yzIntThis[i*3+2];
}
double Width = sqrt((double)NrPixelsThisRegion/(double)nPeaks);
// Initial guesses and box constraints for each peak's 8 parameters.
for (i=0;i<nPeaks;i++){
x[(8*i)+1] = MaximaInfoThis[i*3]; // Imax
x[(8*i)+2] = CalcNorm2(MaximaInfoThis[i*3+1]-ExtraInfo[1],MaximaInfoThis[i*3+2]-ExtraInfo[2]); //Radius
x[(8*i)+3] = CalcEtaAngle(MaximaInfoThis[i*3+1]-ExtraInfo[1],MaximaInfoThis[i*3+2]-ExtraInfo[2]); // Eta
x[(8*i)+4] = 0.5; // Mu
x[(8*i)+5] = Width; //SigmaGR
x[(8*i)+6] = Width; //SigmaLR
x[(8*i)+7] = rad2deg*atan(Width/x[(8*i)+2]); //SigmaGEta //0.5;
x[(8*i)+8] = rad2deg*atan(Width/x[(8*i)+2]); //SigmaLEta //0.5;
double dEta = rad2deg*atan(1/x[(8*i)+2]);
xl[(8*i)+1] = MaximaInfoThis[i*3]/2;
xl[(8*i)+2] = x[(8*i)+2] - 1;
xl[(8*i)+3] = x[(8*i)+3] - dEta;
xl[(8*i)+4] = 0;
xl[(8*i)+5] = 0.01;
xl[(8*i)+6] = 0.01;
xl[(8*i)+7] = 0.005;
xl[(8*i)+8] = 0.005;
xu[(8*i)+1] = MaximaInfoThis[i*3]*2;
xu[(8*i)+2] = x[(8*i)+2] + 1;
xu[(8*i)+3] = x[(8*i)+3] + dEta;
xu[(8*i)+4] = 1;
xu[(8*i)+5] = 30;
xu[(8*i)+6] = 30;
xu[(8*i)+7] = 2;
xu[(8*i)+8] = 2;
}
for (i=0;i<n;i++){
xstep[i] = fabs(xu[i]-xl[i])*0.25;
}
struct func_data f_data;
f_data.NrPixels = NrPixelsThisRegion;
f_data.RsEtasZ = RetaInt;
f_data.results = resultsThis;
struct func_data *f_datat;
f_datat = &f_data;
void *trp = (struct func_data *) f_datat;
double minf;
double reqmin = 1e-8;
int konvge = 10;
int kcount = (int)ExtraInfo[3];
int icount, numres, ifault;
double IntPeaks, L, G, BGToAdd;
nelmin(problem_function, n, x, xout, xl, xu, scratchArr, &minf, reqmin, xstep, konvge, kcount, &icount, &numres, &ifault, trp);
if (ifault !=0) {
// Fit failed: flag every peak of this region and bail out.
//printf("%d %d %d %d %d %d\n",RegNr,icount,numres,ifault,nPeaks,NrPixelsThisRegion);
for (int j=0;j<nPeaks;j++){
ReturnMatrixThis[j*9+8] = 1;
}
return;
}
x = xout;
// Write out fitted results: integrated intensity, (y, z), Imax, R, Eta,
// averaged R and Eta widths, and a zero error flag.
for (int j=0;j<nPeaks;j++){
ReturnMatrixThis[j*9] = 0;
for (i=0;i<NrPixelsThisRegion;i++){
L = 1/(((((REtaZ[i*3]-x[(8*j)+2])*(REtaZ[i*3]-x[(8*j)+2]))/((x[(8*j)+6])*(x[(8*j)+6])))+1)*((((REtaZ[i*3+1]-x[(8*j)+3])*(REtaZ[i*3+1]-x[(8*j)+3]))/((x[(8*j)+8])*(x[(8*j)+8])))+1));
G = exp(-(0.5*(((REtaZ[i*3]-x[(8*j)+2])*(REtaZ[i*3]-x[(8*j)+2]))/(x[(8*j)+5]*x[(8*j)+5])))-(0.5*(((REtaZ[i*3+1]-x[(8*j)+3])*(REtaZ[i*3+1]-x[(8*j)+3]))/(x[(8*j)+7]*x[(8*j)+7]))));
IntPeaks = x[(8*j)+1]*((x[(8*j)+4]*L) + ((1-x[(8*j)+4])*G));
BGToAdd = x[0];
ReturnMatrixThis[j*9] += (BGToAdd + IntPeaks);
}
ReturnMatrixThis[j*9+1] = -x[(8*j)+2]*sin(x[(8*j)+3]*deg2rad);
ReturnMatrixThis[j*9+2] = x[(8*j)+2]*cos(x[(8*j)+3]*deg2rad);
ReturnMatrixThis[j*9+3] = x[8*j+1];
ReturnMatrixThis[j*9+4] = x[8*j+2];
ReturnMatrixThis[j*9+5] = x[8*j+3];
ReturnMatrixThis[j*9+6] = (x[8*j+5]+x[8*j+6])/2;
ReturnMatrixThis[j*9+7] = (x[8*j+7]+x[8*j+8])/2;
ReturnMatrixThis[j*9+8] = 0;
}
}
// Host wrapper for the Fit2DPeaks kernel: selects the device, uploads all
// per-region inputs, allocates the packed parameter / scratch workspaces,
// launches one thread per region, and copies the fitted results back.
// Fixes: cudaSetDevice and every cudaMemcpy are now routed through the CHECK
// macro (the mallocs were already followed by CHECK(cudaPeekAtLastError())
// but the copies were silently unchecked), and the fflush after the
// stderr diagnostic now flushes stderr instead of stdout.
void CallFit2DPeaks(int *nPeaksNrPixels, double *yzInt, double *MaximaInfo,
double *ReturnMatrix, int TotNrRegions, double *YZCen, double *ThreshInfo,
int *PosMaximaInfoReturnMatrix, int *PosyzInt, int totalPixels,
int totalPeaks, int blocksize, int cudaDeviceNum, int nEvals)
{
	CHECK(cudaSetDevice(cudaDeviceNum));
	size_t freeMem, totalMem;
	int *PkPxDevice,*PosMaxInfoRetMatDevice,*PosyzIntDevice;
	cudaMalloc((int **) &PkPxDevice, TotNrRegions*2*sizeof(int));
	CHECK(cudaPeekAtLastError());
	CHECK(cudaMemcpy(PkPxDevice,nPeaksNrPixels,TotNrRegions*2*sizeof(int),cudaMemcpyHostToDevice));
	cudaMalloc((int **) &PosMaxInfoRetMatDevice, TotNrRegions*sizeof(int));
	CHECK(cudaPeekAtLastError());
	CHECK(cudaMemcpy(PosMaxInfoRetMatDevice,PosMaximaInfoReturnMatrix,TotNrRegions*sizeof(int),cudaMemcpyHostToDevice));
	cudaMalloc((int **) &PosyzIntDevice, TotNrRegions*sizeof(int));
	CHECK(cudaPeekAtLastError());
	CHECK(cudaMemcpy(PosyzIntDevice,PosyzInt,TotNrRegions*sizeof(int),cudaMemcpyHostToDevice));
	// ExtraInfo = {nRegions, Ycen, Zcen, maxEvals}, consumed by the kernel.
	double ExtraInfo[4] = {(double)TotNrRegions,YZCen[0],YZCen[1],(double)nEvals};
	double *yzIntDevice, *MaximaInfoDevice, *ReturnMatrixDevice, *ExtraInfoDevice,
	*ThreshInfoDevice, *xDevice, *xlDevice, *xuDevice, *REtaIntDevice, *resultsmat,
	*scratch, *xStepArr, *xoutDevice;
	cudaMalloc((double **)&yzIntDevice, totalPixels*3*sizeof(double));
	CHECK(cudaPeekAtLastError());
	CHECK(cudaMemcpy(yzIntDevice,yzInt,totalPixels*3*sizeof(double),cudaMemcpyHostToDevice));
	cudaMalloc((double **)&MaximaInfoDevice, totalPeaks*3*sizeof(double));
	CHECK(cudaPeekAtLastError());
	CHECK(cudaMemcpy(MaximaInfoDevice,MaximaInfo,totalPeaks*3*sizeof(double),cudaMemcpyHostToDevice));
	cudaMalloc((double **)&ReturnMatrixDevice, totalPeaks*9*sizeof(double));
	CHECK(cudaPeekAtLastError());
	cudaMalloc((double **)&ThreshInfoDevice, TotNrRegions*sizeof(double));
	CHECK(cudaPeekAtLastError());
	CHECK(cudaMemcpy(ThreshInfoDevice,ThreshInfo,TotNrRegions*sizeof(double),cudaMemcpyHostToDevice));
	cudaMalloc((double **)&ExtraInfoDevice, 4*sizeof(double));
	CHECK(cudaPeekAtLastError());
	CHECK(cudaMemcpy(ExtraInfoDevice,ExtraInfo,4*sizeof(double),cudaMemcpyHostToDevice));
	// Packed per-region parameter vectors: 8 per peak + 1 background each.
	cudaMalloc((double **)&xDevice,(totalPeaks*8+TotNrRegions)*sizeof(double));
	CHECK(cudaPeekAtLastError());
	cudaMalloc((double **)&xlDevice,(totalPeaks*8+TotNrRegions)*sizeof(double));
	CHECK(cudaPeekAtLastError());
	cudaMalloc((double **)&xuDevice,(totalPeaks*8+TotNrRegions)*sizeof(double));
	CHECK(cudaPeekAtLastError());
	cudaMalloc((double **)&xStepArr,(totalPeaks*8+TotNrRegions)*sizeof(double));
	CHECK(cudaPeekAtLastError());
	cudaMalloc((double **)&xoutDevice,(totalPeaks*8+TotNrRegions)*sizeof(double));
	CHECK(cudaPeekAtLastError());
	// Nelder-Mead scratch: 3n + (n+1)^2 doubles per region, summed.
	int totN = totalPeaks*8 + TotNrRegions;
	int scratchSpace = (totN+TotNrRegions)*(totN+TotNrRegions) + 3*totN;
	cudaMalloc((double **)&scratch,scratchSpace*sizeof(double));
	CHECK(cudaPeekAtLastError());
	cudaMalloc((double **)&REtaIntDevice,(totalPixels+100)*3*sizeof(double));
	CHECK(cudaPeekAtLastError());
	cudaMalloc((double **)&resultsmat,totalPixels*sizeof(double));
	CHECK(cudaPeekAtLastError());
	// One thread per region.
	int dim = TotNrRegions;
	dim3 block (blocksize);
	dim3 grid ((dim/block.x)+1);
	cudaMemGetInfo(&freeMem, &totalMem);
	fprintf(stderr, "Block size : %d, grid size: %d Free = %zu MB, Total = %zu MB\n", block.x, grid.x, freeMem/(1024*1024), totalMem/(1024*1024));
	fflush(stderr);
	Fit2DPeaks<<<grid,block>>>(PkPxDevice,yzIntDevice, MaximaInfoDevice,
	ReturnMatrixDevice, PosMaxInfoRetMatDevice, PosyzIntDevice,
	ExtraInfoDevice, ThreshInfoDevice, xDevice, xlDevice, xuDevice,
	REtaIntDevice, resultsmat,scratch, xStepArr, xoutDevice);
	CHECK(cudaPeekAtLastError());
	CHECK(cudaDeviceSynchronize());
	CHECK(cudaMemcpy(ReturnMatrix,ReturnMatrixDevice,totalPeaks*9*sizeof(double),cudaMemcpyDeviceToHost));
	cudaFree(PkPxDevice);
	cudaFree(PosMaxInfoRetMatDevice);
	cudaFree(PosyzIntDevice);
	cudaFree(yzIntDevice);
	cudaFree(MaximaInfoDevice);
	cudaFree(ReturnMatrixDevice);
	cudaFree(ExtraInfoDevice);
	cudaFree(ThreshInfoDevice);
	cudaFree(xDevice);
	cudaFree(xlDevice);
	cudaFree(xuDevice);
	cudaFree(xStepArr);
	cudaFree(xoutDevice);
	cudaFree(scratch);
	cudaFree(REtaIntDevice);
	cudaFree(resultsmat);
	CHECK(cudaDeviceSynchronize());
}
// Wall-clock time in seconds (microsecond resolution) from gettimeofday.
double cpuSecond(){
	struct timeval now;
	gettimeofday(&now, NULL);
	return (double)now.tv_sec + 1.e-6 * (double)now.tv_usec;
}
// Return the total number of FP32 CUDA cores for the device described by
// devProp (cores per SM for the compute capability, times the SM count).
// Generalized: the original only knew Fermi/Kepler/Maxwell; Pascal, Volta,
// Turing, Ampere, Ada and Hopper are now covered. Returns 0 (with a message)
// for unknown architectures, as before.
int getSPcores(cudaDeviceProp devProp)
{
	int cores = 0;
	int mp = devProp.multiProcessorCount;
	switch (devProp.major){
		case 2: // Fermi
			if (devProp.minor == 1) cores = mp * 48;
			else cores = mp * 32;
			break;
		case 3: // Kepler
			cores = mp * 192;
			break;
		case 5: // Maxwell
			cores = mp * 128;
			break;
		case 6: // Pascal: GP100 (6.0) has 64/SM, GP10x (6.1/6.2) have 128/SM
			if (devProp.minor == 1 || devProp.minor == 2) cores = mp * 128;
			else cores = mp * 64;
			break;
		case 7: // Volta (7.0) and Turing (7.5)
			cores = mp * 64;
			break;
		case 8: // Ampere: GA100 (8.0) has 64/SM; GA10x / Ada (8.6/8.9) have 128/SM
			if (devProp.minor == 0) cores = mp * 64;
			else cores = mp * 128;
			break;
		case 9: // Hopper
			cores = mp * 128;
			break;
		default:
			printf("Unknown device type\n");
			break;
	}
	return cores;
}
int main(int argc, char *argv[]){ // Arguments: parameter file name
if (argc != 3){
printf("Not enough arguments, exiting. Use as:\n\t\t%s Parameters.txt cudaDeviceNumber\n",argv[0]);
return 1;
}
//Read params file
char *ParamFN;
FILE *fileParam;
ParamFN = argv[1];
char line[MAX_LINE_LENGTH];
fileParam = fopen(ParamFN,"r");
if (fileParam == NULL){
printf("Parameter file: %s could not be read. Exiting\n",argv[1]);
return 1;
}
char *str;
double tstart = cpuSecond();
int cmpres, StartFileNr, NrFilesPerSweep, NumDarkBegin=0, NumDarkEnd=0,
ColBeamCurrent, NrOfRings=0, RingNumbers[MAX_N_RINGS], TransOpt[10],
NrTransOpt=0, DoFullImage=0, Padding, NrPixels, LayerNr, FrameNumberToDo=-1;
double OmegaOffset = 0, bc=0, RingSizeThreshold[MAX_N_RINGS][4], px,
Width, IntSat, Ycen, Zcen;
char dummy[MAX_LINE_LENGTH], ParFilePath[MAX_LINE_LENGTH],
FileStem[MAX_LINE_LENGTH], RawFolder[MAX_LINE_LENGTH],
OutputFolder[MAX_LINE_LENGTH], darkcurrentfilename[MAX_LINE_LENGTH],
floodfilename[MAX_LINE_LENGTH], Ext[MAX_LINE_LENGTH];
while (fgets(line, MAX_LINE_LENGTH, fileParam) != NULL) {
str = "ParFilePath ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %s", dummy, ParFilePath);
continue;
}
str = "RingThresh ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %d %lf", dummy, &RingNumbers[NrOfRings],
&RingSizeThreshold[NrOfRings][1]);
NrOfRings++;
continue;
}
str = "FileStem ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %s", dummy, FileStem);
continue;
}
str = "ParFileColBeamCurrent ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %d", dummy, &ColBeamCurrent);
continue;
}
str = "StartFileNr ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %d", dummy, &StartFileNr);
continue;
}
str = "NrFilesPerSweep ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %d", dummy, &NrFilesPerSweep);
continue;
}
str = "NumDarkBegin ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %d", dummy, &NumDarkBegin);
continue;
}
str = "NumDarkEnd ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %d", dummy, &NumDarkEnd);
continue;
}
str = "OmegaOffset ";
cmpres = strncmp(line, str, strlen(str));
if (cmpres == 0) {
sscanf(line, "%s %lf", dummy, &OmegaOffset);
continue;
}
str = "BeamCurrent ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %lf", dummy, &bc);
continue;
}
str = "Width ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %lf", dummy, &Width);
continue;
}
str = "px ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %lf", dummy, &px);
continue;
}
str = "ImTransOpt ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %d", dummy, &TransOpt[NrTransOpt]);
NrTransOpt++;
continue;
}
str = "DoFullImage ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %d", dummy, &DoFullImage);
continue;
}
str = "RawFolder ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %s", dummy, RawFolder);
continue;
}
str = "Folder ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %s", dummy, OutputFolder);
continue;
}
str = "Dark ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %s", dummy, darkcurrentfilename);
continue;
}
str = "Flood ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %s", dummy, floodfilename);
continue;
}
str = "BC ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %lf %lf", dummy, &Ycen, &Zcen);
continue;
}
str = "UpperBoundThreshold ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %lf", dummy, &IntSat);
continue;
}
str = "LayerNr ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %d", dummy, &LayerNr);
continue;
}
str = "NrPixels ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %d", dummy, &NrPixels);
continue;
}
str = "Padding ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %d", dummy, &Padding);
continue;
}
str = "SingleFrameNumber ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %d", dummy, &FrameNumberToDo);
continue;
}
str = "Ext ";
cmpres = strncmp(line,str,strlen(str));
if (cmpres==0){
sscanf(line,"%s %s", dummy, Ext);
continue;
}
}
printf("Read params file.\n");
fflush(stdout);
if (DoFullImage == 1 && FrameNumberToDo == -1){
printf("For processing the full image you need to provide a single"
" Frame Number using the FrameNumberToDo parameter in the"
" param file.\n Exiting\n");
return (1);
}
Width = Width/px;
FILE *ParFile;
ParFile = fopen(ParFilePath,"r");
if (ParFile == NULL){
printf("ParFile could not be read");
return 1;
}
int i, j, k;
int NrFramesPerFile[NrFilesPerSweep],CurrFileNrOffset;
for (i=0;i<NrFilesPerSweep;i++){
NrFramesPerFile[i] = -(NumDarkBegin+NumDarkEnd);
}
char *token, *saveptr;
int OmegaSign=1, goodLine, omegafound;
double Omegas[NrFilesPerSweep][300],BeamCurrents[NrFilesPerSweep][300],
maxBC=0;
char aline[MAX_LINE_LENGTH];
int nFramesBC=0;
while (fgets(aline, MAX_LINE_LENGTH, ParFile) != NULL) {
strncpy(line,aline,strlen(aline));
goodLine = 0;
for (str = line; ; str=NULL){
token = strtok_r(str, " ", &saveptr);
if (token == NULL) break;
if (!strncmp(token,FileStem,strlen(FileStem))){
token = strtok_r(str, " ", &saveptr);
token = strtok_r(str, " ", &saveptr);
CurrFileNrOffset = atoi(token)-StartFileNr;
if (CurrFileNrOffset >=0 && CurrFileNrOffset < NrFilesPerSweep){
NrFramesPerFile[CurrFileNrOffset]++;
goodLine = 1;
}
}
}
if (NrFramesPerFile[CurrFileNrOffset] < -NumDarkBegin + 1) continue;
if (goodLine){
strncpy(line,aline,strlen(aline));
omegafound = 0;
for (i=1, str = line; ; i++, str = NULL){
token = strtok_r(str, " ", &saveptr);
if (token == NULL) break;
if (!strncmp(token,"ramsrot",strlen("ramsrot"))){
omegafound = 1;
OmegaSign = 1;
} else if (!strncmp(token,"aero",strlen("aero"))){
omegafound = 1;
OmegaSign = -1;
} else if (!strncmp(token,"preci",strlen("preci"))){
omegafound = 1;
OmegaSign = 1;
}
if (omegafound){
token = strtok_r(str," ", &saveptr);
token = strtok_r(str," ", &saveptr);
token = strtok_r(str," ", &saveptr);
i+=3;
Omegas[CurrFileNrOffset][NrFramesPerFile
[CurrFileNrOffset]+NumDarkBegin-1]
= atof(token) * OmegaSign + OmegaOffset;
omegafound = 0;
}
if (i == ColBeamCurrent){
BeamCurrents[CurrFileNrOffset][NrFramesPerFile
[CurrFileNrOffset]+NumDarkBegin-1] = atof(token);
maxBC = (maxBC > atof(token)) ? maxBC : atof(token);
nFramesBC++;
}
}
}
}
int TotalNrFrames = 0;
for (i=0;i<NrFilesPerSweep;i++){
TotalNrFrames += NrFramesPerFile[i];
}
bc = (bc > maxBC) ? bc : maxBC;
// Read hkls.csv
char *hklfn = "hkls.csv";
FILE *hklf = fopen(hklfn,"r");
fgets(line,1000,hklf);
int Rnr;
double RRd;
while (fgets(line,1000,hklf)!=NULL){
sscanf(line, "%s %s %s %s %d %s %s %s %s %s %lf", dummy, dummy,
dummy, dummy, &Rnr, dummy, dummy, dummy, dummy ,dummy, &RRd);
for (i=0;i<NrOfRings;i++){
if (Rnr == RingNumbers[i]){
RingSizeThreshold[i][0] = RRd/px;
RingSizeThreshold[i][2] = RRd/px - Width;
RingSizeThreshold[i][3] = RRd/px + Width;
}
}
}
for (i=0;i<NrTransOpt;i++){
if (TransOpt[i] < 0 || TransOpt[i] > 3){
printf("TransformationOptions can only be 0, 1, 2 or 3.\nExiting.\n");
return 1;
}
printf("TransformationOptions: %d ",TransOpt[i]);
if (TransOpt[i] == 0) printf("No change.\n");
else if (TransOpt[i] == 1) printf("Flip Left Right.\n");
else if (TransOpt[i] == 2) printf("Flip Top Bottom.\n");
else printf("Transpose.\n");
}
int *GoodCoords, *RingInfoImage, TotalGoodPixels=0, ythis, zthis;
double Rmin, Rmax, Rt;
GoodCoords = (int*) malloc(NrPixels*NrPixels*sizeof(*GoodCoords));
RingInfoImage = (int*) malloc(NrPixels*NrPixels*sizeof(*RingInfoImage));
for (i=0;i<NrPixels*NrPixels;i++){
GoodCoords[i] = 0;
}
for (i=1;i<NrPixels;i++){
for (j=1;j<NrPixels;j++){
Rt = sqrt((i-Ycen)*(i-Ycen)+(j-Zcen)*(j-Zcen));
for (k=0;k<NrOfRings;k++){
Rmin = RingSizeThreshold[k][2];
Rmax = RingSizeThreshold[k][3];
if (Rt > Rmin && Rt < Rmax){
GoodCoords[((i-1)*NrPixels)+(j-1)] = 1;
RingInfoImage[((i-1)*NrPixels)+(j-1)] = RingNumbers[k];
TotalGoodPixels++;
}
}
}
}
if (DoFullImage == 1){
TotalNrFrames = 1;
for (i=0;i<NrPixels*NrPixels;i++) {
GoodCoords[i] = 1;
}
TotalGoodPixels = NrPixels*NrPixels;
}
double *dark,*flood, *darkTemp, *darkTemp2;
dark = (double *) malloc(NrPixels*NrPixels*NrFilesPerSweep*sizeof(*dark));
darkTemp = (double *) malloc(NrPixels*NrPixels*sizeof(*darkTemp));
darkTemp2 = (double *) malloc(NrPixels*NrPixels*sizeof(*darkTemp2));
flood = (double *) malloc(NrPixels*NrPixels*sizeof(*flood));
// If a darkfile is specified.
FILE *darkfile=fopen(darkcurrentfilename,"rb");
int sz, nFrames;
int SizeFile = sizeof(pixelvalue) * NrPixels * NrPixels;
long int Skip;
for (i=0;i<(NrPixels*NrPixels);i++){
dark[i]=0;
darkTemp[i]=0;
}
pixelvalue *darkcontents;
darkcontents = (pixelvalue *) malloc(NrPixels*NrPixels*sizeof(*darkcontents));
if (darkfile==NULL){
printf("No dark file was specified, will use %d frames at the beginning of each file for dark calculation.\n", NumDarkBegin);
for (i=0;i<NrPixels*NrPixels;i++){
darkTemp[i] = 0;
}
}else{
fseek(darkfile,0L,SEEK_END);
sz = ftell(darkfile);
rewind(darkfile);
nFrames = sz/(8*1024*1024);
Skip = sz - (nFrames*8*1024*1024);
fseek(darkfile,Skip,SEEK_SET);
printf("Reading dark file: %s, nFrames: %d, skipping first %ld bytes.\n",darkcurrentfilename,nFrames,Skip);
for (i=0;i<nFrames;i++){
fread(darkcontents,SizeFile,1,darkfile);
DoImageTransformations(NrTransOpt,TransOpt,darkcontents,NrPixels);
for (j=0;j<(NrPixels*NrPixels);j++){
darkTemp[j] += (double) darkcontents[j];
}
}
fclose(darkfile);
for (i=0;i<(NrPixels*NrPixels);i++){
darkTemp[i] /= (double) nFrames;
}
}
Transposer(darkTemp,NrPixels,darkTemp2);
for (i=0;i<NrFilesPerSweep;i++){
for (j=0;j<NrPixels*NrPixels;j++){
dark[i*NrPixels*NrPixels + j] = darkTemp2[j];
}
}
char FN[MAX_LINE_LENGTH];
if (NumDarkBegin != 0){
for (i=0;i<NrFilesPerSweep;i++){
for (j=0;j<NrPixels*NrPixels;j++){
darkTemp[j] = 0;
}
if (Padding == 2){sprintf(FN,"%s/%s_%02d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
else if (Padding == 3){sprintf(FN,"%s/%s_%03d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
else if (Padding == 4){sprintf(FN,"%s/%s_%04d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
else if (Padding == 5){sprintf(FN,"%s/%s_%05d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
else if (Padding == 6){sprintf(FN,"%s/%s_%06d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
else if (Padding == 7){sprintf(FN,"%s/%s_%07d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
else if (Padding == 8){sprintf(FN,"%s/%s_%08d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
else if (Padding == 9){sprintf(FN,"%s/%s_%09d%s",RawFolder,FileStem,StartFileNr+i,Ext);}
FILE *FileTempDark = fopen(FN,"rb");
fseek(FileTempDark, 0L, SEEK_END);
sz = ftell(FileTempDark);
rewind(FileTempDark);
nFrames = sz/(8*1024*1024);
Skip = sz - (nFrames*8*1024*1024);
fseek(FileTempDark,Skip, SEEK_SET);
for (j=0;j<NumDarkBegin;j++){
fread(darkcontents,SizeFile,1,FileTempDark);
DoImageTransformations(NrTransOpt,TransOpt,darkcontents,NrPixels);
for (k=0;k<NrPixels*NrPixels;k++){
darkTemp[k] += (double) darkcontents[k];
}
}
fclose(FileTempDark);
for (j=0;j<NrPixels*NrPixels;j++){
darkTemp[k] /= NumDarkBegin;
}
Transposer(darkTemp,NrPixels,darkTemp2);
for (j=0;j<NrPixels*NrPixels;j++){
dark[i*NrPixels*NrPixels + j] = darkTemp2[j];
}
}
}
free(darkcontents);
FILE *floodfile=fopen(floodfilename,"rb");
if (floodfile==NULL){
printf("Could not read the flood file. Using no flood correction.\n");
for(i=0;i<(NrPixels*NrPixels);i++){
flood[i]=1;
}
}
else{
fread(flood,sizeof(double)*NrPixels*NrPixels, 1, floodfile);
fclose(floodfile);
}
int FrameNr = 0, FramesToSkip, CurrentFileNr, CurrentRingNr;
double beamcurr, Thresh;
pixelvalue *Image;
Image = (pixelvalue *) malloc(NrPixels*NrPixels*sizeof(*Image));
double *ImgCorrBCTemp, *ImgCorrBC;
ImgCorrBC = (double *) malloc(NrPixels*NrPixels*sizeof(*ImgCorrBC));
ImgCorrBCTemp = (double *) malloc(NrPixels*NrPixels*sizeof(*ImgCorrBCTemp));
char outfoldername[MAX_LINE_LENGTH];
sprintf(outfoldername,"%s/Temp",OutputFolder);
char extcmd[MAX_LINE_LENGTH];
sprintf(extcmd,"mkdir -p %s",outfoldername);
system(extcmd);
int **BoolImage, **ConnectedComponents, **Positions, *PositionTrackers, NrOfReg;
BoolImage = allocMatrixInt(NrPixels,NrPixels);
ConnectedComponents = allocMatrixInt(NrPixels,NrPixels);
Positions = allocMatrixInt(nOverlapsMaxPerImage,NrPixels*4);
PositionTrackers = (int *) malloc(nOverlapsMaxPerImage*sizeof(*PositionTrackers));
int RegNr, IsSaturated;
char OutFile[MAX_LINE_LENGTH];
int TotNrRegions=0, NrPixelsThisRegion;
int *nPeaksNrPixels,*PosyzInt,*PosMaximaInfoReturnMatrix, *RingNumberMatrix;
nPeaksNrPixels = (int *) malloc(nOverlapsMaxPerImage*2*sizeof(*nPeaksNrPixels));
RingNumberMatrix = (int *) malloc(nOverlapsMaxPerImage * 200 * sizeof(*RingNumberMatrix));
double *yzInt, *MaximaInfo, *ReturnMatrix, *ThreshInfo, *YZCen, *OmegaValues;
OmegaValues = (double *) malloc(nOverlapsMaxPerImage*100*sizeof(*OmegaValues));
yzInt = (double *) malloc(nOverlapsMaxPerImage*3*NrPixels*sizeof(*yzInt));
MaximaInfo = (double *) malloc(nOverlapsMaxPerImage*3*100*sizeof(*MaximaInfo));
ReturnMatrix = (double *) malloc(nOverlapsMaxPerImage*9*100*sizeof(*ReturnMatrix));
ThreshInfo = (double *) malloc(nOverlapsMaxPerImage*sizeof(*ThreshInfo));
PosyzInt = (int *) malloc(nOverlapsMaxPerImage*sizeof(*PosyzInt));
PosMaximaInfoReturnMatrix = (int *) malloc(nOverlapsMaxPerImage*sizeof(*PosMaximaInfoReturnMatrix));
YZCen = (double *) malloc(2*sizeof(*YZCen));
YZCen[0] = Ycen;
YZCen[1] = Zcen;
int **MaximaPositions, **UsefulPixels, Pos;
double *MaximaValues, *z, Omega;
MaximaPositions = allocMatrixInt(NrPixels*10,2);
MaximaValues = (double*) malloc(NrPixels*10*sizeof(*MaximaValues));
UsefulPixels = allocMatrixInt(NrPixels*10,2);
z = (double *) malloc(NrPixels*10*sizeof(*z));
int counter, counteryzInt, counterMaximaInfoReturnMatrix;
sprintf(OutFile,"%s/%s_%d_PS.csv",outfoldername,FileStem,LayerNr);
FILE *outfilewrite;
outfilewrite = fopen(OutFile,"w");
fprintf(outfilewrite,"SpotID IntegratedIntensity Omega(degrees) YCen(px) ZCen(px) IMax Radius(px) Eta(degrees) SigmaR SigmaEta RingNr FrameNr\n");
int OldCurrentFileNr=StartFileNr-1;
FILE *ImageFile;
counter = 0;
counteryzInt = 0;
counterMaximaInfoReturnMatrix = 0;
printf("Starting peaksearch now.\n");
fflush(stdout);
int cudaDeviceNum = atoi(argv[2]);
cudaSetDevice(cudaDeviceNum);
cudaDeviceProp deviceProp;
cudaGetDeviceProperties(&deviceProp, 0);
int nCores = getSPcores(deviceProp);
printf("Cuda Cores: %d\n",nCores);
int nJobsLast, nJobsNow=0, resetArrays=1, blocksize = 256, nBad=0, totalPeaks=0;
int *badNPeaksNrPixels, *badPosNPeaks, *badPosNPixels, *badRingNrMatrix;
int badCounterNrPixels = 0, badCounterNPeaks = 0;
double *badYZInt, *badMaximaInfo, *badThreshInfo, *badOmegaValues;
badNPeaksNrPixels = (int *) malloc(nOverlapsMaxPerImage*10*2*sizeof(int));
badPosNPeaks = (int *) malloc(nOverlapsMaxPerImage*10*2*sizeof(int));
badPosNPixels = (int *) malloc(nOverlapsMaxPerImage*10*2*sizeof(int));
badRingNrMatrix = (int *) malloc(nOverlapsMaxPerImage*10*2*sizeof(int));
badYZInt = (double *) malloc(nOverlapsMaxPerImage*100*3*NrPixels*sizeof(double));
badMaximaInfo = (double *) malloc(nOverlapsMaxPerImage*10*3*sizeof(double));
badThreshInfo = (double *) malloc(nOverlapsMaxPerImage*10*sizeof(int));
badOmegaValues = (double *) malloc(nOverlapsMaxPerImage*100*sizeof(double));
int nPeaks, nEvals=MAX_N_EVALS, nEvals2=MAX_N_EVALS*10;
while (FrameNr < TotalNrFrames){
if (TotalNrFrames == 1){ // Look at the next part
/*FrameNr = FrameNumberToDo;
for (i=0;i<NrFilesPerSweep;i++){
if (NrFramesPerFile[i]/FrameNumberToDo > 0){
FrameNumberToDo -= NrFramesPerFile[i];
}else{
CurrentFileNr = StartFileNr + i;
FramesToSkip = FrameNumberToDo;
break;
}
}*/
}else{
CurrentFileNr = StartFileNr;
FramesToSkip = FrameNr;
if (FramesToSkip >= (NrFramesPerFile[0])){
for (i=0;i<NrFilesPerSweep;i++){
if (FramesToSkip / (NrFramesPerFile[i]) >= 1){
FramesToSkip -= NrFramesPerFile[i];
CurrentFileNr++;
}
}
}
}
if (OldCurrentFileNr !=CurrentFileNr){
if (FrameNr > 0) fclose(ImageFile);
if (Padding == 2){sprintf(FN,"%s/%s_%02d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
else if (Padding == 3){sprintf(FN,"%s/%s_%03d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
else if (Padding == 4){sprintf(FN,"%s/%s_%04d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
else if (Padding == 5){sprintf(FN,"%s/%s_%05d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
else if (Padding == 6){sprintf(FN,"%s/%s_%06d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
else if (Padding == 7){sprintf(FN,"%s/%s_%07d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
else if (Padding == 8){sprintf(FN,"%s/%s_%08d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
else if (Padding == 9){sprintf(FN,"%s/%s_%09d%s",RawFolder,FileStem,CurrentFileNr,Ext);}
ImageFile = fopen(FN,"rb");
if (ImageFile == NULL){
printf("Could not read the input file. Exiting.\n");
return 1;
}
fseek(ImageFile,0L,SEEK_END);
sz = ftell(ImageFile);
rewind(ImageFile);
Skip = sz - ((NrFramesPerFile[StartFileNr-CurrentFileNr] - NumDarkEnd - FramesToSkip) * 8*1024*1024);
fseek(ImageFile,Skip,SEEK_SET);
}
printf("Now processing file: %s, Frame: %d\n",FN, FramesToSkip);
fflush(stdout);
fread(Image,SizeFile,1,ImageFile);
DoImageTransformations(NrTransOpt,TransOpt,Image,NrPixels);
beamcurr = BeamCurrents[CurrentFileNr - StartFileNr][FramesToSkip];
Omega = Omegas[CurrentFileNr - StartFileNr][FramesToSkip];
printf("Beam current this file: %f, Beam current scaling value: %f\n",beamcurr,bc);
for (i=0;i<NrPixels*NrPixels;i++)
ImgCorrBCTemp[i]=(double) Image[i];
Transposer(ImgCorrBCTemp,NrPixels,ImgCorrBC);
for (i=0;i<NrPixels*NrPixels;i++){
ImgCorrBC[i] = (ImgCorrBC[i] - dark[NrPixels*NrPixels*(CurrentFileNr-StartFileNr) + i])/flood[i];
ImgCorrBC[i] = ImgCorrBC[i]*bc/beamcurr;
if (GoodCoords[i] == 0){
ImgCorrBC[i] = 0;
continue;
}
CurrentRingNr = RingInfoImage[i];
for (j=0;j<NrOfRings;j++){
if (RingNumbers[j] == CurrentRingNr){
Pos = j;
}
}
Thresh = RingSizeThreshold[Pos][1];
if (ImgCorrBC[i] < Thresh){
ImgCorrBC[i] = 0;
}
}
for (i=0;i<nOverlapsMaxPerImage;i++)
PositionTrackers[i] = 0;
for (i=0;i<NrPixels;i++){
for (j=0;j<NrPixels;j++){
if (ImgCorrBC[(i*NrPixels)+j] != 0){
BoolImage[i][j] = 1;
}else{
BoolImage[i][j] = 0;
}
}
}
NrOfReg = FindConnectedComponents(BoolImage,NrPixels,ConnectedComponents,Positions,PositionTrackers);
if (resetArrays == 1){
counter = 0;
counteryzInt = 0;
counterMaximaInfoReturnMatrix = 0;
TotNrRegions = 0;
}
TotNrRegions += NrOfReg;
nJobsLast = nJobsNow;
for (RegNr=1;RegNr<=NrOfReg;RegNr++){
NrPixelsThisRegion = PositionTrackers[RegNr];
if (NrPixelsThisRegion == 1){
TotNrRegions--;
continue;
}
for (i=0;i<NrPixelsThisRegion;i++){
UsefulPixels[i][0] = (int)(Positions[RegNr][i]/NrPixels);
UsefulPixels[i][1] = (int)(Positions[RegNr][i]%NrPixels);
z[i] = ImgCorrBC[((UsefulPixels[i][0])*NrPixels) + (UsefulPixels[i][1])];
}
nPeaks = FindRegionalMaxima(z,UsefulPixels,NrPixelsThisRegion,
MaximaPositions,MaximaValues,&IsSaturated,IntSat);
if (IsSaturated == 1){
TotNrRegions--;
continue;
}
nPeaksNrPixels[counter*2] = nPeaks;
nPeaksNrPixels[counter*2+1] = NrPixelsThisRegion;
PosMaximaInfoReturnMatrix[counter] = counterMaximaInfoReturnMatrix;
PosyzInt[counter] = counteryzInt;
for (i=0;i<NrPixelsThisRegion;i++){
yzInt[(counteryzInt+i)*3 + 0] = (double)UsefulPixels[i][0];
yzInt[(counteryzInt+i)*3 + 1] = (double)UsefulPixels[i][1];
yzInt[(counteryzInt+i)*3 + 2] = z[i];
}
for (i=0;i<nPeaks;i++){
MaximaInfo[(counterMaximaInfoReturnMatrix+i)*3 + 0] = MaximaValues[i];
MaximaInfo[(counterMaximaInfoReturnMatrix+i)*3 + 1] = (double)MaximaPositions[i][0];
MaximaInfo[(counterMaximaInfoReturnMatrix+i)*3 + 2] = (double)MaximaPositions[i][1];
RingNumberMatrix[(counterMaximaInfoReturnMatrix+i)*2+0] = RingInfoImage[MaximaPositions[0][0]*NrPixels+MaximaPositions[0][1]];
RingNumberMatrix[(counterMaximaInfoReturnMatrix+i)*2+1] = FrameNr;
OmegaValues[counterMaximaInfoReturnMatrix+i] = Omega;
}
for (i=0;i<NrOfRings;i++){
if (RingNumbers[i] == RingNumberMatrix[(counterMaximaInfoReturnMatrix+i)*2+0]){
Pos = i;
}
}
ThreshInfo[counter] = RingSizeThreshold[Pos][1];
counteryzInt+= NrPixelsThisRegion;
counterMaximaInfoReturnMatrix += nPeaks;
counter++;
}
nJobsNow = TotNrRegions;
nJobsLast = nJobsNow - nJobsLast;
fflush(stdout);
resetArrays = 0;
if (nJobsNow + nJobsLast + blocksize >= nCores || FrameNr == TotalNrFrames-1){
totalPeaks += counterMaximaInfoReturnMatrix;
printf("Starting CUDA job with %d jobs at %d frameNr. CUDA Cores: %d\n",nJobsNow, FrameNr, nCores);
printf("Total number of peaks for CUDA run: %d\n",counterMaximaInfoReturnMatrix);
printf("Total number of useful pixels for CUDA run: %d\n",counteryzInt);
// Now send all info to the GPU calling code
CallFit2DPeaks(nPeaksNrPixels, yzInt, MaximaInfo, ReturnMatrix,
TotNrRegions, YZCen, ThreshInfo, PosMaximaInfoReturnMatrix,
PosyzInt, counteryzInt, counterMaximaInfoReturnMatrix,
blocksize, cudaDeviceNum, nEvals);
for (i=0;i<TotNrRegions;i++){
if (ReturnMatrix[PosMaximaInfoReturnMatrix[i]*9+8] == 1){
// Maintain arrays with bad peaks.
badNPeaksNrPixels[nBad*2] = nPeaksNrPixels[i*2];
badNPeaksNrPixels[nBad*2+1] = nPeaksNrPixels[i*2+1];
badPosNPeaks[nBad] = badCounterNPeaks;
badPosNPixels[nBad] = badCounterNrPixels;
badThreshInfo[nBad] = ThreshInfo[i];
nPeaks = nPeaksNrPixels[i*2];
NrPixelsThisRegion = nPeaksNrPixels[i*2+1];
for (j=0;j<NrPixelsThisRegion;j++){
badYZInt[(badCounterNrPixels+j)*3+0] = yzInt[(PosyzInt[i]+j)*3+0];
badYZInt[(badCounterNrPixels+j)*3+1] = yzInt[(PosyzInt[i]+j)*3+1];
badYZInt[(badCounterNrPixels+j)*3+2] = yzInt[(PosyzInt[i]+j)*3+2];
}
for (j=0;j<nPeaks;j++){
badMaximaInfo[(badCounterNPeaks+j)*3+0] = MaximaInfo[(PosMaximaInfoReturnMatrix[i]+j)*3+0];
badMaximaInfo[(badCounterNPeaks+j)*3+1] = MaximaInfo[(PosMaximaInfoReturnMatrix[i]+j)*3+1];
badMaximaInfo[(badCounterNPeaks+j)*3+2] = MaximaInfo[(PosMaximaInfoReturnMatrix[i]+j)*3+2];
badRingNrMatrix[(badCounterNPeaks+j)*2+0] = RingNumberMatrix[(PosMaximaInfoReturnMatrix[i]+j)*2+0];
badRingNrMatrix[(badCounterNPeaks+j)*2+1] = RingNumberMatrix[(PosMaximaInfoReturnMatrix[i]+j)*2+1];
badOmegaValues[badCounterNPeaks] = OmegaValues[PosMaximaInfoReturnMatrix[i]];
}
badCounterNrPixels += NrPixelsThisRegion;
badCounterNPeaks += nPeaks;
nBad++; // Number of regions
}
}
for (i=0;i<counterMaximaInfoReturnMatrix;i++){
if (ReturnMatrix[i*9+8] == 0){
fprintf(outfilewrite,"%d %f %f %f %f %f %f %f %f %f %d %d\n",i+1,
ReturnMatrix[i*9+0],OmegaValues[i],ReturnMatrix[i*9+1]+Ycen,
ReturnMatrix[i*9+2]+Zcen,ReturnMatrix[i*9+3],
ReturnMatrix[i*9+4], ReturnMatrix[i*9+5],
ReturnMatrix[i*9+6],ReturnMatrix[i*9+7],
RingNumberMatrix[i*2],RingNumberMatrix[i*2+1]);
}
}
printf("Time taken till %d frame: %lf seconds, bad peaks= %d out of %d peaks.\n",FrameNr, cpuSecond()-tstart, nBad, totalPeaks);
resetArrays = 1;
}
if (nBad + 200 + blocksize > nCores || FrameNr == TotalNrFrames-1){
printf("Starting CUDA job with %d difficult jobs. CUDA Cores: %d\n",nBad, nCores);
printf("Total number of peaks for CUDA run: %d\n",badCounterNPeaks);
printf("Total number of useful pixels for CUDA run: %d\n",badCounterNrPixels);
CallFit2DPeaks(badNPeaksNrPixels, badYZInt, badMaximaInfo, ReturnMatrix,
nBad, YZCen, badThreshInfo, badPosNPeaks, badPosNPixels,
badCounterNrPixels, badCounterNPeaks, blocksize, cudaDeviceNum, nEvals2);
for (i=0;i<badCounterNPeaks;i++){
fprintf(outfilewrite,"%d %f %f %f %f %f %f %f %f %f %d %d\n",i+1,
ReturnMatrix[i*9+0],badOmegaValues[i],ReturnMatrix[i*9+1]+Ycen,
ReturnMatrix[i*9+2]+Zcen,ReturnMatrix[i*9+3],
ReturnMatrix[i*9+4], ReturnMatrix[i*9+5],
ReturnMatrix[i*9+6],ReturnMatrix[i*9+7],
badRingNrMatrix[i*2],badRingNrMatrix[i*2+1]);
}
badCounterNrPixels = 0;
badCounterNPeaks = 0;
nBad = 0;
printf("Time taken after difficult peaksFitting: %lf seconds.\n",cpuSecond()-tstart);
}
FrameNr++;
OldCurrentFileNr = CurrentFileNr;
}
fclose(outfilewrite);
printf("Total time taken: %lf seconds.\n",cpuSecond()-tstart);
}
|
11,218 | #include "includes.h"
// Scatter `orig` into `sorted` according to a precomputed permutation:
// element i of the input lands at index sort_idx[i] of the output.
// NOTE: the loop is serial per thread — every launched thread walks the
// entire range, so this kernel only makes sense for a <<<1,1>>> launch.
__global__ void copy_sort_int( const int *orig, const unsigned int *sort_idx, const unsigned int nitems, int *sorted ) {
    for ( unsigned int pos = 0; pos < nitems; pos++ ) {
        const unsigned int dst = sort_idx[pos];
        sorted[dst] = orig[pos];
    }
} |
11,219 | #include <chrono>
#include <iostream>
//Kernel definition
// Copies I elements of type T per thread from `in` to `out`.
// The thread block is partitioned into sub-domains of L consecutive threads;
// each sub-domain owns a contiguous chunk of I*L elements.  sd_size and
// block_size are carried along by the caller but not used here.
template<typename T>
__global__
void oftKernel (T* out,
                T* in,
                const unsigned int sd_size,
                const unsigned int block_size,
                const unsigned int I,
                const unsigned int L)
{
    const unsigned int sd   = threadIdx.x / L;  // sub-domain index within the block
    const unsigned int lane = threadIdx.x % L;  // position inside the sub-domain
    // First element of this sub-domain's chunk.
    const unsigned int base = blockIdx.x * blockDim.x * I + sd * L * I;
    // Consecutive lanes touch consecutive addresses each round, so every
    // iteration issues one coalesced load and one coalesced store.
    for (unsigned int i = 0; i < I; i++)
    {
        const unsigned int el = base + i * L + lane;
        out[el] = in[el];
    }
}
// Benchmarks the oftKernel copy for five element types (char, short, int,
// int2, int4) and eleven per-thread element counts I = 1..1024.  Results are
// printed as a nested Python tuple wrapped in np.array(...) so the output can
// be pasted directly into a plotting script.
int main () {
using namespace std::chrono;
std::cout << "np.array("; //output the results so that they can be read easily by python
std::cout << "(";
// Outer sweep: one element type per iteration (selected via Tindx).
for (int Tindx = 1; Tindx <= 5; Tindx++)
{
std::cout << "(";
// Inner sweep: per-thread element count I over powers of two 1..1024.
for(int j = 0; j <= 10; j++)
{
unsigned int I = 1 << j;
unsigned int size = 1 << 30; // total bytes per kernel invocation (1 GiB)
unsigned int L = 32; // sub-domain width in threads; matches the warp size
unsigned int N = 16; // sub-domains per thread block
unsigned int sd_size;
// Bytes handled by one sub-domain for the current element type.
switch(Tindx)
{
case 1 :
sd_size = I * L * sizeof(char);
break;
case 2 :
sd_size = I * L * sizeof(short);
break;
case 3 :
sd_size = I * L * sizeof(int);
break;
case 4 :
sd_size = I * L * sizeof(int2);
break;
case 5 :
sd_size = I * L * sizeof(int4);
break;
}
unsigned int block_size = sd_size * N; // bytes handled by one thread block
unsigned int block_amount = size / block_size; // grid size to cover `size` bytes (integer division; any remainder is not copied)
void* out;
void* in;
auto err1 = cudaMalloc(&out, block_size * block_amount);
auto err2 = cudaMalloc(&in, block_size * block_amount);
// size_t free;
// size_t total;
// auto err3 = cudaMemGetInfo(&free, &total);
// NOTE(review): err1 is never inspected — only the second allocation is
// checked; confirm whether the first allocation failing is acceptable here.
if (err2 != cudaSuccess)
{
std::cout << "ERROR: " << cudaGetErrorString(err2) << std::endl;
}
// for (int x = 1; x <= 10; x++) {
// oftKernel<<<block_amount, L * N >>> (out, in, sd_size, block_size, I, L);
// cudaDeviceSynchronize();
// }
// std::cout<<"free:" <<free << " total:" << total << " savedArrays: " << (total - free)/ (block_size * block_amount) << " j:" << j << " Tindx:" << Tindx << std::endl;
// cudaFree(out);
// cudaFree(in);
// Warm-up launch so the timed loop below does not include one-time costs
// (context setup, caches, clocks ramping up).
switch(Tindx)
{
case 1 :
oftKernel<<<block_amount, L * N >>> (static_cast<char*> (out), static_cast<char*> (in), sd_size, block_size, I, L);
break;
case 2 :
oftKernel<<<block_amount, L * N >>> (static_cast<short*> (out), static_cast<short*> (in), sd_size, block_size, I, L);
break;
case 3 :
oftKernel<<<block_amount, L * N >>> (static_cast<int*> (out), static_cast<int*> (in), sd_size, block_size, I, L);
break;
case 4 :
oftKernel<<<block_amount, L * N >>> (static_cast<int2*> (out), static_cast<int2*> (in), sd_size, block_size, I, L);
break;
case 5 :
oftKernel<<<block_amount, L * N >>> (static_cast<int4*> (out), static_cast<int4*> (in), sd_size, block_size, I, L);
break;
}
cudaDeviceSynchronize();
// Time measurement point 1: wall-clock start of the timed section.
high_resolution_clock::time_point timeBefore = high_resolution_clock::now();
for(int x = 1; x <= 100; x++)//run 100 times for better measurement accuracy
{
switch(Tindx)
{
case 1 :
oftKernel<<<block_amount, L * N >>> (static_cast<char*> (out), static_cast<char*> (in), sd_size, block_size, I, L);
break;
case 2 :
oftKernel<<<block_amount, L * N >>> (static_cast<short*> (out), static_cast<short*> (in), sd_size, block_size, I, L);
break;
case 3 :
oftKernel<<<block_amount, L * N >>> (static_cast<int*> (out), static_cast<int*> (in), sd_size, block_size, I, L);
break;
case 4 :
oftKernel<<<block_amount, L * N >>> (static_cast<int2*> (out), static_cast<int2*> (in), sd_size, block_size, I, L);
break;
case 5 :
oftKernel<<<block_amount, L * N >>> (static_cast<int4*> (out), static_cast<int4*> (in), sd_size, block_size, I, L);
break;
}
// Synchronize each iteration so kernel execution time (not just launch
// latency) is captured by the host-side clock.
cudaDeviceSynchronize();
auto lstErr = cudaGetLastError();
if ( cudaSuccess != lstErr )
{
std::cout << lstErr << ": " << cudaGetErrorString(lstErr) << std::endl;
}
}
// oftKernel<<<block_amount, L * N >>> (out, in, sd_size, block_size, I, L);
// std::cout<< "size of out:" << sizeof(out) << "tindx:" << Tindx << " block_amount:" << block_amount << " L:" << L << " N:" << N << " block_size: " << block_size << std::endl;
// cudaDeviceSynchronize();
// oftKernel<<<block_amount, L * N >>> (static_cast<int4*> (out), static_cast<int4*> (in), sd_size, block_size, I, L);
// cudaDeviceSynchronize();
// Time measurement point 2: wall-clock end of the timed section.
high_resolution_clock::time_point timeAfter = high_resolution_clock::now();
// Output the elapsed time in seconds for this (type, I) configuration.
duration<double> time_span = duration_cast<duration<double>>(timeAfter - timeBefore);
std::cout << time_span.count();
cudaFree(out);
cudaFree(in);
if( j != 10) {std::cout << ",";} //output a , if we aren't the last element of the for loop
}
std::cout << ")";
if( Tindx != 5) {std::cout << ",";} //output a , if we aren't the last element of the for loop
}
std::cout << ")";
std::cout << ")" << std::endl;
return 0;
}
|
11,220 | #include <cuda.h>
#include <assert.h>
#include <stdio.h>
// this method rewinds a matrix
// Reorders a tensor from channel-major to batch-major layout using a shared
// memory tile to keep both the gathers and the scatters coalesced.
//   a: input,  laid out as [partition][channel][spatial(count)][batch]
//   b: output, laid out as [partition][batch][spatial(count)][channel]
// Launch contract (from the call sites): blockDim.x == parallel_threads,
// grid = (count, (channels_per_partition/parallel_threads) * (batch/parallel_threads), partition).
// Requires batch and channels_per_partition to be multiples of parallel_threads.
template <int parallel_threads>
__global__ static void _cwc_kern_reorder_matrix_major(float* a, float* b, const int count, const int channels_per_partition, const int partition, const int batch)
{
assert(blockDim.x == parallel_threads);
// Decompose blockIdx.y into a (channel group, batch group) pair.
const int batch_group_idx = blockIdx.y % (batch / parallel_threads);
const int channel_group_idx = blockIdx.y / (batch / parallel_threads);
// Advance both base pointers to this block's parallel_threads x parallel_threads tile.
a += (blockIdx.z * count * channels_per_partition + blockIdx.x + channel_group_idx * parallel_threads * count) * batch + batch_group_idx * parallel_threads;
b += (blockIdx.z * count * batch + batch_group_idx * parallel_threads * count + blockIdx.x) * channels_per_partition + channel_group_idx * parallel_threads;
// Staging tile: written row-wise from `a`, read column-wise into `b`,
// which is what performs the transpose.
__shared__ float prod[parallel_threads][parallel_threads];
int i;
#pragma unroll
for (i = 0; i < parallel_threads; i++)
prod[i][threadIdx.x] = a[i * count * batch + threadIdx.x];
// Barrier: the tile must be fully written before any thread reads it transposed.
__syncthreads();
#pragma unroll
for (i = 0; i < parallel_threads; i++)
b[i * count * channels_per_partition + threadIdx.x] = prod[threadIdx.x][i];
__syncthreads();
}
// Host driver: fills two host activation tensors with synthetic data,
// uploads them, and runs the channel-major -> batch-major reorder kernel on
// each.  Fixes over the original: the kernels are now synchronized and
// error-checked before exit, and the chin/chout device buffers are freed.
int main(int argc, char** argv)
{
    float* in = 0;
    float* out = 0;
    cudaMalloc(&in, sizeof(float) * (55 * 55 * 96 * 256));
    cudaMalloc(&out, sizeof(float) * (27 * 27 * 256 * 256));
    float* in_host = 0;
    float* out_host = 0;
    int i, j, c, k;
    // Synthetic input: value depends on channel and batch index only.
    cudaMallocHost(&in_host, sizeof(float) * 55 * 55 * 96 * 128);
    for (i = 0; i < 55; i++)
        for (j = 0; j < 55; j++)
            for (c = 0; c < 96; c++)
                for (k = 0; k < 128; k++)
                    in_host[i * 55 * 96 * 128 + j * 96 * 128 + c * 128 + k] = c * k;
    cudaMemcpy(in, in_host, sizeof(float) * 55 * 55 * 96 * 128, cudaMemcpyHostToDevice);
    cudaMallocHost(&out_host, sizeof(float) * 27 * 27 * 256 * 128);
    for (i = 0; i < 27; i++)
        for (j = 0; j < 27; j++)
            for (c = 0; c < 256; c++)
                for (k = 0; k < 128; k++)
                    out_host[i * 27 * 256 * 128 + j * 256 * 128 + c * 128 + k] = c * k;
    cudaMemcpy(out, out_host, sizeof(float) * 27 * 27 * 256 * 128, cudaMemcpyHostToDevice);
    float* chin = 0;
    float* chout = 0;
    cudaMalloc(&chin, sizeof(float) * (55 * 55 * 96 * 256));
    cudaMalloc(&chout, sizeof(float) * (27 * 27 * 256 * 256));
    // 2 partitions of 48 channels each, batch 128, 16x16 tiles.
    // (The dynamic shared-memory argument is redundant — the kernel declares
    // its tile statically — but harmless, so it is kept.)
    _cwc_kern_reorder_matrix_major
    <16>
    <<<dim3(55 * 55, (96 / 2 / 16) * (128 / 16), 2), 16, 16 * 16 * sizeof(float)>>>
    (in, chin, 55 * 55, 96 / 2, 2, 128);
    _cwc_kern_reorder_matrix_major
    <16>
    <<<dim3(27 * 27, (256 / 2 / 16) * (128 / 16), 2), 16, 16 * 16 * sizeof(float)>>>
    (out, chout, 27 * 27, 256 / 2, 2, 128);
    // BUG FIX: the original returned without waiting for the asynchronous
    // kernels and never surfaced launch/execution errors.
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess)
        printf("kernel launch failed: %s\n", cudaGetErrorString(err));
    err = cudaDeviceSynchronize();
    if (err != cudaSuccess)
        printf("kernel execution failed: %s\n", cudaGetErrorString(err));
    // BUG FIX: chin/chout were leaked in the original.
    cudaFree(chout);
    cudaFree(chin);
    cudaFree(out);
    cudaFree(in);
    cudaFreeHost(out_host);
    cudaFreeHost(in_host);
    return 0;
}
|
11,221 | #include "includes.h"
// Copies a length-n window of `ss` beginning at offset `indx` into `zz`:
// zz[0..n) = ss[indx..indx+n).  One element per thread, guarded so the
// ragged final block stays in bounds.
__global__ void sliceIntArray ( const int n, const int indx, const int *ss, int *zz ) {
    const int gid = blockIdx.x * blockDim.x + threadIdx.x;
    if ( gid >= n ) return;
    zz[gid] = ss[indx + gid];
} |
11,222 |
// Initialises the vertex coordinate and spacing arrays of a chunk.
// One thread per vertex index; the x and y extents are handled by
// independent guards, so a single gid may initialise one entry of each.
// There are x+1 (resp. y+1) vertices for x (resp. y) cells, and positions
// are shifted back by halo_depth cells from the chunk minimum.
__global__ void set_chunk_data_vertices(
        int x,
        int y,
        int halo_depth,
        double dx,
        double dy,
        double x_min,
        double y_min,
        double* vertex_x,
        double* vertex_y,
        double* vertex_dx,
        double* vertex_dy)
{
    const int gid = blockIdx.x*blockDim.x+threadIdx.x;
    if(gid <= x)
    {
        vertex_dx[gid] = dx;
        vertex_x[gid] = x_min + dx*(gid-halo_depth);
    }
    if(gid <= y)
    {
        vertex_dy[gid] = dy;
        vertex_y[gid] = y_min + dy*(gid-halo_depth);
    }
}
// Extended kernel for the chunk initialisation
// Derives per-cell quantities from the already-initialised vertex arrays:
// cell centres (midpoints of bounding vertices), cell spacings, cell
// volumes, and face areas.  Each guard covers a different array extent,
// so one gid may write several arrays.
__global__ void set_chunk_data(
        int x,
        int y,
        double dx,
        double dy,
        double* cell_x,
        double* cell_y,
        double* cell_dx,
        double* cell_dy,
        double* vertex_x,
        double* vertex_y,
        double* volume,
        double* x_area,
        double* y_area)
{
    const int gid = blockIdx.x*blockDim.x+threadIdx.x;
    if(gid < x)
    {
        cell_dx[gid] = dx;
        cell_x[gid] = 0.5*(vertex_x[gid]+vertex_x[gid+1]);
    }
    if(gid < y)
    {
        cell_dy[gid] = dy;
        cell_y[gid] = 0.5*(vertex_y[gid]+vertex_y[gid+1]);
    }
    // Uniform grid: every cell has the same volume and face areas.
    if(gid < x*y)
    {
        volume[gid] = dx*dy;
    }
    if(gid < (x+1)*y)
    {
        x_area[gid] = dy;
    }
    if(gid < x*(y+1))
    {
        y_area[gid] = dx;
    }
}
|
extern "C"
{
// Element-wise square: c[i] = a[i] * a[i], one element per thread.
// NOTE(review): there is no length parameter, so the launch configuration
// must exactly cover the arrays — confirm against the callers.
__global__ void vsquare_32(const float *a, float *c)
{
    int i = threadIdx.x+blockIdx.x*blockDim.x;
    // Fix: square in single precision.  The original widened to double,
    // forcing the slow FP64 pipeline.  The result is unchanged: the product
    // of two floats is exactly representable in double, so rounding that
    // double back to float equals a single-precision multiply bit-for-bit.
    float v = a[i];
    c[i] = v*v;
}
} |
11,224 | /*
* This program is a CUDA C program simulating the N-body system
* of two galaxies as PHY 241 FINAL PROJECTS
*
*/
/*
* TODO:(*for final project)
* 1. andromeda
* 2. report
* 3. presentation
 * *4. N-body galaxy code: generate 10^11 particles
 * *5. MATLAB: write a function to track the distance between the Milky Way and Andromeda
 * *6. change accel function to the N-body one.
 * *7. print mass[i], because the halo is dark matter. Or is there a better way to distinguish dark matter from the rings?
*/
#include <cuda.h>
#include <math.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <curand.h>
#include <curand_kernel.h>
#define PI 3.14159265
#define BUFFERSIZE 500
#define BLOCKSIZE 256
#define G 1.0
#define MASS_1 38.2352941
#define RMIN (7.733/4.5)
#define SOFTPARAMETER 0.000001
#define AndromedaXOffsetP -41.0882
#define AndromedaYOffsetP 68.3823
#define AndromedaZOffsetP -33.8634
#define AndromedaXOffsetV 0.0420
#define AndromedaYOffsetV -0.2504
#define AndromedaZOffsetV 0.1240
#define MilkwayXOffsetP 41.0882
#define MilkwayYOffsetP -68.3823
#define MilkwayZOffsetP 33.8634
#define MilkwayXOffsetV -0.0420
#define MilkwayYOffsetV 0.2504
#define MilkwayZOffsetV -0.1240
// Headers
void rotate(double* x, double* y, double *z, double n1, double n2, double n3, double theta);
__global__ void leapstep(unsigned long n, double *x, double *y, double *z, double *vx, double *vy, double *vz, double dt);
__global__ void accel(unsigned long n, double *x, double *y, double *z, double *vx, double *vy, double *vz, double* mass, double dt);
__global__ void printstate(double *x, double *y, double *z, unsigned long tnow);
void initialCondition_host_file(char *input1, char *input2, double **x, double **y, double **z, double **vx, double **vy, double **vz, double **mass, unsigned long *size);
void read_size_from_file(char *input, unsigned long *size) ;
/** Main function **/
/** Main function: kick-drift-kick leapfrog integration of the two-galaxy
 *  N-body system.  Each accel call applies a half kick (v += 0.5*dt*a), so
 *  the accel/leapstep/accel triple below is one full timestep. **/
int main(int argc, char *argv[]) {
/*
 * Handling commandline inputs and setting initial value of the arguments
 * 1. number of steps (mstep)
 * 2. warp (nout) - print every nout-th step
 * 3. offset (start printing position)
 * 4. timestamp (dt)
 *
 */
unsigned long mstep, nout, offset, tnow = 0, n;
double dt, *x, *y, *z, *vx, *vy, *vz, *mass;
mstep = (argc > 1) ? atoi(argv[1]) : 100;
// NOTE(review): nout is used as a modulus below; nout == 0 from argv would
// divide by zero — confirm callers never pass 0.
nout = (argc > 2) ? atoi(argv[2]) : 1;
offset = (argc > 3) ? atoi(argv[3]) : 0;
// Default dt: 1/40th of the circular orbital period at radius RMIN.
dt = (argc > 4) ? atof(argv[4]) : (2.0 * PI * RMIN * RMIN) / (sqrt(G * MASS_1) * 40.0);
// Loads both galaxies; x..mass become device pointers into one allocation.
initialCondition_host_file("milky_way.dat", "andromeda.dat", &x, &y, &z, &vx, &vy, &vz, &mass, &n);
unsigned long grids = ceil((double)n / BLOCKSIZE), threads = BLOCKSIZE;
/*
 * Use cudaDeviceSetLimit() to change the buffer size of printf
 * used in kernel functions to solve the problem encountered before:
 * cannot print more than 4096 lines of data using printf
 *
 */
cudaDeviceSetLimit(cudaLimitPrintfFifoSize, n * BUFFERSIZE);
/* Warm-up phase: integrate `offset` steps without printing any state. */
for (unsigned long i = 0; i < offset; i++, tnow++){
accel<<<grids, BLOCKSIZE>>> (n, x, y, z, vx, vy, vz, mass, dt);
cudaDeviceSynchronize();
leapstep<<<grids, BLOCKSIZE>>> (n, x, y, z, vx, vy, vz, dt);
cudaDeviceSynchronize();
accel<<<grids, BLOCKSIZE>>> (n, x, y, z, vx, vy, vz, mass, dt);
cudaDeviceSynchronize();
}
/* Main loop: print every nout-th step, then advance one leapfrog step. */
for (unsigned long i = offset; i < mstep; i++, tnow++) {
if(i % nout == 0) {
printstate<<<grids, threads>>> (x, y, z, tnow);
cudaDeviceSynchronize();
}
accel<<<grids, BLOCKSIZE>>> (n, x, y, z, vx, vy, vz, mass, dt);
cudaDeviceSynchronize();
leapstep<<<grids, BLOCKSIZE>>> (n, x, y, z, vx, vy, vz, dt);
cudaDeviceSynchronize();
accel<<<grids, BLOCKSIZE>>> (n, x, y, z, vx, vy, vz, mass, dt);
cudaDeviceSynchronize();
}
// Print the final state if it falls on an output step.
if(mstep % nout == 0) {
printstate<<<grids, BLOCKSIZE>>>(x, y, z, tnow);
}
cudaDeviceSynchronize();
// After finishing, free the allocated memory.  x is the base pointer of the
// single cudaMalloc made in initialCondition_host_file (y..mass are offsets
// into it), so this one free releases everything.
cudaFree(x);
// Exit the current thread
return 0;
}
/* Rotate the point (*x, *y, *z) in place about the axis (n1, n2, n3) by the
 * angle -theta, using the Rodrigues rotation-matrix form.  The axis is
 * expected to be a unit vector (callers pass (cos w, sin w, 0)). */
void rotate(double* x, double* y, double *z, double n1, double n2, double n3, double theta) {
    const double sigma = -theta;           /* rotation is applied with reversed sign */
    const double c = cos(sigma);
    const double s = sin(sigma);
    const double a = 1 - cos(sigma);       /* versine factor of the Rodrigues matrix */
    const double px = *x, py = *y, pz = *z;
    const double rx = ( a * n1 * n1 + c ) * px + ( a * n1 * n2 - s * n3 ) * py + ( a * n1 * n3 + s * n2 ) * pz;
    const double ry = ( a * n1 * n2 + s * n3 ) * px + ( a * n2 * n2 + c ) * py + ( a * n2 * n3 - s * n1 ) * pz;
    const double rz = ( a * n1 * n3 - s * n2 ) * px + ( a * n2 * n3 + s * n1 ) * py + ( a * n3 * n3 + c ) * pz;
    *x = rx;
    *y = ry;
    *z = rz;
}
/* Drift step of the leapfrog integrator: advance every position by a full
 * timestep using the current (half-kicked) velocities, p += dt * v.
 * One body per thread, guarded for the ragged final block. */
__global__ void leapstep(unsigned long n, double *x, double *y, double *z, double *vx, double *vy, double *vz, double dt) {
    const unsigned long idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= n) return;
    x[idx] += dt * vx[idx];
    y[idx] += dt * vy[idx];
    z[idx] += dt * vz[idx];
}
/*
 * All-pairs O(n^2) gravitational acceleration, tiled through shared memory,
 * followed by a half kick: v += 0.5 * dt * a.  The integrator calls this
 * twice per step (before and after leapstep) to form kick-drift-kick.
 */
__global__ void accel(unsigned long n, double *x, double *y, double *z, double *vx, double *vy, double *vz, double* mass, double dt) {
    const unsigned long serial = blockIdx.x * blockDim.x + threadIdx.x;
    const unsigned long tdx = threadIdx.x;
    // One tile of body positions/masses staged in shared memory per iteration.
    __shared__ double lx[BLOCKSIZE];
    __shared__ double ly[BLOCKSIZE];
    __shared__ double lz[BLOCKSIZE];
    __shared__ double lm[BLOCKSIZE];
    double ax = 0.0, ay = 0.0, az = 0.0;
    double norm;
    // Initialised so out-of-range threads never carry uninitialised values
    // into the tile loop below (they still help load tiles).
    double thisX = 0.0, thisY = 0.0, thisZ = 0.0;
    if (serial < n) {
        thisX = x[serial];
        thisY = y[serial];
        thisZ = z[serial];
    }
    for (unsigned long i = 0; i < gridDim.x; i++) {
        unsigned long index = i * blockDim.x + tdx;
        if (index < n) {
            // Copy this tile from global memory.
            // BUG FIX: the original stored y[index] into lz and z[index]
            // into ly, mixing the y and z axes in every pairwise distance.
            lx[tdx] = x[index];
            ly[tdx] = y[index];
            lz[tdx] = z[index];
            lm[tdx] = mass[index];
        }
        __syncthreads();
        // Accumulate the acceleration contributed by every body in the tile.
        #pragma unroll
        for (unsigned long j = 0; j < BLOCKSIZE; j++) {
            unsigned long pos = i * blockDim.x + j;
            if (pos >= n) {
                continue;
            }
            // Softened |r|^3; SOFTPARAMETER avoids the r=0 singularity and
            // suppresses the self-interaction term when pos == serial.
            norm = pow(SOFTPARAMETER + pow(thisX - lx[j], 2) + pow(thisY - ly[j], 2) + pow(thisZ - lz[j], 2), 1.5);
            ax += - G * lm[j] * (thisX - lx[j]) / norm;
            ay += - G * lm[j] * (thisY - ly[j]) / norm;
            az += - G * lm[j] * (thisZ - lz[j]) / norm;
        }
        // Barrier before the next iteration overwrites the tile.
        __syncthreads();
    }
    if (serial < n) {
        // Half kick: accel runs twice per timestep.
        vx[serial] += 0.5 * dt * ax;
        vy[serial] += 0.5 * dt * ay;
        vz[serial] += 0.5 * dt * az;
    }
}
/*
 * Prints one CSV row (id, x, y, z, step) per selected body via device printf.
 * NOTE(review): the hard-coded index windows (first 10000 bodies and
 * 44000..53999) presumably select the visible disk/ring particles of the two
 * galaxies — confirm against the input data layout.
 */
__global__ void printstate(double *x, double *y, double *z, unsigned long tnow) {
    const unsigned long serial = blockIdx.x * blockDim.x + threadIdx.x;
    if(serial < 10000 || (serial >= 44000 && serial < 54000)){
        // BUG FIX: serial and tnow are unsigned long — the original used %d,
        // which is undefined behaviour and prints garbage on LP64 platforms.
        printf("%lu,%12.6lf,%12.6lf,%12.6lf,%lu\n", serial, x[serial], y[serial], z[serial], tnow);
    }
}
/*
 * Reads the two galaxy data files, rotates and offsets each galaxy into its
 * starting position/velocity, and uploads everything to the device.
 * On return, *x is the base of a single device allocation holding seven
 * contiguous arrays (x, y, z, vx, vy, vz, mass), each padded to a multiple
 * of BLOCKSIZE; *y..*mass point into that same allocation.  *size is the
 * total particle count of both galaxies.
 * NOTE(review): fscanf return values and the host malloc result are not
 * checked; malformed input files would silently produce garbage data.
 */
void initialCondition_host_file(char *input1, char *input2, double **x, double **y, double **z, double **vx, double **vy, double **vz, double **mass, unsigned long *size) {
unsigned long s1, s2;
// Total particle count = sum of both file headers.
read_size_from_file(input1, &s1);
(*size) = s1;
read_size_from_file(input2, &s2);
(*size) += s2;
// Pad the per-array length up to a whole number of blocks.
unsigned long numOfBlocks = ceil(((double)(*size)) / BLOCKSIZE);
// Initial local data array: one host malloc split into seven sub-arrays.
double *lx, *ly, *lz, *lvx, *lvy, *lvz, *lm;
lx = (double*) malloc(7 * numOfBlocks * BLOCKSIZE * sizeof(double));
ly = lx + numOfBlocks * BLOCKSIZE;
lz = ly + numOfBlocks * BLOCKSIZE;
lvx = lz + numOfBlocks * BLOCKSIZE;
lvy = lvx + numOfBlocks * BLOCKSIZE;
lvz = lvy + numOfBlocks * BLOCKSIZE;
lm = lvz + numOfBlocks * BLOCKSIZE;
// Read data from file1 (Milky Way).
FILE *fp = fopen(input1, "r");
if(fp == NULL){
printf("Error: fail to open file 1\n");
exit(-1);
}
unsigned long count = 0;
// Skip the header line (particle count and a scalar).
unsigned long junk1;
double junk2;
fscanf(fp, "%lu %lf\n", &junk1, &junk2);
// Orientation of the Milky Way disk: rotate by sigma about the in-plane
// axis (cos omega, sin omega, 0).
double omega = 0.0;
double sigma = PI / 2.0;
while((!feof(fp)) && (count < s1)){
fscanf(fp, "%lf %lf %lf %lf %lf %lf %lf\n", lm + count, lx + count, ly + count, lz + count, lvx + count, lvy + count, lvz + count);
// Rotate both position and velocity, then translate/boost into place.
rotate(lx + count, ly + count, lz + count, cos(omega), sin(omega), 0, sigma);
rotate(lvx + count, lvy + count, lvz + count, cos(omega), sin(omega), 0, sigma);
*(lx + count) += MilkwayXOffsetP;
*(ly + count) += MilkwayYOffsetP;
*(lz + count) += MilkwayZOffsetP;
*(lvx + count) += MilkwayXOffsetV;
*(lvy + count) += MilkwayYOffsetV;
*(lvz + count) += MilkwayZOffsetV;
count++;
}
fclose(fp);
// Read data from file2 (Andromeda), appended after the Milky Way particles.
fp = fopen(input2, "r");
if(fp == NULL){
printf("Error: fail to open file 2\n");
exit(-1);
}
// Skip first line (header).
fscanf(fp, "%lu %lf\n", &junk1, &junk2);
// Andromeda gets its own disk orientation.
omega = - 2.0 * PI / 3.0;
sigma = PI / 6.0;
while((!feof(fp)) && (count < (*size))){
fscanf(fp, "%lf %lf %lf %lf %lf %lf %lf\n", lm + count, lx + count, ly + count, lz + count, lvx + count, lvy + count, lvz + count);
rotate(lx + count, ly + count, lz + count, cos(omega), sin(omega), 0, sigma);
rotate(lvx + count, lvy + count, lvz + count, cos(omega), sin(omega), 0, sigma);
*(lx + count) += AndromedaXOffsetP;
*(ly + count) += AndromedaYOffsetP;
*(lz + count) += AndromedaZOffsetP;
*(lvx + count) += AndromedaXOffsetV;
*(lvy + count) += AndromedaYOffsetV;
*(lvz + count) += AndromedaZOffsetV;
count++;
}
fclose(fp);
// Allocate device memory: one allocation mirroring the host layout, then
// carve out the seven array pointers and copy everything in one transfer.
cudaMalloc(x, 7 * numOfBlocks * BLOCKSIZE * sizeof(double));
(*y) = (*x) + numOfBlocks * BLOCKSIZE;
(*z) = (*y) + numOfBlocks * BLOCKSIZE;
(*vx) = (*z) + numOfBlocks * BLOCKSIZE;
(*vy) = (*vx) + numOfBlocks * BLOCKSIZE;
(*vz) = (*vy) + numOfBlocks * BLOCKSIZE;
(*mass) = (*vz) + numOfBlocks * BLOCKSIZE;
cudaMemcpy((*x), lx, 7 * numOfBlocks * BLOCKSIZE * sizeof(double), cudaMemcpyHostToDevice);
free(lx);
}
// Reads the particle count (first unsigned integer) from a snapshot file's
// header. Exits with a message on open/parse failure, matching the
// error-handling style used in initialCondition_host_file.
void read_size_from_file(char *input, unsigned long *size) {
    FILE *fp = fopen(input, "r");
    if (fp == NULL) {
        // FIX: the original dereferenced a NULL stream on a missing file.
        printf("Error: fail to open file %s\n", input);
        exit(-1);
    }
    if (fscanf(fp, "%lu", size) != 1) {
        // FIX: the original left *size uninitialized on a malformed header.
        printf("Error: fail to read size from file %s\n", input);
        fclose(fp);
        exit(-1);
    }
    fclose(fp);
}
|
11,225 | //#include "driver.cu"
#include <stdio.h>
#include <stdlib.h>
#include <cuda.h>
#include <cufft.h>
#include <stdlib.h>
#include <time.h>
#include <math.h>
#include <sys/types.h>
#include <fcntl.h>
#include <unistd.h>
#include <float.h>
#include <getopt.h>
#include <string.h>
#include <errno.h>
#include <assert.h>
#define NUM_EL 64
#define CHANNELS 25
#define PFB_CHANNELS 5
#define SAMPLES 4000
#define DEF_CUDA_DEVICE 0
#define checkCudaErrors(err) __checkCudaErrors(err, __FILE__, __LINE__)
char* g_inputData = NULL;
char2* g_outputData = NULL;
char* g_inputData_d = NULL;
char2* g_outputData_d = NULL;
// Loads the raw interleaved capture file (NUM_EL * CHANNELS * SAMPLES
// complex char pairs) into the global host buffer g_inputData.
// Returns EXIT_SUCCESS/EXIT_FAILURE. On failure the buffer is freed and
// reset so a retry does not leak.
int loadData(char* f){
    int readSize = NUM_EL * CHANNELS * SAMPLES * (2*sizeof(char));
    g_inputData = (char*) malloc(readSize);
    if(NULL == g_inputData) {
        (void) fprintf(stderr, "ERROR: Memory allocation failed! %s.\n", strerror(errno));
        return EXIT_FAILURE;
    }
    int file = open(f, O_RDONLY);
    if (file < EXIT_SUCCESS) {
        (void) fprintf(stderr, "ERROR: failed to open data file. %s\n", strerror(errno));
        // FIX: don't leak the freshly-allocated buffer on the error path.
        free(g_inputData);
        g_inputData = NULL;
        return EXIT_FAILURE;
    }
    int ret = read(file, g_inputData, readSize);
    if (ret < readSize) {
        // FIX: treat a short read like a failed read -- the kernels assume a
        // fully-populated buffer (the original accepted any non-negative count).
        (void) fprintf(stderr, "ERROR: failed to read data file. %s\n", strerror(errno));
        (void) close(file);
        free(g_inputData);
        g_inputData = NULL;
        return EXIT_FAILURE;
    }
    (void) close(file);
    return EXIT_SUCCESS;
}
// Terminates the process with file/line context when a CUDA runtime call
// has failed; no-op on cudaSuccess. Invoked via the checkCudaErrors macro.
void __checkCudaErrors(cudaError_t err, const char* file, const int line) {
    if (cudaSuccess == err) {
        return;
    }
    (void) fprintf(stderr, "ERROR: file <%s>, Line %d: %s\n",
                   file,
                   line,
                   cudaGetErrorString(err));
    exit(EXIT_FAILURE);
}
// Selects the CUDA device, allocates and zeroes the device-side input and
// output buffers, then uploads the host samples loaded by loadData().
// Returns EXIT_SUCCESS (checkCudaErrors aborts the process on any failure).
int init(){
    const int cudaDevice = DEF_CUDA_DEVICE;
    checkCudaErrors(cudaSetDevice(cudaDevice));

    const int inputSize  = NUM_EL * CHANNELS * SAMPLES * (2*sizeof(char));
    const int outputSize = SAMPLES * PFB_CHANNELS * NUM_EL * (2*sizeof(char));

    // Device input buffer: allocate and clear.
    checkCudaErrors(cudaMalloc((void **) &g_inputData_d, inputSize));
    checkCudaErrors(cudaMemset((void *) g_inputData_d, 0, inputSize));

    // Device output buffer: allocate and clear.
    checkCudaErrors(cudaMalloc((void **) &g_outputData_d, outputSize));
    checkCudaErrors(cudaMemset((void *) g_outputData_d, 0, outputSize));

    // Push the host capture to the device.
    checkCudaErrors(cudaMemcpy(g_inputData_d, g_inputData, inputSize, cudaMemcpyHostToDevice));
    return EXIT_SUCCESS;
}
// Kernel: extracts a contiguous band of PFB_CHANNELS channels, starting at
// channel PFB_CHANNELS*channelSelect, from the interleaved capture and packs
// the (re, im) char pairs into a compact char2 output array.
// Launch layout (see main): grid = (SAMPLES, PFB_CHANNELS), block = (1, NUM_EL),
// so blockIdx.x indexes the sample, blockIdx.y the channel within the band,
// and threadIdx.y the element -- NOTE(review): confirm against the capture
// format; the input is assumed ordered sample-major, then channel, then element.
__global__ void map(char* dataIn,
char2* dataOut,
int channelSelect) {
// select the channel range
int channelMin = PFB_CHANNELS*channelSelect;
int absIdx = 2 * blockDim.y*(blockIdx.x*CHANNELS + (channelMin+blockIdx.y)) + 2 * threadIdx.y; // times 2 because we are mapping a sequence of values to char2 array.
// Compact output index: only gridDim.y (= PFB_CHANNELS) channels survive.
int mapIdx = blockDim.y*(blockIdx.x*gridDim.y + blockIdx.y) + threadIdx.y;
dataOut[mapIdx].x = dataIn[absIdx];   // real part
dataOut[mapIdx].y = dataIn[absIdx+1]; // imaginary part
return;
}
// Entry point: loads the raw capture named on the command line, stages it on
// the GPU, runs the channel-extraction map kernel, and writes the packed
// char2 result to "outfile.dat". Returns EXIT_SUCCESS/EXIT_FAILURE.
int main(int argc, char *argv[]) {
    int ret = EXIT_SUCCESS;
    if(argc < 2) {
        (void) fprintf(stderr, "ERROR: Data filename not specified.\n");
        return EXIT_FAILURE;
    }
    char filename[256] = {0};
    (void) strncpy(filename, argv[1], 256);
    filename[255] = '\0';   // strncpy does not guarantee NUL-termination
    ret = loadData(filename);
    if (ret == EXIT_FAILURE) {
        return EXIT_FAILURE;
    }
    // BUG FIX: init()'s return value was assigned but never checked.
    ret = init();
    if (ret == EXIT_FAILURE) {
        return EXIT_FAILURE;
    }
    // run map: one block per (sample, band channel), one thread per element
    int select = 0;
    dim3 gridSize(SAMPLES, PFB_CHANNELS, 1);
    dim3 blockSize(1, NUM_EL, 1);
    map<<<gridSize, blockSize>>>(g_inputData_d, g_outputData_d, select);
    checkCudaErrors(cudaGetLastError());
    int outputSize = SAMPLES * PFB_CHANNELS * NUM_EL * (2*sizeof(char));
    g_outputData = (char2*) malloc(outputSize);
    if (NULL == g_outputData) {
        // FIX: the original passed a potentially-NULL pointer to cudaMemcpy.
        (void) fprintf(stderr, "ERROR: Memory allocation failed! %s.\n", strerror(errno));
        return EXIT_FAILURE;
    }
    checkCudaErrors(cudaMemcpy(g_outputData, g_outputData_d, outputSize, cudaMemcpyDeviceToHost));
    //output the true data as a check.
    /*int file = 0;
    char outfileFull[256] = "outfileFull.dat\0";
    file = open(outfile,
    O_CREAT | O_TRUNC | O_WRONLY,
    S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
    if(file < EXIT_SUCCESS) {
    (void) fprintf(stderr, "ERROR: writing outfile failed\n");
    return EXIT_FAILURE;
    }
    (void) write(file, g_inputData, SAMPLES*CHANNELS*NUM_EL*2*sizeof(char));
    (void) close(file); */
    // output the mapped data.
    int file = 0;
    char outfile[256] = "outfile.dat\0";
    file = open(outfile,
                O_CREAT | O_TRUNC | O_WRONLY,
                S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
    if(file < EXIT_SUCCESS) {
        (void) fprintf(stderr, "ERROR: writing outfile failed\n");
        return EXIT_FAILURE;
    }
    (void) write(file, g_outputData, outputSize);
    (void) close(file);
    // FIX: release host and device buffers (the original leaked all four).
    free(g_inputData);
    free(g_outputData);
    checkCudaErrors(cudaFree(g_inputData_d));
    checkCudaErrors(cudaFree(g_outputData_d));
    return EXIT_SUCCESS;
}
|
11,226 | #include "includes.h"
// Accumulates the sum of x[0..n) into *sum. The caller must zero *sum
// before launch.
// BUG FIX: the original did `*sum += x[i]` from every thread -- an
// unsynchronized read-modify-write race that loses almost all updates.
// atomicAdd makes the accumulation correct (the addition order, and hence
// float rounding, remains nondeterministic). Heavy contention on a single
// address serializes; fine for a correctness-first utility kernel.
__global__ void _bcnn_vsum_kernel(int n, float *x, float *sum)
{
    int i = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
    if (i < n)
        atomicAdd(sum, x[i]);
}
11,227 |
#include <cuda_runtime.h>
typedef unsigned char uint8_t;
// Clamps a float to the representable uint8 range [0, 255]; the fractional
// part is dropped by the implicit conversion on return.
static __device__ uint8_t cast(float value){
    if (value < 0) return 0;
    if (value > 255) return 255;
    return value;
}
// Converts one NV12 pixel per thread into packed 3-byte BGR.
// y: luma plane; uv: interleaved chroma plane (U at even byte, V at odd),
// both with `linesize` bytes per row. 4:2:0 subsampling: one UV pair serves
// a 2x2 luma block, hence oy>>1 for the chroma row and ox&~1 for the pair.
// `edge` = width*height bounds the flat pixel index.
// Coefficients look like limited-range BT.601 -- NOTE(review): confirm.
static __global__ void convert_nv12_to_bgr_kernel(
const uint8_t* y, const uint8_t* uv, int width, int height, int linesize, uint8_t* dst_bgr, int edge
){
int position = blockDim.x * blockIdx.x + threadIdx.x;
if (position >= edge) return;
int ox = position % width; // pixel x
int oy = position / width; // pixel y
const uint8_t& yvalue = y[oy * linesize + ox];
// Chroma row shared by two luma rows; clear the low bit of x so both
// pixels of a horizontal pair read the same UV sample.
int offset_uv = (oy >> 1) * linesize + (ox & 0xFFFFFFFE);
const uint8_t& u = uv[offset_uv + 0];
const uint8_t& v = uv[offset_uv + 1];
dst_bgr[position * 3 + 0] = cast(1.164f * (yvalue - 16.0f) + 2.018f * (u - 128.0f)); // B
dst_bgr[position * 3 + 1] = cast(1.164f * (yvalue - 16.0f) - 0.813f * (v - 128.0f) - 0.391f * (u - 128.0f)); // G
dst_bgr[position * 3 + 2] = cast(1.164f * (yvalue - 16.0f) + 1.596f * (v - 128.0f)); // R
}
// Converts one NV12 pixel per thread into packed 3-byte RGB.
// Identical math to convert_nv12_to_bgr_kernel; only the output channel
// order differs (R at offset 0, B at offset 2).
// Coefficients look like limited-range BT.601 -- NOTE(review): confirm.
static __global__ void convert_nv12_to_rgb_kernel(
const uint8_t* y, const uint8_t* uv, int width, int height, int linesize, uint8_t* dst_rgb, int edge
){
int position = blockDim.x * blockIdx.x + threadIdx.x;
if (position >= edge) return;
int ox = position % width; // pixel x
int oy = position / width; // pixel y
const uint8_t& yvalue = y[oy * linesize + ox];
// Same 4:2:0 chroma addressing as the BGR kernel.
int offset_uv = (oy >> 1) * linesize + (ox & 0xFFFFFFFE);
const uint8_t& u = uv[offset_uv + 0];
const uint8_t& v = uv[offset_uv + 1];
dst_rgb[position * 3 + 2] = cast(1.164f * (yvalue - 16.0f) + 2.018f * (u - 128.0f)); // B
dst_rgb[position * 3 + 1] = cast(1.164f * (yvalue - 16.0f) - 0.813f * (v - 128.0f) - 0.391f * (u - 128.0f)); // G
dst_rgb[position * 3 + 0] = cast(1.164f * (yvalue - 16.0f) + 1.596f * (v - 128.0f)); // R
}
// Launches convert_nv12_to_bgr_kernel over width*height pixels on `stream`
// (one thread per pixel, blocks of up to 512 threads).
// FIX: guard an empty image -- with total == 0 the original computed
// block = 0 and divided by zero when sizing the grid.
void convert_nv12_to_bgr_invoker(
    const uint8_t* y, const uint8_t* uv, int width, int height, int linesize, uint8_t* dst_bgr,
    cudaStream_t stream
){
    int total = width * height;
    if (total <= 0) return;
    int block = total < 512 ? total : 512;
    int grid = (total + block - 1) / block;   // ceil-div
    convert_nv12_to_bgr_kernel<<<grid, block, 0, stream>>>(
        y, uv, width, height, linesize,
        dst_bgr, total
    );
}
// Launches convert_nv12_to_rgb_kernel over width*height pixels on `stream`
// (one thread per pixel, blocks of up to 512 threads).
// FIX: guard an empty image -- with total == 0 the original computed
// block = 0 and divided by zero when sizing the grid.
void convert_nv12_to_rgb_invoker(
    const uint8_t* y, const uint8_t* uv, int width, int height, int linesize, uint8_t* dst_rgb,
    cudaStream_t stream
){
    int total = width * height;
    if (total <= 0) return;
    int block = total < 512 ? total : 512;
    int grid = (total + block - 1) / block;   // ceil-div
    convert_nv12_to_rgb_kernel<<<grid, block, 0, stream>>>(
        y, uv, width, height, linesize,
        dst_rgb, total
    );
}
11,228 | #include <stdio.h>
#include "cuda.cuh"
// Entry point: prints a banner and delegates all GPU work to run(),
// which is declared in cuda.cuh.
int main(int argc, char const* argv[])
{
    printf("hoge!\n");
    run();   // kernel launches live inside run()
    return 0;
}
|
11,229 | #include <stdio.h>
#include <cufft.h>
#define BLOCKSIZE 32
#define NUM_STREAMS 3
/**********/
/* iDivUp */
/*********/
int iDivUp(int a, int b) { return ((a % b) != 0) ? (a / b + 1) : (a / b); }
/********************/
/* CUDA ERROR CHECK */
/********************/
#define gpuErrchk(ans) { gpuAssert((ans), __FILE__, __LINE__); }
// Reports a failed CUDA call with file/line context; exits with the error
// code unless abort is false. Invoked through the gpuErrchk macro.
inline void gpuAssert(cudaError_t code, char *file, int line, bool abort=true)
{
    if (cudaSuccess == code) return;
    fprintf(stderr,"GPUassert: %s %s %d\n", cudaGetErrorString(code), file, line);
    if (abort) exit(code);
}
/******************/
/* SUMMING KERNEL */
/******************/
/******************/
/* SUMMING KERNEL */
/******************/
// Element-wise complex sum: out[i] = vec1[i] + vec2[i] + vec3[i] for i < N.
__global__ void kernel(float2 *vec1, float2 *vec2, float2 *vec3, float2 *out, int N) {
    const int idx = threadIdx.x + blockIdx.x * blockDim.x;
    if (idx >= N) return;
    float2 acc;
    acc.x = vec1[idx].x + vec2[idx].x + vec3[idx].x;
    acc.y = vec1[idx].y + vec2[idx].y + vec3[idx].y;
    out[idx] = acc;
}
/********/
/* MAIN */
/********/
/********/
/* MAIN */
/********/
// Demo: three streams each asynchronously upload one third of the input into
// distinct zero-padded N-length buffers, run full-length C2C FFTs, then the
// three spectra are summed element-wise on the GPU.
int main()
{
    const int N = 600000;
    const int Npartial = N / NUM_STREAMS;
    // --- Host input data initialization
    float2 *h_in1 = new float2[Npartial];
    float2 *h_in2 = new float2[Npartial];
    float2 *h_in3 = new float2[Npartial];
    for (int i = 0; i < Npartial; i++) {
        h_in1[i].x = 1.f; h_in1[i].y = 0.f;
        h_in2[i].x = 1.f; h_in2[i].y = 0.f;
        h_in3[i].x = 1.f; h_in3[i].y = 0.f;
    }
    // --- Host output data initialization
    float2 *h_out = new float2[N];
    // --- Registers host memory as page-locked (required for asynch cudaMemcpyAsync)
    gpuErrchk(cudaHostRegister(h_in1, Npartial*sizeof(float2), cudaHostRegisterPortable));
    gpuErrchk(cudaHostRegister(h_in2, Npartial*sizeof(float2), cudaHostRegisterPortable));
    gpuErrchk(cudaHostRegister(h_in3, Npartial*sizeof(float2), cudaHostRegisterPortable));
    // --- Device input data allocation
    float2 *d_in1;  gpuErrchk(cudaMalloc((void**)&d_in1, N*sizeof(float2)));
    float2 *d_in2;  gpuErrchk(cudaMalloc((void**)&d_in2, N*sizeof(float2)));
    float2 *d_in3;  gpuErrchk(cudaMalloc((void**)&d_in3, N*sizeof(float2)));
    float2 *d_out1; gpuErrchk(cudaMalloc((void**)&d_out1, N*sizeof(float2)));
    float2 *d_out2; gpuErrchk(cudaMalloc((void**)&d_out2, N*sizeof(float2)));
    float2 *d_out3; gpuErrchk(cudaMalloc((void**)&d_out3, N*sizeof(float2)));
    float2 *d_out;  gpuErrchk(cudaMalloc((void**)&d_out, N*sizeof(float2)));
    // --- Zero padding: each partial input lands at a different third of its buffer
    gpuErrchk(cudaMemset(d_in1, 0, N*sizeof(float2)));
    gpuErrchk(cudaMemset(d_in2, 0, N*sizeof(float2)));
    gpuErrchk(cudaMemset(d_in3, 0, N*sizeof(float2)));
    // --- Creates CUDA streams
    cudaStream_t streams[NUM_STREAMS];
    for (int i = 0; i < NUM_STREAMS; i++) gpuErrchk(cudaStreamCreate(&streams[i]));
    // --- Creates cuFFT plans and sets them in streams
    cufftHandle* plans = (cufftHandle*) malloc(sizeof(cufftHandle)*NUM_STREAMS);
    for (int i = 0; i < NUM_STREAMS; i++) {
        cufftPlan1d(&plans[i], N, CUFFT_C2C, 1);
        cufftSetStream(plans[i], streams[i]);
    }
    // --- Async memcopies and computations
    gpuErrchk(cudaMemcpyAsync(d_in1, h_in1, Npartial*sizeof(float2), cudaMemcpyHostToDevice, streams[0]));
    gpuErrchk(cudaMemcpyAsync(&d_in2[Npartial], h_in2, Npartial*sizeof(float2), cudaMemcpyHostToDevice, streams[1]));
    gpuErrchk(cudaMemcpyAsync(&d_in3[2*Npartial], h_in3, Npartial*sizeof(float2), cudaMemcpyHostToDevice, streams[2]));
    cufftExecC2C(plans[0], (cufftComplex*)d_in1, (cufftComplex*)d_out1, CUFFT_FORWARD);
    cufftExecC2C(plans[1], (cufftComplex*)d_in2, (cufftComplex*)d_out2, CUFFT_FORWARD);
    cufftExecC2C(plans[2], (cufftComplex*)d_in3, (cufftComplex*)d_out3, CUFFT_FORWARD);
    for(int i = 0; i < NUM_STREAMS; i++) gpuErrchk(cudaStreamSynchronize(streams[i]));
    // BUG FIX: the iDivUp arguments were swapped -- iDivUp(BLOCKSIZE, N)
    // always produced a single block, so only the first 32 of the 600000
    // output elements were ever summed.
    kernel<<<iDivUp(N, BLOCKSIZE), BLOCKSIZE>>>(d_out1, d_out2, d_out3, d_out, N);
    gpuErrchk(cudaPeekAtLastError());
    gpuErrchk(cudaDeviceSynchronize());
    gpuErrchk(cudaMemcpy(h_out, d_out, N*sizeof(float2), cudaMemcpyDeviceToHost));
    // for (int i=0; i<N; i++) printf("i = %i; real(h_out) = %f; imag(h_out) = %f\n", i, h_out[i].x, h_out[i].y);
    // --- Releases resources
    gpuErrchk(cudaHostUnregister(h_in1));
    gpuErrchk(cudaHostUnregister(h_in2));
    gpuErrchk(cudaHostUnregister(h_in3));
    gpuErrchk(cudaFree(d_in1));
    gpuErrchk(cudaFree(d_in2));
    gpuErrchk(cudaFree(d_in3));
    gpuErrchk(cudaFree(d_out1));
    gpuErrchk(cudaFree(d_out2));
    gpuErrchk(cudaFree(d_out3));
    gpuErrchk(cudaFree(d_out));
    // FIX: destroy the cuFFT plans and free the handle array (leaked before).
    for (int i = 0; i < NUM_STREAMS; i++) cufftDestroy(plans[i]);
    free(plans);
    for(int i = 0; i < NUM_STREAMS; i++) gpuErrchk(cudaStreamDestroy(streams[i]));
    delete[] h_in1;
    delete[] h_in2;
    delete[] h_in3;
    delete[] h_out;
    cudaDeviceReset();
    return 0;
}
11,230 | #include "includes.h"
/**
* Quantum Lattice Boltzmann
* (c) 2015 Fabian Thüring, ETH Zurich
*
* This file contains all the CUDA kernels and function that make use of the
* CUDA runtime API
*/
// Local includes
// ==== CONSTANTS ====
__constant__ unsigned int d_L;
__constant__ float d_dx;
__constant__ float d_dt;
__constant__ float d_mass;
__constant__ float d_g;
__constant__ unsigned int d_t;
__constant__ float d_scaling;
__constant__ int d_current_scene;
// ==== INITIALIZATION ====
// Writes the height (y) component of each vertex in the d_L x d_L grid VBO
// from the field d_ptr: y = d_scaling * |d_ptr[i*d_L + j]| - 0.005*d_L.
// d_L and d_scaling are __constant__ symbols set by the host; x/z components
// of the VBO are left untouched. Expects a 2D launch covering >= d_L x d_L
// threads (the guard discards the overhang).
__global__ void kernel_calculate_vertex_V(float3* vbo_ptr, float* d_ptr)
{
int i = blockIdx.x*blockDim.x + threadIdx.x; // row index
int j = blockIdx.y*blockDim.y + threadIdx.y; // column index
if(i < d_L && j < d_L)
vbo_ptr[d_L*i + j].y = d_scaling * fabsf( d_ptr[i*d_L +j] ) - 0.005f*d_L;
}
11,231 | #include <stdio.h>
// Element-wise integer addition: c[i] = a[i] + b[i] for i in [0, N);
// threads past the end do nothing.
__global__ void sum(int *a, int *b, int *c, int N) {
    const int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= N) return;
    c[idx] = a[idx] + b[idx];
}
//host function
// Host helper: fills a[0..N) with the constant value c.
void initialize(int *a, int c, int N) {
    for (int idx = 0; idx < N; ++idx) {
        a[idx] = c;
    }
}
// Host helper: prints the N elements of c separated by spaces, then a newline.
void printResultArray(int *c, int N) {
    for (int idx = 0; idx < N; ++idx) {
        printf("%d ", c[idx]);
    }
    printf("\n");
}
// Teaching demo: runs the same vector-add kernel with four launch
// configurations to show how the thread count limits which elements are
// computed. Expected outputs are documented at each case.
// NOTE(review): no CUDA error checking anywhere -- kernel or memcpy failures
// would silently print stale host data.
int main() {
//allocate host memory
int N = 10;
int *h_a = (int *)malloc(N*sizeof(int));
int *h_b = (int *)malloc(N*sizeof(int));
int *h_c = (int *)malloc(N*sizeof(int));
//intialize h_a to all 1s, h_b to all 5s (so each sum element is 6)
initialize(h_a,1,N);
initialize(h_b,5,N);
//allocate device memory
int *d_a, *d_b, *d_c;
cudaMalloc((void **)&d_a, N*sizeof(int));
cudaMalloc((void **)&d_b, N*sizeof(int));
cudaMalloc((void **)&d_c, N*sizeof(int));
//copy data from host to device
cudaMemcpy(d_a, h_a, N*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(d_b, h_b, N*sizeof(int), cudaMemcpyHostToDevice);
// Case 1: Execute the kernel with 1 block and 1 thread. With this
// case, there is only 1 thread with global id = 0 and thus we only do
// addition on the first element of the array.
// The expected output is therefore: 6 0 0 0 0 0 0 0 0 0
// (d_c starts zeroed only by virtue of the fresh allocation here; the
// later cases reset it explicitly with cudaMemset.)
//execute the kernel
sum<<<1,1>>>(d_a, d_b, d_c, N);
//copy data from device back to host (cudaMemcpy synchronizes)
cudaMemcpy(h_c, d_c, N*sizeof(int), cudaMemcpyDeviceToHost);
//print the result
printf("Case 1: 1 block, 1 thread per block\n");
printResultArray(h_c,N);
// Case 2: Execute the kernel with 1 block and 4 threads. With this
// case, there are 4 threads with global id = 0, 1, 2, 3 therefore,
// the device only perform additions on the first 4 elements of the
// array.
// The expected output is therefore: 6 6 6 6 0 0 0 0 0 0
// reset the d_c array
cudaMemset(d_c, 0, N*sizeof(int));
sum<<<1,4>>>(d_a, d_b, d_c, N);
//copy data from device back to host
cudaMemcpy(h_c, d_c, N*sizeof(int), cudaMemcpyDeviceToHost);
//print the result
printf("Case 2: 1 block, 4 threads per block\n");
printResultArray(h_c,N);
// Case 3: Execute the kernel with 4 blocks and 2 threads per
// block. With this case, we have total 8 threads with global id =
// 0, 1, 2, 3, 4, 5, 6, 7
// Therefore, the device only performs additions on the first 8
// elements of the array.
// The expected output is therefore: 6 6 6 6 6 6 6 6 0 0
// reset the d_c array
cudaMemset(d_c, 0, N*sizeof(int));
sum<<<4,2>>>(d_a, d_b, d_c, N);
//copy data from device back to host
cudaMemcpy(h_c, d_c, N*sizeof(int), cudaMemcpyDeviceToHost);
//print the result
printf("Case 3: 4 blocks, 2 threads per block\n");
printResultArray(h_c,N);
// Case 4: To fully do additions on every elements of the array, we
// must have >= N threads. For example, with 2 blocks and 10 threads
// per block, we will have total 20 threads which are more than
// enough to operate this kernel. Since we have the check tid<N in
// the kernel, we infact only use N threads to compute the array sum
// despite the fact that we have more than number of threads needed.
// The expected output is therefore: 6 6 6 6 6 6 6 6 6 6
//reset the d_c array
cudaMemset(d_c, 0, N*sizeof(int));
sum<<<2,10>>>(d_a, d_b, d_c, N);
//copy data from device back to host
cudaMemcpy(h_c, d_c, N*sizeof(int), cudaMemcpyDeviceToHost);
//print the result
printf("Case 4: 2 blocks, 10 threads per block\n");
printResultArray(h_c,N);
//free device memory
cudaFree(d_a);
cudaFree(d_b);
cudaFree(d_c);
//free host memory
free(h_a);
free(h_b);
free(h_c);
}
|
11,232 | #include "includes.h"
// One BFS/relabel sweep of a push-relabel style graph cut on a 4-connected
// image grid: each still-active pixel that has a neighbor at height
// *g_counter reachable through a positive-capacity residual edge is lifted
// to height *g_counter + 1 and deactivated; *g_over records that at least
// one pixel changed so the host keeps iterating.
// NOTE(review): width1/height1 appear to be the padded (allocated) grid
// dimensions and width/height the usable ones -- confirm against the caller;
// vertex_num1/height1 are accepted but unused here.
__global__ void kernel_bfs(int *g_left_weight, int *g_right_weight, int *g_down_weight, int *g_up_weight, int *g_graph_height, bool *g_pixel_mask, int vertex_num,int width,int height, int vertex_num1, int width1, int height1, bool *g_over, int *g_counter)
{
/*******************************
*threadId is calculated ******
*****************************/
int thid = __umul24(blockIdx.x, blockDim.x) + threadIdx.x ;
// Only process vertices still marked active.
if(thid < vertex_num && g_pixel_mask[thid] == true)
{
// 2D coordinates within the padded row stride width1.
int col = thid % width1 , row = thid / width1 ;
// Interior pixels only: every pixel here has all four neighbors.
if(col < width - 1 && col > 0 && row < height - 1 && row > 0 )
{
int height_l = 0, height_d = 0, height_u = 0 , height_r = 0 ;
// Heights of the right/left/down/up neighbors.
height_r = g_graph_height[thid+1] ;
height_l = g_graph_height[thid-1] ;
height_d = g_graph_height[thid+width1] ;
height_u = g_graph_height[thid-width1] ;
// A neighbor at the current frontier height with remaining capacity on
// the edge pointing back to this pixel lets us label this pixel next.
if(((height_l == (*g_counter) && g_right_weight[thid-1] > 0)) ||((height_d == (*g_counter) && g_up_weight[thid+width1] > 0) || ( height_r == (*g_counter) && g_left_weight[thid+1] > 0 ) || ( height_u == (*g_counter) && g_down_weight[thid-width1] > 0 ) ))
{
g_graph_height[thid] = (*g_counter) + 1 ;
g_pixel_mask[thid] = false ;   // visited: drop from future sweeps
*g_over = true ;               // signal the host another pass is needed
}
}
}
}
11,233 | #include <cuda.h>
#include <cuda_runtime.h>
#include <random>
#include <iostream>
#include <stdio.h>
#include <vector_types.h>
#define L 2048
#define M 2048
#define N 2048
using copy4_t = int4;
constexpr int l = L;
constexpr int m = M;
constexpr int n = N;
//constexpr int SHMEM_SIZE = 2 >> 14; // 32kB
constexpr int tbp = 512;
constexpr int nblocks = l * n / tbp;
// This kernel is actually 10x faster than NaiveCOrdered because of memory coalescing.
// Naive matrix multiply C = A * B, all matrices row-major
// (A: l x m, B: m x n, C: l x n). One thread per output element; flat 1D
// launch of l*n threads in blocks of tbp. Consecutive threads cover
// consecutive columns of C, so B and C accesses coalesce (per the file's
// benchmark notes).
__global__
void matrixMultiplyNaive(float* A, float* B, float* C, int l, int m, int n) {
    const int flat = blockIdx.x * tbp + threadIdx.x;
    const int row = flat / n;
    const int col = flat % n;
    float acc = 0;
    for (int k = 0; k < m; ++k)
        acc += A[row * m + k] * B[k * n + col];
    C[row * n + col] = acc;
}
// Naive matrix multiply C = A * B where A and C are row-major and B is
// column-major (element (k, col) of B lives at B[k + col * m]).
// One thread per output element, flat 1D launch in blocks of tbp.
__global__
void matrixMultiplyNaiveBColOrdered(float* A, float* B, float* C, int l, int m, int n) {
    const int flat = blockIdx.x * tbp + threadIdx.x;
    const int row = flat / n;
    const int col = flat % n;
    float acc = 0;
    for (int k = 0; k < m; ++k)
        acc += A[row * m + k] * B[col * m + k];
    C[row * n + col] = acc;
}
// Naive matrix multiply C = A * B where A is column-major (l x m) and
// B, C are row-major. One thread per output element, flat 1D launch.
// BUG FIX: element (row, k) of a column-major l x m matrix sits at
// A[row + k * l]; the original indexed A[row + k * m], which only works
// by accident when l == m (as in this benchmark's 2048^3 setup).
__global__
void matrixMultiplyNaiveAColOrdered(float* A, float* B, float* C, int l, int m, int n) {
    const int row = (blockIdx.x * tbp + threadIdx.x) / n;
    const int col = (blockIdx.x * tbp + threadIdx.x) % n;
    float tmp = 0;
    for (int k = 0; k < m; ++k)
        tmp += A[row + k * l] * B[col + k * n];
    C[col + n * row] = tmp;
}
/*
// This is a convenience function for allocating a pair of a host and device pointer
template<typename T>
std::pair<T*, T*> cudaAlloc(size_t nobj) {
T* host_ptr = new T[nobj * sizeof(T)];
T* device_ptr;
cudaError_t err;
if ((err = cudaMalloc((void **) &device_ptr, nobj * sizeof(T))) != cudaSuccess) {
std::cout << "CUDA error: " << cudaGetErrorString(err) << std::endl;
exit(1):
}
return {host_ptr, device_ptr};
}
*/
//constexpr int TILE_WIDTH = 32;
#define TILE_WIDTH 32
// We are going to create a (k x k) tiling - k must divide n and l
// Tiled shared-memory matrix multiply: C = A * B, all row-major
// (A: l x m, B: m x n, C: l x n).
// Launch: block = (TILE_WIDTH, TILE_WIDTH), grid = (n/TILE_WIDTH, l/TILE_WIDTH);
// requires TILE_WIDTH to divide l, m and n.
__global__
void matrixMultiplyShmem(float* A, float* B, float* C, int l, int m, int n) {
    // Shared-memory staging buffers for one tile of A and one tile of B.
    __shared__ float bufA[TILE_WIDTH][TILE_WIDTH];
    __shared__ float bufB[TILE_WIDTH][TILE_WIDTH];
    const int row = blockDim.y * blockIdx.y + threadIdx.y;
    const int col = blockDim.x * blockIdx.x + threadIdx.x;
    float result = 0; // accumulator for C[row][col]
    // BUG FIX: the loop bound used the compile-time macro M instead of the
    // runtime parameter m, silently ignoring the caller's inner dimension.
    for (int tile_idx = 0; tile_idx < m / TILE_WIDTH; ++tile_idx) {
        // Load the tile from global memory into shared memory and wait until
        // all threads in the block have performed their load.
        bufA[threadIdx.y][threadIdx.x] = A[row * m + tile_idx * TILE_WIDTH + threadIdx.x];
        bufB[threadIdx.y][threadIdx.x] = B[n * (tile_idx * TILE_WIDTH + threadIdx.y) + col];
        __syncthreads();
        // Add this tile's contribution to the dot product.
        #pragma unroll
        for (int k = 0; k < TILE_WIDTH; ++k)
            result += bufA[threadIdx.y][k] * bufB[k][threadIdx.x];
        // Required barrier: without it a thread could overwrite shared memory
        // that other threads are still reading (see Kirk & Hwu, p.93 -- the
        // original author verified removal breaks the results).
        __syncthreads();
    }
    C[col + n * row] = result;
}
/*
* Steps:
* 1. Copy C into shared memory
* 2. Put C from shared memory into an accumulator fragment
*/
constexpr int BITS_PER_BYTE = 8;
constexpr int THREADS_PER_WARP = 32;
constexpr int WMMA_BIT_ALIGNMENT = 128;
//constexpr int SHMEM_HALF_BANK_OFFSET = WMMA_BIT_ALIGNMENT / (BITS_PER_BYTE * sizeof(half));
constexpr int SHMEM_FLOAT_BANK_OFFSET = WMMA_BIT_ALIGNMENT / (BITS_PER_BYTE * sizeof(float));
// How should we create tiles of the output matrix and assign threads/warps to those
// tiles? What part of the output is each thread/warp responsible for?
//
// Definitions:
// 1) A WMMA tile is the portion of a matrix handled by the wmma::fragment type and its
// associated operations
// 2) A warp tile is a (WARP_ROWS x WARP_COLS) block of WMMA tiles of the output matrix.
// Each warp is responsible for computing the matrix elements in its warp tile.
// 3) A block tile is comprised of (BLOCK_WARP_ROWS x BLOCK_ROW_COLS) warp tiles. This
// is the portion of the output matrix that each thread block is computing.
// 4) A constant labeled with {A}_TILE_WIDTH or {A}_TILE_HEIGHT is referring to a number
// of matrix elements along the rows or columns, respectively, of a tile of type A
// 5) A constant labeled with {A}_{B}_ROWS or {A}_{B}_COLS is referring to the number of
// tiles of type B along the rows or columns, respectively, of a tile of type A.
constexpr int WARP_WMMA_COLS = 2;
constexpr int WARP_WMMA_ROWS = 4;
constexpr int BLOCK_WARP_ROWS = 2;
constexpr int BLOCK_WARP_COLS = 4;
constexpr int BLOCK_WMMA_ROWS = BLOCK_WARP_ROWS * WARP_WMMA_ROWS; // 8
constexpr int BLOCK_WMMA_COLS = BLOCK_WARP_COLS * WARP_WMMA_COLS; // 8
// This is the number of WMMA fragments along the M-dimension for the tiles of A and B
constexpr int BLOCK_WMMA_M_DIM = 4;
constexpr int WMMA_TILE_WIDTH = 16;
constexpr int WMMA_TILE_HEIGHT = 16;
constexpr int IRRELEVANT_DIM = 16;
constexpr int WARP_TILE_WIDTH = WARP_WMMA_COLS * WMMA_TILE_WIDTH;
constexpr int WARP_TILE_HEIGHT = WARP_WMMA_ROWS * WMMA_TILE_HEIGHT;
constexpr int BLOCK_TILE_WIDTH = WMMA_TILE_WIDTH * WARP_WMMA_COLS * BLOCK_WARP_COLS; // 16 * 2 * 4 = 128
constexpr int BLOCK_TILE_HEIGHT = WMMA_TILE_HEIGHT * WARP_WMMA_ROWS * BLOCK_WARP_ROWS; // 16 * 4 * 2 = 128
constexpr int WARPS_PER_BLOCK = BLOCK_WMMA_ROWS * BLOCK_WMMA_COLS / (WARP_WMMA_COLS * WARP_WMMA_ROWS);
constexpr int THREADS_PER_BLOCK = THREADS_PER_WARP * WARPS_PER_BLOCK;
// Since we only have 65kB of shared memory available per block,
// we choose l and m such that:
// l * m * 16 * 16 * 2 * 2 <= 65kB => l * m <= 64
// We choose l = 8 and m = 4.
/*
template<typename T>
__device__
inline T* offset(T* ptr, int rows, int cols, int stride) {
return ptr + stride * rows + cols;
}
// Args:
// A: Row major ordered matrix of size l x m
// B: Row major ordered matrix of size m x n
// C: Row major ordered matrix of size l x n
__global__
void matrixMultiplyWmma(float* A, float* B, float* C, int l, int m, int n) {
extern __shared__ half shmem[][WMMA_TILE_WIDTH * BLOCK_WMMA_M_DIM + SHMEM_HALF_BANK_OFFSET];
const int tid = threadIdx.x + threadIdx.y * blockDim.x;
const int block_warp_x = (tid / THREADS_PER_WARP) % BLOCK_WARP_COLS;
const int block_warp_y = (tid / THREADS_PER_WARP) / BLOCK_WARP_COLS;
const int lane_id = tid % THREADS_PER_WARP;
const int warp_id = tid / THREADS_PER_WARP;
const int tile_col_offset = blockIdx.x * BLOCK_TILE_WIDTH;
const int tile_row_offset = blockIdx.y * BLOCK_TILE_HEIGHT;
// This is where the current warp will write C to in shmem
float *shmem_float_ptr = offset(
(float *)&shmem[0][0],
warp_id * WMMA_TILE_HEIGHT,
0,
BLOCK_TILE_WIDTH
);
// Each warp copies a 16x128 chunk of the C matrix, one row at a time.
// Each thread copies 4 entries (32 bytes) at a time from glmem
#pragma unroll
for (int i = 0; i < WMMA_TILE_HEIGHT; ++i) {
float* shmem_ptr = offset(shmem_float_ptr, warp_id * WMMA_TILE_HEIGHT + i, 0, BLOCK_TILE_WIDTH);
float* glmem_ptr = offset(C, tile_row_offset + warp_id * WMMA_TILE_HEIGHT + i, tile_col_offset, n);
*((copy4_t *)shmem_ptr + lane_id) = *((copy4_t *)glmem_ptr + lane_id);
// May want to try out a syncthreads in here, rather than after the loop
}
__syncthreads();
wmma::fragment<wmma::accumulator, WMMA_TILE_HEIGHT, WMMA_TILE_WIDTH, IRRELEVANT_DIM, float> c_frag[WARP_WMMA_ROWS][WARP_WMMA_COLS];
// Load the tiles into the fragments
float* tile_ptr;
#pragma unroll
for (int i = 0; i < BLOCK_WARP_ROWS; ++i) {
#pragma unroll
for (int j = 0; j < BLOCK_WARP_COLS; ++j) {
tile_ptr = offset(shmem_float_ptr, i + block_warp_y * WARP_TILE_HEIGHT, j * WMMA_TILE_WIDTH + block_warp_x * WARP_TILE_WIDTH, BLOCK_TILE_WIDTH);
wmma::load_matrix_sync(c_frag[i][j], tile_ptr, BLOCK_TILE_WIDTH, wmma::mem_row_major);
}
}
__syncthreads();
wmma::fragment<wmma::matrix_a, WMMA_M, WMMA_N, WMMA_K, half, wmma::col_major> a_frag[WARP_WMMA_ROWS];
wmma::fragment<wmma::matrix_b, WMMA_M, WMMA_N, WMMA_K, half, wmma::col_major> b_frag[WARP_WMMA_COLS];
// Loop for sliding through tiles on A and B.
for (int tile_idx = 0; tile_idx < m / TILE_WIDTH; ++tile_idx) {
// Copy A and B from global memory into shared memory.
// 1. All warps with block_warp_y = 0 copy A, and those with block_warp_y = 1 copy B
if (block_warp_y == 0) {
#pragma unroll
for (int i = 0; i < 2 * WMMA_TILE_HEIGHT; i += 4) {
half* shmem_ptr_half = offset(
&shmem[0][0],
block_warp_x * 2 * WMMA_TILE_HEIGHT + lane_id / 8 + i,
0,
BLOCK_WMMA_M_DIM * WMMA_TILE_WIDTH + SHMEM_HALF_BANK_OFFSET
);
half* glmem_ptr_half = offset(
A,
tile_row_offset + block_warp_x * 2 * WMMA_TILE_HEIGHT + lane_id / 8 + i,
tile_idx * BLOCK_WMMA_M_DIM * WMMA_TILE_WIDTH,
l
)
*((copy4_t *)shmem_ptr_half + lane_id % 8) = *((copy4_t *)glmem_ptr_half + lane_id % 8)
}
}
else {
}
__syncthreads();
// These three loops are performing the actual matrix multiplication over the global tile.
#pragma unroll
for (int k = 0; k < WMMA_TILE_K; ++k) {
#pragma unroll
for (int j = 0; j < WARP_WMMA_COLS; ++j) {
tile_ptr = &shmem[idx_b + block_warp_x * WARP_TILE_WIDTH + j * WMMA_TILE_WIDTH][k * WMMA_TILE_WIDTH];
wmma::load_matrix_sync(b[j], tile_ptr, BLOCK_WMMA_M_DIM * WMMA_TILE_WIDTH + SHMEM_HALF_BANK_OFFSET, wmma:mem_col_major);
#pragma unroll
for (int i = 0; i < WARP_WMMA_ROWS; ++i) {
if (j == 0) {
tile_ptr = &shmem[block_warp_y * WARP_TILE_HEIGHT + i * WMMA_TILE_HEIGHT][k * WMMA_TILE_WIDTH];
wmma::load_matrix_sync(a[i], tile_ptr, BLOCK_WMMA_M_DIM * WMMA_TILE_WIDTH + SHMEM_HALF_BANK_OFFSET, wmma:mem_row_major);
}
// Perform the matrix multiplication of the WMMA tile
wmma::mma_sync(c_frag[i][j], a_frag[i], b_frag[j], c_frag[i][j]);
}
}
}
}
// Memory access patterns for wmma::store_matrix_sync are basically random
// and will not allow us to coalesce. In that case, we first load the result
// into shared memory, then into global memory.
// Copy tiles into shmem
#pragma unroll
for (int i = 0; i < WMMA_TILE_ROWS; ++i) {
#pragma unroll
for (int j = 0; j < WMMA_TILE_COLS; ++j) {
// TODO: Compute ptr...
wmma::store_matrix_sync(shmem_ptr, c[i][j], stride, wmma::mem_row_major);
}
}
// Copy from shmem into glmem
#pragma unroll
for (int i = 0; i < WMMA_TILE_HEIGHT; ++i) {
float* shmem_ptr = offset(shmem_float_ptr, i, 0, BLOCK_TILE_WIDTH);
float* glmem_ptr = offset(C, i + tile_row_offset + warp_id * WMMA_TILE_HEIGHT, tile_col_offset, n);
*((copy4_t *)shmem_ptr + lane_id) = *((copy4_t *)glmem_ptr + lane_id);
// May want to try out a syncthreads in here, rather than after the loop
}
__syncthreads();
}
*/
// Fills a row-major (rows x cols) matrix with uniformly random 0/1 values.
// The RNG state is static, so repeated calls continue one stream seeded
// once from std::random_device.
void initRandMatrix(float* mem, int rows, int cols) {
    static std::random_device rd;
    static std::mt19937 gen(rd());
    static std::uniform_int_distribution<int> dis(0, 1);
    // Row-major traversal is just linear order over the backing array.
    for (int idx = 0; idx < rows * cols; ++idx) {
        mem[idx] = dis(gen);
    }
}
// Converts storage layout: copies a (rows x cols) row-major matrix into
// column-major order. Source and destination must not alias.
void colOrderFrom(float* row_ordered, float* col_ordered, int rows, int cols) {
    for (int r = 0; r < rows; ++r) {
        for (int c = 0; c < cols; ++c) {
            col_ordered[c * rows + r] = row_ordered[r * cols + c];
        }
    }
}
// Fills a column-major (rows x cols) matrix with uniformly random 0/1
// values. Draw order matches initRandMatrix (row by row) so only the
// storage layout differs.
void initRandMatrixColOrdered(float* mem, int rows, int cols) {
    static std::random_device rd;
    static std::mt19937 gen(rd());
    static std::uniform_int_distribution<int> dis(0, 1);
    for (int r = 0; r < rows; ++r)
        for (int c = 0; c < cols; ++c)
            mem[r + rows * c] = dis(gen);
}
// Prints a row-major (rows x cols) matrix, one row per line, values
// separated by single spaces.
void printMatrix(float* mem, int rows, int cols) {
    for (int r = 0; r < rows; ++r) {
        for (int c = 0; c < cols; ++c)
            std::cout << mem[r * cols + c] << " ";
        std::cout << std::endl;
    }
}
// Validates the naive, B-column-ordered, and shared-memory tiled matrix
// multiply kernels against each other on l x m x n = 2048^3 random 0/1
// matrices, printing 1 for each agreement check.
int main(void) {
    //cudaFuncSetAttribute(matrixMultiplyWmma, cudaFuncAttributeMaxDynamicSharedMemorySize, BLOCK_TILE_HEIGHT * BLOCK_TILE_WIDTH * sizeof(float));
    float* h_A = new float[l * m];
    float* h_Acol = new float[l * m];
    float* h_B = new float[m * n];
    float* h_Bcol = new float[m * n];
    float* h_C = new float[l * n];
    float* h_C3 = new float[l * n];
    float* h_Ccol = new float[l * n];
    float* h_Ccol2 = new float[l * n];
    // NOTE: d_Ccol2 is only used by the commented-out AColOrdered run and is
    // intentionally never allocated.
    float *d_A, *d_Acol, *d_B, *d_Bcol, *d_C, *d_Ccol, *d_Ccol2, *d_C3;
    cudaError_t err_A = cudaMalloc((void **) &d_A, l * m * sizeof(float));
    cudaError_t err_Acol = cudaMalloc((void **) &d_Acol, l * m * sizeof(float));
    cudaError_t err_B = cudaMalloc((void **) &d_B, m * n * sizeof(float));
    cudaError_t err_Bcol = cudaMalloc((void **) &d_Bcol, m * n * sizeof(float));
    cudaError_t err_C = cudaMalloc((void **) &d_C, l * n * sizeof(float));
    cudaError_t err_Ccol = cudaMalloc((void **) &d_Ccol, l * n * sizeof(float));
    cudaError_t err_C3 = cudaMalloc((void **) &d_C3, l * n * sizeof(float));
    // FIX: allocation results were stored but never inspected.
    if (err_A != cudaSuccess || err_Acol != cudaSuccess || err_B != cudaSuccess ||
        err_Bcol != cudaSuccess || err_C != cudaSuccess || err_Ccol != cudaSuccess ||
        err_C3 != cudaSuccess) {
        std::cout << "CUDA error: device allocation failed" << std::endl;
        return 1;
    }
    initRandMatrix(h_A, l, m);
    initRandMatrix(h_B, m, n);
    colOrderFrom(h_B, h_Bcol, m, n);
    colOrderFrom(h_A, h_Acol, l, m);
    //printMatrix(h_A, l, m);
    //printMatrix(h_B, m, n);
    cudaMemcpy(d_A, h_A, l * m * sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_Acol, h_Acol, l * m * sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_B, h_B, m * n * sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_Bcol, h_Bcol, m * n * sizeof(float), cudaMemcpyHostToDevice);
    cudaMemset(d_C, 0, l * n * sizeof(float));
    cudaMemset(d_Ccol, 0, l * n * sizeof(float));
    cudaMemset(d_C3, 0, l * n * sizeof(float));
    matrixMultiplyNaive<<<nblocks, tbp>>>(d_A, d_B, d_C, l, m, n);
    matrixMultiplyNaiveBColOrdered<<<nblocks, tbp>>>(d_A, d_Bcol, d_Ccol, l, m, n);
    //matrixMultiplyNaiveAColOrdered<<<nblocks, tbp>>>(d_Acol, d_B, d_Ccol2, l, m, n);
    dim3 dimBlock(TILE_WIDTH, TILE_WIDTH);
    dim3 dimGrid(n / TILE_WIDTH, l / TILE_WIDTH);
    matrixMultiplyShmem<<<dimGrid, dimBlock>>>(d_A, d_B, d_C3, l, m, n);
    dim3 dimBlock2(THREADS_PER_WARP * BLOCK_WARP_COLS, BLOCK_WARP_ROWS);
    dim3 dimGrid2(n / BLOCK_TILE_WIDTH, l / BLOCK_TILE_HEIGHT);
    // matrixMultiplyWmma<<<dimGrid2, dimBlock2, BLOCK_TILE_WIDTH * BLOCK_TILE_HEIGHT * sizeof(float) >>>(d_A, d_B, d_C3, l, m, n);
    cudaMemcpy(h_C, d_C, l * n * sizeof(float), cudaMemcpyDeviceToHost);
    cudaMemcpy(h_C3, d_C3, l * n * sizeof(float), cudaMemcpyDeviceToHost);
    cudaMemcpy(h_Ccol, d_Ccol, l * n * sizeof(float), cudaMemcpyDeviceToHost);
    //cudaMemcpy(h_Ccol2, d_Ccol2, l * n * sizeof(float), cudaMemcpyDeviceToHost);
    bool agrees = true;
    for (int i = 0; i < l; ++i) {
        for (int j = 0; j < n; ++j) {
            if (h_Ccol[j + n * i] != h_C[j + n * i])
                agrees = false;
        }
    }
    std::cout << agrees << std::endl;
    // BUG FIX: reset the flag so the second check reports on h_C3
    // independently of the first comparison's outcome.
    agrees = true;
    for (int i = 0; i < l; ++i) {
        for (int j = 0; j < n; ++j) {
            if (h_C3[j + n * i] != h_C[j + n * i])
                agrees = false;
        }
    }
    std::cout << agrees << std::endl;
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess) std::cout << "CUDA error: " << cudaGetErrorString(err) << std::endl;
    delete [] h_A;
    delete [] h_Acol;   // FIX: previously leaked
    delete [] h_B;
    delete [] h_Bcol;
    delete [] h_C;
    delete [] h_C3;
    delete [] h_Ccol;
    delete [] h_Ccol2;  // FIX: previously leaked
    cudaFree(d_A);
    cudaFree(d_Acol);   // FIX: previously leaked
    cudaFree(d_B);
    cudaFree(d_Bcol);
    cudaFree(d_C);
    cudaFree(d_C3);
    cudaFree(d_Ccol);
    return 0;
}
|
11,234 | #include <stdio.h>
#include <cuda.h>
#include <stdlib.h>
#define THREADS_PER_BLOCK 1024
#define TIME 3600000
// Initialize both working buffers for the point-source simulation:
// cell 0 holds the source value (200), every other in-range cell is 0.
// c_d is accepted for signature parity with compute() but is not written.
__global__ void initialize(float *a_d, float *b_d, float *c_d, int arraySize)
{
    int idx = blockIdx.x * blockDim.x + threadIdx.x;
    // Thread 0 always writes the source cell; all other threads bail out
    // when they fall past the end of the arrays.
    if (idx >= arraySize && idx != 0)
        return;
    float v = (idx == 0) ? 200.0f : 0.0f;
    a_d[idx] = v;
    b_d[idx] = v;
}
// One averaging/diffusion step: b[i] = (a[i-1] + a[i+1]) / 2 for interior
// points. Boundary cells (0 and arraySize-1) and out-of-range threads leave
// b_d untouched. c_d is unused (kept for the caller's signature).
__global__ void compute(float *a_d, float *b_d, float *c_d, int arraySize)
{
    int ix = blockIdx.x * blockDim.x + threadIdx.x;
    bool interior = (ix > 0 && ix < arraySize - 1);
    float temp = 0.0f;
    if (interior)
        temp = (a_d[ix + 1] + a_d[ix - 1]) / 2.0f;  // float literal: avoid double math
    // Barrier hoisted out of the divergent branch: the original called
    // __syncthreads() inside `if (interior)`, which is undefined behavior when
    // some threads of the block (ix == 0, tail threads) skip it.
    __syncthreads();
    if (interior)
        b_d[ix] = temp;
    // NOTE(review): __syncthreads() only orders threads within one block; the
    // kernel is safe across blocks only because reads (a_d) and writes (b_d)
    // target separate buffers — the caller must not alias them.
}
// Run TIME steps of the 1-D averaging stencil starting from a point source at
// cell 0, ping-ponging between two device buffers, and copy the final state
// back into `a`. `b` and `c` are currently unused (kept for the external API).
extern "C" void pointsource_pollution (float *a, float *b, int *c, int arraySize)
{
    float *a_d, *b_d, *c_d;
    cudaMalloc ((void**) &a_d, sizeof(float) * arraySize);
    cudaMalloc ((void**) &b_d, sizeof(float) * arraySize);
    cudaMalloc ((void**) &c_d, sizeof(float) * arraySize);
    initialize<<< ceil((float) arraySize/THREADS_PER_BLOCK), THREADS_PER_BLOCK >>> (a_d, b_d, c_d, arraySize);
    for(int i=0;i<TIME;i++){
        compute <<< ceil((float) arraySize/THREADS_PER_BLOCK), THREADS_PER_BLOCK >>> (a_d, b_d, c_d, arraySize);
        // True pointer swap. The original did `a_d = b_d;`, which after the
        // first iteration made both pointers alias one buffer (the stencil then
        // read and wrote the same array), leaked the original a_d allocation,
        // and double-freed b_d at cleanup.
        float *tmp = a_d;
        a_d = b_d;
        b_d = tmp;
    }
    // cudaMemcpy synchronizes, so all queued compute launches finish here.
    cudaMemcpy (a, a_d, sizeof(float) * arraySize, cudaMemcpyDeviceToHost);
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess)
        printf ("CUDA error: %s\n", cudaGetErrorString(err));
    cudaFree (a_d);
    cudaFree (b_d);
    cudaFree (c_d);
}
11,235 | #include <cuda_runtime.h>
#include <stdio.h>
// Print basic hardware properties of CUDA device 0.
// FIX: the original ignored the result of cudaGetDeviceProperties, so on a
// machine with no usable device it printed uninitialized garbage. The call
// is now checked and the program exits non-zero on failure.
int main(int argc, char** argv)
{
	cudaDeviceProp prop;
	cudaError_t err = cudaGetDeviceProperties(&prop, 0);
	if (err != cudaSuccess)
	{
		fprintf(stderr, "cudaGetDeviceProperties failed: %s\n", cudaGetErrorString(err));
		return 1;
	}
	printf("Device name: %s\n", prop.name);
	printf("Memory clock rate (KHz) : %d\n", prop.memoryClockRate);
	printf("Memory bus width (bits) : %d\n", prop.memoryBusWidth);
	printf("Compute capability : %d.%d\n", prop.major, prop.minor);
	printf("Max threads per block : %d\n", prop.maxThreadsPerBlock);
	printf("Max threads per multiprocessor: %d\n", prop.maxThreadsPerMultiProcessor);
	printf("Multiprocessor count: %d\n", prop.multiProcessorCount);
	return 0;
}
|
11,236 | float h_A[]= {
0.9213138439968901, 0.6617113060617275, 0.6973252758084447, 0.9189721456227458, 0.5955755819121558, 0.9957115787327653, 0.8491107840707075, 0.531162429640395, 0.6677428699556074, 0.7824000784592192, 0.5505106490153537, 0.5832391186440731, 0.7711317603741012, 0.6646085525970225, 0.7442827705187394, 0.9756813198234241, 0.7887299618414738, 0.6956699445426889, 0.6061237554854295, 0.8573870593637098, 0.7888317188780021, 0.9638136840462357, 0.68375593933514, 0.5588910890562644, 0.7649804451642838, 0.9367768127139396, 0.5587362578472823, 0.5827928817432861, 0.6336721959391562, 0.9344205193933401, 0.7818988319593521, 0.884629834491663, 0.8890091058175251, 0.9950449762194271, 0.8661320669056769, 0.9761377689723415, 0.7260784163660063, 0.6195817639687591, 0.7694233370213943, 0.8963389622587666, 0.815804966913392, 0.8831459228646862, 0.5063971938431947, 0.5109684317682512, 0.7253904485249004, 0.9457441542862165, 0.5215631053449747, 0.6743369442166605, 0.9327294881153647, 0.9064057328001663, 0.9860746619282325, 0.90431929602179, 0.6125068595757304, 0.9553005979863161, 0.927194757590015, 0.5132242371647311, 0.7466528076096621, 0.5968066041411364, 0.6864944075834807, 0.9904669336822217, 0.7017972666913999, 0.8207754627184087, 0.5682861502716219, 0.7401120636959151, 0.7880333585811294, 0.8283926336430738, 0.6369097455185311, 0.5352471031211463, 0.9036532323713982, 0.5916437691016483, 0.640653558314106, 0.8208418296549747, 0.8529303473928216, 0.601218862371208, 0.9499496052702191, 0.8811286011136765, 0.5109227443987332, 0.7150459415447242, 0.8243688993880125, 0.8523707964142424, 0.6346396912320413, 0.8454216409756758, 0.5176497344880678, 0.597550543020652, 0.9545452069434124, 0.8083301474456324, 0.5120202361527675, 0.7359683541908371, 0.8133228072881178, 0.6725261780871735, 0.8557099832244176, 0.595262052886045, 0.7085194677775875, 0.6512276445707252, 0.5913414998974679, 0.6226601788893293, 0.8703145669197716, 0.8788080463686347, 0.5019919759770144, 0.5994275618527458, 
0.6384876717906051, 0.6801522278619762, 0.6232739512289674, 0.7052503276084385, 0.7744290746747858, 0.5722090529457147, 0.536215665694525, 0.7141825729261049, 0.8519982085280031, 0.8048478528459495, 0.9358833136090606, 0.9383464315066373, 0.9834391047828861, 0.7456992436432699, 0.9695925842151586, 0.7049115446746395, 0.6166861541680222, 0.8082148665213122, 0.6285573463645353, 0.5003143725939613, 0.8868885850707321, 0.5090942125536072, 0.8126972663647498, 0.9288214568494513, 0.53535259273228, 0.9614966552474801, 0.7220207617928296, 0.7769522399234333, 0.5889505822482057, 0.8054171389210836, 0.6191810317744927, 0.8169878711895158, 0.5097270046859617, 0.8830435317572556, 0.7262890050304052, 0.8669154381024686, 0.5768358180519302, 0.513566573079854, 0.718042770101878, 0.5067519500995075, 0.8372272196318808, 0.7184771660710114, 0.60443230117148, 0.5902947261363308, 0.7573972174951713, 0.7601993283283502, 0.8288928221769483, 0.8774230679260133, 0.7866367967620398, 0.8551034482484052, 0.5078889916054081, 0.7041990583900157, 0.9608940026506632, 0.9248630693867664, 0.6149382803513148, 0.6203784393256522, 0.714855660641709, 0.8314542952730773, 0.5242474447212822, 0.6654655972378938, 0.6163919670439311, 0.9450509671309852, 0.7867844818165987, 0.6733176288858438, 0.7108931120423674, 0.5211313749857152, 0.9209127440118445, 0.5106982881940506, 0.7365196875106441, 0.9682016837363279, 0.9746482458113667, 0.9499303935216327, 0.8080200781470434, 0.6343513674184825, 0.8386036122917506, 0.704247989697583, 0.9809298390074119, 0.5022704078131262, 0.6014697780888354, 0.9215228260835286, 0.6808950854330558, 0.8069518805198245, 0.9332226628579925, 0.9158096684137551, 0.8764645551566621, 0.9740261755701645, 0.988350970860148, 0.6032427083004839, 0.9371992662498768, 0.6060834600941467, 0.6435343104232547, 0.7375000800333054, 0.926764162483988, 0.9185366674163356, 0.5844043800313836, 0.8188177005681383, 0.5889917671739536, 0.7943179981402895, 0.9197300460037116, 0.8967241957140333, 
0.7107079437880638, 0.5637418241112583, 0.8188972871012254, 0.6273030183268432, 0.7616496909429529, 0.632212338298351, 0.916147963592624, 0.7159015861258679, 0.8985814798615548, 0.6390261541049693, 0.9395860856138587, 0.9726403147215578, 0.520536482993029, 0.9525442051314446, 0.9233532353930289, 0.8043303000095816, 0.9202184904764594, 0.6731548759104473, 0.5287393274514122, 0.8811250104137489, 0.8078665012819175, 0.8925836292764979, 0.815341863745159, 0.9436298869981611, 0.9499497639622326, 0.6836834514780603, 0.8562076078231965, 0.6912187622071855, 0.7248508322864908, 0.63349303502002, 0.86578320163277, 0.9996078520435281, 0.938377357867142, 0.8849423200801534, 0.7092446030835369, 0.9246135508701374, 0.5209290050020012, 0.8952353531455832, 0.6452167456081357, 0.5581668652960827, 0.5827293385234611, 0.6926751666149265, 0.5984945054693587, 0.7856521044227901, 0.8176113912383568, 0.7709299699207963, 0.9315789928645999, 0.8339752995667553, 0.6680570322811232, 0.9580139215004729, 0.5062062471510205, 0.6266091760751459, 0.5595129706202944, 0.6603412920142004, 0.5250136788231861, 0.822116840223662, 0.8241594547632314, 0.8991355773378519, 0.7915502338883441, 0.6857368722718598, 0.931180898394643, 0.7471135876556761, 0.8829125059000968, 0.5138193497995386, 0.5550316748573476, 0.5155188933015109, 0.6664429805572643, 0.5693498807751034, 0.5135666593273845, 0.5936429292853419, 0.7877592208235149, 0.693654706831756, 0.6230722939315309, 0.7347165138718539, 0.9820285047189834, 0.9978330049057096, 0.8621118208883793, 0.6422117018626841, 0.5431940005103233, 0.8614238940814458, 0.8839172040857715, 0.6157476024658496, 0.6791147992271993, 0.8155832311346511, 0.9240205387322583, 0.5065190288367147, 0.8893673582884829, 0.5894175527018853, 0.9877148244805092, 0.8765107679297415, 0.6337062086987324, 0.7593227168670618, 0.8173285826955005, 0.5441084938723191, 0.8618740203126534, 0.5514810111708714, 0.8834243964018, 0.990074363255294, 0.8028579613409033, 0.9314458564466523, 
0.843930374790142, 0.5489363713146778, 0.9340631598048674, 0.694286276509914, 0.6720697413034853, 0.9690203655264102, 0.9017056600335969, 0.9974515165204398, 0.761781853355202, 0.5383115108571531, 0.879412328924398, 0.9557210215884528, 0.6941763736756581, 0.646078500607639, 0.8216589150233979, 0.9086828295072065, 0.7857909673101408, 0.561872861515413, 0.9394183210348427, 0.8324724493582512, 0.6235379293679445, 0.5675708059836051, 0.5848527877686216, 0.6559214312856307, 0.5431419746077084, 0.6848465544041087, 0.5585134756022248, 0.6756395547196032, 0.7922841445336918, 0.5647752786358828, 0.7194333764175925, 0.5724535245325331, 0.5929926697145413, 0.5074592971151539, 0.8759533270870681, 0.9067022908658389, 0.525011529414759, 0.925776673762946, 0.6126050319679297, 0.970107301969612, 0.7969591624719596, 0.6765002739186492, 0.7846552716743067, 0.7609489712464117, 0.6374339980490997, 0.7833461956938411, 0.9406132945955519, 0.7936068195004711, 0.6374919897876583, 0.9311742292478775, 0.8870360734218763, 0.6234883027297169, 0.7646562041572402, 0.7774790814221768, 0.9030885317880788, 0.8775818111845541, 0.5500001905028593, 0.5881731233205629, 0.9668652427883045, 0.8081859989181961, 0.6762786865777735, 0.7240014224712716, 0.8839415612614032, 0.8310496488316574, 0.5286050853497082, 0.6300527608813941, 0.6397831798672151, 0.6509268810032968, 0.571450190894548, 0.5473756541923214, 0.9140562542551587, 0.8498915172245607, 0.7810012293030637, 0.9115571641949973, 0.7397413229704715, 0.6363950016820447, 0.5549977182177982, 0.5107897169977984, 0.8740765120469831, 0.8917047199430245, 0.8235058500043432, 0.6151351542508212, 0.675827315241271, 0.7033808050818696, 0.805334367819428, 0.769312114860699, 0.7338695407770981, 0.5284014795469145, 0.8039295308734578, 0.8827268206539864, 0.6553949488902523, 0.5752938936238763, 0.8578847009319591, 0.9037815190808781, 0.5598050321200964, 0.523112414691696, 0.7889239961016936, 0.5966354676500754, 0.8652523188880519, 0.7710005557701081, 
0.8405308750603226, 0.5524516611541433, 0.7976865115586408, 0.9693521642474611, 0.6886289319792168, 0.7046246175512761, 0.7257035228458036, 0.9151121348564233, 0.8365945677195428, 0.7574054095452287, 0.5823439769982497, 0.6137511872693262, 0.9132808959615126, 0.9281439554558509, 0.550227004492487, 0.5633376952051408, 0.87853725162506, 0.545491156809489, 0.9949056167616106, 0.759065486557172, 0.5008702239184589, 0.7706840093729481, 0.5921150392748907, 0.9875562537715001, 0.9302151088133481, 0.5366779655749296, 0.6514170092295786, 0.8123288517561488, 0.9524757158901291, 0.6488237598071469, 0.6515776644386464, 0.9464957146651036, 0.617417904709037, 0.6050731179926144, 0.6220924657021931, 0.95406387757844, 0.9664001811863862, 0.8010058914351228, 0.7269867808409736, 0.9602142685516066, 0.99078100247964, 0.601666803677173, 0.9668572546664373, 0.8405409151819567, 0.795537356252098, 0.9302976446214142, 0.5402517371440358, 0.7595605599599353, 0.5765303515996011, 0.6960369954779286, 0.5503095237683817, 0.7409397567662244, 0.590454544812087, 0.5316507403013782, 0.675773919609585, 0.8021232067806666, 0.5984181291864921, 0.904811906729178, 0.9096384565724636, 0.7815191848345607, 0.5907223240471347, 0.6896281691051034, 0.6955790805860239, 0.7278683542533273, 0.7908796491894377, 0.6212172032883387, 0.6227003197531187, 0.8484701709276339, 0.7017996631717792, 0.8279690518381979, 0.8363782921855117, 0.8257785731512317, 0.5280645980447091, 0.5229331173186793, 0.9847450357052088, 0.9995465087300303, 0.6418052428797396, 0.8869681896284793, 0.9416544648524999, 0.8284862881297023, 0.9691735811432749, 0.9732738799427064, 0.8993618238963219, 0.9303263368402735, 0.5551813713293607, 0.9215655884116339, 0.8980709220763093, 0.569548488939999, 0.979027082964123, 0.5063842954178022, 0.6206249633087864, 0.8627288829621833, 0.9453564601164249, 0.6335385669823169, 0.7324078919095677, 0.6829080875711978, 0.6492513681618555, 0.6925159575970481, 0.9614533582328111, 0.9475529646407608, 
0.7413253879553534, 0.7624232898257397, 0.9286794728811282, 0.5577064465557449, 0.8567861894885433, 0.8038204856460676, 0.9429240052623078, 0.6407236538994753, 0.6002176086477564, 0.7647867749804915, 0.7021687104729522, 0.5265027729872056, 0.7115000090322441, 0.9219020918933868, 0.9550232554710076, 0.8914258296367472, 0.9679211791668507, 0.7121509270032165, 0.8993340433530022, 0.6154990020938966, 0.6494645033827919, 0.5409819899766061, 0.6893162600914108, 0.5574654244663296, 0.9761173150962055, 0.8492134496457368, 0.9422955832411732, 0.5829589505646401, 0.7694046677232587, 0.8267878632209347, 0.558680013455916, 0.8266335006031895, 0.7125729638098326, 0.5961126638905275, 0.8131089636554333, 0.9066343539993849, 0.9805571463161247, 0.745531974357482, 0.9862868265062223, 0.59922976002036, 0.5034861970525293, 0.9310291785495954, 0.7398559773231437, 0.8728672718537183, 0.6278046135221239, 0.8402674661340022, 0.8562707500822978, 0.8110777102318334, 0.9208698343758435, 0.9925747716478452, 0.9586057628651443, 0.5554853762236769, 0.5209345942671286, 0.629942941213514, 0.8788533800791956, 0.9452753867504191, 0.714530664641053, 0.5114508910039877, 0.7768508596558965, 0.9659788003492769, 0.7668983084959307, 0.5784038687204451, 0.7345997716240257, 0.6243289963152416, 0.6639402385848998, 0.7797549650333977, 0.9288546502270165, 0.639414357943358, 0.9167786731046268, 0.6243415205586629, 0.64538400406126, 0.7749137578748759, 0.6626101055501751, 0.8305814809611287, 0.6564840898560114, 0.9835759507736384, 0.8325314658944833, 0.8757631123061189, 0.9953267206435406, 0.7014223695112768, 0.5749880615623306, 0.9687609215482289, 0.6523814804460538, 0.7607546653811169, 0.8734246393645808, 0.684913965268335, 0.8827587225129063, 0.9863513789927555, 0.9949117039897485, 0.6443292794406299, 0.60078111311699, 0.9845267443330843, 0.7688604889514353, 0.5182772010614618, 0.9066493709689818, 0.5982142243938853, 0.7457619290595671, 0.9536922090105336, 0.9372075904823043, 0.607505666475077, 
0.5133127272060396, 0.8270550162816617, 0.7513137686255871, 0.699640023375302, 0.6134500446137308, 0.6312609502503127, 0.7140994471586877, 0.9812400504325134, 0.521742721275322, 0.6879318942975159, 0.7196815339173754, 0.8801139230379653, 0.773042712017211, 0.9926393784656184, 0.847974718867001, 0.7042734249457238, 0.5944431966512629, 0.6066393230351271, 0.8998646862431009, 0.8555987211502855, 0.9076695144088937, 0.793807829031961, 0.7236353896101901, 0.7477702662104313, 0.6655027693652726, 0.8712992870970961, 0.6821442122514384, 0.621184504322102, 0.67286345724764, 0.6983515700404863, 0.9444352852414037, 0.6051459785986959, 0.6679568730136857, 0.5017833864131516, 0.783613672840838, 0.6410409440420561, 0.5518885825712181, 0.7804442483162989, 0.6548359699396413, 0.6831873792098135, 0.8721284427380831, 0.7147018068011171, 0.9620199332317316, 0.7536951190820953, 0.783832610894462, 0.9588604030218566, 0.9388106680982622, 0.8079707671143539, 0.7934706760651189, 0.6353897301839628, 0.9855220820797421, 0.6655234452575653, 0.8382690218535209, 0.8281355597049112, 0.9475066789143562, 0.7869623144543693, 0.9698820534753646, 0.5366205821715433, 0.703528858663379, 0.7237875793622583, 0.922414119688237, 0.6728653040525245, 0.7660916961515816, 0.9586740230924166, 0.9126556731183592, 0.8468411459238943, 0.6499208796766138, 0.9499847701979273, 0.6007519232873446, 0.9428094608821973, 0.5413772389883929, 0.9055716769350244, 0.5166518628741985, 0.6668097022816298, 0.8724859844796677, 0.9442181923948361, 0.9056400999393682, 0.740237311099238, 0.8249516355888945, 0.6426124684035766, 0.7933298778207292, 0.8142375023924553, 0.6965620174507144, 0.8131874283718075, 0.8223295543193958, 0.7632135425775649, 0.9140317777962845, 0.8474636736494916, 0.783564281862829, 0.797274553295531, 0.6164080677737934, 0.8487073074523975, 0.7884743253120741, 0.5681871169578858, 0.9534198554667233, 0.7987153306106783, 0.8611084729958864, 0.8072024333059433, 0.6399998099805906, 0.8279509071011603, 
0.7461459686198184, 0.6231348667375246, 0.6939431939006877, 0.9650520865163168, 0.9246385602503346, 0.9545539268558865, 0.7714012027818943, 0.8675730522114838, 0.8365323934483052, 0.5523890345746553, 0.9762901995830242, 0.5632975291458249, 0.7769136655774056, 0.8995466121685034, 0.9209287108492907, 0.6281009391199786, 0.6250128108333435, 0.8496873771334282, 0.5617751226996854, 0.5411461254700738, 0.8625993437792817, 0.9046478979566388, 0.898417289803636, 0.9415567963413076, 0.6649956430420085, 0.9446809820543678, 0.8926647124809406, 0.9589883741895937, 0.9731745104048604, 0.5873176000910354, 0.9681710870984095, 0.6036949618380488, 0.6799997830149983, 0.6997724677038474, 0.9178273793007696, 0.9154896499545218, 0.984534572374349, 0.6511973453339925, 0.8972814671608926, 0.9378037659184658, 0.7464171392289224, 0.5233781584489194, 0.628410423226477, 0.8502052154667683, 0.8319539111305841, 0.8708683520519465, 0.6053841024607387, 0.8930786929194013, 0.9187543599352724, 0.6667321870884554, 0.7867572984463278, 0.6639535658986112, 0.9610766001464703, 0.822762065127848, 0.7580404973575497, 0.7885728005517114, 0.8723597651113808, 0.6866533284799603, 0.66436541936227, 0.6640132018423571, 0.6851740805900297, 0.5072628786642617, 0.6978885401974334, 0.5080099285829729, 0.7919880662292916, 0.9425647273137541, 0.820198279582182, 0.6646581772894421, 0.8292002206757488, 0.5313093433197623, 0.7178115153316478, 0.7455293930681892, 0.9993344653024343, 0.9153159063909184, 0.5824110049155375, 0.6253201612201593, 0.5760255176857083, 0.7522203051525591, 0.981944811641343, 0.5114314476707387, 0.9111067663549972, 0.8517707174470155, 0.5013560284503262, 0.9006932512907924, 0.8097582421362829, 0.5730688765714811, 0.5701841557611667, 0.5456543112537778, 0.6181785537120963, 0.9175830481224996, 0.8990039520929862, 0.5159643117667366, 0.9327339748551959, 0.8451734395786716, 0.8758883029407388, 0.588842565561398, 0.7728249624887699, 0.5754609790237863, 0.7197767137743564, 0.608558485966924, 
0.9878716684083282, 0.7125873722578895, 0.6789863539582446, 0.5688335371407226, 0.9948531446682386, 0.8809106792080342, 0.7105713484904198, 0.8974097173134918, 0.8513148312808894, 0.913202414688548, 0.6373376447204888, 0.9769188549849084, 0.7167448507346046, 0.9287406260694404, 0.6364925868677759, 0.9730885362834522, 0.9811948607288675, 0.9517584408284324, 0.6696741194764904, 0.9916327523341584, 0.8198618684210428, 0.6240886610689386, 0.6880050951245782, 0.5808314203116973, 0.5878761852085289, 0.9541341451616266, 0.8302069502213613, 0.5213438434177853, 0.653128765770022, 0.5212331088920134, 0.7775093907166157, 0.9996382149032468, 0.9561374261585444, 0.797453349698515, 0.5348632054549857, 0.6765971100227537, 0.8109673464116836, 0.8037861919658855, 0.9132599700213813, 0.9350793899470902, 0.8372943400025273, 0.7256771402288158, 0.9707203898843977, 0.5102186318851054, 0.7013477220573475, 0.5009134614335993, 0.7718951218365477, 0.7353213087662188, 0.9062277299027031, 0.7375995706067677, 0.5174175420797624, 0.7175043873269977, 0.9955420619820525, 0.8117599879547506, 0.5179538813517319, 0.6549082337430491, 0.534779414286837, 0.5990936400678444, 0.6628196274316802, 0.8842381158266008, 0.5167569318052979, 0.946738303566659, 0.5117439724502947, 0.7912276757395401, 0.5247010978601421, 0.6830736636418309, 0.6443831695933355, 0.9967934004474803, 0.9888366272855822, 0.8243566338591275, 0.8677482111849385, 0.983588581552375, 0.7207591927297794, 0.9959856037827739, 0.5290821119621975, 0.936752457471806, 0.9193832676227038, 0.573299594324099, 0.6680755458384213, 0.6526576533573365, 0.8188221385504503, 0.7010602991773396, 0.8157300048453628, 0.9336095546982273, 0.8530361824102615, 0.7790359099743845, 0.9536626600570728, 0.8691454593844774, 0.6096479539032578, 0.9109609918342644, 0.5995451119742756, 0.7756069541760624, 0.9862450431619353, 0.6234009418430208, 0.6334930051571729, 0.6515449271152223, 0.9514642788241146, 0.9275155433575539, 0.6825314725928051, 0.6237238796758586, 
0.6314254423348429, 0.7162377885409964, 0.518473466706222, 0.7072185768984647, 0.5715617080162441, 0.7701845812183326, 0.5177984210947559, 0.8632036336795688, 0.8600076125600888, 0.8421798496391861, 0.8928186769021296, 0.6341983533256054, 0.7573774858388321, 0.9895072659672601, 0.7684832286558465, 0.7087600712610781, 0.9431427716628289, 0.7354463528513555, 0.9244459066382869, 0.9474940292371377, 0.809593390723216, 0.793212525436671, 0.5908053614337323, 0.5696843422896505, 0.6344265807727889, 0.5456673050384342, 0.8212563377105893, 0.9885605914025386, 0.9390791934663343, 0.5648101737352256, 0.9287124206333183, 0.914790964026589, 0.9717666818577784, 0.7709872905457538, 0.6416431245795641, 0.6428203682284876, 0.6401389715215052, 0.5749941512498163, 0.7451170905739906, 0.9198830625958612, 0.5647068761012368, 0.8525040125383627, 0.5209696416490462, 0.7895394253738848, 0.5963643264824638, 0.5363791938085285, 0.927798057343715, 0.6639429050529377, 0.6765147209130757, 0.7460280824680372, 0.8881586145181026, 0.9151644003285705, 0.8289101685170881, 0.5940595544464298, 0.8380519765868975, 0.9878279721643874, 0.5400832832503119, 0.9994354104611078, 0.9447606225713312, 0.5207973142298226, 0.7778159486323815, 0.9563650558775747, 0.8661477837516844, 0.8668225942924737, 0.6718933736282356, 0.9995299592390006, 0.5284683388512079, 0.5351165106433899, 0.5313195552251944, 0.5147312102075903, 0.5408785588266396, 0.8949268923381049, 0.857307805215596, 0.7291630506725972, 0.7159980889432135, 0.8584645268316922, 0.7211256006847373, 0.5272201931984198, 0.6204371285925949, 0.8974572003464716, 0.7082853343847608, 0.8312423163467488, 0.9196636356015964, 0.7021453724345135, 0.611881886493491, 0.6370125386058332, 0.8094501315508713, 0.7414794788712142, 0.7523391589691751, 0.7476658034149558, 0.9629352093200951, 0.7120464840576, 0.5866934399021375, 0.7592643256929265, 0.7703725270804689, 0.5028960102162969, 0.8804881943010903, 0.7674771375883827, 0.5609656960345657, 0.5382469724471496, 
0.7552205453281213, 0.9081558035688302, 0.8793963047137472, 0.5207874879478163, 0.5721725084936631, 0.8467022348917699, 0.6867057765377129, 0.9088316224046389, 0.5098967622255618, 0.9373904055831958, 0.9064335959321663, 0.684461173629582, 0.8823305888506588, 0.7431298831624085, 0.9363560726874733, 0.9220192536033214, 0.9065599613854792, 0.9556588644624306, 0.6256118448983603, 0.7185864936233205, 0.8503942277539731, 0.692643022315186, 0.9626319353383588, 0.8567805255392448, 0.872495911751407, 0.5558861065320662, 0.7353251753118251, 0.6585163487489034, 0.6002667135331314, 0.9954862961268514, 0.6765809646232503, 0.8414147099632132, 0.6801140568340278, 0.7340237989508951, 0.8098212000160913, 0.5971662540698586, 0.8959712598913985, 0.7415646384122542, 0.8859008278336087, 0.8998009635646558, 0.869205887852648, 0.7077757948880842, 0.6947141602356159, 0.6969553083762895, 0.7531813232103698, 0.8574246614756766, 0.5340729862217922, 0.7275621720872323, 0.6046621649985108, 0.7477630460294791, 0.8331215646570309, 0.7841785827517418, 0.7698490136827337, 0.7879188995448094, 0.6474248033066603, 0.9251831564691739, 0.6930981483084122, 0.9624351467461363, 0.5455752269431491, 0.5071234455068256, 0.8293480289985027, 0.7455817405766516, 0.817236164794652, 0.8935696491133525, 0.9692507648921818, 0.6784138588714279, 0.7846693249871675, 0.9175052237777901, 0.8194063990868292, 0.6730641419925674, 0.8888078010665124, 0.8063564999379802, 0.5792588012777382, 0.5526842476004942, 0.7051929486107722, 0.8042145908900749, 0.8825030763474124, 0.6261653727177814, 0.662705150110158, 0.6969328225222894, 0.5211697879174881, 0.6005998693063204, 0.90732060458407, 0.587462360057101, 0.6954176446543124, 0.6119916258887177, 0.7337355079777679, 0.7190018438881196, 0.9692854114854673, 0.6114018570331978, 0.9466069989305321, 0.7232479784289216, 0.7267851476540597, 0.8288463632916041, 0.9996329213347468, 0.7880237034033656, 0.5453073244700188, 0.903511762692371, 0.5797171306499027, 0.814766088433293, 
0.9229765730765807, 0.9252230890559662, 0.8190635161786717, 0.9567708125525078, 0.7817084403959075, 0.9609247497161364, 0.8398608501173668, 0.9649130738775091, 0.9185949173899708, 0.832902601408293, 0.6301445346890665, 0.7664406757525704, 0.7083950083001287, 0.6274425541853361, 0.7677258026394096, 0.9167889052762799, 0.5327634249039357, 0.6364861293385746, 0.7232620960826318, 0.6563648756239473, 0.8787676116289282, 0.5696449472446262, 0.8916365926100992, 0.9293105146221562, 0.8936198567156142, 0.9232849146374715, 0.9857480728885282, 0.5946262952672393, 0.6493296597657474, 0.875409069612206, 0.5898378395074693, 0.7725042659869286, 0.6744560669544837, 0.7742598712721822, 0.8053623300714641, 0.9307827042100724, 0.8610745582651963, 0.727697819921689, 0.6964361865229826, 0.8005669945399954, 0.873283386057519, 0.6257449853758833, 0.9829769431009615, 0.8479670100145051, 0.7752111737947314, 0.6966879700613876, 0.8083684520331742, 0.8705017173906661, 0.7546957138868207, 0.9060561650966141, 0.8127401059266408, 0.7771272898249237, 0.6078819651477005, 0.9021790512713344, 0.9478752619308775, 0.8718885682005366, 0.632251664667288, 0.7529358064076657, 0.9678870365936685, 0.7526460839670694, 0.647056382721164, 0.860335521267962, 0.7924116787315887, 0.753511969726697, 0.9077376852777979, 0.6811915939785458, 0.8144942174378216, 0.8894943438898286, 0.7032621928689939, 0.6856291359629537, 0.9069226243383277, 0.8921491128436293, 0.943325623962418, 0.6609987430315252, 0.7366306646596188, 0.5461261005484555, 0.7455730457844141, 0.655325105719867, 0.6633496348145701, 0.7487405218291732, 0.8324379657149326, 0.8887060678626951, 0.6875788735312723, 0.9527131544602414, 0.9025963764048144, 0.502524310138688, 0.742145415096839, 0.7624856790153796, 0.8874090888092828, 0.8608040676167303, 0.8826572578299502, 0.7109826249344552, 0.859779462455972, 0.9042010179507742, 0.6015305078384228, 0.7738404085047089, 0.5781795204815254, 0.7592816341207289, 0.9233532946983656, 0.7339307642064353, 
0.9587870189389163, 0.9294925345109021, 0.6443341864712138, 0.6141848386801514, 0.894706613752462, 0.6368468354456962, 0.5239037901222956, 0.9209510508189744, 0.6292320442470325, 0.6206183384022621, 0.7539766650445601, 0.7927370911136593, 0.968416219831677, 0.724315358356578, 0.9785893710445573, 0.6775338820394712, 0.8315755787467725, 0.9006445027659974, 0.5268693649968685, 0.5209728952022139, 0.6412748137253917, 0.6649483246560206, 0.7353093375191322, 0.8904261497564601, 0.660565021205947, 0.6823093026682382, 0.9645916699451041, 0.875227119000495, 0.9639673665784303, 0.605430886576726, 0.7906229544710521, 0.9823861245369381, 0.7655513809876309, 0.7577477823819867, 0.8369764851863333, 0.9444928613581489, 0.6152954167283629, 0.6838349069652403, 0.8411136329182085, 0.6059401773427819, 0.921972115785779, 0.7087797335438764, 0.9481333286647171, 0.6970734481831666, 0.8233869249814926, 0.927856709977395, 0.7280381423382314, 0.5012674321134967, 0.7810406476307454, 0.8162101701294333, 0.8864386245632163, 0.6317128897175084, 0.6891409284057992, 0.6845710310896956, 0.8020673682185343, 0.7361803753677418, 0.5061949458192794, 0.9663203563458929, 0.6390489794484899, 0.6377979353845862, 0.96607592530475, 0.7343782965803086, 0.5035738688788665, 0.9592520899918049, 0.8872823819679502, 0.5907365696643494, 0.9557855565077618, 0.8696780266040007, 0.8548516534251618, 0.9630603992689915, 0.7991409163879585, 0.9521813009823001, 0.8690503090970404, 0.925990320738707, 0.5783452152175825, 0.9042728228941469, 0.8328589825918704, 0.6524881949312173, 0.6266599298966362, 0.6594277883884122, 0.6594434120550936, 0.6438384939289906, 0.656867145538546, 0.9590952849699174, 0.9585047368346347, 0.8972286715437079, 0.5321323733050015, 0.5090012423540036, 0.5591333359278301, 0.6246655880723938, 0.5204304745243498, 0.8552212550762457, 0.8323617099130853, 0.5154954902071809, 0.7469521938715759, 0.6810778713554306, 0.5755144756803761, 0.510850750091505, 0.5491723769994494, 0.5611654329891024, 
0.626919946815026, 0.7308105891483017, 0.7570778451031386, 0.60419366611069, 0.6704371262685974, 0.7245056757900645, 0.8186628402668388, 0.5481976590041258, 0.5950562249547098, 0.6430001535336631, 0.9737347457299491, 0.5100426314408404, 0.8976421567647876, 0.883050781751236, 0.7333027955411491, 0.9390647376039252, 0.9252589602902526, 0.856353990584904, 0.5662081475104059, 0.687451419183494, 0.8565192436438304, 0.5488665966715671, 0.9483117397106394, 0.75098853270236, 0.7504420801487576, 0.6995873354588329, 0.6065955283483009, 0.6335645646191164, 0.5152793292086097, 0.6819872592490462, 0.6700941868867263, 0.5694279655500116, 0.7782075413060338, 0.594092323336844, 0.7679567474277821, 0.6236497633598205, 0.9237262456854313, 0.9822317178166091, 0.8421297977197308, 0.7133520212721078, 0.6579512483992838, 0.734553017906074, 0.908558198180941, 0.9725896571192232, 0.5231713762861016, 0.8447403679483918, 0.8701897675165573, 0.8750576496297304, 0.7496096783251948, 0.6563949275759782, 0.7622659331374626, 0.6188602608691511, 0.9919942482759991, 0.5736925492937082, 0.7429122302192781, 0.9288803751232169, 0.8662106719140643, 0.5905036731107658, 0.7338281835007734, 0.5293844621126826, 0.5498891332907736, 0.600178252855646, 0.8686041479234785, 0.5956435235061452, 0.6639585212320511, 0.9654021135089013, 0.6784096765752845, 0.5463290413494368, 0.7196858247650769, 0.7562289608463548, 0.8797226386197856, 0.7090464953285123, 0.6947702925225956, 0.8636287498921176, 0.6820612475849852, 0.6284036541760338, 0.7553790398714587, 0.7570286296508224, 0.9229661853312402, 0.9277043057004382, 0.6593859777825822, 0.906326682591578, 0.7085984823948865, 0.8088651738987471, 0.9569873621774129, 0.9274219278389954, 0.6601033562209411, 0.7068645638294571, 0.5314768312350635, 0.5017574954162279, 0.7801732636385761, 0.635169903481716, 0.6123089364976868, 0.6836069918290871, 0.9084117570893626, 0.7798442052068955, 0.8219730084838128, 0.684947412089197, 0.8638795912360135, 0.9080211973675409, 
0.889687405473347, 0.5628259323779055, 0.8663150711019807, 0.7509849489521179, 0.6737976199787665, 0.7644414732855468, 0.58680920357782, 0.9051871037991619, 0.5513008962925164, 0.6521250775548286, 0.7755475201419002, 0.8415024785590433, 0.8034189994467551, 0.9502948377479472, 0.9773149029923892, 0.9243391625158008, 0.8136649870622119, 0.7288522079840452, 0.7281235624725091, 0.5034525218991613, 0.9370958172488555, 0.8186059525938516, 0.99177523338742, 0.6943947923844546, 0.9859448713533203, 0.8827370627632861, 0.5286588234139273, 0.7298489987042875, 0.5131797169725869, 0.5054641109865526, 0.9968173622660002, 0.6400747419822439, 0.8401066978565214, 0.8242906814656779, 0.8133837618618449, 0.6964731619386546, 0.992242735488558, 0.897595481015129, 0.638326176271427, 0.8665628368864188, 0.8577206364718393, 0.5537049465577399, 0.7026608315327956, 0.8540950103146887, 0.8580198938655215, 0.672037577507365, 0.6123833410927304, 0.9237007867891714, 0.5950873155922319, 0.6011970478648807, 0.9812474318940769, 0.8310744487876471, 0.7537731923745266, 0.5057086497550283, 0.7482725545342923, 0.6636614691619321, 0.7104655520117101, 0.5291922787248121, 0.6059442629191147, 0.6068971880450309, 0.8258181979112527, 0.9034255144246897, 0.6753847219364114, 0.9916693542492705, 0.9592368090905772, 0.8075632950558391, 0.9341232686291476, 0.6217233382036196, 0.5547532863745356, 0.8691850095286464, 0.9237033498935519, 0.6722187424719747, 0.7262096530453854, 0.5071988149041251, 0.8827269759114214, 0.5040115952131199, 0.7379506211950488, 0.7407549368817203, 0.9383444215043952, 0.6097296863542186, 0.6983267876376966, 0.5810946869889404, 0.6948168259705969, 0.8869752676725551, 0.8159384595170001, 0.9920259497112235, 0.9716853163946796, 0.8394579519233862, 0.834800683024157, 0.7206353397972726, 0.6699447653894219, 0.9248295152455013, 0.9571470805184845, 0.7486174883520031, 0.9266950314693915, 0.8896009033751129, 0.9854049642324655, 0.7056501983206888, 0.714789822480701, 0.8306939005601082, 
0.8064938256293286, 0.8636862136860666, 0.8395811656317227, 0.8019773230347529, 0.585509433943768, 0.5948377561344239, 0.7413617444796688, 0.9849642278792696, 0.5896345052479892, 0.6547655861188632, 0.5659390263592314, 0.9526719142408657, 0.8919130149405536, 0.9317694985558438, 0.7974294453991211, 0.747544800299057, 0.866050846935285, 0.949568756808159, 0.8672531306070412, 0.873934224899771, 0.592756987007612, 0.6252548829519704, 0.9239096813472101, 0.7664881350472605, 0.5613049008318222, 0.9927936312938057, 0.9309945150504334, 0.6924254043200638, 0.9245496563950742, 0.8975298926213118, 0.9736566359068891, 0.6533984139882658, 0.9220262437844213, 0.8154423216860004, 0.6947950964920797, 0.9967784089397369, 0.9483837107379964, 0.9172857471813072, 0.736645832230465, 0.7781485085614495, 0.6367959600016977, 0.7848599536697364, 0.8171207931559956, 0.5807163987769973, 0.6148645690281459, 0.9321552973164, 0.6014505460526971, 0.9967485158271162, 0.6312274513169935, 0.6332838527552253, 0.7265092935102875, 0.8267717073179701, 0.5880385634212022, 0.8207801835330789, 0.9273825931487969, 0.5595255704591218, 0.6111284217596339, 0.8823119548096493, 0.6826072217734178, 0.6129634146090674, 0.6721948902447092, 0.8538977388048239, 0.500664876435207, 0.9050978613704659, 0.6378973646199548, 0.5065320644657856, 0.7549775473013056, 0.6835449066953605, 0.528931121743651, 0.8186954403257412, 0.5472257227157779, 0.8494332483909991, 0.964400273861378, 0.5350388615388996, 0.8971863797215651, 0.8419847096729076, 0.5738088844032514, 0.803553746718428, 0.6302906607016621, 0.5443450038323354, 0.6813750614485445, 0.853223774489364, 0.8466974188427077, 0.519229113068286, 0.6037294657047647, 0.8618109359222135, 0.9800992458823135, 0.5574589828852305, 0.7115044684490592, 0.8062933834524044, 0.8990338427189093, 0.7924746483561946, 0.7991910326355176, 0.6864212389071388, 0.9951704983562826, 0.8122165973654347, 0.9670203945873744, 0.9423780413987488, 0.6825094129076335, 0.6769409057658846, 
0.6969798730496111, 0.8938935905885591, 0.5191202314474033, 0.759460844172086, 0.9202872262795035, 0.7359561216230721, 0.5350686680035965, 0.7940274839884294, 0.8455041330169377, 0.730192157891566, 0.8426915207509977, 0.8416889673581085, 0.5899084245808996, 0.9045359077158928, 0.8230230458939121, 0.918325876680123, 0.6700255161415972, 0.8763358279280151, 0.6300450276300058, 0.6056983672169626, 0.8630729695705848, 0.9014331459303593, 0.9295633991501415, 0.9438074755847273, 0.9909168704124802, 0.7887992389303788, 0.9986897966275514, 0.6680954110561637, 0.7530833818935755, 0.6416619487999791, 0.785482888697674, 0.9797569280483893, 0.5834410400933567, 0.6351186213737248, 0.6392338200735634, 0.7323353282234224, 0.7047746588683079, 0.8344064921360528, 0.6046022929513604, 0.6496332749421698, 0.7916497369167819, 0.8405349731576244, 0.5962270955133485, 0.7857441812316661, 0.6286357019398727, 0.7361666741802471, 0.9567992432112696, 0.763921024404673, 0.727113462912246, 0.5248461298010801, 0.8288196337752871, 0.9490796567333295, 0.9680119173307501, 0.5399832729841314, 0.9246640432657465, 0.572745313483672, 0.7918832173655912, 0.9892227273883949, 0.9632909707789469, 0.8044949335734726, 0.6273615291591086, 0.8992613570238543, 0.8219504955837237, 0.7219408183946664, 0.9122489147328063, 0.6147029689125285, 0.6557361511602775, 0.5041709364698692, 0.5023725995993744, 0.7733690661366417, 0.7641064309855528, 0.671383856207321, 0.7206872146816119, 0.7251736176581453, 0.8357217235171157, 0.7150821216870731, 0.6829288152375939, 0.8643974864631138, 0.7124602468511678, 0.7887870529347389, 0.9253467450524169, 0.54013370271928, 0.771691669522506, 0.6584621277292109, 0.997915824758707, 0.9909045720587888, 0.6957749967782995, 0.5278122487792128, 0.6726086023743161, 0.9858851051438893, 0.7083886474209855, 0.9453591728241317, 0.7468198041120645, 0.5433550914672857, 0.6127143340896444, 0.6541975473618982, 0.7690755867016729, 0.8967125225711226, 0.7885262836701011, 0.759984625175432, 
0.6844938989507929, 0.5629054390123307, 0.6860621576405496, 0.9943628518018611, 0.7789738888230742, 0.9882274463496528, 0.5437145646477719, 0.6753287313993711, 0.7961996699439384, 0.732761744323681, 0.5618282661742896, 0.6286280797339951, 0.6874913832525857, 0.557883277983681, 0.6466247967616556, 0.8991065631480508, 0.6510420326186801, 0.9045869714233308, 0.5186089476257747, 0.6831618326353674, 0.9522818785134277, 0.7294698245223697, 0.8845794737097907, 0.5949440325581798, 0.6615754279076058, 0.7890477716942004, 0.9534425226944028, 0.9765136083058927, 0.59973855398982, 0.8869269584608074, 0.6814891609863115, 0.821474747895038, 0.5508505082328482, 0.5304074071309846, 0.8101446788420813, 0.7561779751882595, 0.8012530120686487, 0.9571702869254122, 0.8595983273596348, 0.6463215670816072, 0.5568421893045189, 0.6630996569910065, 0.5066981977984968, 0.8903196611660755, 0.9792104361691871, 0.5104793150275164, 0.535905942163555, 0.5216535169864769, 0.6803346234135212, 0.9805458963661979, 0.8881424848177002, 0.8098642026426166, 0.9466574998000827, 0.5022271559107763, 0.7171908607911706, 0.5624139808907221, 0.998395452137971, 0.9127102852655324, 0.8128770272204617, 0.5660883430792161, 0.9370600943357659, 0.8518626583220608, 0.7431199271017943, 0.7569043158517709, 0.6768854585474033, 0.6423232368605888, 0.9658078603025446, 0.8767906312300286, 0.8742574943350598, 0.5859879930966598, 0.5245357450333669, 0.5634690269899472, 0.7344186722738271, 0.6016809218841971, 0.5250860681467928, 0.715735719331395, 0.7028033915558694, 0.9323222619131668, 0.5018515449862626, 0.7229668384573549, 0.845698939662173, 0.6251368349728783, 0.8260001219563596, 0.7189192662482247, 0.9024738993093446, 0.8513576781748293, 0.8213002386755641, 0.8788100340762902, 0.6367049895419119, 0.8032209798608743, 0.7191801357231975, 0.8099702637731828, 0.8618214140343159, 0.9371735250681449, 0.9669442434028348, 0.7691506927568559, 0.8257489607971664, 0.7713848228286497, 0.5497070638312622, 0.6864896852374553, 
0.5481578797369713, 0.5866171421114884, 0.822175665181413, 0.9673307104855768, 0.7550919212525973, 0.5430539628237468, 0.5140068636977543, 0.9293449533855316, 0.7831116124525496, 0.6150486416457493, 0.6570511878776337, 0.7901909939851224, 0.8815716177886527, 0.7763019543595047, 0.5047166381524376, 0.7995005988663229, 0.9151056575703843, 0.8455203770553461, 0.8643608616555886, 0.8894733566757639, 0.5737779352383003, 0.7038856600465155, 0.664536115607841, 0.6753971397831451, 0.8858796708508048, 0.8336808336350329, 0.7650724565835993, 0.8369849757372221, 0.5283371362494957, 0.8263195464493553, 0.5552197888844415, 0.6813201694127644, 0.5192212343694229, 0.7701374044061715, 0.6392634059748485, 0.8588590823548509, 0.6967885051660412, 0.5627424941223966, 0.5851640521014715, 0.9519998142435055, 0.8644409768151331, 0.9387259288116003, 0.9877733968068548, 0.6675631470999474, 0.688281359242864, 0.7544990102083359, 0.8532474589397534, 0.5980485206350443, 0.5717712578224132, 0.5670048932093408, 0.6430906132528846, 0.9048874759834034, 0.8360629590049671, 0.6999974252852075, 0.6441888633847392, 0.8776618417112603, 0.9692125542730641, 0.7727127778317682, 0.7128978634337484, 0.9815898098004494, 0.6854871758197334, 0.6271377605281974, 0.9057211051510043, 0.6175786205255961, 0.8537547740749407, 0.8592561702439222, 0.9575264759300588, 0.7019180741807565, 0.8097828039841831, 0.8713877169905728, 0.5545687940610541, 0.5584688973782961, 0.8239832858884323, 0.939091665496593, 0.7743305638888789, 0.768406062429678, 0.9623569661148637, 0.9155708458506853, 0.6623079609352405, 0.7812002966049206, 0.7145697550889512, 0.5417169224697735, 0.8593289187330626, 0.6252584664439347, 0.8943668983326847, 0.8627558610146424, 0.984508838419757, 0.7730221313785938, 0.6533072145544523, 0.8445899116664914, 0.575340032860497, 0.8886833542754291, 0.8211970076840919, 0.5029698578378714, 0.6237860583218189, 0.5056988865867699, 0.74238114226441, 0.853556402643669, 0.8204904901902321, 0.8031465988348252, 
0.930306820171618, 0.7274829127742449, 0.9827575149551647, 0.7560711527442068, 0.7106620296461026, 0.7412830894331612, 0.9737501003077215, 0.8935300064316425, 0.9020133096813627, 0.9751684989301237, 0.6907451864145906, 0.9813397318327686, 0.6959372725073003, 0.9506684687105329, 0.9556150621717261, 0.821329661717527, 0.8876702385655804, 0.6351321359777369, 0.6237063401760811, 0.5525979864268502, 0.9905116221166266, 0.5427334396443103, 0.5943353118142571, 0.5709674342922281, 0.8778345082653836, 0.7343227645774042, 0.9023236151468814, 0.7020040726239953, 0.6641593320913751, 0.5669895454257461, 0.9906251376725246, 0.5904070430019941, 0.5632328556203828, 0.9548526574704642, 0.6789563530814265, 0.8245757030823874, 0.5531643078992887, 0.8073489057411263, 0.6924879161240897, 0.6367696917833534, 0.5015554961485504, 0.794603656752175, 0.8426879637622844, 0.7907135892555424, 0.94745301332212, 0.6082369274960517, 0.9368227649769048, 0.7875411650818915, 0.915860896572124, 0.7661167313702888, 0.8689735368900744, 0.7732836409484084, 0.8038223900688889, 0.5899607258860382, 0.7477221240522096, 0.6306091603557631, 0.7598035437864336, 0.5803929133757646, 0.931639592986278, 0.7127105655415684, 0.8822530837595028, 0.9729053921444886, 0.7873078954132182, 0.6965849637476873, 0.9255917612578453, 0.7367535141672508, 0.5827463335289591, 0.8496938559828868, 0.8466709106534897, 0.5804181579804035, 0.9244088540253015, 0.5994685767380274, 0.7298727446701956, 0.5872992501244195, 0.5055761952905282, 0.9353412925537168, 0.5016661164371696, 0.9725169451231392, 0.7629250064662907, 0.7947975754539676, 0.5360691060128032, 0.7504678754498005, 0.6489234927540559, 0.7097271995107642, 0.9154599399075943, 0.7271727842924841, 0.6057328325654, 0.5164788921393255, 0.9415894772898417, 0.6633803277919346, 0.502493187916339, 0.5888867493586345, 0.6512953115783187, 0.6599782676203778, 0.7056396548299411, 0.8095824388322455, 0.8763225575611286, 0.7149193659527286, 0.8664714445119304, 0.989073750647899, 
0.5255784107739889, 0.8322062626332614, 0.9378495530241062, 0.8791310814711553, 0.5771745346149879, 0.7234932935728877, 0.8937127125628832, 0.508658372014966, 0.6878734006890894, 0.8184426916294618, 0.7406129961550163, 0.7142358657303317, 0.5268176293063578, 0.9625492277336729, 0.6391332747596008, 0.6914909086561767, 0.8128653332092615, 0.9713711069072037, 0.9116501790548819, 0.8107681429223111, 0.6704868039583486, 0.9157625768795612, 0.9851922031563587, 0.8708002685730425, 0.9609664274169498, 0.8151108323655047, 0.812537807198757, 0.8456559242628178, 0.7455532319175944, 0.8282985095757014, 0.9161836603795241, 0.9975596747579021, 0.6258482231127971, 0.9793412559352261, 0.5949565542224058, 0.6137779113643733, 0.8496709133353114, 0.6901180671237356, 0.5692268716921616, 0.6638778810671984, 0.8083778653499383, 0.847107941441738, 0.5612750159365361, 0.8651711768723203, 0.8297029305550084, 0.7946888292910289, 0.7535494207350169, 0.5261955422361908, 0.7981665472762105, 0.7233855665010979, 0.6800730445345281, 0.5872320176976498, 0.9172395452189142, 0.6241876885938531, 0.6101876604302502, 0.5184477432557302, 0.9094372381925013, 0.5224207906471223, 0.9829033168077586, 0.5803604855065347, 0.5508895571903183, 0.5605276800751173, 0.6808208500564226, 0.8609582126309376, 0.8286265768397337, 0.9319372601010124, 0.5975842692001174, 0.7378820899736643, 0.6774185341812353, 0.7061564497914943, 0.7176802086718578, 0.6593088429213391, 0.9728430956873872, 0.9863091283447359, 0.732772887905814, 0.7427546651090255, 0.6286694435690472, 0.9097588823489493, 0.9117635256605013, 0.5477182381139347, 0.6730728874122065, 0.9231596015106602, 0.6303773063893653, 0.7987639391779089, 0.8812406240757951, 0.7181107825586317, 0.834861482783033, 0.7117458148182678, 0.5303346418575496, 0.5349756575437074, 0.9401077857119005, 0.6328113621835334, 0.8927712357619926, 0.8867074583546501, 0.7275566914047921, 0.687958103387079, 0.8931441528038289, 0.8907945330348794, 0.6082999159494771, 0.7250189228544117, 
0.7777981079673002, 0.7772152586609402, 0.9197908955661571, 0.5121172275010697, 0.5392235452612313, 0.7966663212571155, 0.9424251863989866, 0.6470426125363204, 0.9478281037164462, 0.8869672839282857, 0.8551281065831453, 0.8835913331856264, 0.5569727159641111, 0.7270560134010915, 0.6744117483301739, 0.8463626291985243, 0.8874034047423596, 0.8502608395698448, 0.6337987524042963, 0.9498049661880772, 0.6858133444109507, 0.6912069145121217, 0.5948889846085786, 0.6931897855583301, 0.9093558207211321, 0.7598549015519813, 0.5226364985267974, 0.7681605057389671, 0.7945342134556577, 0.799099158082621, 0.9407330485196951, 0.5295435586573223, 0.6938471217240232, 0.5636144899212255, 0.9193361132390936, 0.5456676458768042, 0.9943213985707344, 0.5612468605170322, 0.775783082034174, 0.7265711415533542, 0.9202155606112963, 0.743310098421336, 0.5935649165046177, 0.9097273989356605, 0.9803838960617752, 0.9785803853920618, 0.7754472808080501, 0.8813171769297248, 0.7041886398630361, 0.672354766401051, 0.6153945255450388, 0.9903139002552278, 0.5534858431797758, 0.8895538899144221, 0.520632236413056, 0.612470912396035, 0.8593040026596588, 0.9076455677627264, 0.6384729284552095, 0.6918673888497291, 0.950612684289236, 0.5843531120627307, 0.8725363892671687, 0.9899721665944603, 0.7484716839200667, 0.9314215183666542, 0.5623459975246738, 0.7606445743425122, 0.8636457476170779, 0.5031430592086845, 0.8559631407167386, 0.7965839102294814, 0.576007185855732, 0.577955903875113, 0.6471164544675391, 0.9074602447970299, 0.5730098568449762, 0.5451376636910644, 0.8999049872162733, 0.9187920328759062, 0.7596600852288447, 0.9349991461047256, 0.5565431240196961, 0.8140475932414373, 0.5201596512674281, 0.7109144898479699, 0.5271778449400591, 0.7521640377991603, 0.7332173234529216, 0.9925507033371159, 0.7633007845477189, 0.8582820581644365, 0.6194999431262832, 0.6813717150229737, 0.9398699627840481, 0.8243101917939522, 0.9743260142767449, 0.5678251643576129, 0.7468962139669846, 0.8088215968140691, 
0.723983605544334, 0.7369079113132802, 0.8224528640370329, 0.8389092505058899, 0.7333429971844756, 0.9128881274099019, 0.7242225417381292, 0.7009958802614716, 0.9131854000288497, 0.5702693046521888, 0.7253064053805629, 0.9547434400804437, 0.783279959276738, 0.9281193236594244, 0.8253232546442786, 0.989119598273708, 0.5955534311298579, 0.9322345272964885, 0.7168725065945066, 0.973086934708842, 0.7737871156177252, 0.82921992060102, 0.7396131929261109, 0.9893603802612259, 0.7819652780786168, 0.8294120591450207, 0.9976731117282996, 0.7591174522458848, 0.6578378359968855, 0.6139878693493477, 0.7191694935956305, 0.8535018372149077, 0.9280328794732664, 0.9462349130262782, 0.5247374431651199, 0.9623737873497658, 0.6929131166062148, 0.6386563129962589, 0.5539550627869478, 0.6087125049557249, 0.5592947284580796, 0.8197925828211943, 0.6996272695462333, 0.7327972089789728, 0.7366875431529061, 0.942967779057025, 0.7493366075518834, 0.5773266774997314, 0.7020603613644356, 0.5507651469116774, 0.7448450370688899, 0.8977763896014643, 0.6690975500259018, 0.9421831818142474, 0.6714696297406162, 0.5951852260104613, 0.9362295591970984, 0.8880785512294704, 0.6001659691540271, 0.9095497767356044, 0.9123205217035517, 0.9677469297301255, 0.755085227633259, 0.9414465172982404, 0.9603836031117088, 0.591758895584057, 0.9090057328710378, 0.7599494736419031, 0.6530050994500981, 0.6197398402320198, 0.6079594640336501, 0.9500581385387981, 0.9906663710658925, 0.6265342501008161, 0.6846942453225519, 0.7669737816198813, 0.6397779978180352, 0.689527275536274, 0.7593859658473358, 0.9504612197155209, 0.6600465641050854, 0.7491629625474169, 0.6735115234414699, 0.5396374420087715, 0.9800923554100494, 0.7519643931560149, 0.8855240703499858, 0.8106545067945832, 0.9229606007278902, 0.7915617179070031, 0.6601321339761219, 0.5296300359896384, 0.585247761928938, 0.7900130913770785, 0.5290549335642125, 0.5782049350668861, 0.6712128613744452, 0.7425517588969297, 0.7195850996695484, 0.9592687380219649, 
0.5539602272482301, 0.5126071160429795, 0.8243944898036797, 0.7101358133515159, 0.6413253740146563, 0.8172849619088355, 0.5227753406902822, 0.5542195128952239, 0.9608200763081616, 0.9101530744620567, 0.6267145173756377, 0.6961767339974321, 0.6559831724272083, 0.9556510084633755, 0.7389233293715352, 0.8882826422925549, 0.843000668859003, 0.8243064724930953, 0.9842821380740248, 0.6428087679340925, 0.8277942027467502, 0.6377410625725558, 0.867796403538191, 0.998301070446455, 0.6942184981674644, 0.5033435464493218, 0.9552593457697316, 0.5590253231895869, 0.9024800133392643, 0.5584953548131926, 0.6349229804949297, 0.8781411967565218, 0.5564799274101844, 0.7973188657219175, 0.652926289652614, 0.9839008167360901, 0.8872233967456239, 0.6473326477952965, 0.6382645097427988, 0.8504657642957059, 0.7608438472712691, 0.5548019263018387, 0.882947128521959, 0.6508376048395363, 0.651320369945448, 0.9072973790750485, 0.8081352902649857, 0.9634350737938046, 0.6469314319648227, 0.5703480634945539, 0.8928575066140405, 0.9653166125545866, 0.6554021186895751, 0.7935268149895555, 0.6481035121402303, 0.5706157714781814, 0.778371244777019, 0.5294847907156016, 0.5173407683528896, 0.6811567160837679, 0.9483451653413787, 0.5042426530594993, 0.5413155760132016, 0.8702486085516772, 0.930141207728837, 0.8178754367873193, 0.5100734882474096, 0.7331202995631401, 0.7544585425884793, 0.8713918904800491, 0.8582729600996848, 0.6662455672294392, 0.6388213257497926, 0.7206150479547327, 0.9123457248658671, 0.5718779647803738, 0.808835899979883, 0.892450193933471, 0.7872582424120025, 0.5861590467833769, 0.9017193717841993, 0.8294255407792388, 0.8056790929775641, 0.846413915894516, 0.6449517576636525, 0.5387185486408288, 0.8323797601857233, 0.6022636556089985, 0.6397129986472568, 0.6444034239969583, 0.9370046815096711, 0.5804278175420305, 0.5799608188051452, 0.6851208449605093, 0.9834819467916668, 0.6847790148847077, 0.9059269940687875, 0.847719493777177, 0.9214608379243197, 0.5660770337734442, 
0.8317698233437358, 0.568209830870405, 0.7055027198957218, 0.8549247749814528, 0.9411357631999555, 0.724931431295184, 0.9701496783879971, 0.8893404347470824, 0.7761739785632584, 0.6954778204593142, 0.7687533388523431, 0.7072418630451355, 0.6592579391357007, 0.8220949793625806, 0.5161901923782004, 0.748614696929979, 0.7006581800173642, 0.8963131750818112, 0.9457759552780143, 0.5342883944373416, 0.8024239639725456, 0.7306889866808238, 0.5678837174653304, 0.6977020800816532, 0.6744687234025386, 0.9535643869076846, 0.7055802761131953, 0.9913731892697798, 0.907208400993056, 0.545309628793375, 0.9566619514443113, 0.7986796567220495, 0.7410249211355081, 0.8744039976288213, 0.5158261315490603, 0.5449480793694519, 0.97671991828438, 0.5193214812314211, 0.8840347428715367, 0.8274455928226454, 0.7248253398325563, 0.6308619986835122, 0.9668095415630542, 0.8704456731262784, 0.9245464506537746, 0.7073852006295442, 0.9878701839177655, 0.6676673018058903, 0.8082815405290416, 0.8049851555975354, 0.9274537522154351, 0.9129678455822191, 0.7716228632148097, 0.5156136052941369, 0.5057165373233339, 0.8549686206384062, 0.5800463913847, 0.8239512790494409, 0.5612531032818975, 0.8056518940024405, 0.7255874245047351, 0.7445743645227637, 0.5097606130180253, 0.7087675097884533, 0.8461924638236713, 0.5501003244294338, 0.6871746597985129, 0.6583936770950765, 0.7900134844653575, 0.5447684569393143, 0.9056837610502865, 0.5291790455781997, 0.5644022864410354, 0.9413928365796509, 0.6044105447639582, 0.922052393106808, 0.5321195813951359, 0.6956430421875464, 0.6516460326972451, 0.5214198119841884, 0.9246850090720609, 0.6889184013714529, 0.56504168058009, 0.7554789385374796, 0.6362211853294437, 0.8549937507942016, 0.845753019717036, 0.9911056811662582, 0.5869827207828546, 0.5683619072444346, 0.6235524594834205, 0.8908578621310982, 0.8442042503581786, 0.5268478503290867, 0.5803455341448394, 0.6792481503868841, 0.9387481899399888, 0.5846574352877881, 0.7209876561241488, 0.9120864959522206, 
0.6656360122205367, 0.9692862274548937, 0.9562380020166187, 0.7044887039854835, 0.8931915174150689, 0.5134175216188548, 0.5849080080494831, 0.770096847438699, 0.9025954058594579, 0.8880834081264036, 0.6267120986638919, 0.6113247533930519, 0.766953832213394, 0.9612124704050115, 0.7878202907605569, 0.65659947326616, 0.5688849400557769, 0.956530006389406, 0.6208941822034733, 0.564554765355499, 0.5379176227934686, 0.7072677914057373, 0.9221558392795209, 0.9647829607398635, 0.5888276422166072, 0.8103726032788993, 0.595652107092049, 0.5844507049643795, 0.9063138627876208, 0.7524649902661891, 0.9288588311040464, 0.5662626893335689, 0.9775841818986746, 0.5066387127952878, 0.761350910122766, 0.6969415733349549, 0.9303995645562784, 0.7579732691880161, 0.8509378846732889, 0.8453140325627209, 0.7722667051222674, 0.9373760249354927, 0.7123646621480509, 0.8962728208239319, 0.6481425619369239, 0.8132792036016585, 0.6056083010045161, 0.7066222344503051, 0.8918860189597614, 0.9900880552173219, 0.6466578825761378, 0.6315298576612014, 0.5981474723468088, 0.5099733089428062, 0.6834675689735576, 0.6324653015743622, 0.7111900617831801, 0.6947429595368366, 0.5333173032267051, 0.5850256553810947, 0.831726244896354, 0.7033343825122284, 0.823749240967214, 0.7260850357058883, 0.921091921437596, 0.5264159631824572, 0.5782302147690994, 0.7018128967060184, 0.617616735534533, 0.9243198711302676, 0.7692186145348894, 0.929405591594826, 0.5699003724393701, 0.7813466486247691, 0.5112290144545442, 0.6313589020867353, 0.7439651057987424, 0.6772247231986563, 0.5790635054773229, 0.6648652583534642, 0.5966233136501371, 0.6616842124302642, 0.8365074026331177, 0.5530595936097087, 0.6412246474173566, 0.9641503151659624, 0.7617246823085646, 0.7335016080936483, 0.7873449235520071, 0.8281789077911654, 0.5197929675983065, 0.9340823309844903, 0.741980295361518, 0.9367675279830692, 0.6370345830492588, 0.693070993083494, 0.8154641793815167, 0.5320493603627063, 0.6359481635462528, 0.6211473582069778, 
0.7024507131649866, 0.5188198273345649, 0.7387412896795535, 0.5440099889359284, 0.6167926949365382, 0.5995426885247437, 0.7969217402413112, 0.9354168083971819, 0.632816044109688, 0.8942282218962184, 0.8384839060275824, 0.9514138402305925, 0.8529068529279775, 0.5414561038052115, 0.9073271303235217, 0.6779235093589933, 0.9553487353308622, 0.7498154475218977, 0.6359516544014764, 0.9577112296643184, 0.9247757685881937, 0.9787535760343193, 0.9549395980315546, 0.8757896867735617, 0.6827130625708839, 0.5904307395846307, 0.6160167984307148, 0.6037275295774716, 0.7366492106894131, 0.620815819415189, 0.5947055003147805, 0.877232935411361, 0.5518560044665419, 0.9087548462215476, 0.600889820978496, 0.9757873544191522, 0.6401954173536396, 0.7369451814624435, 0.64360893989194, 0.6261171269863929, 0.8098945899444048, 0.6760743757962988, 0.6546493733103803, 0.9983313867048873, 0.6544171417230289, 0.5106087178025582, 0.9969175110240813, 0.7936914103061032, 0.6131883788375315, 0.5799341118579902, 0.7372310565582734, 0.902927675673668, 0.6576832931608207, 0.6049126930853198, 0.8103892613737649, 0.5305991610579113, 0.5349218444122756, 0.827650116708067, 0.8184059536967041, 0.7942707562139562, 0.790507506600011, 0.8291270253867558, 0.7662425144665974, 0.5397372674849068, 0.5288048354257553, 0.5213664353111033, 0.5620516450053636, 0.6198633544692007, 0.6316208676958528, 0.7456172157704385, 0.8138980219453793, 0.6900300804568215, 0.9320919672296806, 0.7496306614843997, 0.8002353175647248, 0.7792841437749906, 0.7438889272641818, 0.9122696059823214, 0.6431698855333963, 0.6372543340500729, 0.6775091421600006, 0.7046322424094604, 0.5512352423013109, 0.8061190359398289, 0.6326988105842686, 0.7664823445831236, 0.9506514819758953, 0.6263912333544246, 0.999224366205417, 0.646375494163596, 0.7357767705698544, 0.6338417184013533, 0.6253656737512192, 0.7481403202413626, 0.5030882016468954, 0.8223547631790011, 0.842338550275479, 0.7836120093611665, 0.6962784649853471, 0.6823781204942814, 
0.5407403981892052, 0.6603233905048849, 0.8687833582091791, 0.5326957229959601, 0.7027188920856986, 0.7634094298545497, 0.628991585062513, 0.5517466208029547, 0.8071103209390986, 0.5335924448117274, 0.8881303794864193, 0.5360166186492488, 0.600840457883228, 0.7398595724191069, 0.6456831790892817, 0.8042248241619316, 0.764218311591917, 0.5526683459948176, 0.7977276160168388, 0.7976480993341347, 0.7157444139957703, 0.7770303700414827, 0.5076009638847843, 0.7844521220242426, 0.8509029028141326, 0.871336636262803, 0.803249615249902, 0.695415356947209, 0.7651460343864505, 0.8768511717021101, 0.8428982217282377, 0.748538140772729, 0.7154453367391865, 0.9171178264440769, 0.7676200837756444, 0.9243991955996244, 0.7152374939034976, 0.5900376012593286, 0.842152796222458, 0.533583784914268, 0.8208277000078487, 0.7950875858766235, 0.6090676815804873, 0.6060699719250103, 0.7065686262575182, 0.5078554119411228, 0.8768682262259548, 0.898084712554696, 0.8368797981617919, 0.5259752554475461, 0.6707068639904166, 0.5440182338362041, 0.7311873153294897, 0.5669421863081321, 0.9986796811279781, 0.981355069602699, 0.5525318842990418, 0.9712422462881138, 0.8887095337059683, 0.5923323696583067, 0.6764440360659644, 0.9930885785146439, 0.7733318212635152, 0.789154494264585, 0.7546561865935889, 0.5941900076122778, 0.839766597915827, 0.6346382831018724, 0.9342198713520811, 0.719843311916798, 0.5087729486705812, 0.8005924170478571, 0.5534057008687675, 0.563533737162291, 0.9982324401641505, 0.9990680305266152, 0.5399711351797942, 0.5732820305987489, 0.7669770063763206, 0.59437274901419, 0.7999190428917666, 0.8871107771770774, 0.6250227888483334, 0.9512635439979087, 0.5914709371144105, 0.8188420116081863, 0.5285572950427317, 0.7988521994009457, 0.9123993137426525, 0.7605599873457299, 0.8651661979717163, 0.7678158406669385, 0.7963333124216467, 0.5364316374417002, 0.925383037879988, 0.7856426223610646, 0.7588123429931342, 0.6326375217373429, 0.8006601537842071, 0.819516903588393, 
0.9431659915310491, 0.9835277402950704, 0.8862360103555358, 0.6213850953689877, 0.5814734171518139, 0.5078927828887623, 0.8187806140878997, 0.6518675239448718, 0.5978652749303447, 0.5460649836823572, 0.6947596883975289, 0.794768776818514, 0.8680528925636538, 0.7992633217767457, 0.5850579957896993, 0.615646361081574, 0.7664563342564787, 0.7055564447495031, 0.6887540648394506, 0.7024382632168837, 0.5826256867030275, 0.7739296453449058, 0.804046228486084, 0.6826668606254931, 0.5457946135521358, 0.6297576231503386, 0.5275227648740979, 0.6972577220567534, 0.7894376615151238, 0.6514251855722697, 0.911758119774732, 0.7025230501111632, 0.758306279233959, 0.5554017413273441, 0.6385084212918896, 0.6168495375194406, 0.8899981837377384, 0.9178023472054984, 0.8574245139813463, 0.5234076447761363, 0.7178345812680149, 0.7488496632208492, 0.612379812179987, 0.8746324353635189, 0.5635009063663328, 0.5643184235896594, 0.7914957823662037, 0.5335021947326691, 0.9015776166397769, 0.8729598484585586, 0.6084586133473561, 0.8015909613672426, 0.9184793008535432, 0.8363116420001062, 0.5500184587146968, 0.6172663662689915, 0.5591424814638957, 0.8636132945404151, 0.9295633555532865, 0.6808718604920361, 0.5961973309020169, 0.8966299580215336, 0.9015135391205673, 0.7913795486543223, 0.8146934281932274, 0.8819370475345731, 0.8184480548659114, 0.814251606376839, 0.6173439699973651, 0.5769625795744191, 0.6658185959577236, 0.9136767553835909, 0.7903024658796282, 0.926223541181471, 0.9777506122821549, 0.9555630749101713, 0.7360546643539198, 0.506154128144511, 0.5907526483454489, 0.989331827917541, 0.7383121416828127, 0.6822821553914236, 0.9523845530439949, 0.8383236595306992, 0.6443234120988943, 0.7167687342608916, 0.7233435846043064, 0.72660686717679, 0.7568496003350029, 0.621246587664199, 0.9055655265431181, 0.8550456949338043, 0.6491727985774549, 0.8058970315240566, 0.8847089538142532, 0.5247763206227504, 0.944465515339977, 0.8878450613487994, 0.8675711486178135, 0.8066010426948166, 
0.7032810592931822, 0.9512608035176242, 0.8418654687904665, 0.5722787490536898, 0.8119031423351251, 0.8243771154955286, 0.867857625144801, 0.5103303479001471, 0.7034271113027567, 0.9220786607203733, 0.7520267853250293, 0.7750203543299355, 0.825977061636587, 0.5345467397497471, 0.8042013623124536, 0.941569436635615, 0.6092149940475171, 0.506440874774736, 0.7732091784835612, 0.5144078848393003, 0.9825430962367442, 0.935982934650399, 0.6574676678793117, 0.6500183959554309, 0.834482372727986, 0.9339635870329022, 0.7849513666469978, 0.7909906924618868, 0.6037670287447499, 0.8644902891076043, 0.8648490437442354, 0.5763777814810234, 0.9841223470938851, 0.5749704201028552, 0.5953431069066207, 0.6966434325769102, 0.9141248304473788, 0.9199835191460211, 0.654136516827352, 0.7843121084588338, 0.9369307290500968, 0.941106438747288, 0.5008554746791419, 0.6925301923507942, 0.5776382764888095, 0.6476978287171421, 0.97761923783717, 0.6664979943777549, 0.6441697958170509, 0.5055616764578468, 0.9618916371548552, 0.5912749240917636, 0.5800256818137466, 0.7727995376309924, 0.944608564665488, 0.8021874888485245, 0.8076893285602014, 0.6251081077981877, 0.6304841749412321, 0.6978858522248479, 0.8377220691783096, 0.5152907777870828, 0.8434303230909619, 0.8018884877525755, 0.5391777220298971, 0.7332971739935081, 0.7601564901224003, 0.9749333003133294, 0.6085342861200838, 0.8768784440989751, 0.5482083913178508, 0.703431306685824, 0.7553184269245719, 0.8197092600718399, 0.982804501537772, 0.6999935480727031, 0.9902921859646188, 0.8818575739382994, 0.5663146042860342, 0.5474190085826703, 0.6252727959799784, 0.9125983215582493, 0.7098310446486968, 0.724173720107556, 0.9266524831351928, 0.6776773177337574, 0.9908810480637668, 0.8242829952427233, 0.7335612806509708, 0.5529353580716938, 0.759018921914816, 0.8641456815492483, 0.6796045366716467, 0.9324821520180715, 0.6892880742588136, 0.662622657317119, 0.6908685381087372, 0.695017422546626, 0.7455125837686118, 0.7552298658912004, 
0.8134904402994614, 0.7197330808607073, 0.8727136059756546, 0.5193964812678424, 0.5891626338999867, 0.5524512745031518, 0.7952172020387802, 0.6106551983388533, 0.6076618040750625, 0.9730887466445723, 0.9762910403408515, 0.9827380526040681, 0.9348927063541691, 0.6661073110063682, 0.9887662894264975, 0.597300111483407, 0.885765493633043, 0.8723536051699756, 0.6309625807979997, 0.7926585520024421, 0.5382550346855568, 0.5803339919309878, 0.6980339395764353, 0.6952097075357005, 0.5633839478183873, 0.9896145267253358, 0.5273205650610221, 0.898702663128192, 0.743774818392877, 0.8212386577136916, 0.816545352737574, 0.5945380639409407, 0.5400370080546173, 0.782632089512788, 0.5487745797365329, 0.6024302511672796, 0.870241747145435, 0.6251439217162162, 0.9708703737330757, 0.9028405805333327, 0.9471146296739686, 0.6377510925860419, 0.5238335912729928, 0.6129139951521119, 0.6379076111097771, 0.6651799877359981, 0.8251422394070762, 0.5345919269990878, 0.6475891003827569, 0.8363575429012395, 0.9305488991197883, 0.5820948992904348, 0.7585604727687945, 0.5765408208760037, 0.7703380183240149, 0.7578972926671244, 0.7660675383164915, 0.7511816838565909, 0.7722941541791701, 0.8741400757146529, 0.680638330072199, 0.5394251880832264, 0.7005980811176965, 0.6942328937060371, 0.7956935971654262, 0.9704147641932992, 0.9285842434772296, 0.8625747561689727, 0.7512806440173716, 0.7886954448171613, 0.5956281201871172, 0.5277211911342677, 0.835612893082591, 0.5456379294465066, 0.8070712378746282, 0.7942241887242502, 0.6666187381549146, 0.5359056501010165, 0.9572040156500597, 0.9418263869249037, 0.7088496528957905, 0.51603689619542, 0.672408026346818, 0.9847512906629114, 0.7464983186976193, 0.5791343107479917, 0.7110032866241556, 0.9258055776176781, 0.5226316444867789, 0.9540342419123983, 0.8450760796761514, 0.6015158116075039, 0.787885158728819, 0.720238450247694, 0.5233252935886312, 0.9699283732114923, 0.8704663932967069, 0.6475764127738095, 0.5576003784712817, 0.8968731456595154, 
0.8059174199657326, 0.7952908764934361, 0.532030775718181, 0.5642470814840086, 0.6841344537603058, 0.558396604107041, 0.5131191368204119, 0.6553450481002422, 0.784832066221794, 0.8046082821743228, 0.718545088295204, 0.8516375177716696, 0.755251973845674, 0.5529523790816449, 0.8576973673204383, 0.5136381677204436, 0.9369559830873948, 0.7672432281834649, 0.5829024103031364, 0.9068181149759913, 0.5899275982408961, 0.5379535286946058, 0.9580790761282221, 0.8118492944203173, 0.7615710128339421, 0.625136395749157, 0.6989639552214911, 0.91441987501514, 0.738649366276003, 0.9485999334907868, 0.5744677314109271, 0.5599449852019249, 0.9568743709481878, 0.8525491165867116, 0.8382231926536712, 0.8115435965548291, 0.7065495566835238, 0.9257614412294973, 0.8566208602759874, 0.6664027746114226, 0.7852358678948901, 0.656504506352255, 0.5831823342583091, 0.9506878075749217, 0.868227262806819, 0.8263641223325207, 0.8394417959568993, 0.8079796206893194, 0.6394599940373458, 0.674724632267329, 0.9431692446899489, 0.8991740609019363, 0.9933276318196989, 0.8058198039067999, 0.9015960487977215, 0.7400856043861136, 0.6849427509362704, 0.7054060701613905, 0.8576977410929019, 0.6491725724733004, 0.862225974494127, 0.9783870417031594, 0.5288271502284179, 0.7076678243831651, 0.9083019152795782, 0.6572625120502276, 0.5128870159017966, 0.5754897462696323, 0.9308501139625087, 0.6856834875113, 0.8191504959965494, 0.6623800805695442, 0.656006917958395, 0.679575603408269, 0.8960607615022269, 0.8911277793505445, 0.75462925368157, 0.7956586158365111, 0.6582968644297538, 0.97188650978316, 0.8612244072385011, 0.576734278517889, 0.5397813215588674, 0.8443391049153187, 0.7390838424253892, 0.9396219580338574, 0.8240050976231286, 0.5992533805555493, 0.7120147438899987, 0.9142221589636643, 0.6758652281361773, 0.8040466296894966, 0.5257488525060934, 0.5082762608224474, 0.5105653820905892, 0.834652312839556, 0.7619402557619708, 0.5101596307679689, 0.652217530453173, 0.7143333977186158, 0.9925186047239734, 
0.63683566765111, 0.7785446135025127, 0.7746460748382078, 0.9909520054796148, 0.7920427900117797, 0.7078619046089643, 0.5691164072923403, 0.6691184762601169, 0.839162109428915, 0.5916065635814511, 0.8252469348618747, 0.8618311955804142, 0.6413471487190032, 0.7537206724640553, 0.6368060256431152, 0.7760605424561151, 0.8471026871553842, 0.9213584186544286, 0.738045439790767, 0.6836267423355148, 0.7743230999634043, 0.955390476639379, 0.9018503843606771, 0.5986204565235075, 0.8309207825091169, 0.6471598591394134, 0.6683769109616495, 0.6041759877663329, 0.6729863072951418, 0.7046120699176686, 0.6339796211953601, 0.70332461802633, 0.8773518140039314, 0.7497310388547078, 0.7789559348837602, 0.6645142394550387, 0.5869163406139664, 0.6837765745150111, 0.7508038971944916, 0.7929043755280474, 0.6234280682254206, 0.9561918292708134, 0.5538345848305196, 0.8400147076376521, 0.6693739423536014, 0.6634828771230461, 0.6288861913520043, 0.8242556295617707, 0.6802410385992779, 0.731805001116274, 0.6204936308888027, 0.7103527635953821, 0.8840162677191001, 0.5516054490149405, 0.5528619739243659, 0.6538547184873218, 0.9722351745330264, 0.91230316408408, 0.5456533277855813, 0.8004608512889078, 0.5002702362715956, 0.7536399950876143, 0.6025814223811272, 0.6172566952106335, 0.8273807872740795, 0.5614260233025398, 0.9722295251904742, 0.6621545542762894, 0.6775764474634075, 0.821222559938547, 0.6166093815147988, 0.8021473372604893, 0.6180911600733706, 0.9560839664541907, 0.9556908127534299, 0.6870378970958954, 0.5829297739328869, 0.9411320696235588, 0.5969941337356979, 0.8188503410134682, 0.6923890666903798, 0.9943495108889517, 0.9719735127571194, 0.6852801475620726, 0.9840474414111502, 0.5201295807744224, 0.5478273048829045, 0.5044535135090866, 0.5702305351102199, 0.8432043488856175, 0.6805531081340204, 0.7687588498489495, 0.7632418169764763, 0.6076979935620793, 0.5273342125248878, 0.5995837877183722, 0.6802520005943793, 0.6117076180675085, 0.9467731460476974, 0.7248459562802633, 
0.5785918993558081, 0.7372868839652943, 0.6523171801789307, 0.6075828757154074, 0.8279441707813736, 0.5990171094609007, 0.9248064446335691, 0.8252190728735493, 0.5991659404121485, 0.8752575837063852, 0.5901454219261888, 0.9653398146424734, 0.9763376282238827, 0.731822860210514, 0.8908197648676832, 0.5871945142546839, 0.6978405836079296, 0.5979652947339879, 0.749314984203854, 0.7385194141342315, 0.8619004530863832, 0.8323100008137887, 0.5964034576235941, 0.5589418004749035, 0.9445105762497643, 0.9875468663343134, 0.7418341273364366, 0.6307988944358245, 0.8699966029067387, 0.5851560093081378, 0.5094066604597747, 0.7929563067935255, 0.825194575537788, 0.7877895844384042, 0.6343851212652083, 0.5069696608137733, 0.7235508946306186, 0.64230706591171, 0.732838921965635, 0.6594570799384052, 0.7639597159920914, 0.6881985930400821, 0.6197491680443924, 0.817693127068549, 0.6849764008848185, 0.7462994018034044, 0.8581691379072478, 0.527597550881161, 0.5311113221311706, 0.8698173333712779, 0.5982655657016841, 0.6446058413119875, 0.5952990986286864, 0.6281259188560374, 0.9930033037621201, 0.8407285618700755, 0.7644656633833485, 0.8498632241097042, 0.50866087327053, 0.9538709361577064, 0.7999872893886142, 0.864584070547086, 0.799536241963783, 0.7040803440677618, 0.5989005965916863, 0.5110614484695213, 0.9241387594733346, 0.9561792327552201, 0.9262319972829524, 0.9195498321687705, 0.7011757281211142, 0.7412501434681771, 0.8236634075473662, 0.9117557205022789, 0.7993864129819674, 0.5596812847927757, 0.5308038086180029, 0.8238170355479091, 0.9017464990724073, 0.8837593670184782, 0.9134426703189846, 0.5711518150304562, 0.9697979802847131, 0.6749488071278436, 0.7452416212815551, 0.722164321790499, 0.7419152525276642, 0.9804364037215205, 0.9165439339859848, 0.5052392662485024, 0.8722855082649696, 0.8292066415411767, 0.5768231215059179, 0.509775941312413, 0.5204974475300421, 0.9427933639396056, 0.9033866399105043, 0.7705869887974699, 0.9849343877543753, 0.7388901855791212, 
0.5062832040021157, 0.9325659377329969, 0.5414470161946208, 0.762474692274616, 0.8617501751286192, 0.8373478348137813, 0.8670339992760224, 0.7938448951898164, 0.511676108189554, 0.7187088278351854, 0.5296278540144479, 0.702795087275057, 0.9431013987950783, 0.6411916715068932, 0.7161524127769913, 0.5603550033323483, 0.5106235681898327, 0.6731770876386096, 0.6378813183825057, 0.7946176572743651, 0.5299569395772208, 0.9118089804930574, 0.7784971413625069, 0.6550111504896371, 0.7633395306742776, 0.7434807548398743, 0.8984269877516815, 0.8644260819109069, 0.5997715299876247, 0.5315969146071031, 0.6236339095918212, 0.782051102011335, 0.7084450968755411, 0.7475034509040082, 0.6779783208226229, 0.6268581258926826, 0.5031829969494408, 0.7152029154032368, 0.5230705938018436, 0.9924822075412401, 0.8810243900476332, 0.5227268413139732, 0.7774445299795004, 0.8183125817974694, 0.8890566554961934, 0.5168665307821827, 0.6416060066311997, 0.6942033393967926, 0.7351076847980205, 0.8079198872104383, 0.8851613642777473, 0.7075853448659637, 0.9371667410940985, 0.9252156142148404, 0.8185406413458016, 0.6702297145504961, 0.5488914484737671, 0.6447723920938938, 0.8068156176522068, 0.8067058464479466, 0.8084072369017101, 0.9388238217409272, 0.8668367871296294, 0.6626435516579912, 0.638111402220606, 0.6495324505230193, 0.6269773288288081, 0.532911245187887, 0.5533672780385163, 0.6457342920281053, 0.5117850421487745, 0.5374167212993659, 0.8312763180338381, 0.8568646924971346, 0.8073041873976383, 0.7519851539033535, 0.9143906100373975, 0.8807422866885635, 0.8939517391317766, 0.5282136838231111, 0.5463481462580697, 0.9512908360816758, 0.6633388321701015, 0.8746479014966841, 0.6578506657969181, 0.6726741016351079, 0.8898411822965675, 0.6258480127253887, 0.9812099412711048, 0.722392603898945, 0.594110121951255, 0.5790932611464187, 0.9037971636954676, 0.9429651443243136, 0.699976674173699, 0.566329455670552, 0.9065970108030761, 0.7749073698198534, 0.9454768035635824, 0.6347970317617522, 
0.9126881903065374, 0.7628759487888335, 0.6956691693740564, 0.8068560526429156, 0.5413403597612109, 0.8824531939699483, 0.9310794730855356, 0.9520503757910845, 0.8483575238079153, 0.8738552535662109, 0.7889068480373791, 0.6098362986008403, 0.7648812972538512, 0.93943045505072, 0.946397917018313, 0.6719389481977487, 0.5002687241179561, 0.6033527755294819, 0.5772960445389577, 0.8208605806158452, 0.6988941043993453, 0.7883581491289878, 0.5967612742228255, 0.5344431906811122, 0.8812271864161757, 0.786385200454548, 0.5564571962786398, 0.8556706286051764, 0.6276843551621887, 0.8710814348801439, 0.8393479857237758, 0.5587730659408083, 0.6366173210598431, 0.7218919863498108, 0.6899103960403312, 0.7517245539975632, 0.602311706330493, 0.6895205236585742, 0.8485091445653162, 0.8954724912585541, 0.7233268366141584, 0.6402451719251574, 0.761085728807546, 0.7721895415001923, 0.949043886071743, 0.5035849322784332, 0.9673139974149518, 0.7037154065440308, 0.9227092334659286, 0.5107381204406096, 0.6481451012784929, 0.8147521777238212, 0.68191762698843, 0.5363015197086873, 0.962153077887118, 0.6092960306326796, 0.906048474605071, 0.8336645399790408, 0.8764480533122765, 0.7183770363979046, 0.6339492247972074, 0.7435752710116317, 0.7604254602393639, 0.8142532178707605, 0.6668393935342476, 0.6287474587573195, 0.5506154891352742, 0.943190770115675, 0.6442543399654268, 0.8885972351016385, 0.9434510951560826, 0.6648931482869228, 0.8256572635732209, 0.8614025812753245, 0.5022324085742194, 0.5443826738917021, 0.5857001437483658, 0.9628237131630115, 0.8035522125595751, 0.7278887299567327, 0.8958090934217726, 0.5206892448370278, 0.9421161707826884, 0.7809530280980761, 0.9713711365190332, 0.9674875552371525, 0.6719757722660187, 0.7976308696267671, 0.5476605793687662, 0.8182495438912152, 0.5520424767853065, 0.9945331159263326, 0.5361687640754945, 0.5731013493873355, 0.6350535020655154, 0.8333221885369677, 0.6182506962017615, 0.8597261575020281, 0.8012199133378862, 0.6364301544886453, 
0.8269561394031779, 0.593371016249092, 0.6101683050877142, 0.7405817531027338, 0.7319285743299763, 0.8590897256369824, 0.9825668926709816, 0.7854668924805598, 0.9995290275572561, 0.5744166083305103, 0.9066735716330669, 0.5868992038954326, 0.9379059310179518, 0.8330364023386723, 0.5409180006602297, 0.7966991195198769, 0.8113788845966998, 0.5076850711829131, 0.5475693355143259, 0.6263278881799437, 0.9509087996465657, 0.672314980525771, 0.6006815433491698, 0.6864254307178143, 0.9109567128997644, 0.6157433625481956, 0.9505542667873307, 0.6526717069848653, 0.7930122033311795, 0.5511625508025597, 0.6729671554875069, 0.5651328597733076, 0.6012645383064448, 0.5391258547180762, 0.9576035550929913, 0.9394363741109644, 0.8753856620646008, 0.6905359072207482, 0.8566428306730894, 0.5128098167659225, 0.9074651517240601, 0.7093491552891663, 0.6406217726044423, 0.9032641587664467, 0.7258665202473943, 0.907295095098216, 0.9264033658960555, 0.9806028301894802, 0.7223329992915501, 0.661921781561458, 0.8798673428492243, 0.5563585618753777, 0.7973113667593638, 0.5737142994149615, 0.6153701936504674, 0.5404914387294141, 0.9633520402631263, 0.7295173499863452, 0.9973872466575775, 0.6742665059440849, 0.8571419702519583, 0.9724122092013749, 0.8765844507001163, 0.6869403352486849, 0.5776003159711585, 0.558771554386053, 0.7105287345206627, 0.9231082709889196, 0.6988299635250819, 0.6312967440706361, 0.8155783955164282, 0.5716289251952515, 0.7953146801203381, 0.6420619254033909, 0.8812045666032057, 0.7341508785200566, 0.8884210029858914, 0.8856986233123945, 0.5543055003026132, 0.6561933001100311, 0.9852379537171532, 0.7029124938491724, 0.9308682425019639, 0.6873734327663987, 0.5102054088247514, 0.5579422668578362, 0.7478120893456746, 0.8715771915789198, 0.6214637409719157, 0.6283377844046596, 0.8051021267301159, 0.8769098903760684, 0.5831953296253392, 0.522651380371669, 0.9172733499293163, 0.5211244605169958, 0.5369715283134473, 0.9506853121805824, 0.5844028921710466, 0.6599558864456491, 
0.8600065729371167, 0.5516016495672249, 0.5085093950577377, 0.8917710816080358, 0.5067706954757383, 0.7700678805068186, 0.6780838253405875, 0.9377634101591532, 0.9815704700370171, 0.7113585489108034, 0.8855733234393088, 0.7782989649712808, 0.8986616349639558, 0.617314252468266, 0.5757983293556264, 0.8593155154642771, 0.8372749688304408, 0.9324039617253118, 0.5156028631248226, 0.6702555527513783, 0.608168264909272, 0.9863230525776794, 0.6503962890461921, 0.6712075476688283, 0.552513720454838, 0.9896785693665007, 0.9950337202616873, 0.5403957123443066, 0.7240771536803288, 0.9107650852834133, 0.7253529851653759, 0.8913234135454818, 0.7436048579815187, 0.8037398279866467, 0.9784634782384013, 0.6366492573331486, 0.6624718083741798, 0.9759438871015973, 0.7947441096465411, 0.5240013308034641, 0.9500408804099572, 0.8403304239305869, 0.969325107125696, 0.8225394210034824, 0.692567120486468, 0.7415045506909568, 0.659483548015409, 0.9144608262898397, 0.7983614832656027, 0.817742357884386, 0.951615865080724, 0.6498979444506312, 0.8926447891806376, 0.7032333885372812, 0.5209065904744894, 0.6377683741602296, 0.8251863665754422, 0.6407876239724799, 0.6401112620806302, 0.6405920364210043, 0.8316870013633721, 0.8759489162044893, 0.7285759443916475, 0.9959722074800013, 0.6944691010851375, 0.7826662211547306, 0.7338732846150614, 0.7116176879385456, 0.6004820958282859, 0.7542700240839383, 0.7568548346891188, 0.5944959827409362, 0.72541405130083, 0.5469536182763173, 0.5339464985602613, 0.9976107297179609, 0.7850714478316354, 0.9096540418637296, 0.6769862228770469, 0.6424838634889591, 0.8983624827269454, 0.709669112871998, 0.8530955495215521, 0.7288258704864423, 0.8157869964873203, 0.6774805770533532, 0.7995843238070084, 0.5227609641889523, 0.9194966589677493, 0.6607504644849806, 0.6313814245582295, 0.6546787340435364, 0.5028522508227153, 0.9725562634002416, 0.6432571522476426, 0.6928777942563651, 0.6741600232595253, 0.6611467100012647, 0.9369153425416905, 0.7926900528867253, 
0.8178464352695513, 0.6056679836487678, 0.5022874923837191, 0.8936930873959807, 0.5349339663412688, 0.5300238146081788, 0.5155418506003968, 0.9918966640918917, 0.969876131650153, 0.8409890240947812, 0.8957775871706504, 0.7766793131440128, 0.8592250672037711, 0.8466028613330732, 0.9396430586038367, 0.6448675120647892, 0.5591152877872179, 0.5696998407159501, 0.8537978794440217, 0.8075858409015312, 0.7040905185485191, 0.5891810250166056, 0.8639023435355788, 0.8638640721061812, 0.9099087045802794, 0.5934181737641788, 0.5509793850673073, 0.9143465693620818, 0.9999357895293539, 0.5449558423822232, 0.540441339982597, 0.9792655874498931, 0.8248067871106166, 0.9252624116190924, 0.8113178053750603, 0.6307450812399693, 0.7388573472743915, 0.5781075148621464, 0.6072781048203646, 0.8274738968376565, 0.8325440540124918, 0.5940548083904802, 0.6500286303516793, 0.7611317716514522, 0.50235936560556, 0.575514636182315, 0.8714156652275742, 0.5243106141970636, 0.8589639723968939, 0.7465084321167816, 0.7365748236597189, 0.5167762970065675, 0.5822036328954079, 0.6874717611349472, 0.6706882756792849, 0.9848210165485359, 0.9314454899165321, 0.62258301365433, 0.6231502402689002, 0.5745537944466006, 0.6789527599560938, 0.6303708532214576, 0.6393744455277475, 0.9805808593376741, 0.7473892394797508, 0.7013931479528684, 0.7664501390674183, 0.9157036843053139, 0.5203191112599568, 0.8779020469106764, 0.5628790921405269, 0.5323477169597098, 0.9794646808831798, 0.9234450879870753, 0.9050051638682651, 0.9573450744506561, 0.6706917265500404, 0.8050006659584656, 0.5814998619497207, 0.6292833147985539, 0.8641169614994734, 0.6488525574667412, 0.6943018549613953, 0.9032077472034732, 0.7130148598990285, 0.8574800542342742, 0.6248774923646943, 0.5827420339649967, 0.6459097279264336, 0.9774566867049299, 0.5423096687942146, 0.704862498068975, 0.9990976883208791, 0.5076189960455263, 0.8214409430117317, 0.9740103978831022, 0.5496863752055006, 0.7809581502752514, 0.5354386518131866, 0.8143106771106592, 
0.6467573363754324, 0.8469026597684661, 0.6363793339034699, 0.7273449121060915, 0.9424190890675571, 0.973146220522787, 0.7491844049944483, 0.7851488376819618, 0.7954602034172242, 0.9986794486662987, 0.8120455451924288, 0.5059220341669797, 0.769690750451351, 0.6281233814450387, 0.733917860944022, 0.543999167766483, 0.6847856359093814, 0.9305762150104171, 0.9982827669083238, 0.6153728483919039, 0.7719877897185827, 0.8212090050005643, 0.8246644608550389, 0.9815776799923599, 0.6479789033058562, 0.513927218635986, 0.618970608106686, 0.897783279464572, 0.6146381249330177, 0.772530972000717, 0.6939700754999912, 0.8245459654775276, 0.615253591569372, 0.8254396592460372, 0.6342406168465912, 0.5147318705050141, 0.6155254022383408, 0.6043056428032606, 0.7180952115821433, 0.9627543441940487, 0.6526830117900916, 0.9251567303634272, 0.7881293190649907, 0.716994983646611, 0.9473456993077123, 0.9367204692101192, 0.7157192597446989, 0.8621368654915238, 0.9465039702302658, 0.9947967285464363, 0.7632591646669027, 0.5388532226620557, 0.9951254785884739, 0.6517491715682084, 0.928434694024215, 0.714827092908817, 0.9788781657400735, 0.5297157204365032, 0.9184891087872029, 0.9021218553687782, 0.9360817108716517, 0.9994145907257979, 0.7021306196165894, 0.5730012983541444, 0.6121397828375128, 0.8655168942353718, 0.7327484963777195, 0.5659746157290371, 0.6078609938250193, 0.8633387822681055, 0.6250784129315494, 0.7998121422954259, 0.9216703051875819, 0.5748955542964119, 0.6080606952571636, 0.6995677061355419, 0.7178971372107221, 0.8584260889784932, 0.5215138626450805, 0.7032022226428578, 0.6599520219545012, 0.6333185750753916, 0.5857618024080089, 0.7936109796067141, 0.8160860417816431, 0.9341480988288244, 0.733735579004851, 0.6637980853894829, 0.7546315479421164, 0.6370013187315631, 0.9460189185194028, 0.7738023132328906, 0.8088774113775032, 0.9000757148797939, 0.8571607873009874, 0.8218372377219662, 0.9289172419335909, 0.6735481060856753, 0.7657822787699184, 0.8845743456572357, 
0.9671971827635387, 0.7325408583209893, 0.9537017737754119, 0.7216293799201668, 0.7942252814243655, 0.6077950461982258, 0.6260338899707336, 0.5135694849171242, 0.955913338519319, 0.8972676605763232, 0.9379252509011352, 0.8085366189721022, 0.5609285428524053, 0.727554128853531, 0.9443780934749824, 0.7759840341407227, 0.6321842917915158, 0.8587179265630551, 0.6863439190384326, 0.7887544504714668, 0.611410734377543, 0.5488747308682755, 0.9009314601820833, 0.641221317836801, 0.6217638752150609, 0.6680551590018557, 0.654814137848862, 0.5440077675266957, 0.504891851472794, 0.9338459179275858, 0.9517102090426988, 0.9082670240985251, 0.745705679769503, 0.7531157706216567, 0.7867775538707997, 0.8191075452777448, 0.7177369237865177, 0.5066250910151475, 0.8814854351878827, 0.9054457001691945, 0.7803292186086463, 0.5399308511326881, 0.579103979568403, 0.6972461612205385, 0.790105624214021, 0.6707578461635828, 0.7544752876016153, 0.9087237066796974, 0.5492994323019138, 0.7497248726215684, 0.6479173359353756, 0.6180699556467509, 0.944829587146653, 0.5816462984602992, 0.5282366840705912, 0.5992884125762101, 0.8470915112297217, 0.8933941327564392, 0.9658007422499273, 0.7725853382756531, 0.7427044406469241, 0.7095644238046308, 0.5231925568411303, 0.6293124517007165, 0.8316126448745771, 0.5217149671701051, 0.5833699746997223, 0.9056297995167262, 0.5098135052325172, 0.8431420228919893, 0.6879907625251918, 0.9906255028071345, 0.8264681300078763, 0.8023285253839367, 0.799428627901388, 0.8753485728604543, 0.604547400596729, 0.5413472915732545, 0.9141617143291397, 0.6869857747339765, 0.6183259569263289, 0.7920358236346234, 0.9349712227565946, 0.6765586253359428, 0.7438934017107108, 0.8690753977903005, 0.5339616721199762, 0.8557375441438257, 0.7476695624433392, 0.9760147707385639, 0.745581847270788, 0.5474502426366746, 0.6037518908396302, 0.510737919173949, 0.6529101371882948, 0.5007809233768947, 0.535739712633417, 0.7998671838185516, 0.8818552277834004, 0.8371727079524294, 
0.6837105525277497, 0.5046551602828442, 0.8783713653432469, 0.6567791146222302, 0.8358452258419682, 0.548761466814508, 0.8627584920978335, 0.7910131504178763, 0.8995740600049689, 0.9014731829432899, 0.8644842926213607, 0.7601332282491869, 0.6701168689329966, 0.8341675782756925, 0.6052062626830108, 0.9502788502367362, 0.6794265898448886, 0.8234090934692493, 0.8860025985237344, 0.8369706295423833, 0.9074951728415286, 0.8324548302928807, 0.7197001392759035, 0.5509856368283671, 0.6985644184299826, 0.7931847743436851, 0.5494765551554529, 0.9166513564591303, 0.9290680303627088, 0.5124137382246949, 0.9226756092859664, 0.6544042967861066, 0.5843254543576458, 0.7190867606776451, 0.7082169107428495, 0.6326429007666765, 0.7264637948612428, 0.8462823786299565, 0.6638291724612573, 0.9865530299931651, 0.8557558488154885, 0.8187628934343378, 0.9275195605002029, 0.6341774266774722, 0.8300617581533244, 0.6467375919829761, 0.8063257831156021, 0.6424028993551121, 0.7223330475073244, 0.7250058106074142, 0.6382385754240936, 0.6908101013389414, 0.6528734901192337, 0.9237164862973513, 0.5949445823596247, 0.951450896657074, 0.858939070197666, 0.8869843670729403, 0.991130042333568, 0.5721187636373722, 0.6618273424850036, 0.9300702213372634, 0.9338043079868718, 0.510342010103755, 0.957745782484668, 0.947647588394208, 0.7368514415899755, 0.7871535911849327, 0.9216100697641165, 0.9604873707318833, 0.5348524459390348, 0.8019495143562219, 0.9328161985902219, 0.9207154484629066, 0.7972571230669998, 0.5820890691913514, 0.7162733044770486, 0.9271195330208168, 0.9013943559798236, 0.98709634497174, 0.9031253361789326, 0.9362440309768292, 0.6186964654380049, 0.6713890225679845, 0.8236629159857669, 0.9808100836386884, 0.9633053065865769, 0.7855315906974186, 0.9596638395913946, 0.6366764137984362, 0.7095395774464978, 0.7582866632186898, 0.7779016015190177, 0.6235948789719169, 0.8012427139468805, 0.7069278535894958, 0.6231737005901629, 0.6892636064504032, 0.6374213775458941, 0.7392708094505944, 
0.7500597153696344, 0.9069222622206452, 0.5506027947283748, 0.9153540286818174, 0.887223515774777, 0.764002981052009, 0.7620937263376029, 0.8042346547245002, 0.5190575558812818, 0.663467232011929, 0.907862960319532, 0.8173253552523578, 0.7379202701091535, 0.8442883803193871, 0.8196581010001107, 0.590841727767663, 0.7376495349307972, 0.9534205457689263, 0.7118986838810584, 0.5749920271133675, 0.8325494458796834, 0.5421270928603761, 0.8239963998410547, 0.789492985301854, 0.8905257224852831, 0.9620224810568172, 0.8611191406944576, 0.9289928022105451, 0.7081298571630037, 0.768232424060292, 0.8906069709249249, 0.6836686512429372, 0.8230699982264287, 0.7207578851836163, 0.5693589888562166, 0.8218154311778298, 0.8877151840142975, 0.6548107466252364, 0.9567218197551839, 0.7099122894187736, 0.9996499725355392, 0.9024722010524169, 0.8209367142609754, 0.9292676076596947, 0.6309655550433318, 0.7547449764720564, 0.8805703878680136, 0.6501577871378013, 0.5320508798361461, 0.9715185804656905, 0.9738590584522714, 0.6510561093080334, 0.5216932853782961, 0.934159829970608, 0.9576404137509322, 0.9811163067944028, 0.6632339008157188, 0.5867379057610553, 0.8728238797039456, 0.7679384968704596, 0.5585432098950939, 0.8823429148260582, 0.7723541391604234, 0.6479296455846083, 0.5050346720551679, 0.8066900720235357, 0.8710013045341864, 0.6707081560287544, 0.6293536173238635, 0.6903376102252738, 0.9916362707613717, 0.7491337993669125, 0.9417131632791914, 0.7877153568877442, 0.7470597530337353, 0.6916585636039537, 0.9159639321049864, 0.5293839603573839, 0.7849657495024505, 0.9846532282509249, 0.5827241843406262, 0.9383851858004308, 0.5732020707931449, 0.5154441805521848, 0.6321433946482469, 0.6730685514759771, 0.5284767951161475, 0.9831881143275261, 0.6689396752179594, 0.8638428237569438, 0.795802230800109, 0.5750661998498774, 0.8910027643944773, 0.8555326177407823, 0.9025026708687043, 0.5735556881854411, 0.5609928322847746, 0.7900406149885444, 0.5909085805906615, 0.9143818449237575, 
0.5296183561306753, 0.508771130605941, 0.9037445982694711, 0.9250571444997499, 0.7650231365614991, 0.6899550110136838, 0.6183111349369661, 0.7118896155520926, 0.8669171110239402, 0.6940834228176804, 0.9107635600962274, 0.6160221524688484, 0.5969506792028416, 0.5442675965946782, 0.5250459962563352, 0.7413823385354816, 0.6243716333265237, 0.6815535084722719, 0.6012539037481988, 0.5702096665447636, 0.68396123259107, 0.9141760641171973, 0.7898470291692328, 0.9445476241461987, 0.6669728128748789, 0.6845083477514179, 0.7693898254459168, 0.5345974684246427, 0.6985144488004051, 0.5361319833777582, 0.7201530157165603, 0.570972996443585, 0.5326064140295512, 0.9307926128170951, 0.5573774397011586, 0.7500685537812025, 0.9581708173831808, 0.5198734026782079, 0.7157445910068577, 0.5044770061811992, 0.9800823168559005, 0.6718984047983156, 0.5110677834642301, 0.8756602307872334, 0.630228674115555, 0.6089874538081017, 0.7526342046492756, 0.6830026426538384, 0.9709327971724728, 0.8226292664877495, 0.5730361701213594, 0.9295624075946902, 0.6833501975073347, 0.8414296158891114, 0.9088298471262938, 0.9982047755898668, 0.8246274402741124, 0.7094449708981277, 0.6848612420235864, 0.9612634000511223, 0.726008211474878, 0.6534451916943439, 0.797781531293623, 0.8538077224104006, 0.5289168194000413, 0.7033950139911513, 0.6836427592560307, 0.7180410790102226, 0.5587979342404074, 0.786290996676024, 0.6223601044295605, 0.5139319828683123, 0.6252815469511401, 0.701128896090883, 0.9437423330645345, 0.5641537532019846, 0.8145718828063653, 0.6427929703563884, 0.784080568831538, 0.6190421220786486, 0.9962411655855086, 0.8248654543534807, 0.5777359833818153, 0.9408354706942887, 0.7631881296163345, 0.5736437476853635, 0.5253944646566242, 0.5841873756158643, 0.8846081383063102, 0.5578623356268249, 0.8945368846478269, 0.6108556107450329, 0.8888067868880424, 0.552563176320515, 0.5264209374363455, 0.6651053745887789, 0.7291673912079095, 0.9014817407207839, 0.6983680104227095, 0.6902970801626734, 
0.9335470115117381, 0.7484494138881335, 0.9447386413922043, 0.6403830478769215, 0.7820875285611824, 0.7174494019363282, 0.5633274162487505, 0.6419981173406528, 0.9542927844399614, 0.978431499558708, 0.9504900608242046, 0.6594113995623592, 0.873735955118745, 0.6486486906074544, 0.8430403057869406, 0.5357156209209534, 0.683510525084551, 0.7031273389762882, 0.6545333219029668, 0.8219826697038873, 0.8057087915422382, 0.7358174748090315, 0.8066183640650836, 0.5195705727628697, 0.9959306368993953, 0.5923058041256024, 0.9432644698813136, 0.8002260128840055, 0.6854106061109514, 0.5479228140913677, 0.5501696708551238, 0.6541284927267985, 0.5259961590319125, 0.8994462131771513, 0.7371228947458514, 0.904739967383503, 0.7581283322475578, 0.852787609550576, 0.7690844727218558, 0.9003532310264926, 0.6721314618414801, 0.870724649387704, 0.8611303707635105, 0.9122155051566906, 0.6561930042908595, 0.948618528171237, 0.5264709405626864, 0.5385455081989781, 0.7297370182373637, 0.5776982706696032, 0.6010421038899603, 0.6230118603662738, 0.7031825562546465, 0.7723081597127135, 0.8489546223378528, 0.7634890564050014, 0.9094829097201645, 0.7926342359135767, 0.8416282467397505, 0.8057275245423083, 0.6861136821228981, 0.8943365808175365, 0.7558124470075588, 0.9993611829737261, 0.7840394777238795, 0.907990246879542, 0.7854547543401943, 0.6500625073074259, 0.7316787006230694, 0.7188107019227683, 0.6564141601553805, 0.5303380689080279, 0.7563106766941893, 0.5727610281342405, 0.9942451930305687, 0.7383006665695259, 0.5509415472373073, 0.7306520077930794, 0.8304200802238169, 0.9583798625974004, 0.5007500689845596, 0.5381933115203894, 0.9127011560004266, 0.6022072546883204, 0.6659012829919646, 0.8275913782580506, 0.5733275352049703, 0.9656784625904788, 0.8493288805568109, 0.5285355790485724, 0.5516387241559518, 0.5649317743889932, 0.6642096946357688, 0.7577732328699036, 0.8068242264023533, 0.9665150802797071, 0.5406979033635786, 0.8518741362102684, 0.9202561228274275, 0.5938433673373096, 
0.6241558789902963, 0.5636525838517785, 0.6379607238043672, 0.8577025651324712, 0.9907438788340963, 0.7533467857848746, 0.7732332992480484, 0.9162003263175349, 0.7475497550368637, 0.6646695936806415, 0.5190948039613559, 0.9267491700342603, 0.9858862441689724, 0.9023056115458226, 0.7949202115748379, 0.9378208322446644, 0.5946187271729653, 0.6515094678740536, 0.8173593991950072, 0.6258449450024395, 0.7123203645411922, 0.5015254727806515, 0.6588501233656067, 0.9992538448541628, 0.538750033700035, 0.9725617020069044, 0.7199081375096964, 0.6033632095921365, 0.915586786338855, 0.8950560046000012, 0.5813459868838629, 0.9183493314538211, 0.7407602356042118, 0.9165335602609903, 0.8046163801375625, 0.8661784599243654, 0.6578630791730826, 0.9236743023765109, 0.6923529093752256, 0.7426721416513237, 0.7605058142813059, 0.5168989597048601, 0.7606777950035386, 0.54293722780977, 0.8880940558432326, 0.7365364168536257, 0.9841403412429424, 0.5446973257106674, 0.8971668564887603, 0.5436535248639706, 0.7943776111153441, 0.570895179041618, 0.5123281608855987, 0.9692781642838484, 0.9136089940409542, 0.9394186502474804, 0.5106010226338749, 0.8156133600962359, 0.8239674717894317, 0.7617580558476398, 0.8452108459639789, 0.8176932728394204, 0.6949852238068621, 0.7382047361371631, 0.6259734462716453, 0.8143029184893011, 0.8180315412512227, 0.9678020998320264, 0.6332775833682419, 0.9125325763624176, 0.6552065084193566, 0.5434229980322192, 0.8313500277946019, 0.6603113735734913, 0.750315360588292, 0.7288467782749792, 0.5720582588282765, 0.9729040688240447, 0.7711421700550031, 0.9754410154818139, 0.6952464523461205, 0.5827698328270559, 0.8442676004260068, 0.6155295921078996, 0.982611832403304, 0.9475241892722956, 0.7964266602928413, 0.8887435056197943, 0.846576216152952, 0.8465818753046972, 0.7043963806700839, 0.6441945850574575, 0.50181658655701, 0.9487266573669862, 0.7058218355040549, 0.838027873743764, 0.7470235967979658, 0.6275294300659897, 0.8258177845209289, 0.8886192938620697, 
0.7716242041074672, 0.6630306029897185, 0.5304418800903072, 0.6917704819523907, 0.5285319240434485, 0.7755636231056695, 0.6630904612132034, 0.9696122400216747, 0.5753310808007901, 0.7383508329865291, 0.9362976944448234, 0.6631998978200611, 0.6118326512005468, 0.66432525386665, 0.7368007169284163, 0.7492270445022093, 0.74053634380492, 0.689496406497778, 0.645148479900731, 0.7175432163982818, 0.8924272613135213, 0.6082801702752645, 0.8860083297844081, 0.6050622024385361, 0.9495574819535317, 0.90222509449461, 0.7819362724964292, 0.5592348303995248, 0.821716699175044, 0.7891876380129215, 0.916613205353056, 0.6713706492482551, 0.5531802365111111, 0.8151397072096731, 0.8041203700305222, 0.505666669076589, 0.9468145369067276, 0.5729232671223177, 0.8078690350531588, 0.7825373399137905, 0.9934707622970307, 0.7262592626717822, 0.7861600555681348, 0.5589078124537128, 0.5380560693968984, 0.5725786886290243, 0.9147756082445919, 0.5381623829766039, 0.5623938360006413, 0.7208162588402998, 0.6228991444380598, 0.8471078567903692, 0.5852788993899061, 0.8424542099566722, 0.9640171704014173, 0.634837484934762, 0.9740878747246513, 0.931946599384126, 0.5745294743757032, 0.9129422935296938, 0.5138272338909584, 0.6901564752632778, 0.9933584652141494, 0.518209978795626, 0.7150102655621732, 0.863270992063542, 0.7855419814091593, 0.6809683342987833, 0.7533563143751929, 0.539917346330043, 0.7229252070171359, 0.8026683664639875, 0.7866506764131809, 0.5833048278062223, 0.5803038462207375, 0.8917148229641014, 0.5230195436111948, 0.573859040397578, 0.7727769528852775, 0.8463010438069625, 0.7678511308070108, 0.9044143094083179, 0.7472242579486121, 0.6848655935875643, 0.9986773629503578, 0.6235845777394458, 0.5021432389677439, 0.7815206620173631, 0.8323126567627681, 0.596057271618442, 0.6088569695996005, 0.5361898889021204, 0.7769610773968716, 0.6516708014367708, 0.8268420093532292, 0.6239851158517171, 0.5295396687913863, 0.6715391725571092, 0.5295417759488015, 0.6739810600160281, 
0.9446587177582486, 0.9183513890482278, 0.9249157110220156, 0.7884610912999384, 0.5133449008287465, 0.9275719016745989, 0.6430629612201025, 0.8924281941018034, 0.9929920027854152, 0.910177748705154, 0.520424818136352, 0.7212732838849778, 0.5306717009996287, 0.6381310668796905, 0.9241332845228205, 0.5614959414511321, 0.7994694629709482, 0.6554674786484642, 0.9513268432151785, 0.9568612944073649, 0.822462522601621, 0.9719905709512733, 0.6322049169202725, 0.9166789315328687, 0.7596208887836455, 0.9375699886569439, 0.8393023940943479, 0.7226385614568744, 0.6068330689126149, 0.66743921485318, 0.8787646075642179, 0.5384910736075474, 0.7166594462378388, 0.6267690701359512, 0.6475280244244239, 0.9017647600671128, 0.9892749456486377, 0.9262589163030399, 0.7835746671986681, 0.5479527400161601, 0.7036766414622823, 0.8098437174842312, 0.8547276605022434, 0.6285458382549014, 0.5339070380005253, 0.6653017521249658, 0.5761765186508099, 0.5939828637033371, 0.5165575585979765, 0.6657650704695257, 0.5461904655446362, 0.6684089410592442, 0.5741302162126424, 0.950412950661688, 0.6158583162081164, 0.7795708182900506, 0.9659812986771857, 0.731280900869762, 0.5431854224902946, 0.6643733656428925, 0.7117557340250769, 0.7140809018558136, 0.9029097686262908, 0.5596948791870069, 0.5357005227807872, 0.7708225693687383, 0.5856203009889073, 0.8485613772570872, 0.8841976514314893, 0.7466355193061297, 0.6285419546872495, 0.5037691087542724, 0.6135856044041568, 0.6030565129759138, 0.6357362124879012, 0.5764467447776234, 0.6226162933091945, 0.8021247748646622, 0.8353024163575519, 0.8505185125596337, 0.6252492996375714, 0.7958531663840203, 0.5309448014733388, 0.7846741947919088, 0.6203873304963485, 0.7308376885457919, 0.6580180685930399, 0.8739703665024103, 0.6358719743105241, 0.6313971465271315, 0.7554527273483256, 0.5817936806210755, 0.5956530644798652, 0.6921325177892693, 0.8511969505896453, 0.9821092764068129, 0.5958272188428808, 0.7332716340057528, 0.622462570796152, 0.6910327714213766, 
0.9200719931875634, 0.5083376205765204, 0.8197377045284444, 0.8095545170645165, 0.9351844328984872, 0.868927834401442, 0.6498009357246094, 0.7985683657707199, 0.5280207823812595, 0.6373671761655155, 0.5338249146624907, 0.9899495207746833, 0.7181416490027834, 0.9969198641388282, 0.7706755897740896, 0.7981195877023269, 0.5064160058946181, 0.6900819370861961, 0.8643204618789155, 0.9527839348177305, 0.6086458236766379, 0.7705345280079734, 0.6886154073967907, 0.7957049898174584, 0.9054200869349769, 0.8386321618288137, 0.9912913919020792, 0.991093252656029, 0.908684530736849, 0.7839712590453927, 0.6575017515918525, 0.5972657875139467, 0.5383694494978162, 0.6239666629795688, 0.8765227341619106, 0.7224234814744737, 0.8216291491425174, 0.8951696026553484, 0.5254405477405157, 0.9953296118331769, 0.704927785547421, 0.8551290498527258, 0.6460912018731506, 0.6319057840831834, 0.8227555809207324, 0.8433798480942283, 0.7836486438981184, 0.5150945292420497, 0.9712548154029194, 0.6765302350576117, 0.7964491489360204, 0.9979026048747386, 0.6372546883658419, 0.7004158637804646, 0.8536034997328836, 0.5313542819257607, 0.8193697947287291, 0.8534663449149407, 0.9512627074983502, 0.8150346983461447, 0.8333641455170864, 0.7163984729321213, 0.9566014090702741, 0.544889331268179, 0.9445378080316749, 0.7180042869585899, 0.9047701333028952, 0.5469738439089997, 0.9651687117328897, 0.9271471336922881, 0.541951745310568, 0.7405217081540939, 0.9966012956487913, 0.5611589266004882, 0.5747764771788326, 0.5323009534805734, 0.732739205440238, 0.9710651180594286, 0.7198471038609839, 0.5234614377742898, 0.8609568137292598, 0.9144766386878197, 0.6321786715713219, 0.6236309664690647, 0.7757253412468534, 0.5063925099424316, 0.9027266202828397, 0.9585748241854255, 0.7352155565897915, 0.6608575994502784, 0.8674002421089915, 0.7733445381563899, 0.684121150151182, 0.8730009893315152, 0.9375420496444837, 0.5315822888361657, 0.8239645385489269, 0.5268569723177513, 0.8358081950451728, 0.7634289636962053, 
0.5301449726822589, 0.6429902621194085, 0.5291918404194584, 0.9526758854892865, 0.978334172534992, 0.7096152374456811, 0.8002251531388525, 0.7061474182512251, 0.7543195708818529, 0.6544571439057956, 0.9349393194037139, 0.8417467959758418, 0.9044894692810905, 0.8449009483311737, 0.7717491736651896, 0.7343710165374044, 0.719480488724634, 0.9088387486226317, 0.5973100684673867, 0.7228041022430285, 0.7693270680913218, 0.9567767063141758, 0.8635187394568702, 0.8734510048374188, 0.9861803184352878, 0.781398800356966, 0.8426703930278371, 0.9490078272985409, 0.8782127029559077, 0.6931888354743427, 0.6912094035828564, 0.7394796291426555, 0.866449491999866, 0.5340457005629728, 0.9805172838226026, 0.8242400746609158, 0.9371728001488044, 0.9543026961855225, 0.5661331735892035, 0.6216342845268357, 0.5566551653143494, 0.8245037205682164, 0.8853809964861692, 0.5560446280526431, 0.8309742736515402, 0.7817075906225954, 0.7913459025498069, 0.9632001729028608, 0.6604709278928387, 0.7814686283434947, 0.9840569841555443, 0.7277454490402824, 0.8991037543095497, 0.6998654770632452, 0.50221902222565, 0.795858603730211, 0.8040719047597781, 0.9892007593810068, 0.6208969032692904, 0.9119535284168256, 0.5977560278055938, 0.7683005328153897, 0.849639069843128, 0.7583060021146144, 0.5779414832155947, 0.9713269404969808, 0.6548663249863771, 0.6600728974086159, 0.7254178839850851, 0.6669707566629022, 0.6807763741330641, 0.6744483907790787, 0.6758755013620414, 0.8515518159398543, 0.9602051887668497, 0.770239616129993, 0.7002240257685689, 0.5672111917729754, 0.7622577587479572, 0.6501294503436688, 0.6813370570325102, 0.8610435511173611, 0.8865764889478784, 0.8827349110831957, 0.8949474138776052, 0.9689328284490424, 0.8047279768898303, 0.6278606557675102, 0.661308548998983, 0.7590076902481941, 0.8768626673287041, 0.8426537617789804, 0.9332173319015598, 0.8214641612438933, 0.6627885727141385, 0.7221262059703528, 0.9938442880450685, 0.8538354421624529, 0.7848168766023622, 0.7276367138457698, 
0.6567544276731542, 0.5227982201718702, 0.9575954106109092, 0.683764019329822, 0.8556587402327307, 0.7839375082052804, 0.8040898734003508, 0.5345122574718864, 0.5157471912744698, 0.947077904512793, 0.6829645246537208, 0.5771113362194352, 0.976055142521939, 0.6471012093476183, 0.555723154558522, 0.8928244624918267, 0.7438758304404209, 0.7005007130576912, 0.9510673047807339, 0.6556733256763425, 0.6618435070795244, 0.97930805439522, 0.9128853378724617, 0.6157526145163592, 0.9166749005433543, 0.8106262752140598, 0.8762603826478965, 0.7991510131162176, 0.9208646599798, 0.5984290069864953, 0.772360340122463, 0.7129235110807122, 0.915602880661063, 0.6630592354375775, 0.774546916720019, 0.602959746207358, 0.588632753100153, 0.8417186054459889, 0.6025531964129793, 0.6399520367529298, 0.7498905037458106, 0.7152205984684281, 0.6690168630739295, 0.574545579969451, 0.9427289718078797, 0.5136532287886179, 0.5479125488659959, 0.8808206784456505, 0.5270220849039855, 0.9933315711972155, 0.9837008702842238, 0.8137316510792525, 0.9841169012635871, 0.5341252659015707, 0.6173867188489789, 0.9670544720567513, 0.9717634227733096, 0.8074777527701156, 0.9362237350823623, 0.8208942604808351, 0.6245642430717707, 0.5359282105585847, 0.6720096358774884, 0.5986618331000022, 0.5808117001056982, 0.9269097613217703, 0.8654147769743391, 0.7175303174504069, 0.76579073442491, 0.5441327307141957, 0.5425750264725335, 0.7965715131802813, 0.6697502894087655, 0.7660295265867783, 0.6772096049835297, 0.5310214936497795, 0.8431221902705899, 0.5476902697598172, 0.9924798661871856, 0.6716443011617759, 0.9055907369449421, 0.5390321441854575, 0.7540247979898648, 0.7069708946304187, 0.8050081515090646, 0.5856544729069775, 0.926543316815938, 0.7458685000136962, 0.5845377277036964, 0.8733112813913763, 0.5523214218488104, 0.9721968548606552, 0.9098518564851817, 0.6065410028990685, 0.8101771439680882, 0.771241047051124, 0.6056100919222893, 0.9879447717981699, 0.6860068312868012, 0.5021721783768662, 
0.8512707449338606, 0.7380879277794838, 0.9304682205445707, 0.751010433168555, 0.7534516998944987, 0.7022699956181497, 0.5934815654981799, 0.6516809963321895, 0.9921591900599149, 0.5380225943655433, 0.7699507461200392, 0.837858353174832, 0.8637162239647982, 0.9589137853996091, 0.9514975885239095, 0.9784304638505199, 0.584978164083827, 0.6796437219478786, 0.50237248223562, 0.9295254718318247, 0.8832745897153143, 0.8394982497927055, 0.9151756805183924, 0.6377799087141154, 0.6432471485112425, 0.9341766386218642, 0.8905856751411447, 0.9374329069960607, 0.9359646572270355, 0.5742701187938598, 0.5497171659627345, 0.9973376197441362, 0.686443539761092, 0.7647242678740886, 0.9644755589201364, 0.608202127207926, 0.5160315266635617, 0.6629030851759452, 0.7446651831370019, 0.7475924855814782, 0.5333967004800151, 0.5309879845627166, 0.8706185615330903, 0.6828337881789817, 0.7574916685996171, 0.8079254938614796, 0.711860268793624, 0.7720928083972547, 0.7092583984299075, 0.9830632735517933, 0.578027924994875, 0.5123994130145487, 0.9782631752261424, 0.6589033870800436, 0.5728211688943134, 0.6940008886260761, 0.916263147483199, 0.629730659880873, 0.7375650803808153, 0.8368195354420671, 0.6173252123482116, 0.9275977982244763, 0.7635757337669279, 0.7714832140833665, 0.7371907819466673, 0.8730708347328818, 0.8020043933904386, 0.8716205195010398, 0.7183410647140422, 0.8600859355888761, 0.6316365865366316, 0.6951378377338552, 0.8264048058236051, 0.8365588898118408, 0.7037925588494226, 0.7935766422316892, 0.8046318276520463, 0.7926744128740686, 0.8708276781294101, 0.8855556866175094, 0.5175021003971705, 0.7223291170730917, 0.8614236035213734, 0.9937943982648412, 0.8322941926827478, 0.9013145626156328, 0.6017388751357553, 0.8499211616994597, 0.8228911483562726, 0.7164275444590269, 0.6278737888298702, 0.6685436897387123, 0.6671711317576008, 0.6647318101124275, 0.6252153274529606, 0.5705380722534013, 0.5612901145993152, 0.7535557899373844, 0.7311731679791509, 0.773853758529137, 
0.7964000809161575, 0.9229705871899787, 0.6388981295053437, 0.9060396469720947, 0.5848266321586792, 0.728744101590892, 0.6368689825250115, 0.9021638354655728, 0.9515707305268102, 0.9893857929400001, 0.9759719008877281, 0.7114995563081805, 0.9700525540465723, 0.7362083761279304, 0.5452983671701004, 0.9843952692947542, 0.6478765688644943, 0.9390440211537878, 0.7931092843422534, 0.6280093888536759, 0.8414078890296122, 0.8109668976888835, 0.7483486403804183, 0.5369136962585221, 0.888697506860176, 0.8021170794988977, 0.838964959012614, 0.6362394853240311, 0.889583567581484, 0.5935762063353893, 0.9607714859423184, 0.5752269597163352, 0.7108032295983142, 0.5878568201974418, 0.7273536535332381, 0.8667181098143965, 0.5172197040814585, 0.641793365289163, 0.6215367768579145, 0.6459235415131424, 0.5490833611169392, 0.6652570055031675, 0.9514363690552231, 0.6447202123231335, 0.8650492250831825, 0.5677513167711241, 0.9731038425485674, 0.895180404072244, 0.8654580676029261, 0.757570280385673, 0.5638782496943281, 0.5200087837564009, 0.677301557711273, 0.9862529895336003, 0.9902874679808267, 0.5366190816786653, 0.9423607689375562, 0.6825223995409044, 0.5394851189328547, 0.9622064585995325, 0.8915267539599476, 0.8002410418555492, 0.7676787840768671, 0.9444405499338053, 0.9078916826692973, 0.631854930257953, 0.8678753162591059, 0.5524115429601792, 0.9988479889331663, 0.9903857581332316, 0.6658353272926985, 0.9362131652915768, 0.7908615090324562, 0.6534634712787095, 0.8412186979934021, 0.5107109858266219, 0.9534774839489569, 0.7146868182555683, 0.9150442766640954, 0.9589604667650657, 0.8241173391253904, 0.7969285156951056, 0.9403231896290152, 0.5071492577357715, 0.6188875946707519, 0.5436159659672011, 0.9616470190890003, 0.9994002360270281, 0.7722447107516885, 0.8336979058153949, 0.7964977411734118, 0.6793025129416672, 0.7569154236360066, 0.7021428472794501, 0.5414061515846923, 0.6273107715934357, 0.5834823827400164, 0.5956178209330332, 0.9354154473226599, 0.6291754046581214, 
0.6283900275001013, 0.5052127641806765, 0.7367313159456435, 0.9583478648393735, 0.8313557538157375, 0.7215167640172843, 0.7514772059708894, 0.7099216874413521, 0.525289744804038, 0.8010003787870966, 0.8518597442024893, 0.9584667941173364, 0.9818862566900779, 0.9909361226745114, 0.603644968064514, 0.7747233941936325, 0.9335998260498279, 0.9562112245770604, 0.8812365165924546, 0.653916268672651, 0.643248995861037, 0.7619027851087712, 0.9863421802810577, 0.7477021450426189, 0.8904777900986387, 0.7016974949687631, 0.8862116541823626, 0.6996205480456701, 0.8526891918596511, 0.5675953015730008, 0.8648902423732245, 0.7206690717745146, 0.5737823133782513, 0.9337342374682838, 0.9316213058132989, 0.6031370341695266, 0.5055120091674994, 0.9852358402725218, 0.6614338562016109, 0.967691647940831, 0.9274611198243408, 0.6951988889214933, 0.5895297087661736, 0.6636100866425942, 0.9381352551448754, 0.674849691346128, 0.5261849532340621, 0.8060039785657279, 0.8404044369715031, 0.8365687436401419, 0.6586734360249085, 0.6012161955537121, 0.9343438336455097, 0.61361438222529, 0.6864702541226763, 0.6814061030641116, 0.5531907998763249, 0.7753258351552095, 0.9576272157862902, 0.6463417390784163, 0.6067253428680401, 0.9561542682312942, 0.839332173268173, 0.6135354460275502, 0.7719675804230892, 0.7689540155213782, 0.8561666966130567, 0.7781842776325277, 0.8634899465163993, 0.5037261877759293, 0.8153165558635396, 0.8030356123418055, 0.7099941316675389, 0.6055181672920473, 0.7090509291048016, 0.8189169760058128, 0.6821274287777127, 0.9320287118644812, 0.7887455377252286, 0.6197954354742996, 0.9537810135293098, 0.6350595791355855, 0.5097252109873336, 0.9334503674713761, 0.6679173052019468, 0.7498606142613063, 0.5771617747512053, 0.9712443938473412, 0.7904475528245155, 0.733727253336245, 0.5649207815348815, 0.6202860312288785, 0.8187002296613406, 0.7175442613662562, 0.5065211758639481, 0.6740153052981532, 0.5530577977088895, 0.7779898733502346, 0.6719654669804016, 0.7568542571132233, 
0.9962557651291162, 0.7085103052843403, 0.8718407927418097, 0.77798923689508, 0.9775174555824566, 0.945116223287619, 0.88405542075862, 0.701349561797775, 0.8966646791132169, 0.6423745021007113, 0.7168311711221516, 0.7840892416000458, 0.9790103400507908, 0.7506930476976803, 0.7848177477984056, 0.5445522645922138, 0.5802864698718548, 0.8863055608121281, 0.6780549016558506, 0.7425936026095511, 0.586871871011613, 0.9986668600185402, 0.552214351520119, 0.5960931038393802, 0.9551288204833805, 0.6773789939281605, 0.7905309781463765, 0.9875325799257543, 0.6110297270278735, 0.7228077309687819, 0.7606699132799846, 0.6286439827277965, 0.5598262036573876, 0.6656926746761472, 0.7658597323181267, 0.506593308989169, 0.6635093267077928, 0.7376332045974672, 0.592373896559478, 0.6449805536503016, 0.6965218288038832, 0.5113266230308877, 0.8769984159578452, 0.6378782266253847, 0.6559534228679682, 0.7096944446377644, 0.5798058728354331, 0.7591675802033228, 0.6032190616763067, 0.7847710439060565, 0.7085928427168775, 0.8750578381717803, 0.5760753926164245, 0.9098438906194141, 0.5734794373603436, 0.7216669669990039, 0.9695997954236918, 0.6499324614340762, 0.8139059651409954, 0.9239746297793691, 0.8405006779110281, 0.755963936683352, 0.6538021343807134, 0.5653502329581197, 0.9196706850690739, 0.7307801145229655, 0.7404231493095961, 0.7128311866829957, 0.7459717399591068, 0.6597582724454862, 0.6987576786772192, 0.7740707212930965, 0.5169549548514307, 0.7498763519353475, 0.6256904151763534, 0.5962862867841604, 0.9358634151392459, 0.8130261598050277, 0.7809068883379954, 0.8946776027229816, 0.7193754422669834, 0.9632764146713989, 0.9561961159624497, 0.9817005191405104, 0.5953841969215257, 0.6474158096899254, 0.6911491004376892, 0.8308748513194177, 0.5377486607593887, 0.6756292404655192, 0.925251898477794, 0.9997199881387432, 0.8034124067224253, 0.8661864818281556, 0.5883812212321271, 0.7581627566567418, 0.8840549710634402, 0.9000026586721444, 0.9918226558185854, 0.5500914341829072, 
0.9140068703840758, 0.552041488454442, 0.6915986782708268, 0.9013095788634741, 0.597592647592389, 0.9567853302938765, 0.962224212368917, 0.8570961583383618, 0.5758705180087831, 0.945454051368301, 0.7803012162909089, 0.852496853762346, 0.941115060393031, 0.746442076432922, 0.7114195023046277, 0.9398459422430245, 0.5791409396255671, 0.6068442353764231, 0.796279946160874, 0.7806322099723475, 0.605953457308021, 0.5036844824278867, 0.9074983320678272, 0.9548133291170797, 0.6556340505361107, 0.5571702931534301, 0.7616271367620295, 0.5655293166443385, 0.5311209447402536, 0.848905201289341, 0.5571089927270269, 0.7899869695321207, 0.5526272886088777, 0.5107688274584734, 0.6966751929482302, 0.820357965820153, 0.9282799792530274, 0.8155423147691987, 0.9145499872847459, 0.7399505912182526, 0.8264789282783811, 0.9537899422622611, 0.8452823046686069, 0.9740871151640649, 0.665239990942571, 0.8351685785252663, 0.6592627310520928, 0.6870943402737884, 0.8905577982629426, 0.6146823868071158, 0.8883620561749915, 0.6815247182137669, 0.9922553277717177, 0.5876444104966079, 0.5214095732403137, 0.650797442353815, 0.9673744647279234, 0.7343639956370402, 0.7256850459984852, 0.6122743010465936, 0.6356220538498645, 0.809870959699234, 0.5953408296604883, 0.8818294249324885, 0.5460643673228934, 0.5180043341478844, 0.5745662552046759, 0.5673188639240422, 0.9159858945524608, 0.8191900359021014, 0.8136138017488739, 0.5205249248675026, 0.9818625163953363, 0.7566528168603187, 0.7047193993834362, 0.8711164714141553, 0.6361895936440147, 0.81262362408553, 0.8736159252167324, 0.6838910710235742, 0.9576964912749473, 0.895058660889261, 0.7602376777131081, 0.887439532483774, 0.8307813172379823, 0.7495043281948292, 0.7875964967742536, 0.8446227512334292, 0.6638967353856382, 0.6119400503369332, 0.5518745763522479, 0.7237818660435912, 0.9584486073723024, 0.9226155457125789, 0.8112917677980078, 0.5883347565458599, 0.7768584775974716, 0.8016502953098756, 0.8067057236336983, 0.864889927157532, 
0.6233997629325341, 0.7071437931808596, 0.8329783166871392, 0.7999779267133333, 0.767352555858372, 0.9854200192486201, 0.9945761677695104, 0.9904232057276722, 0.5496725828169242, 0.5355772745223927, 0.980379448365795, 0.7129899662339723, 0.9701414533965029, 0.885369144728889, 0.9190915600399141, 0.9847028407611038, 0.577783042585132, 0.6074147641277498, 0.7840955755678678, 0.6770615114612049, 0.9819498314632116, 0.9266123667367108, 0.9935058761113515, 0.9351564778683984, 0.5085623226873561, 0.5359914929062959, 0.501709802964738, 0.5860403015156141, 0.5595817995041452, 0.8643556037655138, 0.8634694513946883, 0.718510472779761, 0.8613665464670126, 0.7099227586111911, 0.8988183566797402, 0.5963579580683463, 0.5392773889261389, 0.5925929214738512, 0.9988553982999739, 0.9755655916481478, 0.7035095814419867, 0.5112311518633118, 0.8963946803949587, 0.7350101865350842, 0.6414265032034985, 0.7060812671683335, 0.6509609414364401, 0.9456237276450818, 0.8836488442504411, 0.9097820141016889, 0.7033939523123158, 0.8377251657431448, 0.6174349253195751, 0.7057550011132326, 0.5063128375986812, 0.5696469862539775, 0.7730567407527099, 0.507206997050187, 0.8925589247937373, 0.9580014486733517, 0.6048206896898844, 0.5321051345894364, 0.6777082222161178, 0.5556228077939092, 0.9047463893717329, 0.8129485392674554, 0.7575006970764974, 0.6584745918403561, 0.7687212295197827, 0.9960218128305327, 0.8481006165904579, 0.6953569525945851, 0.8529511014812736, 0.9086785506992007, 0.6011941047741922, 0.8506573540517886, 0.7239727417356525, 0.5713116707208907, 0.7786425108850019, 0.6117933826606137, 0.7171022254590097, 0.7260009027260057, 0.7886934462871298, 0.9038509516038501, 0.5653677425429771, 0.5145977187346767, 0.7493895987941604, 0.8123498924421503, 0.7869118466252183, 0.8970737186372444, 0.5846203027783277, 0.776406915087841, 0.6213185330323374, 0.6902251032654767, 0.6109955683902484, 0.7212841166353167, 0.5817623669837484, 0.9062743186366893, 0.5548405620793844, 0.8026526202119011, 
0.798111865621364, 0.8458124637730082, 0.7534996314250868, 0.7645305560630098, 0.9426860782980298, 0.8326656244948616, 0.5051233510342654, 0.5458064763883705, 0.5913187498372896, 0.9493171134454448, 0.5068119475275822, 0.9367099037320095, 0.963868130054332, 0.8733657180339365, 0.6444850010266883, 0.8071917024715181, 0.6272255964374017, 0.919717451863042, 0.6105170488111847, 0.7170626604961337, 0.8369631499749545, 0.8872045377567598, 0.7237176973303374, 0.5930182974156571, 0.6041193856426368, 0.9945142153595296, 0.61335272317616, 0.6426185817387982, 0.7140606596589811, 0.7751132248219096, 0.8004848322526621, 0.5145184058747346, 0.9727093240542365, 0.9638963895216158, 0.6704334075812486, 0.7143447105205637, 0.5444470699159126, 0.8550348278908309, 0.7261916505881488, 0.5790301356459587, 0.9777039790128257, 0.5199017783438376, 0.7570762884632447, 0.7164216887556953, 0.91607936026096, 0.5961152703382238, 0.9491125252877772, 0.7313282907333225, 0.5582898412803812, 0.8928122132250997, 0.8951439027505295, 0.774514106528958, 0.5659660425765041, 0.5448815465056199, 0.7646174757200046, 0.5916281096400554, 0.5922506218480373, 0.5574098864918373, 0.6229485111109107, 0.5806903800155487, 0.7912656969393832, 0.5684004670127425, 0.5295755227419451, 0.7742012622013406, 0.8351566193366686, 0.8922351278095155, 0.8290325170957602, 0.5157526515226226, 0.8122554216104341, 0.6443620806247072, 0.9944109167164827, 0.8155448988625777, 0.887435537349618, 0.9809215179826818, 0.8165639363579801, 0.70996003062339, 0.6941302342217333, 0.7925876131658045, 0.8786010030293713, 0.6071097580703823, 0.5289926586213223, 0.7814808949132269, 0.7734477210343558, 0.6351282788000149, 0.9998431884588346, 0.7092143853014818, 0.6498183012038146, 0.8555598403214066, 0.5404039435559278, 0.5190966043442736, 0.6444861039139325, 0.5893513161197597, 0.7651323004313294, 0.8523871826718579, 0.9382696998513465, 0.6016733179827424, 0.85681493440842, 0.7685200321986521, 0.57269869901416, 0.7816507669522226, 
0.8445749348000957, 0.7046690928096447, 0.549365286577718, 0.5794277156534711, 0.8292425420547269, 0.5549072708287508, 0.5997072020617047, 0.6782807686562631, 0.8259254269937663, 0.6164545644844193, 0.8447634292585022, 0.8128702665198353, 0.7410830394395613, 0.6381517689024057, 0.5801758909527135, 0.9355407765234005, 0.6103425864003073, 0.9510911948626934, 0.7074615849518995, 0.7779697004945308, 0.5948997698101051, 0.9574652307606437, 0.9257022005518314, 0.9304411993858104, 0.7714389277563358, 0.7403108640336726, 0.7927648854681808, 0.8779498506045904, 0.8417912743210607, 0.7632031052645856, 0.9737330048207498, 0.5533489488810702, 0.725589137536154, 0.9435728297525108, 0.8815836222270967, 0.5657432145147836, 0.5330385624177935, 0.5118825962149913, 0.7996926410100624, 0.8047857420628356, 0.6605872921051528, 0.7175320110832865, 0.8173254431805335, 0.792303609043353, 0.7330592776351935, 0.6148946235768991, 0.9696788434671909, 0.8845708870064329, 0.602255625775779, 0.979576003277952, 0.7083736859066929, 0.7878697327340058, 0.818245567519256, 0.9355809291140826, 0.6580786521919938, 0.9500366473592455, 0.516794593774805, 0.6199087338491851, 0.9810096879536014, 0.6460913039873977, 0.5442750184223891, 0.687338721154383, 0.8008647825364319, 0.9200791697563885, 0.6241320704441293, 0.5154378624904649, 0.8995161255891344, 0.6617852355076734, 0.9486325574823, 0.9351011007934331, 0.5211677631792861, 0.7198125689376054, 0.6293601766649883, 0.5674222159673314, 0.6619247929633412, 0.6619622568478859, 0.6731897878938478, 0.8511755028963555, 0.5060486212022519, 0.8250015165035897, 0.5730001937475779, 0.8890423970047915, 0.8306846187161386, 0.8464094719815999, 0.90815818894269, 0.7061832160918047, 0.937039358642292, 0.9301203514099664, 0.8514971836693299, 0.9905259911034863, 0.6963511635780064, 0.5879385091130973, 0.73460670821302, 0.5510696706791054, 0.7651402422387492, 0.6200332720248402, 0.5430662818088483, 0.915061671872095, 0.8838772516514207, 0.8090749279660554, 
0.7554576457592876, 0.9985006371961023, 0.7033738150500233, 0.969396905406154, 0.6343990417660048, 0.542103130036814, 0.5296782913870606, 0.5117504920226148, 0.6317373566150988, 0.9360924248140723, 0.7176545801203746, 0.7461516626122885, 0.6378061712885534, 0.814151491369322, 0.6524092789920145, 0.6890296600927269, 0.5639940124212125, 0.5083046714691637, 0.8721490973239081, 0.8487633206000733, 0.565382846309972, 0.7600820020198102, 0.6818826517309962, 0.6480946846614123, 0.6126887941497166, 0.7011331395714017, 0.9193286131516473, 0.923975692537767, 0.5515882379968615, 0.7481742306935557, 0.9250061093976418, 0.773416916204155, 0.7411970859845509, 0.8937308662383423, 0.563789032201379, 0.5936805467125388, 0.5572311253036857, 0.7687436595485176, 0.8006709040212581, 0.790727007118189, 0.6036344747519442, 0.552653595838863, 0.5069951229094458, 0.865855316406626, 0.7641974356309986, 0.5954140706009812, 0.7794992722939419, 0.851059115747004, 0.705964695054847, 0.6835271692066474, 0.8267460027324458, 0.5882361940012633, 0.8687192769698784, 0.8670696858196397, 0.8425276485229698, 0.7611738269468094, 0.7553968589289243, 0.8174013921447802, 0.5197938938543298, 0.5626127289819873, 0.9237091020954109, 0.7797690453251278, 0.5070325386176597, 0.9423999328739847, 0.9427645729663305, 0.9653851219065981, 0.9286871306223516, 0.761019250670307, 0.7438405059490001, 0.6510007349476915, 0.6863661212610623, 0.6090921241331593, 0.8644680561273945, 0.6617568133333676, 0.8862932811031592, 0.5868905566176486, 0.9969122841717362, 0.6258486750807137, 0.8767019808761498, 0.9564443393351033, 0.6467595420625522, 0.7118194387593442, 0.964933948340359, 0.5615616042232237, 0.8627356378547337, 0.6310151441276399, 0.6772706162772542, 0.9269086933088218, 0.6296404631380286, 0.8774805358993005, 0.7824101808496671, 0.6726346697842032, 0.9661984020525993, 0.5261901851586324, 0.8210528982504814, 0.7395712819417766, 0.7014811757523131, 0.8963724243255048, 0.8552041818880483, 0.9626655945627669, 
0.6310639201617299, 0.7378812442398816, 0.544861352193907, 0.8258746391494427, 0.7785185625568449, 0.826387949378218, 0.6238937626895327, 0.7263805824181155, 0.6440356032581025, 0.7524755355333248, 0.8177532121397635, 0.6336836673807731, 0.77205752745185, 0.7469244436516004, 0.6536562118217447, 0.8969884982226277, 0.9113851483101761, 0.5000660207233254, 0.8159821560799456, 0.9566359633980703, 0.667080261725395, 0.5649675839575132, 0.8848662813450704, 0.9610742192718008, 0.9378684039607457, 0.9451160836772168, 0.7001484289317845, 0.7533282339038856, 0.8850123221951322, 0.5542187743973359, 0.9158783968174762, 0.9989136997080719, 0.5856861540323337, 0.5498302795389381, 0.6153109104797527, 0.9800322211423489, 0.5442149563641812, 0.6808815873448519, 0.9186704488036479, 0.9894718560864595, 0.7875679110537996, 0.6380790703402185, 0.8650541624849426, 0.6441637055102626, 0.9294280296560115, 0.5406342027306889, 0.8081648590552549, 0.5972298359671084, 0.5546653587871064, 0.7888348586030766, 0.6413501852081238, 0.9379138853870295, 0.534227025250213, 0.9006323785888688, 0.9925813497090232, 0.5214653296310836, 0.9722498924910965, 0.860571822866343, 0.953563311278594, 0.6568010799651897, 0.8169203347015053, 0.6673402946601488, 0.8501075676614057, 0.6211231253971536, 0.561288850052878, 0.6257902370085229, 0.7279849022280794, 0.7166454605211627, 0.8354218271361635, 0.9996889211549886, 0.5315862599835696, 0.8885438979528371, 0.6357467139613211, 0.7733523488064964, 0.5157824307735182, 0.6082118826439897, 0.7868876755027403, 0.8397567567108619, 0.7156337426596278, 0.5783421165123246, 0.9240745702972053, 0.6679628410764253, 0.8409767053256678, 0.5503205643913993, 0.5806583289773293, 0.6849621007395733, 0.8724874675982925, 0.585687560769888, 0.9138758454589047, 0.6864142860919711, 0.5005163619304996, 0.7807430510407829, 0.9283649910468348, 0.7646504702730005, 0.5314785045262693, 0.7798746829944345, 0.7610161733553221, 0.9162919208763245, 0.8487882622077265, 0.8190269766045668, 
0.9467154059607413, 0.5749747897105187, 0.5779187453516048, 0.679185577465111, 0.8005557265166019, 0.9431724687946754, 0.5137956850570626, 0.8212349502008708, 0.9169090178595987, 0.8156882766807642, 0.6052546678062853, 0.7817226079485153, 0.5442609123907578, 0.83840740925581, 0.9372591259609466, 0.5279463227334348, 0.6636899222285132, 0.7201707635136124, 0.8234640927601602, 0.6827882626697225, 0.8148422158601544, 0.5448879080805678, 0.6756854407137719, 0.8359243876691893, 0.6737962682527575, 0.5724902393908772, 0.6257913647558748, 0.6179933536738367, 0.7205882824304723, 0.7817519357921809, 0.9106483523794646, 0.6899305918678008, 0.7130064634799332, 0.9140808625947343, 0.9568680735308049, 0.9994644162371582, 0.8346594642898784, 0.6061186813174231, 0.8393283434690506, 0.9554035643120546, 0.6229725862609673, 0.530585166746925, 0.9714453101697877, 0.9422805485787982, 0.7612763548952257, 0.5596106252495368, 0.8087257500240621, 0.8094272203584882, 0.8315005736623864, 0.5607174814554112, 0.6363985345692763, 0.5129457419088477, 0.552866663144248, 0.8640300499705751, 0.9441414054447756, 0.5247167279921574, 0.9935665746738755, 0.8502645033839304, 0.8088581511883057, 0.8937947500712464, 0.9580169898028934, 0.7374802420024043, 0.6541732027958862, 0.9987666084933213, 0.6219148917175332, 0.8754053970597764, 0.6767308093011883, 0.550216871149513, 0.7504331406159919, 0.5871962750510135, 0.9240286418881818, 0.5014878270309311, 0.6668583283845855, 0.5149337743713378, 0.7683270752403951, 0.7175674735798551, 0.6421083459557566, 0.9207692662504664, 0.7616210622909318, 0.9667523019345692, 0.8202723083751724, 0.7974847094630543, 0.9478268924132285, 0.7494630810552452, 0.586411393102837, 0.5548654588511341, 0.9240487510210185, 0.5008423502617771, 0.7171739491803271, 0.6207865862114421, 0.8199922154140178, 0.6319801748812308, 0.5240777355447952, 0.5552138753764573, 0.6156266295102921, 0.6664396258878722, 0.8507188506855662, 0.7593603536142199, 0.5975159273382091, 0.7539594967309868, 
0.8249778592309354, 0.7087611921353614, 0.92119191732973, 0.928106364644554, 0.8729428659144021, 0.7762006692985379, 0.6987470711493149, 0.8656136017739748, 0.567272191799721, 0.760135111712989, 0.9682074527913509, 0.5020341801248382, 0.593492449229923, 0.7446534066825006, 0.8826976688514316, 0.6860971063330796, 0.7123419478069088, 0.7243140888929807, 0.893450755659698, 0.8575021510344949, 0.7993229409715832, 0.9931505281931584, 0.5799575883829089, 0.7560555864664398, 0.7038673642366673, 0.9945667827676341, 0.5588843113476027, 0.8515390330406043, 0.650079511691207, 0.5515042158339702, 0.5698516251224983, 0.688313864874202, 0.6119384220576065, 0.5337873968970912, 0.7754143723090762, 0.6497309406071485, 0.8120096574863449, 0.975967710493304, 0.8167488428980179, 0.6936693904508824, 0.5029325591457809, 0.8374636511801357, 0.5898517279429013, 0.5572370245540639, 0.5546290271248906, 0.9905147824883074, 0.7149022043853269, 0.5313609392564906, 0.6250682884371768, 0.9837186191256685, 0.5766202770004744, 0.7715848092801882, 0.9770124338323063, 0.5519522769312024, 0.9931094567757002, 0.5109262686656122, 0.7610264422572426, 0.9591402994119935, 0.7602514188203477, 0.9845803372938207, 0.7806676445358456, 0.7379947531810968, 0.8231719560620883, 0.9651371251518637, 0.6381995631586849, 0.5808110985174253, 0.9358523619899027, 0.6662251487767835, 0.8467305676723058, 0.537244164128649, 0.8100421313227193, 0.9809762750714257, 0.8028611346979122, 0.7089128988408224, 0.8367361652817091, 0.8713408919246182, 0.6705620543494973, 0.9267266567303847, 0.7153032455077708, 0.6331263749601188, 0.9608750003479204, 0.9202063221655599, 0.9001863553642606, 0.6453230267867092, 0.7702583884139578, 0.6313738822744487, 0.7467842924083443, 0.9034580845429171, 0.7102477427899461, 0.6949021031140492, 0.8306070536680479, 0.6351375528509138, 0.6274106798534029, 0.9812663744572304, 0.7081820648097621, 0.825853015904256, 0.8209053611607997, 0.7173913955958433, 0.6704045443499064, 0.7793830724594861, 
0.9400977207764178, 0.899879568092435, 0.8044510746421033, 0.6484821774814298, 0.8928855189657148, 0.8076332084359841, 0.9477805121922811, 0.8753540245620164, 0.8606548732790477, 0.5249802390031035, 0.6042081264038552, 0.5140637381213459, 0.9443700503731493, 0.6443102987087593, 0.8055996247566319, 0.568780205389547, 0.6335064718565222, 0.9386067061321511, 0.67136541085089, 0.9876199079252798, 0.8964198312650105, 0.8618401064639393, 0.8293905820087646, 0.6493214287887781, 0.9699400498742065, 0.627177931845404, 0.8486154865238813, 0.8378833125959606, 0.5288243702550632, 0.880631483028743, 0.9257156889287524, 0.8670670151879792, 0.6269383762466441, 0.803788365814434, 0.9024442888754203, 0.8491575930032358, 0.7892598358715857, 0.7060250660578664, 0.695438611071886, 0.9477605799592788, 0.9580431103353088, 0.8734253354809909, 0.7367750805980293, 0.6433982541580022, 0.7921269633195034, 0.8668901948790413, 0.5101352106562655, 0.6176477412600202, 0.6082417700254894, 0.6768715069673213, 0.5001158655122026, 0.6722145135306243, 0.6960106101096946, 0.8154946065050042, 0.7085943638457838, 0.7457568993426293, 0.5128619267391051, 0.6709415617990033, 0.5747390864155077, 0.7429703420474575, 0.5881768190151336, 0.8225141030544139, 0.5661584512980349, 0.8602087847896646, 0.5146179822964289, 0.5413466596697775, 0.7827493658999893, 0.9532545291701493, 0.5902242499476821, 0.5002032837321382, 0.8563191873100694, 0.9279081850112743, 0.6710357605051455, 0.7463184057463741, 0.6678799187341924, 0.5377388661584593, 0.9969712705463323, 0.7958738133404988, 0.9621644053693117, 0.8148971678733217, 0.5975564513599966, 0.5724333147205685, 0.917568770530968, 0.8307034711467205, 0.7524606222224232, 0.5176275764544465, 0.9596763423551369, 0.5885519512768138, 0.79829738198204, 0.5829695928384875, 0.6331272350797389, 0.6332482249293065, 0.6327984497836887, 0.5719298662141208, 0.8677225307478122, 0.6368925570075653, 0.7900639978447664, 0.6170640101519755, 0.916945078295246, 0.782450518706096, 
0.9349957198608005, 0.8970091717294866, 0.8974359097103349, 0.6076521726481399, 0.8557157417716428, 0.7529199421916652, 0.5878613223068876, 0.933261532371659, 0.7302941353176127, 0.9349415459931045, 0.6253696452227523, 0.5802245201183893, 0.6341348096653043, 0.7409369895133302, 0.814162142608317, 0.5064595827385139, 0.6671842291138868, 0.5320268266233261, 0.8437916436606662, 0.5402273309623582, 0.564143358387541, 0.975718733192781, 0.8027556051409923, 0.5797832118227353, 0.8585620452051927, 0.8717363225132748, 0.8965936868222683, 0.8040370568772224, 0.5222916005158094, 0.7826768944057312, 0.9999427275664958, 0.6610454467150464, 0.5713607431111332, 0.5476786718033677, 0.8013447337268522, 0.5042060618357755, 0.7829002614686398, 0.9580906416618196, 0.6788586986000106, 0.7693552089347105, 0.7910392253053153, 0.592003546374563, 0.9910290087363531, 0.5004879499589115, 0.8408053521996344, 0.8572260362940582, 0.5278099109001856, 0.6318219621828779, 0.9331910646587264, 0.9025315315540197, 0.6223620974555639, 0.9461550586147183, 0.6987938096676538, 0.988267080342528, 0.6233822327957367, 0.5697093103469499, 0.7013636791624187, 0.8876979059015131, 0.5547689805922571, 0.5598295354745386, 0.8315044242654659, 0.5087519045456879, 0.6595646036731424, 0.5412949122489662, 0.8998095021868908, 0.8603874995028422, 0.8964227989171437, 0.8185936987932884, 0.7047156887958277, 0.5541301128419968, 0.930965680754199, 0.776222420843613, 0.7256548503481147, 0.6028260505124705, 0.5915332504623089, 0.9578578380208081, 0.7708870182006489, 0.5857787428183492, 0.737269262675377, 0.5915570428628221, 0.9820783368558434, 0.5990189711879482, 0.530082465373916, 0.8189642632510254, 0.7249619282446691, 0.7675959490408584, 0.8739436520761172, 0.95043685805555, 0.5342649302508646, 0.5538040101541115, 0.82172472888445, 0.9768001255259132, 0.5964253764345735, 0.9897604503419806, 0.5533770952085877, 0.7489727399489783, 0.7019131061498618, 0.9070560600091593, 0.6905202543095099, 0.9148644239228725, 
0.6167389645600205, 0.5799095637041234, 0.5674854639876307, 0.8868371602093229, 0.8293141260787091, 0.8205098411597274, 0.8173021645945282, 0.5718285155865008, 0.8813296610074068, 0.8040544710953557, 0.6140356867368434, 0.8447175012463318, 0.989822858750963, 0.6900884571949122, 0.9222382041469358, 0.812798563731699, 0.6990003284233091, 0.7885401022187808, 0.7153216174422873, 0.7594698332584993, 0.9972163433905903, 0.9349808579528134, 0.5667999501772707, 0.5848466835472008, 0.7866600747432106, 0.8436892437275546, 0.9969511699993308, 0.9570547085547649, 0.8530837877790055, 0.6624931606281736, 0.808841082974225, 0.6339301594572097, 0.65045975463888, 0.6646770769826393, 0.8798208053661272, 0.5801143827174591, 0.7325050252182027, 0.9496541312517242, 0.9090537152079381, 0.8948087252504885, 0.8375192781007725, 0.9310039677602935, 0.8238049984081117, 0.9192172674441624, 0.8303892625523834, 0.8512069504680182, 0.6608270790518371, 0.6091261666389718, 0.5435508669266101, 0.6999078506218901, 0.6953396861574281, 0.8258058456633913, 0.5645735235502232, 0.7589432707827916, 0.5160475230417207, 0.7430674390032774, 0.884122025575228, 0.6293582750905546, 0.7538704483441614, 0.7995639830592078, 0.6608063928987338, 0.768346841978172, 0.6230258478452267, 0.6139154094640015, 0.9964861442168552, 0.6347366963113379, 0.6393027741665109, 0.6816840467571899, 0.8738065580642661, 0.8833736111166983, 0.9674722484570947, 0.5734964629121131, 0.5543877677841236, 0.6541876151958534, 0.6620808352643428, 0.557235870237685, 0.9891731430171464, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 
50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0};
int h_B[]= {
1, 3, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250, 252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 287, 289, 291, 293, 295, 297, 299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343, 345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 373, 375, 377, 379, 381, 383, 385, 387, 389, 391, 393, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434, 436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480, 482, 484, 486, 488, 490, 493, 495, 497, 499, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526, 528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 559, 561, 563, 565, 567, 569, 572, 574, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618, 620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664, 666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 699, 701, 704, 706, 708, 710, 712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756, 758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802, 804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 824, 826, 828, 830, 
832, 834, 836, 838, 840, 842, 845, 847, 849, 851, 853, 855, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894, 896, 898, 900, 902, 904, 906, 908, 910, 912, 915, 917, 919, 921, 923, 925, 927, 929, 931, 933, 935, 937, 939, 941, 943, 945, 949, 951, 953, 955, 957, 959, 961, 963, 965, 967, 969, 971, 973, 975, 977, 979, 981, 983, 985, 987, 989, 991, 993, 995, 997, 999, 1001, 1003, 1005, 1007, 1009, 1011, 1013, 1015, 1017, 1019, 1022, 1024, 1026, 1028, 1030, 1032, 1034, 1036, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1062, 1064, 1069, 1071, 1073, 1075, 1077, 1079, 1081, 1083, 1085, 1087, 1090, 1092, 1094, 1096, 1098, 1100, 1102, 1104, 1106, 1108, 1111, 1113, 1115, 1117, 1119, 1121, 1123, 1125, 1127, 1129, 1132, 1134, 1136, 1138, 1141, 1143, 1145, 1147, 1150, 1152, 1156, 1158, 1161, 1163, 1167, 1169, 1171, 1173, 1175, 1177, 1179, 1181, 1184, 1186, 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, 1207, 1210, 1212, 1215, 1217, 1220, 1222, 1225, 1227, 1230, 1232, 1238, 1240, 1243, 1245, 1248, 1250, 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1277, 1279, 1281, 1283, 1285, 1287, 1289, 1291, 1293, 1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1325, 1327, 1329, 1331, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1357, 1359, 1361, 1363, 1367, 1369, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1393, 1395, 1399, 1401, 1404, 1406, 1409, 1411, 1414, 1416, 1419, 1421, 1424, 1426, 1429, 1431, 1434, 1436, 1439, 1441, 1443, 1445, 1447, 1449, 1452, 1454, 1458, 1460, 1462, 1464, 1469, 1471, 1473, 1475, 1479, 1481, 1483, 1485, 1487, 1489, 1491, 1493, 1495, 1497, 1499, 1501, 1503, 1505, 1507, 1509, 1511, 1513, 1515, 1517, 1519, 1521, 1523, 1525, 1527, 1529, 1531, 1533, 1535, 1537, 1539, 1541, 1543, 1545, 1547, 1549, 1551, 1553, 1556, 1558, 1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1581, 1583, 1585, 1587, 1589, 
1591, 1593, 1595, 1597, 1599, 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, 1633, 1635, 1637, 1639, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1661, 1663, 1665, 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1695, 1697, 1701, 1703, 1705, 1707, 1709, 1711, 1713, 1715, 1718, 1720, 1722, 1724, 1726, 1728, 1732, 1734, 1740, 1742, 1744, 1746, 1748, 1750, 1753, 1755, 1758, 1760, 1762, 1764, 1766, 1768, 1771, 1773, 1776, 1778, 1781, 1783, 1786, 1788, 1791, 1793, 1796, 1798, 1800, 1802, 1804, 1806, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824, 1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1855, 1857, 1859, 1861, 1865, 1867, 1869, 1871, 1873, 1875, 1877, 1864, 1877, 1864, 1877, 1864, 1921, 1923, 1925, 1927, 1929, 1931, 1731, 1580, 1580, 1237, 1235, 1468, 1468, 1739, 1418, 1423, 1234, 1209, 1237, 1235, 1790, 1237, 1235, 1739, 1737, 1739, 1737, 1736, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 1555, 1237, 1235, 1641, 1555, 1237, 1235, 1237, 1235, 1752, 1694, 1731, 1237, 1235, 1224, 1229, 1224, 1229, 1237, 1235, 1757, 1752, 1643, 1877, 1209, 1237, 1235, 1234, 1237, 1235, 1234, 1209, 1237, 1235, 1224, 1229, 1224, 1229, 1209, 1234, 1237, 1235, 1224, 1229, 1224, 1229, 1209, 1234, 1237, 1235, 1061, 1060, 1808, 1643, 1641, 1643, 1790, 1736, 1808, 1641, 1237, 1235, 1757, 1694, 2285, 2287, 2289, 2291, 2294, 2296, 2298, 2300, 2303, 2305, 2307, 2309, 2312, 2314, 2316, 2318, 2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2349, 2351, 2353, 2355, 2357, 2359, 1456, 1451, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394, 1237, 1235, 1224, 1229, 1224, 1229, 1237, 1235, 1237, 1235, 1061, 1060, 1237, 1235, 1418, 1423, 1451, 1451, 1717, 1775, 1752, 1757, 1757, 1752, 1785, 1785, 1757, 1752, 1757, 1752, 1775, 1757, 1752, 1737, 1737, 1757, 1752, 1717, 2615, 2617, 2619, 2621, 2623, 2625, 2627, 
2629, 2631, 2633, 2635, 2637, 2639, 2641, 2644, 2646, 2649, 2651, 2653, 2655, 1061, 1060, 1214, 1219, 1229, 1224, 1237, 1235, 1214, 1219, 1229, 1224, 1149, 1229, 1224, 1237, 1235, 1149, 1155, 1237, 1235, 1237, 1235, 1456, 1451, 1438, 1456, 1451, 1467, 1423, 1418, 1423, 1433, 1418, 1433, 1438, 1456, 1451, 1457, 1398, 1398, 1457, 1467, 1877, 1643, 1641, 1770, 1770, 1739, 1737, 1739, 1737, 1877, 1864, 1877, 1864, 1877, 1864, 1877, 1864, 1864, 1864, 2979, 2981, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002, 3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040, 3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3065, 3067, 3070, 3072, 3074, 3076, 3078, 3080, 3083, 3085, 3089, 3091, 3094, 3096, 3100, 3102, 3104, 3106, 3108, 3110, 3113, 3115, 3119, 3121, 3124, 3126, 3130, 3132, 3134, 3136, 3139, 3141, 3098, 3093, 3146, 3144, 3098, 3093, 3128, 3123, 3128, 3123, 3146, 3144, 2983, 2983, 3098, 3093, 3064, 3146, 3144, 3098, 3093, 3143, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3128, 3123, 3128, 3123, 3128, 3123, 3098, 3093, 3098, 3093, 3098, 3093, 3128, 3123, 3128, 3123, 3146, 3144, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3151, 3680, 3682, 3688, 3690, 3149, 3147, 3149, 3147, 3149, 3147, 2658, 3707, 3709, 3098, 3093, 3064, 3098, 3093, 3098, 3093, 3143, 2658, 2972, 2972, 4018, 4020, 3146, 3144, 4053, 4055, 4057, 4059, 4062, 4064, 3146, 3144, 3146, 3144, 3149, 3147, 3082, 3088, 3112, 3118, 3144, 3146, 3146, 3144, 3149, 3147, 3151, 4140, 4142, 4145, 4147, 4152, 4154, 4157, 4159, 4162, 4164, 4166, 4168, 4171, 4173, 4175, 4177, 4156, 4061, 4161, 4156, 4181, 4179, 4161, 4156, 4181, 4179, 4181, 4179, 4151, 4161, 4061, 4181, 4179, 4151, 4179, 4181, 4181, 4179, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 
150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 6656, 6658, 6660, 6662, 6664, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688, 6690, 6692, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6718, 6720, 6722, 6724, 6726, 6728, 6730, 6732, 6734, 6736, 6738, 6740, 6742, 6744, 6746, 6748, 6750, 6752, 6754, 6756, 6758, 6760, 6762, 6764, 6766, 6768, 6770, 6772, 6774, 6776, 6778, 6780, 6782, 6784, 6786, 6788, 6790, 6792, 6794, 6796, 6798, 6800, 6802, 6804, 6806, 6808, 6810, 6812, 6814, 6816, 6818, 6820, 6822, 6824, 6826, 6828, 6830, 6832, 6834, 6836, 6838, 6840, 6842, 6844, 6846, 6848, 6850, 6852, 6854, 6856, 6858, 6860, 6862, 6864, 6866, 6868, 6870, 6872, 6874, 6876, 6878, 6880, 6882, 6884, 6886, 6888, 6890, 6892, 6894, 6896, 6898, 6900, 6902, 6904, 6906, 6908, 6910, 6912, 6914, 6916, 6918, 6920, 6922, 6924, 6926, 6928, 6930, 6932, 6934, 6936, 6938, 6940, 6942, 6944, 6946, 6948, 6950, 6952, 6954, 6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992, 6994, 6996, 6998, 7000, 7002, 7004, 7006, 7008, 7010, 7012, 7014, 7016, 7018, 7020, 7022, 7024, 7026, 7028, 7030, 7032, 7034, 7036, 7038, 7040, 7042, 7044, 7046, 7048, 7050, 7052, 7054, 7056, 7058, 7060, 7062, 7064, 7066, 7068, 7070, 7072, 7074, 7076, 7078, 7080, 7082, 7084, 7086, 7088, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106, 7108, 7110, 7112, 7114, 7116, 7118, 7120, 7122, 7124, 7126, 7128, 7130, 7132, 7134, 7136, 7138, 7140, 7142, 7144, 
7146, 7148, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7178, 7180, 7182, 7184, 7186, 7188, 7190, 7192, 7194, 7196, 7198, 7200, 7202, 7204, 7206, 7208, 7210, 7212, 7214, 7216, 7218, 7220, 7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258, 7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296, 7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334, 7336, 7338, 7340, 7342, 7344, 7346, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372, 7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410, 7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448, 7450, 7452, 7454, 7456, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7474, 7476, 7478, 7480, 7482, 7484, 7486, 7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7516, 7518, 7520, 7522, 7524, 7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7542, 7543, 7544, 7545, 7546, 7547, 7548, 7550, 7552, 7554, 7555, 7556, 7557, 7558, 7559, 7560, 7561, 7562, 7563, 7564, 7565, 7566, 7567, 7568, 7569, 7570, 7571, 7572, 7573, 7574, 7575, 7576, 7577, 7578, 7579, 7580, 7581, 7582, 7583, 7584, 7585, 7586, 7587, 7588, 7589, 7590, 7591, 7592, 7593, 7594, 7595, 7596, 7597, 7598, 7599, 7600, 7601, 7602, 7603, 7604, 7605, 7606, 7607, 7608, 7609, 7610, 7611, 7612, 7613, 7614, 7615, 7616, 7617, 7618, 7619, 7620, 7621, 7622, 7623, 7624, 7625, 7626, 7627, 7628, 7629, 7630, 7631, 7632, 7633, 7634, 7635, 7636, 7637, 7638, 7639, 7640, 7641, 7642, 7643, 7644, 7645, 7646, 7647, 7649, 7651, 7653, 7655, 7657, 7659, 7661, 7663, 7665, 7667, 7669, 7671, 7673, 7675, 7677, 7679, 7681, 7683, 7684, 7685, 7687, 7689, 7691, 7693, 7695, 7697, 7699, 7701, 7702, 7703, 7704, 7705, 
7706, 7707, 7708, 7709, 7710, 7711, 7712, 7713, 7714, 7715, 7716, 7717, 7718, 7719, 7720, 7721, 7722, 7723, 7724, 7725, 7726, 7727, 7728, 7729, 7730, 7731, 7732, 7733, 7734, 7735, 7736, 7737, 7738, 7739, 7741, 7743, 7745, 7747, 7749, 7751, 7753, 7755, 7757, 7759, 7760, 7761, 7762, 7763, 7764, 7765, 7766, 7767, 7768, 7769, 7770, 7771, 7772, 7773, 7774, 7775, 7776, 7777, 7778, 7779, 7780, 7781, 7782, 7783, 7784, 7785, 7786, 7787, 7788, 7789, 7790, 7791, 7792, 7793, 7794, 7795, 7796, 7797, 7798, 7799, 7800, 7801, 7802, 7803, 7804, 7805, 7806, 7807, 7808, 7809, 7810, 7811, 7812, 7813, 7814, 7815, 7816, 7817, 7818, 7819, 7820, 7821, 7823, 7825, 7827, 7829, 7831, 7833, 7835, 7837, 7839, 7841, 7843, 7845, 7847, 7849, 7851, 7853, 7855, 7857, 7859, 7861, 7863, 7865, 7867, 7869, 7871, 7873, 7875, 7877, 7879, 7881, 7883, 7885, 7887, 7889, 7891, 7893, 7895, 7896, 7897, 7898, 7899, 7900, 7901, 7902, 7903, 7904, 7905, 7906, 7907, 7908, 7909, 7910, 7911, 7912, 7913, 7914, 7915, 7916, 7917, 7918, 7919, 7920, 7921, 7922, 7923, 7924, 7925, 7926, 7927, 7928, 7929, 7930, 7931, 7932, 7933, 7934, 7935, 7936, 7937, 7938, 7939, 7940, 7941, 7942, 7943, 7944, 7945, 7946, 7947, 7948, 7949, 7950, 7951, 7952, 7953, 7954, 7955, 7956, 7958, 7960, 7961, 7962, 7963, 7964, 7965, 7966, 7967, 7969, 7970, 7971, 7972, 7973, 7974, 7975, 7976, 7977, 7978, 7979, 7980, 7982, 7983, 7984, 7986, 7988, 7990, 7991, 7992, 7993, 7994, 7995, 7996, 7997, 7998, 7999, 8000, 8001, 8002, 8003, 8004, 8005, 8006, 8007, 8009, 8011, 8013, 8015, 8017, 8019, 8021, 8023, 8024, 8025, 8026, 8027, 8028, 8029, 8030, 8031, 8032, 8033, 8034, 8035, 8036, 8037, 8038, 8039, 8040, 8041, 8042, 8043, 8044, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 
157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 8449, 8451, 1808, 8543, 8623, 8635, 8586, 8631, 8633, 1877, 8449, 8451, 1808, 8543, 8625, 8637, 8627, 8639, 8586, 8631, 8633, 1877, 1408, 1403, 1408, 1403, 1413, 1423, 1418, 1433, 1428, 1438, 8193, 1456, 1451, 1438, 8193, 1456, 1451, 8195, 576, 576, 576, 576, 1580, 1580, 1580, 8197, 1188, 1183, 8201, 1188, 1183, 1209, 1234, 8647, 8205, 1188, 1183, 698, 698, 698, 1785, 1785, 8554, 576, 1790, 1770, 1790, 1468, 1468, 1770, 8652, 8210, 1699, 1699, 1699, 1699, 1699, 1214, 1229, 1224, 8654, 8656, 1731, 1736, 1736, 1731, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 698, 8224, 8578, 1736, 1731, 8227, 1752, 1736, 1731, 1739, 1737, 8227, 1752, 8225, 1752, 8227, 1752, 8554, 8230, 1188, 1183, 8234, 1188, 1183, 8238, 1757, 1752, 1775, 8241, 8243, 1808, 8245, 8247, 8249, 8251, 1757, 1209, 1234, 8659, 1438, 8255, 576, 1736, 1731, 8661, 1736, 1731, 8663, 576, 576, 576, 576, 8260, 8262, 1790, 1790, 1790, 1790, 1790, 1739, 1737, 8666, 8668, 8670, 8263, 8265, 1188, 1183, 8269, 1188, 1183, 1209, 1234, 8674, 1780, 1785, 1785, 1785, 1785, 8276, 1757, 1752, 1785, 1785, 1785, 1785, 1641, 1234, 1209, 8678, 1209, 1234, 8680, 698, 1752, 8284, 8286, 1752, 8287, 8288, 698, 1752, 1214, 1224, 1229, 1234, 1209, 8685, 1219, 8687, 1219, 8689, 8458, 1188, 1183, 1234, 1209, 8691, 8442, 8693, 1699, 1694, 1643, 1877, 8300, 1188, 1183, 1214, 1224, 1229, 8698, 1214, 1224, 1229, 8701, 1214, 1224, 1229, 8703, 8705, 1219, 8707, 1219, 8709, 8711, 8713, 1219, 8715, 1219, 8717, 8719, 8721, 8316, 1188, 1183, 8723, 1188, 1183, 
1408, 1403, 1408, 1403, 1371, 1438, 844, 8328, 857, 576, 8554, 8726, 576, 1555, 1643, 1641, 1580, 1580, 1580, 1580, 1580, 1468, 1468, 1468, 1468, 576, 8578, 576, 8554, 576, 576, 1209, 1234, 8733, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 1214, 1229, 1224, 1219, 1229, 1224, 1731, 1736, 1737, 1739, 698, 1757, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 698, 698, 698, 8352, 8354, 8356, 8358, 8360, 698, 1757, 1736, 1731, 1739, 1737, 698, 1757, 8365, 8366, 1864, 8536, 8755, 1165, 1160, 1165, 1160, 1165, 1160, 8475, 8375, 1188, 1183, 1214, 1224, 1229, 1234, 1209, 8765, 1165, 1160, 1165, 1160, 1165, 1160, 8475, 8375, 1188, 1183, 1219, 8767, 1219, 8769, 1209, 1234, 8771, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 8385, 1188, 1183, 1165, 1160, 8463, 1188, 1183, 1214, 1229, 1224, 1234, 1209, 8773, 1165, 1160, 8775, 1188, 1183, 1219, 1229, 1224, 1234, 1209, 8777, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1418, 1423, 1428, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1418, 1423, 1408, 1403, 8779, 8398, 844, 8401, 857, 8404, 8406, 8408, 8410, 8412, 8414, 1456, 1456, 8425, 8554, 1736, 1731, 1736, 1731, 1739, 8433, 1757, 1752, 1699, 1694, 8597, 1717, 8419, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8422, 8424, 1736, 1731, 8446, 8787, 1699, 1694, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8425, 8791, 1736, 1731, 8446, 8793, 8597, 1717, 8427, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8433, 1757, 1752, 8609, 1775, 1770, 1780, 8435, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8442, 8796, 1736, 1731, 1739, 8446, 8800, 1699, 1694, 1790, 8449, 8451, 8586, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 8813, 1188, 1183, 1165, 1160, 8463, 1188, 1183, 8815, 8817, 1234, 1209, 8819, 8821, 8823, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 8458, 1188, 1183, 1165, 1160, 1165, 1160, 1155, 8463, 1188, 1183, 1219, 1214, 8826, 1234, 1209, 8828, 1165, 1160, 1165, 1160, 1165, 1160, 8475, 8477, 1188, 1183, 1219, 1214, 1229, 1224, 1209, 
8832, 1219, 1214, 1229, 1224, 1234, 8834, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 8536, 8540, 8490, 8836, 8540, 1408, 1403, 1408, 1403, 1413, 8536, 8839, 8540, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1408, 1403, 1408, 1403, 1413, 8843, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1418, 1423, 1428, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1418, 1423, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 8519, 8849, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1423, 1418, 1433, 1428, 1371, 8522, 1456, 1451, 8540, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1423, 1418, 1433, 1428, 1438, 8536, 1456, 1451, 8540, 1468, 1468, 1468, 1468, 1468, 1468, 8541, 8542, 8623, 8583, 8586, 8631, 8633, 1478, 8543, 1739, 1737, 8547, 1757, 1752, 1699, 1694, 1717, 8554, 8556, 8558, 8560, 1555, 1643, 1641, 1580, 1580, 1580, 1580, 1580, 1580, 1580, 1736, 1731, 1739, 1737, 8605, 1757, 1752, 1694, 1699, 1770, 8597, 1775, 1717, 8578, 8609, 1775, 1770, 1780, 8580, 8582, 8857, 8621, 8623, 8583, 1877, 1864, 8586, 8631, 8633, 1736, 1731, 1737, 1736, 1731, 1739, 8605, 1757, 1752, 1699, 1694, 1699, 1694, 8597, 1775, 1717, 1790, 1785, 1736, 1731, 8861, 1736, 1731, 8863, 8605, 1757, 1752, 8609, 1775, 1770, 1780, 1790, 1785, 1795, 8617, 1808, 8619, 8621, 8865, 8623, 8867, 8625, 8869, 8627, 8871, 8629, 8631, 8633, 1877, 8642, 8912, 3128, 3123, 3128, 3123, 3128, 3123, 8807, 8914, 8809, 8916, 8918, 8920, 8643, 8922, 8809, 8799, 8798, 8799, 8798, 8799, 8798, 8799, 8798, 8860, 8874, 8873, 8874, 8873, 8798, 2983, 8874, 8873, 8874, 8873, 8658, 8790, 8799, 8798, 8799, 8798, 8799, 8676, 8676, 8860, 8859, 8799, 8732, 2983, 2983, 8729, 8798, 8789, 8732, 8790, 8789, 8859, 8926, 3128, 3123, 3128, 3123, 3128, 3123, 8807, 8929, 8738, 8886, 8931, 3128, 3123, 3128, 3123, 3128, 3123, 8909, 8754, 8744, 8934, 3128, 3123, 3128, 3123, 3128, 3123, 8909, 8754, 8812, 8936, 8938, 8940, 8740, 8942, 8944, 8946, 8741, 8948, 8950, 8952, 8742, 8954, 8956, 8743, 8958, 8758, 8886, 3128, 3123, 8754, 8744, 8960, 3128, 3123, 3128, 3123, 8750, 3143, 
8962, 8964, 8966, 8968, 8970, 3143, 8754, 8758, 3098, 3093, 3098, 3093, 3098, 3093, 8763, 8764, 8975, 8977, 8979, 8851, 8841, 8841, 8851, 8790, 8789, 2983, 8983, 3128, 3123, 3128, 3123, 3128, 3123, 8807, 8809, 8986, 8988, 3128, 3123, 8812, 8847, 8847, 2983, 2983, 2983, 3098, 3093, 3064, 3098, 3093, 8903, 3093, 3098, 3098, 3093, 3128, 3123, 3128, 3123, 3128, 3123, 8909, 3064, 3064, 3064, 8995, 8886, 3093, 3098, 3128, 3123, 3128, 3123, 3128, 3123, 8909, 3143, 8897, 3098, 3093, 8891, 3098, 3093, 3093, 3098, 3128, 3123, 3128, 3123, 3128, 3123, 8909, 3064, 3064, 3064, 9000, 8886, 3093, 3098, 3098, 3093, 3098, 3093, 8891, 3128, 3123, 3064, 3064, 3064, 9002, 9004, 3093, 3098, 3128, 3123, 3128, 3123, 3128, 3123, 8909, 3143, 8897, 3093, 3098, 3098, 3093, 3098, 3093, 8903, 3128, 3123, 3128, 3123, 3128, 3123, 8909, 3143, 3143, 9012, 9014, 9011, 9010, 9011, 9010, 9011, 9010, 9011, 9010, 9027, 9022, 9024, 9011, 9010, 9011, 9010, 9011, 9010, 9011, 9010, 9011, 9010, 4149, 4144, 8973, 9022, 9024, 9029, 4149, 4144, 9031, 9022, 9024, 9033, 4149, 4144, 8973, 9024, 9035, 4149, 4144, 8973, 9022, 9024, 4061, 4061, 4151, 9011, 9010, 9011, 9010, 4149, 4144, 4156, 4156, 4156, 4149, 4144, 4161, 4161, 4161, 9040, 4149, 4144, 4161, 4156, 9022, 9024, 9045, 9044, 9043, 9044, 9043, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 
255, 9216, 9217, 9218, 9219, 9220, 9222, 9223, 9224, 9225, 9226, 9227, 9228, 9229, 9230, 9232, 9234, 9235, 9236, 9237, 9238, 9239, 9240, 9241, 9242, 9243, 9244, 9245, 9246, 9247, 9248, 9249, 9250, 9251, 9252, 9253, 9254, 9255, 9256, 9257, 9258, 9259, 9260, 9261, 9262, 9263, 9264, 9265, 9266, 9267, 9268, 9269, 9270, 9272, 9273, 9274, 9275, 9276, 9277, 9278, 9279, 9280, 9281, 9282, 9283, 9284, 9285, 9286, 9287, 9289, 9290, 9291, 9292, 9293, 9294, 9295, 9296, 9297, 9300, 9301, 9302, 9303, 9304, 9305, 9306, 9307, 9308, 9309, 9310, 9311, 9312, 9313, 9314, 9315, 9316, 9317, 9318, 9319, 9320, 9321, 9322, 9323, 9324, 9325, 9326, 9327, 9328, 9329, 9330, 9331, 9332, 9333, 9334, 9335, 9336, 9337, 9338, 9339, 9340, 9341, 9342, 9343, 9344, 9345, 9346, 9347, 9348, 9350, 9351, 9352, 9353, 9354, 9356, 9357, 9359, 9360, 9361, 9362, 9363, 9364, 9365, 9366, 9367, 9368, 9369, 9370, 9371, 9375, 9376, 9377, 9378, 9379, 9380, 9381, 9382, 9383, 9385, 9386, 9387, 9388, 9389, 9390, 9391, 9392, 9393, 9394, 9395, 9396, 9397, 9398, 9399, 9401, 9402, 9404, 9405, 9406, 9407, 9408, 9409, 9410, 9411, 9412, 9413, 9414, 9415, 9416, 9417, 9419, 9421, 9423, 9424, 9425, 9426, 9427, 9429, 9431, 9432, 9433, 9434, 9435, 9436, 9437, 9438, 9439, 9440, 9442, 9443, 9444, 9446, 9447, 9448, 9451, 9453, 9457, 9459, 9463, 9464, 9465, 9467, 9468, 9469, 9470, 9471, 9472, 9473, 9474, 9475, 9476, 9477, 9478, 9479, 9481, 9482, 9483, 9484, 9485, 9486, 9487, 9488, 9489, 9490, 9491, 9492, 9493, 9494, 9495, 9496, 9497, 9498, 9499, 9500, 9501, 9503, 9504, 9505, 9506, 9507, 9508, 9509, 9510, 9511, 9512, 9513, 9514, 9515, 9516, 9517, 9518, 9519, 9520, 9521, 9522, 9523, 9524, 9525, 9526, 9527, 9528, 9529, 9530, 9531, 9532, 9533, 9534, 9535, 9536, 9537, 9538, 9539, 9540, 9541, 9542, 9543, 9544, 9545, 9546, 9547, 9548, 9550, 9551, 9552, 9553, 9554, 9555, 9556, 9557, 9558, 9559, 9560, 9561, 9562, 9563, 9564, 9566, 9567, 9568, 9569, 9570, 9571, 9572, 9573, 9574, 9575, 9576, 9578, 9580, 9581, 9583, 9584, 9585, 9586, 9587, 9588, 
9589, 9590, 9591, 9592, 9593, 9594, 9595, 9596, 9597, 9598, 9599, 9600, 9601, 9602, 9604, 9605, 9607, 9608, 9609, 9610, 9611, 9612, 9613, 9615, 9616, 9617, 9618, 9619, 9620, 9621, 9622, 9623, 9624, 9625, 9626, 9627, 9628, 9629, 9630, 9631, 9632, 9633, 9634, 9635, 9636, 9637, 9638, 9639, 9640, 9641, 9642, 9643, 9644, 9645, 9646, 9647, 9648, 9649, 9651, 9652, 9653, 9654, 9655, 9656, 9657, 9658, 9659, 9660, 9661, 9662, 9663, 9664, 9665, 9666, 9667, 9668, 9669, 9670, 9671, 9672, 9673, 9674, 9675, 9676, 9677, 9678, 9679, 9680, 9681, 9682, 9683, 9684, 9685, 9686, 9687, 9688, 9689, 9691, 9692, 9693, 9694, 9695, 9696, 9697, 9698, 9699, 9700, 9702, 9703, 9704, 9706, 9707, 9708, 9709, 9710, 9711, 9712, 9713, 9714, 9715, 9716, 9717, 9718, 9719, 9720, 9721, 9722, 9723, 9724, 9725, 9726, 9727, 9728, 9729, 9730, 9731, 9733, 9734, 9735, 9736, 9738, 9739, 9740, 9741, 9742, 9743, 9744, 9745, 9746, 9747, 9748, 9749, 9750, 9752, 9753, 9754, 9755, 9756, 9757, 9758, 9761, 9762, 9766, 9767, 9768, 9769, 9770, 9771, 9772, 9773, 9774, 9775, 9776, 9777, 9778, 9779, 9780, 9781, 9782, 9783, 9784, 9785, 9787, 9788, 9790, 9791, 9792, 9793, 9794, 9795, 9796, 9797, 9798, 9799, 9800, 9801, 9802, 9803, 9804, 9806, 9807, 9808, 9809, 9810, 9812, 9813, 9814, 9815, 9816, 9817, 9818, 9819, 9820, 9821, 9823, 9824, 9825, 9826, 9827, 9828, 9829, 9831, 9832, 9833, 9834, 9835, 9836, 9837, 9838, 9839, 9840, 9841, 9842, 9843, 9845, 9846, 9847, 9848, 9849, 9850, 9851, 9852, 9853, 9854, 9855, 9856, 9857, 9858, 9859, 9860, 9861, 9862, 9863, 9864, 9865, 9866, 9867, 9868, 9869, 9870, 9871, 9873, 9874, 9875, 9876, 9877, 9878, 9879, 9880, 9881, 9882, 9883, 9884, 9885, 9886, 9887, 9888, 9889, 9890, 9891, 9892, 9893, 9894, 9895, 9896, 9897, 9898, 9899, 9900, 9901, 9902, 9903, 9904, 9905, 9906, 9907, 9908, 9909, 9910, 9911, 9912, 9913, 9914, 9915, 9916, 9917, 9918, 9919, 9920, 9921, 9922, 9923, 9924, 9925, 9926, 9927, 9928, 9929, 9930, 9931, 9932, 9933, 9934, 9935, 9936, 9937, 9938, 9939, 9940, 9941, 9942, 9943, 9944, 
9945, 9946, 9947, 9948, 9949, 9950, 9951, 9952, 9953, 9954, 9955, 9956, 9957, 9958, 9959, 9960, 9961, 9963, 9964, 9965, 9966, 9967, 9968, 9969, 9970, 9971, 9972, 9973, 9974, 9975, 9976, 9977, 9978, 9979, 9980, 9981, 9982, 9983, 9984, 9985, 9986, 9987, 9988, 9989, 9990, 9992, 9993, 9995, 9996, 9997, 9998, 9999, 10000, 10001, 10002, 10003, 10004, 10005, 10006, 10007, 10008, 10010, 10012, 10014, 10016, 10017, 10018, 10019, 10020, 10022, 10023, 10024, 10025, 10026, 10027, 10028, 10030, 10034, 10036, 10037, 10038, 10039, 10040, 10041, 10042, 10043, 10044, 10045, 10046, 10047, 10048, 10049, 10050, 10051, 10052, 10053, 10054, 10055, 8651, 8651, 8651, 8847, 9299, 10056, 10057, 10058, 10059, 10060, 10061, 8672, 8672, 8672, 10062, 10063, 10064, 10065, 10066, 10067, 9441, 9445, 9450, 9456, 9462, 10068, 10069, 10070, 10071, 10072, 10073, 10074, 10075, 10076, 10077, 10079, 10080, 10081, 10082, 10083, 10084, 10085, 10087, 10088, 10090, 10091, 10092, 10093, 10094, 10095, 10096, 10097, 10098, 10100, 10101, 10102, 10103, 10104, 10105, 10106, 10107, 10108, 10112, 10116, 10120, 10123, 10125, 10126, 10127, 10128, 10129, 10130, 10132, 10133, 10134, 10135, 10136, 10137, 10143, 10144, 8851, 10145, 10146, 10147, 10148, 10149, 10150, 10151, 10152, 10153, 8847, 10157, 10158, 10159, 10160, 10161, 10162, 10163, 10165, 10166, 10167, 10168, 10169, 10170, 10171, 10172, 10175, 10176, 10177, 9760, 9765, 8851, 8851, 10178, 8847, 10179, 8851, 10180, 10181, 10182, 10183, 10184, 10185, 10186, 10187, 10188, 10189, 10190, 10191, 10192, 10193, 10194, 10195, 10196, 10197, 10198, 10199, 10200, 10201, 10202, 10204, 10205, 10206, 10207, 10208, 10209, 10210, 10211, 10212, 10213, 10214, 10215, 10216, 10217, 10218, 10219, 10220, 10221, 10222, 10223, 10224, 10225, 10226, 10227, 10228, 10229, 10230, 10231, 10232, 10234, 10235, 10236, 10237, 10238, 10239, 10240, 10241, 10242, 10243, 10244, 10245, 10246, 10249, 10250, 10251, 10252, 10253, 10254, 10255, 10256, 10257, 10258, 10259, 10260, 10261, 10262, 10263, 10264, 
10265, 10266, 10267, 10268, 10269, 10270, 10271, 10272, 10273, 10274, 10275, 9007, 9006, 10029, 10278, 10279, 9007, 9006, 9009, 9008, 9007, 9006, 9009, 9008, 10035, 10280, 10281, 10203, 10282, 10283, 10276, 10284, 10285, 10287, 10288, 9007, 9006, 10086, 10289, 10290, 9007, 9006, 10276, 10291, 10292, 9007, 9006, 10276, 10293, 10294, 9007, 9006, 9009, 9008, 9007, 9006, 9009, 9008, 10124, 10295, 10296, 9007, 9006, 10276, 10297, 10298, 10299, 10300, 10301, 10302, 10303, 9006, 9007, 10305, 10306, 10308, 10309, 9006, 9007, 9007, 9006, 10311, 10312, 10313, 10314, 9006, 9007, 9007, 9006, 10316, 10317, 10318, 10319, 10320, 10321, 10322, 8981, 8981, 8981, 10323, 9007, 9006, 10203, 10324, 10325, 9007, 9006, 10276, 10326, 10327, 10328, 10329, 10330, 10331, 10332, 10333, 10334, 10335, 10336, 10337, 9016, 9016, 10339, 10340, 10341, 10342, 10343, 10344, 9043, 10345, 10346, 10347, 9043, 9043, 10345, 10348, 10349, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 10515, 10517, 10520, 10522, 10526, 10530, 10541, 10544, 10546, 10549, 10571, 10573, 10575, 10577, 10579, 10581, 10587, 10591, 10593, 10603, 10606, 10609, 10620, 10625, 10627, 10640, 10644, 10647, 10649, 
10657, 10664, 10666, 10678, 10680, 10685, 10687, 10690, 10695, 10698, 10701, 10704, 10711, 10713, 10715, 10717, 10719, 10728, 10745, 10747, 10749, 10751, 10755, 10758, 10760, 10762, 10766, 10768, 10770, 10783, 10785, 10793, 10795, 10797, 10801, 10804, 10806, 10808, 10810, 10812, 10816, 10820, 10822, 10824, 10826, 10830, 10832, 10835, 10838, 10840, 10842, 10844, 10847, 10849, 10851, 10853, 10855, 10858, 10860, 10862, 10865, 10868, 10870, 10872, 10875, 10877, 10879, 10882, 10884, 10900, 10902, 10906, 10908, 10913, 10915, 10917, 10922, 10925, 10927, 10929, 10931, 10935, 10941, 10943, 10945, 10949, 10952, 10956, 10958, 10960, 10964, 10968, 10974, 10976, 10978, 10981, 10983, 10986, 10988, 10990, 10992, 10994, 10998, 11000, 11002, 11006, 11008, 11010, 11012, 11014, 11016, 11020, 11022, 11024, 11027, 11029, 11032, 11034, 11036, 11043, 11045, 11050, 11052, 11054, 11057, 11059, 11062, 11064, 11066, 11069, 11072, 11074, 11076, 11079, 11081, 11083, 11085, 11089, 11091, 11093, 11096, 11098, 11102, 11105, 11107, 11109, 11112, 11114, 11118, 11136, 11139, 11141, 11149, 11158, 11160, 11163, 11165, 11173, 11181, 11186, 11189, 11193, 11195, 11197, 11202, 11204, 11206, 11209, 11212, 11215, 10497, 10499, 9221, 10009, 10504, 8874, 8873, 10506, 10508, 9233, 9231, 10514, 8874, 8873, 10525, 8855, 10529, 8855, 11230, 11232, 11234, 10966, 10966, 10962, 10966, 8784, 11169, 11169, 11169, 10676, 8785, 8682, 10669, 10672, 10676, 10791, 11249, 8874, 8873, 8874, 8873, 10791, 11251, 10590, 8785, 8682, 10600, 10598, 8682, 8785, 8682, 10554, 10563, 10554, 10555, 8874, 8873, 10742, 11147, 8732, 8874, 8873, 8874, 8873, 8874, 8873, 10556, 10740, 8682, 8682, 8682, 8682, 8682, 8682, 8682, 8682, 10563, 10558, 10791, 11255, 10559, 10560, 8874, 8873, 10563, 10791, 11257, 11259, 11260, 11261, 11262, 11167, 8786, 8785, 8859, 8860, 8859, 11167, 11167, 11263, 8785, 8785, 8682, 8785, 8682, 10672, 10586, 10590, 10601, 10596, 10601, 10598, 10600, 10601, 10611, 8874, 8873, 10611, 10613, 10615, 8874, 8873, 8786, 
8786, 8735, 10782, 8786, 8735, 10619, 10782, 10623, 10788, 10765, 8786, 8735, 8786, 8735, 8874, 8873, 10782, 10970, 10970, 10635, 10636, 10636, 10637, 10638, 10639, 10962, 10966, 11270, 11271, 11272, 10642, 10663, 10659, 10660, 10652, 10653, 10654, 10655, 10659, 10660, 10661, 10662, 10663, 8785, 10669, 8785, 8682, 8874, 8873, 8874, 8873, 10672, 11167, 11167, 8874, 8873, 10676, 11167, 9422, 9420, 9430, 8696, 8856, 8696, 8856, 11228, 8696, 8856, 10693, 11228, 10692, 8696, 8856, 10693, 11228, 8696, 8856, 10693, 11228, 8695, 8696, 8856, 10693, 11228, 11279, 11280, 11281, 9454, 9452, 11282, 9460, 9458, 11283, 10723, 8785, 11147, 8874, 8873, 8874, 8873, 8874, 8873, 8785, 10725, 11147, 9480, 8874, 8873, 8785, 10742, 8785, 10776, 8728, 8874, 8873, 8874, 8873, 8874, 8873, 8728, 8874, 8873, 8874, 8873, 8785, 10740, 8785, 10742, 8785, 8785, 11147, 8874, 8873, 8874, 8873, 8874, 8873, 10791, 8874, 8873, 10765, 8874, 8873, 10791, 8874, 8873, 8786, 11291, 8786, 8735, 10776, 8874, 8873, 8874, 8873, 8874, 8873, 10782, 8874, 8873, 10791, 8874, 8873, 10788, 8874, 8873, 10791, 8874, 8873, 11294, 11296, 11298, 11303, 11305, 11307, 11312, 11314, 11316, 11327, 11331, 11333, 10792, 11339, 11341, 11343, 11345, 9579, 9577, 11349, 10889, 10887, 10891, 10893, 11352, 10895, 8841, 8851, 9732, 10899, 8784, 10912, 8786, 8785, 9690, 11354, 9701, 9705, 8795, 10940, 10955, 9732, 9737, 10970, 10972, 11135, 10011, 10009, 10015, 10013, 11228, 8874, 8873, 11357, 11359, 11361, 11365, 11368, 11369, 9805, 9811, 11039, 8841, 11101, 8841, 11370, 11048, 8841, 11371, 11373, 11088, 11375, 11101, 8855, 11117, 8855, 10011, 10009, 11228, 8874, 8873, 11169, 11144, 11145, 11147, 9962, 10011, 8856, 8873, 11228, 8874, 8873, 11147, 11135, 10011, 10009, 10015, 10013, 11228, 8874, 8873, 11169, 11144, 11145, 11147, 10011, 8856, 8873, 11228, 8874, 8873, 11169, 11171, 11176, 11218, 9962, 10011, 10009, 11228, 8874, 8873, 11200, 11218, 11220, 10011, 10009, 10015, 10013, 11228, 8874, 8873, 11379, 11382, 11385, 11387, 11389, 
11391, 11393, 11400, 11402, 11404, 11406, 11411, 11414, 11416, 11418, 11420, 11422, 11429, 11431, 11433, 11436, 11441, 11443, 11445, 11447, 11452, 11454, 11456, 11459, 11461, 11463, 8972, 8991, 8972, 8972, 11468, 11469, 11458, 11470, 8991, 11473, 11474, 11238, 11324, 11475, 11476, 11477, 11478, 11238, 11324, 11479, 11480, 11481, 8991, 11484, 11487, 11264, 11265, 11287, 11289, 11458, 11492, 11493, 11494, 8972, 8991, 11458, 11497, 11498, 11499, 8991, 8972, 11458, 11502, 11503, 11504, 8991, 8972, 11321, 11507, 11508, 11322, 11509, 11510, 11323, 11511, 11512, 11324, 11513, 11514, 11515, 8972, 8991, 11458, 11518, 11519, 11520, 8991, 8972, 11523, 11528, 11529, 11384, 9011, 9010, 10203, 8972, 8991, 11530, 11534, 11535, 11384, 9011, 9010, 10203, 8972, 8991, 11458, 11536, 11537, 10276, 9011, 9010, 8991, 8972, 11538, 11542, 11543, 11384, 8972, 8991, 11458, 11544, 11545, 8991, 8972, 11546, 10203, 9011, 9010, 8972, 8991, 10276, 9011, 9010, 8991, 8972, 8981, 8981, 8981, 8981, 11553, 11554, 11555, 11557, 11558, 11384, 11559, 8991, 11458, 11562, 11563, 11564, 8991, 9011, 9010, 10203, 11567, 10203, 9011, 9010, 9016, 10276, 9011, 9010, 9016, 11572, 10233, 9011, 9010, 9016, 10247, 9011, 9010, 11577, 10276, 9011, 9010, 9016, 10276, 9011, 9010, 11578, 11579, 11581, 10338, 9044, 9043, 10345, 9044, 11585, 11586, 10345, 9044, 10345, 9044, 11589, 10304, 9044, 9043, 10310, 9044, 11590, 10315, 9044, 9043, 10338, 9044, 9043, 10345, 9044, 9043, 10338, 9044, 9043, 10345, 9044, 11591, 10338, 9044, 9043, 10338, 9044, 9043, 10345, 9044, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 
185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 11975, 11976, 11977, 11978, 11979, 11980, 11981, 11982, 11983, 11984, 11985, 11986, 11987, 11988, 10519, 8853, 8852, 11779, 11989, 11990, 8854, 11991, 11992, 8854, 11996, 11801, 11959, 11830, 11997, 11998, 11999, 11954, 12000, 12001, 12002, 12003, 12004, 12005, 12006, 12007, 12008, 12009, 12010, 12012, 12013, 12014, 12015, 12016, 11782, 11783, 9271, 11785, 12018, 10583, 8799, 8798, 12019, 12020, 12021, 12022, 12023, 10583, 8799, 8798, 12024, 10966, 8799, 8798, 12025, 12026, 12027, 12028, 12029, 12030, 12031, 12032, 12033, 12034, 12035, 12036, 12037, 12038, 12039, 12040, 12041, 12042, 10966, 8799, 8798, 12043, 12044, 10966, 8799, 8798, 12045, 12046, 10966, 8799, 8798, 12047, 12048, 12049, 12050, 12051, 12052, 12053, 12055, 12056, 12057, 12058, 12059, 12060, 12066, 12067, 12068, 12069, 12070, 12071, 12072, 12073, 11786, 12075, 10966, 8799, 8798, 10583, 8799, 8798, 12076, 12077, 12078, 12079, 12080, 12081, 11801, 12082, 12083, 11794, 12084, 12085, 12086, 11954, 12087, 12088, 11795, 11796, 11797, 12089, 12090, 12091, 12092, 12093, 12094, 12095, 12096, 12097, 12098, 12099, 12100, 12101, 12102, 12103, 12104, 9349, 12105, 12106, 9358, 9355, 12107, 12108, 12109, 12110, 12111, 12112, 12113, 12114, 12115, 12116, 12117, 12118, 12119, 12120, 12121, 12122, 11830, 11959, 11954, 11801, 12123, 12124, 12128, 12129, 11802, 11803, 9384, 12130, 12131, 12132, 12133, 12134, 12135, 11805, 12136, 12137, 12138, 12139, 12140, 9400, 9403, 12141, 12142, 12143, 12144, 12145, 12146, 12147, 12148, 12149, 12150, 12151, 12152, 12153, 12154, 12155, 11808, 9418, 12156, 12157, 11810, 9428, 12158, 8859, 12159, 12160, 12161, 12162, 12163, 12164, 12165, 
12166, 12167, 12168, 12169, 12170, 12171, 12172, 12173, 12174, 12175, 12176, 12177, 12178, 12179, 12180, 12181, 11813, 11814, 11815, 11816, 12185, 12186, 12188, 12189, 11817, 11818, 11061, 8853, 8852, 12191, 10721, 12192, 12193, 12194, 12195, 12196, 12197, 12198, 12199, 12200, 12201, 12202, 12203, 12204, 12205, 12206, 12207, 11822, 12208, 12209, 12210, 12211, 12212, 12213, 12214, 12215, 12216, 12217, 12218, 12219, 12220, 12221, 12222, 12223, 12224, 12225, 12226, 12227, 12228, 12229, 12230, 12231, 12232, 9502, 10753, 8825, 8830, 11827, 11828, 12233, 12234, 12235, 12236, 12237, 11830, 12238, 12239, 12240, 12241, 12242, 12243, 10772, 8799, 8798, 12244, 12246, 12247, 12248, 12249, 12250, 12251, 12252, 12253, 12254, 12255, 12256, 12257, 12258, 12259, 12260, 11835, 12261, 12262, 12263, 12264, 12265, 12266, 12279, 10799, 8831, 8830, 11839, 11840, 9565, 10814, 8831, 8830, 11845, 12284, 12285, 9582, 10828, 8825, 8830, 11850, 11004, 8825, 8830, 11852, 11853, 9603, 10996, 8825, 8830, 11856, 11857, 9614, 10857, 8853, 8852, 10864, 8853, 8852, 10867, 10874, 8853, 8852, 10881, 8853, 8852, 8845, 11061, 8853, 8852, 12287, 12288, 12289, 12290, 12292, 12293, 12294, 12295, 12296, 10904, 8799, 8798, 11876, 12297, 8860, 12298, 10919, 8799, 8798, 12299, 12300, 10966, 8799, 8798, 12301, 8859, 10933, 8799, 8798, 12303, 10966, 8799, 8798, 12304, 12305, 12306, 10947, 8799, 8798, 11890, 11891, 12307, 10962, 8799, 8798, 12308, 10966, 8799, 8798, 12309, 8859, 12310, 12311, 12312, 12313, 12314, 12315, 12316, 12317, 12318, 12319, 10980, 8825, 8830, 11900, 11004, 8825, 8830, 11902, 9763, 10996, 8825, 8830, 11907, 11004, 8825, 8830, 11910, 9786, 9789, 11018, 8831, 8830, 11916, 11918, 12326, 11920, 12327, 11038, 8853, 8852, 12328, 12329, 12330, 12331, 11047, 8853, 8852, 12333, 12334, 11056, 8853, 8852, 11061, 8853, 8852, 11068, 8853, 8852, 11071, 11078, 8853, 8852, 8845, 11087, 8853, 8852, 12337, 11095, 8853, 8852, 11946, 12339, 12340, 8854, 11111, 8853, 8852, 11952, 12341, 12342, 8854, 11957, 
12343, 12344, 11963, 12345, 12346, 12347, 11954, 11955, 12348, 11167, 12349, 11962, 12350, 12351, 12352, 12353, 12354, 12355, 11963, 12356, 12357, 12358, 12359, 12360, 12361, 12362, 12363, 12364, 12365, 12366, 12367, 11954, 11955, 12368, 11167, 12369, 11962, 12370, 12371, 11957, 12372, 12373, 12374, 11963, 12375, 12376, 12377, 11959, 11960, 12378, 11167, 12379, 11962, 12380, 12381, 12382, 12383, 12384, 11963, 12385, 12386, 12387, 11191, 11188, 11966, 12388, 8860, 8859, 11969, 9994, 9991, 11972, 11973, 11974, 12389, 12390, 12391, 12392, 12393, 12394, 12395, 12396, 12397, 12429, 12430, 12431, 12432, 12435, 12433, 11236, 9009, 9008, 12436, 12437, 12440, 12438, 12441, 12446, 12444, 12447, 12450, 12451, 12452, 12453, 12245, 11377, 8992, 11377, 8992, 11377, 8992, 11377, 8992, 12454, 12455, 12245, 12245, 12456, 12457, 12245, 12458, 11300, 9009, 9008, 12461, 12462, 12463, 12464, 11309, 9009, 9008, 12467, 12468, 12469, 12470, 11318, 9009, 9008, 12473, 12474, 12475, 12476, 12479, 12482, 12485, 12488, 12489, 12490, 12491, 11465, 9009, 9008, 12494, 12495, 12496, 12500, 12498, 11363, 9009, 9008, 12501, 12502, 12503, 12504, 12505, 12509, 12507, 11363, 9009, 9008, 12510, 12511, 12512, 12513, 12514, 12515, 11408, 9009, 9008, 12518, 12519, 12520, 12521, 12522, 12526, 12524, 11363, 9009, 9008, 12527, 12528, 12529, 12532, 12533, 12535, 12536, 12537, 12538, 12539, 12540, 12541, 12542, 12543, 12544, 12545, 12546, 12547, 11347, 9007, 9006, 12548, 12554, 12552, 11363, 9009, 9008, 12555, 12556, 12557, 11408, 9009, 9008, 12560, 12561, 9007, 9006, 11384, 11395, 9009, 9008, 12562, 12563, 12564, 9007, 9006, 11384, 11395, 9009, 9008, 12566, 12567, 12568, 12569, 11458, 9007, 9006, 11408, 9009, 9008, 12570, 12571, 12572, 12573, 9006, 9007, 11413, 11424, 9009, 9008, 12575, 12576, 12577, 12578, 11435, 9007, 9006, 11465, 9009, 9008, 12579, 12580, 12581, 11458, 9007, 9006, 11449, 9009, 9008, 12583, 12584, 12585, 12586, 11458, 9007, 9006, 11465, 9009, 9008, 12587, 12588, 12589, 11551, 12593, 12594, 
12595, 9042, 12596, 12597, 9042, 12599, 9026, 9026, 9037, 12600, 12601, 9043, 12602, 12603, 9042, 12605, 12606, 12607, 9037, 12608, 12609, 9042, 12611, 12612, 12613, 9037, 12614, 12615, 12616, 9042, 12617, 12618, 12619, 11551, 12620, 12621, 12622, 11552, 11556, 12623, 12624, 9043, 9042, 12625, 9039, 12626, 12627, 12628, 9039, 12629, 12630, 12631, 9042, 12632, 12633, 9043, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 12802, 12804, 12809, 12811, 12814, 12815, 12816, 12817, 12820, 12823, 12824, 12825, 12826, 12827, 12828, 12829, 12830, 12831, 12843, 12845, 12848, 12849, 12850, 12851, 12853, 12854, 12855, 12861, 12862, 12863, 12865, 12866, 12867, 12873, 12878, 12880, 12882, 12886, 12887, 12888, 12891, 12892, 12893, 12896, 12897, 12898, 12908, 12913, 12920, 12922, 12923, 12924, 12925, 12926, 12927, 12934, 12937, 12941, 12944, 12945, 12946, 12948, 12953, 12963, 12966, 12967, 12973, 12984, 12985, 12986, 12987, 12988, 12989, 12992, 12993, 12994, 13001, 13007, 13008, 13013, 13015, 13020, 13024, 13025, 13026, 13028, 13029, 13031, 13032, 13034, 13037, 13042, 13046, 13051, 13055, 13056, 13057, 13058, 13059, 13061, 13063, 13064, 13065, 13066, 13067, 13069, 13072, 13074, 13076, 13082, 13086, 13090, 13092, 13094, 13097, 13099, 13108, 13110, 13112, 13113, 13114, 13115, 13116, 13117, 13118, 13121, 13123, 13125, 13128, 13130, 13131, 13132, 13137, 
13139, 13141, 13144, 13147, 13149, 13151, 13154, 13157, 13158, 13159, 13160, 13161, 13162, 13163, 13164, 13165, 13166, 13167, 13169, 13170, 13171, 13172, 13173, 13174, 13175, 13176, 13177, 13178, 13179, 13180, 13181, 13182, 13183, 13184, 13185, 13186, 13187, 13188, 13189, 13190, 13191, 13192, 13193, 13194, 13195, 13196, 13197, 13198, 13199, 13200, 13201, 13202, 13203, 13208, 13212, 13213, 13214, 13215, 13217, 13219, 13220, 13221, 13222, 13224, 13225, 13226, 13228, 13229, 13230, 13231, 13233, 13234, 13235, 13239, 13240, 13241, 13242, 13243, 13245, 13246, 13247, 13249, 13250, 13251, 13253, 13257, 13259, 13261, 13264, 13265, 13266, 13267, 13268, 13269, 13270, 13271, 13272, 13273, 13274, 13275, 13276, 13277, 13278, 13279, 13280, 13281, 13282, 13283, 13284, 13285, 13286, 13287, 13289, 13291, 13292, 13293, 13297, 13298, 13299, 13300, 13302, 13303, 13304, 13305, 13306, 13307, 13308, 13309, 13310, 13311, 13312, 13313, 13314, 13315, 13316, 13317, 13318, 13319, 13321, 13322, 13323, 13324, 13327, 13328, 13329, 13330, 13331, 13334, 13335, 13336, 13338, 13339, 13342, 13343, 13345, 13347, 13351, 13354, 13355, 13360, 13362, 13364, 13367, 13368, 13370, 13372, 13375, 13376, 13379, 13380, 13383, 13384, 13386, 13388, 13392, 13394, 13395, 13398, 13399, 13400, 13402, 13403, 13404, 13405, 13406, 13407, 13408, 13409, 13412, 13414, 13416, 12801, 12808, 13419, 13424, 13425, 13426, 13427, 13428, 13431, 13432, 13434, 13435, 13436, 13438, 13439, 12291, 11351, 13079, 13081, 13085, 8993, 11284, 13089, 12291, 11351, 13218, 13238, 12952, 13346, 13350, 8993, 13371, 8993, 13387, 12291, 11351, 13346, 13350, 8993, 13440, 13218, 12952, 13371, 8993, 13218, 13371, 13346, 13387, 12291, 11351, 13441, 8993, 13442, 12904, 12907, 13256, 13443, 8993, 13444, 12291, 11351, 13079, 13081, 13089, 13085, 8993, 12877, 12291, 11351, 12869, 13350, 8993, 12871, 8993, 12872, 13256, 13005, 12291, 11351, 13103, 13106, 13084, 13085, 8993, 13078, 13079, 13081, 13087, 13089, 13101, 13105, 13070, 11284, 12875, 12877, 12884, 
12885, 12291, 11351, 11284, 13089, 13085, 8993, 13079, 13081, 12904, 13445, 8993, 13446, 12907, 13256, 12983, 13447, 8993, 13448, 13103, 13087, 13106, 13084, 13105, 13070, 13101, 13078, 12291, 11351, 13371, 8993, 13254, 12245, 13218, 13211, 12952, 13346, 13350, 8993, 13387, 12933, 12936, 12939, 12943, 13373, 13389, 13348, 8993, 13244, 12952, 12291, 11351, 13346, 13350, 8993, 13371, 8993, 13451, 13256, 11275, 12977, 11284, 13087, 13101, 13103, 13070, 13084, 13078, 13105, 13106, 12291, 11351, 13070, 11284, 13078, 13087, 13089, 13084, 13085, 13101, 13105, 13106, 13103, 11290, 12991, 11274, 11274, 12991, 12291, 11351, 13006, 11275, 12998, 11284, 11356, 11275, 11377, 8993, 13006, 13254, 13452, 13218, 13211, 13387, 13346, 8993, 13371, 12291, 11351, 13087, 13089, 13078, 13079, 13081, 13103, 13084, 13085, 8993, 13070, 11284, 13105, 13106, 13101, 13238, 13096, 11356, 13041, 11376, 8993, 13041, 11377, 8993, 13050, 11377, 8993, 13050, 11377, 8993, 12291, 11351, 13070, 11284, 13078, 13079, 13081, 13084, 13085, 8993, 13087, 13089, 13096, 13101, 13102, 13103, 13104, 8993, 13105, 13106, 11290, 12291, 11351, 8993, 13371, 8993, 13254, 13455, 13211, 13256, 13387, 8993, 13346, 13350, 8993, 13456, 13457, 13458, 13459, 13460, 13461, 13463, 13464, 13465, 13466, 13467, 13468, 13470, 13471, 13472, 13473, 13474, 13475, 13477, 13478, 13479, 13480, 13481, 13482, 13484, 13485, 13486, 13487, 13488, 13489, 13492, 13493, 13494, 13495, 13496, 13499, 13502, 13503, 13504, 13505, 13506, 13509, 13511, 13512, 13513, 13514, 13515, 13518, 13521, 13522, 13523, 13524, 13525, 13527, 13528, 13530, 13533, 13535, 13538, 12280, 13543, 13544, 13545, 12291, 11351, 13238, 13256, 13548, 13549, 13550, 13551, 13552, 13554, 13555, 13556, 13557, 13558, 13295, 12338, 13350, 13359, 13391, 13411, 13560, 13561, 13562, 13563, 13564, 13565, 13566, 13569, 13570, 13571, 13572, 13573, 13574, 13575, 13579, 13580, 13581, 13582, 13583, 13584, 13585, 13589, 13590, 13591, 13592, 13593, 13594, 13595, 13599, 13600, 13601, 13602, 
13603, 13604, 13605, 13608, 13609, 13610, 13611, 13612, 13613, 13614, 13618, 13619, 13620, 13621, 13622, 13623, 13624, 13627, 13628, 13631, 13632, 13634, 13635, 13636, 13637, 13638, 13641, 13639, 13642, 13644, 13645, 13648, 13649, 13651, 13652, 13655, 13656, 13659, 13660, 13663, 13664, 13667, 13668, 13671, 13669, 13672, 13673, 13674, 13675, 13678, 13679, 13682, 13685, 13683, 250, 251, 252, 253, 254, 255, 13825, 13827, 13828, 12819, 12822, 13834, 13838, 13839, 13840, 13848, 13851, 13854, 13861, 13864, 13867, 13873, 13876, 13888, 13895, 13896, 13926, 13068, 13943, 13953, 13964, 13970, 13976, 13980, 13986, 13992, 13995, 13999, 14002, 14006, 14011, 13216, 14016, 14020, 14024, 14027, 14030, 14035, 14038, 14044, 14045, 14049, 14054, 14058, 14064, 14070, 14074, 14078, 14081, 14084, 14088, 14092, 14095, 13326, 14100, 13333, 14108, 13344, 14113, 14115, 14118, 13369, 14124, 14126, 13385, 14133, 14134, 13401, 14140, 14147, 14148, 8992, 14149, 8993, 14152, 14157, 14159, 12184, 12183, 12182, 12190, 12187, 14010, 14163, 14164, 13078, 14165, 14166, 8924, 8992, 13101, 13084, 14167, 13934, 11376, 14168, 8992, 14169, 8992, 11285, 8993, 14170, 8992, 11286, 8993, 13103, 13907, 13899, 13910, 13991, 14010, 14171, 14172, 11350, 14173, 14174, 14175, 8993, 8992, 11356, 13900, 14176, 14177, 11377, 14178, 8992, 13884, 14179, 14123, 11376, 14180, 8992, 14181, 13901, 13902, 13910, 13991, 14010, 14182, 14183, 12836, 14184, 14185, 11377, 14186, 8992, 14188, 14189, 8992, 11356, 8993, 12840, 12841, 14190, 14123, 11376, 14191, 8992, 13969, 14192, 14193, 14194, 14195, 13907, 13910, 13991, 14010, 14196, 14197, 11350, 14199, 14201, 14202, 14203, 11356, 8993, 8992, 14205, 12184, 12183, 12182, 13846, 13985, 13991, 14010, 14207, 14208, 12852, 14209, 14210, 8924, 8992, 14211, 8992, 11286, 8993, 12858, 14212, 14123, 11376, 14213, 8992, 14214, 8992, 11254, 8993, 13907, 13899, 13910, 13991, 14010, 14215, 14216, 11350, 13900, 14217, 14218, 11377, 14219, 8992, 13884, 14220, 13934, 11376, 14221, 8992, 14222, 
14223, 11356, 8993, 8992, 14224, 12184, 12183, 12182, 12190, 12187, 13985, 14010, 14225, 14226, 14227, 14228, 14229, 14230, 13934, 11376, 14231, 8992, 14232, 14233, 14234, 8924, 8992, 14235, 14236, 8992, 11286, 8993, 14237, 14238, 14239, 14240, 8992, 11285, 8993, 14241, 14242, 8992, 11254, 8993, 14243, 14244, 12184, 12183, 12182, 12190, 12187, 13985, 13991, 14010, 14245, 14246, 14247, 8992, 11285, 8993, 12890, 14248, 8992, 11286, 8993, 12900, 14249, 13934, 11376, 14250, 8992, 12901, 14251, 14252, 8924, 8992, 12902, 14253, 14255, 14257, 14258, 11356, 8993, 8992, 14259, 14261, 14263, 14264, 14265, 14266, 14267, 14268, 14269, 14270, 13901, 13902, 13910, 13991, 14010, 14271, 14272, 11350, 14120, 14273, 14123, 11376, 14274, 8992, 14275, 14276, 14277, 14278, 14279, 8993, 8992, 11356, 14110, 14280, 14281, 11377, 14282, 8992, 14283, 12184, 12074, 12932, 14284, 12935, 14285, 12938, 14286, 12940, 12942, 14287, 13884, 14288, 14289, 14290, 11377, 14291, 8992, 14292, 14293, 8993, 8992, 11356, 12961, 12961, 12962, 13942, 13887, 13985, 13991, 14010, 14294, 14295, 12965, 14296, 14297, 11377, 14298, 8992, 14299, 14123, 11376, 14300, 8992, 14302, 8992, 8993, 11356, 12975, 12981, 14303, 14304, 12978, 12979, 14305, 12981, 12983, 13084, 13101, 13103, 13078, 14306, 14307, 14308, 14309, 14310, 14311, 14312, 14313, 12184, 12183, 12182, 12190, 12187, 13985, 13991, 14010, 14314, 14315, 14316, 14317, 8992, 11285, 8993, 14318, 14319, 14320, 8992, 11286, 8993, 14321, 14322, 13934, 14323, 14324, 14325, 14326, 13899, 12990, 14327, 14328, 13900, 14329, 14330, 14331, 13907, 13899, 13910, 13991, 14010, 14332, 14333, 11350, 12996, 14334, 14335, 12997, 14336, 12302, 13000, 14337, 14338, 8993, 8992, 13900, 13003, 14339, 14340, 14341, 8992, 13005, 14342, 13901, 13902, 14343, 14345, 14346, 8993, 8992, 11356, 13017, 14347, 13022, 14348, 11377, 14349, 8992, 13022, 14350, 12184, 12183, 12182, 12190, 12187, 13985, 13991, 14010, 14351, 14352, 14353, 14354, 8992, 11286, 8993, 14355, 14356, 14357, 8924, 8992, 
14358, 14359, 14360, 13934, 11376, 14361, 8992, 14362, 14363, 8992, 11285, 8993, 14364, 14365, 14366, 13907, 13975, 13910, 12302, 13244, 14367, 14368, 14369, 8993, 8992, 14120, 13373, 14370, 14371, 14372, 8992, 14373, 14374, 14375, 8992, 14110, 13348, 14376, 14377, 14378, 8992, 14379, 14380, 14381, 8992, 12184, 12183, 12182, 12190, 12187, 13985, 13991, 14010, 14382, 14383, 14384, 14385, 8992, 11285, 8993, 14386, 14387, 14388, 8924, 8992, 14389, 14390, 13934, 11376, 14391, 8992, 14392, 14393, 8992, 11286, 8993, 14394, 8992, 11377, 8993, 14395, 14396, 14397, 14398, 8925, 14399, 14400, 14401, 14402, 8992, 11377, 8993, 13942, 13975, 13985, 13991, 14010, 14403, 14404, 11350, 11377, 14405, 8992, 13124, 14406, 14123, 11376, 14407, 8992, 14408, 14410, 14411, 8992, 11356, 8993, 13143, 14412, 11377, 14413, 8992, 13150, 14414, 14415, 11377, 14416, 8992, 14417, 14418, 14423, 14424, 14429, 14430, 14435, 14436, 14437, 14438, 14441, 14442, 14448, 14451, 14454, 14457, 14459, 14460, 14463, 14466, 14470, 14472, 14474, 14476, 14477, 13969, 13975, 13985, 13991, 14010, 14480, 14481, 11350, 13211, 12302, 14482, 13244, 13254, 14483, 8993, 8992, 14485, 14489, 14490, 14053, 14063, 14063, 13290, 13288, 14494, 14077, 14073, 14077, 14495, 14105, 8993, 8992, 14110, 13348, 14496, 8993, 14497, 8993, 8992, 14120, 13373, 14123, 8993, 14128, 13389, 14498, 8993, 8992, 14144, 14499, 8993, 8992, 14500, 14503, 14506, 14507, 14510, 14513, 14514, 14517, 14520, 14521, 14524, 14527, 14528, 14531, 14534, 14535, 14538, 14541, 14542, 14545, 14548, 14550, 13421, 13422, 14552, 13429, 13437, 13559, 14559, 14473, 14475, 14560, 14422, 14428, 14434, 14440, 14446, 14562, 14564, 14566, 14568, 14570, 14572, 14576, 13553, 13559, 14580, 14582, 14585, 14554, 14578, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 
74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 14594, 14601, 14602, 14603, 14604, 14605, 14606, 14607, 14608, 14612, 14614, 14615, 14616, 14617, 14618, 14619, 14620, 14621, 14622, 14623, 14624, 14625, 14626, 14628, 14629, 14630, 14631, 14632, 14633, 14634, 14636, 14637, 14638, 14639, 14640, 14641, 14642, 14643, 14644, 14645, 14646, 14647, 14648, 14650, 14663, 8924, 14667, 8925, 14669, 14595, 14596, 14670, 14673, 14674, 14675, 14676, 14677, 14678, 11350, 13106, 14681, 14684, 14685, 14686, 14687, 14689, 14690, 14692, 13070, 14694, 14695, 14696, 13105, 13087, 14698, 14699, 14700, 14701, 14702, 14703, 14704, 14705, 14651, 14706, 14709, 14713, 14714, 14715, 14716, 14719, 14721, 14722, 14724, 14725, 14727, 14729, 14730, 14731, 14732, 14651, 14733, 11350, 14736, 14739, 14741, 14744, 14745, 14746, 14747, 14748, 14750, 14751, 14753, 14754, 14759, 14760, 14761, 14651, 14762, 14765, 14198, 14770, 14771, 14772, 14204, 14774, 14775, 14776, 14777, 14778, 14779, 14651, 14780, 11350, 14783, 14786, 14787, 14789, 14790, 14791, 14792, 12859, 14794, 14795, 14797, 14799, 14800, 14801, 14802, 14803, 14804, 14805, 14651, 14806, 14809, 14810, 14813, 14815, 14816, 
14818, 14819, 14821, 14824, 14825, 14826, 14828, 14829, 14830, 14831, 14832, 14833, 14651, 14834, 11350, 14841, 14842, 14844, 14848, 14849, 14852, 14853, 14854, 14859, 14860, 14861, 14864, 14865, 14866, 14869, 14870, 14871, 14872, 14873, 14874, 14875, 14651, 14876, 11350, 14880, 14881, 14882, 14883, 14885, 14886, 14887, 14888, 14890, 14891, 14893, 14894, 14897, 14898, 14899, 14254, 14904, 14905, 14906, 14260, 14917, 14918, 14919, 14920, 14651, 14921, 14924, 14925, 14927, 14928, 14930, 14936, 14937, 14938, 14939, 14942, 14944, 14946, 14947, 14948, 14950, 14952, 14954, 14955, 14957, 14961, 14963, 14966, 14967, 14968, 14969, 14970, 14971, 14972, 14973, 14974, 14975, 14651, 14976, 11350, 14979, 14982, 14984, 12968, 14986, 14987, 14989, 14991, 14992, 14993, 14994, 14995, 12980, 12976, 14998, 14999, 15001, 12980, 15002, 12982, 13087, 15003, 15004, 15005, 13106, 15006, 13105, 13070, 15015, 15016, 15017, 15018, 15019, 15020, 15021, 15022, 11350, 15027, 15028, 15029, 15033, 15034, 15035, 15038, 15043, 15044, 15047, 15051, 15052, 15053, 15054, 14651, 15055, 15058, 15059, 12995, 15062, 15064, 12999, 15065, 15068, 15069, 15070, 15071, 13002, 15075, 15073, 15076, 13004, 15078, 15079, 15083, 15084, 15085, 15086, 15088, 15090, 15092, 15093, 15095, 15096, 15097, 15098, 15099, 15100, 15101, 15102, 11350, 15107, 15108, 15109, 15113, 15114, 15118, 15119, 15121, 15124, 15125, 15126, 15130, 15131, 15132, 15133, 15134, 13218, 15138, 15139, 15140, 15141, 13371, 15145, 15143, 15149, 15147, 15150, 15151, 13346, 15155, 15153, 15159, 15157, 15160, 15161, 15162, 15163, 15164, 15165, 15166, 15167, 11350, 15172, 15173, 15174, 15178, 15179, 15182, 15183, 15185, 15188, 15189, 15190, 15192, 15193, 15194, 15199, 15204, 15205, 15206, 15207, 15208, 15209, 15210, 14651, 15211, 15214, 15215, 15217, 15218, 15220, 15221, 15223, 15227, 15228, 15229, 15230, 15232, 15234, 15235, 15238, 15240, 15242, 15244, 15246, 15252, 15253, 15255, 15258, 15260, 15265, 15266, 15267, 15268, 15269, 15270, 15273, 15274, 
13218, 15275, 15277, 15278, 11356, 15280, 15281, 15282, 15284, 15285, 15286, 15287, 15288, 15289, 15291, 15292, 15293, 14649, 14651, 15295, 11377, 15296, 15297, 15298, 15299, 13346, 11377, 15301, 8992, 11377, 15303, 15304, 15305, 15306, 13371, 15307, 11376, 15308, 8992, 15309, 15310, 13387, 11377, 15312, 15313, 14136, 14142, 15314, 11378, 15316, 15317, 15318, 15319, 15321, 15322, 15324, 15325, 15327, 15328, 15330, 15331, 15333, 15334, 15336, 15337, 14150, 14471, 15340, 15341, 15343, 14672, 14671, 15344, 15345, 13598, 12582, 13617, 12590, 15347, 15348, 15198, 13454, 13453, 14840, 14846, 13454, 13453, 15196, 13454, 13453, 15198, 13454, 13453, 13454, 13453, 15037, 13454, 13453, 15196, 15176, 13454, 13453, 15196, 15198, 13454, 13453, 15037, 15176, 13454, 13453, 13454, 13453, 15176, 13454, 13453, 15037, 15196, 13454, 13453, 15198, 13454, 13453, 15111, 15198, 15117, 13454, 13453, 13454, 13453, 15196, 13454, 13453, 15176, 15181, 13454, 13453, 15196, 15198, 13454, 13453, 15350, 15351, 15352, 15250, 15248, 15353, 15354, 14452, 14458, 14464, 14469, 14471, 14473, 14475, 13540, 13541, 13542, 13546, 12549, 12550, 12551, 15362, 15363, 13578, 13578, 13588, 13598, 12582, 13617, 12590, 15339, 15342, 15367, 15364, 15365, 15346, 15349, 15355, 15356, 15357, 15358, 15359, 15360, 15365, 15361, 15368, 15364, 15365, 15366, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 
15661, 15663, 13831, 15665, 15666, 13967, 15668, 13973, 15671, 14005, 11374, 12286, 11372, 13998, 15674, 15673, 15675, 15677, 15682, 15684, 15685, 15688, 15689, 15690, 13882, 13883, 13983, 13909, 14061, 13989, 14103, 15698, 14005, 12286, 11374, 13998, 11372, 15699, 14708, 14014, 13236, 13232, 15701, 15705, 15709, 13882, 13883, 13983, 13909, 14061, 13989, 14103, 15715, 13998, 11374, 11372, 12065, 14005, 15717, 15716, 15719, 12838, 12837, 12839, 12895, 13105, 15721, 15727, 13844, 13983, 13909, 14061, 13989, 14103, 15733, 14005, 11372, 12286, 13998, 11374, 15734, 14764, 15736, 14033, 15737, 15740, 13844, 15741, 13845, 13983, 13847, 14061, 13989, 14103, 15747, 12286, 14005, 13998, 11372, 11374, 15749, 15748, 15751, 12857, 12856, 15753, 15757, 15759, 12860, 12928, 12868, 12864, 15761, 13897, 13898, 13983, 13909, 14061, 13989, 14103, 15768, 11372, 14005, 12286, 11374, 13998, 15769, 14808, 15772, 15776, 14033, 15778, 13918, 15781, 13973, 15784, 13983, 13924, 14103, 15787, 11372, 12286, 14005, 11374, 13998, 15789, 15788, 15791, 15793, 15795, 15798, 15801, 13967, 15804, 13973, 15807, 13983, 13924, 14061, 13925, 14103, 15811, 13998, 14005, 12065, 11374, 11372, 15813, 15812, 12889, 15814, 12894, 12895, 12899, 15818, 15823, 15826, 15829, 15830, 15833, 13882, 13883, 13983, 13909, 14061, 13989, 14103, 15838, 14005, 11372, 11374, 13998, 12065, 15839, 14923, 15843, 13227, 13871, 14014, 15845, 15849, 15851, 12931, 12921, 12929, 12928, 13106, 12931, 12930, 13882, 13883, 15859, 15861, 12960, 12955, 12957, 12956, 12958, 12960, 12959, 13897, 13898, 13983, 13979, 14061, 13989, 14103, 15871, 12286, 11372, 11374, 14005, 13998, 15873, 15872, 15875, 15877, 15879, 12970, 12969, 12972, 12971, 15881, 13897, 13898, 13983, 13909, 15886, 15887, 15891, 15893, 15894, 15898, 15900, 15901, 15902, 15905, 13983, 13924, 14061, 13925, 11374, 12286, 14005, 11372, 13998, 15910, 15909, 15911, 15914, 13983, 13979, 13897, 13898, 13983, 13909, 14061, 13989, 14103, 15925, 11372, 11374, 13998, 14005, 12286, 
15926, 15057, 15929, 14033, 13236, 13232, 13227, 13030, 14014, 15932, 15067, 15938, 15940, 15942, 13012, 13009, 13010, 13012, 13011, 15945, 15950, 13967, 15953, 13973, 15956, 13983, 13924, 14061, 13925, 11374, 14005, 13998, 11372, 12286, 15961, 15960, 15962, 15965, 15968, 15970, 13983, 13909, 13227, 13030, 14033, 13236, 13232, 14014, 15978, 15137, 15983, 15985, 15987, 15990, 15992, 15994, 13918, 15995, 13973, 15998, 13983, 13924, 14061, 13925, 11374, 11372, 12286, 13998, 14005, 16003, 16002, 16004, 16007, 16010, 16012, 16015, 16018, 16019, 13967, 13973, 13983, 13979, 14061, 13989, 14103, 16026, 11372, 11374, 12286, 14005, 13998, 16027, 15213, 16029, 16033, 13135, 13133, 13135, 13134, 16035, 16039, 16042, 13967, 13973, 13983, 13979, 14061, 13989, 11372, 11374, 14005, 13998, 12286, 16057, 15272, 13252, 13210, 14014, 16060, 13227, 14019, 13236, 13232, 14033, 13252, 13248, 16064, 14052, 14048, 14061, 14057, 14067, 16072, 14087, 11372, 12336, 14091, 11374, 12336, 11372, 14087, 14091, 11374, 14087, 11374, 14091, 11372, 12336, 14098, 16077, 14103, 16078, 16080, 16085, 16086, 16088, 16089, 16094, 16096, 16098, 16101, 16102, 16105, 14139, 16106, 16108, 16112, 16125, 16126, 16067, 16068, 15667, 16130, 16131, 16068, 16118, 16134, 16120, 16135, 16122, 16136, 16124, 16137, 14682, 15196, 14688, 15198, 14717, 14723, 14728, 14737, 15087, 14749, 14756, 14757, 14758, 14767, 14784, 15198, 15196, 14811, 14817, 14827, 16140, 16141, 16142, 16143, 16144, 16145, 16146, 16147, 16148, 16149, 14862, 14862, 14867, 14868, 15198, 14889, 14895, 15196, 14900, 14907, 16150, 16151, 16152, 16153, 16154, 16155, 16156, 16157, 16158, 16159, 14926, 14940, 14945, 14949, 14953, 14951, 14956, 14956, 14958, 14959, 14960, 15094, 15089, 15087, 14980, 15231, 15037, 15196, 15198, 15176, 16160, 16161, 16162, 16163, 16164, 16165, 16166, 16167, 16168, 16169, 16170, 16171, 16172, 16173, 16174, 16175, 16176, 16177, 16178, 16179, 15087, 15089, 15094, 16180, 16181, 16182, 16183, 16184, 16185, 16186, 16187, 16188, 
16189, 16190, 16191, 16192, 16193, 16194, 16195, 16196, 16197, 16198, 16199, 15219, 15231, 15236, 16044, 16045, 16046, 16203, 16204, 16047, 16048, 16207, 16049, 16208, 16050, 16209, 16051, 16210, 16116, 16211, 16067, 16212, 16068, 16213, 16112, 16214, 16215, 16216, 16118, 16217, 16120, 16218, 16122, 16219, 16124, 16220, 16067, 16068, 16112, 16223, 16114, 16224, 16116, 16225, 16118, 16226, 16120, 16227, 16122, 16228, 16124, 16229, 16230, 16231, 16233, 16234, 16235, 16236, 16237, 16238, 16239, 16240, 16241, 16242, 16243, 16244, 16246, 16247, 16248, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 16384, 16385, 16386, 16389, 16390, 16391, 16393, 16394, 16395, 16396, 16397, 14680, 16402, 16404, 16407, 16408, 16409, 16410, 16411, 16412, 16413, 16414, 16416, 16417, 16418, 16419, 16420, 16421, 16423, 16424, 16425, 16426, 16427, 16428, 16429, 16430, 16431, 16432, 16433, 16434, 16435, 16437, 16438, 16439, 16440, 16441, 14735, 16444, 16445, 16446, 16447, 16448, 16449, 16450, 16451, 16452, 16453, 16454, 16455, 16456, 16457, 16459, 16460, 16461, 16462, 16463, 16464, 16467, 16468, 16470, 16471, 16472, 16473, 16474, 16475, 16476, 16477, 16479, 16480, 16481, 16482, 16483, 14782, 16487, 16488, 16489, 16491, 16492, 16493, 16494, 16495, 16496, 16497, 16498, 16499, 16500, 16501, 16502, 16503, 16505, 16506, 16507, 16508, 16509, 16510, 16512, 16513, 16514, 16515, 16516, 16517, 16518, 16520, 16521, 16522, 16524, 16525, 16526, 16527, 16528, 14836, 
16531, 16533, 16534, 16535, 16536, 16537, 16538, 16540, 16541, 16542, 16543, 16544, 16546, 16547, 16548, 16549, 16550, 14878, 16553, 16554, 16555, 16556, 16557, 16558, 16559, 16562, 16564, 16565, 16566, 16567, 16568, 16569, 16570, 16572, 16573, 16574, 16575, 16576, 16577, 16579, 16580, 16581, 16582, 16583, 16584, 16586, 16587, 16588, 16589, 16590, 16591, 16592, 16593, 16594, 16595, 16596, 16597, 16598, 16599, 16600, 16601, 16602, 16603, 16604, 16605, 16606, 16607, 16608, 16609, 16610, 16612, 16613, 16614, 16615, 16616, 14978, 16619, 16621, 16622, 16623, 16624, 16625, 16626, 16627, 16628, 16629, 16630, 15885, 15890, 15892, 16639, 16641, 16642, 16643, 16644, 16645, 16646, 16647, 16648, 16649, 15024, 16652, 16653, 16654, 16655, 16656, 16657, 16658, 16659, 16660, 16661, 16662, 16664, 16665, 16666, 16667, 16668, 16669, 15928, 16672, 16673, 16674, 16675, 16676, 16677, 16679, 15937, 15941, 16683, 16684, 16685, 16686, 16687, 16688, 16689, 16690, 16691, 16692, 16694, 16695, 16696, 16697, 16698, 16699, 16700, 16701, 16702, 15104, 16705, 16707, 16708, 16709, 16710, 16711, 16712, 16713, 16714, 16715, 16716, 16718, 15982, 15989, 16725, 16726, 16727, 16729, 16730, 16731, 16732, 16733, 16734, 16735, 16736, 16737, 15169, 16740, 16742, 16743, 16744, 16746, 16747, 16748, 16749, 16750, 16751, 16752, 16753, 16755, 16756, 16757, 16758, 16759, 16760, 16762, 16763, 16764, 16765, 16766, 16767, 16768, 16769, 16770, 16771, 16772, 16773, 16774, 16775, 16776, 16777, 16778, 16779, 16780, 16781, 16782, 16784, 16785, 16786, 16788, 16789, 16790, 16791, 16792, 16793, 16794, 16795, 16796, 16797, 16798, 16799, 16800, 16802, 16803, 16804, 16805, 16806, 16807, 16808, 16809, 16810, 16811, 16812, 16813, 16814, 16815, 16816, 16817, 16819, 16821, 16084, 16823, 16825, 16093, 16827, 16100, 16830, 16832, 16834, 16835, 16838, 16839, 16840, 16841, 16843, 16107, 16844, 16846, 16848, 16850, 13454, 13453, 16852, 16401, 16853, 16854, 13454, 13453, 13454, 13453, 16855, 16856, 16857, 16858, 16859, 16860, 16861, 
16862, 16863, 16864, 16466, 16865, 16469, 16866, 16486, 16867, 16868, 14793, 16869, 16870, 16871, 16873, 16532, 16877, 16880, 16882, 16883, 16884, 16885, 16886, 16887, 16888, 16560, 16889, 16890, 16561, 16891, 16563, 16893, 16895, 16898, 16902, 16903, 16904, 16905, 16906, 16907, 16908, 16909, 16910, 16911, 16912, 16913, 16914, 16915, 16916, 14985, 16917, 16723, 13454, 13453, 16918, 16919, 16920, 13454, 13453, 16921, 13454, 13453, 16922, 16926, 16930, 16932, 16935, 16939, 16720, 16723, 16724, 16721, 16720, 16724, 16681, 16721, 16942, 16943, 16944, 16945, 16706, 16950, 16952, 16720, 16721, 16723, 16724, 16955, 16741, 16959, 16745, 16963, 16965, 16966, 16967, 16968, 16969, 16970, 16971, 16973, 16974, 16976, 16978, 16980, 16982, 16984, 16986, 16107, 16988, 16992, 16994, 16996, 16998, 17000, 17001, 16107, 17002, 17004, 17006, 17008, 17010, 17012, 17014, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 17158, 17160, 16399, 
17169, 17171, 17174, 17176, 17181, 17188, 17190, 17193, 17195, 16443, 17200, 17203, 17208, 17210, 17213, 17215, 17224, 17226, 17229, 17231, 16485, 17235, 17239, 17241, 17246, 17248, 17251, 17253, 17264, 17267, 17269, 16530, 17280, 17282, 17285, 17287, 16552, 17301, 17303, 17306, 17308, 17313, 17318, 17320, 17323, 17329, 17331, 17334, 17338, 17340, 17343, 17345, 16618, 17351, 17353, 17358, 17364, 17366, 17368, 17370, 16651, 17376, 17380, 17382, 17385, 17387, 17393, 17395, 17401, 17404, 17411, 17413, 17415, 17417, 16704, 17424, 17426, 17429, 17438, 17440, 17442, 17444, 16739, 17455, 17457, 17460, 17462, 17468, 17470, 17477, 17479, 17481, 17483, 17487, 17490, 17492, 17495, 17497, 17498, 17500, 17503, 17505, 17508, 17510, 17513, 17515, 17520, 17522, 17523, 17525, 17527, 17529, 16801, 16818, 16820, 16062, 16787, 17153, 17152, 17521, 17524, 16054, 16053, 16818, 16388, 16387, 17524, 16787, 16062, 17521, 16054, 16053, 16818, 16820, 17521, 16787, 16062, 17524, 16801, 16818, 16820, 17536, 17528, 16392, 17156, 16818, 16754, 17541, 17542, 17544, 17164, 17547, 17548, 17165, 17549, 17550, 17166, 15695, 15694, 16818, 16415, 14710, 17183, 17184, 17185, 15712, 15711, 16818, 16436, 17199, 14742, 17205, 17206, 15974, 15729, 14755, 15918, 15730, 16818, 16458, 17561, 14768, 17220, 17563, 15744, 17222, 16818, 16478, 17565, 17237, 17568, 17238, 17243, 15765, 15764, 16818, 16504, 17257, 17258, 14822, 17260, 16519, 17262, 16818, 16523, 17273, 17573, 17274, 17275, 17276, 16539, 17278, 16818, 16545, 13454, 13453, 17292, 13454, 13453, 13454, 13453, 17296, 17297, 17583, 15918, 15973, 17586, 14902, 17298, 17588, 15835, 15834, 16818, 16571, 17312, 14933, 17316, 17317, 16023, 16585, 17596, 15918, 15973, 17327, 14964, 17328, 15081, 15868, 15867, 16818, 16611, 17349, 17607, 17350, 17355, 15918, 15973, 17360, 17609, 15888, 16632, 17432, 17361, 17362, 17610, 17611, 17615, 17616, 17618, 17619, 16640, 17363, 16818, 16754, 17374, 17375, 17467, 15918, 15973, 16717, 15919, 17432, 17626, 17434, 17627, 
17628, 17629, 15922, 15921, 16818, 16663, 17391, 17630, 17631, 15930, 16678, 17398, 17399, 17632, 17400, 17633, 15944, 15943, 15081, 17406, 17407, 16693, 17409, 16818, 16754, 17421, 17638, 17422, 17423, 15974, 15973, 16717, 15977, 17432, 17433, 17641, 17642, 17434, 17643, 17644, 16728, 17436, 16818, 16754, 17448, 17646, 17449, 17450, 17451, 17648, 17452, 16023, 16022, 16818, 16754, 17466, 17467, 17472, 17473, 17474, 16054, 16053, 16818, 16820, 17524, 16787, 16062, 17521, 16801, 16818, 16820, 17524, 17521, 17665, 17528, 16054, 16053, 16818, 16820, 17521, 17524, 16062, 16787, 16801, 16818, 16820, 17521, 17524, 17526, 17673, 17528, 16837, 16836, 16837, 16981, 16128, 16127, 16133, 16132, 16129, 16851, 16849, 16847, 16845, 16139, 16138, 16201, 16202, 16200, 16206, 16205, 16983, 16975, 16979, 16977, 16983, 16981, 16987, 16985, 16991, 16989, 16991, 16990, 16999, 16997, 16995, 16993, 16222, 16221, 17007, 17003, 17007, 17005, 17015, 17013, 17011, 17009, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 17920, 17925, 17930, 17937, 17941, 17949, 17952, 17957, 17962, 17966, 17973, 17981, 17987, 17995, 18003, 18008, 18014, 18023, 18025, 18027, 18035, 16056, 16055, 18036, 18037, 18038, 16061, 16063, 16059, 18039, 15276, 18040, 18041, 18042, 18030, 18043, 18032, 16056, 16055, 18044, 18045, 18046, 18047, 18048, 18049, 18032, 16059, 16063, 18050, 16061, 18051, 15276, 18020, 18052, 18030, 16056, 16055, 18053, 18054, 18055, 18056, 18057, 18030, 16063, 16061, 16059, 18058, 15276, 18059, 18020, 18060, 18032, 18061, 16071, 16070, 16069, 18062, 18063, 18065, 18034, 15908, 15907, 18066, 18067, 18068, 18069, 
18070, 18073, 18074, 18076, 18077, 18079, 15697, 15696, 18080, 18081, 18082, 18083, 14711, 18084, 18085, 18086, 18087, 15714, 15713, 18088, 18089, 18090, 18091, 18092, 15082, 18093, 14187, 15080, 18094, 18095, 16025, 15975, 18096, 18097, 18098, 15732, 15731, 18099, 18100, 18101, 18102, 18104, 18105, 15746, 15745, 18107, 18108, 18109, 18110, 13454, 13453, 18112, 18114, 13454, 13453, 13454, 13453, 18115, 15767, 15766, 18116, 18117, 18118, 18119, 18120, 18121, 18122, 18123, 15908, 15786, 18124, 18125, 18126, 18127, 18128, 18130, 18131, 18132, 14867, 15810, 15809, 18133, 18134, 18135, 18136, 18137, 18138, 18139, 18140, 18141, 18142, 18143, 18144, 18145, 16025, 15975, 18147, 18148, 18150, 18151, 15837, 15836, 18153, 18154, 18155, 18156, 18157, 14934, 18158, 14932, 14931, 18159, 18160, 15908, 16024, 18161, 18162, 14953, 13449, 16025, 15975, 18164, 18165, 18166, 18167, 18168, 15082, 18169, 14344, 15080, 15870, 15869, 18170, 18171, 18172, 18173, 18174, 18176, 15225, 14301, 15224, 18177, 16025, 15975, 18178, 18179, 18180, 15889, 18182, 16063, 14997, 18183, 18184, 18185, 18186, 18187, 18189, 18191, 15908, 15907, 18193, 18194, 18195, 18196, 18197, 18198, 18199, 16025, 15975, 18200, 18201, 18202, 15135, 18203, 16059, 16063, 15976, 18204, 18206, 15924, 15923, 18210, 18211, 18212, 18213, 18214, 18217, 15931, 15933, 15063, 18218, 18219, 18220, 18222, 16025, 15975, 18224, 18225, 15082, 18226, 14344, 15080, 18227, 18228, 15959, 15958, 18229, 18230, 18231, 18232, 18233, 18235, 18236, 16025, 15975, 18237, 18238, 18239, 15135, 16059, 18240, 16063, 15976, 18241, 18242, 18245, 16001, 16000, 18248, 18249, 18250, 18251, 18252, 18254, 18255, 18256, 18258, 16025, 16024, 18259, 18260, 18261, 18262, 18263, 18264, 15225, 14409, 15224, 18265, 18266, 18267, 16056, 16055, 18268, 18269, 18270, 18271, 18272, 18032, 18273, 15276, 18274, 16061, 16059, 16063, 18020, 18275, 18030, 18276, 16071, 16070, 16069, 18277, 18278, 18279, 18032, 18280, 18030, 18282, 18034, 16056, 16055, 18283, 18284, 18285, 
18286, 18287, 18030, 18288, 18032, 16061, 18289, 15276, 16063, 18290, 16059, 18020, 18291, 16071, 16070, 16069, 18292, 18293, 18029, 18294, 18030, 18031, 18295, 18032, 18296, 18033, 18298, 18034, 18299, 18300, 18301, 18302, 18303, 18304, 18305, 18306, 18307, 18308, 18309, 18310, 18311, 18312, 18313, 18072, 18257, 18106, 18103, 18257, 18111, 18257, 18129, 18257, 18253, 18257, 18146, 18152, 18149, 18253, 18257, 18253, 18257, 18253, 18257, 18257, 18253, 18253, 18257, 18257, 18234, 18257, 18253, 18314, 18315, 18316, 18317, 18318, 18319, 18320, 18321, 18322, 18323, 18324, 18325, 18326, 18327, 18328, 18329, 18330, 18331, 18332, 18333, 18334, 18335, 18336, 18337, 18338, 18339, 18340, 18341, 18342, 18343, 18344, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18432, 18433, 18434, 18435, 18436, 18437, 18438, 18439, 18440, 18442, 18443, 18444, 18445, 18446, 18447, 18448, 18449, 18450, 18451, 18453, 18454, 18458, 18459, 18460, 18462, 18463, 18466, 18468, 18469, 18470, 18471, 18474, 18477, 18478, 18479, 18481, 18483, 18484, 18486, 18487, 18488, 18489, 18494, 18495, 18496, 18497, 18499, 18501, 18503, 18505, 18506, 18507, 18064, 18511, 18512, 18513, 18514, 18524, 18525, 18526, 18530, 18535, 18536, 18537, 18542, 18544, 18545, 18548, 18549, 18550, 18553, 18554, 18555, 18561, 18562, 18563, 18567, 18568, 18571, 18572, 18573, 18574, 18576, 18577, 18578, 18586, 18587, 18588, 18596, 18597, 18598, 18599, 18603, 18606, 18608, 18612, 18613, 18614, 18618, 18619, 18620, 18625, 18627, 18628, 18631, 18632, 18633, 18635, 13450, 18636, 18637, 18638, 18639, 18644, 18646, 18647, 18648, 18649, 18650, 18656, 18657, 18658, 18660, 18661, 18662, 18665, 18667, 18668, 18676, 18677, 18678, 18685, 18686, 18687, 18690, 18692, 18693, 18694, 18697, 18698, 18699, 18705, 18706, 18707, 18712, 18713, 18714, 18716, 18718, 18719, 18722, 18723, 18724, 18731, 18732, 
18733, 18736, 18737, 18739, 18740, 18744, 18745, 18746, 18755, 18756, 18757, 18763, 18764, 18765, 18769, 18770, 18771, 18776, 18778, 18780, 18781, 18782, 18783, 18785, 18787, 18788, 18789, 18793, 18795, 18281, 18797, 18798, 18799, 18800, 18805, 18807, 18808, 18810, 18811, 18813, 18814, 18816, 18817, 18818, 18821, 18823, 18824, 18826, 18828, 18297, 18830, 18831, 18833, 18835, 18837, 18840, 18842, 18844, 18521, 18519, 18761, 18754, 18767, 18846, 18847, 18523, 18753, 18534, 18767, 18761, 18533, 18761, 18541, 18767, 18547, 18720, 18721, 18761, 18762, 18767, 18848, 18560, 18762, 18849, 18850, 18767, 18851, 18570, 18761, 18583, 18767, 18585, 18761, 18582, 18761, 18852, 18753, 18767, 18593, 18594, 18592, 18754, 18853, 18854, 18855, 18762, 18761, 18767, 18856, 18611, 18761, 18857, 18767, 18858, 18617, 18767, 18859, 18754, 18683, 18860, 18684, 18761, 18767, 18682, 18861, 18753, 18761, 18624, 18767, 18630, 18767, 18862, 18863, 18761, 18762, 18767, 18762, 18643, 18641, 18761, 18767, 18721, 18762, 18761, 18654, 18767, 18655, 18761, 18181, 18216, 18223, 18215, 18864, 18753, 18761, 18682, 18684, 18683, 18767, 18865, 18754, 18753, 18866, 18683, 18767, 18682, 18761, 18867, 18754, 18684, 18868, 18761, 18684, 18754, 18683, 18682, 18869, 18753, 18767, 18209, 18207, 18208, 18205, 18223, 18215, 18216, 18221, 18721, 18767, 18762, 18761, 18730, 18753, 18729, 18728, 18870, 18767, 18871, 18754, 18761, 18246, 18247, 18244, 18243, 18752, 18767, 18754, 18751, 18753, 18872, 18873, 18750, 18761, 18762, 18761, 18767, 18768, 18874, 18876, 18879, 18881, 18883, 18885, 18887, 18889, 18891, 18893, 18895, 18897, 18899, 18901, 18903, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 
206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18452, 17486, 18457, 18966, 18461, 18972, 17486, 18977, 18480, 18482, 18983, 17486, 18987, 18989, 18990, 18504, 18994, 16074, 16076, 15290, 15264, 16075, 18998, 17922, 19001, 17179, 19004, 19005, 17932, 19008, 19009, 19011, 19014, 17218, 19017, 17943, 19020, 19022, 19024, 19026, 17256, 19029, 17954, 19033, 17959, 19039, 19042, 17311, 19045, 19046, 19048, 19052, 19054, 19057, 19058, 19060, 17975, 19063, 19066, 19069, 19070, 19072, 17983, 19075, 18689, 18691, 19080, 19082, 17390, 18704, 19086, 19088, 19091, 19092, 19094, 17997, 19097, 18735, 19101, 19102, 19104, 18005, 19107, 17465, 19110, 19113, 17486, 18777, 18779, 19119, 18786, 19124, 16075, 15264, 16076, 16074, 15290, 19130, 17486, 19135, 19136, 18812, 18815, 19141, 15290, 16076, 16075, 16074, 15294, 18971, 19147, 18970, 19143, 19147, 18982, 19143, 18976, 18986, 19147, 18992, 19143, 19153, 19145, 19122, 19143, 19116, 19147, 18997, 19154, 19157, 19158, 19159, 19160, 19161, 19164, 19165, 19166, 19167, 19168, 19169, 19170, 19171, 19172, 19173, 19174, 19175, 19176, 19177, 19178, 19180, 19181, 19184, 19186, 19187, 19188, 19189, 19190, 19191, 19192, 19193, 19195, 19196, 19197, 19198, 19199, 19200, 18753, 18595, 19204, 19205, 19206, 18683, 19202, 18754, 18610, 19208, 19209, 19211, 18605, 18753, 19213, 19214, 19216, 19217, 19219, 19220, 19221, 19222, 19224, 19225, 19226, 19227, 19228, 19229, 19232, 19233, 19230, 19234, 19235, 19236, 19237, 19238, 19239, 19240, 19241, 19242, 19243, 19244, 19245, 19246, 19247, 19248, 19249, 19250, 19252, 19253, 19254, 19255, 19256, 19257, 19259, 19260, 19262, 19263, 19264, 19265, 19267, 19268, 19270, 19271, 19272, 19273, 19274, 19276, 19277, 19278, 19279, 19280, 19281, 19282, 19283, 19284, 19285, 19286, 19287, 19288, 19289, 19290, 19291, 19292, 
19293, 19295, 19297, 19298, 19299, 19300, 19301, 19302, 19303, 19304, 19305, 19306, 19307, 19310, 19311, 19312, 19313, 19314, 19315, 19316, 19147, 19122, 19143, 19116, 19127, 19145, 19147, 19126, 19129, 19143, 19324, 19143, 19147, 19134, 19133, 19144, 19143, 19145, 19147, 19149, 19146, 19329, 17020, 17028, 17016, 17017, 17019, 17018, 17021, 17025, 17024, 17023, 17026, 17028, 17027, 16245, 17031, 17030, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 19456, 19457, 19458, 19461, 19462, 19463, 19466, 19467, 19468, 19471, 19473, 19474, 19475, 19476, 19477, 19478, 19479, 19480, 19481, 19483, 19484, 19485, 19487, 19488, 19489, 19490, 19491, 19495, 19496, 19497, 19498, 19499, 19500, 19501, 19502, 19503, 19504, 19506, 19051, 19508, 19509, 19511, 19512, 19513, 19514, 19515, 19517, 19518, 19519, 19520, 19523, 19524, 19525, 19527, 19528, 19530, 19531, 19532, 19533, 19536, 19537, 19538, 19539, 19540, 19541, 19542, 19543, 19546, 19548, 19549, 19550, 19551, 19552, 19553, 19554, 19555, 19558, 19560, 19561, 19562, 19563, 19564, 19565, 19566, 19567, 19568, 19569, 19570, 
19571, 19572, 19573, 19574, 19575, 19576, 19578, 19579, 19580, 19581, 19582, 19583, 19585, 19587, 19589, 19163, 18532, 19592, 19594, 19597, 19600, 19602, 19179, 19606, 18753, 18569, 18575, 19183, 19608, 19610, 19612, 19615, 19616, 19618, 19620, 19622, 19623, 19627, 19624, 19629, 19630, 19634, 19635, 19632, 19212, 19637, 19638, 19218, 19641, 19643, 19646, 19650, 19653, 19655, 19659, 19663, 19667, 19251, 19671, 19673, 19675, 19677, 19678, 19680, 19266, 19269, 19685, 19687, 19275, 19692, 19696, 19699, 19703, 19705, 19294, 19296, 19710, 19714, 19716, 19718, 19309, 19722, 19725, 19726, 19727, 19728, 19729, 19730, 19731, 19732, 19733, 19734, 19735, 19737, 19738, 19739, 19740, 19741, 19742, 19743, 19744, 19745, 19746, 19748, 19749, 19750, 19751, 16232, 17020, 19752, 19753, 19754, 19755, 19756, 19757, 19758, 17029, 19759, 19760, 19761, 17032, 19762, 19763, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 19969, 19970, 19972, 19973, 19975, 19976, 19978, 19979, 19981, 19984, 19986, 19988, 19992, 19994, 19996, 19998, 20000, 20003, 20006, 20010, 20013, 20015, 20017, 20019, 20020, 20024, 20026, 20028, 20030, 20033, 20034, 20036, 20037, 18791, 20042, 20043, 20045, 20047, 18819, 20050, 20052, 20054, 20056, 20058, 20062, 20064, 20066, 20068, 20070, 20072, 20073, 18546, 20076, 20078, 20080, 20081, 20082, 20085, 20087, 20089, 20091, 19626, 20095, 20096, 19633, 20100, 20102, 20104, 18629, 20108, 18720, 18659, 20113, 20115, 20117, 20119, 20121, 20123, 18720, 20128, 20130, 20133, 20135, 18766, 20140, 20143, 20145, 20147, 20150, 20153, 20155, 20157, 20160, 20163, 20164, 17022, 20168, 20172, 20176, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 
143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20224, 20226, 20228, 20230, 20232, 20233, 20234, 20235, 20236, 20237, 20238, 20239, 20240, 20241, 20243, 20245, 20247, 20249, 20251, 20252, 20253, 20255, 20257, 20258, 20260, 20262, 18969, 20263, 18981, 20265, 18991, 20268, 20271, 20274, 20275, 20276, 20278, 20279, 20281, 20282, 19628, 20094, 20286, 20099, 20290, 20292, 18766, 20293, 20294, 20295, 18670, 20296, 20298, 20300, 18695, 18709, 20302, 20303, 18741, 20305, 20307, 19121, 20309, 19139, 20313, 20318, 20319, 20321, 20322, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20483, 20501, 20504, 20480, 20506, 20481, 
20508, 20482, 20510, 20511, 20485, 20512, 20486, 20487, 19596, 20499, 20488, 20489, 20083, 20084, 20490, 20491, 20519, 20520, 20492, 20522, 20499, 20524, 20493, 19645, 20495, 20526, 20499, 19658, 20494, 19662, 20499, 20530, 20531, 20532, 20495, 20533, 20499, 20534, 20496, 20535, 20499, 20536, 20497, 20537, 20499, 20538, 20498, 20539, 20499, 19721, 20500, 20541, 20542, 20503, 20543, 20544, 20545, 20170, 20547, 20548, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20739, 20507, 20741, 20509, 20743, 20060, 20736, 20746, 20748, 20749, 20750, 20751, 20752, 20753, 20754, 20756, 20757, 20760, 20761, 20762, 20764, 20765, 20766, 19649, 20768, 20769, 20770, 20771, 20772, 19666, 20776, 20778, 20779, 20780, 20781, 20782, 20127, 20784, 20786, 19712, 20788, 20790, 20791, 20792, 20139, 20737, 20795, 20796, 20738, 20320, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 
122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20267, 20998, 21002, 21013, 21015, 21017, 21019, 21021, 21024, 21026, 21028, 20132, 21034, 21036, 21037, 21039, 21040, 20993, 20995, 20777, 20747, 20763, 20785, 20789, 20289, 20513, 20518, 21010, 20759, 20774, 20775, 20527, 20758, 20515, 21006, 20277, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 21248, 21252, 21255, 21256, 21257, 21259, 
21261, 21263, 21265, 21266, 20745, 21267, 21268, 21269, 21253, 21270, 21271, 21272, 21273, 21250, 21260, 21274, 21275, 21276, 21251, 21258, 21277, 21278, 21254, 21279, 21280, 21281, 21282, 21283, 20794, 20797, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 21504, 21514, 21518, 21523, 21508, 21524, 21528, 21506, 21507, 21509, 21529, 21532, 21505, 21515, 21519, 21521, 21526, 21533, 21536, 21510, 21538, 21511, 21539, 20162, 20316, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 
155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 21760, 21764, 21767, 21768, 21769, 21772, 21517, 21765, 21770, 21531, 21774, 21779, 21781, 20801, 21783, 21784, 20800, 20798, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 21763, 21766, 22019, 22021, 21773, 22025, 22029, 20159, 20167, 20171, 20175, 22032, 20317, 22033, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 
54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22272, 21776, 22274, 22275, 22276, 22279, 22280, 22281, 22282, 22284, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 
221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22528, 22530, 22278, 22534, 22031, 22536, 22537, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22532, 22785, 22786, 22788, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 
144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23040, 23042, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 21041, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 
67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23552, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 
238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22790, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23297, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 
168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255};
int h_C[]= {
2, 4, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113, 115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 288, 290, 292, 294, 296, 298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342, 344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388, 390, 392, 394, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435, 437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 475, 477, 479, 481, 483, 485, 487, 489, 491, 494, 496, 498, 500, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527, 529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 560, 562, 564, 566, 568, 570, 573, 575, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619, 621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 641, 643, 645, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665, 667, 669, 671, 673, 675, 677, 679, 681, 683, 685, 687, 689, 691, 693, 695, 697, 700, 702, 705, 707, 709, 711, 713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757, 759, 761, 763, 765, 767, 769, 771, 773, 775, 777, 779, 781, 783, 785, 787, 789, 791, 793, 795, 797, 799, 801, 803, 805, 807, 809, 811, 813, 815, 817, 819, 821, 823, 825, 827, 829, 831, 
833, 835, 837, 839, 841, 843, 846, 848, 850, 852, 854, 856, 859, 861, 863, 865, 867, 869, 871, 873, 875, 877, 879, 881, 883, 885, 887, 889, 891, 893, 895, 897, 899, 901, 903, 905, 907, 909, 911, 913, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940, 942, 944, 946, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986, 988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1023, 1025, 1027, 1029, 1031, 1033, 1035, 1037, 1041, 1043, 1045, 1047, 1049, 1051, 1053, 1055, 1057, 1059, 1063, 1065, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1091, 1093, 1095, 1097, 1099, 1101, 1103, 1105, 1107, 1109, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1133, 1135, 1137, 1139, 1142, 1144, 1146, 1148, 1151, 1153, 1157, 1159, 1162, 1164, 1168, 1170, 1172, 1174, 1176, 1178, 1180, 1182, 1185, 1187, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1211, 1213, 1216, 1218, 1221, 1223, 1226, 1228, 1231, 1233, 1239, 1241, 1244, 1246, 1249, 1251, 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330, 1332, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1358, 1360, 1362, 1364, 1368, 1370, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1387, 1389, 1391, 1394, 1396, 1400, 1402, 1405, 1407, 1410, 1412, 1415, 1417, 1420, 1422, 1425, 1427, 1430, 1432, 1435, 1437, 1440, 1442, 1444, 1446, 1448, 1450, 1453, 1455, 1459, 1461, 1463, 1465, 1470, 1472, 1474, 1476, 1480, 1482, 1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 1520, 1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1557, 1559, 1561, 1563, 1565, 1567, 1569, 1571, 1573, 1575, 1577, 1579, 1582, 1584, 1586, 1588, 
1590, 1592, 1594, 1596, 1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634, 1636, 1638, 1640, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1662, 1664, 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1696, 1698, 1702, 1704, 1706, 1708, 1710, 1712, 1714, 1716, 1719, 1721, 1723, 1725, 1727, 1729, 1733, 1735, 1741, 1743, 1745, 1747, 1749, 1751, 1754, 1756, 1759, 1761, 1763, 1765, 1767, 1769, 1772, 1774, 1777, 1779, 1782, 1784, 1787, 1789, 1792, 1794, 1797, 1799, 1801, 1803, 1805, 1807, 1811, 1813, 1815, 1817, 1819, 1821, 1823, 1825, 1827, 1829, 1831, 1833, 1835, 1837, 1839, 1841, 1843, 1845, 1847, 1849, 1851, 1853, 1856, 1858, 1860, 1862, 1866, 1868, 1870, 1872, 1874, 1876, 1863, 1863, 1854, 1854, 1863, 1863, 1922, 1924, 1926, 1928, 1930, 1932, 286, 1477, 1660, 1236, 1236, 1477, 1660, 1730, 1276, 1276, 492, 492, 1236, 1236, 948, 1236, 1236, 1730, 1730, 1738, 1738, 286, 571, 571, 571, 571, 571, 571, 1738, 577, 1236, 1236, 1642, 558, 1236, 1236, 1236, 1236, 1038, 395, 571, 1236, 1236, 1068, 1068, 1131, 1131, 1140, 1140, 1021, 1021, 1642, 1863, 492, 1236, 1236, 492, 1236, 1236, 501, 501, 1236, 1236, 1068, 1068, 1131, 1131, 492, 492, 1236, 1236, 1068, 1068, 1131, 1131, 501, 501, 1236, 1236, 1089, 1089, 558, 1642, 1642, 1809, 947, 571, 577, 1809, 1236, 1236, 1038, 703, 2286, 2288, 2290, 2292, 2295, 2297, 2299, 2301, 2304, 2306, 2308, 2310, 2313, 2315, 2317, 2319, 2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 2343, 2345, 2347, 2350, 2352, 2354, 2356, 2358, 2360, 1365, 1365, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395, 1236, 1236, 1068, 1068, 1131, 1131, 1236, 1236, 1140, 1140, 1089, 1089, 1140, 1140, 1276, 1276, 1365, 1242, 914, 1700, 1021, 1021, 1038, 1038, 947, 948, 1021, 1021, 1038, 1038, 1693, 1021, 1021, 1730, 1738, 1038, 1038, 1039, 2616, 2618, 2620, 2622, 2624, 2626, 2628, 2630, 2632, 2634, 2636, 
2638, 2640, 2642, 2645, 2647, 2650, 2652, 2654, 2656, 1089, 1089, 1066, 1066, 1068, 1068, 1140, 1140, 1067, 1067, 1068, 1068, 1166, 1131, 1131, 1140, 1140, 1154, 1154, 1236, 1236, 1236, 1236, 1242, 1242, 1247, 1365, 1365, 1366, 1354, 1276, 1276, 1333, 1354, 1355, 1356, 1365, 1365, 1366, 1392, 1397, 1466, 1466, 1854, 1642, 1642, 1693, 1700, 1730, 1730, 1738, 1738, 1854, 1854, 1863, 1863, 1854, 1854, 1863, 1863, 1854, 1863, 2980, 2982, 2985, 2987, 2989, 2991, 2993, 2995, 2997, 2999, 3001, 3003, 3005, 3007, 3009, 3011, 3013, 3015, 3017, 3019, 3021, 3023, 3025, 3027, 3029, 3031, 3033, 3035, 3037, 3039, 3041, 3043, 3045, 3047, 3049, 3051, 3053, 3055, 3057, 3059, 3061, 3063, 3066, 3068, 3071, 3073, 3075, 3077, 3079, 3081, 3084, 3086, 3090, 3092, 3095, 3097, 3101, 3103, 3105, 3107, 3109, 3111, 3114, 3116, 3120, 3122, 3125, 3127, 3131, 3133, 3135, 3137, 3140, 3142, 2643, 2643, 3145, 3145, 2643, 2643, 2302, 2302, 2302, 2302, 3145, 3145, 2964, 2971, 2348, 2348, 2293, 3145, 3145, 2348, 2348, 2293, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2302, 2302, 2302, 2302, 2302, 2302, 2348, 2348, 2348, 2348, 2348, 2348, 2311, 2311, 2311, 2311, 3145, 3145, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2657, 3681, 3683, 3689, 3691, 3148, 3148, 3069, 3069, 3148, 3148, 3150, 3708, 3710, 2643, 2643, 2648, 2643, 2643, 2643, 2643, 2648, 2657, 2964, 2971, 4019, 4021, 3145, 3145, 4054, 4056, 4058, 4060, 4063, 4065, 3145, 3145, 3145, 3145, 3148, 3148, 3087, 3087, 3117, 3117, 3138, 3138, 3145, 3145, 3148, 3148, 3150, 4141, 4143, 4146, 4148, 4153, 4155, 4158, 4160, 4163, 4165, 4167, 4169, 4172, 4174, 4176, 4178, 3846, 3676, 3846, 3846, 4180, 4180, 3846, 3846, 4180, 4180, 4180, 4180, 3676, 3846, 4150, 4180, 4180, 4150, 4170, 4170, 4180, 4180, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 
155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 6657, 6659, 6661, 6663, 6665, 6667, 6669, 6671, 6673, 6675, 6677, 6679, 6681, 6683, 6685, 6687, 6689, 6691, 6693, 6695, 6697, 6699, 6701, 6703, 6705, 6707, 6709, 6711, 6713, 6715, 6717, 6719, 6721, 6723, 6725, 6727, 6729, 6731, 6733, 6735, 6737, 6739, 6741, 6743, 6745, 6747, 6749, 6751, 6753, 6755, 6757, 6759, 6761, 6763, 6765, 6767, 6769, 6771, 6773, 6775, 6777, 6779, 6781, 6783, 6785, 6787, 6789, 6791, 6793, 6795, 6797, 6799, 6801, 6803, 6805, 6807, 6809, 6811, 6813, 6815, 6817, 6819, 6821, 6823, 6825, 6827, 6829, 6831, 6833, 6835, 6837, 6839, 6841, 6843, 6845, 6847, 6849, 6851, 6853, 6855, 6857, 6859, 6861, 6863, 6865, 6867, 6869, 6871, 6873, 6875, 6877, 6879, 6881, 6883, 6885, 6887, 6889, 6891, 6893, 6895, 6897, 6899, 6901, 6903, 6905, 6907, 6909, 6911, 6913, 6915, 6917, 6919, 6921, 6923, 6925, 6927, 6929, 6931, 6933, 6935, 6937, 6939, 6941, 6943, 6945, 6947, 6949, 6951, 6953, 6955, 6957, 6959, 6961, 6963, 6965, 6967, 6969, 6971, 6973, 6975, 6977, 6979, 6981, 6983, 6985, 6987, 6989, 6991, 6993, 6995, 6997, 6999, 7001, 7003, 7005, 7007, 7009, 7011, 7013, 7015, 7017, 7019, 7021, 7023, 7025, 7027, 7029, 7031, 7033, 7035, 7037, 7039, 7041, 7043, 7045, 7047, 7049, 7051, 7053, 7055, 7057, 7059, 7061, 7063, 7065, 7067, 7069, 7071, 7073, 7075, 7077, 7079, 7081, 7083, 7085, 7087, 7089, 7091, 7093, 7095, 7097, 7099, 7101, 7103, 7105, 7107, 7109, 7111, 7113, 7115, 7117, 7119, 7121, 7123, 7125, 7127, 7129, 7131, 7133, 7135, 7137, 7139, 7141, 7143, 7145, 7147, 7149, 7151, 7153, 
7155, 7157, 7159, 7161, 7163, 7165, 7167, 7169, 7171, 7173, 7175, 7177, 7179, 7181, 7183, 7185, 7187, 7189, 7191, 7193, 7195, 7197, 7199, 7201, 7203, 7205, 7207, 7209, 7211, 7213, 7215, 7217, 7219, 7221, 7223, 7225, 7227, 7229, 7231, 7233, 7235, 7237, 7239, 7241, 7243, 7245, 7247, 7249, 7251, 7253, 7255, 7257, 7259, 7261, 7263, 7265, 7267, 7269, 7271, 7273, 7275, 7277, 7279, 7281, 7283, 7285, 7287, 7289, 7291, 7293, 7295, 7297, 7299, 7301, 7303, 7305, 7307, 7309, 7311, 7313, 7315, 7317, 7319, 7321, 7323, 7325, 7327, 7329, 7331, 7333, 7335, 7337, 7339, 7341, 7343, 7345, 7347, 7349, 7351, 7353, 7355, 7357, 7359, 7361, 7363, 7365, 7367, 7369, 7371, 7373, 7375, 7377, 7379, 7381, 7383, 7385, 7387, 7389, 7391, 7393, 7395, 7397, 7399, 7401, 7403, 7405, 7407, 7409, 7411, 7413, 7415, 7417, 7419, 7421, 7423, 7425, 7427, 7429, 7431, 7433, 7435, 7437, 7439, 7441, 7443, 7445, 7447, 7449, 7451, 7453, 7455, 7457, 7459, 7461, 7463, 7465, 7467, 7469, 7471, 7473, 7475, 7477, 7479, 7481, 7483, 7485, 7487, 7489, 7491, 7493, 7495, 7497, 7499, 7501, 7503, 7505, 7507, 7509, 7511, 7513, 7515, 7517, 7519, 7521, 7523, 7525, 7527, 7529, 7531, 7533, 7535, 7537, 7539, 7541, 1883, 1884, 1894, 1895, 1897, 1898, 7549, 7551, 7553, 1933, 1938, 1942, 1951, 1952, 1964, 1970, 1971, 1972, 1973, 1983, 1984, 1985, 1986, 1987, 2037, 2038, 2044, 2045, 2048, 2049, 2063, 2064, 2065, 2066, 2067, 2068, 2069, 2070, 2072, 2081, 2082, 2093, 2096, 2100, 2101, 2104, 2105, 2108, 2116, 2117, 2123, 2124, 2126, 2127, 2129, 2130, 2136, 2137, 2139, 2140, 2144, 2145, 2153, 2154, 2155, 2159, 2160, 2161, 2165, 2166, 2167, 2168, 2170, 2171, 2173, 2174, 2175, 2176, 2177, 2178, 2180, 2181, 2183, 2184, 2185, 2186, 2187, 2188, 2192, 2193, 2205, 2208, 2209, 2219, 2229, 2230, 2232, 2233, 2236, 2237, 2267, 2281, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676, 7678, 7680, 7682, 2362, 2363, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 2411, 2412, 2424, 2425, 2427, 2428, 2431, 2432, 2453, 
2454, 2457, 2458, 2466, 2467, 2503, 2504, 2511, 2514, 2520, 2533, 2544, 2546, 2550, 2551, 2554, 2555, 2564, 2565, 2569, 2570, 2572, 2598, 2599, 2602, 2603, 2606, 2607, 2610, 7740, 7742, 7744, 7746, 7748, 7750, 7752, 7754, 7756, 7758, 2666, 2667, 2675, 2676, 2677, 2678, 2681, 2682, 2683, 2684, 2685, 2686, 2699, 2708, 2709, 2712, 2713, 2716, 2719, 2731, 2732, 2738, 2739, 2750, 2751, 2758, 2760, 2761, 2763, 2771, 2777, 2778, 2798, 2806, 2807, 2808, 2810, 2811, 2812, 2831, 2834, 2846, 2848, 2881, 2907, 2908, 2928, 2931, 2939, 2940, 2943, 2944, 2959, 2960, 2962, 2963, 2966, 2967, 2969, 2970, 2974, 2976, 7822, 7824, 7826, 7828, 7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866, 7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 3177, 3178, 3186, 3187, 3189, 3190, 3191, 3192, 3193, 3194, 3196, 3197, 3505, 3528, 3587, 3588, 3596, 3597, 3598, 3601, 3602, 3610, 3613, 3614, 3624, 3625, 3626, 3627, 3628, 3629, 3631, 3632, 3633, 3634, 3635, 3636, 3638, 3639, 3640, 3641, 3642, 3643, 3645, 3646, 3647, 3648, 3650, 3651, 3658, 3659, 3666, 3667, 3668, 3669, 3670, 3671, 3672, 3673, 3674, 3675, 3679, 7957, 7959, 3700, 3701, 3702, 3703, 3704, 3705, 3706, 7968, 3826, 3827, 3835, 3837, 3838, 3839, 3840, 3843, 3845, 4007, 4010, 7981, 4039, 4040, 7985, 7987, 7989, 4083, 4084, 4098, 4099, 4100, 4101, 4115, 4118, 4124, 4127, 4132, 4133, 4135, 4136, 4137, 4138, 4139, 8008, 8010, 8012, 8014, 8016, 8018, 8020, 8022, 4221, 4222, 4229, 4230, 5163, 5164, 5178, 5179, 5182, 5183, 5210, 5211, 5227, 5314, 5422, 5426, 5427, 5470, 5473, 5475, 5477, 5478, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 
162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 8448, 8450, 8618, 1809, 8622, 8636, 8567, 8630, 8632, 8634, 8448, 8450, 8618, 1809, 8624, 8638, 8626, 8640, 8567, 8630, 8632, 8634, 8525, 8520, 8528, 8527, 8529, 8531, 8530, 8533, 8532, 8192, 0, 8416, 8194, 8192, 5, 8416, 8194, 8416, 8282, 8445, 8572, 8546, 8624, 8626, 8567, 8196, 8199, 8198, 8200, 8203, 8202, 8204, 8280, 8648, 1089, 8207, 8206, 8332, 8343, 8223, 8208, 8208, 947, 8223, 8208, 8610, 8555, 8624, 8626, 8610, 8653, 8209, 8551, 8551, 8551, 8551, 8573, 8211, 8213, 8212, 8655, 8657, 8215, 8214, 8603, 8216, 8218, 8217, 8603, 8219, 8603, 8220, 8221, 8222, 8223, 948, 8569, 8568, 8363, 8228, 8569, 8568, 8545, 8544, 8363, 8228, 8343, 8226, 8363, 8228, 948, 8229, 8232, 8231, 8233, 8236, 8235, 8237, 8278, 8239, 8611, 8240, 8242, 8244, 1809, 8246, 8248, 8250, 8549, 8252, 8280, 8660, 8253, 8254, 8363, 8257, 8256, 8662, 8603, 8258, 8664, 8343, 8350, 8445, 8445, 8259, 8261, 8274, 8553, 8555, 8577, 8579, 8545, 8544, 8667, 8669, 8671, 8274, 8264, 8267, 8266, 8268, 8271, 8270, 8272, 8280, 8675, 8273, 8274, 8447, 8447, 8553, 8275, 8278, 8277, 8553, 8555, 8577, 8579, 8562, 8280, 8279, 8679, 8281, 8280, 8681, 8282, 8606, 8283, 8285, 8606, 1477, 1477, 8546, 8289, 8290, 8292, 8291, 8294, 8293, 8686, 8295, 8688, 8296, 8690, 1089, 8460, 8459, 8469, 8468, 8692, 8441, 8694, 8298, 8297, 8563, 8585, 8299, 8302, 8301, 8303, 8305, 8304, 8699, 8306, 8308, 8307, 8702, 8309, 8311, 8310, 8704, 8706, 8312, 8708, 8313, 8710, 8712, 8714, 8314, 8716, 8315, 8718, 8720, 8722, 1089, 8318, 8317, 8724, 8320, 8319, 8322, 8321, 8528, 
8323, 8325, 8324, 8326, 8327, 8329, 8363, 947, 8727, 8343, 8330, 8563, 8331, 8353, 8355, 8357, 8359, 8620, 8353, 8355, 8357, 8359, 8572, 947, 8332, 947, 8441, 8333, 8335, 8334, 8734, 8454, 8336, 8474, 8455, 8474, 8456, 8457, 8466, 8338, 8337, 8467, 8338, 8337, 8340, 8339, 8342, 8341, 8343, 8344, 8346, 8345, 8603, 8347, 8603, 8348, 8349, 8350, 8441, 8445, 8351, 8353, 8355, 8357, 8359, 8572, 8607, 8603, 8361, 8444, 8362, 8363, 8364, 1477, 1477, 8584, 8535, 8756, 8471, 8367, 8474, 8472, 8474, 8473, 1166, 8374, 8479, 8478, 8368, 8370, 8369, 8372, 8371, 8766, 8471, 8373, 8474, 8472, 8474, 8473, 1166, 8374, 8479, 8478, 8376, 8768, 8377, 8770, 8379, 8378, 8772, 8381, 8380, 8474, 8382, 8474, 8383, 8384, 1089, 8387, 8386, 8474, 8461, 1110, 8465, 8464, 8388, 8390, 8389, 8392, 8391, 8774, 8454, 8393, 8776, 8460, 8459, 8394, 8396, 8395, 8469, 8468, 8778, 8514, 8513, 8528, 8515, 8528, 8516, 8517, 8500, 8499, 8528, 8501, 8528, 8502, 8503, 8512, 8504, 8505, 8493, 8491, 8528, 8494, 8528, 8495, 8496, 8507, 8506, 8528, 8508, 8528, 8509, 8510, 8512, 8511, 8528, 8497, 8780, 8397, 8399, 8400, 8402, 8403, 8405, 8407, 8409, 8411, 8413, 8415, 8416, 8441, 8553, 8429, 8417, 8603, 8431, 8432, 8572, 8607, 8606, 8551, 8418, 8596, 8576, 8447, 8437, 8420, 8603, 8438, 8603, 8439, 8440, 8421, 8423, 8603, 8443, 8445, 8788, 8551, 8550, 8437, 8436, 8603, 8438, 8603, 8439, 8440, 8441, 8792, 8603, 8443, 8445, 8794, 8596, 8426, 8447, 8429, 8428, 8603, 8430, 8603, 8431, 8432, 8572, 8607, 8434, 8608, 8611, 8610, 8612, 8555, 8437, 8436, 8603, 8438, 8603, 8439, 8440, 8441, 8797, 8603, 8443, 8444, 8445, 8801, 8551, 8550, 8447, 8448, 8450, 8567, 8454, 8453, 8474, 8455, 8474, 8456, 8457, 8814, 8460, 8452, 8474, 8461, 1110, 8465, 8464, 8816, 8818, 8469, 8468, 8820, 8822, 8824, 8454, 8453, 8474, 8455, 8474, 8456, 8457, 1089, 8460, 8459, 8474, 8461, 8474, 8474, 8462, 1110, 8465, 8464, 8467, 8466, 8827, 8469, 8468, 8829, 8471, 8470, 8474, 8472, 8474, 8473, 1166, 8476, 8479, 8478, 8481, 8480, 8483, 8482, 8484, 
8833, 8486, 8485, 8488, 8487, 8489, 8835, 8493, 8492, 8528, 8494, 8528, 8495, 8496, 8535, 1365, 8534, 8837, 1242, 8493, 8491, 8528, 8495, 8496, 8535, 8840, 1365, 8493, 8492, 8528, 8494, 8528, 8495, 8496, 8528, 8497, 8528, 8528, 8498, 8844, 8500, 8499, 8528, 8501, 8528, 8502, 8503, 8512, 8504, 8505, 8507, 8506, 8528, 8508, 8528, 8509, 8510, 8512, 8511, 8514, 8513, 8528, 8515, 8528, 8516, 8517, 8518, 8850, 8525, 8520, 8528, 8526, 8528, 8527, 8529, 8531, 8530, 8533, 8532, 8534, 8521, 8538, 8523, 8539, 8525, 8524, 8528, 8526, 8528, 8527, 8529, 8531, 8530, 8533, 8532, 8534, 8535, 8538, 8537, 8539, 8620, 8622, 8566, 8567, 8630, 8632, 1477, 1477, 1477, 1477, 1477, 1477, 1477, 8581, 1809, 8545, 8544, 8546, 8549, 8548, 8551, 8550, 8552, 8553, 8555, 8557, 8559, 8561, 8563, 8562, 8564, 8565, 8622, 8566, 8567, 8630, 8632, 8569, 8568, 8571, 8570, 8572, 8607, 8606, 8574, 8573, 8575, 8596, 8598, 8576, 8577, 8608, 8611, 8610, 8612, 8579, 8581, 8858, 1660, 1660, 1660, 8585, 8584, 1660, 1660, 1660, 8588, 8587, 8589, 8603, 8590, 8591, 8604, 8607, 8592, 8594, 8593, 8595, 8595, 8596, 8598, 8599, 8601, 8600, 8603, 8602, 8862, 8603, 8603, 8864, 8604, 8607, 8606, 8608, 8611, 8610, 8612, 8614, 8613, 8615, 8616, 8618, 1809, 8620, 8866, 8622, 8868, 8624, 8870, 8626, 8872, 8628, 8630, 8632, 8634, 8641, 8913, 8804, 8737, 8908, 8805, 8908, 8806, 3129, 8915, 3069, 8917, 8919, 8921, 3099, 8923, 3148, 8644, 8644, 8644, 8644, 8644, 8644, 8644, 8644, 8736, 8645, 8645, 8646, 8646, 8684, 8875, 8649, 8649, 8650, 8650, 8802, 8802, 8665, 8665, 8665, 8665, 8730, 8731, 8725, 8683, 8683, 8684, 8725, 8875, 8875, 8802, 8730, 8802, 8731, 8802, 8802, 8736, 8927, 8804, 8737, 8908, 8805, 8908, 8806, 3129, 8930, 8757, 8883, 8932, 8882, 8881, 8908, 8906, 8908, 8907, 3129, 8753, 8811, 8935, 8882, 8739, 8908, 8906, 8908, 8907, 3129, 3069, 3069, 8937, 8939, 8941, 3099, 8943, 8945, 8947, 3129, 8949, 8951, 8953, 3099, 8955, 8957, 3129, 8959, 8757, 8883, 8882, 8881, 3148, 3148, 8961, 8746, 8745, 8748, 8747, 8749, 8751, 
8963, 8965, 8967, 8969, 8971, 8752, 8753, 8757, 8760, 8759, 8902, 8761, 8902, 8762, 3099, 3069, 8976, 8978, 8980, 8781, 8781, 8782, 8782, 8802, 8802, 8875, 8984, 8804, 8803, 8908, 8805, 8908, 8806, 3129, 8808, 8987, 8989, 8905, 8810, 8811, 8842, 8846, 8875, 8875, 8875, 8877, 8876, 8878, 8902, 8901, 3099, 8879, 8898, 8902, 8900, 8905, 8880, 8908, 8906, 8908, 8907, 3129, 8893, 8894, 8885, 8996, 8883, 8899, 8898, 8882, 8881, 8908, 8906, 8908, 8907, 3129, 8910, 8883, 8902, 8890, 3099, 8902, 8889, 8888, 8887, 8905, 8884, 8908, 8906, 8908, 8907, 3129, 8893, 8894, 8885, 9001, 3069, 8888, 8887, 8902, 8889, 8902, 8890, 3099, 8905, 8892, 8893, 8894, 8895, 9003, 9005, 8899, 8898, 8905, 8896, 8908, 8906, 8908, 8907, 3129, 8910, 3069, 8899, 8898, 8902, 8900, 8902, 8901, 3099, 8905, 8904, 8908, 8906, 8908, 8907, 3129, 8910, 8911, 9013, 9015, 8985, 8985, 8985, 8985, 8928, 8928, 8933, 8933, 9028, 3846, 3846, 8928, 8928, 8933, 8933, 8990, 8990, 8985, 8985, 8990, 8990, 9018, 9017, 3846, 3846, 3846, 9030, 9018, 9017, 9032, 3846, 3846, 9034, 9018, 9017, 3684, 3684, 9036, 9018, 9017, 3684, 3684, 3684, 8974, 8974, 8982, 8985, 8985, 8990, 8990, 8998, 8997, 8994, 9021, 9023, 8998, 8997, 8999, 9021, 9023, 9041, 9018, 9017, 9020, 9019, 9021, 9023, 9046, 9025, 9025, 9038, 9038, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 
255, 1878, 1879, 1880, 1881, 1882, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1896, 1899, 1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1934, 1935, 1936, 1937, 1939, 1940, 1941, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1953, 1954, 1955, 1956, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1965, 1966, 1967, 1968, 1969, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2036, 2039, 2040, 2041, 2042, 2043, 2046, 2047, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2071, 2073, 2074, 2075, 2076, 2077, 2078, 2079, 2080, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2090, 2091, 2092, 2094, 2095, 2097, 2098, 2099, 2102, 2103, 2106, 2107, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2118, 2119, 2120, 2121, 2122, 2125, 2128, 2131, 2132, 2133, 2134, 2135, 2138, 2141, 2142, 2143, 2146, 2147, 2148, 2149, 2150, 2151, 2152, 2156, 2157, 2158, 2162, 2163, 2164, 2169, 2172, 2179, 2182, 2189, 2190, 2191, 2194, 2195, 2196, 2197, 2198, 2199, 2200, 2201, 2202, 2203, 2204, 2206, 2207, 2210, 2211, 2212, 2213, 2214, 2215, 2216, 2217, 2218, 2220, 2221, 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2231, 2234, 2235, 2238, 2239, 2240, 2241, 2242, 2243, 2244, 2245, 2246, 2247, 2248, 2249, 2250, 2251, 2252, 2253, 2254, 2255, 2256, 2257, 2258, 2259, 2260, 2261, 2262, 2263, 2264, 2265, 2266, 2268, 2269, 2270, 2271, 2272, 2273, 2274, 2275, 2276, 2277, 2278, 2279, 2280, 2282, 2283, 2284, 2361, 2396, 2397, 2398, 2399, 2400, 2401, 2402, 2403, 2404, 2405, 2406, 2407, 2408, 2409, 2410, 2413, 2414, 2415, 2416, 2417, 2418, 2419, 2420, 2421, 2422, 2423, 2426, 2429, 2430, 2433, 2434, 2435, 2436, 2437, 2438, 
2439, 2440, 2441, 2442, 2443, 2444, 2445, 2446, 2447, 2448, 2449, 2450, 2451, 2452, 2455, 2456, 2459, 2460, 2461, 2462, 2463, 2464, 2465, 2468, 2469, 2470, 2471, 2472, 2473, 2474, 2475, 2476, 2477, 2478, 2479, 2480, 2481, 2482, 2483, 2484, 2485, 2486, 2487, 2488, 2489, 2490, 2491, 2492, 2493, 2494, 2495, 2496, 2497, 2498, 2499, 2500, 2501, 2502, 2505, 2506, 2507, 2508, 2509, 2510, 2512, 2513, 2515, 2516, 2517, 2518, 2519, 2521, 2522, 2523, 2524, 2525, 2526, 2527, 2528, 2529, 2530, 2531, 2532, 2534, 2535, 2536, 2537, 2538, 2539, 2540, 2541, 2542, 2543, 2545, 2547, 2548, 2549, 2552, 2553, 2556, 2557, 2558, 2559, 2560, 2561, 2562, 2563, 2566, 2567, 2568, 2571, 2573, 2574, 2575, 2576, 2577, 2578, 2579, 2580, 2581, 2582, 2583, 2584, 2585, 2586, 2587, 2588, 2589, 2590, 2591, 2592, 2593, 2594, 2595, 2596, 2597, 2600, 2601, 2604, 2605, 2608, 2609, 2611, 2612, 2613, 2614, 2659, 2660, 2661, 2662, 2663, 2664, 2665, 2668, 2669, 2670, 2671, 2672, 2673, 2674, 2679, 2680, 2687, 2688, 2689, 2690, 2691, 2692, 2693, 2694, 2695, 2696, 2697, 2698, 2700, 2701, 2702, 2703, 2704, 2705, 2706, 2707, 2710, 2711, 2714, 2715, 2717, 2718, 2720, 2721, 2722, 2723, 2724, 2725, 2726, 2727, 2728, 2729, 2730, 2733, 2734, 2735, 2736, 2737, 2740, 2741, 2742, 2743, 2744, 2745, 2746, 2747, 2748, 2749, 2752, 2753, 2754, 2755, 2756, 2757, 2759, 2762, 2764, 2765, 2766, 2767, 2768, 2769, 2770, 2772, 2773, 2774, 2775, 2776, 2779, 2780, 2781, 2782, 2783, 2784, 2785, 2786, 2787, 2788, 2789, 2790, 2791, 2792, 2793, 2794, 2795, 2796, 2797, 2799, 2800, 2801, 2802, 2803, 2804, 2805, 2809, 2813, 2814, 2815, 2816, 2817, 2818, 2819, 2820, 2821, 2822, 2823, 2824, 2825, 2826, 2827, 2828, 2829, 2830, 2832, 2833, 2835, 2836, 2837, 2838, 2839, 2840, 2841, 2842, 2843, 2844, 2845, 2847, 2849, 2850, 2851, 2852, 2853, 2854, 2855, 2856, 2857, 2858, 2859, 2860, 2861, 2862, 2863, 2864, 2865, 2866, 2867, 2868, 2869, 2870, 2871, 2872, 2873, 2874, 2875, 2876, 2877, 2878, 2879, 2880, 2882, 2883, 2884, 2885, 2886, 2887, 2888, 2889, 
2890, 2891, 2892, 2893, 2894, 2895, 2896, 2897, 2898, 2899, 2900, 2901, 2902, 2903, 2904, 2905, 2906, 2909, 2910, 2911, 2912, 2913, 2914, 2915, 2916, 2917, 2918, 2919, 2920, 2921, 2922, 2923, 2924, 2925, 2926, 2927, 2929, 2930, 2932, 2933, 2934, 2935, 2936, 2937, 2938, 2941, 2942, 2945, 2946, 2947, 2948, 2949, 2950, 2951, 2952, 2953, 2954, 2955, 2956, 2957, 2958, 2961, 2965, 2968, 2973, 2975, 2977, 2978, 3176, 3179, 3180, 3181, 3182, 3183, 3184, 3185, 3188, 3195, 3198, 3200, 3201, 3206, 3207, 3209, 3210, 3212, 3213, 3225, 3227, 3228, 3234, 3235, 3263, 3271, 3296, 3297, 3304, 3305, 9372, 9374, 9373, 9288, 9298, 3321, 3330, 3390, 3391, 3393, 3394, 9372, 9373, 9374, 3398, 3400, 3414, 3422, 3425, 3437, 8697, 8700, 9449, 9455, 9461, 3491, 3496, 3516, 3530, 3531, 3533, 3535, 3562, 3563, 3581, 3589, 3590, 3591, 3592, 3593, 3594, 3595, 3599, 3600, 3603, 3604, 3605, 3606, 3607, 3608, 3609, 3611, 3612, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3630, 3637, 3644, 3649, 3652, 3653, 3654, 3655, 3656, 3657, 3660, 3661, 3662, 3663, 3664, 3665, 3677, 3678, 9549, 3687, 3692, 3693, 3694, 3695, 3696, 3697, 3698, 3699, 9650, 3760, 3762, 3764, 3765, 3788, 3789, 3825, 3828, 3829, 3830, 3831, 3832, 3833, 3834, 3836, 3841, 3842, 3844, 9759, 9764, 9822, 9830, 3893, 9844, 3909, 9872, 3974, 3990, 4014, 4015, 4016, 4017, 4022, 4023, 4024, 4025, 4026, 4027, 4028, 4029, 4030, 4031, 4032, 4033, 4034, 4035, 4036, 4037, 4038, 4041, 4042, 4043, 4044, 4045, 4046, 4047, 4048, 4049, 4050, 4051, 4052, 4066, 4067, 4068, 4069, 4070, 4071, 4072, 4073, 4074, 4075, 4076, 4077, 4078, 4079, 4080, 4081, 4082, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095, 4096, 4097, 4102, 4103, 4104, 4105, 4106, 4107, 4108, 4109, 4110, 4111, 4112, 4113, 4114, 4116, 4117, 4119, 4120, 4121, 4122, 4123, 4125, 4126, 4128, 4129, 4130, 4131, 4134, 10174, 10021, 8985, 4202, 4203, 10174, 10031, 10033, 10032, 10174, 10174, 10122, 10122, 8985, 4218, 4219, 8928, 4224, 4225, 8933, 4227, 4228, 4231, 4232, 
10142, 10078, 8928, 5104, 5105, 10142, 10089, 8933, 5115, 5116, 10142, 10099, 8990, 5126, 5127, 10110, 10109, 10114, 10113, 10118, 10117, 10122, 10121, 8985, 5143, 5144, 10142, 10142, 8990, 5154, 5155, 5158, 5159, 5160, 5161, 5162, 10131, 10139, 5176, 5177, 5180, 5181, 10140, 10139, 10142, 10141, 5206, 5207, 5208, 5209, 10140, 10139, 10142, 10141, 5225, 5226, 5238, 5239, 5240, 5243, 5246, 10154, 10155, 10156, 5254, 10174, 10164, 8985, 5301, 5302, 10174, 10173, 8990, 5311, 5312, 5395, 5396, 5397, 5398, 5399, 5420, 5421, 5423, 5424, 5425, 10248, 10277, 5468, 5469, 5471, 5472, 5474, 5476, 10286, 9025, 5558, 5559, 10286, 10307, 9038, 6140, 6141, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 10516, 10518, 10521, 10523, 10527, 10531, 10542, 10545, 10547, 10550, 10572, 10574, 10576, 10578, 10580, 10582, 10588, 10592, 10594, 10604, 10607, 10610, 10621, 10626, 10628, 10641, 10645, 10648, 10650, 10658, 10665, 10667, 10679, 10681, 10686, 10688, 10691, 10696, 10699, 10702, 10705, 10712, 10714, 10716, 10718, 10720, 10729, 10746, 10748, 10750, 10752, 10756, 10759, 10761, 10763, 10767, 10769, 10771, 10784, 10786, 10794, 10796, 10798, 10802, 10805, 10807, 
10809, 10811, 10813, 10817, 10821, 10823, 10825, 10827, 10831, 10833, 10836, 10839, 10841, 10843, 10845, 10848, 10850, 10852, 10854, 10856, 10859, 10861, 10863, 10866, 10869, 10871, 10873, 10876, 10878, 10880, 10883, 10885, 10901, 10903, 10907, 10909, 10914, 10916, 10918, 10923, 10926, 10928, 10930, 10932, 10936, 10942, 10944, 10946, 10950, 10953, 10957, 10959, 10961, 10965, 10969, 10975, 10977, 10979, 10982, 10984, 10987, 10989, 10991, 10993, 10995, 10999, 11001, 11003, 11007, 11009, 11011, 11013, 11015, 11017, 11021, 11023, 11025, 11028, 11030, 11033, 11035, 11037, 11044, 11046, 11051, 11053, 11055, 11058, 11060, 11063, 11065, 11067, 11070, 11073, 11075, 11077, 11080, 11082, 11084, 11086, 11090, 11092, 11094, 11097, 11099, 11103, 11106, 11108, 11110, 11113, 11115, 11119, 11137, 11140, 11142, 11150, 11159, 11161, 11164, 11166, 11174, 11182, 11187, 11190, 11194, 11196, 11198, 11203, 11205, 11207, 11210, 11213, 11216, 10496, 10498, 10500, 11221, 10503, 10502, 10501, 10505, 10507, 10510, 10509, 10513, 10512, 10511, 10524, 10532, 10528, 10532, 11231, 11233, 11235, 8644, 8644, 8644, 8644, 10910, 11168, 11168, 11168, 10536, 10533, 10534, 10535, 10739, 10536, 8645, 11250, 10538, 10537, 11156, 10539, 8646, 11252, 10551, 10584, 10553, 10787, 10552, 10553, 10584, 10553, 11175, 11172, 11175, 10954, 11226, 10973, 8783, 11146, 11219, 10778, 10777, 10780, 10779, 11222, 11221, 8783, 8783, 10557, 10741, 10557, 10632, 10557, 10726, 10724, 10739, 11172, 11175, 8649, 11256, 10951, 10954, 10562, 10561, 11172, 8650, 11258, 3306, 3307, 3308, 3309, 10568, 10618, 10564, 10565, 10566, 10567, 10568, 10569, 3319, 10963, 10584, 10585, 10898, 10967, 11162, 11170, 10589, 8802, 10595, 8783, 10597, 10599, 11170, 11172, 11129, 11127, 10951, 10612, 10614, 10617, 10616, 10963, 10618, 10924, 10905, 10898, 10967, 11138, 11162, 10622, 10624, 10629, 10630, 10631, 10743, 10632, 10634, 10633, 10739, 10911, 10939, 10651, 8783, 11170, 11175, 11170, 11175, 8665, 8665, 3395, 3396, 3397, 10651, 8673, 11170, 
11175, 10651, 10939, 10911, 8783, 11143, 11175, 11170, 11175, 8677, 10668, 10781, 10774, 10775, 10671, 10670, 11224, 11223, 10781, 8683, 8683, 10674, 10673, 10675, 8683, 10683, 10682, 10689, 11222, 11221, 11224, 11223, 11227, 11153, 11152, 11154, 11157, 11148, 11122, 11121, 11123, 11126, 11129, 11128, 11130, 11133, 11219, 11179, 11178, 11180, 11185, 3471, 3473, 3475, 10707, 10706, 3478, 10709, 10708, 3481, 10722, 10744, 11177, 10778, 10777, 10780, 10779, 11222, 11221, 10724, 8802, 11177, 11219, 10790, 10789, 10726, 8802, 10744, 11177, 11219, 10731, 10730, 10733, 10732, 11153, 10734, 11219, 10736, 10735, 10738, 10737, 10739, 8802, 10741, 8802, 10743, 10744, 11146, 10778, 10777, 11222, 11221, 11179, 11178, 11180, 11184, 11183, 10764, 11153, 11151, 11154, 11156, 11155, 10773, 11292, 10774, 10775, 11177, 10778, 10777, 10780, 10779, 11222, 11221, 10781, 11122, 11121, 11123, 11125, 11124, 10787, 10790, 10789, 11130, 11132, 11131, 11295, 11297, 11299, 11304, 11306, 11308, 11313, 11315, 11317, 11328, 11332, 11334, 8848, 3686, 11342, 11344, 11346, 10819, 10818, 3757, 10888, 10886, 10890, 10892, 11353, 10894, 10897, 10896, 10898, 8783, 10910, 10911, 10921, 10920, 10924, 11355, 10934, 10937, 10938, 10939, 10954, 10963, 10967, 8802, 10971, 11219, 11222, 11221, 11224, 11223, 11227, 11226, 10973, 11358, 11360, 11362, 11366, 3855, 3857, 11026, 11031, 8848, 11040, 11041, 11042, 3883, 8838, 11049, 3889, 3897, 8848, 3911, 11100, 11104, 11116, 11120, 11122, 11121, 11126, 11125, 11124, 11168, 11143, 11175, 11177, 11219, 11129, 11128, 11127, 11133, 11132, 11131, 11134, 11219, 11222, 11221, 11224, 11223, 11227, 11226, 11225, 11168, 11143, 11175, 11146, 11153, 11152, 11151, 11157, 11156, 11155, 11168, 11170, 11175, 11177, 11219, 11179, 11178, 11185, 11184, 11183, 11199, 11217, 11219, 11222, 11221, 11224, 11223, 11227, 11226, 11225, 11380, 11383, 11386, 11388, 11390, 11392, 11394, 11401, 11403, 11405, 11407, 11412, 11415, 11417, 11419, 11421, 11423, 11430, 11432, 11434, 11437, 11442, 
11444, 11446, 11448, 11453, 11455, 11457, 11460, 11462, 11464, 11399, 11340, 11229, 11338, 4195, 4196, 10174, 4201, 11237, 4205, 4206, 10174, 10033, 4209, 4210, 4211, 4212, 10174, 10122, 4215, 4216, 4217, 11239, 4223, 4226, 11293, 11293, 11293, 11293, 10142, 5098, 5099, 5103, 11302, 11301, 10142, 5109, 5110, 5114, 11311, 11310, 10142, 5120, 5121, 5125, 11320, 11319, 10111, 5131, 5132, 10115, 5134, 5135, 10119, 5137, 5138, 10122, 5140, 5141, 5142, 11326, 11325, 10142, 5148, 5149, 5153, 11330, 11329, 11524, 5165, 5166, 10138, 11397, 11396, 11381, 11399, 11340, 11531, 5184, 5185, 10138, 11397, 11396, 11381, 11399, 11335, 10142, 5196, 5197, 11467, 11467, 11336, 11367, 11410, 11539, 5212, 5213, 10138, 11399, 11340, 10142, 5221, 5222, 11367, 11410, 11547, 11398, 11397, 11396, 11399, 11340, 11467, 11467, 11337, 11367, 11338, 11340, 11340, 11367, 11348, 5251, 5252, 5253, 5294, 5295, 10174, 5300, 11364, 10174, 5305, 5306, 5310, 11367, 11397, 11396, 11381, 11568, 11398, 11397, 11396, 11399, 11467, 11467, 11409, 11410, 11573, 11427, 11426, 11425, 11428, 11440, 11439, 11438, 5447, 11467, 11467, 11450, 11451, 11467, 11467, 11466, 5467, 11580, 11582, 11571, 11570, 11569, 11491, 11490, 5530, 5557, 11584, 11583, 11491, 11490, 5592, 11527, 11526, 11525, 11533, 11532, 6034, 11541, 11549, 11540, 11550, 11549, 11548, 11550, 11549, 11548, 11571, 11570, 11569, 11584, 11583, 6139, 11571, 11570, 11569, 11576, 11575, 11574, 11584, 11583, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 
204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160, 3161, 3162, 3163, 3164, 3165, 11777, 11943, 11776, 11778, 3170, 3171, 11780, 3173, 3174, 11781, 3199, 8644, 8644, 8644, 3205, 3208, 3211, 8644, 3215, 3216, 3217, 3218, 3219, 3220, 3221, 3222, 3223, 3224, 3226, 3229, 3230, 3231, 3232, 3233, 10540, 10543, 11784, 10548, 3240, 11791, 11790, 11789, 3244, 3245, 3246, 3247, 3248, 11791, 11790, 11789, 3252, 11971, 11971, 11881, 3256, 3257, 3258, 3259, 3260, 3261, 3262, 3264, 3265, 3266, 3267, 3268, 3269, 3270, 3272, 3273, 3274, 3275, 11971, 11971, 11881, 3279, 3280, 11971, 11971, 11881, 3284, 3285, 11971, 11971, 11881, 3289, 3290, 3291, 3292, 3293, 3294, 3295, 3298, 3299, 3300, 3301, 3302, 3303, 3310, 3311, 3312, 3313, 3314, 3315, 3316, 3317, 10570, 3320, 11971, 11788, 11787, 11791, 11790, 11789, 3328, 3329, 3331, 3332, 3333, 3334, 11792, 3336, 3337, 11793, 3339, 3340, 3341, 11958, 3343, 3344, 10602, 10605, 10608, 3348, 3349, 3350, 3351, 3352, 3353, 3354, 3355, 3356, 3357, 3358, 3359, 3360, 3361, 3362, 3363, 11798, 3365, 3366, 11800, 11799, 3369, 3370, 3371, 3372, 3373, 3374, 3375, 3376, 3377, 3378, 3379, 3380, 3381, 3382, 3383, 3384, 8665, 8665, 8665, 8665, 3389, 3392, 3399, 3401, 10643, 10646, 11804, 3405, 3406, 3407, 3408, 3409, 3410, 10656, 3412, 3413, 3415, 3416, 3417, 11806, 11807, 3420, 3421, 3423, 3424, 3426, 3427, 3428, 3429, 3430, 3431, 3432, 3433, 3434, 3435, 3436, 10677, 11809, 3440, 3441, 10684, 11811, 3444, 11812, 3446, 3447, 3448, 3449, 3450, 3451, 3452, 3453, 3454, 3455, 3456, 3457, 3458, 3459, 3460, 3461, 3462, 3463, 3464, 3465, 3466, 3467, 3468, 10694, 10697, 10700, 10703, 3476, 3477, 3479, 3480, 10710, 9466, 11930, 11820, 11819, 3487, 11821, 3489, 3490, 3492, 3493, 3494, 3495, 3497, 3498, 3499, 
3500, 3501, 3502, 3503, 3504, 3506, 3507, 10727, 3509, 3510, 3511, 3512, 3513, 3514, 3515, 3517, 3518, 3519, 3520, 3521, 3522, 3523, 3524, 3525, 3526, 3527, 3529, 3532, 3534, 3536, 3537, 3538, 3539, 11823, 11826, 11825, 11824, 10754, 10757, 3546, 3547, 3548, 3549, 3550, 11829, 3552, 3553, 3554, 3555, 3556, 3557, 11833, 11832, 11831, 3561, 3564, 3565, 3566, 3567, 3568, 3569, 3570, 3571, 3572, 3573, 3574, 3575, 3576, 3577, 3578, 11834, 3580, 3582, 3583, 3584, 3585, 3586, 3685, 11838, 11837, 11836, 10800, 10803, 11841, 11844, 11843, 11842, 10815, 3721, 3722, 11846, 11849, 11848, 11847, 10829, 11909, 11909, 11851, 10834, 10837, 11854, 11906, 11905, 11855, 9606, 10846, 11858, 11861, 11860, 11859, 11864, 11863, 11862, 11865, 11868, 11867, 11866, 11871, 11870, 11869, 11872, 11930, 11930, 11873, 3758, 3759, 3761, 3763, 3766, 3767, 3768, 3769, 3770, 11875, 11888, 11874, 10905, 3775, 11877, 3777, 11880, 11879, 11878, 3781, 3782, 11971, 11971, 11881, 3786, 11882, 11885, 11884, 11883, 3793, 11971, 11971, 11886, 3797, 3798, 3799, 11889, 11888, 11887, 10948, 10951, 3805, 11894, 11893, 11892, 3809, 11971, 11971, 11895, 3813, 11896, 3815, 3816, 3817, 3818, 3819, 3820, 3821, 3822, 3823, 3824, 11899, 11898, 11897, 9751, 11909, 11909, 11901, 10985, 11903, 11906, 11905, 11904, 10997, 11909, 11909, 11908, 11005, 11911, 11912, 11915, 11914, 11913, 11019, 11917, 3873, 11919, 3875, 11923, 11922, 11921, 3879, 3880, 3881, 3882, 11925, 11927, 11924, 3887, 3888, 11928, 11927, 11926, 11930, 11930, 11929, 11933, 11932, 11931, 11934, 11937, 11936, 11935, 11938, 11941, 11940, 11939, 3910, 11944, 11943, 11942, 11945, 3916, 3917, 11947, 11950, 11949, 11948, 11951, 3923, 3924, 11953, 11148, 3927, 3928, 11123, 3930, 3931, 3932, 11958, 11138, 3935, 11956, 3937, 11172, 3939, 3940, 3941, 3942, 3943, 3944, 11130, 3946, 3947, 3948, 3949, 3950, 3951, 3952, 3953, 3954, 3955, 3956, 3957, 11958, 11138, 3960, 11956, 3962, 11172, 3964, 3965, 11148, 3967, 3968, 3969, 11154, 3971, 3972, 3973, 11958, 11162, 3977, 
11961, 3979, 11172, 3981, 3982, 3983, 3984, 3985, 11180, 3987, 3988, 3989, 11965, 11964, 11192, 3994, 11968, 11967, 11201, 11971, 11970, 11208, 11211, 11214, 4003, 4004, 4005, 4006, 4008, 4009, 4011, 4012, 4013, 4188, 4189, 4193, 4194, 4197, 12434, 11995, 11994, 11993, 11471, 4204, 4207, 12439, 4208, 4213, 12445, 4214, 11482, 4220, 11485, 11488, 11277, 12011, 12011, 12017, 12017, 12054, 12054, 12061, 12061, 4660, 4664, 11293, 11277, 5038, 5040, 11293, 5097, 12269, 12268, 12267, 11495, 5106, 5107, 5108, 12272, 12271, 12270, 11500, 5117, 5118, 5119, 12275, 12274, 12273, 11505, 5128, 5129, 5130, 5133, 5136, 5139, 11516, 5145, 5146, 5147, 12428, 12427, 12276, 11521, 5156, 5157, 5167, 12499, 12322, 12321, 12277, 5171, 5172, 5173, 5174, 5175, 5186, 12508, 12322, 12321, 12278, 5190, 5191, 5192, 5193, 5194, 5195, 12408, 12407, 12406, 5201, 5202, 5203, 5204, 5205, 5214, 12525, 12322, 12321, 12320, 5218, 5219, 5220, 5223, 5224, 5228, 5229, 5230, 5231, 5232, 5233, 5234, 5235, 5236, 5237, 5242, 5244, 5245, 12283, 12282, 12281, 5250, 5296, 12553, 12322, 12321, 12320, 11560, 5303, 5304, 12408, 12407, 12323, 11565, 5313, 12401, 12398, 12399, 12404, 12403, 12402, 5392, 5393, 5394, 12401, 12400, 12399, 12404, 12403, 12402, 5406, 5407, 5408, 5409, 12425, 12424, 12405, 12408, 12407, 12406, 5416, 5417, 5418, 5419, 12411, 12410, 12409, 12414, 12413, 12412, 5434, 5435, 5436, 5437, 12417, 12416, 12415, 12428, 12427, 12418, 5444, 5445, 5446, 12425, 12424, 12419, 12422, 12421, 12420, 5454, 5455, 5456, 5457, 12425, 12424, 12423, 12428, 12427, 12426, 5464, 5465, 5466, 12565, 5500, 5501, 5502, 12591, 5528, 5529, 12591, 11587, 12565, 12574, 12591, 5585, 5586, 12592, 5590, 5591, 12497, 6026, 6027, 6028, 12506, 6032, 6033, 12523, 6040, 6041, 6042, 12534, 6048, 6049, 6050, 12591, 6074, 6075, 6076, 12565, 6098, 6099, 6100, 12574, 12591, 6113, 6114, 12592, 12591, 11592, 12565, 6167, 6168, 6169, 12574, 6175, 6176, 6177, 12591, 6187, 6188, 12592, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 
129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 12803, 12805, 12810, 12812, 3166, 3167, 3168, 3169, 3172, 3175, 11240, 3202, 3203, 3204, 11242, 11244, 11246, 3214, 12844, 12846, 3236, 3237, 3238, 3239, 3241, 3242, 3243, 3249, 3250, 3251, 3253, 3254, 3255, 12874, 12879, 12881, 12883, 3276, 3277, 3278, 3281, 3282, 3283, 3286, 3287, 3288, 12909, 12914, 3318, 3322, 3323, 3324, 3325, 3326, 3327, 3335, 3338, 3342, 3345, 3346, 3347, 12949, 12954, 3364, 3367, 3368, 12974, 3385, 3386, 3387, 3388, 11266, 11268, 3402, 3403, 3404, 3411, 3418, 3419, 13014, 13016, 13021, 3438, 3439, 13027, 3442, 3443, 3445, 13033, 13035, 13038, 13043, 13047, 13052, 3469, 3470, 3472, 3474, 13060, 13062, 3482, 3483, 3484, 3485, 3486, 3488, 13073, 13075, 13077, 13083, 3508, 13091, 13093, 13095, 13098, 13100, 13109, 13111, 3540, 3541, 3542, 3543, 3544, 3545, 13119, 13122, 3551, 13126, 13129, 3558, 3559, 3560, 13138, 13140, 13142, 13145, 13148, 3579, 13152, 13155, 3711, 3712, 3713, 3714, 3715, 3716, 3717, 3718, 3719, 3720, 13168, 3723, 3724, 3725, 3726, 3727, 3728, 3729, 3730, 3731, 3732, 3733, 3734, 3735, 3736, 3737, 3738, 3739, 3740, 3741, 3742, 3743, 3744, 3745, 3746, 3747, 3748, 3749, 3750, 3751, 3752, 3753, 3754, 3755, 3756, 13204, 13209, 3771, 3772, 3773, 3774, 3776, 3778, 3779, 3780, 13223, 3783, 3784, 3785, 3787, 3790, 3791, 3792, 3794, 3795, 3796, 3800, 3801, 3802, 3803, 3804, 3806, 3807, 3808, 3810, 3811, 3812, 
3814, 13258, 13260, 13262, 3847, 3848, 3849, 3850, 3851, 3852, 3853, 3854, 3856, 3858, 3859, 3860, 3861, 3862, 3863, 3864, 3865, 3866, 3867, 3868, 3869, 3870, 3871, 3872, 3874, 3876, 3877, 3878, 12332, 3884, 3885, 3886, 12335, 3890, 3891, 3892, 3894, 3895, 3896, 3898, 3899, 3900, 3901, 3902, 3903, 3904, 3905, 3906, 3907, 3908, 3912, 3913, 3914, 3915, 3918, 3919, 3920, 3921, 3922, 3925, 3926, 13337, 3929, 13340, 3933, 3934, 3936, 3938, 13352, 3945, 13356, 13361, 13363, 13365, 3958, 3959, 3961, 3963, 3966, 13377, 3970, 13381, 3975, 3976, 3978, 3980, 13393, 3986, 13396, 3991, 3992, 3993, 3995, 3996, 3997, 3998, 3999, 4000, 4001, 4002, 13413, 13415, 13417, 12800, 12807, 13420, 13423, 4198, 4199, 4200, 11472, 13430, 12442, 13433, 12448, 11483, 11486, 11489, 13206, 13205, 11293, 13080, 11293, 13127, 13071, 13088, 13206, 13205, 12832, 13237, 12951, 12833, 13349, 13153, 12834, 13127, 12835, 13206, 13205, 13019, 13349, 13153, 4346, 11276, 13136, 13023, 13127, 11248, 11248, 11248, 11248, 13206, 13205, 4386, 12842, 4388, 12870, 12906, 12951, 4396, 12847, 4398, 13206, 13205, 11293, 13080, 13088, 11293, 13127, 12876, 13206, 13205, 12903, 13349, 13153, 12870, 13127, 12906, 12951, 12910, 13206, 13205, 11253, 11253, 11253, 11293, 13127, 11253, 11293, 13080, 11253, 13088, 11253, 11253, 11253, 13071, 11293, 12876, 11293, 11293, 13206, 13205, 13071, 13088, 11293, 13127, 11293, 13080, 12903, 4591, 12905, 4593, 12906, 12951, 12910, 4600, 12911, 4602, 12063, 12063, 12063, 12062, 12063, 12063, 12063, 12064, 13206, 13205, 12912, 13127, 12917, 12915, 12916, 12917, 12951, 12918, 13349, 13153, 12919, 11293, 11293, 11293, 11293, 12947, 12947, 12947, 13153, 12950, 12951, 13206, 13205, 11293, 13349, 13153, 11293, 13127, 4735, 13136, 13349, 13237, 13136, 12127, 12127, 12127, 12127, 12125, 12126, 12127, 12127, 13206, 13205, 11273, 13071, 11273, 11273, 13088, 11273, 11293, 11273, 11273, 11273, 11273, 13136, 13374, 13349, 13390, 13390, 13206, 13205, 13374, 13390, 13237, 13136, 13036, 13349, 13049, 
13048, 13390, 11277, 4878, 11276, 11277, 13018, 13019, 13153, 13023, 13206, 13205, 11278, 13088, 11278, 11293, 13080, 11278, 11278, 11293, 13127, 11278, 13071, 11278, 11278, 11278, 13237, 13255, 13036, 13374, 13040, 13039, 13390, 13045, 13044, 13349, 13049, 13048, 13390, 13054, 13053, 13206, 13205, 11288, 13071, 11288, 11293, 13080, 11288, 11293, 13127, 11288, 13088, 13107, 11288, 11293, 11288, 11293, 13153, 11288, 11288, 13107, 13206, 13205, 13120, 11293, 13127, 11293, 5078, 11293, 13136, 11293, 13146, 11293, 13349, 13153, 12459, 5100, 5101, 5102, 11496, 13462, 12465, 5111, 5112, 5113, 11501, 13469, 12471, 5122, 5123, 5124, 11506, 13476, 12477, 12480, 12483, 12486, 11517, 13483, 12492, 5150, 5151, 5152, 11522, 13490, 13491, 5168, 5169, 5170, 13497, 13500, 13501, 5187, 5188, 5189, 13507, 13510, 12516, 5198, 5199, 5200, 13516, 13519, 13520, 5215, 5216, 5217, 13526, 12530, 13529, 13531, 13534, 13536, 13539, 13156, 5247, 5248, 5249, 13206, 13205, 13237, 13255, 13547, 5297, 5298, 5299, 11561, 12558, 5307, 5308, 5309, 11566, 13294, 13320, 13349, 13358, 13390, 13410, 5386, 5387, 5388, 5389, 5390, 5391, 13567, 5400, 5401, 5402, 5403, 5404, 5405, 13576, 5410, 5411, 5412, 5413, 5414, 5415, 13586, 5428, 5429, 5430, 5431, 5432, 5433, 13596, 5438, 5439, 5440, 5441, 5442, 5443, 13606, 5448, 5449, 5450, 5451, 5452, 5453, 13615, 5458, 5459, 5460, 5461, 5462, 5463, 13625, 5499, 13629, 5527, 13633, 5556, 11588, 5574, 5575, 5584, 5587, 13640, 13643, 6025, 13646, 6031, 13650, 6039, 13653, 6047, 13657, 6073, 13661, 6097, 13665, 6103, 6112, 6115, 13670, 6138, 11593, 6166, 13676, 6174, 13680, 6186, 6189, 13684, 250, 251, 252, 253, 254, 255, 12806, 12813, 13829, 13832, 13833, 11241, 11243, 11245, 11247, 13849, 13852, 13855, 13862, 13865, 13868, 13874, 13877, 13889, 11267, 11269, 13927, 13929, 13944, 13954, 13965, 13971, 13977, 13981, 13987, 13993, 13996, 14000, 14003, 14007, 14012, 14015, 14017, 14021, 14025, 14028, 14031, 14036, 14039, 13263, 14046, 14050, 14055, 14059, 14065, 14071, 
14075, 14079, 14082, 14085, 14089, 14093, 14096, 14099, 14101, 14104, 13341, 14111, 13353, 13357, 13366, 14121, 13378, 13382, 14129, 13397, 14135, 14137, 14141, 13418, 4182, 13824, 4185, 13826, 14153, 12443, 12449, 13921, 13920, 13919, 13923, 13922, 13207, 4246, 4247, 13835, 4251, 4252, 13963, 13933, 13836, 13837, 4257, 13374, 13952, 4260, 13951, 4263, 13932, 13931, 13930, 4269, 13937, 13936, 13935, 13841, 13906, 13908, 13946, 13947, 13207, 4292, 4293, 14009, 4296, 4299, 4300, 13904, 13958, 13886, 14109, 4305, 4306, 13963, 4308, 13885, 14119, 4311, 13374, 13952, 4314, 13951, 4316, 13906, 13908, 13946, 13947, 13207, 4335, 4336, 14109, 4339, 4340, 13963, 4342, 13905, 4348, 4351, 13958, 13903, 13956, 14127, 14119, 4357, 13374, 13952, 4360, 13951, 13906, 4363, 4364, 4365, 4366, 13906, 13946, 13947, 13207, 4383, 4384, 14009, 4387, 4389, 4391, 4392, 13843, 13842, 13937, 4397, 13921, 13920, 13919, 13974, 13984, 13990, 13207, 4419, 4420, 13879, 4423, 4424, 13963, 13933, 4429, 13937, 13936, 13935, 13881, 4435, 13374, 13952, 4438, 13951, 4444, 13860, 13859, 13858, 13906, 13908, 13946, 13947, 13207, 4466, 4467, 14009, 13961, 4470, 4471, 13963, 4473, 13885, 13950, 4476, 13374, 13952, 4479, 13951, 4482, 4483, 13857, 13904, 13958, 4487, 13921, 13920, 13919, 13923, 13922, 13984, 13207, 4506, 4507, 4509, 4510, 4511, 4512, 13374, 13952, 4515, 13951, 4517, 4518, 4519, 13963, 13933, 4522, 4523, 13937, 13936, 13935, 4527, 4528, 4529, 4530, 13932, 13931, 13930, 4534, 4535, 13860, 13859, 13858, 4539, 4540, 13921, 13920, 13919, 13923, 13922, 13984, 13990, 13207, 4562, 4563, 4566, 13932, 13931, 13930, 13881, 4574, 13937, 13936, 13935, 13950, 4579, 13374, 13952, 4582, 13951, 13879, 4585, 4586, 13963, 13933, 14127, 4590, 4592, 4594, 4595, 13960, 13870, 13959, 4599, 4601, 4603, 4604, 4605, 4606, 4607, 4608, 4609, 4610, 13906, 13908, 13946, 13947, 13207, 4629, 4630, 14009, 14119, 4633, 13374, 13952, 4636, 13951, 4638, 4641, 4643, 4644, 4645, 13904, 13958, 13903, 14109, 4650, 4651, 13963, 
4653, 13905, 4655, 13921, 13872, 14127, 4668, 13879, 4670, 13880, 4672, 13950, 13881, 4675, 14119, 4679, 4680, 4681, 13963, 4683, 13885, 4685, 4686, 13904, 13958, 13886, 14119, 14109, 14127, 13968, 13974, 13946, 13947, 13207, 4718, 4719, 14109, 4722, 4723, 13963, 4725, 13962, 4728, 13374, 13952, 4731, 13951, 4738, 13958, 13956, 13890, 14127, 14112, 4749, 4751, 14034, 14041, 4754, 14122, 14130, 13891, 13892, 13893, 13894, 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, 13921, 13920, 13919, 13923, 13922, 13984, 13990, 13207, 4792, 4793, 4795, 4796, 13932, 13931, 13930, 4800, 4801, 4802, 13937, 13936, 13935, 4806, 4807, 13374, 4809, 4810, 4811, 4812, 13908, 14034, 4817, 4818, 14109, 4820, 4821, 4822, 13906, 13908, 13946, 13990, 13207, 4841, 4842, 14009, 14122, 4846, 4847, 14034, 4852, 13911, 14041, 4859, 4860, 13913, 13912, 14109, 14112, 4866, 4867, 4868, 13916, 14130, 4872, 13968, 13974, 4875, 4880, 4883, 13904, 13958, 13903, 14127, 4888, 14109, 4890, 13963, 4892, 13905, 14119, 4895, 13921, 13920, 13919, 13923, 13922, 13984, 13990, 13207, 4915, 4916, 4918, 4919, 13937, 13936, 13935, 4923, 4924, 4925, 13963, 13933, 4928, 4929, 4930, 13374, 13952, 4933, 13951, 4935, 4936, 13932, 13931, 13930, 4940, 4941, 4942, 13906, 13908, 13946, 13911, 14034, 4955, 4958, 4959, 13913, 13912, 14119, 14122, 4965, 4966, 4967, 13914, 4969, 4970, 4971, 13915, 14109, 14112, 4976, 4977, 4978, 13916, 4980, 4981, 4982, 13917, 13921, 13920, 13919, 13923, 13922, 13984, 13990, 13207, 5003, 5004, 5006, 5007, 13932, 13931, 13930, 5011, 5012, 5013, 13963, 13933, 5016, 5017, 13374, 13952, 5020, 13951, 5022, 5023, 13937, 13936, 13935, 5027, 13959, 13939, 13938, 5031, 5032, 5033, 5034, 13963, 5036, 5037, 5039, 5041, 13941, 13957, 13940, 13968, 13974, 13946, 13947, 13207, 5063, 5064, 14009, 13949, 5067, 13948, 13950, 5070, 13374, 13952, 5073, 13951, 5075, 5081, 5082, 13958, 13957, 13956, 14127, 5087, 13960, 5089, 13959, 13961, 5092, 5093, 13963, 5095, 13962, 12460, 14419, 12466, 14425, 12472, 14431, 
12478, 12481, 12484, 12487, 12493, 14443, 14449, 13498, 14455, 13508, 12517, 14461, 13517, 14467, 12531, 13532, 13537, 5241, 14478, 13968, 13974, 13984, 13990, 13207, 5271, 5272, 14009, 14041, 14023, 5284, 14034, 14041, 5290, 14043, 14042, 14486, 12559, 14491, 12324, 12325, 14062, 14069, 14068, 5330, 13325, 13296, 13301, 5344, 13390, 14107, 14106, 14109, 14112, 5356, 14114, 5360, 14117, 14116, 14119, 14122, 13374, 14125, 14127, 14130, 5374, 14132, 14131, 14143, 5382, 14146, 14145, 14501, 14504, 13568, 14508, 14511, 13577, 14515, 14518, 13587, 14522, 14525, 13597, 14529, 14532, 13607, 14536, 14539, 13616, 14543, 14546, 13626, 13630, 14488, 14493, 12598, 14155, 14160, 14493, 14558, 14161, 14162, 12604, 14421, 14427, 14433, 14439, 14445, 13647, 12610, 13654, 13658, 13662, 13666, 14575, 14488, 14493, 13677, 13681, 14584, 14553, 14577, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 13830, 13850, 
13853, 13856, 13863, 13866, 13869, 13875, 13878, 13928, 13945, 13955, 13966, 13972, 13978, 13982, 13988, 13994, 13997, 14001, 14004, 14008, 14013, 14018, 14022, 14026, 14029, 14032, 14037, 14040, 14047, 14051, 14056, 14060, 14066, 14072, 14076, 14080, 14083, 14086, 14090, 14094, 14097, 14102, 14138, 14592, 4184, 14593, 4187, 12818, 12821, 14154, 4234, 4235, 4236, 4238, 4239, 4245, 14613, 14597, 4250, 4253, 4254, 4255, 4256, 4258, 4259, 4261, 14598, 4264, 4265, 4266, 14599, 14600, 4270, 4271, 4272, 4273, 4275, 4277, 4280, 4283, 13332, 4291, 4294, 4301, 4302, 4303, 4304, 4307, 4309, 4310, 4312, 4313, 4315, 4318, 4320, 4323, 4326, 12964, 4334, 14613, 4338, 4341, 4343, 4352, 4353, 4354, 4355, 4356, 4358, 4359, 4361, 4362, 4368, 4371, 4374, 13332, 4382, 4385, 14766, 4393, 4394, 4395, 14773, 4400, 4401, 4402, 4404, 4407, 4410, 13332, 4418, 14613, 4422, 4425, 4426, 4430, 4431, 4432, 4433, 14609, 4436, 4437, 4439, 4445, 4446, 4447, 4449, 4451, 4454, 4457, 13332, 4465, 4468, 4469, 4472, 4474, 4475, 4477, 4478, 4480, 4484, 4485, 4486, 4489, 4490, 4491, 4493, 4494, 4497, 13332, 4505, 14613, 4513, 4514, 4516, 4520, 4521, 4524, 4525, 4526, 4531, 4532, 4533, 4536, 4537, 4538, 4542, 4543, 4544, 4546, 4547, 4550, 4553, 13332, 4561, 14613, 4567, 4568, 4569, 4570, 4575, 4576, 4577, 4578, 4580, 4581, 4583, 4584, 4587, 4588, 4589, 14901, 4596, 4597, 4598, 14908, 4612, 4614, 4617, 4620, 12964, 4628, 4631, 4632, 4634, 4635, 4637, 4646, 4647, 4648, 4649, 4652, 4654, 4656, 4657, 4667, 4669, 4671, 4673, 4674, 4678, 4682, 4684, 4687, 4688, 4689, 4690, 4698, 4699, 4701, 4703, 4706, 4709, 12964, 4717, 14613, 4721, 4724, 4726, 14609, 4729, 4730, 4732, 4739, 4740, 4741, 4742, 4747, 14653, 14627, 4752, 4753, 4755, 14657, 4757, 14660, 14611, 4760, 4761, 4762, 14611, 4764, 14610, 14611, 4775, 4776, 4777, 4778, 4779, 4782, 4785, 4791, 14613, 4797, 4798, 4799, 4803, 4804, 4805, 4808, 4813, 4816, 4819, 4824, 4826, 4829, 4832, 13332, 4840, 4843, 4844, 14657, 4849, 4855, 14627, 4858, 4861, 4862, 4863, 
4864, 14653, 4869, 15074, 4870, 14660, 4873, 4874, 4884, 4885, 4886, 4887, 4889, 4891, 4893, 4894, 4897, 4898, 4899, 4901, 4902, 4905, 4908, 4914, 14613, 4920, 4921, 4922, 4926, 4927, 4931, 4932, 4934, 4937, 4938, 4939, 4943, 4944, 4947, 4950, 4952, 14627, 4960, 4961, 4962, 4963, 14657, 4968, 15144, 4972, 15148, 4973, 4974, 14653, 4979, 15154, 4983, 15158, 4985, 4986, 4987, 4989, 4990, 4993, 4996, 5002, 14613, 5008, 5009, 5010, 5014, 5015, 5018, 5019, 5021, 5024, 5025, 5026, 5028, 5029, 5030, 5035, 5042, 5043, 5044, 5046, 5048, 5051, 5054, 13332, 5062, 5065, 5066, 5068, 5069, 5071, 5072, 5074, 5083, 5084, 5085, 5086, 5088, 5090, 5091, 5094, 5096, 14420, 14426, 14432, 14444, 14450, 14456, 14462, 14468, 14479, 5256, 5258, 5261, 5264, 5270, 5273, 5276, 14627, 5281, 5286, 5289, 14635, 5292, 5293, 14487, 14492, 5317, 5318, 5321, 5323, 5324, 5331, 5332, 5338, 13325, 13332, 5349, 14652, 5351, 5352, 5353, 5354, 14653, 14655, 5358, 14654, 14656, 5362, 5363, 5364, 5365, 14657, 5367, 14659, 5369, 14658, 5371, 5372, 14660, 14661, 5376, 5377, 14662, 14664, 5381, 14665, 5384, 5385, 14502, 14505, 14509, 14512, 14516, 14519, 14523, 14526, 14530, 14533, 14537, 14540, 14544, 14547, 15320, 15326, 5524, 5526, 5550, 14158, 14156, 5553, 5555, 15329, 15332, 15335, 15338, 5588, 5589, 14837, 14838, 14856, 14839, 14845, 14850, 14856, 14855, 14857, 14856, 14909, 14910, 14913, 14911, 14913, 14912, 14914, 14913, 14915, 14916, 15007, 15013, 15008, 15009, 15010, 15013, 15011, 15012, 15014, 15013, 15025, 15040, 15030, 15031, 15040, 15036, 15039, 15041, 15040, 15042, 15105, 15127, 15110, 15115, 15116, 15122, 15127, 15128, 15127, 15129, 15170, 15201, 15175, 15180, 15186, 15201, 15195, 15197, 15202, 15201, 6015, 6017, 6019, 15249, 15247, 6022, 6024, 15254, 15256, 15259, 15323, 15263, 15262, 15263, 15320, 15323, 15326, 15329, 15332, 15335, 15338, 6135, 6137, 15320, 15323, 15326, 15329, 15332, 15335, 15338, 14549, 14551, 6219, 14555, 14556, 14557, 14569, 14561, 14563, 14565, 14567, 14569, 14571, 
14573, 14574, 6491, 14579, 14581, 14583, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 4183, 4186, 15616, 4191, 4192, 15628, 15669, 15629, 15672, 15636, 15633, 15637, 15625, 15634, 4248, 14679, 4249, 15678, 14691, 4262, 15686, 4267, 4268, 15691, 15628, 15629, 15631, 15626, 15649, 15632, 15659, 4285, 15636, 15637, 15633, 15634, 15635, 14707, 15700, 15638, 15642, 15641, 15702, 14720, 14726, 15628, 15629, 15631, 15626, 15649, 15632, 15659, 4328, 15634, 15633, 15635, 15637, 15636, 4337, 14734, 14740, 15640, 15639, 15638, 15645, 15644, 15722, 14752, 15628, 15631, 15630, 15649, 15632, 15659, 4376, 15636, 15635, 15637, 15634, 15633, 14763, 15735, 14200, 15643, 15738, 14206, 15628, 15742, 15629, 15631, 15626, 15649, 15632, 15659, 4412, 15637, 15636, 15634, 15625, 15633, 4421, 14781, 15752, 15622, 15617, 15754, 4434, 14796, 15621, 15624, 15619, 15618, 15762, 15628, 15629, 15631, 15630, 15649, 15632, 15659, 4459, 15635, 15636, 15637, 15633, 15634, 14807, 15770, 14814, 14820, 15643, 15779, 15628, 15782, 15629, 15785, 15631, 15626, 15659, 4499, 15625, 15637, 15636, 15633, 15634, 4508, 14835, 14843, 15794, 15796, 15799, 15802, 15628, 15805, 15629, 15808, 15631, 15626, 15649, 15632, 15659, 4555, 15634, 15636, 15637, 15633, 15625, 4564, 14877, 15620, 15815, 15621, 15645, 15622, 15819, 14892, 15827, 14256, 15831, 14262, 15628, 15629, 15631, 15626, 15649, 15632, 
15659, 4622, 15636, 15635, 15633, 15634, 15637, 14922, 15840, 14929, 15640, 15639, 15638, 15846, 14943, 15852, 15645, 15644, 15640, 15624, 15623, 15645, 15644, 15628, 15629, 14962, 15862, 15645, 15644, 15640, 15639, 15638, 15645, 15644, 15628, 15629, 15631, 15626, 15649, 15632, 15659, 4711, 15637, 15635, 15633, 15636, 15634, 4720, 14977, 14983, 4727, 14988, 15640, 15627, 15645, 15644, 15882, 15628, 15629, 15631, 15630, 4748, 4750, 4756, 4758, 4759, 4763, 4765, 4766, 15903, 15906, 15631, 15626, 15649, 15632, 15633, 15637, 15636, 15625, 15634, 4794, 15023, 15912, 15915, 15631, 15630, 15628, 15629, 15631, 15630, 15649, 15632, 15659, 4834, 15635, 15633, 15634, 15636, 15637, 15056, 15927, 4845, 15643, 15642, 15641, 15640, 15639, 15638, 4857, 15934, 4865, 15939, 4871, 15640, 15639, 15638, 15645, 15644, 15946, 15091, 15628, 15954, 15629, 15957, 15631, 15626, 15649, 15632, 15633, 15636, 15634, 15625, 15637, 4917, 15103, 15963, 15966, 15120, 15971, 15631, 15626, 15640, 15639, 15643, 15642, 15641, 15638, 4957, 15979, 4964, 15984, 15986, 4975, 15991, 15993, 15628, 15996, 15629, 15999, 15631, 15626, 15649, 15632, 15633, 15625, 15637, 15634, 15636, 5005, 15168, 16005, 16008, 15184, 16013, 16016, 15200, 16020, 15628, 15629, 15631, 15626, 15649, 15632, 15659, 5056, 15635, 15633, 15637, 15636, 15634, 15212, 16028, 15216, 15222, 15640, 15627, 15645, 15644, 16036, 15233, 15239, 15628, 15629, 15631, 15630, 15649, 15632, 15635, 15633, 15636, 15634, 15637, 15271, 16058, 15645, 15644, 15638, 5278, 15640, 15639, 15642, 15641, 15643, 15645, 15644, 5291, 15647, 15646, 15649, 15648, 15650, 16073, 15655, 15651, 15654, 15656, 15657, 15654, 15652, 15655, 15656, 15657, 15655, 15657, 15656, 15653, 15654, 15658, 5346, 15659, 5348, 5350, 5355, 5357, 5359, 5361, 5366, 5368, 5370, 5373, 5375, 5378, 15660, 5380, 5383, 16111, 5498, 5503, 14484, 15283, 14151, 5551, 5552, 15283, 16117, 5577, 16119, 5579, 16121, 5581, 16123, 5583, 15676, 15679, 15680, 15693, 15704, 15707, 16099, 15718, 15724, 15725, 
15981, 15988, 16099, 15858, 15750, 15756, 15948, 15771, 15774, 16099, 5708, 5709, 5710, 5711, 5713, 5715, 5716, 5718, 5719, 5720, 15856, 15857, 15855, 15853, 15817, 15821, 15825, 15828, 15920, 16099, 5759, 5760, 5761, 5762, 5763, 5764, 5765, 5766, 5767, 5768, 15841, 15848, 16099, 15853, 15855, 15854, 15856, 15857, 15858, 16099, 15920, 15864, 15865, 15866, 15874, 15884, 15895, 15896, 15897, 15899, 5855, 5856, 5857, 5858, 5859, 5860, 5861, 5862, 5863, 5864, 5872, 5873, 5875, 5876, 5877, 5879, 5881, 5882, 5883, 5884, 15948, 15949, 15952, 5941, 5942, 5944, 5946, 5947, 5949, 5950, 5952, 5953, 5954, 5979, 5980, 5982, 5984, 5986, 5987, 5990, 5991, 5993, 5994, 16031, 16038, 16041, 15241, 15243, 15245, 6020, 6021, 15251, 14447, 6030, 14453, 6036, 15257, 6038, 14465, 6044, 15261, 6046, 14484, 6070, 15283, 6072, 16111, 6096, 6101, 6102, 16052, 6105, 16119, 6107, 16121, 6109, 16123, 6111, 14484, 15283, 16111, 6165, 16113, 6171, 16115, 6173, 16117, 6179, 16119, 6181, 16121, 6183, 16123, 6185, 6198, 6209, 6227, 6228, 6233, 6236, 6446, 6449, 6452, 6455, 6464, 6474, 6477, 6482, 6501, 6504, 6509, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 15662, 15664, 4190, 4233, 15670, 4237, 4240, 4241, 4242, 4243, 4244, 16398, 15683, 15687, 15692, 4274, 4276, 4278, 4279, 4281, 4282, 4284, 4286, 4287, 4288, 4289, 4290, 16422, 4295, 4297, 4298, 15703, 15706, 15710, 4317, 4319, 4321, 4322, 4324, 4325, 4327, 4329, 4330, 4331, 4332, 4333, 16442, 15720, 4344, 
4345, 4347, 4349, 4350, 15723, 15728, 4367, 4369, 4370, 4372, 4373, 4375, 4377, 4378, 4379, 4380, 4381, 16465, 4390, 15739, 4399, 15743, 4403, 4405, 4406, 4408, 4409, 4411, 4413, 4414, 4415, 4416, 4417, 16484, 4427, 4428, 15755, 15760, 4440, 4441, 4442, 4443, 15763, 4448, 4450, 4452, 4453, 4455, 4456, 4458, 4460, 4461, 4462, 4463, 4464, 16511, 15773, 15777, 4481, 15780, 4488, 15783, 4492, 4495, 4496, 4498, 4500, 4501, 4502, 4503, 4504, 16529, 15792, 15797, 15800, 15803, 4541, 15806, 4545, 4548, 4549, 4551, 4552, 4554, 4556, 4557, 4558, 4559, 4560, 16551, 4565, 15816, 4571, 4572, 4573, 15820, 15824, 15832, 4611, 4613, 4615, 4616, 4618, 4619, 4621, 4623, 4624, 4625, 4626, 4627, 16578, 15844, 4639, 4640, 4642, 15847, 15850, 4658, 4659, 4661, 4662, 4663, 4665, 4666, 4676, 4677, 15860, 15863, 4691, 4692, 4693, 4694, 4695, 4696, 4697, 4700, 4702, 4704, 4705, 4707, 4708, 4710, 4712, 4713, 4714, 4715, 4716, 16617, 15876, 15880, 4733, 4734, 4736, 4737, 15883, 4743, 4744, 4745, 4746, 16631, 16633, 16634, 15904, 4780, 4781, 4783, 4784, 4786, 4787, 4788, 4789, 4790, 16650, 15913, 15916, 4814, 4815, 4823, 4825, 4827, 4828, 4830, 4831, 4833, 4835, 4836, 4837, 4838, 4839, 16670, 16671, 4848, 4850, 4851, 4853, 4854, 4856, 15935, 16680, 16682, 4876, 4877, 4879, 4881, 4882, 15947, 15951, 4896, 15955, 4900, 4903, 4904, 4906, 4907, 4909, 4910, 4911, 4912, 4913, 16703, 15964, 15969, 15972, 4945, 4946, 4948, 4949, 4951, 4953, 4954, 4956, 15980, 16719, 16722, 4984, 15997, 4988, 4991, 4992, 4994, 4995, 4997, 4998, 4999, 5000, 5001, 16738, 16006, 16011, 16014, 16017, 16021, 5045, 5047, 5049, 5050, 5052, 5053, 5055, 5057, 5058, 5059, 5060, 5061, 16761, 16030, 16034, 5076, 5077, 5079, 5080, 16037, 16040, 16043, 5255, 5257, 5259, 5260, 5262, 5263, 5265, 5266, 5267, 5268, 5269, 16783, 5274, 5275, 5277, 5279, 5280, 5282, 5283, 5285, 5287, 5288, 16065, 5315, 5316, 5319, 5320, 5322, 5325, 5326, 5327, 5328, 5329, 5333, 5334, 5335, 5336, 5337, 5339, 5340, 5341, 5342, 5343, 5345, 5347, 16081, 16822, 
16087, 16090, 16826, 16097, 16829, 16103, 5379, 16109, 5497, 5523, 5525, 5549, 16842, 5554, 16833, 5576, 5578, 5580, 5582, 16400, 16405, 5602, 14683, 5604, 5605, 16403, 16405, 16406, 16405, 5613, 5624, 5626, 5628, 5636, 5643, 5644, 5651, 5652, 5653, 15237, 5662, 15311, 5673, 14785, 5678, 5679, 16490, 5694, 5696, 5700, 16874, 14847, 16878, 16881, 5722, 5723, 5725, 5727, 5738, 5744, 5746, 14896, 5748, 5753, 15237, 5757, 15311, 16894, 16896, 16899, 5776, 5783, 5785, 5793, 5794, 5795, 5796, 5797, 5802, 5803, 5804, 5808, 5813, 5814, 5822, 16620, 5830, 14996, 16635, 16637, 5847, 5848, 5849, 16636, 16637, 5852, 16638, 16637, 16923, 16927, 16931, 16933, 16936, 16940, 15046, 15048, 15049, 15050, 15060, 15061, 15072, 15077, 5930, 5931, 5933, 16946, 15112, 16951, 16953, 15142, 15146, 15152, 15156, 16956, 15177, 16960, 15311, 16964, 6004, 6010, 6012, 6014, 6016, 6018, 16972, 6023, 6029, 6035, 6037, 6043, 6045, 6069, 6071, 16833, 6095, 6104, 6106, 6108, 6110, 6134, 6136, 16833, 6164, 6170, 6172, 6178, 6180, 6182, 6184, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 
229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 17159, 17161, 17163, 17170, 17172, 17175, 17177, 17182, 17189, 17191, 17194, 17196, 17198, 17201, 17204, 17209, 17211, 17214, 17216, 17225, 17227, 17230, 17232, 17234, 17236, 17240, 17242, 17247, 17249, 17252, 17254, 17265, 17268, 17270, 17272, 17281, 17283, 17286, 17288, 17290, 17302, 17304, 17307, 17309, 17314, 17319, 17321, 17324, 17330, 17332, 17335, 17339, 17341, 17344, 17346, 17348, 17352, 17354, 17359, 17365, 17367, 17369, 17371, 17373, 17377, 17381, 17383, 17386, 17388, 17394, 17396, 17402, 17405, 17412, 17414, 17416, 17418, 17420, 17425, 17427, 17430, 17439, 17441, 17443, 17445, 17447, 17456, 17458, 17461, 17463, 17469, 17471, 17478, 17480, 17482, 17484, 17488, 17491, 17493, 17496, 16066, 17499, 17501, 17504, 17506, 17509, 17511, 17514, 17516, 16082, 16824, 16091, 16828, 16104, 16110, 17502, 17518, 17519, 17494, 17489, 14668, 14666, 16083, 16092, 17476, 17475, 17518, 17518, 17154, 16092, 17489, 17494, 16083, 17476, 17475, 17518, 17519, 16083, 17489, 17494, 16092, 17502, 17518, 17519, 5571, 16831, 17157, 17155, 17518, 17459, 5600, 5601, 5603, 15681, 5607, 5608, 14693, 5610, 5611, 14697, 17168, 17167, 17518, 17173, 17180, 14712, 14718, 15708, 17187, 17186, 17518, 17192, 14738, 17202, 14743, 15726, 17454, 17453, 17431, 17454, 17207, 17518, 17212, 5661, 17219, 14769, 5665, 17223, 17221, 17518, 17228, 5674, 14788, 5680, 15758, 14798, 17245, 17244, 17518, 17250, 14812, 15775, 17259, 14823, 17263, 17261, 17518, 17266, 15790, 5714, 14851, 14858, 14863, 17279, 17277, 17518, 17284, 17291, 17294, 14879, 17293, 17294, 17295, 17294, 14884, 15822, 5747, 17454, 17453, 5754, 17428, 14903, 5758, 17300, 17299, 17518, 17305, 15842, 17315, 14935, 14941, 17454, 17453, 17597, 17326, 17325, 15237, 17428, 14965, 17333, 17337, 17336, 17518, 17342, 14981, 5824, 15878, 14990, 17357, 17356, 15920, 5836, 17428, 17431, 15000, 15981, 16099, 5845, 5846, 
5850, 5851, 5853, 5854, 17454, 17453, 17518, 17459, 15026, 15032, 15917, 17454, 17453, 17431, 17428, 15045, 5896, 15920, 5898, 5899, 5900, 17379, 17378, 17518, 17384, 15981, 5909, 5910, 17392, 17397, 15066, 15936, 5918, 16099, 5920, 17454, 17453, 17403, 15226, 15237, 17410, 17408, 17518, 17459, 15106, 5945, 15967, 15123, 17454, 17453, 17431, 17428, 15136, 15981, 5967, 5968, 15988, 5970, 5971, 17437, 17435, 17518, 17459, 15171, 5983, 16009, 15187, 15191, 5992, 15203, 17454, 17453, 17518, 17459, 15311, 16032, 15226, 16079, 15237, 17476, 17475, 17518, 17519, 16092, 17489, 17494, 16083, 17502, 17518, 17519, 16092, 16083, 6092, 16831, 17476, 17475, 17518, 17519, 16083, 16092, 17494, 17489, 17502, 17518, 17519, 16083, 16092, 16099, 6161, 16831, 17676, 17530, 17676, 17675, 17532, 17531, 17535, 17534, 17533, 17540, 17539, 17538, 17537, 17664, 17663, 17654, 17655, 17653, 17657, 17656, 17662, 17658, 17660, 17659, 17662, 17661, 17664, 17663, 17676, 17666, 17676, 17675, 17670, 17669, 17668, 17667, 17672, 17671, 17676, 17674, 17676, 17675, 17680, 17679, 17678, 17677, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 17921, 17926, 17931, 17938, 17942, 17950, 17953, 17958, 17963, 17322, 17974, 17982, 17988, 17996, 18004, 18009, 18015, 18024, 18026, 18028, 5479, 18013, 18012, 5483, 5484, 5485, 18017, 18019, 18016, 5489, 18018, 5491, 5492, 5493, 15300, 5495, 16095, 18013, 18012, 5506, 5507, 5509, 5510, 5511, 5512, 16095, 18016, 18019, 5516, 18017, 5518, 18018, 15279, 5521, 15300, 18013, 18012, 5533, 5534, 5536, 5537, 5538, 15300, 18019, 18017, 18016, 5543, 18018, 5545, 15279, 5547, 16095, 5560, 18022, 
18022, 18021, 5565, 5569, 5572, 15315, 17980, 17979, 5595, 5596, 5598, 5599, 18071, 5606, 18075, 5609, 18078, 5612, 17924, 17923, 5616, 5617, 5619, 5620, 17927, 5622, 5623, 5625, 5627, 17929, 17928, 5631, 5632, 5634, 5635, 5637, 17934, 5639, 17933, 17934, 5642, 5645, 18007, 17998, 5648, 5649, 5650, 17936, 17935, 5656, 5657, 5659, 5660, 5663, 5664, 17940, 17939, 5668, 5669, 5671, 5672, 17944, 17992, 5677, 5681, 17945, 17992, 17946, 17992, 5686, 17948, 17947, 5689, 5690, 5692, 5693, 5695, 5697, 5698, 5699, 17980, 17951, 5703, 5704, 5706, 5707, 5712, 5717, 5721, 5724, 17967, 17956, 17955, 5730, 5731, 5733, 5734, 5735, 5736, 5737, 5739, 5740, 5741, 5742, 5743, 5745, 18007, 17978, 5751, 5752, 5755, 5756, 17961, 17960, 5771, 5772, 5774, 5775, 5777, 18016, 5779, 17964, 18019, 5782, 5784, 18007, 18006, 5788, 5789, 17967, 17965, 18007, 17998, 5800, 5801, 5805, 5806, 5807, 17970, 5810, 17969, 17968, 17972, 17971, 5817, 5818, 5820, 5821, 5823, 5825, 17977, 17976, 17977, 5829, 18007, 17978, 5833, 5834, 5835, 18016, 5838, 18019, 18000, 5841, 5842, 5843, 5844, 18188, 18190, 18192, 17980, 17979, 5867, 5868, 5870, 5871, 5874, 5878, 5880, 18007, 17984, 5887, 5888, 5889, 18000, 5891, 18016, 18019, 17999, 5895, 5897, 17986, 17985, 5903, 5904, 5906, 5907, 5908, 5911, 17990, 18016, 17989, 5915, 5916, 5917, 5919, 18007, 17998, 5923, 5924, 17992, 5926, 17991, 17992, 5929, 5932, 17994, 17993, 5936, 5937, 5939, 5940, 5943, 5948, 5951, 18007, 17998, 5957, 5958, 5959, 18000, 18016, 5962, 18019, 17999, 5965, 5966, 5969, 18002, 18001, 5974, 5975, 5977, 5978, 5981, 5985, 5988, 5989, 5995, 18007, 18006, 5998, 5999, 6001, 6002, 6003, 6005, 18011, 18010, 18011, 6009, 6011, 6013, 18013, 18012, 6053, 6054, 6056, 6057, 6058, 16095, 6060, 18018, 6062, 18017, 18016, 18019, 15279, 6067, 15300, 6077, 18022, 18022, 18021, 6082, 6085, 6088, 16095, 6090, 15300, 6093, 15315, 18013, 18012, 6118, 6119, 6121, 6122, 6123, 15300, 6125, 16095, 18017, 6128, 18018, 18019, 6131, 18016, 15279, 6142, 18022, 18022, 
18021, 6150, 6152, 16079, 6154, 15300, 15302, 6157, 16095, 6159, 15311, 6162, 15315, 6196, 6197, 6199, 6200, 6207, 6208, 6216, 6217, 6218, 6229, 6230, 6231, 6232, 6234, 6235, 17543, 17551, 17587, 17585, 17566, 17564, 16872, 16876, 17577, 17578, 17580, 17582, 17587, 17585, 16901, 16892, 18163, 17599, 17617, 17614, 16925, 16929, 16934, 16941, 16948, 16947, 16962, 16957, 6441, 6442, 6443, 6444, 6445, 6447, 6448, 6450, 6451, 6453, 6454, 6462, 6463, 6472, 6473, 6475, 6476, 6478, 6479, 6480, 6481, 6489, 6490, 6499, 6500, 6502, 6503, 6505, 6506, 6507, 6508, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 17162, 17178, 17197, 17217, 17233, 17255, 17271, 17289, 17310, 17347, 17372, 17389, 17419, 17446, 17464, 17485, 17507, 17512, 17517, 5480, 5481, 5486, 5487, 5488, 5490, 18464, 5494, 5496, 5504, 5505, 18472, 18475, 5513, 5514, 5515, 5517, 5519, 5520, 5522, 5531, 5532, 18490, 5539, 5540, 5541, 5542, 5544, 5546, 5548, 5561, 5562, 5563, 18510, 5573, 5593, 5594, 18515, 5614, 5615, 18527, 5621, 5629, 5630, 18538, 5638, 5640, 5641, 5646, 5647, 18551, 5654, 5655, 18556, 5666, 5667, 18564, 5675, 5676, 5682, 5683, 5684, 5685, 5687, 5688, 18579, 5701, 5702, 18589, 5726, 5728, 5729, 18600, 18604, 18607, 18609, 5749, 5750, 18615, 5769, 5770, 18621, 5778, 5780, 5781, 5786, 5787, 18634, 5790, 18441, 5792, 5798, 5799, 18640, 5809, 5811, 5812, 5815, 5816, 18651, 5826, 5827, 5828, 5831, 5832, 18663, 5837, 5839, 5840, 5865, 5866, 18679, 5885, 5886, 18688, 5890, 5892, 5893, 5894, 5901, 5902, 18700, 5912, 5913, 5914, 5921, 5922, 18715, 5925, 5927, 5928, 5934, 5935, 18725, 5955, 5956, 18734, 5960, 5961, 5963, 5964, 5972, 5973, 18747, 5996, 5997, 18758, 6006, 6007, 6008, 6051, 6052, 18772, 6059, 6061, 6063, 6064, 6065, 6066, 6068, 6078, 6079, 6080, 6089, 6091, 18796, 6094, 6116, 6117, 18801, 6124, 6126, 6127, 6129, 6130, 6132, 6133, 6143, 6144, 6145, 
6153, 6155, 6156, 6158, 6160, 18829, 6163, 18832, 18834, 18836, 18838, 18841, 18843, 18845, 18520, 17546, 17545, 18518, 17545, 6243, 6244, 18522, 18518, 17553, 17554, 17554, 17552, 17556, 17555, 17556, 17557, 18552, 17559, 17560, 17558, 17560, 6266, 18559, 17562, 6269, 6271, 17567, 6274, 18113, 17567, 17570, 17571, 18584, 17571, 17569, 16879, 6287, 17572, 16879, 17574, 17575, 16875, 17572, 6294, 6295, 6296, 17576, 17579, 17579, 6305, 17581, 17584, 6309, 17584, 6314, 18616, 17587, 6317, 17590, 17589, 6320, 16897, 16900, 16900, 17591, 6325, 17590, 17594, 17592, 17594, 17593, 17595, 6336, 6337, 17595, 17598, 17601, 17600, 18642, 17602, 17601, 17605, 17604, 17603, 17605, 17606, 17608, 18175, 17608, 18664, 18672, 18672, 18671, 6363, 18674, 17613, 18675, 17612, 18673, 17613, 6370, 18674, 17622, 6373, 17620, 16924, 17621, 16924, 6378, 17622, 16928, 6382, 16938, 16937, 17625, 17624, 17623, 6388, 17625, 16938, 18827, 18696, 18827, 18742, 18711, 18703, 18711, 18710, 17635, 17634, 17636, 17634, 17639, 17640, 16949, 17637, 6414, 16954, 6416, 17640, 16954, 18743, 18827, 18827, 18742, 17647, 16961, 17649, 16958, 17649, 6431, 6432, 17645, 16961, 17650, 17651, 17651, 17652, 18875, 18877, 18880, 18882, 18884, 18886, 18888, 18890, 18892, 18894, 18896, 18898, 18900, 18902, 18904, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18963, 18959, 18965, 18967, 18968, 18973, 18959, 18978, 18979, 18980, 18984, 18959, 18988, 
18498, 18500, 18993, 18995, 18961, 18961, 18960, 18962, 18961, 18999, 18944, 19002, 18945, 18531, 19006, 18946, 18543, 19010, 19012, 19015, 18947, 19018, 18948, 19021, 19023, 19025, 19027, 18949, 19030, 18950, 19034, 18951, 19040, 19043, 18952, 18626, 19047, 19049, 5791, 19055, 18645, 19059, 19061, 18953, 19064, 19067, 18666, 19071, 19073, 18954, 19076, 19078, 19079, 19081, 19083, 18955, 19085, 19087, 19089, 18717, 19093, 19095, 18956, 19098, 19100, 18738, 19103, 19105, 18957, 19108, 18958, 19111, 19114, 18959, 19117, 19118, 19120, 19123, 19125, 18961, 18962, 18961, 18961, 18960, 19131, 18959, 18809, 19137, 19138, 19140, 19142, 18960, 18961, 18961, 18961, 18962, 18467, 18827, 18465, 18827, 18827, 18485, 18827, 18476, 18493, 18827, 18502, 18827, 18839, 18827, 18784, 18827, 18775, 18827, 18996, 19155, 6238, 6239, 6240, 6241, 6242, 6245, 6246, 6248, 6249, 6250, 6251, 6254, 6256, 6257, 6258, 6260, 6261, 6262, 6263, 6264, 6267, 6268, 6272, 6275, 6276, 6280, 6281, 6282, 6283, 6284, 6286, 6288, 6289, 6290, 6291, 6292, 6293, 19032, 19032, 6299, 6300, 6301, 19032, 19203, 19037, 19038, 6307, 6308, 6310, 19036, 19037, 6315, 6316, 6318, 6319, 6321, 6322, 6323, 6324, 6326, 6328, 6330, 6331, 6332, 6334, 6338, 6339, 19231, 6341, 6342, 6343, 6344, 6345, 6346, 6348, 6349, 6350, 6352, 6354, 6355, 6356, 6358, 6360, 6361, 6362, 6364, 6365, 6366, 6367, 6368, 6369, 6371, 6372, 6374, 6375, 6376, 6377, 6379, 6380, 6383, 6384, 6385, 6386, 6387, 6389, 6390, 6393, 6394, 6395, 6396, 6399, 6400, 6401, 6402, 6404, 6405, 6407, 6408, 6410, 6411, 6412, 6413, 6415, 6417, 6418, 6420, 6421, 6422, 6424, 6426, 6427, 6428, 6429, 6430, 6433, 6434, 6436, 6438, 6439, 6440, 19317, 18827, 18784, 18827, 18775, 18794, 18827, 18827, 18792, 19128, 18827, 19325, 18827, 18827, 18806, 18804, 18822, 18827, 18827, 18827, 19148, 18825, 19330, 19321, 19151, 19150, 19152, 19328, 19327, 19156, 19320, 19319, 19318, 19321, 19323, 19322, 19326, 19328, 19327, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 
68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18964, 5482, 19459, 18974, 5508, 19464, 18985, 5535, 19469, 19472, 5564, 5566, 5567, 5568, 5570, 19000, 5597, 19003, 5618, 19007, 5633, 19486, 19013, 19016, 5658, 19019, 5670, 19028, 5691, 19031, 5705, 19035, 5732, 19041, 19044, 5773, 19505, 19050, 19507, 19056, 19510, 19062, 5819, 19065, 19068, 19516, 19074, 5869, 19077, 19521, 19084, 5905, 19526, 19090, 19529, 19096, 5938, 19099, 19534, 19106, 5976, 19109, 6000, 19112, 19115, 6055, 19544, 19547, 6081, 6083, 6084, 6086, 6087, 19132, 6120, 19556, 19559, 6146, 6147, 6148, 6149, 6151, 6191, 6192, 6193, 6194, 6202, 6203, 6204, 6205, 6211, 6212, 6213, 6215, 6221, 6222, 6223, 6224, 6225, 6226, 19586, 19588, 19162, 19590, 19482, 19593, 19595, 19598, 19601, 19603, 19605, 19182, 19493, 19492, 19494, 19607, 19609, 19611, 19613, 19194, 19617, 19619, 19621, 6297, 6298, 6302, 19625, 6304, 6306, 6311, 6312, 19210, 19636, 19215, 19639, 19640, 19642, 19223, 19647, 19651, 19654, 19656, 19660, 19664, 19668, 19670, 19672, 19674, 19258, 19261, 19679, 19681, 19682, 19684, 19686, 19688, 19689, 19693, 19697, 19700, 19704, 19706, 19707, 19708, 
19711, 19715, 19717, 19308, 19719, 19723, 18878, 6457, 6459, 6460, 6461, 6466, 6467, 6468, 6469, 6470, 6471, 6485, 6486, 6487, 6488, 6493, 6494, 6495, 6496, 6497, 6498, 6510, 6512, 6513, 6515, 19577, 19584, 6520, 6521, 6522, 6551, 6552, 6553, 6556, 19736, 6559, 6560, 6562, 19747, 6565, 6566, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18455, 19460, 18473, 19465, 18491, 19470, 18508, 19980, 18509, 18516, 18528, 18539, 18557, 18565, 18580, 18590, 18601, 18622, 19053, 18652, 18669, 18680, 19522, 18701, 18708, 18726, 19535, 18748, 18759, 18773, 19545, 18790, 20038, 20039, 18802, 19557, 20046, 20048, 20049, 20051, 20053, 20055, 20057, 20059, 20063, 20065, 20067, 20069, 20071, 6252, 20074, 19989, 20077, 20079, 6273, 6277, 6278, 20086, 20088, 20090, 20092, 20093, 19207, 19631, 20097, 20101, 20103, 20105, 20004, 20109, 20008, 20011, 20114, 20116, 20118, 20120, 20122, 20124, 20022, 20129, 20131, 20134, 20136, 20031, 20141, 20144, 20146, 20148, 20151, 20154, 20156, 20158, 20161, 6517, 6519, 20138, 20169, 6558, 6564, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18456, 
18975, 18492, 20231, 19982, 18517, 18529, 18540, 18558, 18566, 18581, 18591, 18602, 18623, 18653, 18681, 18702, 18727, 18749, 18760, 18774, 20256, 20040, 18803, 20261, 18820, 20225, 20264, 20227, 20266, 20229, 20269, 20272, 20273, 6255, 19604, 19185, 20280, 19614, 20283, 20284, 20285, 20287, 20288, 20291, 6329, 20242, 19657, 6347, 6353, 20244, 20297, 20299, 20301, 20246, 20248, 6406, 20304, 20250, 20306, 6437, 20254, 20310, 20259, 20314, 20165, 6554, 20173, 20177, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20484, 20502, 20505, 19968, 6195, 19971, 6206, 19974, 6214, 20270, 19983, 19591, 19985, 19987, 20514, 19990, 19991, 19993, 20516, 20517, 19995, 19997, 19201, 20521, 19999, 20523, 20001, 19644, 20002, 20525, 20005, 6335, 20007, 20528, 20009, 20529, 20012, 6359, 19676, 19683, 20014, 19690, 20016, 6392, 20018, 6398, 20021, 19701, 20023, 19709, 20025, 6423, 20027, 19720, 20029, 20540, 20032, 6458, 20311, 20041, 6484, 20315, 20166, 20546, 20174, 20178, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 
107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 6190, 20740, 6201, 20742, 6210, 20744, 19977, 6237, 6247, 6253, 20075, 6259, 6265, 6270, 20755, 6279, 6285, 6303, 20098, 6313, 6327, 20106, 6333, 20767, 6340, 20110, 6351, 20111, 6357, 20773, 6381, 6391, 19691, 6397, 19695, 6403, 20783, 6409, 6419, 20787, 6425, 6435, 20137, 6456, 20793, 20035, 6483, 20149, 20044, 20799, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 
246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20997, 6220, 19599, 19648, 19652, 19661, 19665, 20112, 20125, 20126, 19702, 21031, 19724, 20308, 6465, 20312, 6492, 20992, 20994, 21022, 20999, 21022, 21029, 21032, 21011, 21000, 21007, 21009, 21014, 21022, 21022, 21016, 21008, 21003, 21005, 21004, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20061, 20107, 19669, 19694, 19698, 19713, 20142, 20152, 6511, 6514, 21249, 6523, 6524, 6525, 21027, 6527, 6528, 6529, 6530, 21001, 21033, 6534, 6535, 6536, 21012, 21027, 6542, 6543, 21018, 6545, 6546, 6548, 6549, 6550, 21262, 21264, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 
135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 20996, 6518, 6526, 6531, 21025, 6533, 6537, 21020, 21023, 21030, 6541, 6544, 21014, 21516, 21520, 21522, 21527, 21534, 21537, 21035, 6557, 21038, 6563, 21513, 21512, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 6516, 6532, 6538, 6539, 6540, 6547, 21762, 21525, 21530, 21771, 21775, 6555, 6561, 21782, 6570, 6571, 21780, 21761, 18, 19, 20, 21, 22, 23, 24, 25, 
26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22017, 22018, 22020, 21535, 22022, 21777, 6567, 22027, 22027, 22027, 22028, 6574, 22016, 6576, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 
197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22023, 22273, 22024, 21778, 22026, 6568, 6569, 6572, 6573, 6575, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22529, 22277, 22533, 22030, 22535, 22283, 22285, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 
116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22784, 22531, 22787, 22789, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23041, 23043, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 
29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23296, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 
208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 6577, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23808, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 
138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 24064, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255};
bool h_Op[]= {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 
0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#define THREADS_PER_BLOCK 256
#define BLOCKS_PER_GRID 1
#define SIZE_OF_IN 6656
#define SIZE_OF_AC 17920
// Evaluates a fixed arithmetic-circuit (expression DAG) entirely in shared memory.
//
// Layout of the shared register file R (addressed as R[i + level*t], t = THREADS_PER_BLOCK):
//   levels  0..25  : inputs, copied from A (26*t = SIZE_OF_IN values)
//   levels 26..95  : gate outputs, 70*t = SIZE_OF_AC values
// For gate slot g: Op[g] selects multiply (true) or add (false); B[g] and C[g] are
// flat indices into R naming the two operands. The gates are written in a fixed
// topological order; each __syncthreads() marks a dependency frontier, so a gate may
// only read results produced before the most recent barrier (or written by this same
// thread in the current barrier group) — presumably guaranteed by how B/C were
// generated; TODO confirm against the circuit generator.
//
// Parameters:
//   A      - in: SIZE_OF_IN inputs; out: A[0] receives the accumulated result
//   B, C   - operand index tables, one entry per gate slot (SIZE_OF_AC entries)
//   Op     - per-gate operation selector (true = *, false = +)
//   n_iter - number of times the whole circuit is re-evaluated; thread 0 adds
//            R[95*t] (its own final-level gate output) into `final` each iteration
//
// NOTE(review): R is 96*256 floats = 96 KB of static shared memory — this exceeds
// the 48 KB static limit on most architectures and will likely fail to compile or
// launch as written; verify the target SM / opt-in configuration.
// NOTE(review): designed for a single block of exactly THREADS_PER_BLOCK threads
// (BLOCKS_PER_GRID == 1); indices assume i ranges over 0..t-1 with no bounds checks.
__device__ void
ac(float *A, const int *B, const int *C, const bool *Op, int n_iter) {
int i= blockDim.x * blockIdx.x + threadIdx.x;
__shared__ float R[96*THREADS_PER_BLOCK];
const int t= THREADS_PER_BLOCK;
__shared__ float final;
// Every thread stores 0 to the same shared scalar before the barrier below; the
// race is benign (all writers store the same value).
final=0;
// Stage the 26 input levels from global memory; thread i handles column i of
// every level, so consecutive threads access consecutive addresses (coalesced).
R[i + 0*t] = A[i + 0*t];
R[i + 1*t] = A[i + 1*t];
R[i + 2*t] = A[i + 2*t];
R[i + 3*t] = A[i + 3*t];
R[i + 4*t] = A[i + 4*t];
R[i + 5*t] = A[i + 5*t];
R[i + 6*t] = A[i + 6*t];
R[i + 7*t] = A[i + 7*t];
R[i + 8*t] = A[i + 8*t];
R[i + 9*t] = A[i + 9*t];
R[i + 10*t] = A[i + 10*t];
R[i + 11*t] = A[i + 11*t];
R[i + 12*t] = A[i + 12*t];
R[i + 13*t] = A[i + 13*t];
R[i + 14*t] = A[i + 14*t];
R[i + 15*t] = A[i + 15*t];
R[i + 16*t] = A[i + 16*t];
R[i + 17*t] = A[i + 17*t];
R[i + 18*t] = A[i + 18*t];
R[i + 19*t] = A[i + 19*t];
R[i + 20*t] = A[i + 20*t];
R[i + 21*t] = A[i + 21*t];
R[i + 22*t] = A[i + 22*t];
R[i + 23*t] = A[i + 23*t];
R[i + 24*t] = A[i + 24*t];
R[i + 25*t] = A[i + 25*t];
// Make all staged inputs (and final=0) visible before gate evaluation starts.
__syncthreads();
for (int iter=0; iter< n_iter; iter++) {
// Each statement below evaluates one gate level; barriers separate dependency
// frontiers of the circuit (later groups read results of earlier groups).
R[i + 26*t] = Op[i + 0*t] ? R[B[i + 0*t]] * R[C[i + 0*t]] : R[B[i + 0*t]] + R[C[i + 0*t]];
R[i + 27*t] = Op[i + 1*t] ? R[B[i + 1*t]] * R[C[i + 1*t]] : R[B[i + 1*t]] + R[C[i + 1*t]];
R[i + 28*t] = Op[i + 2*t] ? R[B[i + 2*t]] * R[C[i + 2*t]] : R[B[i + 2*t]] + R[C[i + 2*t]];
R[i + 29*t] = Op[i + 3*t] ? R[B[i + 3*t]] * R[C[i + 3*t]] : R[B[i + 3*t]] + R[C[i + 3*t]];
R[i + 30*t] = Op[i + 4*t] ? R[B[i + 4*t]] * R[C[i + 4*t]] : R[B[i + 4*t]] + R[C[i + 4*t]];
R[i + 31*t] = Op[i + 5*t] ? R[B[i + 5*t]] * R[C[i + 5*t]] : R[B[i + 5*t]] + R[C[i + 5*t]];
__syncthreads();
R[i + 32*t] = Op[i + 6*t] ? R[B[i + 6*t]] * R[C[i + 6*t]] : R[B[i + 6*t]] + R[C[i + 6*t]];
R[i + 33*t] = Op[i + 7*t] ? R[B[i + 7*t]] * R[C[i + 7*t]] : R[B[i + 7*t]] + R[C[i + 7*t]];
R[i + 34*t] = Op[i + 8*t] ? R[B[i + 8*t]] * R[C[i + 8*t]] : R[B[i + 8*t]] + R[C[i + 8*t]];
R[i + 35*t] = Op[i + 9*t] ? R[B[i + 9*t]] * R[C[i + 9*t]] : R[B[i + 9*t]] + R[C[i + 9*t]];
__syncthreads();
R[i + 36*t] = Op[i + 10*t] ? R[B[i + 10*t]] * R[C[i + 10*t]] : R[B[i + 10*t]] + R[C[i + 10*t]];
R[i + 37*t] = Op[i + 11*t] ? R[B[i + 11*t]] * R[C[i + 11*t]] : R[B[i + 11*t]] + R[C[i + 11*t]];
R[i + 38*t] = Op[i + 12*t] ? R[B[i + 12*t]] * R[C[i + 12*t]] : R[B[i + 12*t]] + R[C[i + 12*t]];
R[i + 39*t] = Op[i + 13*t] ? R[B[i + 13*t]] * R[C[i + 13*t]] : R[B[i + 13*t]] + R[C[i + 13*t]];
R[i + 40*t] = Op[i + 14*t] ? R[B[i + 14*t]] * R[C[i + 14*t]] : R[B[i + 14*t]] + R[C[i + 14*t]];
__syncthreads();
R[i + 41*t] = Op[i + 15*t] ? R[B[i + 15*t]] * R[C[i + 15*t]] : R[B[i + 15*t]] + R[C[i + 15*t]];
R[i + 42*t] = Op[i + 16*t] ? R[B[i + 16*t]] * R[C[i + 16*t]] : R[B[i + 16*t]] + R[C[i + 16*t]];
R[i + 43*t] = Op[i + 17*t] ? R[B[i + 17*t]] * R[C[i + 17*t]] : R[B[i + 17*t]] + R[C[i + 17*t]];
R[i + 44*t] = Op[i + 18*t] ? R[B[i + 18*t]] * R[C[i + 18*t]] : R[B[i + 18*t]] + R[C[i + 18*t]];
R[i + 45*t] = Op[i + 19*t] ? R[B[i + 19*t]] * R[C[i + 19*t]] : R[B[i + 19*t]] + R[C[i + 19*t]];
__syncthreads();
R[i + 46*t] = Op[i + 20*t] ? R[B[i + 20*t]] * R[C[i + 20*t]] : R[B[i + 20*t]] + R[C[i + 20*t]];
R[i + 47*t] = Op[i + 21*t] ? R[B[i + 21*t]] * R[C[i + 21*t]] : R[B[i + 21*t]] + R[C[i + 21*t]];
R[i + 48*t] = Op[i + 22*t] ? R[B[i + 22*t]] * R[C[i + 22*t]] : R[B[i + 22*t]] + R[C[i + 22*t]];
R[i + 49*t] = Op[i + 23*t] ? R[B[i + 23*t]] * R[C[i + 23*t]] : R[B[i + 23*t]] + R[C[i + 23*t]];
__syncthreads();
R[i + 50*t] = Op[i + 24*t] ? R[B[i + 24*t]] * R[C[i + 24*t]] : R[B[i + 24*t]] + R[C[i + 24*t]];
R[i + 51*t] = Op[i + 25*t] ? R[B[i + 25*t]] * R[C[i + 25*t]] : R[B[i + 25*t]] + R[C[i + 25*t]];
R[i + 52*t] = Op[i + 26*t] ? R[B[i + 26*t]] * R[C[i + 26*t]] : R[B[i + 26*t]] + R[C[i + 26*t]];
R[i + 53*t] = Op[i + 27*t] ? R[B[i + 27*t]] * R[C[i + 27*t]] : R[B[i + 27*t]] + R[C[i + 27*t]];
__syncthreads();
R[i + 54*t] = Op[i + 28*t] ? R[B[i + 28*t]] * R[C[i + 28*t]] : R[B[i + 28*t]] + R[C[i + 28*t]];
R[i + 55*t] = Op[i + 29*t] ? R[B[i + 29*t]] * R[C[i + 29*t]] : R[B[i + 29*t]] + R[C[i + 29*t]];
R[i + 56*t] = Op[i + 30*t] ? R[B[i + 30*t]] * R[C[i + 30*t]] : R[B[i + 30*t]] + R[C[i + 30*t]];
__syncthreads();
R[i + 57*t] = Op[i + 31*t] ? R[B[i + 31*t]] * R[C[i + 31*t]] : R[B[i + 31*t]] + R[C[i + 31*t]];
R[i + 58*t] = Op[i + 32*t] ? R[B[i + 32*t]] * R[C[i + 32*t]] : R[B[i + 32*t]] + R[C[i + 32*t]];
R[i + 59*t] = Op[i + 33*t] ? R[B[i + 33*t]] * R[C[i + 33*t]] : R[B[i + 33*t]] + R[C[i + 33*t]];
R[i + 60*t] = Op[i + 34*t] ? R[B[i + 34*t]] * R[C[i + 34*t]] : R[B[i + 34*t]] + R[C[i + 34*t]];
__syncthreads();
R[i + 61*t] = Op[i + 35*t] ? R[B[i + 35*t]] * R[C[i + 35*t]] : R[B[i + 35*t]] + R[C[i + 35*t]];
R[i + 62*t] = Op[i + 36*t] ? R[B[i + 36*t]] * R[C[i + 36*t]] : R[B[i + 36*t]] + R[C[i + 36*t]];
R[i + 63*t] = Op[i + 37*t] ? R[B[i + 37*t]] * R[C[i + 37*t]] : R[B[i + 37*t]] + R[C[i + 37*t]];
__syncthreads();
R[i + 64*t] = Op[i + 38*t] ? R[B[i + 38*t]] * R[C[i + 38*t]] : R[B[i + 38*t]] + R[C[i + 38*t]];
R[i + 65*t] = Op[i + 39*t] ? R[B[i + 39*t]] * R[C[i + 39*t]] : R[B[i + 39*t]] + R[C[i + 39*t]];
R[i + 66*t] = Op[i + 40*t] ? R[B[i + 40*t]] * R[C[i + 40*t]] : R[B[i + 40*t]] + R[C[i + 40*t]];
__syncthreads();
R[i + 67*t] = Op[i + 41*t] ? R[B[i + 41*t]] * R[C[i + 41*t]] : R[B[i + 41*t]] + R[C[i + 41*t]];
R[i + 68*t] = Op[i + 42*t] ? R[B[i + 42*t]] * R[C[i + 42*t]] : R[B[i + 42*t]] + R[C[i + 42*t]];
R[i + 69*t] = Op[i + 43*t] ? R[B[i + 43*t]] * R[C[i + 43*t]] : R[B[i + 43*t]] + R[C[i + 43*t]];
__syncthreads();
R[i + 70*t] = Op[i + 44*t] ? R[B[i + 44*t]] * R[C[i + 44*t]] : R[B[i + 44*t]] + R[C[i + 44*t]];
R[i + 71*t] = Op[i + 45*t] ? R[B[i + 45*t]] * R[C[i + 45*t]] : R[B[i + 45*t]] + R[C[i + 45*t]];
__syncthreads();
R[i + 72*t] = Op[i + 46*t] ? R[B[i + 46*t]] * R[C[i + 46*t]] : R[B[i + 46*t]] + R[C[i + 46*t]];
R[i + 73*t] = Op[i + 47*t] ? R[B[i + 47*t]] * R[C[i + 47*t]] : R[B[i + 47*t]] + R[C[i + 47*t]];
__syncthreads();
R[i + 74*t] = Op[i + 48*t] ? R[B[i + 48*t]] * R[C[i + 48*t]] : R[B[i + 48*t]] + R[C[i + 48*t]];
R[i + 75*t] = Op[i + 49*t] ? R[B[i + 49*t]] * R[C[i + 49*t]] : R[B[i + 49*t]] + R[C[i + 49*t]];
__syncthreads();
R[i + 76*t] = Op[i + 50*t] ? R[B[i + 50*t]] * R[C[i + 50*t]] : R[B[i + 50*t]] + R[C[i + 50*t]];
R[i + 77*t] = Op[i + 51*t] ? R[B[i + 51*t]] * R[C[i + 51*t]] : R[B[i + 51*t]] + R[C[i + 51*t]];
__syncthreads();
// The tail of the circuit narrows to one level per dependency frontier.
R[i + 78*t] = Op[i + 52*t] ? R[B[i + 52*t]] * R[C[i + 52*t]] : R[B[i + 52*t]] + R[C[i + 52*t]];
__syncthreads();
R[i + 79*t] = Op[i + 53*t] ? R[B[i + 53*t]] * R[C[i + 53*t]] : R[B[i + 53*t]] + R[C[i + 53*t]];
__syncthreads();
R[i + 80*t] = Op[i + 54*t] ? R[B[i + 54*t]] * R[C[i + 54*t]] : R[B[i + 54*t]] + R[C[i + 54*t]];
__syncthreads();
R[i + 81*t] = Op[i + 55*t] ? R[B[i + 55*t]] * R[C[i + 55*t]] : R[B[i + 55*t]] + R[C[i + 55*t]];
__syncthreads();
R[i + 82*t] = Op[i + 56*t] ? R[B[i + 56*t]] * R[C[i + 56*t]] : R[B[i + 56*t]] + R[C[i + 56*t]];
__syncthreads();
R[i + 83*t] = Op[i + 57*t] ? R[B[i + 57*t]] * R[C[i + 57*t]] : R[B[i + 57*t]] + R[C[i + 57*t]];
__syncthreads();
R[i + 84*t] = Op[i + 58*t] ? R[B[i + 58*t]] * R[C[i + 58*t]] : R[B[i + 58*t]] + R[C[i + 58*t]];
__syncthreads();
R[i + 85*t] = Op[i + 59*t] ? R[B[i + 59*t]] * R[C[i + 59*t]] : R[B[i + 59*t]] + R[C[i + 59*t]];
__syncthreads();
R[i + 86*t] = Op[i + 60*t] ? R[B[i + 60*t]] * R[C[i + 60*t]] : R[B[i + 60*t]] + R[C[i + 60*t]];
__syncthreads();
R[i + 87*t] = Op[i + 61*t] ? R[B[i + 61*t]] * R[C[i + 61*t]] : R[B[i + 61*t]] + R[C[i + 61*t]];
__syncthreads();
R[i + 88*t] = Op[i + 62*t] ? R[B[i + 62*t]] * R[C[i + 62*t]] : R[B[i + 62*t]] + R[C[i + 62*t]];
__syncthreads();
R[i + 89*t] = Op[i + 63*t] ? R[B[i + 63*t]] * R[C[i + 63*t]] : R[B[i + 63*t]] + R[C[i + 63*t]];
__syncthreads();
R[i + 90*t] = Op[i + 64*t] ? R[B[i + 64*t]] * R[C[i + 64*t]] : R[B[i + 64*t]] + R[C[i + 64*t]];
__syncthreads();
R[i + 91*t] = Op[i + 65*t] ? R[B[i + 65*t]] * R[C[i + 65*t]] : R[B[i + 65*t]] + R[C[i + 65*t]];
__syncthreads();
R[i + 92*t] = Op[i + 66*t] ? R[B[i + 66*t]] * R[C[i + 66*t]] : R[B[i + 66*t]] + R[C[i + 66*t]];
__syncthreads();
R[i + 93*t] = Op[i + 67*t] ? R[B[i + 67*t]] * R[C[i + 67*t]] : R[B[i + 67*t]] + R[C[i + 67*t]];
__syncthreads();
R[i + 94*t] = Op[i + 68*t] ? R[B[i + 68*t]] * R[C[i + 68*t]] : R[B[i + 68*t]] + R[C[i + 68*t]];
__syncthreads();
R[i + 95*t] = Op[i + 69*t] ? R[B[i + 69*t]] * R[C[i + 69*t]] : R[B[i + 69*t]] + R[C[i + 69*t]];
// Thread 0 reads R[0 + 95*t], the value it wrote itself on the line above, so no
// barrier is needed between the write and this read.
if (i==0) { final += R[95*t]; }
// Keep iterations ordered: all gate reads in the next pass must see this pass's writes.
__syncthreads();
}
// Publish the accumulated per-iteration results back to global memory.
if (i==0) { A[0]= final;}
}
|
11,237 |
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <iostream>
#include <chrono>
#include <utility>
// Benchmark sizing: each block moves `bytes` KB between global and shared memory,
// split evenly across 512 threads (size_per_thread ints per thread).
constexpr int kb=1<<10;
constexpr int bytes=24;
constexpr int size_per_thread=bytes*kb/(512*sizeof(int));
namespace kernel
{
// Copies `bytes` KB from global memory into shared memory.
// Expects a 1-D block of exactly 512 threads; each thread copies its contiguous
// size_per_thread-int slice.
// NOTE(review): the shared buffer is written but never read, so the compiler may
// dead-code-eliminate the copy entirely — verify the generated SASS before
// trusting the measured bandwidth.
__global__
void global_to_shared(int *global)
{
__shared__ int shared[bytes*kb/sizeof(int)];
// Barrier before the copy; no shared data has been produced yet, so this only
// aligns the start of the timed region across the block.
__syncthreads();
#pragma unroll 12
for(int i=0; i<size_per_thread; ++i)
{
shared[threadIdx.x*size_per_thread+i]=global[threadIdx.x*size_per_thread+i];
}
}
// Copies `bytes` KB from shared memory out to global memory.
// NOTE(review): `shared` is read without ever being written — shared memory is
// uninitialized, so the values stored to `global` are garbage. Acceptable for a
// pure bandwidth benchmark, but the output buffer must not be interpreted as data.
__global__
void shared_to_global(int *global)
{
__shared__ int shared[bytes*kb/sizeof(int)];
__syncthreads();
#pragma unroll 12
for(int i=0; i<size_per_thread; ++i)
{
global[threadIdx.x*size_per_thread+i]=shared[threadIdx.x*size_per_thread+i];
}
}
}
// Times `repeat` launches of the two copy kernels (one block of 512 threads) and
// returns the accumulated (global->shared, shared->global) times in milliseconds.
//
// Fixes vs. the original:
//  - cudaEventElapsedTime was called without waiting for the stop event; the stop
//    event had generally not completed, so the call fails (cudaErrorNotReady) or
//    yields garbage. cudaEventSynchronize(stop) is now issued before each query.
//  - events are created/destroyed once instead of once per iteration.
auto measure_global_shared()
{
constexpr int repeat=10000;
float gs_sum=0, sg_sum=0, time;
int *global;
cudaMalloc((void **)&global, bytes*kb);
const dim3 grid(1);
const dim3 block(512);
cudaEvent_t start, stop;
cudaEventCreate(&start);
cudaEventCreate(&stop);
for(int i=0; i<repeat; ++i)
{
// global -> shared
cudaEventRecord(start);
kernel::global_to_shared<<<grid, block>>>(global);
cudaEventRecord(stop);
cudaEventSynchronize(stop); // wait for the kernel so the elapsed time is valid
cudaEventElapsedTime(&time, start, stop);
gs_sum+=time;
// shared -> global
cudaEventRecord(start);
kernel::shared_to_global<<<grid, block>>>(global);
cudaEventRecord(stop);
cudaEventSynchronize(stop);
cudaEventElapsedTime(&time, start, stop);
sg_sum+=time;
}
cudaEventDestroy(start);
cudaEventDestroy(stop);
cudaFree(global);
return std::make_pair(gs_sum, sg_sum);
}
// Entry point: runs the shared-memory bandwidth benchmark and prints a CSV row.
int main()
{
std::cout<<"data size[KB], global to shared[ms], shared to global[ms]"<<std::endl;
const auto timings=measure_global_shared();
const float to_shared=timings.first;
const float to_global=timings.second;
std::cout<<bytes<<","<<to_shared<<","<<to_global<<std::endl;
}
|
11,238 | #include <stdio.h>
#include <cuda.h>
#include <time.h>
/**
* Nathan Dunn
* CS-4370-90 Par. Prog. Many-Core GPUs
* Professor Liu
* 10-24-19
* Tiled Matrix Multiplication
*/
#define N 8 // size of the matrices to be multiplied
#define TILE_WIDTH 4 // size of the tiles
/**
 * CPU reference implementation of square matrix multiplication: p = m * n.
 * All matrices are width x width, stored row-major in flat arrays.
 * Products are formed in float (matching the element type) and accumulated in
 * double for accuracy before being narrowed back to float.
 * m     - left operand
 * n     - right operand
 * p     - output matrix, overwritten with the product
 * width - edge length of all three matrices
 */
void MatrixMulOnHost(float *m, float *n, float *p, int width){
for(int r = 0; r < width; r++){
const int rowBase = r * width;
for(int c = 0; c < width; c++){
double acc = 0;
for(int k = 0; k < width; k++){
// float * float product, accumulated in double (same as the GPU kernel).
acc += m[rowBase + k] * n[k * width + c];
}
p[rowBase + c] = acc;
}
}
}
/**
 * Computes d_P = d_M * d_N (square, row-major, Width x Width) on the GPU using
 * TILE_WIDTH x TILE_WIDTH shared-memory tiles.
 *
 * Launch layout: 2-D blocks of exactly (TILE_WIDTH, TILE_WIDTH) threads and a
 * grid of ceil(Width/TILE_WIDTH) x ceil(Width/TILE_WIDTH) blocks.
 *
 * Fix vs. the original: the tile loads and the final store had no bounds checks,
 * so any Width that is not a multiple of TILE_WIDTH (or an over-covering grid)
 * read and wrote out of bounds. Out-of-range tile elements are now loaded as 0
 * (a no-op for the dot product) and the store is guarded; results for already
 * valid configurations are unchanged.
 *
 * d_M   - first factor (device pointer)
 * d_N   - second factor (device pointer)
 * d_P   - product (device pointer)
 * Width - edge length of all three matrices
 */
__global__ void MatrixMulKernel(float* d_M, float* d_N, float* d_P, int Width)
{
__shared__ float ds_M[TILE_WIDTH][TILE_WIDTH];
__shared__ float ds_N[TILE_WIDTH][TILE_WIDTH];
int bx = blockIdx.x; int by = blockIdx.y;
int tx = threadIdx.x; int ty = threadIdx.y;
// Identify the row and column of the d_P element to work on
int Row = by * TILE_WIDTH + ty;
int Col = bx * TILE_WIDTH + tx;
double Pvalue = 0;
// Round up so a partial final tile is still processed.
int numTiles = (Width + TILE_WIDTH - 1) / TILE_WIDTH;
// Loop over the d_M and d_N tiles required to compute the d_P element
for (int m = 0; m < numTiles; ++m){
int mCol = m*TILE_WIDTH + tx; // column of d_M read by this thread
int mRow = m*TILE_WIDTH + ty; // row of d_N read by this thread
// Collaborative loading of d_M and d_N tiles into shared memory;
// out-of-range elements contribute 0 to the dot product.
ds_M[ty][tx] = (Row < Width && mCol < Width) ? d_M[Row*Width + mCol] : 0.0f;
ds_N[ty][tx] = (mRow < Width && Col < Width) ? d_N[mRow*Width + Col] : 0.0f;
__syncthreads();
for (int k = 0; k < TILE_WIDTH; ++k)
Pvalue += ds_M[ty][k] * ds_N[k][tx];
// Barrier outside any divergent branch: all threads reach it every iteration.
__syncthreads();
}
if (Row < Width && Col < Width)
d_P[Row*Width+Col] = Pvalue;
}
/**
Verifies that an input matrix matches the product of two matrices. Each matrix
element is computed individually and compared. If the comparison is not within
the tolerance, the function automatically returns false
A - Matrix to use for testing
B - Matrix to use for testing
C - Matrix to be tested
width - size of input matrices
*/
/**
 Verifies that an input matrix matches the product of two matrices. Each matrix
 element is computed individually and compared. If the comparison is not within
 the tolerance, the function returns false.
 A - left factor used for testing
 B - right factor used for testing
 C - matrix to be tested against A*B
 width - size of input matrices
 */
bool verify(float *A, float *B, float *C, int width) {
  const float relativeTolerance = 1e-6; // 1e-6 = 0.000001
  for(int row = 0; row < width; ++row) {
    for(int col = 0; col < width; ++col) {
      double sum = 0;
      // int (was unsigned int) avoids signed/unsigned comparison with 'width'
      for(int k = 0; k < width; ++k) {
        sum += A[row*width + k]*B[k*width + col];
      }
      // BUG FIX: the original divided by 'sum' unconditionally, so an expected
      // value of exactly 0 produced a division by zero. Fall back to an
      // absolute comparison in that case.
      double diff = sum - C[row*width + col];
      double relativeError = (sum != 0.0) ? diff / sum : diff;
      if (relativeError > relativeTolerance
          || relativeError < -relativeTolerance) {
        return false;
      }
    }
  }
  return true;
}
/**
Prints a matrix.
matrix - matrix to be printed
size - size of the matrix
*/
/**
 Writes a square matrix to stdout, one row per line, followed by a blank line.
 matrix - row-major data to print
 size   - matrix dimension
 */
void printMatrix(float *matrix, int size){
    for(int row = 0; row < size; row++){
        for(int col = 0; col < size; col++)
            printf("%f ", matrix[row * size + col]);
        printf("\n");
    }
    printf("\n");
}
/**
 * Driver: multiplies two N x N matrices on both the GPU (tiled kernel) and
 * the CPU, validates both products, and reports compute vs. transfer timings.
 * NOTE(review): CUDA API return codes are not checked anywhere in this
 * function — consider wrapping the calls in an error-check macro.
 */
int main(int argc, char* argv[])
{
    // matrices on the device
    float *a, *b, *c, *d;
    // matrices for the gpu
    float *dev_a, *dev_b, *dev_c;
    // allocate matrices
    a = (float*)malloc(sizeof(float) * N * N);
    b = (float*)malloc(sizeof(float) * N * N);
    c = (float*)malloc(sizeof(float) * N * N);
    d = (float*)malloc(sizeof(float) * N * N);
    // allocate device matrices
    cudaMalloc((void **)(&dev_a), N*N*sizeof(float));
    cudaMalloc((void **)(&dev_b), N*N*sizeof(float));
    cudaMalloc((void **)(&dev_c), N*N*sizeof(float));
    // initialize matrices a and b
    // (Lehmer-style integer sequence mapped into [-2, 2) for reproducibility)
    int init =1325;
    for(int i=0; i<N; i++){
        for(int j=0; j<N; j++){
            int index = i * N + j;
            init=3125*init%65536;
            a[index]=(init-32768.0)/16384.0;
            init=3125*init%65536;
            b[index]=(init-32768.0)/16384.0;
        }
    }
    // Variables to measure GPU computation time and memory transfer time
    float timeGPU, timeTransfer, timeBack;
    cudaEvent_t gpuStart,gpuStop;
    // Begin measuring time for copying memory over to device
    cudaEventCreate(&gpuStart);
    cudaEventCreate(&gpuStop);
    cudaEventRecord(gpuStart,0);
    // copy array a,b (system memory) to dev_a, dev_b (device memory)
    cudaMemcpy(dev_a,a,N*N*sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(dev_b,b,N*N*sizeof(float), cudaMemcpyHostToDevice);
    // Finish measuring time for copying memory over to device
    cudaDeviceSynchronize();
    cudaEventRecord(gpuStop,0);
    cudaEventSynchronize(gpuStop);
    cudaEventElapsedTime(&timeTransfer,gpuStart,gpuStop);
    cudaEventDestroy(gpuStart);
    cudaEventDestroy(gpuStop);
    printf("Matrix A: \n");
    printMatrix(a, N);
    printf("Matrix B: \n");
    printMatrix(b, N);
    // block and grid initialization for GPUs
    // (assumes N is a multiple of TILE_WIDTH — true for N=8, TILE_WIDTH=4)
    dim3 dimBlock(TILE_WIDTH, TILE_WIDTH);
    dim3 dimGrid(N/dimBlock.x, N/dimBlock.y);
    // Begin measuring GPU computation time
    cudaEventCreate(&gpuStart);
    cudaEventCreate(&gpuStop);
    cudaEventRecord(gpuStart,0);
    // Launch kernels
    MatrixMulKernel<<<dimGrid, dimBlock>>>(dev_a, dev_b, dev_c, N);
    // Finish measuring GPU computation time
    cudaDeviceSynchronize();
    cudaEventRecord(gpuStop,0);
    cudaEventSynchronize(gpuStop);
    cudaEventElapsedTime(&timeGPU,gpuStart,gpuStop);
    cudaEventDestroy(gpuStart);
    cudaEventDestroy(gpuStop);
    // Begin measuring time for copying memory back to host
    cudaEventCreate(&gpuStart);
    cudaEventCreate(&gpuStop);
    cudaEventRecord(gpuStart,0);
    // copy results from GPU back to system memory
    cudaMemcpy(c, dev_c, N*N*sizeof(float), cudaMemcpyDeviceToHost);
    // Finish measuring time for copying memory back to host
    cudaDeviceSynchronize();
    cudaEventRecord(gpuStop,0);
    cudaEventSynchronize(gpuStop);
    cudaEventElapsedTime(&timeBack,gpuStart,gpuStop);
    cudaEventDestroy(gpuStart);
    cudaEventDestroy(gpuStop);
    // total transfer time includes copy to and copy back time
    timeTransfer += timeBack;
    // display GPU device result
    printf("GPU Device Product: \n");
    printMatrix(c, N);
    // variables used to measure cpu computation time
    clock_t cpuStart, cpuEnd;
    float cpuTimeTaken;
    // start measuring cpu computation time
    cpuStart = clock();
    // compute result on CPU and display it
    MatrixMulOnHost(a, b, d, N);
    // stop measuring cpu computation time
    cpuEnd = clock();
    cpuTimeTaken = ((float)cpuEnd - cpuStart)/CLOCKS_PER_SEC; // in seconds
    printf("CPU Product: \n");
    printMatrix(d, N);
    int cpuValid = verify(a, b, d, N);
    int gpuValid = verify(a, b, c, N);
    if(cpuValid && gpuValid){
        printf("Validating results...TEST PASSED\n");
    } else {
        printf("Validating results...TEST FAILED\n");
    }
    // display GPU computation and copy time
    printf("GPU Time: %f, Memory Copy Time: %f\n", timeGPU, timeTransfer);
    // display CPU computation time
    printf("CPU Time: %f\n", cpuTimeTaken);
    // free system and device memory
    free(a);
    free(b);
    free(c);
    free(d);
    cudaFree(dev_a);
    cudaFree(dev_b);
    cudaFree(dev_c);
    return 0;
}
11,239 | #include <cstdio>
#include <cstdlib>
#include <vector>
#include <memory>
#include <iostream>
#include <cuda.h>
#include <curand.h>
#include <curand_kernel.h>
// Draws one uniform float in (0, 1] from the RNG stream at index ind,
// persisting the advanced stream state back to global memory.
__device__ float generate(curandState* globalState, int ind)
{
    curandState state = globalState[ind];
    float sample = curand_uniform( &state );
    globalState[ind] = state;   // write the advanced state back
    return sample;
}
// Initializes one curand state per thread: all streams share the seed but
// use a distinct subsequence (id), giving independent random sequences.
__global__ void setup_kernel ( curandState * state, unsigned long seed )
{
    int id = threadIdx.x + blockIdx.x * blockDim.x;
    curand_init ( seed, id, 0, &state[id] );
}
// Accumulates pseudo-random integers in [0, 1000000) into y[0], each thread
// drawing from its own RNG stream; the grid-stride loop covers all N streams.
__global__ void addToCount(int N, int *y, curandState* globalState)
{
    int id = threadIdx.x + blockIdx.x * blockDim.x;
    while (id < N)
    {
        int number = generate(globalState, id) * 1000000;
        printf("%i\n", number);   // debug output; device printf is serialized and slow
        atomicAdd(&(y[0]), number);   // all threads share one accumulator
        id += blockDim.x * gridDim.x;   // grid-stride advance
    }
}
__device__ float3 generate3(curandState* globalState, int ind)
{
    // Draw three independent uniform samples for an (x, y, z) position,
    // using (and advancing) the RNG stream stored at index ind.
    curandState state = globalState[ind];
    float3 pos = make_float3(0.0,0.0,0.0);
    pos.x = curand_uniform( &state );
    pos.y = curand_uniform( &state );
    pos.z = curand_uniform( &state );
    globalState[ind] = state;   // persist the advanced stream state
    return pos;
}
// Fills d_par with one float4 per particle: w carries a mass derived from
// the particle index, x/y/z carry random coordinates mapped into [-1, 1).
__global__ void initPosition(int N, float4 *d_par, curandState* globalState)
{
    int id = threadIdx.x + blockIdx.x * blockDim.x;
    if (id < N) // final block may not have all the threads
    {
        auto position = generate3(globalState, id);
        d_par[id].w = (float) id / 1000.0; // mass
        d_par[id].x = 2.0*position.x-1.0;  // map (0,1] -> (-1,1]
        d_par[id].y = 2.0*position.y-1.0;
        d_par[id].z = 2.0*position.z-1.0;
        //id += blockDim.x * gridDim.x;
    }
}
/**
 * Demo driver: seeds per-thread curand streams, sums random numbers into a
 * device counter, generates random particle positions, and prints both.
 */
int main(int argc, char** argv)
{
    int N = 768;
    int *d_y;          // device accumulator array (only d_y[0] is summed into)
    float4 *d_par;     // particle positions and mass
    std::vector<int> y(N);
    std::vector<float4> par(N);
    int blocksize = 256; // value usually chosen by tuning and hardware constraints
    int nblocks = ceil( (float) N/blocksize);
    printf("nblocks = %i\n", nblocks);
    cudaMalloc(&d_y, N * sizeof(int));
    // BUG FIX: d_y was used as an atomicAdd accumulator without ever being
    // initialized, so the reported total started from indeterminate garbage.
    cudaMemset(d_y, 0, N * sizeof(int));
    // allocate memory on device
    cudaMalloc(&d_par, N * sizeof(float4));
    curandState* devStates;
    cudaMalloc (&devStates, N * sizeof(curandState));
    //srand(time(0));
    srand(1234);   // fixed seed => reproducible RNG streams
    int seed = rand();
    setup_kernel<<<nblocks, blocksize>>>(devStates,seed);
    addToCount<<<nblocks, blocksize>>>(N, d_y, devStates);
    initPosition<<<nblocks, blocksize>>>(N, d_par, devStates);
    cudaDeviceSynchronize();
    cudaMemcpy(&y[0], d_y, N*sizeof(int), cudaMemcpyDeviceToHost);
    cudaMemcpy(&par[0], d_par, N*sizeof(float4), cudaMemcpyDeviceToHost);
    printf("final = %i\n", y[0]);
    printf("# %i particles:\n", N);
    for (auto particle : par) {
        printf("%15.7f %15.7f %15.7f %15.7f\n", particle.w,
               particle.x, particle.y, particle.z);
    }
    // Free the GPU memory here
    cudaFree(d_y);
    cudaFree(d_par);
    cudaFree(devStates);
}
|
11,240 | #include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <cstring>
#include <stdlib.h>
#include <stdio.h>
#include <string>
#include "gloveparser.cuh"
/*
 * Parses a GloVe-style text file of vectors into a flat float array.
 * Expected layout: line 1 = vector count (n), line 2 = dimensionality, then
 * one whitespace-separated vector per line whose first token (an ID) is
 * skipped. Returns a malloc'd row-major array of n*dimensions floats; n and
 * dimensions are returned through the reference parameters. Exits the
 * process if the file cannot be opened.
 * NOTE(review): values are parsed digit-by-digit; scientific notation in the
 * input would not be handled — confirm the data format before reuse.
 */
float* parseFile(char* path, int& n, int& dimensions) {
    FILE *fp;
    float* list;
    fp = fopen(path, "r");
    if (fp == NULL) {
        printf("Error while opening file: %s. \n", path);
        exit(-1);
    }
    bool number_is_negative = false; //Checks whether it is a negative number.
    int ch = 0;                      // current character (int so EOF fits)
    int comma_counter = 0; //The placement of the next comma digit.
    int index = 0; //The index in the matrix.
    bool comma = false; //Whether a comma has been registered yet.
    bool isID = true;    // first token of each line is an ID and is skipped
    float x = 0;         // value currently being assembled
    char _n[256], d[256];
    fgets(_n, sizeof(_n), fp);   // header line 1: number of vectors
    fgets(d, sizeof(d), fp);     // header line 2: dimensionality
    n = atoi(_n);
    dimensions = atoi(d);
    list = (float*)malloc(n * dimensions * sizeof(float));
    while ((ch = fgetc(fp)) != EOF) {
        // inner loop consumes one line (10 == '\n')
        while (ch != 10 && ch != EOF) {
            if (ch == 32) {   // 32 == ' ' terminates a token
                if (isID) { //Just continue after this. Otherwise we will be adding an unnecassary 0.
                    isID = false;
                }
                else { //Add number.
                    if (number_is_negative) x = x * -1.0; //Check if negative.
                    //printf("Adding to list[%d] value %f \n", index, x);
                    list[index] = x;
                    //Reset values.
                    x = 0;
                    index++;
                    comma = false;
                    number_is_negative = false;
                    comma_counter = 0;
                }
            }
            else if (isID) { // Is the number the ID.
                //Id is omitted for now.
            }
            else if (isdigit(ch)) { //Is it a number.
                if (comma) { //If comma, than compute correct digit.
                    // fractional digit: scale by 10^-position after the point
                    double digit = (ch - 48.0) / pow(10, comma_counter);
                    x = x + digit;
                    comma_counter++;
                }
                else { //Otherwise just assign number.
                    // NOTE(review): this overwrites rather than shifts, so
                    // only single-digit integer parts parse correctly — verify
                    // against the expected GloVe value range.
                    x = ch - 48.0;
                }
            }
            else if (ch == 45) { //Set negative flag.  (45 == '-')
                number_is_negative = true;
            }
            else if (ch == 46) { //Set comma flag.  (46 == '.')
                comma = true;
                comma_counter++;
            }
            ch = fgetc(fp); //Get next character.
        }
        // end of line: reset per-line parsing state
        x = 0;
        comma = false;
        comma_counter = 0;
        isID = true;
    }
    fclose(fp);
    return list;
}
11,241 | #include "includes.h"
// Flattens 3D lattice coordinates (x, y, z) into a linear index, x fastest.
// NX and NY are grid dimensions defined elsewhere (includes.h).
__device__ __forceinline__ size_t gpu_scalar_index(unsigned int x, unsigned int y, unsigned int z)
{
    return NX*(NY*z + y)+x;
}
// Computes the electric field E = -grad(fi) by central differences with
// periodic wrap-around in all three directions.
// Launch layout: blockIdx.y/z select y/z; blockIdx.x*blockDim.x + threadIdx.x
// selects x. NX/NY/NZ and dx/dy/dz come from includes.h (not visible here).
__global__ void gpu_efield(double *fi, double *ex, double *ey, double *ez){
    unsigned int y = blockIdx.y;
    unsigned int z = blockIdx.z;
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    // periodic neighbor indices (+1 / -1 with wrap)
    unsigned int xp1 = (x + 1) % NX;
    unsigned int yp1 = (y + 1) % NY;
    unsigned int zp1 = (z + 1) % NZ;
    unsigned int xm1 = (NX + x - 1) % NX;
    unsigned int ym1 = (NY + y - 1) % NY;
    unsigned int zm1 = (NZ + z - 1) % NZ;
    // central difference: (fi[i-1] - fi[i+1]) / (2*dx) == -d(fi)/dx
    ex[gpu_scalar_index(x, y, z)] = 0.5*(fi[gpu_scalar_index(xm1,y,z)] - fi[gpu_scalar_index(xp1, y, z)]) / dx;
    ey[gpu_scalar_index(x, y, z)] = 0.5*(fi[gpu_scalar_index(x, ym1, z)] - fi[gpu_scalar_index(x, yp1, z)]) / dy;
    ez[gpu_scalar_index(x, y, z)] = 0.5*(fi[gpu_scalar_index(x, y, zm1)] - fi[gpu_scalar_index(x, y, zp1)]) / dz;
}
11,242 | #include "pixel.cuh"
using namespace std;
// Pixel constructor
// Constructs a pixel at the given (x, y) coordinates.
Pixel::Pixel(int a, int b) : x(a), y(b) {}

// Constructs a default pixel at the origin.
Pixel::Pixel() : x(0), y(0) {}
11,243 | // #include "MultiLineCurve_GPU.hpp"
// #include "yuzu/foundation/memory/pointer.hpp"
// #include "domain/curves/MultiLineCurve.hpp"
//
// #if defined(_DEBUG) || defined(DEBUG)
// #define cuda_assert(x) { if ((x) != CUDA_SUCCESS) throw 0xDEAD; }
// #else
// #define cuda_assert(x) x
// #endif
//
// #define X(x) points[2*(x) + 0]
// #define Y(x) points[2*(x) + 1]
//
// namespace adcu = axis::domain::curves;
// namespace ayfm = axis::yuzu::foundation::memory;
//
// extern adcu::Curve::GPUCurveOperator_t adcu::__multiLineCurveOperatorAddr = nullptr;
//
// __device__ real MultiLine_GPU_Curve_Op(void *curveDataPtr, real xCoord)
// {
// ayfm::RelativePointer& dataPtr = *(ayfm::RelativePointer *)curveDataPtr;
// void *curveDataRegion = *dataPtr;
// uint64 numPoints = *(uint64 *)curveDataRegion;
// const real *points = (real *)((uint64)curveDataRegion + sizeof(uint64));
// for (size_t i = 1; i < numPoints; i++)
// {
// if ((X(i) > xCoord) || (i == numPoints-1 && (abs(X(i) - xCoord) <= 1e-15)))
// {
// // trivial case: horizontal line
// // if (abs(Y(i-1) - Y(i)) <= 1e-15)
// // {
// // return Y(i);
// // }
// real a = Y(i) * (xCoord - X(i-1));
// real b = Y(i-1) * (xCoord - X(i));
// real c = 1.0 / (X(i) - X(i-1));
// a = a - b;
// a = a * c;
// return a;
// // ret urn (a - b) / c;
// // return (X(i)-X(i-1));
// // return ((Y(i)-Y(i-1))) * (xCoord - X(i-1)) / (X(i)-X(i-1));
// // return ((Y(i)-Y(i-1)) * (xCoord-X(i-1)) / (X(i)-X(i-1))) + Y(i-1);
// }
// }
// return 0;
// }
//
// __global__ void GetMultiLineCurveOperatorAddr(void *addr)
// {
// *((adcu::Curve::GPUCurveOperator_t *)addr) = &MultiLine_GPU_Curve_Op;
// }
//
// extern adcu::Curve::GPUCurveOperator_t
// adcu::GetMultiLineCurve_GPUOperatorAddress(void)
// {
// if (__multiLineCurveOperatorAddr == nullptr)
// {
// void *devPtr;
// cuda_assert(cudaMalloc(&devPtr, sizeof(void *)));
// cudaStream_t stream;
// cuda_assert(cudaStreamCreate(&stream));
// GetMultiLineCurveOperatorAddr<<<1,1,0, stream>>>(devPtr);
// cuda_assert(cudaStreamSynchronize(stream));
// cuda_assert(cudaStreamDestroy(stream));
// cuda_assert(cudaMemcpy(&__multiLineCurveOperatorAddr, devPtr,
// sizeof(void *), cudaMemcpyDeviceToHost));
// cuda_assert(cudaFree(devPtr));
// }
// return __multiLineCurveOperatorAddr;
// }
|
11,244 | #include "includes.h"
//this function is a kernel
//__global__ is a C construct called declaration specifier and that’s how CUDA knows that this is not CPU code but a kernel
//threadIdx: CUDA has a built in variable called threadIdx which tells each thread its index within a block. Its a C construct
//with 3 members “x”, “y” and “z” and the struct is called “dim3"
// Element-wise cube: d_out[i] = d_in[i]^3.
// Intended for a single-block launch; threadIdx.x selects the element.
__global__ void cube(float * d_out, float * d_in){
    int i = threadIdx.x;
    float v = d_in[i];
    d_out[i] = v * v * v;
}
11,245 | // includes, system
#include <cassert>
#include <iostream>
#include <vector>
// Here you can set the device ID that was assigned to you
#define MYDEVICE 0
// Simple utility function to check for CUDA runtime errors
void checkCUDAError(const char* msg);
///////////////////////////////////////////////////////////////////////////////
// Program main
///////////////////////////////////////////////////////////////////////////////
// Exercise: round-trips data host -> device -> device -> host with cudaMemcpy
// and asserts the values survive unchanged.
int main()
{
    cudaSetDevice(MYDEVICE);
    // pointer and dimension for host memory
    int dimA = 8;
    std::vector<float> h_a(dimA); // x = dimA, y = 1, z = 1
    // pointers for device memory
    float *d_a, *d_b;
    // allocate and initialize host memory
    for (int i = 0; i < dimA; ++i) {
        h_a[i] = i;
    }
    // Part 1 of 5: allocate device memory
    size_t memSize = dimA * sizeof(float);
    cudaMalloc(&d_a, memSize);
    cudaMalloc(&d_b, memSize);
    // Part 2 of 5: host to device memory copy
    // Hint: the raw pointer to the underlying array of a vector
    // can be obtained by calling std::vector<T>::data()
    cudaMemcpy(d_a, h_a.data(), memSize, cudaMemcpyHostToDevice);
    // Part 3 of 5: device to device memory copy
    cudaMemcpy(d_b, d_a, memSize, cudaMemcpyDeviceToDevice);
    // clear host memory so the copy-back below is observable
    std::fill(h_a.begin(), h_a.end(), 0);
    // Part 4 of 5: device to host copy
    cudaMemcpy(h_a.data(), d_b, memSize, cudaMemcpyDeviceToHost);
    // Check for any CUDA errors
    checkCUDAError("cudaMemcpy calls");
    // verify the data on the host is correct
    for (int i = 0; i < dimA; ++i) {
        assert(h_a[i] == (float)i);
    }
    // Part 5 of 5: free device memory pointers d_a and d_b
    cudaFree(d_a);
    cudaFree(d_b);
    // Check for any CUDA errors
    checkCUDAError("cudaFree");
    // If the program makes it this far, then the results are correct and
    // there are no run-time errors. Good work!
    std::cout << "Correct!" << std::endl;
    return 0;
}
// Aborts the process with a diagnostic if the most recent CUDA runtime call
// recorded an error; msg identifies the call site in the output.
void checkCUDAError(const char* msg)
{
    cudaError_t status = cudaGetLastError();
    if (status != cudaSuccess) {
        std::cerr << "Cuda error: " << msg << " "
                  << cudaGetErrorString(status) << std::endl;
        exit(-1);
    }
}
|
11,246 | #include <cuda_runtime.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <math.h>
#include <sys/time.h>
#define N 1048576
#define TAMSIMPLESORT 1024
#define TPB 1024
#define DEBUG false
#define PRINTARRAY false
typedef float dato; //Tipo de dato a ordenar
// In-place selection sort of data[left..right] (inclusive bounds),
// run by a single thread.
__device__ void simple_sort(dato *data, int left, int right) {
    for (int i = left; i <= right; ++i) {
        dato min_val = data[i];
        int min_idx = i;
        // Find the smallest value in the range [i, right].
        for (int j = i + 1; j <= right; ++j) {
            // BUG FIX: the candidate was previously read into an 'unsigned'
            // variable, truncating float data (and mangling negatives), which
            // corrupted the ordering. Use the element type instead.
            dato val_j = data[j];
            if (val_j < min_val) {
                min_idx = j;
                min_val = val_j;
            }
        }
        // Swap the values.
        if (i != min_idx) {
            data[min_idx] = data[i];
            data[i] = min_val;
        }
    }
}
// One thread per bucket: selection-sorts the occupied prefix of its
// 2*TAMSIMPLESORT-wide slot in 'clasificado'. indices[t] holds how many
// elements were classified into bucket t.
__global__ void run_simple_sort(dato * clasificado, int * indices) {
    int thread = blockIdx.x*blockDim.x + threadIdx.x;
    int min = thread * TAMSIMPLESORT * 2;                       // slot start
    int max = (thread * TAMSIMPLESORT * 2) + indices[thread] - 1; // last used index
    if (DEBUG) printf("RSS Hilo %d - min:%d - max:%d\n", thread, min, max);
    simple_sort(clasificado, min, max);
}
// Classifies each input element into the first bucket whose upper boundary
// ('frontera') is >= the value, atomically bumping that bucket's fill count
// and storing the element in the bucket's slot within 'clasificado'.
// NOTE(review): the loop runs i = 0..bloques inclusive, but 'fronteras' only
// holds 'bloques' entries, so i == bloques reads out of bounds; and if no
// boundary matches, 'ind' is used uninitialized. Verify that the boundary
// generation always covers the maximum value.
__global__ void clasificar(dato * v, dato * fronteras, int * indices, int bloques, dato * clasificado) {
    int thread = blockIdx.x*blockDim.x + threadIdx.x;
    int ind, i;
    for (i = 0; i <= bloques; i++) {
        if (v[thread] <= fronteras[i]) {
            ind = atomicAdd(indices + i, 1);   // claim a slot in bucket i
            break;
        }
    }
    clasificado[2 * TAMSIMPLESORT*i + ind] = v[thread];
}
// Initializes one bucket per thread: a zero fill-count and an upper boundary
// at min + paso*(thread+1), i.e. evenly spaced bucket edges above 'min'.
__global__ void rellenarIndFront(dato min, dato paso, int * indices, dato * fronteras) {
    int thread = blockIdx.x*blockDim.x + threadIdx.x;
    indices[thread] = 0;
    fronteras[thread] = min + paso*(thread+1);
    if (DEBUG) printf("RELLENAR hilo:%d frontera:%f min:%f paso:%f\n", thread, fronteras[thread], min, paso);
}
/*
 * Bucket-sort driver: partitions v_H (length l) into value-range buckets on
 * the GPU, selection-sorts each bucket, then concatenates the buckets back
 * into v_H in ascending order.
 */
void runsort(dato * v_H, int l) {
    dato * v_D;
    cudaMalloc((void **)&v_D, sizeof(dato)*l);
    cudaMemcpy(v_D, v_H, sizeof(dato)*l, cudaMemcpyHostToDevice);
    // host-side scan for the value range [min, max]
    dato min = v_H[0];
    dato max = v_H[0];
    for (int i = 0; i < l; i++) {
        if (v_H[i] < min) {
            min = v_H[i];
        }
        if (v_H[i] > max) {
            max = v_H[i];
        }
    }
    int bloques = l / TAMSIMPLESORT;
    if (l % TAMSIMPLESORT != 0) bloques++;
    dato * fronteras;   // bucket upper boundaries
    cudaMalloc((void**)&fronteras, bloques * sizeof(dato));
    int * indices;      // per-bucket fill counters
    cudaMalloc((void**)&indices, bloques * sizeof(int));
    int cudablocks = bloques / TPB;
    if (bloques % TPB != 0) cudablocks++;
    int cudathreads = bloques / cudablocks;
    if (DEBUG) printf("RUNSORT RELLENAR blocks:%d threads:%d min:%f, max:%f\n", cudablocks, cudathreads, min, max);
    rellenarIndFront <<<cudablocks, cudathreads>>> (min, (max-min) / (dato)bloques, indices, fronteras);
    cudablocks = l / TPB;
    if (l % TPB != 0) cudablocks++;
    cudathreads = l / cudablocks;
    dato * destino;     // bucket storage: 2*TAMSIMPLESORT slots per bucket
    cudaMalloc((void**)&destino, l * 2 * sizeof(dato));
    clasificar <<<cudablocks, cudathreads >>> (v_D, fronteras, indices, bloques, destino);
    cudaFree(v_D);   // input copy no longer needed once classified
    cudablocks = bloques / TPB;
    if (bloques % TPB != 0) cudablocks++;
    cudathreads = bloques / cudablocks;
    run_simple_sort <<<cudablocks, cudathreads >>> (destino, indices);
    int * H_indices = (int *) malloc(sizeof(int)*bloques);
    cudaMemcpy(H_indices, indices, sizeof(int)*bloques, cudaMemcpyDeviceToHost);
    // concatenate the sorted buckets back into the host array
    int offset = 0;
    for (int i = 0; i < bloques; i++) {
        if (DEBUG) printf("COPY Dest: %d Origen: %d Long: %d\n", offset, i*TAMSIMPLESORT * 2, H_indices[i]+1);
        cudaMemcpy(v_H + offset, destino + i*TAMSIMPLESORT * 2, sizeof(dato)*(H_indices[i]+1), cudaMemcpyDeviceToHost);
        offset += H_indices[i];
    }
    if (DEBUG) printf("NUM ELEMENTOS = %d\n", offset);
    // BUG FIX: v_D was cudaFree'd a second time here (double free) — removed.
    free(H_indices);   // BUG FIX: host index buffer was previously leaked
    cudaFree(destino);
    cudaFree(fronteras);
    cudaFree(indices);
}
// Fills a vector of N random values, sorts it with the GPU bucket sort
// (runsort), and verifies the output is non-decreasing, reporting wall time.
int main()
{
    dato *vector;
    vector = (dato *)malloc(N * sizeof(dato));
    srand(time(NULL));
    for (int i = 0; i<N; i++)
        vector[i] = rand();
    bool correcto = true;   // stays true iff the output is sorted
    struct timeval t1, t2;
    printf("Ordenando vector de %d elementos...\n", N);
    gettimeofday(&t1, 0);
    runsort(vector, N);
    gettimeofday(&t2, 0);
    // elapsed wall time in milliseconds
    double time = (1000000.0*(t2.tv_sec-t1.tv_sec) + t2.tv_usec-t1.tv_usec)/1000.0;
    printf("Ordenado en %f ms \n", time);
    // sanity check: every element must be >= its predecessor
    dato aux = 0;
    for (int i = 0; i<N; i++) {
        if (aux > vector[i]) {
            correcto = false;
        }
        aux = vector[i];
        if (PRINTARRAY) printf("%d - %f\n", i, aux);
    }
    if (correcto) {
        printf("El vector se ha ordenado correctamente.\n\n");
    }
    else {
        printf("Ha fallado la ordenacion del vector.\n\n");
    }
    return 0;
}
|
11,247 | #include <stdio.h>
#include <cuda.h>
/*
 * Odd-even transposition sort of 'in' (length size), intended for a single
 * block. Shared flags record whether the odd or even phase swapped anything;
 * the loop exits once a full odd+even cycle performs no swaps, then the
 * sorted data is copied to 'out'.
 * NOTE(review): every thread writes the shared flags and performs the
 * compare/swap for index threadIdx.x; the exit condition relies on all
 * threads observing identical flag values between barriers — run under
 * compute-sanitizer racecheck to confirm this is benign.
 */
__global__ void testKernel(int *in, int *out, int size)
{
    bool oddeven=true;          // true => odd phase, false => even phase
    __shared__ bool swappedodd;   // any swap during the last odd phase?
    __shared__ bool swappedeven;  // any swap during the last even phase?
    int temp,i,rem1;              // NOTE(review): i and rem1 are unused
    swappedodd=true;
    swappedeven=true;
    while(true)
    {
        if(oddeven==true)
        {
            __syncthreads();
            swappedodd=false;
            __syncthreads();
            if (threadIdx.y == 0) {
                int idx=threadIdx.x;
                if(idx<(size/2))
                {
                    // compare/swap pairs (0,1), (2,3), ...
                    if (in[2*idx]>in[2*idx+1])
                    {
                        // swap(in[],in[2*idx+1]);
                        temp= in[2*idx];
                        in[2*idx]=in[2*idx+1];
                        in[2*idx+1]=temp;
                        swappedodd=true;
                    }
                }
            }
            __syncthreads();
        }
        else
        {
            __syncthreads();
            swappedeven=false;
            __syncthreads();
            if (threadIdx.y == 0) {
                int idx=threadIdx.x;
                if(idx<(size/2)-1)
                {
                    // compare/swap pairs (1,2), (3,4), ...
                    if (in[2*idx+1]>in[2*idx+2])
                    {
                        // swap(in[2*idx+1],in[2*idx+2]);
                        temp= in[2*idx+1];
                        in[2*idx+1]=in[2*idx+2];
                        in[2*idx+2]=temp;
                        swappedeven=true;
                    }
                }
            }
            __syncthreads();
        }
        // stop once neither phase swapped anything
        if(!(swappedodd||swappedeven))
            break;
        oddeven=!oddeven;//switch mode of sorting
    }
    __syncthreads();
    // copy the sorted array to the output buffer, one element per thread
    int idx=threadIdx.x;
    if ( idx <size )
        out[idx]=in[idx];
}
// Reads n integers from stdin, sorts them with the odd-even kernel, and
// prints the sorted sequence.
int main(void)
{
    int *a,*a_sorted,i;
    int *d_a,*d_sorted;
    int n=20;
    int size = sizeof(int)*n;
    cudaMalloc((void**)&d_a,size);
    cudaMalloc( (void**)&d_sorted, size);
    a=(int*)malloc(size);
    a_sorted=(int*)malloc(size);
    // BUG FIX: d_sorted was cudaMalloc'd a second time here, leaking the
    // first allocation — the duplicate call has been removed.
    printf("enter the unsorted numbers\n");
    for(i=0;i<n;i++)
    {
        scanf("%d",&a[i]);
    }
    cudaMemcpy(d_a, a, size, cudaMemcpyHostToDevice);
    testKernel<<<1,n>>>(d_a, d_sorted,n);
    cudaMemcpy(a, d_a, size, cudaMemcpyDeviceToHost);
    cudaMemcpy(a_sorted, d_sorted, size, cudaMemcpyDeviceToHost);
    for (i=0;i<n;i++)
    {
        printf("%d",a_sorted[i]);
        printf("\t");
    }
    free(a);
    free(a_sorted);
    cudaFree(d_sorted);
    cudaFree(d_a);
}
|
// Exercises __syncthreads() placed inside data-dependent branches.
// NOTE(review): if threads of one block disagree on any of these input-
// dependent conditions, the barriers below are divergent — undefined
// behavior on real hardware. This looks like a fixture for a barrier-
// divergence analyzer, so the control flow is documented, not changed.
__global__ void foo(int *inp, int *out)
{
    if(inp[0] == 1) {
        if(inp[2] == 2) {
            out[3] = 3;
        }
        else {
            __syncthreads();   // barrier on only one side of the branch
        }
    }
    else {
        if(inp[3] == 4) {
            out[0] = 4;
            __syncthreads();
        }
        else {
            if(inp[4] == 5) {
                out[5] = 5;
            }
            else if(inp[5] == 5) {
                out[4] = inp[5] + 4;
                if(inp[14] == 66) {
                    out[44] = 5;
                }
                else {
                    out[32] = 5;
                }
                __syncthreads();
            }
            else {
                __syncthreads();
                out[4] = 5;
            }
        }
    }
}
|
11,249 | #include <thrust/device_ptr.h>
#include <thrust/device_malloc.h>
#include <thrust/device_free.h>
#include <thrust/sort.h>
#include "thrust.cuh"
// Demo: stable-sorts three floats held in raw device storage with Thrust and
// prints the ascending result ("1, 2, 3") to stdout.
void my_thrust::stable_sort() {
    thrust::device_ptr<float> d_ptr = thrust::device_malloc<float>(3);
    thrust::device_ptr<float> first = d_ptr;
    thrust::device_ptr<float> last = d_ptr + 3;
    // each element assignment/read through device_ptr is a host<->device transfer
    d_ptr[0] = 3.0; d_ptr[1] = 2.0; d_ptr[2] = 1.0;
    thrust::stable_sort(first, last);
    std::cout << d_ptr[0] << ", " << d_ptr[1] << ", " << d_ptr[2] << std::endl;
    thrust::device_free(d_ptr);
}
|
11,250 | #include <cuda.h>
#include <cuda_runtime_api.h>
#define ID2_BASE_WIDTH 8
#define ID2_BASE_HEIGHT 4
#define ID2_THRESHOLD .08f //definitely needs to be changed
#define ID2_SKIP_AMOUNT 4 //amount to skip in pixels, we can change this to be multiplied by scale if necessary/desirable
//This identifier is 2 horizontal bars with dark (negative) on top and light (positive) on bottom
// Haar-like feature scan over an integral image: each thread slides the
// two-bar identifier across its assigned subwindow, keeps the best fit
// value, and sets faceDetected[threadNum] when the area-normalized fit
// exceeds ID2_THRESHOLD.
// NOTE(review): offsets[] is decoded as x = offset/stride, y = offset%stride;
// confirm the caller packs offsets that way.
__global__
void ID2kernel(float* intImage, size_t stride, int* offsets, int windowSize, int numSubWindows, int scale, int* faceDetected, float* results, float* heatMap) {
    int threadNum = blockIdx.x * blockDim.x + threadIdx.x;
    if(threadNum < numSubWindows){   // guard: grid may overshoot the work count
        float maxFitValue = 0.0f;
        int startX = offsets[threadNum]/(stride);
        int startY = offsets[threadNum]%stride;
        for (int i = startX; (i+ID2_BASE_WIDTH*scale) < (startX+windowSize); i = i+ID2_SKIP_AMOUNT){ //use ID2_SKIP_AMOUNT * scale for it to scale up as identifier scales
            for (int j = startY; (j+ID2_BASE_HEIGHT*scale) < (startY + windowSize); j = j+ID2_SKIP_AMOUNT){
                // take important corners from image
                float upperLeft = intImage[i*stride + j];
                float upperRight = intImage[(i+ID2_BASE_WIDTH*scale)*stride + j];
                float midLeft = intImage[i*stride + j+(ID2_BASE_HEIGHT*scale>>1)];
                float midRight = intImage[(i+ID2_BASE_WIDTH*scale)*stride + j+(ID2_BASE_HEIGHT*scale>>1)];
                float lowerLeft = intImage[i*stride + j+(ID2_BASE_HEIGHT*scale)];
                float lowerRight = intImage[(i+ID2_BASE_WIDTH*scale)*stride + j+(ID2_BASE_HEIGHT*scale)];
                //calulate fit value based on identifier (hard-coded)
                float fitValue = midLeft*2 - midRight*2 - upperLeft + lowerRight + upperRight - lowerLeft;
                if(fitValue > maxFitValue){   // track the best placement seen
                    maxFitValue = fitValue;
                }
            }
        }
        float goodnessValue = maxFitValue/(ID2_BASE_WIDTH*scale*ID2_BASE_HEIGHT*scale); // goodnessValue = fit/area
        // results[threadNum] = goodnessValue;
        if(goodnessValue > ID2_THRESHOLD){
            faceDetected[threadNum] = 1;
            // for(int i = 0; i < windowSize; ++i){
            // for(int j = 0; j < windowSize; ++j){
            // heatMap[offsets[threadNum] + i*stride + j] = heatMap[offsets[threadNum] + i*stride + j] + 1.0f;
            // }
            // }
        }
    }
}
|
11,251 | #include <iostream>
#include <cstdlib>
#include <math.h>
#include <stdio.h>
#include <assert.h>
#include <fstream>
#include <time.h>
#define TILE_WIDTH 16
#define maskCols 5
#define maskRows 5
#define w (TILE_WIDTH + maskCols -1)
/*
 * Tiled 2D convolution: each TILE_WIDTH x TILE_WIDTH block stages a
 * w x w halo (w = TILE_WIDTH + maskCols - 1) of the input in shared memory
 * in two batched loads, then convolves it with the maskRows x maskCols
 * kernel, once per channel. Out-of-image halo cells are zero-filled.
 */
__global__ void tilingKernelProcessing(float * InputImageData, const float *__restrict__ kernel,
    float* outputImageData, int channels, int width, int height)
{
    __shared__ float N_ds[w][w]; //block of image in shared memory
    // allocation in shared memory of image blocks
    int maskRadius = maskRows/2;
    for (int k = 0; k <channels; k++) {
        // first batched load: threads cooperatively fill the leading part of the halo
        int dest = threadIdx.y * TILE_WIDTH + threadIdx.x;
        int destY = dest/w; //row of shared memory
        int destX = dest%w; //col of shared memory
        int srcY = blockIdx.y *TILE_WIDTH + destY - maskRadius; // index to fetch data from input image
        int srcX = blockIdx.x *TILE_WIDTH + destX - maskRadius; // index to fetch data from input image
        int src = (srcY *width +srcX) * channels + k; // index of input image
        if(srcY>= 0 && srcY < height && srcX>=0 && srcX < width)
            N_ds[destY][destX] = InputImageData[src]; // copy element of image in shared memory
        else
            N_ds[destY][destX] = 0;   // zero padding outside the image
        // second batched load: covers the remaining halo cells
        dest = threadIdx.y * TILE_WIDTH+ threadIdx.x + TILE_WIDTH * TILE_WIDTH;
        destY = dest/w;
        destX = dest%w;
        srcY = blockIdx.y *TILE_WIDTH + destY - maskRadius;
        srcX = blockIdx.x *TILE_WIDTH + destX - maskRadius;
        src = (srcY *width +srcX) * channels + k;
        if(destY < w){
            if(srcY>= 0 && srcY < height && srcX>=0 && srcX < width)
                N_ds[destY][destX] = InputImageData[src];
            else
                N_ds[destY][destX] = 0;
        }
        __syncthreads();   // halo fully staged before any thread convolves
        //compute kernel convolution
        float accum = 0;
        int y, x;
        for (y= 0; y < maskCols; y++)
            for(x = 0; x<maskRows; x++)
                accum += N_ds[threadIdx.y + y][threadIdx.x + x] *kernel[y * maskCols + x];
        y = blockIdx.y * TILE_WIDTH + threadIdx.y;
        x = blockIdx.x * TILE_WIDTH + threadIdx.x;
        if(y < height && x < width)
            outputImageData[(y * width + x) * channels + k] = accum;
        __syncthreads();   // reads done before the next channel overwrites N_ds
    }
}
/*
 * Host wrapper: convolves an img_height x img_width single-channel image
 * with a random r x r kernel on the GPU (tilingKernelProcessing) and returns
 * the elapsed GPU time in milliseconds via gpu_elapsed_time_ms.
 */
void MC(float * input,float* output, int img_height, int img_width, const int r, float & gpu_elapsed_time_ms)
{
    // initialize kernel here
    int kernel_height = r;
    int kernel_width = r;
    float *kernel;
    kernel = new float[r*r];
    for (int i = 0; i < r*r; i++){
        kernel[i] = rand() % 10 + 1;   // random weights in [1, 10]
    }
    float * mask = new float[kernel_height*kernel_width];
    for (int i = 0; i < kernel_height*kernel_width; i++)
    {
        mask[i] = kernel[i];
    }
    float * d_input, * d_output, * d_kernel;
    cudaMalloc(&d_input, img_width*img_height*sizeof(float));
    cudaMalloc(&d_output, img_width*img_height*sizeof(float));
    cudaMalloc(&d_kernel, kernel_height*kernel_width*sizeof(float));
    cudaMemcpy(d_input, input, img_width*img_height*sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_kernel, mask, kernel_height*kernel_width*sizeof(float), cudaMemcpyHostToDevice);
    dim3 blocksize(16,16);
    dim3 gridsize;
    gridsize.x=(img_width+blocksize.x-1)/blocksize.x;   // ceil-div over columns
    gridsize.y=(img_height+blocksize.y-1)/blocksize.y;  // ceil-div over rows
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start, 0);
    tilingKernelProcessing<<<gridsize,blocksize>>>(d_input, d_kernel, d_output, 1, img_width, img_height);
    cudaMemcpy(output, d_output, img_width*img_height*sizeof(float), cudaMemcpyDeviceToHost);
    cudaEventRecord(stop, 0);
    cudaEventSynchronize(stop);
    cudaEventElapsedTime(&gpu_elapsed_time_ms, start, stop);
    // BUG FIX: every resource below was previously leaked on each call,
    // which matters since the benchmark driver calls MC hundreds of times.
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    cudaFree(d_input);
    cudaFree(d_output);
    cudaFree(d_kernel);
    delete[] kernel;
    delete[] mask;
}
/*
 * Benchmark driver: times the GPU convolution (MC) over NUM randomly sized
 * and randomly dense matrices and appends (time, m, n, r, density, cost)
 * rows to a CSV file.
 */
int main(){
    // number of instances of data generated
    int NUM = 500;
    std::ofstream ofile;
    // customize output filename
    ofile.open("matrix_conv_gpu_500_points_Tesla_2.csv");
    for (int iterator = 0; iterator < NUM; iterator++) {
        if (iterator % 10 == 0) std::cout << "iter: " << iterator << std::endl;
        float *in, *out;
        int m = rand() % 1024 + 10;          // rows
        int n = rand() % 1024 + 10;          // cols
        int is = n * m;                      // element count
        int r = (rand() % 3 + 1) * 2 + 1;    // odd kernel size: 3, 5 or 7
        // BUG FIX: value-initialize so the sparse branch below does not leave
        // unwritten elements holding indeterminate garbage.
        in = new float[is]();
        out = new float[is]();
        // density
        int power;
        double d;
        power = rand() % int((log2(double(m * n)) + 1));
        d = 1 / pow(2, power);
        // initialize matrix A
        // if A is a sparse matrix
        if (d <= 0.5) {
            int count_a = m * n * d;
            for (int it = 0; it < count_a; it++) {
                int i = rand() % m;
                int j = rand() % n;
                in[i * n + j] = rand() % 1024 + 1;
            }
        // if A is a dense matrix
        } else {
            for (int i = 0; i < m * n; i++) {
                in[i] = rand() % 1024 + 1;
            }
        }
        float time;
        // perform kernel operation
        MC(in, out, n, m, r, time);
        int c = (m-r+1)*(n-r+1)*r*r;   // rough multiply-accumulate count
        ofile << time / 1000;
        ofile << "," << m << "," << n << "," << r << "," << d << "," << c << ",\n";
        // BUG FIX: both buffers were previously leaked on every iteration.
        delete[] in;
        delete[] out;
    }
    ofile.close();
    return 0;
}
11,252 | // Simple CUDA example by Ingemar Ragnemalm 2009. Simplest possible?
// Assigns every element in an array with its index.
// nvcc simple.cu -L /usr/local/cuda/lib -lcudart -o simple
#include <stdio.h>
const int N = 16;
const int blocksize = 16;
// Element-wise square root: result_gpu[i] = sqrt(c_gpu[i]).
// One thread per element; intended for a single-block launch of N threads.
__global__
void simple(float *c_gpu, float *result_gpu)
{
    int i = threadIdx.x;
    result_gpu[i] = sqrt(c_gpu[i]);
}
// Fills an array with 0..N-1, computes element-wise square roots on the GPU,
// and checks the results against the host sqrt.
int main()
{
    const int size = N*sizeof(float);
    float *c = new float[N];
    float *result = new float[N];
    float *c_gpu, *result_gpu;
    for (int i = 0; i < N; i++)
        c[i] = i;
    cudaMalloc( (void**)&c_gpu, size );
    cudaMalloc( (void**)&result_gpu, size );
    dim3 dimBlock( blocksize, 1 );
    dim3 dimGrid( 1, 1 );
    cudaMemcpy( c_gpu, c, size, cudaMemcpyHostToDevice );
    simple<<<dimGrid, dimBlock>>>(c_gpu, result_gpu);
    cudaThreadSynchronize();
    cudaMemcpy( result, result_gpu, size, cudaMemcpyDeviceToHost );
    cudaFree( c_gpu );
    // BUG FIX: the original called cudaFree(result) on a HOST pointer and
    // never released the device buffer result_gpu.
    cudaFree( result_gpu );
    for (int i = 0; i < N; i++) {
        printf("%f ", result[i]);
        if(result[i] != sqrt(c[i])) {
            printf("Diff, %e", result[i] - sqrt(c[i]));
        }
        printf("\n");
    }
    delete[] c;
    delete[] result;   // BUG FIX: result was previously leaked
    printf("done\n");
    return EXIT_SUCCESS;
}
|
11,253 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <sys/time.h>
#include <unistd.h>
static int ROW_A = 16384;
static int COL_A = 16384;
static int COL_B = 16384;
/* Returns the current wall-clock time in seconds (microsecond resolution). */
double get_time() {
    struct timeval now;
    gettimeofday(&now, NULL);
    return (double)now.tv_sec + (double)now.tv_usec * 1e-6;
}
void mat_mul_seq(float *A, float *B, float *C,
int ROW_A, int COL_A, int COL_B);
void mat_mul_opencl(float *A, float *B, float *C,
int ROW_A, int COL_A, int COL_B);
void mat_mul_cuda(float *A, float *B, float *C,
int ROW_A, int COL_A, int COL_B);
void mat_mul_cuda_multi(float *A, float *B, float *C,
int ROW_A, int COL_A, int COL_B);
void mat_mul_cublas(float *A, float *B, float *C,
int ROW_A, int COL_A, int COL_B);
void verify(float *A, float *B, float *C,
int ROW_A, int COL_A, int COL_B);
/* Dispatcher: fills A and B with random values, runs the matrix multiply
 * selected by argv[1] (0=sequential, 1=OpenCL, 2=CUDA, 3=multi-GPU CUDA,
 * 4=cuBLAS), times it, and spot-verifies the result. */
int main(int argc, char *argv[]) {
  if (argc < 2) {
    printf("Usage: %s <option>\n", argv[0]);
    exit(EXIT_FAILURE);
  }
  int option = atoi(argv[1]);
  float *A = (float*)malloc(sizeof(float) * ROW_A * COL_A);
  float *B = (float*)malloc(sizeof(float) * COL_A * COL_B);
  float *C = (float*)malloc(sizeof(float) * ROW_A * COL_B);
  int i, j;
  for (i = 0; i < ROW_A; i++) {
    for (j = 0; j < COL_A; j++) {
      A[i * COL_A + j] = (float)(rand() % 1000) / 100.0f;
    }
  }
  for (i = 0; i < COL_A; i++) {
    for (j = 0; j < COL_B; j++) {
      B[i * COL_B + j] = (float)(rand() % 1000) / 100.0f;
    }
  }
  printf("Matrix Multiplication\n");
  /* BUG FIX: the format string used %lu for plain int arguments, which is
   * undefined behavior; %d matches the static int dimensions. */
  printf("C[%d X %d] = A[%d X %d] X B[%d X %d]\n",
         ROW_A, COL_B, ROW_A, COL_A, COL_A, COL_B);
  double start_time = get_time();
  if (option == 0) {
    printf("Sequential version...\n");
    mat_mul_seq(A, B, C, ROW_A, COL_A, COL_B);
  } else if (option == 1) {
    printf("OpenCL version...\n");
    mat_mul_opencl(A, B, C, ROW_A, COL_A, COL_B);
  } else if (option == 2) {
    printf("CUDA version...\n");
    mat_mul_cuda(A, B, C, ROW_A, COL_A, COL_B);
  } else if (option == 3) {
    printf("CUDA multi device version...\n");
    mat_mul_cuda_multi(A, B, C, ROW_A, COL_A, COL_B);
  } else if (option == 4) {
    printf("cuBLAS version...\n");
    mat_mul_cublas(A, B, C, ROW_A, COL_A, COL_B);
  } else {
    printf("Invalid option!\n");
    exit(EXIT_FAILURE);
  }
  double end_time = get_time();
  printf("Elapsed time: %f sec\n", end_time - start_time);
  verify(A, B, C, ROW_A, COL_A, COL_B);
  free(A);
  free(B);
  free(C);
  return 0;
}
/* Spot-checks C against the product A*B on a sparse grid of elements
 * (every 97th row, every 89th column), printing the first mismatch found
 * or a success message. */
void verify(float *A, float *B, float *C,
            int ROW_A, int COL_A, int COL_B) {
  for (int i = 0; i < ROW_A; i += 97) {
    for (int j = 0; j < COL_B; j += 89) {
      /* recompute the expected element in float, matching the kernels */
      float expected = 0.0f;
      for (int k = 0; k < COL_A; k++) {
        expected += A[i * COL_A + k] * B[k * COL_B + j];
      }
      if (fabsf(C[i * COL_B + j] - expected) > 0.1) {
        printf("Verification failed! C[%d][%d]: %f vs. %f\n",
               i, j, C[i * COL_B + j], expected);
        return;
      }
    }
  }
  printf("Verification success!\n");
}
|
11,254 | #include <stdio.h>
#include <stdlib.h>
#include "cuda.h"
// This is my DEVICE function
// __global__ means this function is visible to the host
// Device kernel: each thread prints its (thread, block) coordinates.
// __global__ makes the function launchable from the host.
__global__ void kernelHelloWorld() {
    const int t = threadIdx.x; // thread index within its block
    const int b = blockIdx.x;  // block index within the grid
    printf("Hello World from thread %d of block %d!\n", t, b);
}
// Launches kernelHelloWorld on the device and waits for it to finish.
// Improvement over the original: a kernel launch returns no status, so we
// now query the runtime for launch-configuration errors and check the
// result of cudaDeviceSynchronize (which also flushes device printf).
int main(int argc, char** argv) {
    int Nblocks = 10;  // number of blocks in the grid
    int Nthreads = 3;  // number of threads per block
    // Run 'kernelHelloWorld' on the DEVICE.
    kernelHelloWorld <<< Nblocks, Nthreads >>> ();
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess) {
        fprintf(stderr, "kernel launch failed: %s\n", cudaGetErrorString(err));
        return 1;
    }
    // Wait for the DEVICE function to complete before moving on.
    err = cudaDeviceSynchronize();
    if (err != cudaSuccess) {
        fprintf(stderr, "kernel execution failed: %s\n", cudaGetErrorString(err));
        return 1;
    }
    return 0;
}
|
11,255 | /* This should be a C++ file */
|
11,256 | /*********************************************************
* Name : kernelInteractions.cu
* Author : Kevin Mooney
* Created : 12/08/16
* Updated :
*
* Description:
*
* Notes:
*********************************************************/
#include "cudaErrors.cuh"
// Copies the device range [begin, end) to host memory starting at begin2,
// wrapping the device->host cudaMemcpy in the project's error checker.
void transfer2Host (float * begin, float * end, float * begin2 ) {
    const size_t nBytes = sizeof(float) * (size_t)(end - begin);
    gpuErrchk ( cudaMemcpy ( begin2, begin, nBytes, cudaMemcpyDeviceToHost ) );
}
|
11,257 | #include "includes.h"
// Partial selection sort per row of a batched m x n distance matrix:
// copies dist into out, then sorts the first k entries of every row into
// ascending order, keeping the matching original column indices in outi.
// One block per batch element (blockIdx.x); threads stride over rows.
// NOTE(review): parameter 'b' (batch count) is unused here — the grid
// x-dimension is assumed to equal the batch count; confirm the launch.
__global__ void selection_sort_gpu(int b, int n, int m, int k, const float *dist, int *outi, float *out) {
    int batch_index = blockIdx.x;
    // Advance all three pointers to this batch element's m x n slab.
    dist+=m*n*batch_index;
    outi+=m*n*batch_index;
    out+=m*n*batch_index;
    int index = threadIdx.x;
    int stride = blockDim.x;
    // copy from dist to dist_out, and seed outi with identity indices
    for (int j=index;j<m;j+=stride) {
        for (int s=0;s<n;++s) {
            out[j*n+s] = dist[j*n+s];
            outi[j*n+s] = s;
        }
    }
    float *p_dist;
    // Each thread sorts whole rows independently, so no synchronization
    // is needed between the copy above and the sort below for a given row.
    for (int j=index;j<m;j+=stride) {
        p_dist = out+j*n;
        // selection sort for the first k elements
        for (int s=0;s<k;++s) {
            int min=s;
            // find the min of the unsorted suffix
            for (int t=s+1;t<n;++t) {
                if (p_dist[t]<p_dist[min]) {
                    min = t;
                }
            }
            // swap min-th and s-th element (values and their indices)
            if (min!=s) {
                float tmp = p_dist[min];
                p_dist[min] = p_dist[s];
                p_dist[s] = tmp;
                int tmpi = outi[j*n+min];
                outi[j*n+min] = outi[j*n+s];
                outi[j*n+s] = tmpi;
            }
        }
    }
}
// Elementwise saxpy-style update over centroid arrays:
//   centroid_values[k] = a*centroid_values[k] + b*centroid_backup_values[k]
// N is the number of valid elements. Fix: the original accepted N but
// never checked it, so any launch whose grid overshot the array length
// performed out-of-bounds accesses; threads past the end now do nothing.
__global__ void _saxpy_centroid_values(
    int N,
    double a,
    double b,
    double * centroid_values,
    double * centroid_backup_values)
{
    // Flatten a (possibly) 2-D grid of 2-D blocks into a linear index.
    const int k =
        threadIdx.x+threadIdx.y*blockDim.x+
        (blockIdx.x+blockIdx.y*gridDim.x)*blockDim.x*blockDim.y;
    if (k < N) {
        centroid_values[k] = a*centroid_values[k] + b*centroid_backup_values[k];
    }
}
|
11,259 | #include "cuda_runtime.h"
#include "device_launch_parameters.h"
// Euclidean norm of a 3-vector (double precision).
// Improvement: sqrt() replaces the original pow(x, .5) — it is the
// idiomatic, cheaper and more accurate way to take a square root.
__device__ double modulus_3_d_gpu(double * v)
{
    double sumSq = v[0] * v[0] + v[1] * v[1] + v[2] * v[2];
    return sqrt(sumSq);
}
// Euclidean norm of a 3-vector (single precision).
// Improvement: sqrtf() replaces powf(x, .5) — idiomatic, cheaper and more
// accurate, and keeps the whole computation in float.
__device__ float modulus_3_f_gpu(float * v)
{
    float sumSq = v[0] * v[0] + v[1] * v[1] + v[2] * v[2];
    return sqrtf(sumSq);
}
// Euclidean norm of a 3-vector of ints, truncated to int on return.
// Improvement: sqrtf() replaces powf(x, .5).
// NOTE(review): the squares are accumulated in int and converted to
// float, so large components can overflow/lose precision — confirm the
// intended value range at the call sites.
__device__ int modulus_3_i_gpu(int * v)
{
    float sumSq = v[0] * v[0] + v[1] * v[1] + v[2] * v[2];
    return sqrtf(sumSq);
}
11,260 | /**********************************************************************
* DESCRIPTION:
* Serial Concurrent Wave Equation - C Version
* This program implements the concurrent wave equation
*********************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#define MAXPOINTS 1000000
#define MAXSTEPS 1000000
#define MINPOINTS 20
#define PI 3.14159265
void check_param(void);
void init_line(void);
void update (void);
void printfinal (void);
/**********************************************************************
 * Checks input values from parameters
 *
 * Re-prompts on stdin until *tpoints is within [MINPOINTS, MAXPOINTS]
 * and *nsteps is within [1, MAXSTEPS]. Security fix: the scanf width is
 * now bounded ("%19s"), so oversized input can no longer overflow the
 * 20-byte buffer.
 *********************************************************************/
__host__ void check_param(int *tpoints,int *nsteps)
{
    char tchar[20];
    /* check number of points, number of iterations */
    while ((*tpoints < MINPOINTS) || (*tpoints > MAXPOINTS)) {
        printf("Enter number of points along vibrating string [%d-%d]: "
               ,MINPOINTS, MAXPOINTS);
        scanf("%19s", tchar);
        *tpoints = atoi(tchar);
        if ((*tpoints < MINPOINTS) || (*tpoints > MAXPOINTS))
            printf("Invalid. Please enter value between %d and %d\n",
                   MINPOINTS, MAXPOINTS);
    }
    while ((*nsteps < 1) || (*nsteps > MAXSTEPS)) {
        printf("Enter number of time steps [1-%d]: ", MAXSTEPS);
        scanf("%19s", tchar);
        *nsteps = atoi(tchar);
        if ((*nsteps < 1) || (*nsteps > MAXSTEPS))
            printf("Invalid. Please enter value between 1 and %d\n", MAXSTEPS);
    }
    printf("Using points = %d, steps = %d\n", *tpoints, *nsteps);
}
/**********************************************************************
 * Calculate new values using wave equation
 *
 * One explicit finite-difference step:
 *   new = 2*cur - old + tau^2 * (-2*cur),  with tau = c*dt/dx.
 * Improvement: all constants are float literals so the device arithmetic
 * stays in single precision instead of being silently promoted to double.
 *********************************************************************/
__device__ float do_math(float currentvalue,float oldval)
{
    const float dtime = 0.3f;  /* time step */
    const float c     = 1.0f;  /* wave speed */
    const float dx    = 1.0f;  /* spatial step */
    const float tau   = (c * dtime / dx);
    const float sqtau = tau * tau;
    return ((2.0f * currentvalue) - oldval + (sqtau * (-2.0f)*currentvalue));
}
/**********************************************************************
 * Initialize points on line, then advance them nsteps time steps.
 * One thread per point; values[indx] receives the final amplitude.
 * NOTE(review): indx starts at 1 (global id + 1), so the `indx == 0`
 * endpoint test below can never fire; only the indx == tpoint end is
 * pinned to zero — confirm whether the left endpoint should also be
 * fixed (values[0] is never written by this kernel).
 *********************************************************************/
__global__ void init_line(float *values,int tpoint,int nsteps)
{
    float x, fac;
    float currentval,oldval,newval;
    // 1-based point index: global thread id shifted by one.
    int indx = blockIdx.x * blockDim.x + threadIdx.x+1;
    /* Calculate initial values based on sine curve */
    fac = 2.0 * PI;
    x = (indx -1.0)/(float)(tpoint-1.0);
    currentval = sin(fac * x);
    // First step uses old == current (zero initial velocity).
    oldval= currentval;
    /* Original serial initialization, kept for reference:
    k = 0.0;
    tmp = tpoints - 1;
    for (j = 1; j <= tpoints; j++) {
        x = k/tmp;
        values[j] = sin (fac * x);
        k = k + 1.0;
    }
    */
    /* Initialize old values array */
    /*
    for (i = 1; i <= tpoints; i++)
        oldval[i] = values[i];
    */
    /**********************************************************************
     * Update all values along line a specified number of times
     *********************************************************************/
    int i;
    // Each thread time-steps its own point entirely in registers.
    #pragma unroll 1024
    for ( i = 1; i<=nsteps; i++) {
        if (indx ==0 || indx == tpoint)
            currentval =0.0;
        else{
            newval = do_math(currentval,oldval);
            oldval = currentval;
            currentval = newval;
        }
    }
    // Single global write per thread at the end.
    values[indx] = currentval;
}
/**********************************************************************
 * Print final results
 *
 * Prints values[1..tpoints] with fixed precision, ten per output line.
 *********************************************************************/
__host__ void printfinal(float values[],int tpoints)
{
    for (int i = 1; i <= tpoints; i++) {
        printf("%6.4f ", values[i]);
        if (i % 10 == 0)
            printf("\n");
    }
}
/**********************************************************************
 * Main program
 *
 * Fixes over the original:
 *  - blocknumber/datasize were computed BEFORE check_param(), so a value
 *    corrected interactively no longer matched the allocated buffers;
 *    validation now happens first.
 *  - argv[1]/argv[2] were dereferenced without an argc check.
 *  - the host result buffer was never freed.
 *********************************************************************/
__host__ int main(int argc, char *argv[])
{
    float *values,*final_result;
    int nsteps = 0,              /* number of time steps */
        tpoints = 0;             /* total points along string */
    int blocknumber,datasize;
    /* Read parameters only if they were actually supplied; otherwise
     * check_param() will prompt for them interactively. */
    if (argc >= 2) sscanf(argv[1],"%d",&tpoints);
    if (argc >= 3) sscanf(argv[2],"%d",&nsteps);
    /* Validate (and possibly re-read) BEFORE sizing the allocation. */
    check_param(&tpoints,&nsteps);
    blocknumber = tpoints/1024+1;
    datasize = (blocknumber * 1024 + 1) * sizeof(float);
    cudaMalloc( (void**)&values, datasize);
    final_result =(float *) malloc(datasize);
    printf("Initializing points on the line...\n");
    printf("Updating all points for all time steps...\n");
    /* One thread per point; the kernel both initializes and time-steps. */
    init_line<<<blocknumber,1024>>>(values,tpoints,nsteps);
    printf("Printing final results...\n");
    /* cudaMemcpy is blocking, so it also synchronizes with the kernel. */
    cudaMemcpy( final_result, values, datasize, cudaMemcpyDeviceToHost );
    printfinal(final_result,tpoints);
    printf("\nDone.\n\n");
    cudaFree(values);
    free(final_result);  /* release the host buffer as well */
    return 0;
}
11,261 | #include <stdio.h>
// Minimal host-only program: prints a greeting and exits successfully.
int main(void)
{
    fputs("\nhello world\n", stdout);
    return 0;
}
|
11,262 | // https://github.com/FFmpeg/FFmpeg/blob/master/libavfilter/vf_chromakey.c
#define CUDA_KERNEL_LOOP_x(i,n) \
for(int i = blockIdx.x * blockDim.x + threadIdx.x; \
i < (n); \
i += blockDim.x * gridDim.x)
#define CUDA_KERNEL_LOOP_y(j,m) \
for(int j = blockIdx.y * blockDim.y + threadIdx.y; \
j < (m); \
j += blockDim.y * gridDim.y)
#define FIXNUM(x) lrint((x) * (1 << 10))
#define RGB_TO_U(rgb) (((- FIXNUM(0.16874) * rgb[0] - FIXNUM(0.33126) * rgb[1] + FIXNUM(0.50000) * rgb[2] + (1 << 9) - 1) >> 10) + 128)
#define RGB_TO_V(rgb) ((( FIXNUM(0.50000) * rgb[0] - FIXNUM(0.41869) * rgb[1] - FIXNUM(0.08131) * rgb[2] + (1 << 9) - 1) >> 10) + 128)
#define BGR_TO_U(bgr) (((- FIXNUM(0.16874) * bgr[2] - FIXNUM(0.33126) * bgr[1] + FIXNUM(0.50000) * bgr[0] + (1 << 9) - 1) >> 10) + 128)
#define BGR_TO_V(bgr) ((( FIXNUM(0.50000) * bgr[2] - FIXNUM(0.41869) * bgr[1] - FIXNUM(0.08131) * bgr[0] + (1 << 9) - 1) >> 10) + 128)
#define av_clipd(v, v_min, v_max) (max(min(v, v_max), v_min))
// Averages the 9 neighborhood chroma distances in diff_list and converts
// the result to an alpha value (0 = keyed out ... 1 = keep). With a
// meaningful blend radius the transition ramps linearly between
// 'similarity' and 'similarity + blend'; otherwise it is a hard threshold.
// Improvement: float literals throughout, so the device arithmetic stays
// in single precision instead of being promoted to double.
__device__ float do_chromakey_pixel_diff(
    float similarity, float blend,
    float * diff_list)
{
    float diff = 0.0f;
    int i;
    for (i = 0; i < 9; ++i) {
        diff += diff_list[i];
    }
    diff /= 9.0f;
    if (blend > 0.0001f) {
        return av_clipd((diff - similarity) / blend, 0.0f, 1.0f);
    } else {
        return (diff > similarity) ? 1.0f : 0.0f;
    }
}
// ---------------------------------------
// Chromakey kernel over an h x w BGR image (3 bytes/pixel, in-place):
// phase 1 writes per-pixel chroma distance to the key color into
// img_diff; phase 2 averages each pixel's 3x3 diff neighborhood into an
// alpha and scales the pixel by it.
// NOTE(review): __syncthreads() below is a BLOCK-level barrier. When the
// launch uses more than one block, a pixel's 3x3 neighborhood in phase 2
// may include img_diff entries another block has not written yet — a
// cross-block race. Consider splitting the two phases into separate
// kernel launches; confirm how this kernel is launched.
__global__ void rendering_kernel(const int h, const int w,
    const unsigned char * chromakey_bgr, const float * similarity_blend,
    unsigned char * img,
    float * img_diff)
{
    const unsigned char * p_tmp;
    int u, v, du, dv;
    float diff;
    // ---------------------------------------------------------
    // Convert the key color to UV chroma once per thread.
    unsigned char chromakey_uv[2];
    chromakey_uv[0] = BGR_TO_U(chromakey_bgr);
    chromakey_uv[1] = BGR_TO_V(chromakey_bgr);
    // ---------------------------------------------------------
    // Phase 1: per-pixel chroma distance to the key color (grid-stride
    // over both dimensions via the CUDA_KERNEL_LOOP macros).
    CUDA_KERNEL_LOOP_y(jj, h){
        CUDA_KERNEL_LOOP_x(ii, w){
            int idx_base = jj * w + ii;
            p_tmp = img + idx_base * 3;
            u = BGR_TO_U(p_tmp);
            v = BGR_TO_V(p_tmp);
            // Normalized Euclidean distance in UV space.
            du = u - chromakey_uv[0];
            dv = v - chromakey_uv[1];
            diff = sqrt((du * du + dv * dv) / (255.0 * 255.0 * 2));
            img_diff[idx_base] = diff;
        }
    }
    __syncthreads();
    // ---------------------------------------------------------
    // Phase 2: average the 3x3 diff neighborhood, derive alpha, scale.
    int xo, yo;
    int x, y;
    float diff_list[9];
    const float similarity = similarity_blend[0];
    const float blend = similarity_blend[1];
    CUDA_KERNEL_LOOP_y(jj, h){
        CUDA_KERNEL_LOOP_x(ii, w){
            int idx_base = jj * w + ii;
            for (yo = 0; yo < 3; ++yo) {
                for (xo = 0; xo < 3; ++xo) {
                    y = jj + yo - 1;
                    x = ii + xo - 1;
                    // Out-of-image neighbors keep the previous slot value.
                    if (x < 0 || x >= w || y < 0 || y >= h)
                        continue;
                    int idx_base_tmp = w * y + x;
                    diff_list[yo * 3 + xo] = img_diff[idx_base_tmp];
                }
            }
            float alpha = do_chromakey_pixel_diff(similarity, blend, diff_list);
            img[idx_base * 3 + 0] = img[idx_base * 3 + 0] * alpha;
            img[idx_base * 3 + 1] = img[idx_base * 3 + 1] * alpha;
            img[idx_base * 3 + 2] = img[idx_base * 3 + 2] * alpha;
        }
    }
}
|
11,263 | /* compile with: nvcc -O3 hw1.cu -o hw1 */
#include <stdio.h>
#include <sys/time.h>
///////////////////////////////////////////////// DO NOT CHANGE ///////////////////////////////////////
#define IMG_HEIGHT 256
#define IMG_WIDTH 256
#define N_IMAGES 10000
#define HISTOGRAM_SIZE 256
#define THREADS_PER_BLOCK_SERIAL 1024
#define THREADS_PER_BLOCK_BULK 256
typedef unsigned char uchar;
#define CUDA_CHECK(f) do { \
cudaError_t e = f; \
if (e != cudaSuccess) { \
printf("Cuda failure %s:%d: '%s'\n", __FILE__, __LINE__, cudaGetErrorString(e)); \
exit(1); \
} \
} while (0)
#define SQR(a) ((a) * (a))
// CPU reference: histogram-equalize one IMG_WIDTH x IMG_HEIGHT 8-bit
// image. Classic pipeline: histogram -> CDF -> first nonzero CDF value
// -> normalized mapping table -> remap every pixel.
void process_image(uchar *img_in, uchar *img_out) {
    const int n_pixels = IMG_WIDTH * IMG_HEIGHT;
    int histogram[256] = { 0 };
    for (int p = 0; p < n_pixels; p++)
        histogram[img_in[p]]++;
    // Running sum of the histogram gives the CDF.
    int cdf[256] = { 0 };
    int running = 0;
    for (int level = 0; level < 256; level++) {
        running += histogram[level];
        cdf[level] = running;
    }
    // The smallest nonzero CDF entry anchors the normalization.
    int cdf_min = 0;
    for (int level = 0; level < 256; level++) {
        if (cdf[level] != 0) {
            cdf_min = cdf[level];
            break;
        }
    }
    // Build the gray-level remapping lookup table.
    uchar map[256] = { 0 };
    for (int level = 0; level < 256; level++) {
        int mapped = (float)(cdf[level] - cdf_min) / (n_pixels - cdf_min) * 255;
        map[level] = (uchar)mapped;
    }
    for (int p = 0; p < n_pixels; p++)
        img_out[p] = map[img_in[p]];
}
// Wall-clock time in milliseconds (gettimeofday resolution).
double static inline get_time_msec(void) {
    struct timeval tv;
    gettimeofday(&tv, NULL);
    return 1e+3 * tv.tv_sec + 1e-3 * tv.tv_usec;
}
// Sum of squared per-pixel differences over the whole image batch.
long long int distance_sqr_between_image_arrays(uchar *img_arr1, uchar *img_arr2) {
    long long int total = 0;
    const int n = N_IMAGES * IMG_WIDTH * IMG_HEIGHT;
    for (int p = 0; p < n; p++) {
        const int d = img_arr1[p] - img_arr2[p];
        total += d * d;
    }
    return total;
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Block-wide reduction: returns the minimum POSITIVE (nonzero) element
// of arr[0..len), computed in shared memory without disturbing arr.
// NOTE(review): the tree reduction assumes len is a power of two and
// that blockDim.x >= len so every slot gets loaded — both hold at the
// call site (len == HISTOGRAM_SIZE == 256); confirm before reusing.
__device__ int array_min_positive(int *arr, int len){
    int tid = threadIdx.x;
    __shared__ int min_arr[HISTOGRAM_SIZE];
    if (tid < len) {
        min_arr[tid] = arr[tid]; //copy the arr to preserve it
    }
    __syncthreads();
    int half_size = len /2;
    while (half_size >=1){
        if (tid < half_size) {
            // Branchless select: take the partner value when it is
            // positive and smaller, or when our own value is zero.
            bool change_flag = (min_arr[tid + half_size] > 0 && min_arr[tid]
            > min_arr[tid + half_size] || min_arr[tid] == 0);
            min_arr[tid] = change_flag * min_arr[tid + half_size] +
            (!change_flag) * min_arr[tid];
        }
        // Barrier is outside the divergent branch: all threads reach it.
        __syncthreads();
        half_size /=2;
    }
    return min_arr[0];
}
// In-place inclusive prefix sum over arr[0..len) (Hillis–Steele style:
// each pass adds the value 'stride' slots back). The read and the write
// of each pass are separated by barriers so no thread overwrites a value
// another thread still needs.
// NOTE(review): correct only when at least 'len' threads participate
// (blockDim.x >= len) — true at the call site (len == HISTOGRAM_SIZE).
__device__ void prefix_sum(int *arr, int len){
    int tid = threadIdx.x;
    int increment;
    for (int stride = 1; stride < len; stride *= 2) {
        if (tid < len && tid >= stride) { // in case # threads bigger than array length
            increment = arr[tid - stride];
        }
        __syncthreads();
        if (tid < len && tid >= stride) { // in case # threads bigger than array length
            arr[tid] += increment;
        }
        __syncthreads();
    }
    return;
}
// Builds the equalization lookup table: mapOut[t] is the CDF value at
// gray level t, normalized by cdfMin and rescaled to [0, 255].
// One table entry per thread.
__device__ void map(int *cdf, int cdfMin, uchar* mapOut, int len){
    const int t = threadIdx.x;
    if (t >= len) {
        return;
    }
    int mapped = (float)(cdf[t] - cdfMin) / (IMG_WIDTH * IMG_HEIGHT - cdfMin) * 255;
    mapOut[t] = (uchar)mapped;
}
// GPU histogram equalization: one block per image (blockIdx.x selects
// the image). Pipeline mirrors the CPU reference: shared histogram via
// atomics -> in-place prefix sum (CDF) -> min positive CDF -> mapping
// table -> remap pixels.
// NOTE(review): assumes blockDim.x >= HISTOGRAM_SIZE and that blockDim.x
// divides IMG_WIDTH*IMG_HEIGHT (the pixel loops have no tail guard) —
// both hold for the 1024/256-thread launches in main; confirm if reused.
__global__ void process_image_kernel(uchar *in, uchar *out) {
    int tid = threadIdx.x;
    int imageStartIndex = IMG_WIDTH * IMG_HEIGHT * blockIdx.x;
    __shared__ int hist_shared[HISTOGRAM_SIZE];
    __shared__ uchar mapOut[HISTOGRAM_SIZE];
    // Zero the shared histogram before accumulating.
    if (tid < HISTOGRAM_SIZE) {
        hist_shared[tid] = 0;
    }
    __syncthreads();
    // Block-strided pass over the image; shared-memory atomics avoid
    // global-memory contention.
    for(int startOffset = 0; startOffset < IMG_WIDTH * IMG_HEIGHT; startOffset += blockDim.x){
        int pixelValue = in[imageStartIndex + startOffset + tid];
        atomicAdd(hist_shared + pixelValue, 1);
    }
    __syncthreads();
    // hist_shared becomes the CDF in place.
    prefix_sum(hist_shared, HISTOGRAM_SIZE);
    __syncthreads();
    int * cdf = hist_shared;
    int cdfMin = array_min_positive(cdf, HISTOGRAM_SIZE);
    __syncthreads();
    map(cdf, cdfMin, mapOut, HISTOGRAM_SIZE);
    __syncthreads();
    // Remap every pixel through the shared lookup table.
    for(int startOffset = 0; startOffset < IMG_WIDTH * IMG_HEIGHT; startOffset += blockDim.x){
        int pixelValue = in[imageStartIndex + startOffset + tid];
        out[imageStartIndex + startOffset + tid] = mapOut[pixelValue];
    }
    return;
}
// Benchmark harness: runs histogram equalization on N_IMAGES random
// images three ways (CPU reference, GPU one-image-at-a-time, GPU bulk)
// and reports time plus squared distance from the CPU baseline.
// Large sections are marked "DO NOT CHANGE" by the assignment.
int main() {
    ///////////////////////////////////////////////// DO NOT CHANGE ///////////////////////////////////////
    uchar *images_in;
    uchar *images_out_cpu; //output of CPU computation. In CPU memory.
    uchar *images_out_gpu_serial; //output of GPU task serial computation. In CPU memory.
    uchar *images_out_gpu_bulk; //output of GPU bulk computation. In CPU memory.
    // Pinned host allocations: enables fast(er) DMA transfers.
    CUDA_CHECK( cudaHostAlloc(&images_in, N_IMAGES * IMG_HEIGHT * IMG_WIDTH, 0) );
    CUDA_CHECK( cudaHostAlloc(&images_out_cpu, N_IMAGES * IMG_HEIGHT * IMG_WIDTH, 0) );
    CUDA_CHECK( cudaHostAlloc(&images_out_gpu_serial, N_IMAGES * IMG_HEIGHT * IMG_WIDTH, 0) );
    CUDA_CHECK( cudaHostAlloc(&images_out_gpu_bulk, N_IMAGES * IMG_HEIGHT * IMG_WIDTH, 0) );
    /* instead of loading real images, we'll load the arrays with random data */
    srand(0);
    for (long long int i = 0; i < N_IMAGES * IMG_WIDTH * IMG_HEIGHT; i++) {
        images_in[i] = rand() % 256;
    }
    double t_start, t_finish;
    // CPU computation. For reference. Do not change
    printf("\n=== CPU ===\n");
    t_start = get_time_msec();
    for (int i = 0; i < N_IMAGES; i++) {
        uchar *img_in = &images_in[i * IMG_WIDTH * IMG_HEIGHT];
        uchar *img_out = &images_out_cpu[i * IMG_WIDTH * IMG_HEIGHT];
        process_image(img_in, img_out);
    }
    t_finish = get_time_msec();
    printf("total time %f [msec]\n", t_finish - t_start);
    long long int distance_sqr;
    ///////////////////////////////////////////////////////////////////////////////////////////////////////////
    // GPU task serial computation: one image per iteration, one block.
    printf("\n=== GPU Task Serial ===\n"); //Do not change
    uchar *image_in_device_serial, *image_out_device_serial;
    /* allocating device memory for one image */
    CUDA_CHECK(cudaMalloc((void **)&image_in_device_serial,IMG_HEIGHT * IMG_WIDTH ));
    CUDA_CHECK(cudaMalloc((void **)&image_out_device_serial,IMG_HEIGHT * IMG_WIDTH ));
    t_start = get_time_msec(); //Do not change
    for (int i = 0; i < N_IMAGES; i++) {
        int imageStartIndex = IMG_HEIGHT * IMG_WIDTH * i;
        // Blocking H2D copy, single-block kernel, blocking D2H copy.
        CUDA_CHECK(cudaMemcpy(image_in_device_serial, images_in + imageStartIndex,
                              IMG_HEIGHT * IMG_WIDTH,
                              cudaMemcpyHostToDevice));
        process_image_kernel <<< 1, THREADS_PER_BLOCK_SERIAL >>> (image_in_device_serial, image_out_device_serial);
        CUDA_CHECK(cudaMemcpy(images_out_gpu_serial + imageStartIndex, image_out_device_serial,
                              IMG_HEIGHT * IMG_WIDTH, cudaMemcpyDeviceToHost));
        // NOTE(review): the blocking cudaMemcpy above already synchronized,
        // so this per-iteration device sync is redundant.
        cudaDeviceSynchronize();
    }
    t_finish = get_time_msec(); //Do not change
    /* free device memory for one image */
    CUDA_CHECK(cudaFree(image_out_device_serial));
    CUDA_CHECK(cudaFree(image_in_device_serial));
    distance_sqr = distance_sqr_between_image_arrays(images_out_cpu, images_out_gpu_serial); // Do not change
    printf("total time %f [msec] distance from baseline %lld (should be zero)\n", t_finish - t_start, distance_sqr); //Do not change
    // GPU bulk: every image in one launch, one block per image.
    printf("\n=== GPU Bulk ===\n"); //Do not change
    uchar *image_in_device_bulk, *image_out_device_bulk;
    /* allocate device memory for all image */
    CUDA_CHECK(cudaMalloc((void **)&image_in_device_bulk,IMG_HEIGHT * IMG_WIDTH * N_IMAGES ));
    CUDA_CHECK(cudaMalloc((void **)&image_out_device_bulk,IMG_HEIGHT * IMG_WIDTH * N_IMAGES ));
    t_start = get_time_msec(); //Do not change
    CUDA_CHECK(cudaMemcpy(image_in_device_bulk, images_in, IMG_HEIGHT * IMG_WIDTH * N_IMAGES, cudaMemcpyHostToDevice));
    process_image_kernel <<< N_IMAGES, THREADS_PER_BLOCK_BULK >>> (image_in_device_bulk, image_out_device_bulk);
    CUDA_CHECK(cudaMemcpy(images_out_gpu_bulk, image_out_device_bulk, IMG_HEIGHT * IMG_WIDTH * N_IMAGES, cudaMemcpyDeviceToHost));
    cudaDeviceSynchronize();
    t_finish = get_time_msec(); //Do not change
    /* free device memory for all image */
    CUDA_CHECK(cudaFree(image_out_device_bulk));
    CUDA_CHECK(cudaFree(image_in_device_bulk));
    distance_sqr = distance_sqr_between_image_arrays(images_out_cpu, images_out_gpu_bulk); // Do not change
    printf("total time %f [msec] distance from baseline %lld (should be zero)\n", t_finish - t_start, distance_sqr); //Do not change
    CUDA_CHECK(cudaFreeHost(images_out_gpu_bulk));
    CUDA_CHECK(cudaFreeHost(images_out_gpu_serial));
    CUDA_CHECK(cudaFreeHost(images_out_cpu));
    CUDA_CHECK(cudaFreeHost(images_in));
    return 0;
}
|
11,264 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
//CUDA kernel
// CUDA kernel: elementwise c = a + b over n elements.
__global__ void vecAddKernel(float *a, float *b, float *c, int n)
{
    // Global thread index; the guard keeps the tail block in bounds.
    const int gid = blockDim.x * blockIdx.x + threadIdx.x;
    if (gid >= n)
        return;
    c[gid] = a[gid] + b[gid];
}
void vecAdd(float *a, float *b, float *c, int n);
// Host driver: fills two vectors with sin^2(i) and cos^2(i) (so each sum
// is exactly 1), runs the GPU add via vecAdd(), and verifies the result
// against a host-side recomputation.
int main( int argc, char* argv[] )
{
    int vector_size = 256*1024;
    // host inputs
    float *h_a=0;
    float *h_b=0;
    // host output
    float *h_c=0;
    // size in bytes of each vector
    size_t bytes = vector_size*sizeof(float);
    // CPU memory allocation
    h_a = (float*)malloc(bytes);
    h_b = (float*)malloc(bytes);
    h_c = (float*)malloc(bytes);
    if(h_a==0 || h_b==0 || h_c==0)
    {
        printf("Error asignando memoria cpu\n");
        return 1;
    }
    // initialize on the host
    int i;
    for( i = 0; i < vector_size; i++) {
        h_a[i] = sin(i)*sin(i);
        h_b[i] = cos(i)*cos(i);
    }
    vecAdd(h_a, h_b, h_c, vector_size);
    // Exact float comparison is valid here: the host performs the same
    // IEEE single-precision addition the kernel did.
    bool sucess = true;
    for(i=0; i<vector_size; i++)
    {
        if(h_a[i]+h_b[i]!=h_c[i])
        {
            sucess = false;
            break;
        }
    }
    if(sucess)
        printf("Exitoooo en GPU!!\n");
    // free host memory
    free(h_a);
    free(h_b);
    free(h_c);
    return 0;
}
// Host wrapper: copies a and b to the GPU, launches vecAddKernel over n
// elements, and copies the result back into c.
// Fix: on cudaMalloc failure the original printed a message and then
// carried on, launching the kernel with null device pointers; it now
// releases whatever was allocated and returns early.
void vecAdd(float *a, float *b, float *c, int n)
{
    size_t bytes = n*sizeof(float);
    // Device input/output buffers.
    float *d_a=0;
    float *d_b=0;
    float *d_c=0;
    // GPU memory allocation.
    cudaMalloc((void **) &d_a, bytes);
    cudaMalloc((void **) &d_b, bytes);
    cudaMalloc((void **) &d_c, bytes);
    if(d_a==0 || d_b==0 || d_c==0)
    {
        printf("Error asignando memoria gpu\n");
        cudaFree(d_a);
        cudaFree(d_b);
        cudaFree(d_c);
        return;
    }
    // Host -> device copies.
    cudaMemcpy( d_a, a, bytes, cudaMemcpyHostToDevice);
    cudaMemcpy( d_b, b, bytes, cudaMemcpyHostToDevice);
    // Threads per block, and blocks per grid (ceil division).
    int blockSize = 256;
    int gridSize = (int)ceil((float)n/blockSize);
    vecAddKernel<<<gridSize, blockSize>>>(d_a, d_b, d_c, n);
    // Blocking device -> host copy; synchronizes with the kernel.
    cudaMemcpy(c, d_c, bytes, cudaMemcpyDeviceToHost );
    // Release device memory.
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);
}
|
11,265 | #include <stdio.h>
#include <stdlib.h>
#include <cuda_runtime.h>
// Times a 100x100 integer matrix multiplication. The work itself is pure
// CPU code; CUDA events are only used as the stopwatch.
// Fix: the two cudaEvent_t handles were never destroyed (resource leak);
// they are now released before exit.
int main()
{
    const int m=100;
    const int k=100;
    const int n=100;
    int a[m][k];
    int b[k][n];
    int c[m][n];
    int i,j,q;
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start,0);
    // Fill A and B with random values in [1, 10].
    // printf("matrix A\n");
    for( i = 0; i < m; i++)
    {
        for( j = 0; j < k; j++)
        {
            a[i][j]=rand()%10+1;
            // printf( " %d \t" , a[i][j] ) ;
        }
        // printf("\n");
    }
    // printf("matrix B\n");
    for( i = 0; i < k; i++)
    {
        for( j = 0; j < n; j++)
        {
            b[i][j]=rand()%10+1;
            // printf( " %d \t" , b[i][j] ) ;
        }
        // printf("\n");
    }
    // Naive triple-loop product C = A * B.
    // printf("Result of multiplying\n");
    for(i = 0; i < m; i++)
    {
        for(j = 0; j < n; j++)
        {
            c[i][j] = 0;
            for(q = 0; q < k; q++)
                c[i][j] += a[i][q] * b[q][j];
            // printf( " %d \t" , c[i][j] ) ;
        }
        // printf("\n");
    }
    cudaEventRecord(stop,0);
    cudaEventSynchronize(stop);
    float milliseconds = 0;
    cudaEventElapsedTime(&milliseconds, start, stop);
    printf("The time is %.6f ms\n", milliseconds);
    // Release the timing events (the original leaked them).
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    return 0;
}
|
11,266 | #pragma kernel tune(threads_per_block = 32, 64, 128, 256, 512, 1024)
#pragma kernel tune(items_per_thread = 1, 2, 3, 4)
#pragma kernel tune(tiling_strategy = 0, 1, 2)
#pragma kernel set(items_per_block = items_per_thread * threads_per_block)
#pragma kernel set(block_size = items_per_block)
#pragma kernel restriction(items_per_block <= 1024)
#pragma kernel problem_size(n)
#pragma kernel block_size(threads_per_block)
#pragma kernel grid_divisor(items_per_block)
#pragma kernel buffers(C[n], A[n], B[n])
#pragma kernel tuning_key("vector_add_" + T)
// Tunable elementwise vector add: C[i] = A[i] + B[i] for i < n.
// Each thread processes items_per_thread elements; tiling_strategy
// chooses their layout (see the branch comments). The static_assert
// rejects any other strategy at compile time, so 'i' is always assigned.
// The #pragma kernel annotations above drive an external autotuner.
template<
    typename T,
    int block_size = 32,
    int items_per_thread = 2,
    int tiling_strategy = 2>
__global__ void vector_add(int n, T* C, const T* A, const T* B) {
    static_assert(
        tiling_strategy >= 0 && tiling_strategy <= 2,
        "invalid tiling strategy");
    for (int k = 0; k < items_per_thread; k++) {
        int i;
        // contiguous. thread processes items i, i+1, i+2, ...
        if (tiling_strategy == 0) {
            i = (blockIdx.x * block_size + threadIdx.x) * items_per_thread + k;
        }
        // block-strided. thread processes items i, i + block_size, i + 2*block_size
        else if (tiling_strategy == 1) {
            i = blockIdx.x * items_per_thread * block_size + threadIdx.x
                + k * block_size;
        }
        // grid-strided. thread processes items i, i + grid_size, i + 2 * grid_size
        else if (tiling_strategy == 2) {
            i = blockIdx.x * block_size + threadIdx.x
                + k * (gridDim.x * block_size);
        }
        // Tail guard: n need not be a multiple of the tile size.
        if (i < n) {
            C[i] = A[i] + B[i];
        }
    }
}
|
// Delta-stepping SSSP kernel: each block solves the single-source
// shortest-path problem for sources srclist[blockIdx...], striding by
// the grid size; each thread owns the vertices u with u % blockDim == u0.
// V/E/W: CSR graph (row offsets / adjacency / edge weights); n: vertex
// count; delta: bucket width; dist/predist: per-source arrays of length n.
// Bug fix: in the light-edge update loop the statement `vis[pos] = 1;`
// had been swallowed into a trailing comment ("... which bucket
// vis[pos] = 1; // mark its distance as changed"), so re-bucketed
// vertices were never marked dirty and light-edge relaxation stalled
// after one hop. The matching heavy-edge loop below always had it.
__global__ void delta_stepping(int* V, int* E, int* W, int* n, int* srcNum, int* srclist, int* delta, int* dist, int* predist){
    const int u0 = threadIdx.z * blockDim.x * blockDim.y + threadIdx.y * blockDim.x + threadIdx.x;
    const int s0 = gridDim.x * blockIdx.y + blockIdx.x;
    const int offset = blockDim.x * blockDim.y * blockDim.z; // the number of threads in a block.
    const int blockNum = gridDim.x * gridDim.y; // the number of blocks
    const int localSize = 100; // per-thread slots => supports localSize * offset vertices
    // B[i]: bucket id of vertex i*offset + u0 (-1 = in no bucket).
    // hadin[i]: vertex is or once was in the current bucket (heavy pass).
    // vis[i]: vertex's distance changed, so its light edges need relaxing.
    int B[localSize];
    bool hadin[localSize];
    bool vis[localSize];
    int id;          // the bucket id
    int u = -1;      // the vertex
    int pos = -1;    // local slot index of the vertex (u / offset)
    int sIndex = -1; // index into srclist
    int sn = -1;     // base offset into dist/predist for this source
    __shared__ int nowIsNull[1];  // 1 => current bucket still non-empty
    __shared__ int quickBreak[1]; // 1 => some bucket holds a vertex
    sIndex = s0;
    while(sIndex < (*srcNum)){
        // Reset all per-source state.
        id = 0;
        for(int i = 0; i < localSize; i++){
            B[i] = -1;
            hadin[i] = 0;
            vis[i] = 0;
        }
        if(u0 == srclist[sIndex] % offset){
            pos = srclist[sIndex] / offset;
            B[pos] = 0; // put the source vertex into bucket 0
            vis[pos] = 1;
            hadin[pos] = 1;
        }
        nowIsNull[0] = 1;
        quickBreak[0] = 1;
        sn = sIndex * (*n);
        __syncthreads();
        while(1){
            // Does any bucket still hold a vertex?
            u = u0;
            while(u < (*n) && quickBreak[0] == 0){
                if(B[u / offset] != -1){ // at least one vertex is in a bucket
                    quickBreak[0] = 1;
                    break;
                }
                u += offset;
            }
            __syncthreads();
            if(quickBreak[0] == 0){ // all buckets empty: this source is done
                break;
            }
            // Repeatedly relax the light edges of bucket 'id' until it drains.
            while(nowIsNull[0]){
                u = u0;
                while(u < *n){
                    pos = u / offset;
                    if(B[pos] == id){ // u belongs to the current bucket
                        B[pos] = -1;  // remove it from the bucket
                        hadin[pos] = 1;
                        if(vis[pos]){ // its distance changed: relax light edges
                            vis[pos] = 0;
                            for(int j = V[u]; j < V[u + 1]; j++){ // all neighbors
                                if(W[j] <= (*delta)){ // light edge
                                    atomicMin(&predist[E[j] + sn], dist[u + sn] + W[j]);
                                }
                            }
                        }
                    }
                    u += offset;
                }
                nowIsNull[0] = 0; // assume the bucket has drained
                __syncthreads();
                u = u0;
                while(u < (*n)){
                    if(predist[u + sn] < dist[u + sn]){ // tentative distance improved
                        pos = u / offset;
                        dist[u + sn] = predist[u + sn];
                        B[pos] = dist[u + sn] / (*delta); // reinsert into its new bucket
                        vis[pos] = 1; // mark the distance as changed (restored statement)
                        if(B[pos] == id){
                            nowIsNull[0] = 1; // bucket refilled: iterate again
                        }
                    }
                    u += offset;
                }
                __syncthreads();
            }
            // Heavy-edge pass over everything that passed through bucket 'id'.
            u = u0;
            while(u < (*n)){
                pos = u / offset;
                if(hadin[pos]){
                    hadin[pos] = 0;
                    for(int j = V[u]; j < V[u + 1]; j++){
                        if(W[j] > (*delta)){ // heavy edge
                            atomicMin(&predist[E[j] + sn], dist[u + sn] + W[j]);
                        }
                    }
                }
                u += offset;
            }
            __syncthreads();
            u = u0;
            while(u < (*n)){
                if(predist[u + sn] < dist[u + sn]){
                    pos = u / offset;
                    dist[u + sn] = predist[u + sn];
                    B[pos] = dist[u + sn] / (*delta);
                    vis[pos] = 1;
                }
                u += offset;
            }
            id += 1;          // advance to the next bucket
            nowIsNull[0] = 1; // assume it is non-empty until checked
            quickBreak[0] = 0;
            __syncthreads();
        }
        sIndex += blockNum;
    }
}
11,268 | #include <cuda_runtime.h>
//#include <cublas_v2.h>
//#include <cublasXt.h>
//#include <cudnn.h>
//#include <nccl.h>
#include <cassert>
#include <chrono>
#include <iostream>
#define CUDA_CHECK(e) (assert(cudaSuccess == (e)))
#define CUBLAS_CHECK(e) (assert(CUBLAS_STATUS_SUCCESS == (e)))
#define CUDNN_CHECK(e) (assert(CUDNN_STATUS_SUCCESS == (e)))
// Reduction policy type: Op(dst, src) combines src into dst. The generic
// template is only declared; per-type specializations provide the body.
class AtomicReduceMap {
public:
  template <typename T>
  __device__ __forceinline__ static void Op(T& dst, const T& src);
};
// float specialization: atomic add into dst (atomicAdd's returned old
// value is intentionally discarded).
template <>
__device__ __forceinline__
void AtomicReduceMap::Op<float>(float& dst, const float& src) {
  (void)atomicAdd(&dst, src);
}
// Elementwise combine kernel: y[i] <- Map::Op(y[i], x[i]) for i < buf_size.
// The Map policy (e.g. AtomicReduceMap) decides how values are merged.
template <typename T, typename Map>
__global__ void reduce(
    uint32_t buf_size,
    const T* x,
    T* y)
{
    const uint32_t gid = threadIdx.x + blockDim.x * blockIdx.x;
    if (gid >= buf_size) {
        return;
    }
    Map::template Op<T>(y[gid], x[gid]);
}
// Bandwidth micro-benchmark: repeatedly runs the atomic-add 'reduce'
// kernel over a 64M-float buffer and reports average wall time and
// effective bandwidth.
// NOTE(review): x and y are never initialized — acceptable for a pure
// bandwidth measurement, but the summed values are garbage; confirm
// nothing downstream reads them. The buffers and stream are also never
// freed (process exit cleans up).
int main(int argc, const char** argv) {
    int num_devices = 0;
    CUDA_CHECK(cudaGetDeviceCount(&num_devices));
    std::clog << "num devices: " << num_devices << std::endl;
    // Benchmark is single-device regardless of what was detected.
    num_devices = 1;
    const size_t buf_size = 64UL * 1024UL * 1024UL;
    cudaStream_t stream = NULL;
    float* x = NULL;
    float* y = NULL;
    CUDA_CHECK(cudaSetDevice(0));
    CUDA_CHECK(cudaStreamCreate(&stream));
    CUDA_CHECK(cudaMalloc((void**)&x, buf_size * sizeof(float)));
    CUDA_CHECK(cudaMalloc((void**)&y, buf_size * sizeof(float)));
    CUDA_CHECK(cudaStreamSynchronize(stream));
    CUDA_CHECK(cudaDeviceSynchronize());
    const int num_trials = 1000;
    double avg_elapsed_ms = 0.0;
    std::clog << "running: reduce" << std::endl;
    for (int t = 0; t < num_trials; ++t) {
        auto start = std::chrono::steady_clock::now();
        CUDA_CHECK(cudaSetDevice(0));
        // One thread per element; ceil-divide for the tail block.
        reduce<float, AtomicReduceMap><<<(buf_size+1024-1)/1024, 1024, 0, stream>>>(
            buf_size, x, y);
        // Stream sync so the host timer covers the full kernel execution.
        CUDA_CHECK(cudaStreamSynchronize(stream));
        auto lap = std::chrono::steady_clock::now();
        auto diff = lap - start;
        avg_elapsed_ms += std::chrono::duration<double, std::milli>(diff).count();
    }
    avg_elapsed_ms /= num_trials;
    double avg_bandwidth = ((double)(buf_size * sizeof(float)) * 1.0e-9) / (avg_elapsed_ms * 1.0e-3);
    std::clog << " avg wallclock: " << avg_elapsed_ms << " ms" << std::endl;
    std::clog << " avg bandwidth: " << avg_bandwidth << " GB/s" << std::endl;
    return 0;
}
|
// Clamps x into [a, b]: first bounds it below by a, then above by b.
__device__ float clamp(float x, float a, float b) {
    const float lower_bounded = max(x, a);
    return min(lower_bounded, b);
}
|
11,270 | //
// main.cpp
//
//
// Created by Elijah Afanasiev on 25.09.2018.
//
//
// System includes
#include <stdio.h>
#include <assert.h>
// CUDA runtime
#include <cuda.h>
#include <cuda_runtime.h>
#ifndef MAX
#define MAX(a,b) (a > b ? a : b)
#endif
// Adds the N-element chunk starting at 'offset':
//   c[offset+i] = a[offset+i] + b[offset+i] for i < N.
// Threads past the chunk end do nothing.
__global__ void vectorAddGPU(float *a, float *b, float *c, int N, int offset)
{
    const int i = blockIdx.x*blockDim.x + threadIdx.x;
    if (i >= N)
        return;
    const int j = offset + i;
    c[j] = a[j] + b[j];
}
// Single-stream reference path: allocate, copy host->device, run
// vectorAddGPU once over the whole vector, and time copies + kernel with
// CUDA events.
// Fixes: nBytes used sizeof(int) for float buffers (same size, wrong
// type); the three host buffers and both events were leaked.
void sample_vec_add(int size = 1048576)
{
    int n = size;
    int nBytes = n*sizeof(float);
    float *a, *b; // host data
    float *c;     // results
    a = (float *)malloc(nBytes);
    b = (float *)malloc(nBytes);
    c = (float *)malloc(nBytes);
    float *a_d,*b_d,*c_d;
    dim3 block(256);
    dim3 grid((unsigned int)ceil(n/(float)block.x)); // ceil-divide for the tail
    for(int i=0;i<n;i++)
    {
        a[i] = rand() / (float)RAND_MAX;
        b[i] = rand() / (float)RAND_MAX;
        c[i] = 0;
    }
    printf("Allocating device memory on host..\n");
    cudaMalloc((void **)&a_d,n*sizeof(float));
    cudaMalloc((void **)&b_d,n*sizeof(float));
    cudaMalloc((void **)&c_d,n*sizeof(float));
    printf("Copying to device..\n");
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start);
    cudaMemcpy(a_d,a,n*sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(b_d,b,n*sizeof(float), cudaMemcpyHostToDevice);
    printf("Doing GPU Vector add\n");
    vectorAddGPU<<<grid, block>>>(a_d, b_d, c_d, n, 0);
    cudaEventRecord(stop);
    cudaEventSynchronize(stop);
    float milliseconds = 0;
    cudaEventElapsedTime(&milliseconds, start, stop);
    printf("time: %f ms\n", milliseconds);
    cudaDeviceSynchronize();
    // Cleanup (the original leaked the events and all three host buffers).
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    cudaFree(a_d);
    cudaFree(b_d);
    cudaFree(c_d);
    free(a);
    free(b);
    free(c);
}
// Multi-stream path: splits the vector into NbStreams chunks and, per
// stream, overlaps the H2D copies, the kernel, and the D2H result copy.
// Fixes over the original:
//  - the kernel was launched on the DEFAULT stream, not Stream[i], which
//    serialized with every other stream and defeated the overlap;
//  - the H2D upload of c was redundant (the kernel fully overwrites each
//    chunk) and has been dropped;
//  - the streams and events were never destroyed; the unused nBytes
//    local was removed.
void streams_vec_add(int size = 1048576)
{
    int n = size;
    float *a, *b; // host data (pinned, so async copies are truly async)
    float *c;     // results
    cudaHostAlloc( (void**) &a, n * sizeof(float) ,cudaHostAllocDefault );
    cudaHostAlloc( (void**) &b, n * sizeof(float) ,cudaHostAllocDefault );
    cudaHostAlloc( (void**) &c, n * sizeof(float) ,cudaHostAllocDefault );
    float *a_d,*b_d,*c_d;
    for(int i=0;i<n;i++)
    {
        a[i] = rand() / (float)RAND_MAX;
        b[i] = rand() / (float)RAND_MAX;
        c[i] = 0;
    }
    printf("Allocating device memory on host..\n");
    cudaMalloc((void **)&a_d,n*sizeof(float));
    printf("here\n");
    cudaMalloc((void **)&b_d,n*sizeof(float));
    cudaMalloc((void **)&c_d,n*sizeof(float));
    printf("Copying to device..\n");
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start);
    printf("Doing GPU Vector add\n");
    const int NbStreams = 8;
    const int StreamSize = n / NbStreams; // assumes n % NbStreams == 0
    cudaStream_t Stream[ NbStreams ];
    for ( int i = 0; i < NbStreams; i++ )
        cudaStreamCreate(&Stream[ i ]);
    for ( int i = 0; i < NbStreams; i++ )
    {
        int Offset = i * StreamSize;
        cudaMemcpyAsync(&a_d[ Offset ], &a[ Offset ], StreamSize * sizeof(float), cudaMemcpyHostToDevice, Stream[ i ]);
        cudaMemcpyAsync(&b_d[ Offset ], &b[ Offset ], StreamSize * sizeof(float), cudaMemcpyHostToDevice, Stream[ i ]);
        dim3 block(1024);
        dim3 grid((StreamSize - 1)/1024 + 1);
        // Launch in the chunk's own stream so it orders after the copies
        // above and can overlap with the other streams' work.
        vectorAddGPU<<<grid, block, 0, Stream[ i ]>>>(a_d, b_d, c_d, StreamSize, Offset);
        cudaMemcpyAsync(&c[ Offset ], &c_d[ Offset ], StreamSize * sizeof(float), cudaMemcpyDeviceToHost, Stream[ i ]);
    }
    cudaEventRecord(stop);
    cudaEventSynchronize(stop);
    float milliseconds = 0;
    cudaEventElapsedTime(&milliseconds, start, stop);
    printf("time: %f ms\n", milliseconds);
    cudaDeviceSynchronize();
    // Cleanup (streams and events were leaked in the original).
    for ( int i = 0; i < NbStreams; i++ )
        cudaStreamDestroy(Stream[ i ]);
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    cudaFree(a_d);
    cudaFree(b_d);
    cudaFree(c_d);
    cudaFreeHost(a);
    cudaFreeHost(b);
    cudaFreeHost(c);
}
// Entry point: vector length comes from argv[1].
// Fix: argv[1] was dereferenced unconditionally; a missing argument now
// falls back to the functions' default size instead of crashing.
int main(int argc, char **argv)
{
    int size = (argc > 1) ? atoi(argv[1]) : 1048576;
    //sample_vec_add(size);
    streams_vec_add(size);
    return 0;
}
|
11,271 | #include "includes.h"
// Scatters packed per-particle results back into the separate state
// arrays. nex[0..nextsize) lists the destination particle indices; the
// global thread id's multiple of nextsize selects which field to write:
//   [0,1N): pc (position)   [1N,2N): vc (velocity)   [2N,3N): a3
//   [3N,4N): a              [4N,5N): a1              [5N,6N): a2
// where pva3 holds pc/vc/a3 packed and aaa holds a/a1/a2 packed (note
// aaa is indexed with gtid - 3*nextsize throughout the last three bands).
// NOTE(review): assumes the grid covers at most 6*nextsize threads and
// that nex has nextsize entries — gtid - k*nextsize is gtid mod nextsize,
// so the 'who' read stays in bounds either way; confirm the launch size.
__global__ void Reconstruct(int *nex, unsigned long nextsize, double4 *pc, double4 *vc, double4 *a3, double4 *a, double4 *a1, double4 *a2, double4 *pva3, double4 *aaa) {
    unsigned int gtid = blockIdx.x*blockDim.x + threadIdx.x;
    int k = gtid/nextsize;             // which field band this thread serves
    int who = nex[gtid - k*nextsize];  // destination particle index
    if(gtid<nextsize){
        pc[who] = pva3[gtid];
    }
    else if(gtid >= nextsize && gtid < 2*nextsize){
        vc[who] = pva3[gtid];
    }
    else if(gtid >= 2*nextsize && gtid < 3*nextsize){
        a3[who] = pva3[gtid];
    }
    else if(gtid >= 3*nextsize && gtid < 4*nextsize){
        a[who] = aaa[gtid - 3*nextsize];
    }
    else if(gtid>= 4*nextsize && gtid < 5*nextsize){
        a1[who] = aaa[gtid - 3*nextsize];
    }
    else if(gtid>= 5*nextsize && gtid < 6*nextsize){
        a2[who] = aaa[gtid - 3*nextsize];
    }
}
11,272 | #include "shared.cuh"
// Advance particle i along its direction vector by its next-event distance.
// NOTE(review): get_pos/get_dir appear to return a struct whose components
// are pointers (each is dereferenced before use) -- confirm against the
// ParticleView definition.
__device__ inline void move_impl(const ParticleView &view, int i) {
*view.get_pos(i).x += *view.get_dir(i).x * view.get_nextdist(i);
*view.get_pos(i).y += *view.get_dir(i).y * view.get_nextdist(i);
*view.get_pos(i).z += *view.get_dir(i).z * view.get_nextdist(i);
}
// One thread per particle: advance every in-range particle of the view.
__global__ void move(ParticleView view) {
    const int idx = thread_id();
    if (idx < view.size) {
        move_impl(view, idx);
    }
}
|
11,273 | #include <vector>
#include <algorithm>
#include <cstdlib>
#include <cstdio>
#include <unistd.h>
#include <time.h>
#include <cassert>
#define ITERATIONS 5
#define M 50
using namespace std;
#ifndef DATA_TYPE
#define DATA_TYPE float
#endif
// Transform payload: performs M dependent additions of 5 (result is i + 5*M),
// giving every element a fixed amount of arithmetic work.
struct Foo{
    __host__ __device__ DATA_TYPE operator() (DATA_TYPE i){
        int step = 0;
        while (step < M) {
            i += 5;
            ++step;
        }
        return i;
    }
} myobject;
int main(int argc, char ** argv){
    // Accumulated wall time in nanoseconds over all timed repetitions.
    long total_time = 0;
    struct timespec start, stop;
    vector<DATA_TYPE> int_vector;
    DATA_TYPE lastVal = 0;
    unsigned long N = 32000;
    if(argc > 1){
        N = atol(argv[1]);
    }
    for(int rep = 0; rep < ITERATIONS; rep++){
        // Rebuild the input with N pseudo-random values each repetition.
        int_vector.clear();
        for(int c = N; c > 0; c--){
            int_vector.push_back((DATA_TYPE)(rand() % 500000));
        }
        lastVal = int_vector[0];
        clock_gettime(CLOCK_REALTIME,&start);
        // Time ITERATIONS back-to-back in-place transforms.
        for(int pass = 0; pass < ITERATIONS; pass++){
            std::transform(int_vector.begin(),int_vector.end(),int_vector.begin(),myobject);
        }
        clock_gettime(CLOCK_REALTIME,&stop);
        total_time += ((stop.tv_sec-start.tv_sec)*1000000000) + (stop.tv_nsec - start.tv_nsec);
    }
    // The functor adds a positive amount, so the first element must change.
    assert(lastVal != int_vector[0]);
    printf("%lu\n", total_time);
    return 0;
}
|
11,274 | #include "includes.h"
#define DIMENSIONS 2
#define GPU_DEVICE_ZERO 0
__global__ void distanceArrCalc(int pointsCounter, int threads, double *distanceFromPointToCluster, double *pointsInGpu, double *clustersInGpu)
{
    /**
    This Function computes distances. Every index is a point. Every value inside an index is a distance.
    Thread (blockIdx.x, threadIdx.x) handles one point; threadIdx.y selects the cluster.
    `threads` is assumed to equal blockDim.x -- TODO confirm at the launch site.
    **/
    // FIX: the original guard
    //   (pointsCounter % blockDim.x > threadIdx.x) || (blockIdx.x+1 != gridDim.x)
    // disabled the ENTIRE last block whenever pointsCounter divides evenly by
    // blockDim.x (remainder == 0), silently skipping up to blockDim.x points.
    // Guarding on the global point index is correct for both the even and
    // uneven cases and for oversized grids.
    int pointX = blockIdx.x * threads + threadIdx.x;
    if (pointX < pointsCounter)
    {
        int offsetPointIndex = pointX * DIMENSIONS;
        int offsetClusterIndexForPoint = threadIdx.y * DIMENSIONS;
        // Squared X component
        double a = pointsInGpu[offsetPointIndex];
        double b = clustersInGpu[offsetClusterIndexForPoint];
        double distanceX = (a - b);
        distanceX *= distanceX;
        // Squared Y component
        a = pointsInGpu[offsetPointIndex + 1];
        b = clustersInGpu[offsetClusterIndexForPoint + 1];
        double distanceY = (a - b);
        distanceY *= distanceY;
        double totalDistance = sqrt(distanceY + distanceX);
        // Output layout: row = cluster (threadIdx.y), column = point index.
        int pointIndex = pointsCounter * threadIdx.y + pointX;
        distanceFromPointToCluster[pointIndex] = totalDistance;
    }
}
11,275 | #include "includes.h"
// Element-wise float addition: output[i] = left_op[i] + right_op[i], i < len.
__global__ void add_f32 (float* left_op, float* right_op, float* output, int len) {
    const int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i >= len) {
        return;
    }
    output[i] = left_op[i] + right_op[i];
}
11,276 |
int main() {
    int a = 10;  // unused beyond its declaration; kept for parity with the original
    // Empty parallel loop -- presumably an OpenMP build/smoke test.
    #pragma omp parallel for
    for (int i = 0; i < 10; ++i)
        ;
    return 0;
}
11,277 | /*
* A very simple cuda implementation of reduce. Uses an array of 1024x1024
* items which are summed into a 1024 array and then summed into a value.
*/
#include <stdio.h>
#include <stdlib.h>
/*
* this kernel will sum all of the data from in into out - at
* least as far as the block will carry you
*/
__global__ void reduce(float* out, float* in, int size);
void startClock(char*);
void stopClock(char*);
void printClock(char*);
// Sums a 1024x1024 array of ones in two reduction passes:
// pass 1: 1024 blocks -> 1024 partials; pass 2: 1 block -> final scalar.
int main(int argc, char** argv) {
    int size = 1024*1024;
    printf("size = %d\n",size);
    void *d_in; // device data
    void *d_mid; // device data - middle results
    void *d_out; // device data - the answer
    float *h_in; // host data
    float h_out;
    int numBlocks = 1024;
    cudaMalloc(&d_in,size*sizeof(float));
    cudaMalloc(&d_mid,numBlocks*sizeof(float));
    cudaMalloc(&d_out,sizeof(float));
    h_in = (float*) malloc(size*sizeof(float));
    for (int i = 0; i < size; i++) {
        h_in[i] = 1;
    }
    startClock("copy data to device");
    cudaMemcpy(d_in,h_in,size*sizeof(float),cudaMemcpyHostToDevice);
    stopClock("copy data to device");
    startClock("compute");
    // use max threads/block and the required # of blocks AND
    // ask for some shared memory.
    // FIX: the third launch parameter is a BYTE count; the original passed
    // 1024 (room for only 256 floats). One float per thread needs
    // 1024*sizeof(float) bytes -- TODO confirm against reduce()'s shared use.
    reduce<<<1024,1024,1024*sizeof(float)>>>((float*) d_mid,(float*) d_in,size);
    reduce<<<1,1024,1024*sizeof(float)>>>((float*)d_out,(float*)d_mid,1024);
    // FIX: cudaThreadSynchronize() is deprecated; use cudaDeviceSynchronize().
    cudaDeviceSynchronize();
    stopClock("compute");
    startClock("copy data to host");
    h_out = -17; // sentinel so a failed copy is visible in the printout
    cudaMemcpy(&h_out,d_out,sizeof(float),cudaMemcpyDeviceToHost);
    stopClock("copy data to host");
    printf("The total is %f\n",h_out);
    free(h_in);
    cudaFree(d_in);
    cudaFree(d_mid); // FIX: was leaked in the original
    cudaFree(d_out);
    printClock("copy data to device");
    printClock("compute");
    printClock("copy data to host");
}
|
11,278 | #include "Evaluator.cuh"
extern __shared__ float shared[];
// Evaluates a binary expression tree (stored as a heap in `expression`) for
// many input vectors in parallel. Several sub-problems (one per vector) share
// a block; each sub-problem uses `width = (expLen+1)/4` threads, one per leaf
// pair of the tree. Variables are letters 'A'.. (code - 65 indexes the vector).
// Shared layout: [expLen+1 chars of the expression][per-subproblem float
// scratch: alignedNumOfVars variables + width tree slots].
// NOTE(review): the while loop below executes a different number of
// iterations for different expId values, so __syncthreads() inside it is
// reached unevenly across the block -- verify this is safe for the intended
// launch shapes.
__global__ void Evaluate(float* vectors, float* results, char* expression, int vectorLen, int numOfVars, int expLen)
{
unsigned inWholeIdx = blockIdx.x*blockDim.x+threadIdx.x;
unsigned inBlockIdx = threadIdx.x;
// Round the variable count up to a multiple of 4 for alignment.
unsigned alignedNumOfVars = numOfVars;
if(numOfVars % 4 != 0)
alignedNumOfVars = alignedNumOfVars + 4 - (numOfVars%4);
unsigned width = (expLen + 1) / 4;
unsigned inSubproblemId = threadIdx.x % width;
unsigned threads = blockDim.x-(blockDim.x%width);
unsigned subproblemNoInWhole = (blockIdx.x * (blockDim.x/width)) + (inBlockIdx / width);
unsigned subproblemNoInBlock = inBlockIdx / width;
unsigned memBlockSize = alignedNumOfVars + width;
// Partition dynamic shared memory: expression chars first, then floats.
char* exp = (char*)shared;
float* vals = (float*)&exp[expLen+1];
// Starting heap index of this thread's leaf-level node.
unsigned expId = expLen - ((expLen + 1) / 2) - ((expLen + 1) / 4) + inSubproblemId;
unsigned varStart = memBlockSize * subproblemNoInBlock;
unsigned treeStart = varStart + alignedNumOfVars;
unsigned valId = inSubproblemId + treeStart;
// Stage the expression into shared memory (one char per thread).
if(inBlockIdx < expLen)
exp[inBlockIdx] = expression[inBlockIdx];
// Stage this sub-problem's variable values (two strided loads per thread).
if(subproblemNoInWhole < vectorLen)
{
if(inSubproblemId < numOfVars)
vals[(subproblemNoInBlock*memBlockSize) + inSubproblemId] = vectors[(subproblemNoInWhole*numOfVars) + inSubproblemId];
if(inSubproblemId + width < numOfVars)
vals[(subproblemNoInBlock*memBlockSize) + inSubproblemId + width] = vectors[(subproblemNoInWhole*numOfVars) + inSubproblemId + width];
}
__syncthreads();
// Evaluate the leaf level: children of node e are at 2e+1 and 2e+2;
// operand letters index the variable area (code - 65 == 'A'-relative).
if(subproblemNoInWhole < vectorLen)
{
if(exp[expId] == '+')
vals[valId]=vals[varStart+exp[expId*2+1]-65]+vals[varStart+exp[expId*2+2]-65];
else if(exp[expId] == '-')
vals[valId]=vals[varStart+exp[expId*2+1]-65]-vals[varStart+exp[expId*2+2]-65];
else if(exp[expId] == '/')
vals[valId]=vals[varStart+exp[expId*2+1]-65]/vals[varStart+exp[expId*2+2]-65];
else if(exp[expId] == '*')
vals[valId]=vals[varStart+exp[expId*2+1]-65]*vals[varStart+exp[expId*2+2]-65];
else if(exp[expId]>=65 && exp[expId]<=122)
vals[valId]=vals[varStart+exp[expId]-65];
}
__syncthreads();
// Walk up the tree: at each level, every `level`-th thread combines its
// slot with the sibling result `level` slots away.
unsigned level = 1;
while(expId!=0)
{
expId = (expId -1) / 2;
if(inSubproblemId%level == 0)
{
if(exp[expId] == '+')
vals[valId]=vals[valId]+vals[valId+level];
else if(exp[expId] == '-')
vals[valId]=vals[valId]-vals[valId+level];
else if(exp[expId] == '/')
vals[valId]=vals[valId]/vals[valId+level];
else if(exp[expId] == '*')
vals[valId]=vals[valId]*vals[valId+level];
else if(exp[expId]>=65 && exp[expId]<=122)
vals[valId]=vals[varStart+exp[expId]-65];
level = level*2;
}
__syncthreads();
}
// Thread 0 of each sub-problem holds the root value.
if(inSubproblemId == 0)
results[subproblemNoInWhole] = vals[valId];
}
// Variant of Evaluate that processes exactly ONE sub-problem (vector) per
// block: subproblemNoInWhole is just blockIdx.x and only the block's first
// `width` threads (subproblemNoInBlock == 0) do any work. See Evaluate for
// the shared-memory layout and tree-walk scheme.
// NOTE(review): as in Evaluate, __syncthreads() inside the while loop is
// reached a different number of times by threads with different expId --
// verify safety for the intended launch shapes.
__global__ void EvaluateSinglePerBlock(float* vectors, float* results, char* expression, int vectorLen, int numOfVars, int expLen)
{
unsigned inWholeIdx = blockIdx.x*blockDim.x+threadIdx.x;
unsigned inBlockIdx = threadIdx.x;
unsigned alignedNumOfVars = numOfVars;
if(numOfVars % 4 != 0)
alignedNumOfVars = alignedNumOfVars + 4 - (numOfVars%4);
unsigned width = (expLen + 1) / 4;
unsigned inSubproblemId = threadIdx.x % width;
unsigned threads = blockDim.x-(blockDim.x%width);
// One vector per block.
unsigned subproblemNoInWhole = blockIdx.x;//(blockIdx.x * (blockDim.x/width)) + (inBlockIdx / width);
unsigned subproblemNoInBlock = inBlockIdx / width;
unsigned memBlockSize = alignedNumOfVars + width;
char* exp = (char*)shared;
float* vals = (float*)&exp[expLen+1];
unsigned expId = expLen - ((expLen + 1) / 2) - ((expLen + 1) / 4) + inSubproblemId;
unsigned varStart = memBlockSize * subproblemNoInBlock;
unsigned treeStart = varStart + alignedNumOfVars;
unsigned valId = inSubproblemId + treeStart;
// Stage the expression into shared memory.
if(inBlockIdx < expLen)
exp[inBlockIdx] = expression[inBlockIdx];
// Stage the block's variable values (first width threads only).
if(subproblemNoInWhole < vectorLen && subproblemNoInBlock == 0)
{
if(inSubproblemId < numOfVars)
vals[(subproblemNoInBlock*memBlockSize) + inSubproblemId] = vectors[(subproblemNoInWhole*numOfVars) + inSubproblemId];
if(inSubproblemId + width < numOfVars)
vals[(subproblemNoInBlock*memBlockSize) + inSubproblemId + width] = vectors[(subproblemNoInWhole*numOfVars) + inSubproblemId + width];
}
__syncthreads();
// Leaf-level evaluation (children of node e are 2e+1 and 2e+2).
if(subproblemNoInWhole < vectorLen && subproblemNoInBlock == 0)
{
if(exp[expId] == '+')
vals[valId]=vals[varStart+exp[expId*2+1]-65]+vals[varStart+exp[expId*2+2]-65];
else if(exp[expId] == '-')
vals[valId]=vals[varStart+exp[expId*2+1]-65]-vals[varStart+exp[expId*2+2]-65];
else if(exp[expId] == '/')
vals[valId]=vals[varStart+exp[expId*2+1]-65]/vals[varStart+exp[expId*2+2]-65];
else if(exp[expId] == '*')
vals[valId]=vals[varStart+exp[expId*2+1]-65]*vals[varStart+exp[expId*2+2]-65];
else if(exp[expId]>=65 && exp[expId]<=122)
vals[valId]=vals[varStart+exp[expId]-65];
}
__syncthreads();
// Combine results up the tree, doubling the stride each level.
unsigned level = 1;
while(expId!=0)
{
expId = (expId -1) / 2;
if(inSubproblemId%level == 0 && subproblemNoInBlock == 0)
{
if(exp[expId] == '+')
vals[valId]=vals[valId]+vals[valId+level];
else if(exp[expId] == '-')
vals[valId]=vals[valId]-vals[valId+level];
else if(exp[expId] == '/')
vals[valId]=vals[valId]/vals[valId+level];
else if(exp[expId] == '*')
vals[valId]=vals[valId]*vals[valId+level];
else if(exp[expId]>=65 && exp[expId]<=122)
vals[valId]=vals[varStart+exp[expId]-65];
level = level*2;
}
__syncthreads();
}
// Root value is written once per block.
if(inSubproblemId == 0 && subproblemNoInBlock == 0)
results[subproblemNoInWhole] = vals[valId];
}
|
11,279 | /**
* Copyright 2010 Duane Merrill
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information, see our Google Code project site:
* http://code.google.com/p/back40computing/
*
* Thanks!
*/
//------------------------------------------------------------------------------
// Common B40C Defines, Properties, and Routines
//------------------------------------------------------------------------------
#pragma once
#include <cuda.h>
namespace b40c {
//------------------------------------------------------------------------------
// Device properties
//------------------------------------------------------------------------------
#ifndef __CUDA_ARCH__
#define __CUDA_ARCH__ 0
#endif
#define B40C_FERMI(version) (version >= 200)
#define B40C_LOG_WARP_THREADS 5 // 32 threads in a warp
#define B40C_WARP_THREADS (1 << B40C_LOG_WARP_THREADS)
#define B40C_LOG_MEM_BANKS(version) ((version >= 200) ? 5 : 4) // 32 banks on fermi, 16 on tesla
#define B40C_MEM_BANKS(version) (1 << B40C_LOG_MEM_BANKS(version))
// TODO refactor these
#if __CUDA_ARCH__ >= 200
#define FastMul(a, b) (a * b)
#else
#define FastMul(a, b) (__umul24(a, b))
#endif
#if __CUDA_ARCH__ >= 120
#define WarpVoteAll(active_threads, predicate) (__all(predicate))
#else
#define WarpVoteAll(active_threads, predicate) (EmulatedWarpVoteAll<active_threads>(predicate))
#endif
#if __CUDA_ARCH__ >= 200
#define TallyWarpVote(active_threads, predicate, storage) (__popc(__ballot(predicate)))
#else
#define TallyWarpVote(active_threads, predicate, storage) (TallyWarpVoteSm10<active_threads>(predicate, storage))
#endif
#if defined(_WIN64) || defined(__LP64__)
#define _B40C_LP64_ true
#define _B40C_ASM_PTR_ "l"
#else
#define _B40C_LP64_ false
#define _B40C_ASM_PTR_ "r"
#endif
//------------------------------------------------------------------------------
// Handy routines
//------------------------------------------------------------------------------
/**
* Select maximum
*/
#define B40C_MAX(a, b) ((a > b) ? a : b)
/**
* Perform a swap
*/
/**
 * Exchange the values of a and b.
 */
template <typename T>
void __host__ __device__ __forceinline__ Swap(T &a, T &b) {
	T old_a = a;
	a = b;
	b = old_a;
}
/**
* MagnitudeShift(). Allows you to shift left for positive magnitude values,
* right for negative.
*
* N.B. This code is a little strange; we are using this meta-programming
* pattern of partial template specialization for structures in order to
* decide whether to shift left or right. Normally we would just use a
* conditional to decide if something was negative or not and then shift
* accordingly, knowing that the compiler will elide the untaken branch,
* i.e., the out-of-bounds shift during dead code elimination. However,
* the pass for bounds-checking shifts seems to happen before the DCE
* phase, which results in a an unsightly number of compiler warnings, so
* we force the issue earlier using structural template specialization.
*/
// Primary template: selected specialization depends on the sign of
// `magnitude` (see the explanatory comment above).
template <typename K, int magnitude, bool shift_left> struct MagnitudeShiftOp;
// Positive magnitude: shift left.
template <typename K, int magnitude>
struct MagnitudeShiftOp<K, magnitude, true> {
__device__ __forceinline__ static K Shift(K key) {
return key << magnitude;
}
};
// Negative magnitude (negated by the caller): shift right.
template <typename K, int magnitude>
struct MagnitudeShiftOp<K, magnitude, false> {
__device__ __forceinline__ static K Shift(K key) {
return key >> magnitude;
}
};
// Entry point: left-shift for positive magnitude, right-shift by |magnitude|
// for negative, with the direction resolved at compile time.
template <typename K, int magnitude>
__device__ __forceinline__ K MagnitudeShift(K key) {
return MagnitudeShiftOp<K, (magnitude > 0) ? magnitude : magnitude * -1, (magnitude > 0)>::Shift(key);
}
/**
* Supress warnings for unused constants
*/
// Swallows a value so "declared but unused" constant warnings go away.
template <typename T>
__device__ __forceinline__ void SuppressUnusedConstantWarning(const T) {}
//------------------------------------------------------------------------------
// Common device routines
//------------------------------------------------------------------------------
/**
* Perform a warp-synchrounous prefix scan. Allows for diverting a warp's
* threads into separate scan problems (multi-scan).
*/
/**
 * Warp-synchronous prefix scan over warpscan[1]; returns each thread's
 * exclusive (predecessor) total. MULTI_SCAN masks the index so several
 * independent NUM_ELEMENTS-wide scans can share the warp.
 * No __syncthreads(): correctness relies on lockstep warp execution and the
 * `volatile` storage -- NOTE(review): a pre-Volta assumption; confirm for
 * architectures with independent thread scheduling.
 * NOTE(review): reads warpscan[1][idx - 1/2/4/8/16] without bounds checks --
 * row 0 presumably serves as zero-initialized identity padding; verify at
 * the allocation site.
 */
template <int NUM_ELEMENTS, bool MULTI_SCAN>
__device__ __forceinline__ int WarpScan(
volatile int warpscan[][NUM_ELEMENTS],
int partial_reduction,
int copy_section) {
int warpscan_idx;
if (MULTI_SCAN) {
warpscan_idx = threadIdx.x & (NUM_ELEMENTS - 1);
} else {
warpscan_idx = threadIdx.x;
}
warpscan[1][warpscan_idx] = partial_reduction;
// Doubling-stride scan: fold in the value 1, 2, 4, 8, then 16 slots back.
if (NUM_ELEMENTS > 1) warpscan[1][warpscan_idx] = partial_reduction =
partial_reduction + warpscan[1][warpscan_idx - 1];
if (NUM_ELEMENTS > 2) warpscan[1][warpscan_idx] = partial_reduction =
partial_reduction + warpscan[1][warpscan_idx - 2];
if (NUM_ELEMENTS > 4) warpscan[1][warpscan_idx] = partial_reduction =
partial_reduction + warpscan[1][warpscan_idx - 4];
if (NUM_ELEMENTS > 8) warpscan[1][warpscan_idx] = partial_reduction =
partial_reduction + warpscan[1][warpscan_idx - 8];
if (NUM_ELEMENTS > 16) warpscan[1][warpscan_idx] = partial_reduction =
partial_reduction + warpscan[1][warpscan_idx - 16];
// Optionally mirror the inclusive total into an extra section.
if (copy_section > 0) {
warpscan[1 + copy_section][warpscan_idx] = partial_reduction;
}
// Exclusive result: the inclusive total of the preceding slot.
return warpscan[1][warpscan_idx - 1];
}
/**
* Perform a warp-synchronous reduction
*/
/**
 * Warp-synchronous tree reduction: folds NUM_ELEMENTS entries of `storage`
 * so storage[0] holds the total. Relies on lockstep warp execution plus the
 * `volatile` qualifier (no explicit syncs) -- NOTE(review): pre-Volta
 * assumption. NOTE(review): threads read storage[idx + 16/8/4/2/1] without a
 * bounds check, so storage must tolerate reads past NUM_ELEMENTS when all
 * warp lanes call in -- verify against the callers' storage sizing.
 */
template <int NUM_ELEMENTS>
__device__ __forceinline__ void WarpReduce(
int idx,
volatile int *storage,
int partial_reduction)
{
storage[idx] = partial_reduction;
if (NUM_ELEMENTS > 16) storage[idx] = partial_reduction = partial_reduction + storage[idx + 16];
if (NUM_ELEMENTS > 8) storage[idx] = partial_reduction = partial_reduction + storage[idx + 8];
if (NUM_ELEMENTS > 4) storage[idx] = partial_reduction = partial_reduction + storage[idx + 4];
if (NUM_ELEMENTS > 2) storage[idx] = partial_reduction = partial_reduction + storage[idx + 2];
if (NUM_ELEMENTS > 1) storage[idx] = partial_reduction = partial_reduction + storage[idx + 1];
}
/**
* Tally a warp-vote regarding the given predicate using the supplied storage
*/
// Tallies `predicate` across ACTIVE_THREADS lanes by warp-reducing into the
// caller-supplied storage; every lane returns the total from storage[0].
template <int ACTIVE_THREADS>
__device__ __forceinline__ int TallyWarpVoteSm10(int predicate, int storage[]) {
WarpReduce<ACTIVE_THREADS>(threadIdx.x, storage, predicate);
return storage[0];
}
__shared__ int vote_reduction[B40C_WARP_THREADS];
/**
* Tally a warp-vote regarding the given predicate
*/
// Convenience overload: tallies the vote using the file-scope shared
// `vote_reduction` buffer declared above.
template <int ACTIVE_THREADS>
__device__ __forceinline__ int TallyWarpVoteSm10(int predicate) {
return TallyWarpVoteSm10<ACTIVE_THREADS>(predicate, vote_reduction);
}
/**
* Emulate the __all() warp vote instruction
*/
// Software __all(): true iff every one of the ACTIVE_THREADS lanes voted
// true (tally equals the lane count). Used on pre-SM12 hardware.
template <int ACTIVE_THREADS>
__device__ __forceinline__ int EmulatedWarpVoteAll(int predicate) {
return (TallyWarpVoteSm10<ACTIVE_THREADS>(predicate) == ACTIVE_THREADS);
}
/**
* Have each thread concurrently perform a serial reduction over its specified segment
*/
/**
 * Serially sum the LENGTH ints of `segment` (one thread, unrolled loop).
 */
template <int LENGTH>
__device__ __forceinline__ int
SerialReduce(int segment[]) {
	int total = segment[0];
	#pragma unroll
	for (int k = 1; k < (int) LENGTH; k++) {
		total += segment[k];
	}
	return total;
}
/**
* Have each thread concurrently perform a serial scan over its specified segment
*/
/**
 * Serial exclusive scan of `segment` in place, seeded with seed0; two
 * elements are processed per iteration. NOTE(review): assumes LENGTH is
 * even -- an odd LENGTH would access segment[LENGTH], one past the end.
 */
template <int LENGTH>
__device__ __forceinline__
void SerialScan(int segment[], int seed0) {
int seed1;
#pragma unroll
for (int i = 0; i < (int) LENGTH; i += 2) {
// seed1 = running total through segment[i]; each slot is overwritten
// with the total of everything before it.
seed1 = segment[i] + seed0;
segment[i] = seed0;
seed0 = seed1 + segment[i + 1];
segment[i + 1] = seed1;
}
}
//------------------------------------------------------------------------------
// Simple Kernels
//------------------------------------------------------------------------------
/**
* Empty Kernel
*/
/**
 * Empty kernel -- launched only for its side effects on the runtime
 * (presumably to flush queued work; confirm against call sites).
 */
template <typename T>
__global__ void FlushKernel(void)
{
}
/**
* Initialization kernel for synchronization counters
*/
/**
 * Zero one synchronization counter per block (first thread only).
 */
template <typename T>
__global__ void InitSync(int *d_sync)
{
	if (threadIdx.x != 0) return;
	d_sync[blockIdx.x] = 0;
}
} // namespace b40c
|
11,280 | #include <iostream>
#include <cuda.h>
#include <cstdlib>
#include <time.h>
using namespace std;
#define M 40000
#define N 40000
// Element-wise combine of two m-by-n boolean matrices. Despite the "add"
// name, the operator applied is XOR (^).
__global__ void addkernel(bool* a, bool* b, bool*c, int m, int n){
    const int row = blockIdx.x*blockDim.x + threadIdx.x;
    const int col = blockIdx.y*blockDim.y + threadIdx.y;
    if (row >= m || col >= n)
        return;
    c[row*n+col] = a[row*n+col] ^ b[row*n+col];
}
int main(){
    clock_t start, end;
    // FIX: keep the byte count in size_t (sizeof() arithmetic) rather than
    // truncating through int; M*N bytes is ~1.6 GB here.
    size_t size = sizeof(bool) * M * N;
    bool* mat1 = (bool*)malloc(size);
    bool* mat2 = (bool*)malloc(size);
    bool* res = (bool*)malloc(size);
    // Fill both inputs with random 0/1 values.
    for(int i=0; i<M; i++){
        for(int j=0; j<N; j++)
            mat1[i*N+j]=rand()%2;
    }
    for(int i=0; i<M; i++){
        for(int j=0; j<N; j++)
            mat2[i*N+j]=rand()%2;
    }
    bool *d_mat1, *d_mat2, *d_res;
    dim3 threadsPerBlock(16, 32);
    dim3 numBlocks((M+threadsPerBlock.x-1)/threadsPerBlock.x,(N+threadsPerBlock.y-1)/threadsPerBlock.y);
    cudaMalloc((void**)&d_mat1, size);
    cudaMalloc((void**)&d_mat2, size);
    cudaMalloc((void**)&d_res, size);
    start=clock();
    cudaMemcpy(d_mat1, mat1, size, cudaMemcpyHostToDevice);
    cudaMemcpy(d_mat2, mat2, size, cudaMemcpyHostToDevice);
    // NOTE: j<=100 launches the kernel 101 times; kept as-is in case the
    // iteration count is intentional for the timing comparison.
    for(int j=0; j<=100; j++)
    {
        addkernel<<<numBlocks, threadsPerBlock>>>(d_mat1,d_mat2,d_res,M,N);
    }
    // The blocking copy also synchronizes with the queued kernel launches.
    cudaMemcpy(res, d_res, size, cudaMemcpyDeviceToHost);
    end = clock();
    double cpu_time_used = ((double)(end-start))/CLOCKS_PER_SEC;
    cout << "GPU: " << cpu_time_used << endl;
    /* CPU comparison loop intentionally left disabled, as in the original. */
    // FIX: release device and host memory (the original leaked all six buffers).
    cudaFree(d_mat1);
    cudaFree(d_mat2);
    cudaFree(d_res);
    free(mat1);
    free(mat2);
    free(res);
    return 0;
}
|
11,281 | #include "includes.h"
// Block-cooperative max over a short vector: each thread strides the array
// by blockDim.x and folds its elements into *max with atomicMax (the short
// operand promotes to int for the atomic).
__device__ void get_largest_value(short *vec, const int vec_length, int *max) {
for (int i = threadIdx.x; i < vec_length; i = i + blockDim.x) {
atomicMax(max, vec[i]);
}
}
// Kernel overload for int vectors: block-stride loop, atomicMax into *max.
__global__ void get_largest_value(int *vec, const int vec_length, int* max) {
    for (int idx = threadIdx.x; idx < vec_length; idx += blockDim.x) {
        atomicMax(max, vec[idx]);
    }
}
11,282 | #include <algorithm>
#include <cstdio>
#include <cstdlib>
// Element-wise double addition using a grid-stride loop.
//
// FIX: the original looped `for (i = 0; i < arr_size; i += stride)` but
// indexed with the fixed `idx` inside, so every thread redundantly rewrote
// result[idx] (arr_size/stride) times and correctness silently depended on
// the launch covering every element. The standard grid-stride loop does the
// intended work once per element and is correct for ANY grid size.
__global__ void add(double* result, double const* arr1, double const* arr2,
                    size_t const arr_size)
{
    size_t const stride = (size_t)blockDim.x * gridDim.x;
    for (size_t i = (size_t)blockIdx.x * blockDim.x + threadIdx.x;
         i < arr_size; i += stride)
    {
        result[i] = arr1[i] + arr2[i];
    }
}
// A correct sum array holds the value arr_size in every slot; report whether
// that invariant holds over all arr_size entries (vacuously true when empty).
bool verify_result(double const* result, size_t const arr_size)
{
    for (size_t i = 0; i < arr_size; ++i)
    {
        if (result[i] != static_cast<double>(arr_size))
        {
            return false;
        }
    }
    return true;
}
int main()
{
    cudaError_t error;
    // Clear/inspect any pre-existing sticky error before starting.
    error = cudaGetLastError();
    if (error != cudaSuccess)
    {
        std::printf("0 %s\n", cudaGetErrorString(error));
        std::exit(1);
    }
    constexpr size_t arr_size = 2 << 24;
    double* arr1;
    double* arr2;
    double* result;
    cudaMallocManaged(&arr1, arr_size * sizeof(double));
    cudaMallocManaged(&arr2, arr_size * sizeof(double));
    cudaMallocManaged(&result, arr_size * sizeof(double));
    // arr1[i] + arr2[i] == arr_size for every i, which verify_result checks.
    for (size_t i = 0; i < arr_size; i++)
    {
        arr1[i] = static_cast<double>(i);
        arr2[i] = static_cast<double>(arr_size - i);
    }
    size_t const block_size = 256;
    size_t const num_blocks = (arr_size + block_size - 1) / block_size;
    add<<<num_blocks, block_size>>>(result, arr1, arr2, arr_size);
    // FIX: kernel launches don't report failures directly -- surface
    // launch-configuration errors explicitly.
    error = cudaGetLastError();
    if (error != cudaSuccess)
    {
        std::printf("launch: %s\n", cudaGetErrorString(error));
        std::exit(1);
    }
    cudaDeviceSynchronize();
    // FIX: surface asynchronous execution errors before reading `result`.
    error = cudaGetLastError();
    if (error != cudaSuccess)
    {
        std::printf("sync: %s\n", cudaGetErrorString(error));
        std::exit(1);
    }
    if (verify_result(result, arr_size))
    {
        std::printf("all results were valid.\n");
    }
    else
    {
        std::printf("at least one result is invalid.\n");
        std::exit(1);
    }
    cudaFree(arr1);
    cudaFree(arr2);
    cudaFree(result);
}
|
11,283 | #include <thrust/complex.h>
//GTX1070
#define SM 1 //1 .. 15
#define CUDA_PER_SM 128
#define THREAD_PER_SM 256 //32X - maximium 1024, 256 is optimal number
#define FOR() for(int i = blockIdx.x*blockDim.x + threadIdx.x; !(i>>n) && i < (1<<n); i += blockDim.x * gridDim.x)
using cudouble = thrust::complex<double>;
// Initialize the amplitude array over 2^n basis states: indices divisible by
// `period` receive amplitude `amp`; all other entries are zeroed.
__global__ void cuda_prepare_state(cudouble *data, int n, int period, cudouble amp) {
    FOR() {
        if (i % period == 0)
            data[i] = amp;
        else
            data[i] = 0.0;
    }
}
// Host wrapper: prepares a uniform superposition over the ceil(2^n / period)
// basis states whose index is a multiple of `period`.
void gpu_prepare_state(int sm, cudouble *data, int n, int period) {
const int total_period = ((1 << n) - 1) / period + 1; // ceil(2^n / period)
const cudouble amp = 1.0 / sqrt(total_period); // normalisation: 1/sqrt(count)
cuda_prepare_state<<<sm, THREAD_PER_SM>>>(data, n, period, amp);
}
// Hadamard gate on the qubit selected by mask_q: the thread that owns the
// index with the mask bit CLEAR rewrites both pair members (i, i^mask_q)
// with the scaled sum and difference of their amplitudes.
__global__ void cuda_hadamard(cudouble *data, int n, const cudouble sqrt_1_2, int mask_q) {
    FOR() {
        if ((i & mask_q) == 0) {
            const int partner = i ^ mask_q;
            const cudouble sum  = sqrt_1_2 * (data[i] + data[partner]);
            const cudouble diff = sqrt_1_2 * (data[i] - data[partner]);
            data[i] = sum;
            data[partner] = diff;
        }
    }
}
// Host wrapper: apply a Hadamard gate on qubit q (bit mask 1 << q).
void gpu_hadamard(int sm, cudouble *data, int n, int q) {
// Cached across calls; sqrt(0.5) is the Hadamard normalisation factor.
static const cudouble sqrt_1_2 = sqrt(0.5);
const int mask_q = 1 << q;
cuda_hadamard<<<sm, THREAD_PER_SM>>>(data, n, sqrt_1_2, mask_q);
}
// Phase rotation: multiply by omega every amplitude whose index has the
// mask_q bit set; indices with the bit clear are untouched.
__global__ void cuda_controlled_rz(cudouble *data, int n, const cudouble omega, const int mask_q) {
    FOR() {
        if (i & mask_q)
            data[i] *= omega;
    }
}
// Host wrapper for cuda_controlled_rz (phase omega on amplitudes whose index
// has the mask_q bit set).
void gpu_controlled_rz(int sm, cudouble *data, int n, const cudouble omega, const int mask_q) {
cuda_controlled_rz<<<sm, THREAD_PER_SM>>>(data, n, omega, mask_q);
}
// Allocate device storage for 2^n complex amplitudes.
void gpu_init(cudouble **data, int n) {
cudaMalloc(data, sizeof(cudouble) * (1 << n));
}
// Release the device amplitude buffer allocated by gpu_init.
void gpu_deinit(cudouble *data) {
cudaFree(data);
}
// Copy 2^n amplitudes device -> host (direction fixed by the flag below,
// despite the generic name).
void gpu_memcpy(cudouble *dst, cudouble *src, int n) {
cudaMemcpy(dst, src, sizeof(cudouble) * (1 << n), cudaMemcpyDeviceToHost);
}
|
11,284 | // Sizes are W H D
__device__ __constant__ int d_ax_size[3];
__device__ __constant__ int d_ay_size[3];
__device__ __constant__ int d_aout_size[3];
__device__ __constant__ int d_padding[3];
// Signal correlation kernel. Only works with 3D arrays. One thread per output entry.
// 3D cross-correlation of ax (padded by d_padding) with kernel ay, writing
// one entry of aout per thread. Arrays are dense row-major in (W, H, D)
// order with D fastest; sizes/padding come from the __constant__ arrays.
__global__ void correlate_kernel(float *ax, float *ay, float *aout) {
// Extract dimensions and padding
const int ax_w = d_ax_size[0];
const int ax_h = d_ax_size[1];
const int ax_d = d_ax_size[2];
const int ay_w = d_ay_size[0];
const int ay_h = d_ay_size[1];
const int ay_d = d_ay_size[2];
const int aout_w = d_aout_size[0];
const int aout_h = d_aout_size[1];
const int aout_d = d_aout_size[2];
const int xpad = d_padding[0];
const int ypad = d_padding[1];
const int zpad = d_padding[2];
// Get the id, and make sure it is not out of bounds
const int id = threadIdx.x + blockIdx.x * blockDim.x;
if (id >= aout_w * aout_h * aout_d) {
return;
}
// Now get the coordinates in the output matrix (D varies fastest).
const int x = id / (aout_h*aout_d);
const int y = (id % (aout_h*aout_d)) / aout_d;
const int z = (id % (aout_h*aout_d)) % aout_d;
int ax_id, ay_id, i, j, k;
int ax_x, ax_y, ax_z;
float sum = 0.0;
// Each output entry is a reduction of matrix dot product between the two
// inputs at that point
for (i = 0; i < ay_w; ++i) {
for (j = 0; j < ay_h; ++j) {
for (k = 0; k < ay_d; ++k) {
// Bounds check. If out of bound, ignore because ax value is 0.
// (Implicit zero padding of xpad/ypad/zpad around ax.)
ax_x = (x+i)-xpad;
ax_y = (y+j)-ypad;
ax_z = (z+k)-zpad;
if ((ax_x < 0) ||
(ax_y < 0) ||
(ax_z < 0) ||
(ax_x >= ax_w) ||
(ax_y >= ax_h) ||
(ax_z >= ax_d))
continue;
// Correlation is sum of dot product between overlapping points
ax_id = (ax_x * ax_h * ax_d) + (ax_y * ax_d) + ax_z;
ay_id = (i * ay_h * ay_d) + (j * ay_d) + k;
sum += (ax[ax_id] * ay[ay_id]);
}
}
}
aout[id] = sum;
}
|
11,285 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
// CUDA kernel, each thread takes care of one element of c
// One thread per element: c[i] = a[i] + b[i] for i in [0, n).
__global__ void vec_addition(double *a, double *b, double *c, int n) {
    const int gid = blockIdx.x * blockDim.x + threadIdx.x;
    if (gid >= n)
        return;
    c[gid] = a[gid] + b[gid];
}
int main(int argc, char* argv[]) {
    int n = 100000;
    size_t bytes = n * sizeof(double);
    // Host input/output vectors.
    double *h_a = (double *)malloc(bytes);
    double *h_b = (double *)malloc(bytes);
    double *h_c = (double *)malloc(bytes);
    // Device input/output vectors.
    double *d_a;
    double *d_b;
    double *d_c;
    cudaMalloc(&d_a, bytes);
    cudaMalloc(&d_b, bytes);
    cudaMalloc(&d_c, bytes);
    // By construction h_a[i] + h_b[i] == n + 1 for every i.
    for (int i = 0; i < n; i++) {
        h_a[i] = i + 1;
        h_b[i] = n - i;
    }
    // Copy inputs to the device.
    cudaMemcpy(d_a, h_a, bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(d_b, h_b, bytes, cudaMemcpyHostToDevice);
    // 256 threads per block and enough blocks to cover all n elements.
    int blockSize = 256;
    int gridSize = (int)ceil((float)n / blockSize);
    vec_addition<<<gridSize, blockSize>>>(d_a, d_b, d_c, n);
    // Blocking copy back; also waits for the kernel to finish.
    cudaMemcpy(h_c, d_c, bytes, cudaMemcpyDeviceToHost);
    long long sum = 0;
    for (int i = 0; i < n; i++) sum += h_c[i];
    sum = sum / 2;
    printf("the result is: %lld\n", sum);
    // Release device memory, then host memory.
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);
    free(h_a);
    free(h_b);
    free(h_c);
    return 0;
}
|
11,286 | // sudo apt install nvidia-cuda-toolkit
// nvcc -O3 -std=c++11 es_thrust.cu -o es_thrust
//
#include <thrust/iterator/counting_iterator.h>
#include <thrust/functional.h>
#include <thrust/transform_reduce.h>
#include <iostream>
#include <iomanip>
#include <cmath>
#include <chrono>
const unsigned power_of_4 = 8;
const unsigned numGroups = 1 << 2 * power_of_4;
__constant__ uint64_t g_seed[1];
__device__
unsigned hash(unsigned a)
{
a = (a+0x7ed55d16) + (a<<12);
a = (a^0xc761c23c) ^ (a>>19);
a = (a+0x165667b1) + (a<<5);
a = (a+0xd3a2646c) ^ (a<<9);
a = (a+0xfd7046c5) + (a<<3);
a = (a^0xb55a4f09) ^ (a>>16);
return a;
}
// Adopted from http://prng.di.unimi.it/xoshiro256plus.c
// Advance the 256-bit xoshiro256+ state `s` and return a uniform double in
// [0, 1) built from 53 high bits of the output.
__device__
double xoshiro256plus(uint64_t s[4]) {
const uint64_t result = s[0] + s[3];
const uint64_t t = s[1] << 17;
// State transition: the order of these xor/rotate updates is significant.
s[2] ^= s[0];
s[3] ^= s[1];
s[1] ^= s[2];
s[0] ^= s[3];
s[2] ^= t;
s[3] = (s[3] << 45) | (s[3] >> (64 - 45));
// random_real53() from http://mumble.net/~campbell/tmp/random_real.c
return (double)(result >> 11) / (1ULL << 53);
}
// Monte-Carlo functor: for numGroups groups of 5 standard-normal 2D points,
// computes the largest pairwise distance within each group and returns the
// average over all groups for this thread.
struct estimate_es : public thrust::unary_function<unsigned, double>
{
__device__
double operator()(unsigned thread_id)
{
// Derive a per-thread xoshiro256+ state by xoring the global seed with
// hash-chained material from the thread id.
__uint64_t prng_state[4];
__uint64_t mask = hash(thread_id);
for (unsigned i = 0; i < 4; i++) {
mask = (mask << 32) | hash(mask);
mask = (mask << 32) | hash(mask);
prng_state[i] = g_seed[0] ^ mask;
}
double sum = 0;
for (unsigned group = 0; group < numGroups; group++) {
double x[5]; double y[5];
// Draw 5 standard-normal 2D points per group.
for (unsigned shot = 0; shot < 5; shot++) {
auto u = xoshiro256plus(prng_state);
auto v = xoshiro256plus(prng_state);
// Box-Muller transform
auto r = sqrt(-2 * log(u));
auto theta = 2 * M_PI * v;
x[shot] = r * cos(theta);
y[shot] = r * sin(theta);
}
// Largest squared pairwise distance among the 5 points.
double es2 = 0;
for (unsigned i = 0; i < 4; i++) {
for (unsigned j = i + 1; j < 5; j++) {
double dx = x[i] - x[j];
double dy = y[i] - y[j];
double d2 = dx * dx + dy * dy;
if (es2 < d2) {
es2 = d2;
}
}
}
sum += sqrt(es2);
}
return sum / numGroups;
}
};
// Runs nt independent estimation rounds, each with a fresh device seed, and
// reports min/avg/max of the per-round estimates plus wall-clock time (CSV).
int main(void)
{
auto start_time = std::chrono::system_clock::now();
// Odd initial state required by the MCG recurrence below.
__uint128_t mcg128_state = ((__uint128_t)time(NULL) << 1) | 1;
double avg = 0, min = 100, max = 0;
const unsigned nt = 12;
for (unsigned j = 0; j < nt; j++) {
// MCG 128 PRNG from http://www.pcg-random.org/posts/on-vignas-pcg-critique.html
mcg128_state *= 0xda942042e4dd58b5ULL;
uint64_t seed[1] = {(uint64_t)(mcg128_state >> 64)};
// Publish this round's seed to the device __constant__ g_seed.
cudaMemcpyToSymbol(g_seed, seed, sizeof(uint64_t));
// Sum of numGroups per-thread averages; divided back down below.
double r = thrust::transform_reduce(thrust::counting_iterator<unsigned>(0),
thrust::counting_iterator<unsigned>(numGroups),
estimate_es(),
double(0),
thrust::plus<double>());
avg += r / numGroups;
min = fmin(r / numGroups, min);
max = fmax(r / numGroups, max);
}
avg /= nt;
auto end_time = std::chrono::system_clock::now();
std::chrono::duration<double> seconds = end_time - start_time;
std::cout.precision(14);
std::cout << "code,threads,power_of_4,min,avg,max,time\n";
std::cout << "CUDA thrust," << nt << "," << power_of_4 << "," << min << "," << avg << "," << max << "," << seconds.count() << "\n";
return 0;
}
|
11,287 | #include <cuda.h>
namespace cyclic_reduction{
/**
 * Guarded ratio used by the reduction:
 *   alpha_i = -a_i / b_(i-2^(l-i))   (x = a_i, y = the b divisor)
 *   beta_i  = -c_i / b_(i+2^(l-i))   (x = c_i, y = the b divisor)
 * Returns 0 when the divisor is 0 instead of dividing by zero.
 */
struct AlphaBeta{
	__host__ __device__
	double operator()(double x, double y){
		return (y == 0.00) ? 0.00 : (-x) / y;
	}
};
// First reduction step: same guarded -a/b division.
struct FirstStepFunctor{
	__host__ __device__
	double operator()(double a, double b){
		return (b == 0.00) ? 0.00 : -a / b;
	}
};
// Second reduction step: same guarded -c/b division.
struct SecondStepFunctor{
	__host__ __device__
	double operator()(double c, double b){
		return (b == 0.00) ? 0.00 : -c / b;
	}
};
}
/**
* Calulates:
* a_i = alpha_u * a_(i-2^(l-1))_prime
* where:
* x,y = as in AlphaBeta
* z = a_(i-2^(l-1))_prime
*
* c_i = beta_i * c_(i+2^(l-1))_prime
* where:
* x,y = as in AlphaBeta
* z = c_(i+2^(l-1))_prime
**/
/**
 * a_i = alpha_i * a'(i-2^(l-1)) and c_i = beta_i * c'(i+2^(l-1)):
 * the guarded AlphaBeta ratio of (x, y) scaled by the neighbouring prime
 * coefficient z.
 */
struct AC{
	__host__ __device__
	double operator()(double x, double y, double z){
		cyclic_reduction::AlphaBeta ratio;
		const double coeff = ratio(x, y);
		return coeff * z;
	}
};
/**
* Calculates:
* b_i = b_i_prime + alpha_i * c_(i-2^(l-1))_prime + beta_i * a_(i+2^(l-1))_prime
* where:
* alpha_i , beta_i are results from AlphaBeta
* x = b_i_prime
* y = c_(i-2^(l-1))_prime
* z = (a_(i+2^(l-1))_prime
*
* d_i = d_i_prime + alpha_i * d_(i-2^(l-1))_prime + beta_i * d_(i+2^(l-1))_prime
* where:
* alpha_i , beta_i are results from AlphaBeta
* x = d_i_prime
* y = d_(i-2^(l-1))_prime
* z = (d_(i+2^(l-1))_prime
*
*
**/
/**
 * b_i / d_i update: x + alpha*y + beta*z, where alpha and beta come from
 * AlphaBeta and y, z are the neighbouring prime values.
 */
struct BD{
	__host__ __device__
	double operator()(double x, double alpha, double y, double beta, double z){
		const double left = alpha * y;
		const double right = beta * z;
		return x + left + right;
	}
};
|
11,288 | #include "includes.h"
// In-place tanh activation over the first n entries of pA, one per thread.
__global__ void cuda_activateTanh(double* pA, int n)
{
    const int gid = blockIdx.x * blockDim.x + threadIdx.x;
    if (gid >= n)
        return;
    pA[gid] = tanh(pA[gid]);
}
11,289 | #include <stdio.h>
#include <stdlib.h>
#include <cuda.h>
#include <math.h>
#include <limits.h>
#include <float.h>
#include <iostream>
#include <sys/time.h>
#define G 6.67408E-11 //Gravitational constant
#define lvl 9 //depth of quad tree till which we'll divide plane
using namespace std;
struct vect	//Structure for a 2D coordinate / 2D vector
{
	float x;	//X coordinate
	float y;	//Y coordinate
};
struct node	//One node of the quad tree
{
	vect body;	//centre of mass of bodies in current node
	float mass;	//total mass of bodies in current node
	int child[4];	//children indices in nodes array (-1 when absent)
	int l,r;	//inclusive index range in the body array of bodies in this node
	vect min, max;	//bounding box: min and max X and Y coordinates of bodies in this node
};
//Function to calculate Gravitational force between two bodies
__device__ vect gravity (vect a, vect b, float m1, float m2)
{
float res=G*m1*m2;
float r=(a.y-b.y)*(a.y-b.y)+(a.x-b.x)*(a.x-b.x);
if (r>0) res/=r;
vect vec;
vec.y=a.y-b.y;
vec.x=a.x-b.x;
r=sqrt(r);
if (r>0) vec.y/=r, vec.x/=r;
vec.y*=res;
vec.x*=res;
return vec;
}
//part1 for kernel1 to find min-max among the n bodies
//Will find min and max of X and Y coordinates for each thread lock
//Uses reduction technique
//Kernel 1, part 1: per-block min/max reduction over body coordinates.
//Each block writes its partial minimum and maximum to min[blockIdx.x] and
//max[blockIdx.x]. Assumes blockDim.x is a power of two and <= 32 (the shared
//caches hold 32 entries).
__global__ void findMinMax(vect * body, vect * min, vect * max, int n)
{
__shared__ vect min_cache[32];
__shared__ vect max_cache[32];
int index=threadIdx.x+blockDim.x*blockIdx.x;
float xmin=FLT_MAX, ymin=FLT_MAX;
//BUG FIX: was FLT_MIN, which is the smallest *positive* float, not the most
//negative value — a running maximum seeded with it is wrong for negative inputs.
float xmax=-FLT_MAX, ymax=-FLT_MAX;
while (index<n) //takes care if total number greater than total threads in kernel
{
xmin=fmin(xmin, body[index].x);
ymin=fmin(ymin, body[index].y);
xmax=fmax(xmax, body[index].x);
ymax=fmax(ymax, body[index].y);
index+=(blockDim.x*gridDim.x); //grid-wide stride so any launch size covers all n bodies
}
int tid=threadIdx.x;
min_cache[tid].x=xmin;
min_cache[tid].y=ymin;
max_cache[tid].x=xmax;
max_cache[tid].y=ymax;
int active=blockDim.x>>1;
do
{
__syncthreads();
if (tid<active) //tree reduction in shared memory
{
min_cache[tid].x=fmin(min_cache[tid].x, min_cache[tid+active].x);
min_cache[tid].y=fmin(min_cache[tid].y, min_cache[tid+active].y);
max_cache[tid].x=fmax(max_cache[tid].x, max_cache[tid+active].x);
max_cache[tid].y=fmax(max_cache[tid].y, max_cache[tid+active].y);
}
active>>=1;
}while (active>0);
if (tid==0) min[blockIdx.x]=min_cache[0], max[blockIdx.x]=max_cache[0];
}
//part2 for kernel1 to find min-max among the n bodies
//Will find global min and max of X and Y coordinates from local min and max of above kernel
//Kernel 1, part 2: reduces the per-block partials produced by findMinMax into
//a single global min and max (launch with one block; same power-of-two,
//<= 32-thread assumption as findMinMax).
__global__ void findMMinMax(vect * mmin, vect *mmax, vect * min, vect * max, int n)
{
__shared__ vect min_cache[32];
__shared__ vect max_cache[32];
int index=threadIdx.x+blockDim.x*blockIdx.x;
float xmin=FLT_MAX, ymin=FLT_MAX;
//BUG FIX: was FLT_MIN (smallest positive float); a maximum accumulator must
//start at the most negative representable value.
float xmax=-FLT_MAX, ymax=-FLT_MAX;
while (index<n) //takes care if total number greater than total threads in kernel
{
xmin=fmin(xmin, min[index].x);
ymin=fmin(ymin, min[index].y);
xmax=fmax(xmax, max[index].x);
ymax=fmax(ymax, max[index].y);
index+=(blockDim.x*gridDim.x);
}
int tid=threadIdx.x;
min_cache[tid].x=xmin;
min_cache[tid].y=ymin;
max_cache[tid].x=xmax;
max_cache[tid].y=ymax;
int active=blockDim.x>>1;
do
{
__syncthreads();
if (tid<active) //tree reduction in shared memory
{
min_cache[tid].x=fmin(min_cache[tid].x, min_cache[tid+active].x);
min_cache[tid].y=fmin(min_cache[tid].y, min_cache[tid+active].y);
max_cache[tid].x=fmax(max_cache[tid].x, max_cache[tid+active].x);
max_cache[tid].y=fmax(max_cache[tid].y, max_cache[tid+active].y);
}
active>>=1;
}while (active>0);
if (tid==0) mmin[blockIdx.x]=min_cache[0], mmax[blockIdx.x]=max_cache[0];
}
//This function will construct particular level of the tree.
//Each node will be divided further into four new nodes and bodies in the array will be swapped so that bodies belonging to same node remain together in the array
//This will work as kernel 2
//Kernel 2: builds one level of the quad tree.
//Each thread processes one node of the current level: it partitions that node's
//body range [l, r] into four quadrants by in-place swaps (so each child's bodies
//stay contiguous in the body array) and writes the four children into 'nodes'.
//level: tree level being expanded; tot: capacity of the nodes array.
//NOTE(review): 'tid' is derived from the pre-stride thread index, so child slot
//positions assume one pass over the level — verify behavior when the grid is
//smaller than the level.
__global__ void construct(vect *body, float *mass, node *nodes, int level, int tot)
{
int index=blockDim.x*blockIdx.x+threadIdx.x;
int tid=index*4;	//base index of this node's four children within the next level
int total = 1<<(2*level); //total nodes in current level
int offset=((1<<(2*level))-1)/3; //total nodes in tree upto previous level
int off=offset+total; //total nodes in tree upto current level
while (index<total) //'while' loop will take care if total number more than total threads in kernel
{
index+=offset; //actual index in nodes array
node nd=nodes[index];
if (nodes[index].l<=nodes[index].r)	//node is non-empty
{
float xl=nd.min.x, xr=nd.max.x;
float yl=nd.min.y, yr=nd.max.y;
float xmid=xl+(xr-xl)/2;
float ymid=yl+(yr-yl)/2;
float l=nd.l, r=nd.r;
node child[4];
for (int i=0;i<4;i++)
{
child[i].min.x=child[i].min.y=FLT_MAX, child[i].max.x=child[i].max.y=FLT_MIN;
for (int j=0;j<4;j++) child[i].child[j]=-1;
}
int i=l-1;
float m=0, x=0, y=0, mm=0, xx=0, yy=0;
for (int j=l;j<=r;j++) //swapping of bodies belonging to current node based on x-coordinates creating two children
{
if (body[j].x<=xmid)
{
i++;
vect temp=body[i];
body[i]=body[j];
body[j]=temp;
float t=mass[i];
mass[i]=mass[j];
mass[j]=t;
}
}
child[2].l=l, child[2].r=i;	//left half
child[3].l=i+1, child[3].r=r;	//right half
for (int k=2;k<=3;k++)
{
m=mm=x=xx=y=yy=0;
l=child[k].l, r=child[k].r;
i=l-1;
int cnt=0;
for (int j=l;j<=r;j++) //swapping of bodies in two children created previously based on y-coordinates, each creating two new children
{
x+=body[j].x;
y+=body[j].y;
m+=mass[j];
if (body[j].y<=ymid)
{
xx+=body[j].x, yy+=body[j].y, mm+=mass[j];
cnt++;
i++;
vect temp=body[i];
body[i]=body[j];
body[j]=temp;
float t=mass[i];
mass[i]=mass[j];
mass[j]=t;
}
}
//bottom quadrant: total mass and centre of mass of its cnt bodies
if(cnt>0) child[k].mass=mm, child[k].body.x=xx/cnt, child[k].body.y=yy/cnt;
child[k].l=l, child[k].r=i;
//top quadrant gets the remainder of the sums
mm=m-mm, xx=x-xx, yy=y-yy, cnt=r-l+1-cnt;
if(cnt>0) child[k-2].mass=mm, child[k-2].body.x=xx/cnt, child[k-2].body.y=yy/cnt;
child[k-2].l=i+1, child[k-2].r=r;
}
for (int i=0;i<4;i++)	//assign bounding boxes and store children
{
if (i%2) child[i].min.x=xmid, child[i].max.x=xr;
else child[i].min.x=xl, child[i].max.x=xmid;
if (i<2) child[i].min.y=ymid, child[i].max.y=yr;
else child[i].min.y=yl, child[i].max.y=ymid;
if (off+tid+i<tot) nodes[off+tid+i]=child[i];
nd.child[i]=off+tid+i;
}
}
else	//empty node: create four empty children (l > r marks "empty")
{
for (int i=0;i<4;i++)
{
if (off+tid+i<tot)
{
nodes[off+tid+i].l=0;
nodes[off+tid+i].r=-1;
}
nd.child[i]=off+tid+i;
}
}
nodes[index]=nd;
index-=offset;
index+=(blockDim.x*gridDim.x); //will take care if total number more than total threads by incrementing index by total threads.
}
}
//This is kernel 3
//This function calculates force on bodies
__global__ void calculate(vect *body, float *mass, node *nodes, vect *force, int n, float theta)
{
int index=blockDim.x*blockIdx.x+threadIdx.x;
int l=((1<<(2*(lvl-1)))-1)/3; //total nodes in tree upto max depth
while (index<n) //'while' loop takes care if total number more than total threads in kernel
{
int st[4*(lvl)]; //using array as stack
int curr=0; //variable showing current top index
st[curr]=0;
vect bd=body[index];
while (curr>=0) //for each body do DFS until reached leaf
{
int t=st[curr];
curr--;
node nd=nodes[t];
float s=fmax(nd.max.x-nd.min.x, nd.max.y-nd.min.y);
float x=bd.x-nd.body.x, y=bd.y-nd.body.y;
float dist=sqrt(x*x+y*y);
float val=FLT_MAX;
if (dist>0) val=s/dist;
if (val<theta) //Barnes-Hutt approximation
{
vect frc=gravity(nd.body, bd, nd.mass, mass[index]);
force[index].x+=frc.x;
force[index].y+=frc.y;
}
else
{
if (t>=l) //if reached leaf
{
vect frc=gravity(nd.body, bd, nd.mass, mass[index]);
force[index].x+=frc.x;
force[index].y+=frc.y;
continue;
}
for (int i=0;i<4;i++)
{
int temp=nd.child[i];
if (temp==-1 || nodes[temp].l>nodes[temp].r) continue;
st[++curr]=temp;
}
}
}
index+=(blockDim.x*gridDim.x); //will take care if total number more than total threads by incrementing index by total number of threads in kernel
}
}
//Host-side helper: returns the larger of a and b.
float maxx(float a, float b)
{
	if (a < b) return b;
	return a;
}
//Host-side helper: returns the smaller of a and b.
float minn(float a, float b)
{
	if (a < b) return a;
	return b;
}
//Host driver: generates n random bodies, builds the Barnes-Hut quad tree on the
//GPU level by level, computes per-body forces, and prints them plus the summed
//kernel time. NOTE(review): body/mass/force are variable-length stack arrays —
//large n may overflow the stack; consider heap allocation.
int main()
{
int n;
printf("n : ");
scanf("%d", &n);
vect body[n];
float mass[n];
float m=0, x=0, y=0;
// Random positions and masses in [0, 1e6); accumulate totals for the root node.
for (int i=0;i<n;i++)
{
body[i].x=rand()%1000000;
body[i].y=rand()%1000000;
mass[i]=rand()%1000000;
m+=mass[i], x+=body[i].x, y+=body[i].y;
}
x/=n, y/=n; //centre of mass of the whole system
vect force[n];
vect *min;
vect *max;
for (int i=0;i<n;i++) force[i].x=force[i].y=0;
vect *dforce;
vect *dbody;
float *dmass;
int s=sizeof(vect)*n;
int sz=sizeof(float)*n;
cudaMalloc(&dbody, s);
cudaMalloc(&dmass, sz);
cudaMalloc(&dforce, s);
cudaMemset(dforce, 0, s);
cudaMalloc(&min, s);
cudaMalloc(&max, s);
cudaMemcpy(dbody, body, s, cudaMemcpyHostToDevice);
cudaMemcpy(dmass, mass, sz, cudaMemcpyHostToDevice);
int val=32;
int block = val;
int grid = val;
cudaEvent_t start, stop;
cudaEventCreate(&start);
cudaEventCreate(&stop);
cudaEventRecord(start);
//This is kernel 1 which devided into 2 parts
//bassically it find minimum and maximum from
//the n bodies
findMinMax<<<grid, block>>>(dbody, min, max, n);
findMMinMax<<<1, block>>>(min, max, min, max, val);
cudaEventRecord(stop);
cudaEventSynchronize(stop);
//milliseconds find the total kernal time in GPU
float milliseconds = 0;
cudaEventElapsedTime(&milliseconds, start, stop);
vect gmin;
vect gmax;
cudaMemcpy(&gmin, min, sizeof(vect), cudaMemcpyDeviceToHost);
cudaMemcpy(&gmax, max, sizeof(vect), cudaMemcpyDeviceToHost);
int curr=1;
// Total node capacity of a quad tree of depth lvl: (4^lvl - 1) / 3.
int tot=1<<(2*lvl);
tot=(tot-1)/3;
node h_nodes[tot];
for (int i=0;i<tot;i++)
{
for (int j=0;j<4;j++) h_nodes[i].child[j]=-1;
}
// Seed the root with the global centre of mass, total mass, the full body
// range, and the global bounding box found by kernel 1.
vect temp;
temp.x=x, temp.y=y;
h_nodes[0].body=temp, h_nodes[0].mass=m, h_nodes[0].l=0, h_nodes[0].r=n-1, h_nodes[0].min=gmin, h_nodes[0].max=gmax;
node *d_nodes;
cudaMalloc(&d_nodes, sizeof(node)*tot);
cudaMemcpy(d_nodes, h_nodes, sizeof(node)*tot, cudaMemcpyHostToDevice);
float t;
for (int i=0;i<lvl-1;i++) //creation of tree level by level. Each thread is assigned a node in current level.
{
block = 1024;
grid=ceil((1.0*curr)/block);
grid = minn(grid, 20);	//cap the grid; kernels stride internally
cudaEventRecord(start);
construct<<<grid, block>>>(dbody, dmass, d_nodes, i, tot);
cudaEventRecord(stop);
cudaEventSynchronize(stop);
cudaEventElapsedTime(&t, start, stop);
milliseconds+=t;
curr*=4;	//next level has four times as many nodes
}
cudaMemcpy(h_nodes, d_nodes, sizeof(node)*tot, cudaMemcpyDeviceToHost);
float theta;
printf("theta : ");
scanf("%f", &theta);
grid=minn(20, ceil((1.0*n)/block));
printf("%d\n", grid);
cudaEventRecord(start);
calculate<<<grid, block>>>(dbody, dmass, d_nodes, dforce, n, theta); //Each thread is assigned a body
cudaEventRecord(stop);
cudaEventSynchronize(stop);
cudaEventElapsedTime(&t, start, stop);
milliseconds+=t;
cudaMemcpy(force, dforce, s, cudaMemcpyDeviceToHost);
cudaMemcpy(body, dbody, s, cudaMemcpyDeviceToHost);
cudaMemcpy(mass, dmass, sz, cudaMemcpyDeviceToHost);
x=0, y=0;
for(int i=0;i<n;i++)
{
printf("force %d : x %f y %f m %f : %.15f %.15f\n", i, body[i].x, body[i].y, mass[i], force[i].x, force[i].y);
x+=force[i].x, y+=force[i].y;
}
printf("gpu time : %f\n", milliseconds);
printf("x %f y %f\n", x, y);
}
|
11,290 | #include <iostream>
#include <cuda.h>
using namespace std;
//Tiled matrix multiply C = A * B for n x n row-major matrices, with zero
//padding at the edges so n need not be a multiple of the tile size.
//Requires a square thread block (blocksize = blockDim.x = blockDim.y) and
//dynamic shared memory for two blocksize x blocksize float tiles.
__global__ void matmul_kernel(const float* A, const float* B, float* C, unsigned int n) {
	int blocksize = blockDim.x;
	extern __shared__ float shared_arr[];
	float *As = shared_arr;	//shared tile of A
	float *Bs = (float*)&As[blocksize*blocksize];	//shared tile of B, right after As
	int bx = blockIdx.x;
	int by = blockIdx.y;
	int tx = threadIdx.x;
	int ty = threadIdx.y;
	int aBegin = by * blocksize * n;	//first element of A's tile row
	int aEnd = aBegin + n-1;	//last element of that row
	int aStep = blocksize;	//A tiles advance along a row
	int arow = aBegin/n + ty;	//global row this thread loads from A
	int bBegin = bx * blocksize;	//first element of B's tile column
	int bStep = n * blocksize;	//B tiles advance down a column
	int bcol = bBegin + tx;	//global column this thread loads from B
	int c = n * blocksize * by + blocksize * bx;	//top-left of this block's C tile
	float Csub = 0;	//accumulator for this thread's C element
	for (int a = aBegin, b = bBegin;a <= aEnd;a += aStep, b += bStep) {
		As[blocksize*ty + tx] = ((arow < n) && (a + tx <= aEnd))? A[a + n * ty + tx] : 0;
		Bs[blocksize*ty + tx] = ((bcol < n) && (a + tx <= aEnd))? B[b + n * ty + tx] : 0;
		//Using aEnd condition for zero padding B matrix since column and row dimension of A and B respectively needs to be the same
		__syncthreads();	//tiles fully loaded before use
		for (int k = 0; k < blocksize; ++k)
			Csub += As[blocksize*ty + k] * Bs[blocksize*k + tx];
		__syncthreads();	//finish reading before the next iteration overwrites the tiles
	}
	if((by*blocksize + ty < n) && (bx*blocksize + tx < n)) {
		C[c + n * ty + tx] = Csub;
	}
}
//Launches the tiled matmul kernel for n x n matrices using block_dim x block_dim
//thread blocks and dynamic shared memory for one tile of A plus one tile of B,
//then blocks until the kernel finishes.
__host__ void matmul(const float* A, const float* B, float* C, unsigned int n, unsigned int block_dim) {
	dim3 dimBlock(block_dim, block_dim);
	dim3 dimGrid( (n + block_dim-1)/block_dim , (n + block_dim-1)/block_dim );
	size_t shared_array_size = (2*block_dim*block_dim)*sizeof(float);
	matmul_kernel<<<dimGrid, dimBlock, shared_array_size>>>(A, B, C, n);
	// Surface launch-configuration failures (e.g. too much shared memory),
	// which a bare launch silently swallows.
	cudaError_t err = cudaGetLastError();
	if (err != cudaSuccess) {
		cerr << "matmul kernel launch failed: " << cudaGetErrorString(err) << endl;
	}
	cudaDeviceSynchronize();
}
|
11,291 | #include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>
#include <math.h>
#define COMMENT "Histogram_GPU"
#define RGB_COMPONENT_COLOR 255
#define DIM_BLOCO 32
#define DIM_GRID 1960 // 1960*1960*1024
// One pixel of a binary PPM (P6) image: one 8-bit value per channel.
typedef struct {
	unsigned char red, green, blue;
} PPMPixel;
// A loaded PPM image: width x, height y, and a row-major array of x*y pixels.
typedef struct {
	int x, y;
	PPMPixel *data;
} PPMImage;
// Returns the current wall-clock time in seconds (microsecond resolution),
// via gettimeofday; prints a diagnostic if the call fails.
double rtclock()
{
	struct timeval now;
	struct timezone tz;
	int stat = gettimeofday(&now, &tz);
	if (stat != 0) printf("Error return from gettimeofday: %d",stat);
	return now.tv_sec + now.tv_usec*1.0e-6;
}
//Reads a binary (P6) PPM file with 8-bit components; exits with a message on
//any format or I/O error. Returns a malloc'd PPMImage whose 'data' array the
//caller must free.
static PPMImage *readPPM(const char *filename) {
	char buff[16];
	PPMImage *img;
	FILE *fp;
	int c, rgb_comp_color;
	fp = fopen(filename, "rb");
	if (!fp) {
		fprintf(stderr, "Unable to open file '%s'\n", filename);
		exit(1);
	}
	//magic number must be "P6"
	if (!fgets(buff, sizeof(buff), fp)) {
		perror(filename);
		exit(1);
	}
	if (buff[0] != 'P' || buff[1] != '6') {
		fprintf(stderr, "Invalid image format (must be 'P6')\n");
		exit(1);
	}
	img = (PPMImage *) malloc(sizeof(PPMImage));
	if (!img) {
		fprintf(stderr, "Unable to allocate memory\n");
		exit(1);
	}
	//skip '#' comment lines before the dimensions
	c = getc(fp);
	while (c == '#') {
		while (getc(fp) != '\n');
		c = getc(fp);
	}
	ungetc(c, fp);
	if (fscanf(fp, "%d %d", &img->x, &img->y) != 2) {
		fprintf(stderr, "Invalid image size (error loading '%s')\n", filename);
		exit(1);
	}
	if (fscanf(fp, "%d", &rgb_comp_color) != 1) {
		fprintf(stderr, "Invalid rgb component (error loading '%s')\n",
			filename);
		exit(1);
	}
	if (rgb_comp_color != RGB_COMPONENT_COLOR) {
		fprintf(stderr, "'%s' does not have 8-bits components\n", filename);
		exit(1);
	}
	while (fgetc(fp) != '\n');
	img->data = (PPMPixel*) malloc(img->x * img->y * sizeof(PPMPixel));
	//BUG FIX: the original re-checked 'img' here; the pointer that can be NULL
	//after this malloc is img->data.
	if (!img->data) {
		fprintf(stderr, "Unable to allocate memory\n");
		exit(1);
	}
	if (fread(img->data, 3 * img->x, img->y, fp) != (size_t) img->y) {
		fprintf(stderr, "Error loading image '%s'\n", filename);
		exit(1);
	}
	fclose(fp);
	return img;
}
__global__ void count_hist(PPMPixel *data, float *h, unsigned int n_){
	// n_ is the total number of pixels.
	// One thread per (pixel, rgb) combination, where pixel = 0..n_-1 and
	// rgb = 0..63 (channels already quantized to 2 bits each by the caller);
	// the thread increments h[rgb] when its pixel matches that colour.
	// Compute the flat thread index and the derived indices x, i, j, k, l.
	// DIM_BLOCO = blockDim.x = blockDim.y
	// DIM_GRID = gridDim.x = gridDim.y
	unsigned int index = DIM_BLOCO*DIM_BLOCO*(DIM_GRID*blockIdx.x+blockIdx.y)+blockDim.y*threadIdx.x+threadIdx.y;
	unsigned int x = index/n_;	//colour bin (0..63) this thread tests
	unsigned int i = index%n_;	//pixel this thread tests
	unsigned int j = x/16;	//red level (bits 5-4 of x)
	unsigned int k = (x-16*j)/4;	//green level (bits 3-2 of x)
	unsigned int l = (x-16*j-4*k);	//blue level (bits 1-0 of x)
	if (index < 64*n_
		&& data[i].red == j
		&& data[i].green == k
		&& data[i].blue == l) {
		atomicAdd(&h[x],1.0); // the histogram is normalized later to avoid precision error
	}
}
// Quantizes each pixel's channels from [0,255] to [0,3] (64 combined colour
// levels), counts colour frequencies on the GPU, normalizes the counts into h
// (length 64), and prints a tab-separated timing breakdown:
// alloc \t H2D copy \t kernel \t D2H copy \t total.
void Histogram(PPMImage *image, float *h) {
	int i;
	unsigned int n = image->y * image->x;
	double t_start, t_end, t_cbuffer, t_offload_enviar, t_kernel, t_offload_receber;
	// Quantize every channel to 2 bits (integer arithmetic; floor is a no-op here).
	for (i = 0; i < n; i++) {
		image->data[i].red = floor((image->data[i].red * 4) / 256);
		image->data[i].blue = floor((image->data[i].blue * 4) / 256);
		image->data[i].green = floor((image->data[i].green * 4) / 256);
	}
	// GPU portion: allocate, copy in, count, copy out.
	unsigned int size = 3*sizeof(unsigned char)*n;
	PPMPixel *d_data;
	float *d_h;
	t_start = rtclock();
	cudaMalloc((void **)&d_data,size);
	cudaMalloc((void **)&d_h,64*sizeof(float));
	t_end = rtclock();
	t_cbuffer = t_end-t_start;
	t_start = rtclock();
	cudaMemcpy(d_data,image->data,size,cudaMemcpyHostToDevice);
	cudaMemcpy(d_h,h,64*sizeof(float),cudaMemcpyHostToDevice);
	t_end = rtclock();
	t_offload_enviar = t_end-t_start;
	dim3 dimGrid(DIM_GRID,DIM_GRID);
	dim3 dimBlock(DIM_BLOCO,DIM_BLOCO);
	t_start = rtclock();
	count_hist<<<dimGrid,dimBlock>>>(d_data,d_h,n);
	cudaDeviceSynchronize();
	t_end = rtclock();
	t_kernel = t_end-t_start;
	t_start = rtclock();
	cudaMemcpy(h,d_h,64*sizeof(float),cudaMemcpyDeviceToHost);
	t_end = rtclock();
	t_offload_receber = t_end-t_start;
	cudaFree(d_data); cudaFree(d_h);
	// Normalize counts to frequencies.
	for(i = 0; i < 64; i++)
		h[i] /= n;
	double t_total = t_cbuffer+t_offload_enviar+t_kernel+t_offload_receber;
	printf("%lf\t%lf\t%lf\t%lf\t%lf\n",t_cbuffer,t_offload_enviar,t_kernel,t_offload_receber,t_total);
}
// Entry point: reads the PPM named by argv[1], builds its 64-bin colour
// histogram on the GPU, and prints the normalized bins plus total wall time.
int main(int argc, char *argv[]) {
	// Exactly one argument (the PPM path) is required. BUG FIX: the original
	// printed a warning but then dereferenced argv[1] anyway, crashing when
	// it was absent.
	if( argc != 2 ) {
		printf("Too many or no one arguments supplied.\n");
		return 1;
	}
	double t_start, t_end;
	int i;
	char *filename = argv[1];
	PPMImage *image = readPPM(filename);
	float *h = (float*)malloc(sizeof(float) * 64);
	// Start from an all-zero histogram.
	for(i=0; i < 64; i++) h[i] = 0.0;
	t_start = rtclock();
	Histogram(image, h);
	t_end = rtclock();
	for (i = 0; i < 64; i++){
		printf("%0.3f ", h[i]);
	}
	fprintf(stdout, "\n%0.6lfs\n", t_end - t_start);
	// Release host resources (the original leaked the image).
	free(image->data);
	free(image);
	free(h);
	return 0;
}
|
11,292 | /*-
* Copyright 2015 Grammarly, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuda_runtime.h>
// Test kernel for verifying stream dependency ordering.
// Records in test_results[node_id] whether every node listed in blocking_nodes
// had already set its execution_checklist flag when this kernel ran, then
// busy-waits ~4e9 device clocks before marking this node as executed.
__global__ void testDependencies(int node_id, int* blocking_nodes, int blocking_nodes_num, int *execution_checklist, int* test_results) {
	test_results[node_id] = 1;
	// Fail the test if any prerequisite node has not executed yet.
	for (int i = 0; i < blocking_nodes_num; i++) {
		int bloking_node_id = blocking_nodes[i];
		if (!execution_checklist[bloking_node_id]) {
			test_results[node_id] = 0;
			break;
		}
	}
	// Spin to keep this node "running" long enough for ordering to matter.
	clock_t start_clock = clock64();
	clock_t clock_offset = 0;
	while (clock_offset < 4000000000L) {
		clock_offset = clock64() - start_clock;
	}
	execution_checklist[node_id] = 1;
}
extern "C" {
// C-linkage wrapper: launches testDependencies as a single thread on the given
// stream and returns any launch error.
cudaError_t _testDependencies(cudaStream_t stream, int node_id, int* blocking_nodes, int blocking_nodes_num, int *execution_checklist, int* test_results) {
	testDependencies<<<1, 1, 0, stream>>>(node_id, blocking_nodes, blocking_nodes_num, execution_checklist, test_results);
	return cudaGetLastError();
}
} |
11,293 | #include "includes.h"
// Interleaves two row-major matrices into 'out' batch by batch: for each of the
// rowBatch batches, the first inStride1 output rows are taken from in1 and the
// next inStride2 rows from in2. All matrices have 'cols' columns.
// Blocks stride over output rows; threads within a block stride over columns.
__global__ void gJoin2(float* out, size_t rowBatch, size_t cols, const float* in1, size_t inStride1, const float* in2, size_t inStride2) {
	int outStride = inStride1 + inStride2;	//output rows per batch
	int rows = rowBatch * outStride;	//total output rows
	for(int bid = 0; bid < rows; bid += gridDim.x) {
		int j = bid + blockIdx.x;	//output row handled by this block
		if(j < rows) {
			float* rowOut = out + j * cols;
			int curBatch = j / outStride;
			int curPos = j % outStride;	//position within the batch decides the source
			int jIn1 = (curBatch * inStride1) + curPos;
			int jIn2 = (curBatch * inStride2) + curPos - inStride1;
			const float* rowIn1 = in1 + jIn1 * cols;
			const float* rowIn2 = in2 + jIn2 * cols;
			for(int tid = 0; tid < cols; tid += blockDim.x) {
				int i = tid + threadIdx.x;
				if(i < cols) {
					if(curPos < inStride1)
						rowOut[i] = rowIn1[i];
					else
						rowOut[i] = rowIn2[i];
				}
			}
		}
	}
} |
11,294 | #include <iostream>
#include <time.h>
#include <stdexcept>
#include <vector>
#include <cstdlib>
#define r1 20
#define c1 20
#define r2 20
#define c2 20
// Computes one element of C = A * B (r1 x c1 times r2 x c2 matrices) per block;
// launch with a (c2, r1) grid of single-thread blocks, so
// (blockIdx.x, blockIdx.y) = (column, row) of the output element.
__global__ void matmul(int* a,int* b, int* c){
	int col = blockIdx.x;
	int row = blockIdx.y;
	c[c2 * row + col] = 0;
	for(int k = 0; k < c1; k++)
		c[c2 * row + col] += (a[c1 * row + k] * b[c2 * k + col]);
}
// Entry point: fills two random integer matrices, multiplies them on the GPU
// (one block per output element), and reports the elapsed time via CUDA events.
int main(void){
	srand(time(0));
	int a[r1][c1];
	int b[r2][c2];
	int c[r1][c2];
	for(int i = 0; i < r1; i++)
		for(int j = 0; j < c1; j++)
			a[i][j] = rand();
	for(int i = 0; i < r2; i++)
		for(int j = 0; j < c2; j++)
			b[i][j] = rand();
	int *p ,*q, *r;
	cudaMalloc((void**)&p,r1 * c1 * sizeof(int));
	cudaMalloc((void**)&q,r2 * c2 * sizeof(int));
	cudaMalloc((void**)&r,r1 * c2 * sizeof(int));
	cudaMemcpy(p,a,r1 * c1 * sizeof(int),cudaMemcpyHostToDevice);
	// BUG FIX: b was copied with r1*c1 bytes; the size that matches b's
	// allocation is r2*c2. (The values coincide here, but the expression
	// was wrong and breaks as soon as the dimensions differ.)
	cudaMemcpy(q,b,r2 * c2 * sizeof(int),cudaMemcpyHostToDevice);
	dim3 grid(c2,r1); // one block per output element: cols * rows
	cudaEvent_t start,stop;
	float elapsed;
	cudaEventCreate(&start);
	cudaEventCreate(&stop);
	cudaEventRecord(start,0);
	matmul<<<grid,1>>>(p,q,r);
	cudaDeviceSynchronize();
	cudaMemcpy(c,r, r1 * c2 * sizeof(int),cudaMemcpyDeviceToHost);
	cudaEventRecord(stop,0);
	cudaEventSynchronize(stop);
	cudaEventElapsedTime(&elapsed,start,stop);
	cudaEventDestroy(start);
	cudaEventDestroy(stop);
	std::cout << "Elapsed Time: " << elapsed << "ms" << std::endl;
	cudaFree(p);
	cudaFree(q);
	cudaFree(r);
	return 0;
}
|
11,295 | #include <stdio.h>
#include <assert.h>
#include <cuda.h>
#define N 10
// texture object is a kernel argument
// Debug kernel: each of the first N threads fetches one float through the
// texture object and prints it. The texture object is a kernel argument.
__global__ void printGpu_tex(cudaTextureObject_t tex) {
	int tid = blockIdx.x * blockDim.x + threadIdx.x;
	if (tid >= N) return;
	float value = tex1Dfetch<float>(tex, tid);
	printf("tid=%d, tex1Dfetch<float>(tex, %d) = %f \n", tid, tid, value);
}
// texture object is a kernel argument
__global__ void printGpu_vanilla(float* d_buffer) {
int tid = blockIdx.x * blockDim.x + threadIdx.x;
if(tid < N){
float x = d_buffer[tid];
printf("tid=%d, d_buffer[%d] = %f \n", tid, tid, x);
}
}
// Demonstrates reading the same device buffer via a CUDA texture object and
// via a plain pointer, printing both from device code.
int main() {
	// declare and allocate memory
	float* d_buffer;
	cudaMalloc(&d_buffer, N*sizeof(float));
	float h_buffer[10] = {1,2,3,4,5,6,7,8,9,10};
	cudaMemcpy(d_buffer, h_buffer, sizeof(float)*N, cudaMemcpyHostToDevice);
	//CUDA 5 texture objects: https://developer.nvidia.com/content/cuda-pro-tip-kepler-texture-objects-improve-performance-and-flexibility
	cudaResourceDesc resDesc;
	memset(&resDesc, 0, sizeof(resDesc));
	resDesc.resType = cudaResourceTypeLinear;
	resDesc.res.linear.devPtr = d_buffer;
	resDesc.res.linear.desc.f = cudaChannelFormatKindFloat;
	resDesc.res.linear.desc.x = 32; // bits per channel
	resDesc.res.linear.sizeInBytes = N*sizeof(float);
	cudaTextureDesc texDesc;
	memset(&texDesc, 0, sizeof(texDesc));
	texDesc.readMode = cudaReadModeElementType;
	// create texture object: we only have to do this once!
	cudaTextureObject_t tex;
	cudaCreateTextureObject(&tex, &resDesc, &texDesc, NULL);
	int grid = N/16+1;
	int block = 16;
	printGpu_tex<<<grid, block>>>(tex);
	printGpu_vanilla<<<grid, block>>>(d_buffer);
	// BUG FIX: kernel launches are asynchronous. Without this synchronization
	// the texture object and d_buffer could be torn down while the kernels are
	// still running, and buffered device printf output may never be flushed.
	cudaDeviceSynchronize();
	// destroy texture object
	cudaDestroyTextureObject(tex);
	cudaFree(d_buffer);
}
|
11,296 | /*
CUDA good to knows:
Basics:
Per thread:
registers (fast)
local memory (off-chip [still on the GPU though], slow)
Per block:
multiple threads
shared memory (semi-fast)
Per GPU:
Multiple kernels that each run multiple blocks
Global memory (off-chip [still on the GPU though], slow)
Threads are executed by thread processors
Threads reside in thread blocks
Thread blocks are executed by multiprocessors
Several concurrent thread blocks can reside on one multiprocessor
Limited by multiprocessor resources (shared memory and registers)
A kernel is launched as a grid of thread blocks. Only one kernel can execute on a device at a time.
Advanced:
cudaMemcpy(dst, src, size, direction)
blocks CPU thread.
Compiler tips:
nvcc <filename>.cu [-o <executable>]
Builds release mode
nvcc -g <filename>.cu
Builds debug mode
Can debug host code but not device code
nvcc -deviceemu <filename>.cu
Builds device emulation mode
All code runs on CPU, no debug symbols
nvcc -deviceemu -g <filename>.cu
Builds debug device emulation mode
All code runs on CPU, with debug symbols
Tips and tricks:
If our arrays A,B,C are shorter than 1024 elements, N < 1024, then
– one thread block is enough
– N threads in the thread block
If our arrays are longer than 1024, then
– Choose the number of threads in the thread blocks to be
integer*32
– Calculate how many thread blocks you need
– There will be some threads that should do nothing
Why multiples of 32?
– Threads are executed synchronously in bunches of 32 =
warp
– All threads must have their data ready before the warp runs
– Cache lines are 4 B x warp size = 128 B
– GPU resources can be fully utilized when these parameters
are used
# of blocks = ceil(N/threadsInBlock)
= (N+threadsInBlock-1)/threadsInBlock
Compile:
nvcc -o galaxy galaxy_program.cu -res-usage
Run:
time ./galaxy
*/
#include <stdio.h>
#include <iostream>
#include <fstream>
using namespace std;
// Declare functions and classes that are below main.
// Container for one galaxy catalogue: the galaxy count plus heap-allocated
// arrays of right-ascension (alphas) and declination (deltas) angles in
// radians. The class does not own or free the arrays.
class GalaxyFile{
	public:
		int number_of_galaxies;
		float *alphas, *deltas;
		GalaxyFile(){}
		GalaxyFile(int num, float *as, float *ds)
			: number_of_galaxies(num), alphas(as), deltas(ds) {}
};
void print_omegas(float*, int);
void write_omegas_to_file(string, float*);
void write_histogram_to_file(string, int*);
void print_histogram(string, int*, int);
GalaxyFile readFile(string);
// Define some useful macros
#define BIN_WIDTH 0.25f
#define BIN_MIN 0.0f
#define BIN_MAX 180.0f
#define NUMBER_OF_BINS (int)(BIN_MAX*(1.0f/BIN_WIDTH))
// Google is your friend.
#define ARCMINS_TO_RADIANS 0.000290888209f
#define RADIANS_TO_DEGREES 57.295779513f
// For each galaxy idx of catalogue 2, accumulates into gpu_hist the angular
// separation (degrees, BIN_WIDTH-wide bins) to every galaxy of catalogue 1.
// NOTE: the catalogue size (100000) is hard-coded and must match the inputs.
__global__
void angle_between_galaxies(float *alphas1, float *deltas1, float *alphas2, float *deltas2, int *gpu_hist){
	int idx = blockDim.x*blockIdx.x + threadIdx.x;
	if(idx < 100000){
		for(int i=0; i<100000; i++){
			float angle = 0.0f;
			// Don't do duplicates
			if( alphas1[i] != alphas2[idx] && deltas1[i] != deltas2[idx] ) {
				// Spherical law of cosines; clamp to [-1, 1] before acos.
				float x = sin(deltas1[i]) * sin(deltas2[idx]) + cos(deltas1[i]) * cos(deltas2[idx]) * cos(alphas1[i] - alphas2[idx]);
				angle = acosf(fmaxf(-1.0f, fminf(x, 1.0f))) * RADIANS_TO_DEGREES;
			}
			int ix = (int)(floor(angle * (1.0f/BIN_WIDTH))) % NUMBER_OF_BINS;
			// BUG FIX: the original called __syncthreads() here, inside a
			// branch that threads with idx >= 100000 never reach. A barrier in
			// divergent control flow is undefined behavior and can hang the
			// block. No barrier is needed before an atomic, so it is removed.
			atomicAdd(&gpu_hist[ix], 1);
		}
	}
}
// Builds the angular-separation histogram between two catalogues on the GPU.
// Returns a heap-allocated NUMBER_OF_BINS-long array of counts; caller frees it.
int* calculate_histogram(GalaxyFile galaxies1, GalaxyFile galaxies2){
	// Byte sizes as size_t — the original declared these as float, the wrong
	// type for allocation/copy sizes.
	size_t galaxy_array_size = galaxies1.number_of_galaxies * sizeof(float);
	size_t histogram_size = NUMBER_OF_BINS * sizeof(int);
	// Host histogram that receives the device result.
	int *histogram;
	histogram = (int *) malloc(NUMBER_OF_BINS*sizeof(int));
	// Device-side angle arrays and histogram.
	float *gpu_alphas1;
	float *gpu_deltas1;
	float *gpu_alphas2;
	float *gpu_deltas2;
	int *gpu_histogram;
	cudaMalloc((void**) &gpu_alphas1, galaxy_array_size);
	cudaMalloc((void**) &gpu_deltas1, galaxy_array_size);
	cudaMalloc((void**) &gpu_alphas2, galaxy_array_size);
	cudaMalloc((void**) &gpu_deltas2, galaxy_array_size);
	cudaMalloc((void**) &gpu_histogram, NUMBER_OF_BINS*sizeof(int));
	// Copy angles from CPU onto GPU.
	cudaMemcpy(gpu_alphas1, galaxies1.alphas, galaxy_array_size, cudaMemcpyHostToDevice);
	cudaMemcpy(gpu_deltas1, galaxies1.deltas, galaxy_array_size, cudaMemcpyHostToDevice);
	cudaMemcpy(gpu_alphas2, galaxies2.alphas, galaxy_array_size, cudaMemcpyHostToDevice);
	cudaMemcpy(gpu_deltas2, galaxies2.deltas, galaxy_array_size, cudaMemcpyHostToDevice);
	// BUG FIX: the original also copied the *uninitialized* host histogram to
	// the device using galaxy_array_size bytes — an out-of-bounds device write
	// whenever the catalogue outgrows the histogram. The cudaMemset below is
	// the only initialization the device histogram needs.
	int warp_size = 32;
	int threadsInBlock = 11 * warp_size;
	// Ceiling division; the original's (n + t) / t overshot by one block.
	int blocksInGrid = (galaxies1.number_of_galaxies + threadsInBlock - 1) / threadsInBlock;
	// Define the grid size (blocks per grid)
	dim3 dimGrid(blocksInGrid);
	// Define block size (threads per block)
	dim3 dimBlock(threadsInBlock);
	// Write histogram full of zeros
	cudaMemset(gpu_histogram, 0, histogram_size);
	// Calculate angles between galaxies1[i] and every galaxy in galaxies2
	angle_between_galaxies<<<dimGrid, dimBlock>>>(gpu_alphas1, gpu_deltas1, gpu_alphas2, gpu_deltas2, gpu_histogram);
	// Copy result histogram into CPU histogram
	cudaMemcpy(histogram, gpu_histogram, histogram_size, cudaMemcpyDeviceToHost);
	// Free all the memory we allocated on the GPU
	cudaFree( gpu_alphas1 );
	cudaFree( gpu_deltas1 );
	cudaFree( gpu_alphas2 );
	cudaFree( gpu_deltas2 );
	cudaFree( gpu_histogram );
	return histogram;
}
// Landy-Szalay-style estimator per bin:
//   omega_i = (DD_i - 2*DR_i + RR_i) / RR_i, or 0 where RR_i is empty.
// Returns a heap-allocated NUMBER_OF_BINS-long array; caller frees it.
float* calculate_omegas(int* DD, int* DR, int* RR){
	float* result = (float *) malloc(NUMBER_OF_BINS*sizeof(float));
	for(int bin=0; bin<NUMBER_OF_BINS; bin++){
		result[bin] = (RR[bin] != 0.0f)
			? (DD[bin] - 2.0f*DR[bin] + RR[bin]) / RR[bin]
			: 0.0f;
	}
	return result;
}
// CUDA program that calculates the two-point angular correlation of galaxies:
// builds DD, DR and RR separation histograms on the GPU, derives the omega
// estimator, and writes everything to files under output/.
int main()
{
	// Read files and store data in GalaxyFile classes.
	// NOTE(review): the "flat" file feeds the histogram labelled DD while the
	// "data" file feeds RR — confirm the labels match the intended catalogues.
	GalaxyFile galaxies1;
	GalaxyFile galaxies2;
	galaxies1 = readFile("test_data/flat_100k_arcmin.txt");
	galaxies2 = readFile("test_data/data_100k_arcmin.txt");
	int* DD_hist = calculate_histogram(galaxies1, galaxies1);
	int* DR_hist = calculate_histogram(galaxies1, galaxies2);
	int* RR_hist = calculate_histogram(galaxies2, galaxies2);
	print_histogram("DD", DD_hist, 20);
	print_histogram("DR", DR_hist, 20);
	print_histogram("RR", RR_hist, 20);
	write_histogram_to_file("dd_histogram.txt", DD_hist);
	write_histogram_to_file("dr_histogram.txt", DR_hist);
	write_histogram_to_file("rr_histogram.txt", RR_hist);
	float* omegas = calculate_omegas(DD_hist, DR_hist, RR_hist);
	print_omegas(omegas, 15);
	write_omegas_to_file("omegas.txt", omegas);
	return EXIT_SUCCESS;
}
/* UTILITY FUNCTIONS/CLASSES BELOW */
// Reads a galaxy catalogue: the first line is the galaxy count, followed by one
// "alpha delta" pair (in arc minutes) per galaxy. Angles are converted to
// radians. The returned GalaxyFile points at malloc'd arrays the caller owns.
GalaxyFile readFile(string filename)
{
	ifstream infile(filename);
	int number_of_galaxies;
	// Read first line which is the number of galaxies that's stored in the file.
	infile >> number_of_galaxies;
	// Allocation size in bytes — size_t, not float as the original declared.
	size_t galaxy_array_size = number_of_galaxies * sizeof(float);
	float *alphas, *deltas;
	alphas = (float*) malloc(galaxy_array_size);
	deltas = (float*) malloc(galaxy_array_size);
	float alpha;
	float delta;
	// Read arc minute angles for each galaxy
	// Then convert those angles to radians and store those in alphas and deltas arrays
	for(int i=0; i<number_of_galaxies; i++) {
		infile >> alpha >> delta;
		alphas[i] = alpha * ARCMINS_TO_RADIANS;
		deltas[i] = delta * ARCMINS_TO_RADIANS;
	}
	infile.close();
	GalaxyFile galaxyFile(number_of_galaxies, alphas, deltas);
	return galaxyFile;
}
// Prints the first bins_to_print omega values, skipping zero entries.
void print_omegas(float* omegas, int bins_to_print){
	for (int i = 0; i < NUMBER_OF_BINS && i < bins_to_print; i++){
		if (omegas[i] != 0.0f){
			printf("omegas[%d]: %f\n", i, omegas[i]);
		}
	}
}
// Prints the first bins_to_print non-empty bins as "[min, max]: count", then
// the total number of pair-angles accumulated over ALL bins.
void print_histogram(string label, int *histogram, int bins_to_print){
	long long total_pairs = 0;
	for (int bin = 0; bin < NUMBER_OF_BINS; bin++) {
		int count = histogram[bin];
		total_pairs += count;
		if (count > 0 && bin < bins_to_print) {
			float lo = (float)bin / (1.0f/BIN_WIDTH);
			float hi = lo + BIN_WIDTH;
			printf("[%f, %f]: %d\n", lo, hi, count);
		}
	}
	cout << "Galaxy pairs counted in " << label << ": " << total_pairs << endl;
}
// Writes one omega value per line to output/<filename> (no trailing newline).
void write_omegas_to_file(string filename, float* omegas){
	ofstream out;
	out.open("output/"+filename);
	for (int bin = 0; bin < NUMBER_OF_BINS; bin++){
		out << omegas[bin];
		if (bin < NUMBER_OF_BINS-1) out << "\n";
	}
	out.close();
}
// Writes one histogram count per line to output/<filename> (no trailing newline).
void write_histogram_to_file(string filename, int* histogram){
	ofstream out;
	out.open("output/"+filename);
	for (int bin = 0; bin < NUMBER_OF_BINS; bin++){
		out << histogram[bin];
		if (bin < NUMBER_OF_BINS-1) out << "\n";
	}
	out.close();
} |
11,297 | #include <cuda.h>
#include <stdio.h>
#define N 256
// kernel code for adding two vector elements
// Element-wise vector addition c = a + b over N elements; one thread per
// element, launched as a single block of N threads.
__global__ void vecAdd(float* a, float* b, float* c)
{
	int idx = threadIdx.x;
	if (idx >= N) return;
	c[idx] = a[idx] + b[idx];
}
// Host driver: fills two N-element vectors, adds them on the GPU with a single
// block of N threads, and prints the result.
int main(void)
{
	int i;
	float a[N], b[N], c[N];
	float *devPtrA, *devPtrB, *devPtrC;
	// initialize arrays
	for (i=0; i < N; i++) {
		a[i] = -i;
		b[i] = i*i;
	}
	// allocate CUDA memory for arrays
	int memsize = N*sizeof(float);
	cudaMalloc((void**)&devPtrA, memsize);
	cudaMalloc((void**)&devPtrB, memsize);
	cudaMalloc((void**)&devPtrC, memsize);
	// copy host data to CUDA memory
	cudaMemcpy(devPtrA, a, memsize, cudaMemcpyHostToDevice);
	cudaMemcpy(devPtrB, b, memsize, cudaMemcpyHostToDevice);
	// call add function on CUDA GPU
	vecAdd<<<1, N>>>(devPtrA, devPtrB, devPtrC);
	// copy results back (a blocking memcpy, so no explicit sync is needed
	// between the launch and reading c on the host)
	cudaMemcpy(c, devPtrC, memsize, cudaMemcpyDeviceToHost);
	// print results
	for (i=0; i < N; i++)
		printf("C[%d]=%f\n", i, c[i]);
	cudaFree(devPtrA);
	cudaFree(devPtrB);
	cudaFree(devPtrC);
	return 0;
}
|
11,298 | #include <stdio.h>
//static cudaArray* tex_array;
//m_q: Number of vertical interior grid points, k_q: Number of horizontal grid points
// Semi-Lagrangian advection of a scalar (float) field on a 2D grid.
// Each thread owns a TILE_HEIGHT x TILE_WIDTH patch; the outer while-loops
// stride over row chunks (i) and column chunks (j) so a launch of any size
// covers the full m_q x k_q interior.
//   dt         : time step
//   dy, dx     : grid spacings. NOTE(review): dy is unused — the y
//                back-trace multiplies by dx, which is only correct for
//                square cells (dx == dy). Confirm with the call sites.
//   m_q, k_q   : number of interior rows / columns to process
//   U, pitch_u : velocity field, pitched float2 rows (pitch in bytes)
//   Q          : advected quantity, sampled via a linearly-filtered texture
//   C, pitch_c : output field, pitched float rows (pitch in bytes)
__global__
void k_advection_2D_f32(float dt, float dy, float dx, int m_q, int k_q, float2* U, int pitch_u, cudaTextureObject_t Q, float* C, int pitch_c){
    const int TILE_WIDTH=8;
    const int TILE_HEIGHT=8;
    // upper-left corner of this thread's tile
    int idy=blockIdx.y*blockDim.y*TILE_HEIGHT+threadIdx.y*TILE_HEIGHT;
    int idx=blockIdx.x*blockDim.x*TILE_WIDTH+threadIdx.x*TILE_WIDTH;
    int i=0;
    float2 p;   // back-traced sample position in texture coordinates
    // advance the base pointers to this thread's first row (pitched layout)
    C=(float*) ((char*)C+idy*pitch_c);
    U=(float2*) ((char*)U+idy*pitch_u);
    float2* U_ptr=U;
    float* C_ptr=C;
    while (i<m_q){                    // stride over row chunks
        for (int i1=0;i1<TILE_HEIGHT;i1++){
            int fy=idy+i1;
            if ((fy+i)<m_q){          // row still inside the field
                int j=0;
                while(j<k_q){         // stride over column chunks
                    //printf("y:%d\n",fy);
                    for (int i2=0;i2<TILE_WIDTH;i2++){
                        int fx=idx+i2;
                        if ((fx+j)<k_q){
                            //printf("i: %d j: %d y: %d x:%d\n",i,j,fy,fx);
                            // trace backwards along the local velocity and
                            // sample Q at the departure point; the 1.5f
                            // offsets account for the one-cell boundary
                            // ring plus the 0.5 texel-center offset
                            float2 v=U_ptr[fx+j];
                            p.x=(fx+j+1.5f)-(dt*v.x*dx);
                            p.y=(fy+i+1.5f)-(dt*v.y*dx);
                            float q=tex2D<float>(Q,p.x,p.y);
                            C_ptr[fx+j]=q;
                        }
                        else{
                            break;    // past the last column of the field
                        }
                    }
                    j+=gridDim.x*blockDim.x*TILE_WIDTH;
                }
            }
            // step both row pointers down one pitched row
            C_ptr=(float*) ((char*)C_ptr+pitch_c);
            U_ptr=(float2*) ((char*)U_ptr+pitch_u);
        }
        i+=gridDim.y*blockDim.y*TILE_HEIGHT;
        // rebase the row pointers for the next row chunk
        C_ptr=(float*) ((char*)C+i*pitch_c);
        U_ptr=(float2*) ((char*)U+i*pitch_u);
    }
}
// Semi-Lagrangian advection of a vector (float2) field on a 2D grid —
// float2 twin of k_advection_2D_f32 above; typically used to self-advect
// the velocity field. Same tiling/striding scheme and same parameters,
// except Q is sampled as float2 and C stores float2.
// NOTE(review): as in the scalar version, the y back-trace uses dx, not
// dy — only correct for square cells; confirm dx == dy at call sites.
__global__
void k_advection_2d_f32(float dt, float dy, float dx, int m_q, int k_q, float2* U, int pitch_u, cudaTextureObject_t Q, float2* C, int pitch_c){
    const int TILE_WIDTH=8;
    const int TILE_HEIGHT=8;
    // upper-left corner of this thread's tile
    int idy=blockIdx.y*blockDim.y*TILE_HEIGHT+threadIdx.y*TILE_HEIGHT;
    int idx=blockIdx.x*blockDim.x*TILE_WIDTH+threadIdx.x*TILE_WIDTH;
    int i=0;
    float2 p;   // back-traced sample position in texture coordinates
    // advance the base pointers to this thread's first row (pitched layout)
    C=(float2*) ((char*)C+idy*pitch_c);
    U=(float2*) ((char*)U+idy*pitch_u);
    float2* U_ptr=U;
    float2* C_ptr=C;
    while (i<m_q){                    // stride over row chunks
        for (int i1=0;i1<TILE_HEIGHT;i1++){
            int fy=idy+i1;
            if ((fy+i)<m_q){          // row still inside the field
                int j=0;
                while(j<k_q){         // stride over column chunks
                    //printf("y:%d\n",fy);
                    for (int i2=0;i2<TILE_WIDTH;i2++){
                        int fx=idx+i2;
                        if ((fx+j)<k_q){
                            //printf("i: %d j: %d y: %d x:%d\n",i,j,fy,fx);
                            // trace backwards along the local velocity and
                            // sample Q at the departure point
                            float2 v=U_ptr[fx+j];
                            p.x=(fx+j+1.5f)-(dt*v.x*dx);// we add 1.5 because of boundary conditions offset, else it would be 0.5
                            p.y=(fy+i+1.5f)-(dt*v.y*dx);// we add 1.5 because of boundary conditions offset, else it would be 0.5
                            float2 q=tex2D<float2>(Q,p.x,p.y);
                            C_ptr[fx+j]=q;
                        }
                        else{
                            break;    // past the last column of the field
                        }
                    }
                    j+=gridDim.x*blockDim.x*TILE_WIDTH;
                }
            }
            // step both row pointers down one pitched row
            C_ptr=(float2*) ((char*)C_ptr+pitch_c);
            U_ptr=(float2*) ((char*)U_ptr+pitch_u);
        }
        i+=gridDim.y*blockDim.y*TILE_HEIGHT;
        // rebase the row pointers for the next row chunk
        C_ptr=(float2*) ((char*)C+i*pitch_c);
        U_ptr=(float2*) ((char*)U+i*pitch_u);
    }
}
// Advect the interior of the (m_q x k_q) scalar field Q_d into C_d,
// sampling Q_d through a linearly-filtered texture object. U_d is the
// velocity field. All three arrays are pitched (pitches in bytes).
// Fixes vs. original: forwards dx (the original passed dy twice),
// bails out if texture creation fails (the original launched anyway),
// and destroys the texture object (the original leaked one per call).
__host__
void advection_2D_f32_device(float dt, float dy, float dx, int m_q, int k_q, float2* U_d, int pitch_u, float* Q_d, int pitch_q, float* C_d, int pitch_c){
    // Need at least one interior point in each direction (2-cell border).
    if ((m_q<3) || (k_q<3)){
        return;
    }
    // Describe the pitched 2D array backing the texture.
    cudaResourceDesc resDesc;
    memset(&resDesc,0,sizeof(resDesc));
    resDesc.resType = cudaResourceTypePitch2D;
    resDesc.res.pitch2D.devPtr=Q_d;
    resDesc.res.pitch2D.width=k_q;
    resDesc.res.pitch2D.height=m_q;
    resDesc.res.pitch2D.pitchInBytes=pitch_q;
    resDesc.res.pitch2D.desc=cudaCreateChannelDesc<float>();
    // Bilinear filtering, unnormalized coordinates, clamp at the border.
    cudaTextureDesc texDesc;
    memset(&texDesc,0,sizeof(texDesc));
    texDesc.normalizedCoords = false;
    texDesc.filterMode = cudaFilterModeLinear;
    texDesc.readMode=cudaReadModeElementType;
    texDesc.addressMode[0] = cudaAddressModeClamp;
    // Create the texture object; do not launch if creation failed.
    cudaTextureObject_t Q_tex;
    cudaError_t error1=cudaCreateTextureObject(&Q_tex, &resDesc, &texDesc, NULL);
    if (error1 !=cudaSuccess){
        printf("Errorcode: %d\n",error1);
        return;
    }
    printf("w, h: %d,%d\n",k_q,m_q);
    // Skip the one-cell boundary ring: start at row 1, column 1.
    float* C_ptr=(float*) ((char*)C_d+pitch_c)+1;
    float2* U_ptr=(float2*) ((char*)U_d+pitch_u)+1;
    // BUG FIX: the original passed dy for the kernel's dx argument.
    k_advection_2D_f32<<<dim3(1,1,1),dim3(8,4,1)>>>(dt,dy,dx,m_q-2,k_q-2,U_ptr,pitch_u,Q_tex,C_ptr,pitch_c);
    cudaError_t launchErr = cudaGetLastError();
    if (launchErr != cudaSuccess){
        printf("Kernel launch errorcode: %d\n", launchErr);
    }
    // The texture object must outlive the kernel: synchronize before
    // destroying it so the handle is not leaked on every call.
    cudaDeviceSynchronize();
    cudaDestroyTextureObject(Q_tex);
}
|
11,299 | #include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <cuda_runtime.h>
#define BLK 8
#define dimA (10*BLK*10*BLK)
#define dimB (10*BLK*20*BLK)
#define szA (10*BLK*10*BLK*sizeof(double))
#define szB (10*BLK*20*BLK*sizeof(double))
#define A ((const double (*)[10*BLK])a)
#define B ((const double (*)[20*BLK])b)
#define C ((double (*)[20*BLK])c)
#define bx blockIdx.x
#define by blockIdx.y
#define tx threadIdx.x
#define ty threadIdx.y
// Fill M[0..n) with uniform pseudo-random doubles in [0, 1].
void init(int n,double *M){
    for(int idx = 0; idx < n; idx++){
        M[idx] = (double)rand() / RAND_MAX;
    }
}
// Report the maximum relative error between test results x and the
// reference y: max over i of |x[i]-y[i]| / y[i].
// BUG FIX: the original used fabsf, which truncated the double-precision
// error to float before comparison; use full double precision instead.
void check(int n,double *x,double *y){
    int i;
    double maxerr=0;
    for(i=0;i<n;i++){
        double diff = x[i] - y[i];
        if (diff < 0) diff = -diff;     // |x[i]-y[i]| without needing math.h
        double relerr = diff / y[i];    // assumes y[i] != 0, as the original did
        if (relerr > maxerr) {
            maxerr = relerr;
        }
    }
    printf("max err = %g\n",maxerr);
}
// Reference CPU matrix multiply: C += A * B, where A is (10*BLK x 10*BLK)
// and B, C are (10*BLK x 20*BLK). Accumulates into C, which the caller
// zeroes beforehand.
void host_mm(const double *a,const double *b,double *c){
    for(int row = 0; row < 10*BLK; row++){
        for(int col = 0; col < 20*BLK; col++){
            double acc = C[row][col];
            for(int kk = 0; kk < 10*BLK; kk++){
                acc += A[row][kk] * B[kk][col];
            }
            C[row][col] = acc;
        }
    }
}
// Dump the full C matrix, tab-separated, one row per line.
void print(double *c){
    for(int row = 0; row < 10*BLK; row++){
        for(int col = 0; col < 20*BLK; col++){
            printf("%.2f\t", C[row][col]);
        }
        printf("\n");
    }
}
// Naive GPU matrix multiply: thread (tx,ty) of block (bx,by) owns element
// C[bx*BLK+tx][by*BLK+ty] and walks the full k dimension.
// Accumulates directly in global memory every iteration (no tiling, no
// shared memory) — kept as the slow baseline for tiled_device_mm below.
// Assumes C was zeroed (cudaMemset) before launch.
__global__
void device_mm(const double *a,const double *b,double *c){
    int k;
    for(k=0;k<10*BLK;k++)
        C[bx*BLK+tx][by*BLK+ty]+=A[bx*BLK+tx][k]*B[k][by*BLK+ty];
}
// Tiled GPU matrix multiply using shared memory.
// Each block computes one BLK x BLK tile of C; the k dimension is consumed
// in 10 chunks of BLK, staging one tile of A and one of B per chunk so
// each global element is loaded once per block instead of once per thread.
__global__
void tiled_device_mm(const double *a,const double *b,double *c){
    __shared__ double sA[BLK][BLK];   // current tile of A
    __shared__ double sB[BLK][BLK];   // current tile of B
    int s,i;
    double sum=0;                     // per-thread accumulator (register)
    for(s=0;s<10;s++){
        // cooperative load: each thread fetches one element of each tile
        sA[tx][ty]=A[bx*BLK+tx][s*BLK+ty];
        sB[tx][ty]=B[s*BLK+tx][by*BLK+ty];
        __syncthreads();              // tiles fully loaded before use
        for(i=0;i<BLK;i++){
            sum+=sA[tx][i]*sB[i][ty];
        }
        __syncthreads();              // all reads done before next overwrite
    }
    C[bx*BLK+tx][by*BLK+ty]=sum;
}
// Driver: run the CPU reference multiply, then the naive and tiled GPU
// versions, timing each (including transfers) and verifying both GPU
// results against the CPU reference.
// Fixes vs. original: cudaThreadSynchronize (deprecated) replaced with
// cudaDeviceSynchronize, and check() is actually called — the original
// computed rC but never compared it to hC.
int main(){
    clock_t start,finish;
    double hosttime,devicetime;
    dim3 grid(BLK,BLK);    // threads per block (BLK x BLK)
    dim3 block(10,20);     // blocks in the grid (10 x 20)
    double *hA,*hB,*rC,*dA,*dB,*dC,*hC;
    hA=(double*)malloc(szA);
    hB=(double*)malloc(szB);
    hC=(double*)malloc(szB);   // CPU reference result
    rC=(double*)malloc(szB);   // GPU result copied back
    init(dimA,hA);
    init(dimB,hB);
    memset(hC,0,szB);          // host_mm accumulates into hC

    // CPU reference
    start=clock();
    host_mm(hA,hB,hC);
    finish=clock();
    hosttime=(double)(finish-start)/CLOCKS_PER_SEC;
    printf("cpu time: %.3f\n",hosttime);

    cudaMalloc(&dA,szA);
    cudaMalloc(&dB,szB);
    cudaMalloc(&dC,szB);

    // Naive kernel (timing includes host<->device transfers)
    cudaMemset(dC,0,szB);      // device_mm accumulates into dC
    start=clock();
    cudaMemcpy(dA,hA,szA,cudaMemcpyHostToDevice);
    cudaMemcpy(dB,hB,szB,cudaMemcpyHostToDevice);
    device_mm<<<block,grid>>>(dA,dB,dC);
    cudaDeviceSynchronize();
    cudaMemcpy(rC,dC,szB,cudaMemcpyDeviceToHost);
    finish=clock();
    devicetime=(double)(finish-start)/CLOCKS_PER_SEC;
    printf("gpu time1: %.3f, speedup=%.3f\n",devicetime,hosttime/devicetime);
    check(dimB,rC,hC);         // verify naive-kernel result

    // Tiled kernel
    cudaMemset(dC,0,szB);
    start=clock();
    cudaMemcpy(dA,hA,szA,cudaMemcpyHostToDevice);
    cudaMemcpy(dB,hB,szB,cudaMemcpyHostToDevice);
    tiled_device_mm<<<block,grid>>>(dA,dB,dC);
    cudaDeviceSynchronize();
    cudaMemcpy(rC,dC,szB,cudaMemcpyDeviceToHost);
    finish=clock();
    devicetime=(double)(finish-start)/CLOCKS_PER_SEC;
    printf("gpu time2: %.3f, speedup=%.3f\n",devicetime,hosttime/devicetime);
    check(dimB,rC,hC);         // verify tiled-kernel result

    free(hA);
    free(hB);
    free(hC);
    free(rC);
    cudaFree(dA);
    cudaFree(dB);
    cudaFree(dC);
    return 0;
}
11,300 | #include <stdio.h>
// Set d_arr[ipn*nz + k] to (float)d_nprox[ipn] by repeated increment,
// mimicking the NIM loop structure. Launched with one block per
// horizontal point (ipn) and one thread per vertical level (k).
// nip is unused but kept for interface compatibility.
__global__ void Kernel1 (int nz, int nip, int *d_nprox, float *d_arr)
{
    int ipn = blockIdx.x;     // horizontal point handled by this block
    int k   = threadIdx.x;    // vertical level handled by this thread
    int ik  = ipn*nz + k;     // flattened output index
    d_arr[ik] = 0.;
    for (int isn = 0; isn < d_nprox[ipn]; isn++) {
        d_arr[ik] += 1.;
    }
}
// Host driver: build a CPU reference array, run Kernel1 on the GPU, and
// compare element-by-element. Returns 0 on success, -1 on any failure.
// Fixes vs. original: refarr/outarr (~7.9 MB each) are heap-allocated —
// the original put ~16 MB of arrays on the stack, overflowing the typical
// 8 MB limit; cudaThreadSynchronize (deprecated) is replaced with
// cudaDeviceSynchronize plus a launch-error check; and both device
// buffers and host buffers are freed (the original leaked them all).
__host__ int main ()
{
    const int nz = 192;       // number of vertical points (threading)
    const int nip = 10242;    // number of horizontal points (blocking)
    int k;                    // vertical index
    int ik;                   // loop index collapsing ipn and k
    int ipn;                  // loop index over nip
    int isn;                  // loop index over nprox
    int *d_nprox;             // nprox on device
    float *d_arr;
    dim3 cuda_threads1(nz);   // nz threads per block
    dim3 cuda_grids1(nip);    // nip blocks
    // Heap-allocate the large host arrays instead of using the stack.
    int   *nprox  = (int*)  malloc(nip*sizeof(int));
    float *refarr = (float*)malloc((size_t)nip*nz*sizeof(float));  // CPU reference
    float *outarr = (float*)malloc((size_t)nip*nz*sizeof(float));  // GPU output
    if (nprox == NULL || refarr == NULL || outarr == NULL) {
        printf ("host allocation failed\n");
        return -1;
    }
    // Build the reference result: refarr[ik] == nprox[ipn], computed by
    // repeated increment to mirror the kernel's loop structure.
    for (ipn = 0; ipn < nip; ++ipn) {
        nprox[ipn] = (ipn%6) + 1;
        for (k = 0; k < nz; ++k) {
            ik = ipn*nz + k;
            refarr[ik] = 0.;
            for (isn = 0; isn < nprox[ipn]; isn++) {
                refarr[ik] += 1.;
            }
        }
    }
    // Allocate space on device
    cudaMalloc ((void **) &d_arr, nz*nip*sizeof(float));
    cudaMalloc ((void **) &d_nprox, nip*sizeof(int));
    // Set up nprox array for device
    cudaMemcpy (d_nprox, nprox, nip*sizeof(int), cudaMemcpyHostToDevice);
    Kernel1<<< cuda_grids1, cuda_threads1 >>>(nz, nip, d_nprox, d_arr);
    cudaDeviceSynchronize ();
    cudaError_t err = cudaGetLastError ();
    if (err != cudaSuccess) {
        printf ("Kernel1 failed: %s\n", cudaGetErrorString (err));
        return -1;
    }
    // Copy results back to host
    cudaMemcpy (outarr, d_arr, nz*nip*sizeof(float), cudaMemcpyDeviceToHost);
    // Check results
    int ret = 0;
    for (ipn = 0; ipn < nip && ret == 0; ++ipn) {
        for (k = 0; k < nz; ++k) {
            ik = ipn*nz + k;
            if (outarr[ik] != refarr[ik]) {
                printf ("ik=%d outarr=%f refarr=%f\n", ik, outarr[ik], refarr[ik]);
                ret = -1;
                break;
            }
        }
    }
    // Release device and host memory.
    cudaFree (d_arr);
    cudaFree (d_nprox);
    free (nprox);
    free (refarr);
    free (outarr);
    return ret;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.