serial_no
int64
1
24.2k
cuda_source
stringlengths
11
9.01M
1
#include "includes.h"

// Element-wise vector product: d_z[i] = d_x[i] * d_y[i] for i in [0, N).
// Expects a 1-D launch; threads whose global index falls past N do nothing.
__global__ void vecProductKernel(float *d_z, const float *d_x, const float *d_y, unsigned int N)
{
    const unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i >= N)
        return;  // guard the grid tail
    d_z[i] = d_x[i] * d_y[i];
}
2
#include "includes.h"

// STREAM "triad" benchmark kernel: c[i] = a[i] + scalar * b[i].
// Uses a grid-stride loop, so any 1-D launch configuration covers all
// `len` elements regardless of grid size.
__global__ void STREAM_Triad_double(double *a, double *b, double *c, double scalar, size_t len)
{
    const size_t stride = (size_t)blockDim.x * gridDim.x;
    for (size_t i = threadIdx.x + (size_t)blockIdx.x * blockDim.x; i < len; i += stride)
    {
        c[i] = a[i] + scalar * b[i];
    }
}
3
#include <iostream>
#include <cstdlib>      // atoi, srand, rand (previously relied on transitive includes)
#include "sys/time.h"
using namespace std;

// Wall-clock seconds elapsed between two gettimeofday() samples.
double timeInSeconds (timeval& starttime, timeval& stopstime)
{
    return 1e-6*(1e6*(stopstime.tv_sec - starttime.tv_sec) + (stopstime.tv_usec - starttime.tv_usec));
}

// Device-side vector storage; filled in by exercise steps 4 and 5.
__device__ double* dev_vector1 = 0;
__device__ double* dev_vector2 = 0;
__device__ double* dev_results = 0;

__global__ void device_vector_mult ()
{
    // IMPLEMENT ME 6: Multiply the threadIdx.x element of dev_vector1 by the
    // corresponding element of dev_vector2, and store in dev_results.
}

// Exercise skeleton: dot product on the CPU (timed), then the same steps
// on the GPU (left as numbered IMPLEMENT ME items).
int main (int argc, char** argv)
{
    int sizeOfVector = 100;
    if (argc > 1) sizeOfVector = atoi(argv[1]);

    // Declare and fill host-side arrays of doubles.
    double* vector1 = new double[sizeOfVector];
    double* vector2 = new double[sizeOfVector];
    double* results = new double[sizeOfVector];
    srand(42);  // fixed seed so runs are reproducible
    for (int i = 0; i < sizeOfVector; ++i) {
        vector1[i] = rand() % 100;
        vector2[i] = rand() % 100;
        results[i] = 0;
    }

    timeval startTime;
    timeval interTime;
    timeval stopsTime;
    gettimeofday(&startTime, NULL);

    // Use the CPU for this part.
    // IMPLEMENT ME 1: Multiply each element of vector1 by the corresponding
    // element in vector2 and store in results.
    for (int i = 0; i < sizeOfVector; ++i) {
        results[i] = vector1[i] * vector2[i];
    }
    gettimeofday(&interTime, NULL);

    double total = 0;
    // IMPLEMENT ME 2: Sum the results array and store the sum in total.
    // BUG FIX: the increment was written "+i" (a unary-plus no-op), so the
    // loop variable never advanced and the loop spun forever; it must be "++i".
    for (int i = 0; i < sizeOfVector; ++i) {
        total += results[i];
    }
    gettimeofday(&stopsTime, NULL);
    cout << "Dot product is : " << total << endl;

    // IMPLEMENT ME 3: Time the above operations together and separately
    // using 'gettimeofday'.
    cout << "Time for multiplication (seconds): " << timeInSeconds(startTime, interTime) << endl;
    cout << "Time for addition (seconds): " << timeInSeconds(interTime, stopsTime) << endl;
    cout << "Overall time (seconds): " << timeInSeconds(startTime, stopsTime) << endl;

    // Now on to the GPU!
    // IMPLEMENT ME 4: Use cudaMalloc to allocate space for the three device vectors.
    // IMPLEMENT ME 5: Use cudaMemcpy to initialise dev_vector1 and dev_vector2 to have
    // the same content as the host-side arrays.
    // IMPLEMENT ME 6: Put in the function body for device_vector_mult, above.
    // IMPLEMENT ME 7: Launch a kernel that runs device_vector_mult.
    // IMPLEMENT ME 8: Use cudaMemcpy to copy back dev_results into results.
    // IMPLEMENT ME 9: Calculate the dot product by summing over results, same as above.
    // IMPLEMENT ME 10: Take the time for the kernel launch and the addition,
    // and print out the results (including the dot product) as you did for the CPU.
    // IMPLEMENT ME 11: Write a reduction kernel that sums over dev_results, and launch it.
    // Time this operation and compare with the code that first moves the transformed data
    // to the host, then sums over it.

    // Release host allocations (previously leaked).
    delete [] vector1;
    delete [] vector2;
    delete [] results;
    return 0;
}
4
/******************************************************************************
 *cr
 *cr            (C) Copyright 2010 The Board of Trustees of the
 *cr                        University of Illinois
 *cr                         All Rights Reserved
 *cr
 ******************************************************************************/

#include <stdio.h>

#define TILE_SIZE 10

// Tiled matrix multiply: C = A x B, where A is (m x k), B is (k x n) and
// C is (m x n), all row-major. One TILE_SIZE x TILE_SIZE thread block
// computes one tile of C, staging matching tiles of A and B through
// shared memory. Assumes m, n (and effectively k) are multiples of TILE_SIZE.
__global__ void mysgemm(int m, int n, int k, const float *A, const float *B, float* C)
{
    const int bx = blockIdx.x;
    const int by = blockIdx.y;
    const int tx = threadIdx.x;
    const int ty = threadIdx.y;

    __shared__ float Acache[TILE_SIZE][TILE_SIZE];
    __shared__ float Bcache[TILE_SIZE][TILE_SIZE];

    // aBase walks right along A's row of tiles; bBase walks down B's
    // column of tiles; aEnd is one past the last tile start for this row.
    const int aEnd = k * TILE_SIZE * by + k;
    int aBase = k * TILE_SIZE * by;
    int bBase = bx * TILE_SIZE;

    float acc = 0;
    while (aBase < aEnd)
    {
        Acache[ty][tx] = A[aBase + k * ty + tx];
        Bcache[ty][tx] = B[bBase + n * ty + tx];
        __syncthreads();  // both tiles fully loaded before anyone reads them

        for (int i = 0; i < TILE_SIZE; i++)
            acc += Acache[ty][i] * Bcache[i][tx];
        __syncthreads();  // everyone done reading before the next overwrite

        aBase += TILE_SIZE;
        bBase += TILE_SIZE * n;
    }

    const int cBase = n * TILE_SIZE * by + TILE_SIZE * bx;
    C[cBase + n * ty + tx] = acc;
}

// Host wrapper with a cuBLAS-like signature. Only the restricted case
// transa/transb == 'N', alpha == 1, beta == 0 is supported; anything else
// prints a diagnostic and returns. The grid assumes n and m are exact
// multiples of TILE_SIZE (integer division, no tail blocks).
void basicSgemm(char transa, char transb, int m, int n, int k, float alpha, const float *A, int lda, const float *B, int ldb, float beta, float *C, int ldc)
{
    if ((transa != 'N') && (transa != 'n')) {
        printf("unsupported value of 'transa'\n");
        return;
    }
    if ((transb != 'N') && (transb != 'n')) {
        printf("unsupported value of 'transb'\n");
        return;
    }
    if ((alpha - 1.0f > 1e-10) || (alpha - 1.0f < -1e-10)) {
        printf("unsupported value of alpha\n");
        return;
    }
    if ((beta - 0.0f > 1e-10) || (beta - 0.0f < -1e-10)) {
        printf("unsupported value of beta\n");
        return;
    }

    // Thread block and grid dimensions: one block per output tile.
    const unsigned int BLOCK_SIZE = TILE_SIZE;
    dim3 blockDims(BLOCK_SIZE, BLOCK_SIZE);
    dim3 gridDims(n / blockDims.x, m / blockDims.y);
    mysgemm<<<gridDims, blockDims>>>(m, n, k, A, B, C);
}
5
///* // * LinearSysSolver.cpp // * // * Created on: Jul 8, 2013 // * Author: adm85 // */ // //#include <vector> //#include <iostream> //#include <time.h> //#include "LinearSysSolver.h" //#include "cublas_v2.h" //#include "cula.h" // // //LinearSysSolver::LinearSysSolver() //{ // // TODO Auto-generated constructor stub // //} // //LinearSysSolver::~LinearSysSolver() //{ // // TODO Auto-generated destructor stub //} // ///** // * Solves A*x=B for x. The result is stored in the vector pointed to by B. // */ //void LinearSysSolver::solveSystem(cuComplex* A, int M_A, int N_A, cuComplex* B, int M_B, int N_B) { // //Get the LU Factorization // cuComplex* LUMat = new cuComplex[M_A*N_A]; // int ipivLength = N_A; // int* ipiv = new int[ipivLength]; // getLUDecomposition(A, M_A, N_A, LUMat, ipiv, ipivLength); // // //Calculate P*b // swapPivotRows(B, M_B, N_B, ipiv, ipivLength); // // //Solve the system. The result will be stored in B // cublasSolveLinearSystem(LUMat, M_A, N_A, B, M_B, N_B); // // // DEBUG CODE ------- // //cuComplex* test = multiplyMatrices(xTxInv, N, N, xTx, N, N); // cuComplex* test = multiplyMatrices(A, M_A, N_A, B, M_B, N_B); // cout << endl << "X * XInv" << endl; // columnMajorPrintArray(test, M_A, N_B); // delete [] test; // // END DEBUG CODE --- // // delete [] LUMat; // delete [] ipiv; //} // // ///** // * Uses the CULA library to get the LU decomposition of the matrix. 
// */ //void LinearSysSolver::getLUDecomposition(cuComplex* x, int M, int N, cuComplex* LUMat, int* ipiv, int ipivLength) { // // culaDeviceFloatComplex* devxTx; // culaDeviceInt* devIPIV; // // cudaMalloc(&devxTx, M*N*sizeof(culaDeviceFloatComplex)); // cudaMalloc(&devIPIV, ipivLength*sizeof(culaDeviceInt)); // cudaMemcpy(devxTx, x, M*N*sizeof(culaDeviceFloatComplex), cudaMemcpyHostToDevice); // // culaStatus culaStat; // culaInitialize(); // // culaStat = culaDeviceCgetrf(M, N, devxTx, M, devIPIV); // if(culaStat != culaNoError) { // cout << "Cula Cgetrf failure" << endl; // } // // culaShutdown(); // // //LUMat = new cuComplex[M*N]; // cudaMemcpy(LUMat, devxTx, M*N*sizeof(culaDeviceFloatComplex), cudaMemcpyDeviceToHost); // cudaMemcpy(ipiv, devIPIV, ipivLength*sizeof(culaDeviceInt), cudaMemcpyDeviceToHost); // //// getL(L, LUMat, M, N); //// // cout << "LUMat Inside:" << endl; // columnMajorPrintArray(LUMat, M, N); //// //// getU(U, LUMat, M, N); //// cout << endl << "U" << endl; //// columnMajorPrintArray(U, M, N); // // cudaFree(devxTx); // cudaFree(devIPIV); //} // ///** // * Using the information from the CULA generated IPIF array, // * this function swaps rows as appropriate. // */ //void LinearSysSolver::swapPivotRows(cuComplex* x, int M, int N, int* ipiv, int ipivLength) { // //Temporary row vector // cuComplex rowVec[N]; // // //We use index 1 based ordering because this is what CULA returns // for(int i=1; i <= ipivLength; i++) { // //Check to see if the row swaps. This happens when element x of the ipif // //array is not equal to x. When element x is different, it means that row x // //and the row specified in element x swap places. 
// if(ipiv[i-1] != i) { // int startIndex = i-1; // //Copy the current row into the temporary row vector // for(int j = 0; j < N; j++) { // rowVec[j].x = x[startIndex+j*M].x; // rowVec[j].y = x[startIndex+j*M].y; // } // // //Copy the specified row into the current row // int specRowStart = ipiv[i-1]-1; // for(int j=0; j < N; j++) { // x[startIndex+j*M].x = x[specRowStart+j*M].x; // x[startIndex+j*M].y = x[specRowStart+j*M].y; // } // // //Copy the temp row into the specified row // for(int j=0; j < N; j++) { // x[specRowStart+j*M].x = rowVec[j].x; // x[specRowStart+j*M].y = rowVec[j].y; // } // } // } // //} // //void LinearSysSolver::cublasSolveLinearSystem(cuComplex* A, int M, int N, cuComplex* B, int M_B, int N_B) { // cuComplex* xInv = new cuComplex[M*N_B]; // // //Now put L, U, and the I matrix on the GPU // cublasStatus_t stat; // cublasHandle_t handle; // // cuComplex* devA; // cuComplex* devB; // cudaMalloc(&devA, M*N*sizeof(cuComplex)); // cudaMalloc(&devB, M_B*N_B*sizeof(cuComplex)); // // stat = cublasCreate(&handle); // if(stat != CUBLAS_STATUS_SUCCESS) { // cout << "Error in solver" << endl; // } // stat = cublasSetMatrix(M, N, sizeof(cuComplex), A, M, devA, M); // if(stat != CUBLAS_STATUS_SUCCESS) { // cout << "Error in solver" << endl; // } // stat = cublasSetMatrix(M_B, N_B, sizeof(cuComplex), B, M_B, devB, M_B); // if(stat != CUBLAS_STATUS_SUCCESS) { // cout << "Error in solver" << endl; // } // // //Set up Alpha // cuComplex alpha; // alpha.x = 1; // alpha.y = 0; // // //First solve L*y = P*b // stat = cublasCtrsm(handle, CUBLAS_SIDE_LEFT, CUBLAS_FILL_MODE_LOWER, CUBLAS_OP_N, CUBLAS_DIAG_UNIT, M, N, &alpha, devA, M, devB, M_B); // if(stat != CUBLAS_STATUS_SUCCESS) { // cout << "Error solving for y" << endl; // } // // //Then solve U*x = y // stat = cublasCtrsm(handle, CUBLAS_SIDE_LEFT, CUBLAS_FILL_MODE_UPPER, CUBLAS_OP_N, CUBLAS_DIAG_NON_UNIT, M, N, &alpha, devA, M, devB, M_B); // if(stat != CUBLAS_STATUS_SUCCESS) { // cout << "Error solving for 
x" << endl; // } // // //Get results, and store them in matrix B // cudaMemcpy(B, devB, M*N_B*sizeof(cuComplex), cudaMemcpyDeviceToHost); // // //Free resources // cublasDestroy(handle); // cudaFree(devA); // cudaFree(devB); //} // ///** // * Multiplies two matrices together. Result is stored in B on exit. // */ //cuComplex* LinearSysSolver::multiplyMatrices(cuComplex* A, int M_A, int N_A, cuComplex* B, int M_B, int N_B) { // cudaError_t cudaStat; // cublasStatus_t stat; // cublasHandle_t handle; // // cuComplex* devA; // cuComplex* devB; // cuComplex* devC; // cuComplex* alpha = new cuComplex; // cuComplex* beta = new cuComplex; // cuComplex* hostC = new cuComplex[M_A*N_B]; // alpha->x = 1; // alpha->y = 0; // beta->x = 0; // beta->y = 0; // // cudaStat = cudaMalloc(&devA, M_A*N_A*sizeof(cuComplex)); // cudaStat = cudaMalloc(&devB, M_B*N_B*sizeof(cuComplex)); // cudaStat = cudaMalloc(&devC, M_A*N_B*sizeof(cuComplex)); // if(cudaStat != cudaSuccess) { // cout << "Horrible failure!" << endl; // } // // stat = cublasCreate(&handle); // // stat = cublasSetMatrix(M_A, N_A, sizeof(cuComplex), A, M_A, devA, M_A); // if (stat != CUBLAS_STATUS_SUCCESS) { // cout << "Data download A failed" << endl; // } // stat = cublasSetMatrix(M_B, N_B, sizeof(cuComplex), B, M_B, devB, M_B); // if (stat != CUBLAS_STATUS_SUCCESS) { // cout << "Data download B failed" << endl; // } // // //Perform the multiply. // stat = cublasCgemm(handle, CUBLAS_OP_N, CUBLAS_OP_N, M_A, N_B, N_A, alpha, devA, M_A, devB, M_B, beta, devC, M_A); // // stat = cublasGetMatrix(M_A, N_B, sizeof(cuComplex), devC, M_A, hostC, M_A); // if (stat != CUBLAS_STATUS_SUCCESS) { // cout << "Failed to get devC to hostC" << endl; // cout << stat << endl; // } // // cudaFree(devA); // cudaFree(devB); // cudaFree(devC); // cublasDestroy(handle); // // delete alpha; // delete beta; // return hostC; // //} // ///** // * Prints out an array that is stored in column-major order in memory. 
// */ //void LinearSysSolver::columnMajorPrintArray(cuComplex* x, int M, int N) { // int realIndex; // cout << "------------------------------------------------------" << endl; // cout << " Printing Column Order Matrix " << endl; // cout << "------------------------------------------------------" << endl; // for(int i=0; i < M; i++) { // cout << "Row: " << (i+1) << " "; // for(int j=0; j < N; j++) { // realIndex = (M*j)+i; // cout << x[realIndex].x; // if(x[realIndex].y >= 0) { // cout << "+"; // } // cout << x[realIndex].y << "i "; // } // cout << endl; // } //}
6
// Each thread calculates fitness for one individual.
// An individual is 5 consecutive ints in `population` (coefficients of a
// degree-4 polynomial). Fitness is the mean squared error of the scaled
// polynomial against the (pointsX, pointsY) samples.
// Result: vector of fitness, one float per individual.
extern "C" __global__ void fitness_kernel(int populationCnt, int *population, int pointsCnt, float *pointsX, float *pointsY, float *result)
{
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < populationCnt) {
        int shift = 5*i;
        float fitness = 0.0f;
        for (int p = 0; p < pointsCnt; p++) {
            // Horner evaluation of the polynomial at *pointsX.
            float fApprox = population[shift + 4];
            for (int k = 3; k >= 0; k--) {
                fApprox = fApprox * (*pointsX) + population[shift + k];
            }
            fApprox /= 10.0f;  // fixed scaling applied to the model output
            ++pointsX;
            // BUG FIX: pow(float, 2) promotes to double-precision pow() on
            // the device; a plain float multiply is exact and much cheaper.
            float diff = fApprox - *(pointsY++);
            fitness += diff * diff;
        }
        result[i] = fitness / pointsCnt;
    }
}
7
#include "cuda_runtime.h"
#include <cstdio>
#include "time.h"

constexpr int segment_size = 1024;   // bytes handed to each thread
constexpr int threads = 512;         // total threads per round

// Device-global bump pointer; the host points it at a fresh managed pool
// each round, and alloc() atomically carves segments out of it.
__device__ char *pool;

// Each thread reserves one segment_size-byte slice by bumping `pool`.
// (The commented malloc() line is the device-heap variant this replaces.)
void __global__ alloc(int **pointers)
{
    auto index = blockIdx.x * blockDim.x + threadIdx.x;
    // pointers[index] = (int *)malloc(segment_size);
    pointers[index] = (int *)atomicAdd((unsigned long long *)&pool, segment_size);
}

// Writes 0..255 into each thread's segment to touch the memory.
void __global__ fill(int **pointers)
{
    auto index = blockIdx.x * blockDim.x + threadIdx.x;
    for (int i = 0; i < segment_size / sizeof(int); i++) {
        pointers[index][i] = i;
    }
}

// No-op for the bump-allocator variant (slices are released by freeing the
// whole pool). NOTE: this kernel shadows ::free inside this TU; the name is
// kept for interface compatibility with the malloc-based version.
void __global__ free(int **pointers)
{
    auto index = blockIdx.x * blockDim.x + threadIdx.x;
    // free(pointers[index]);
    (void)index;
}

// Runs 10 alloc/fill/free rounds against a fresh managed pool each round.
int main()
{
    int **pointers;
    cudaMalloc(&pointers, threads * sizeof(int *));
    int bd = 32;  // block dim; threads/bd blocks per launch
    for (int i = 0; i < 10; i++) {
        char *pool_;
        cudaMallocManaged(&pool_, segment_size * threads);
        cudaMemcpyToSymbol(pool, &pool_, sizeof(void *));
        alloc<<<threads / bd, bd>>>(pointers);
        fill<<<threads / bd, bd>>>(pointers);
        free<<<threads / bd, bd>>>(pointers);
        // BUG FIX: every round previously leaked its 512 KiB managed pool.
        // Wait for the kernels still using it, then release it.
        cudaDeviceSynchronize();
        cudaFree(pool_);
    }
    cudaFree(pointers);  // was leaked too
}
8
#include <algorithm>
#include <iostream>
#include <vector>

// Implemented in the CUDA translation unit: element-wise sum of two vectors.
std::vector<double> add(std::vector<double> inarr1, std::vector<double> inarr2);

// Builds two complementary arrays (arr1[i] + arr2[i] == arr_size for every i),
// pushes them through the CUDA `add` wrapper, and verifies each output element.
void test_integration()
{
    constexpr size_t arr_size = 2 << 24;

    std::cout << "Initializing test arrays...\n";
    std::vector<double> arr1;
    std::vector<double> arr2;
    arr1.reserve(arr_size);
    arr2.reserve(arr_size);
    for (size_t i = 0; i < arr_size; i++) {
        arr1.push_back(static_cast<double>(i));
        arr2.push_back(static_cast<double>(arr_size - i));
    }

    std::cout << "Calling the kernel wrapper...\n";
    auto result = add(std::move(arr1), std::move(arr2));

    std::cout << "Verifying results...\n";
    bool all_valid = true;
    for (double x : result) {
        if (x != static_cast<double>(arr_size)) {
            all_valid = false;
            break;
        }
    }
    if (all_valid) {
        std::cout << "All results were valid.\n";
    } else {
        std::cout << "At least one result is invalid.\n";
    }
}

// Entry point: announce, run the integration test, announce completion.
int main()
{
    std::cout << "Test CUDA integration\n";
    test_integration();
    std::cout << "Finished testing\n";
    return 0;
}
9
#include "Output_Layer_GPU_Kernels.cuh"

// YOLOv2-tiny (416x416) anchor box (w, h) pairs, in grid-cell units.
__constant__ float anchors_416[10] = { 1.08, 1.19, 3.42, 4.41, 6.63, 11.38, 9.42, 5.11, 16.62, 10.52 };

// Logistic sigmoid. BUG FIX: the original computed exp((double)-x), forcing
// a double-precision exponential in an otherwise all-float pipeline.
__device__ float Sigmoid(float x)
{
    float expValue = expf(-x);
    float result = 1.0f / (1.0f + expValue);
    return result;
}

// Transforms the raw x/y outputs into absolute pixel coordinates:
// (cell index + sigmoid(raw)) * downsampleFactor. The tensor is laid out
// channel-major: channel c occupies [c * tensorXYSize, (c+1) * tensorXYSize),
// with 4 box channels per anchor. One thread handles one (anchor, cell) pair.
// boundingBoxesPerGridCell and downsampleFactor come from the .cuh header.
__global__ void XY_BoundingBox_Coordinates_Transform_Kernel(float* input, int inputHeight, int inputWidth)
{
    int threadIndex = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
    int tensorXYSize = inputHeight * inputWidth;
    int tensorSize = boundingBoxesPerGridCell * tensorXYSize;
    if (threadIndex < tensorSize)
    {
        int threadDepthIndex = threadIndex % boundingBoxesPerGridCell;  // which anchor
        int threadXYIndex = threadIndex % tensorXYSize;                 // which grid cell
        int cy = threadXYIndex / inputWidth;
        int cx = threadXYIndex % inputWidth;
        input[threadDepthIndex * 4 * tensorXYSize + threadXYIndex] =
            (cx + Sigmoid(input[threadDepthIndex * 4 * tensorXYSize + threadXYIndex])) * downsampleFactor;
        input[(threadDepthIndex * 4 + 1) * tensorXYSize + threadXYIndex] =
            (cy + Sigmoid(input[(threadDepthIndex * 4 + 1) * tensorXYSize + threadXYIndex])) * downsampleFactor;
    }
}

// Transforms raw w/h outputs into pixel sizes (exp(raw) * anchor * stride)
// and squashes the objectness channel (offset 20) through a sigmoid.
__global__ void WH_BoundingBox_Transform_Kernel(float* input, int inputHeight, int inputWidth)
{
    int threadIndex = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
    int tensorXYSize = inputHeight * inputWidth;
    int tensorSize = boundingBoxesPerGridCell * tensorXYSize;
    if (threadIndex < tensorSize)
    {
        int threadDepthIndex = threadIndex % boundingBoxesPerGridCell;
        int threadXYIndex = threadIndex % tensorXYSize;
        input[(threadDepthIndex * 4 + 2) * tensorXYSize + threadXYIndex] =
            expf(input[(threadDepthIndex * 4 + 2) * tensorXYSize + threadXYIndex]) * anchors_416[2 * threadDepthIndex] * downsampleFactor;
        input[(threadDepthIndex * 4 + 3) * tensorXYSize + threadXYIndex] =
            expf(input[(threadDepthIndex * 4 + 3) * tensorXYSize + threadXYIndex]) * anchors_416[2 * threadDepthIndex + 1] * downsampleFactor;
        // Objectness score channel for this anchor.
        input[(20 + threadDepthIndex) * tensorXYSize + threadXYIndex] =
            Sigmoid(input[(20 + threadDepthIndex) * tensorXYSize + threadXYIndex]);
    }
}

// In-place, numerically-stable softmax over the class channels of each
// (anchor, cell) pair; class channels start at offset 25.
__global__ void Softmax_Kernel(float* input, int classesCount, int inputHeight, int inputWidth)
{
    int threadIndex = (blockIdx.x + blockIdx.y * gridDim.x) * blockDim.x + threadIdx.x;
    int tensorXYSize = inputHeight * inputWidth;
    int tensorSize = boundingBoxesPerGridCell * tensorXYSize;
    if (threadIndex < tensorSize)
    {
        int threadDepthIndex = threadIndex % boundingBoxesPerGridCell;
        int threadXYIndex = threadIndex % tensorXYSize;

        // Max-subtraction keeps expf() from overflowing.
        // NOTE(review): FLOAT_MIN is assumed to be the lowest representable
        // float (defined in the .cuh) — confirm it is not FLT_MIN (smallest positive).
        float maxClassProbability = FLOAT_MIN;
        for (int i = 0; i < classesCount; i++)
        {
            float classProbability = input[(25 + threadDepthIndex * classesCount + i) * tensorXYSize + threadXYIndex];
            if (classProbability > maxClassProbability)
            {
                maxClassProbability = classProbability;
            }
        }
        float classProbabilitiesSum = 0;
        for (int i = 0; i < classesCount; i++)
        {
            float exponent = expf(input[(25 + threadDepthIndex * classesCount + i) * tensorXYSize + threadXYIndex] - maxClassProbability);
            classProbabilitiesSum += exponent;
            input[(25 + threadDepthIndex * classesCount + i) * tensorXYSize + threadXYIndex] = exponent;
        }
        for (int i = 0; i < classesCount; i++)
        {
            input[(25 + threadDepthIndex * classesCount + i) * tensorXYSize + threadXYIndex] /= classProbabilitiesSum;
        }
    }
}

// Host wrapper: one thread per (anchor, cell) pair, 512 threads per block.
void WH_BoundingBox_Transform(float* input, int inputHeight, int inputWidth)
{
    int tensorSize = boundingBoxesPerGridCell * inputHeight * inputWidth;
    int gridXDim = ceil(tensorSize / 512.0);
    WH_BoundingBox_Transform_Kernel<<<gridXDim, 512>>>(input, inputHeight, inputWidth);
}

// Host wrapper for the x/y coordinate transform kernel.
void XY_BoundingBox_Coordinates_Transform(float* input, int inputHeight, int inputWidth)
{
    int tensorSize = boundingBoxesPerGridCell * inputHeight * inputWidth;
    int gridXDim = ceil(tensorSize / 512.0);
    XY_BoundingBox_Coordinates_Transform_Kernel<<<gridXDim, 512>>>(input, inputHeight, inputWidth);
}

// Host wrapper for the per-anchor class softmax kernel.
void Softmax_GPU(float* input, int classesCount, int inputHeight, int inputWidth)
{
    int tensorSize = boundingBoxesPerGridCell * inputHeight * inputWidth;
    int gridXDim = ceil(tensorSize / 512.0);
    Softmax_Kernel<<<gridXDim, 512>>>(input, classesCount, inputHeight, inputWidth);
}
10
#include <stdio.h>
#include <stdlib.h>      /* BUG FIX: malloc/free were used without this header */
#include <cuda_runtime.h>
#include <assert.h>

// Round-trip copy test: host -> device -> device -> host, then verify the
// data survived unchanged.
int main(int argc, char **argv){
    float *a_h, *b_h; // Host data
    float *a_d, *b_d; // Device data
    int N = 14, nBytes, i;

    printf("Start allocating\n");
    nBytes = N * sizeof(float);
    printf("Allocating in Host\n");
    a_h = (float*) malloc(nBytes);
    b_h = (float*) malloc(nBytes);
    printf("Allocating in Device\n");
    cudaMalloc((void**)&a_d, nBytes);
    cudaMalloc((void**)&b_d, nBytes);
    printf("End allocating\n");

    for(i=0; i<N; i++) a_h[i] = 100.0 + i;

    printf("Start memcpy\n");
    cudaMemcpy(a_d, a_h, nBytes, cudaMemcpyHostToDevice);
    cudaMemcpy(b_d, a_d, nBytes, cudaMemcpyDeviceToDevice);
    cudaMemcpy(b_h, b_d, nBytes, cudaMemcpyDeviceToHost);
    printf("End memcpy\n");

    // The final blocking DeviceToHost copy synchronizes, so b_h is valid here.
    for(i=0; i<N; i++) assert(a_h[i] == b_h[i]);

    free(a_h); free(b_h);
    cudaFree(a_d); cudaFree(b_d);
    return 0;
}
11
#include <cuda.h>

#define KERNEL_SIZE 3
#define BLOCK_SIZE 512

typedef signed int pixel_channel;
typedef unsigned long resolution;

// 3x3 sharpening kernel; host copy below is uploaded via cudaMemcpyToSymbol.
__constant__ pixel_channel kernel_cuda[KERNEL_SIZE * KERNEL_SIZE];

pixel_channel kernel_host[KERNEL_SIZE * KERNEL_SIZE] = { -1, -1, -1,
                                                         -1,  9, -1,
                                                         -1, -1, -1 };

// 3x3 convolution over one image channel, one block-wide strip of columns
// per block, iterating over rows (`line`) inside the kernel. Three rows of
// the image are staged in shared memory with a one-pixel halo on each side.
// Results are clamped to [0, 255].
// NOTE(review): the guard `> width` before the store looks like it should be
// `>= width`; likewise the halo loads read line-1/line+1 without clamping at
// the image edges — presumably the caller over-allocates (see the
// `size + BLOCK_SIZE` allocations below) to absorb this. Confirm.
__global__ void Pixel_Shared_Convolution(pixel_channel *channel_cuda, pixel_channel *rezult_cuda, resolution width, resolution lineQuantity)
{
    // [row: prev/cur/next][column within block, +2 for left/right halo]
    __shared__ pixel_channel sharedMemory [3][BLOCK_SIZE + 2];

    for(long line = 1; line < lineQuantity; line++)
    {
        long temp = blockIdx.x * BLOCK_SIZE + threadIdx.x;

        // Interior loads: each thread stages its column for rows line-1..line+1.
        sharedMemory [0][threadIdx.x + 1] = channel_cuda[temp + width * (line - 1)];
        sharedMemory [1][threadIdx.x + 1] = channel_cuda[temp + width * line];
        sharedMemory [2][threadIdx.x + 1] = channel_cuda[temp + width * (line + 1)];

        // Left halo: thread 0 re-reads one column to the left (except for block 0,
        // which duplicates its own first column).
        if(threadIdx.x == 0)
        {
            if(blockIdx.x != 0)
                temp--;
            sharedMemory [0][0] = channel_cuda[temp + width * (line-1)];
            sharedMemory [1][0] = channel_cuda[temp + width * line];
            sharedMemory [2][0] = channel_cuda[temp + width * (line+1)];
        }

        // Right halo: last thread reads one column to the right.
        if(threadIdx.x == (BLOCK_SIZE - 1))
        {
            temp++;
            sharedMemory [0][BLOCK_SIZE + 1] = channel_cuda[temp + width * (line - 1)];
            sharedMemory [1][BLOCK_SIZE + 1] = channel_cuda[temp + width * line];
            sharedMemory [2][BLOCK_SIZE + 1] = channel_cuda[temp + width * (line + 1)];
        }

        __syncthreads();  // tile + halo fully staged before the stencil reads

        // 3x3 stencil; note the [j][i] transposed access into sharedMemory.
        long Sum = 0;
        for (int i = 0; i < KERNEL_SIZE; i++)
            for (int j = 0; j < KERNEL_SIZE; j++)
                Sum += sharedMemory[j][threadIdx.x + i] * kernel_cuda[i * 3 + j];

        // Clamp to valid 8-bit pixel range.
        if (Sum < 0)
            Sum = 0;
        if (Sum > 255)
            Sum = 255;

        __syncthreads();  // all reads done before the next iteration overwrites the tile

        // Skip the store for threads past the row end (see NOTE above re: off-by-one).
        if((blockIdx.x * BLOCK_SIZE + threadIdx.x) > width)
            continue;

        rezult_cuda[blockIdx.x * BLOCK_SIZE + threadIdx.x + width * line] = Sum;
    }
    __syncthreads();
    return;
}

// Convolves the three channels of `image` (R, G, B as separate planes) on the
// GPU, one CUDA stream per channel, writing results back into `image` in place.
// Buffers are over-allocated by BLOCK_SIZE elements (see kernel NOTE).
// NOTE(review): streams are destroyed right after the async copies are queued;
// the later cudaFree/cudaDeviceReset calls synchronize, so the data does land,
// but the per-stream overlap window is short — confirm this is intended.
extern "C" __host__ pixel_channel** asyncConvolution(pixel_channel **image, resolution width, resolution height)
{
    pixel_channel **channel_cuda;
    channel_cuda = (pixel_channel**)malloc(3*sizeof(pixel_channel*));
    pixel_channel **rezult_cuda;
    rezult_cuda = (pixel_channel**)malloc(3*sizeof(pixel_channel*));

    resolution size = width * height;

    // Pin the host channel buffers so the async copies below can be truly async.
    cudaHostRegister(image[0], (size + BLOCK_SIZE) * sizeof(pixel_channel), cudaHostRegisterMapped);
    cudaHostRegister(image[1], (size + BLOCK_SIZE) * sizeof(pixel_channel), cudaHostRegisterMapped);
    cudaHostRegister(image[2], (size + BLOCK_SIZE) * sizeof(pixel_channel), cudaHostRegisterMapped);

    cudaMalloc((void **)& rezult_cuda[0], (size + BLOCK_SIZE) * sizeof(pixel_channel));
    cudaMalloc((void **)& rezult_cuda[1], (size + BLOCK_SIZE) * sizeof(pixel_channel));
    cudaMalloc((void **)& rezult_cuda[2], (size + BLOCK_SIZE) * sizeof(pixel_channel));

    cudaMalloc((void **)& channel_cuda[0], (size + BLOCK_SIZE) * sizeof(pixel_channel));;
    cudaMalloc((void **)& channel_cuda[1], (size + BLOCK_SIZE) * sizeof(pixel_channel));
    cudaMalloc((void **)& channel_cuda[2], (size + BLOCK_SIZE) * sizeof(pixel_channel));

    // Upload the 3x3 filter into constant memory.
    cudaMemcpyToSymbol(kernel_cuda, kernel_host, 9 * sizeof(pixel_channel), 0, cudaMemcpyHostToDevice);

    // Ceil-divide the (width - 2) interior columns into BLOCK_SIZE-wide strips.
    resolution block_count = 0;
    if(((width - 2)%BLOCK_SIZE) == 0)
        block_count = (width - 2)/BLOCK_SIZE;
    else
        block_count = (width - 2)/BLOCK_SIZE + 1;

    dim3 gridSize = dim3(block_count, 1, 1);
    dim3 blockSize = dim3(BLOCK_SIZE, 1, 1);

    cudaStream_t stream[3];
    for(int i = 0; i < 3; i++)
    {
        cudaStreamCreate(&stream[i]);
        // Per-channel pipeline: H2D copy -> convolution -> D2H copy, all on stream[i].
        cudaMemcpyAsync(channel_cuda[i], image[i], size*sizeof(pixel_channel), cudaMemcpyHostToDevice, stream[i]);
        Pixel_Shared_Convolution<<<gridSize, blockSize, 0, stream[i]>>>(channel_cuda[i], rezult_cuda[i], width, height);
        cudaMemcpyAsync(image[i], rezult_cuda[i], size*sizeof(pixel_channel), cudaMemcpyDeviceToHost,stream[i]);
        cudaStreamDestroy(stream[i]);
    }

    for(int i=0;i<3;i++)
    {
        cudaFree(rezult_cuda[i]);
        cudaFree(channel_cuda[i]);
    }

    cudaDeviceReset();
    return image;
}
12
#include <stdio.h>

// Adds two integers on the device; the result is written to *c.
__global__ void add(int a, int b, int *c)
{
    *c = a + b;
}

// Launches add(2, 7), copies the result back, and prints it, checking each
// CUDA call's status along the way.
int main( void )
{
    int c;
    int *dev_c;

    // BUG FIX: the allocation size was sizeof(&dev_c) — the size of an int**
    // (8 bytes), not of the int being stored. It happened to be big enough,
    // but the intent is one int.
    cudaError_t err = cudaMalloc((void**)&dev_c, sizeof(*dev_c));
    if(err != cudaSuccess) {
        printf("The error is %s\n", cudaGetErrorString(err));
    }

    add<<<1,1>>>(2, 7, dev_c);
    // Peek reads the sticky launch error without clearing; GetLastError clears it.
    if(cudaPeekAtLastError() != cudaSuccess) {
        printf("The error is %s\n", cudaGetErrorString(cudaGetLastError()));
    }

    // Blocking copy: also synchronizes with the kernel above.
    cudaError_t err2 = cudaMemcpy( &c, dev_c, sizeof(c), cudaMemcpyDeviceToHost);
    if(err2 != cudaSuccess) {
        printf("The error is %s\n", cudaGetErrorString(err2));
    }

    printf("2 + 7 = %d\n", c);
    cudaFree(dev_c);
    return 0;
}
13
#include <iostream>
#include <cuda.h>
#include <cuda_runtime.h>
#include <device_launch_parameters.h>
#include <memory>

/* Enumerate the CUDA-capable GPUs on this machine and print their key
   properties (original comments were mojibake; translated to English). */
int main(void)
{
    int device_count = 0;
    cudaGetDeviceCount(&device_count);  // number of devices that support CUDA
    if (device_count ==0)
    {
        printf("There are no available device(s) that support CUDA\n");
    }
    else
    {
        printf("Detected %d CUDA Capable device(s)\n", device_count);
    }

    /* cudaDeviceProp identifies the device and the software stack: `name` is
       the device name string; cudaDriverGetVersion / cudaRuntimeGetVersion
       report the CUDA driver and runtime versions; multiProcessorCount gives
       the number of SMs and clockRate the clock frequency in kHz. */
    int device;
    cudaDeviceProp device_Property;
    cudaGetDevice(&device);
    cudaGetDeviceProperties(&device_Property, device);
    printf("\nDevice %d:\"%s\"\n", device, device_Property.name);

    int driver_Version;
    int runtime_Version;
    cudaDriverGetVersion(&driver_Version);
    cudaRuntimeGetVersion(&runtime_Version);
    printf("CUDA Driver Version / Runtime Version %d.%d / %d.%d\n", driver_Version / 1000, (driver_Version % 100) / 10, runtime_Version / 1000, (runtime_Version % 100) / 10);
    printf("Total amount of global memory:%.0f Mbytes (%1lu bytes)\n", (float)device_Property.totalGlobalMem / 1048576.0f, (unsigned long long)device_Property.totalGlobalMem);
    // NOTE(review): this format string has no trailing newline, so the clock
    // line below runs on after it — confirm that is intended.
    printf("(%2d) Multiprocessors", device_Property.multiProcessorCount);
    printf("GPU Max Clock rate:%.0f MHz (%0.2f GHz)\n", device_Property.clockRate * 1e-3f, device_Property.clockRate * 1e-6f);

    /* Thread/block/grid limits: maxThreadsPerMultiProcessor and
       maxThreadsPerBlock bound how many threads can be resident per SM and
       per block; maxThreadsDim and maxGridSize are 3-element arrays giving
       the per-dimension (x, y, z) limits for blocks and grids. */
    printf("Maximum number of threads per multiprocessor:%d\n", device_Property.maxThreadsPerMultiProcessor);
    printf("Maximum number of threads per block:%d\n", device_Property.maxThreadsPerBlock);
    printf("Max dimension size of a thread block (x,y,z):(%d,%d,%d)\n", device_Property.maxThreadsDim[0], device_Property.maxThreadsDim[1], device_Property.maxThreadsDim[2]);
    printf("Max dimension size of a grid size (x,y,z):(%d,%d,%d)\n", device_Property.maxGridSize[0], device_Property.maxGridSize[1], device_Property.maxGridSize[2]);
}
14
#include <stdio.h>
#include <stdlib.h>
#include <cuda_runtime.h>
#include <device_launch_parameters.h>
#include <time.h>

#define AND 0
#define OR 1
#define NAND 2
#define NOR 3
#define XOR 4
#define XNOR 5

// One thread evaluates one gate. d_input holds triples of chars:
// operand A ('0'/'1'), operand B ('0'/'1'), gate id ('0'..'5').
// d_output receives one result digit per gate. `size` is the length of
// d_input in chars.
__global__ void computeLogicGates(char* d_input, char* d_output, int size)
{
    // calculate the index of the thread
    int index = threadIdx.x + blockIdx.x * blockDim.x;
    int input_index = index * 3;
    // if the index is inside the range of the array
    if (input_index < size) {
        // BUG FIX: `output` was uninitialized, so an unrecognized gate id
        // stored garbage; default to 0 ('0' after conversion below).
        int output = 0;
        switch (d_input[input_index+2] - '0') {
            case AND:
                if (d_input[input_index] == '1' && d_input[input_index+1] == '1') output = 1;
                else output = 0;
                break;
            case OR:
                if (d_input[input_index] == '0' && d_input[input_index+1] == '0') output = 0;
                else output = 1;
                break;
            case NAND:
                if (d_input[input_index] == '1' && d_input[input_index+1] == '1') output = 0;
                else output = 1;
                break;
            case NOR:
                if (d_input[input_index] == '0' && d_input[input_index+1] == '0') output = 1;
                else output = 0;
                break;
            case XOR:
                if (d_input[input_index] == d_input[input_index+1]) output = 0;
                else output = 1;
                break;
            case XNOR:
                if (d_input[input_index] == d_input[input_index+1]) output = 1;
                else output = 0;
                break;
        }
        d_output[index] = output + '0';
    }
}

// Reads `input_size` gate lines from the input file, evaluates them on the
// GPU, and writes one result per line to the output file. Usage:
//   prog <input path> <input length> <output path>
int main(int argc, char* argv[])
{
    // check if necessary arguments are provided
    if (argc == 1) {
        return printf("No arguments are provided! Please provide the input file path, input file length and the output file path!");
    } else if (argc == 2) {
        return printf("Input file length and output file path are not provided!");
    } else if (argc == 3) {
        return printf("Output file path is not provided!");
    }

    char* input_file = argv[1];
    int input_size = atoi(argv[2]);
    char* output_file = argv[3];

    // read the input file
    FILE* input_fptr;
    input_fptr = fopen(input_file, "r");
    if (!input_fptr) return printf("Error opening the input file!");

    // read the file line by line and populate input_data array
    char line[10];

    // allocate CUDA variables (managed memory: visible to host and device)
    char* d_input;
    char* d_output;
    int input_array_size = input_size * 3 * sizeof(char);
    int output_array_size = input_size * sizeof(char);
    cudaMallocManaged(&d_input, input_array_size);
    cudaMallocManaged(&d_output, output_array_size);

    // each line looks like "a b g"; pick out columns 0, 2 and 4
    for (int i = 0; i < input_size; i++) {
        fgets(line, 9, input_fptr);
        d_input[i*3] = line[0];
        d_input[i*3+1] = line[2];
        d_input[i*3+2] = line[4];
    }

    // close file pointer
    fclose(input_fptr);

    clock_t start = clock();
    // call device kernel. PERF FIX: was <<<input_size, 1>>> (one thread per
    // block); 256-thread blocks with a ceil-divided grid cover the same
    // indices (the kernel bounds-checks) far more efficiently.
    int block_count = (input_size + 255) / 256;
    computeLogicGates<<<block_count, 256>>>(d_input, d_output, input_array_size);
    // synchronize threads before the host reads d_output
    cudaDeviceSynchronize();
    clock_t end = clock();

    // write the results into the output file
    FILE* output_fptr;
    output_fptr = fopen(output_file, "w");
    if(!output_fptr) return printf("Error opening output file!");
    for (int i = 0; i < input_size; i++) {
        char data[3];
        sprintf(data, "%c\n", d_output[i]);
        fputs(data, output_fptr);
    }

    // close file pointer
    fclose(output_fptr);

    // free up device memory
    cudaFree(d_input);
    cudaFree(d_output);

    // calculate execution time
    double runtime = (double) (end-start) / CLOCKS_PER_SEC;
    printf("Execution time: %f ms\n", runtime * 1000);
    return 0;
}
15
#include "Matrix.cuh"
#include <cstring>
#include <fstream>
#include <ctime>
#include <device_functions.h>

// __syncthreads() only exists when compiling device code with nvcc.
#ifdef __CUDACC__
#define cuda_SYNCTHREADS() __syncthreads()
#else
#define cuda_SYNCTHREADS()
#endif

// Zero() dispatches to the CPU implementation.
#define Zero ZeroCPU
#define PRINT_LOG false
//#define TARGET_RESIDUE ((double)1.0e-9);
const double TARGET_RESIDUE = 1.0e-6;

// Allocate a cols x rows matrix in CUDA unified memory (contents uninitialized).
Matrix::Matrix(int cols, int rows) : cols(cols), rows(rows)
{
	if (PRINT_LOG) printf("Matrix constructor\n");
	cudaMallocManaged(&mat, cols * rows * sizeof(double));
}

unsigned Matrix::getRows() const
{
	return rows;
}

unsigned Matrix::getCols() const
{
	return cols;
}

// Adopt an already-allocated managed buffer; the Matrix takes ownership of `mat`.
Matrix::Matrix(int cols, int rows, double* mat) : cols(cols), rows(rows), mat(mat)
{
	if (PRINT_LOG) printf("Matrix constructor\n");
	//cudaMallocManaged(&mat, cols * rows * sizeof(double));
}

// Deep copy: fresh managed allocation + host-side memcpy.
Matrix::Matrix(const Matrix& a)
{
	if (PRINT_LOG) printf("Matrix copy constructor\n");
	rows = a.rows;
	cols = a.cols;
	cudaMallocManaged(&mat, cols * rows * sizeof(double));
	std::memcpy(mat, a.mat, cols * rows * sizeof(double));
}

// Copy assignment: release the old buffer, then deep-copy the source.
void Matrix::operator=(const Matrix& a)
{
	if (PRINT_LOG) printf("Matrix assignment operator\n");
	rows = a.rows;
	cols = a.cols;
	cudaFree(mat);
	cudaMallocManaged(&mat, cols * rows * sizeof(double));
	std::memcpy(mat, a.mat, cols * rows * sizeof(double));
}

// 1x1 placeholder used where a Matrix out-parameter must exist before it is filled.
Matrix Matrix::Stub()
{
	return Matrix(1, 1);
}

// cols x rows matrix of zeros, filled on the host.
Matrix Matrix::ZeroCPU(int cols, int rows)
{
	double* mat;
	cudaMallocManaged(&mat, cols * rows * sizeof(double));
	cudaDeviceSynchronize();
	for (long i = 0; i < cols * rows; i++)
	{
		mat[i] = 0.0f;
	}
	return Matrix(cols, rows, mat);
}

// cols x rows matrix of ones, filled on the host.
Matrix Matrix::OneCPU(int cols, int rows)
{
	double* mat;
	cudaMallocManaged(&mat, cols * rows * sizeof(double));
	for (long i = 0; i < cols * rows; i++)
	{
		mat[i] = 1.0f;
	}
	return Matrix(cols, rows, mat);
}

// Grid-stride zero-fill of an n-element buffer.
__global__ void ZeroGPUKernel(const int n, double* A)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int i = index; i < n; i += stride)
	{
		// BUG FIX: was `A[index] = 0.0f;` — each thread repeatedly zeroed its
		// first element and left every element beyond one grid-span untouched.
		A[i] = 0.0f;
	}
}

// cols x rows matrix of zeros, filled on the device.
Matrix Matrix::ZeroGPU(int cols, int rows)
{
	double* mat;
	cudaMallocManaged(&mat, cols * rows * sizeof(double));
	int blockCount = (cols * rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
	ZeroGPUKernel <<<blockCount, BLOCK_SIZE >>>(cols * rows, mat);
	cudaDeviceSynchronize();
	return Matrix(cols, rows, mat);
}

// Identity matrix (square only).
Matrix Matrix::IdentityCPU(int cols, int rows)
{
	if (cols != rows) throw "Identity matrix must be square";
	auto ret = Zero(cols, rows);
	for (int i = 0; i < cols; ++i)
	{
		ret.mat[i * cols + i] = 1.0f;
	}
	return ret;
}

// Load a matrix from a text file: "cols rows" header followed by row-major values.
Matrix Matrix::FromFile(std::string path)
{
	std::fstream reader;
	int cols, rows;
	reader.open(path, std::ios::in);
	reader.seekp(0);
	reader >> cols;
	reader >> rows;
	double* mat;
	cudaMallocManaged(&mat, cols * rows * sizeof(double));
	for (int i = 0; i < cols * rows; ++i)
	{
		reader >> mat[i];
	}
	reader.close();
	return Matrix(cols, rows, mat);
}

// Jacobi iteration: x <- M*x + invD*b with M = -invD*(L+U), until the
// residue ||A*x - b|| drops below TARGET_RESIDUE.
Matrix Matrix::Jacobi(const Matrix& A, const Matrix& b)
{
	auto LU = A;
	auto invD = (LU.separateDiagonal());
	auto x = ZeroCPU(1, A.getRows());
	invD.inverseDiagonalInPlaceCPU();
	auto M = -invD * LU;
	auto temp = invD * b;
	double res = 1;
	int counter = 0;
	do
	{
		x = (M * x + temp);
		//if (counter++ == 9)
		//{
		//	counter = 0;
		res = (A * x - b).vectorEuclideanNorm();
		//	printf("res: %f\n", res);
		//}
		counter++;
	} while (res > TARGET_RESIDUE);
	printf("res: %d \n", counter);
	return x;
}

// Jacobi with preallocated buffers and in-place GPU helpers; the residue is
// only recomputed every 10th iteration to avoid the expensive norm.
Matrix Matrix::JacobiOptimal(const Matrix& A, const Matrix& b)
{
	// ~25% of the runtime (80000us) was likely memory copies between device and host
	//auto LU = A; //->
	auto LU = Matrix(A.cols, A.rows);
	copyGPU(LU, A);
	// 32x performance gain
	//auto invD = (LU.separateDiagonal());
	//invD.inverseDiagonalInPlaceCPU();
	auto invD = Matrix(A.cols, A.rows);
	separateDiagonalAndInverseGPU(invD, LU);
	auto x = ZeroGPU(1, A.getRows());
	//auto temp1 = invD * b;
	auto temp1 = Matrix(1, A.rows);
	refMul(temp1, invD, b);
	//auto M = -invD * LU;
	//auto M = Matrix(A.cols, A.rows);
	auto M = Matrix(A.cols, A.rows);
	additiveInverseInPlaceGPU(invD);
	refMulDiag(M, invD, LU);
	double res = 100;
	// counter starts at 9 so the residue is computed on the first pass
	int counter = 9;
	auto memmul = Matrix(1, A.rows);
	auto _Amulx = Matrix(1, A.rows);
	auto resVector = Matrix(1, A.rows);
	do
	{
		refMul(memmul, M, x);
		refAdd(x, memmul, temp1);
		//x = (M * x + temp);
		if (counter++ == 9)
		{
			counter = 0;
			refMul(_Amulx, A, x);
			refSub(resVector, _Amulx, b);
			res = resVector.vectorEuclideanNorm();
			//printf("res: %f\n", res);
		}
	} while (res > TARGET_RESIDUE);
	return x;
}

// Solve L*x = b for lower-triangular L by forward substitution (CPU).
Matrix Matrix::ForwardSubstitution(const Matrix& A, const Matrix& b)
{
	if (!(A.cols == A.rows && A.rows == b.rows)) throw "Incorrect dimensions";
	auto x = Matrix(1, A.getRows());
	for (int i = 0; i < x.rows; ++i)
	{
		double sum = 0;
		for (int j = 0; j < i; ++j)
		{
			sum += A.mat[i * A.cols + j] * x.mat[j];
		}
		x.mat[i] = (b.mat[i] - sum) / A.mat[i * A.cols + i];
	}
	return x;
}

// Solve U*x = b for upper-triangular U by backward substitution (CPU).
Matrix Matrix::BackwardSubstitution(const Matrix& A, const Matrix& b)
{
	if (!(A.cols == A.rows && A.rows == b.rows)) throw "Incorrect dimensions";
	auto x = Matrix(1, A.getRows());
	// NOTE(review): this seed value is overwritten by the i == 0 iteration of
	// the loop below; kept for byte-compatibility with the original behavior.
	x.mat[0] = b.mat[0] / A.mat[0];
	for (int i = x.rows - 1; i >= 0; --i)
	{
		double sum = 0;
		for (int j = i + 1; j < A.cols; ++j)
		{
			sum += A.mat[i * A.cols + j] * x.mat[j];
		}
		x.mat[i] = (b.mat[i] - sum) / A.mat[i * A.cols + i];
	}
	return x;
}

// Gauss-Seidel iteration using forward substitution on (D+L), CPU version.
Matrix Matrix::GaussSeidel(const Matrix& A, const Matrix& b)
{
	auto DL = -(A.lowerCPU() + A.diagonalCPU());
	auto U = A.upperCPU();
	auto x = ZeroCPU(1, A.getRows());
	auto temp = Matrix::ForwardSubstitution(DL, b);
	double res = 1;
	int counter = 0;
	do
	{
		//x = -(Matrix::ForwardSubstitution(DL, U * x)) + temp;
		x = (Matrix::ForwardSubstitution(DL, U * x)) + temp;
		//if (counter++ == 9)
		//{
		counter++;
		res = (A * (-x) - b).vectorEuclideanNorm();
		//}
		//printf("res: %f \n", res);
		//(x).print();
	} while (res > TARGET_RESIDUE);
	printf("res: %d \n", counter);
	return -x;
}

// Gauss-Seidel with preallocated buffers and GPU helpers; residue checked
// every 10th iteration (counter starts at 9 so the first pass computes it).
Matrix Matrix::GaussSeidelOptimal(const Matrix& A, const Matrix& b)
{
	//auto DL = (A.lowerCPU() + A.diagonalCPU());
	//auto U = A.upperCPU();
	auto DL = Matrix(A.cols, A.rows);
	auto U = Matrix(A.cols, A.rows);
	copyGPU(DL, A);
	separateUpperGPU(U, DL);
	//auto DL = (A.lowerCPU() + A.diagonalCPU());
	//auto U = A.upperCPU();
	auto x = ZeroCPU(1, A.getRows());
	auto temp = Matrix::ForwardSubstitution(DL, b);
	auto memmul = Matrix(1, A.rows);
	auto memforwardsub = Matrix(1, A.rows);
	auto memmulres = Matrix(1, A.rows);
	auto resVector = Matrix(1, A.rows);
	double res;
	int counter = 9;
	do
	{
		//x = -(Matrix::ForwardSubstitution(DL, U * x)) + temp;
		refMul(memmul, U, x);
		forwardSubstitutionGPU(memforwardsub, DL, memmul);
		//memforwardsub = Matrix::ForwardSubstitution(DL, memmul);
		//double xd = maxError(memforwardsub, memforwardsub2);
		additiveInverseInPlaceGPU(memforwardsub);
		refAdd(x, memforwardsub, temp);
		//x = memforwardsub + temp;
		if (counter++ == 9)
		{
			counter = 0;
			refMul(memmulres, A, x);
			refSub(resVector, memmulres, b);
			res = resVector.vectorEuclideanNorm();
		}
		//printf("res: %f \n", res);
		//(x).print();
	} while (res > TARGET_RESIDUE);
	return x;
}

// Direct solve via Doolittle LU decomposition: L*y = b, then U*x = y.
Matrix Matrix::LUMehtod(const Matrix& A, const Matrix& b)
{
	Matrix L = Matrix::Stub();
	Matrix U = Matrix::Stub();
	Matrix::doolitle(L, U, A);
	auto y = Matrix::ForwardSubstitution(L, b);
	return Matrix::BackwardSubstitution(U, y);
}

// NOTE(review): currently identical to LUMehtod — no GPU path implemented yet.
Matrix Matrix::LUMehtodOptimal(const Matrix& A, const Matrix& b)
{
	Matrix L = Matrix::Stub();
	Matrix U = Matrix::Stub();
	Matrix::doolitle(L, U, A);
	auto y = Matrix::ForwardSubstitution(L, b);
	return Matrix::BackwardSubstitution(U, y);
}

// Doolittle decomposition A = L*U with unit diagonal on L (CPU).
void Matrix::doolitle(Matrix& L, Matrix& U, const Matrix& A)
{
	if (A.cols != A.rows) throw "Matrix is not square";
	L = OneCPU(A.cols, A.rows).diagonalCPU();
	U = ZeroCPU(A.cols, A.rows);
	for (int j = 0; j < A.cols; ++j)
	{
		// U row entries for column j
		for (int i = 0; i <= j; ++i)
		{
			double sum = 0;
			for (int k = 0; k < i; ++k)
			{
				sum += L.mat[i * L.cols + k] * U.mat[k * U.cols + j];
			}
			U.mat[i * U.cols + j] = A.mat[i * U.cols + j] - sum;
		}
		// L column entries below the diagonal
		for (int i = j + 1; i < A.cols; ++i)
		{
			double sum = 0;
			for (int k = 0; k < j; ++k)
			{
				sum += L.mat[i * L.cols + k] * U.mat[k * U.cols + j];
			}
			L.mat[i * U.cols + j] = 1 / U.mat[j * U.cols + j] * (A.mat[i * U.cols + j] - sum);
		}
	}
}
// Grid-stride element copy A[j] = B[j] (currently unused by doolitleGPU below).
__global__ void doolitleKernel(const int n, double* A, double* B)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		A[j] = B[j];
	}
}

// NOTE(review): stub — the kernel launch is commented out, so this only
// synchronizes the device and leaves L and U untouched.
void Matrix::doolitleGPU(Matrix& L, Matrix& U, const Matrix& A)
{
	int blockCount = (A.rows * A.cols + BLOCK_SIZE - 1) / BLOCK_SIZE;
	//doolitleKernel <<< blockCount, BLOCK_SIZE >>> (A.rows * A.cols, A.mat);
	cudaDeviceSynchronize();
}

// Build a random diagonally-dominant test system: A (size x size), a random
// solution x, and b = A*x.
void Matrix::createTest(Matrix& A, Matrix& b, Matrix& x, int size)
{
	srand(time(NULL));
	const int constrange = 100;
	// NOTE(review): the lambda ignores its `range` argument — it always
	// returns a value in [-100, 100).
	const auto r = [](int range)-> double { return (double)(rand() % 20000) / 100 - 100; };
	x = Matrix(1, size);
	A = Matrix(size, size);
	b = Matrix(1, size);
	for (int i = 0; i < size; ++i)
	{
		x.mat[i] = r(100);
	}
	for (int i = 0; i < size; ++i)
	{
		double sum = 0;
		for (int j = 0; j < size; ++j)
		{
			if (i != j)
			{
				A.mat[i * size + j] = r(100);
				sum += fabs(A.mat[i * size + j]);
			}
			// diagonal magnitude exceeds the row's off-diagonal sum (dominance)
			double randomized = r(100);
			if (randomized > 0)
			{
				A.mat[i * size + i] = sum + r(10);
			}
			else
			{
				A.mat[i * size + i] = -sum + r(10);
			}
		}
	}
	// b = A * x
	for (int i = 0; i < size; ++i)
	{
		double sum = 0;
		for (int j = 0; j < size; ++j)
		{
			sum += A.mat[i * size + j] * x.mat[j];
		}
		b.mat[i] = sum;
	}
}

// Build the banded assignment system: pentadiagonal A with a1 = 12 on the
// diagonal, a2 = a3 = -1 on the four neighboring bands, b[i] = sin(2*i).
void Matrix::createTask(Matrix& A, Matrix& b, const int size)
{
	//const int size = 994;
	const int a1 = 5 + 7;
	const int a2 = -1;
	const int a3 = a2;
	const int inSin(1 + 1);
	A = Matrix::ZeroCPU(size, size);
	b = Matrix(1, size);
	for (int i = 0; i < size; ++i)
	{
		A.mat[size * i + i] = a1;
		if (size * i + i - 1 >= 0) A.mat[size * i + i - 1] = a2;
		if (size * i + i - 2 >= 0) A.mat[size * i + i - 2] = a3;
		if (size * i + i + 1 < size * size) A.mat[size * i + i + 1] = a2;
		if (size * i + i + 2 < size * size) A.mat[size * i + i + 2] = a3;
	}
	for (int i = 0; i < size; ++i)
	{
		b.mat[i] = sin(i * inSin);
	}
}

// Variant C of the task: fixed size 994 and weaker diagonal a1 = 3
// (not diagonally dominant — iterative methods may diverge).
void Matrix::createTaskC(Matrix& A, Matrix& b)
{
	const int size = 994;
	const int a1 = 3;
	const int a2 = -1;
	const int a3 = a2;
	const int inSin(1 + 1);
	A = Matrix::ZeroCPU(size, size);
	b = Matrix(1, size);
	for (int i = 0; i < size; ++i)
	{
		A.mat[size * i + i] = a1;
		if (size * i + i - 1 >= 0) A.mat[size * i + i - 1] = a2;
		if (size * i + i - 2 >= 0) A.mat[size * i + i - 2] = a3;
		if (size * i + i + 1 < size * size) A.mat[size * i + i + 1] = a2;
		if (size * i + i + 2 < size * size) A.mat[size * i + i + 2] = a3;
	}
	for (int i = 0; i < size; ++i)
	{
		b.mat[i] = sin(i * inSin);
	}
}

// Largest absolute element-wise difference between two equally-sized matrices.
double Matrix::maxError(Matrix& x, Matrix& r)
{
	if (x.rows * x.cols != r.rows * r.cols) throw "Matrices are not the same size";
	double max = 0;
	for (int i = 0; i < x.rows * x.cols; ++i)
	{
		if (fabs(x.mat[i] - r.mat[i]) > max) max = fabs(x.mat[i] - r.mat[i]);
	}
	return max;
}

// Grid-stride element copy A[j] = B[j].
__global__ void copyKernel(const int n, double* A, double* B)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		A[j] = B[j];
	}
}

// a <- b, copied on the device; blocks until the copy is complete.
void Matrix::copyGPU(Matrix& a, const Matrix& b)
{
	int blockCount = (a.cols * a.rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
	copyKernel <<< blockCount, BLOCK_SIZE >>>(a.cols * a.rows, a.mat, b.mat);
	cudaDeviceSynchronize();
}

// For each diagonal element j: d gets its reciprocal, A's diagonal is zeroed.
// n is the matrix dimension (number of diagonal elements).
__global__ void separateDiagonalKernel(const int n, double* d, double* A)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		d[j * n + j] = 1 / A[j * n + j];
		A[j * n + j] = 0;
	}
}

// Splits A into d = inverse(diag(A)) and A with a zeroed diagonal, on the GPU.
// NOTE(review): assumes d was zero-initialized by the caller — only the
// diagonal of d is written here.
void Matrix::separateDiagonalAndInverseGPU(Matrix& d, Matrix& A)
{
	int blockCount = (A.cols + BLOCK_SIZE - 1) / BLOCK_SIZE;
	separateDiagonalKernel <<< blockCount, BLOCK_SIZE >>>(A.cols, d.mat, A.mat);
	cudaDeviceSynchronize();
}

// Moves the strictly-upper triangle of A into U (zeroing it in A).
// n is the total element count, cols the row width.
__global__ void separateUpperKernel(const int n, const int cols, double* U, double* A)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		int row = j / cols;
		int col = j % cols;
		if (col > row)
		{
			U[j] = A[j];
			A[j] = 0;
		}
	}
}

// U <- strictly-upper triangle of A (A keeps diagonal + lower part).
// NOTE(review): the grid is sized from A.cols only, but the grid-stride loop
// in the kernel still covers all A.cols * A.rows elements.
void Matrix::separateUpperGPU(Matrix& U, Matrix& A)
{
	int blockCount = (A.cols + BLOCK_SIZE - 1) / BLOCK_SIZE;
	separateUpperKernel <<< blockCount, BLOCK_SIZE >>>(A.cols * A.rows, A.cols, U.mat, A.mat);
	cudaDeviceSynchronize();
}

// In-place element-wise negation A[j] = -A[j].
__global__ void additiveInverseInPlaceKernel(const int n, double* A)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		A[j] = -A[j];
	}
}

// A <- -A on the device; blocks until done.
void Matrix::additiveInverseInPlaceGPU(Matrix& A)
{
	int blockCount = (A.rows * A.cols + BLOCK_SIZE - 1) / BLOCK_SIZE;
	additiveInverseInPlaceKernel <<< blockCount, BLOCK_SIZE >>>(A.rows * A.cols, A.mat);
	cudaDeviceSynchronize();
}

// Cooperative forward substitution: thread j owns row j; at step i the owner
// of row i publishes x[i], then all rows j > i accumulate it into their sum.
// NOTE(review): threads whose j >= n return before the loop and never reach
// the __syncthreads() barrier inside it — the launcher pads blockSize to a
// power of two, so such threads exist; barrier participation is divergent.
// Verify on the target architecture.
__global__ void forwardSubstitutionKernel(const int n, double* A, double* b, double* x)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		double sum = 0;
		for (int i = 0; i < n; i++)
		{
			if (i == j)
			{
				x[j] = (b[j] - sum) / A[j * n + j];
			}
			cuda_SYNCTHREADS();
			if (i < j)
			{
				sum += A[j * n + i] * x[i];
			}
		}
	}
}

// Solve A*result = b (A lower-triangular) with a single block; block size is
// the next power of two >= A.cols so the in-kernel barrier covers all rows.
void Matrix::forwardSubstitutionGPU(Matrix& result, const Matrix& A, const Matrix& b)
{
	int blockCount = 1;
	int blockSize = pow(2, ceil(log2f(A.cols)));
	forwardSubstitutionKernel <<< blockCount, blockSize >>>(A.cols, A.mat, b.mat, result.mat);
	cudaDeviceSynchronize();
}

// NOTE(review): not implemented.
void Matrix::backwardSubstitutionGPU(Matrix& result, const Matrix& A, const Matrix& b)
{
}

// Write the matrix as text: "cols rows" header, then row-major values.
void Matrix::toFile(std::string path)
{
	std::fstream writer;
	writer.open(path, std::ios::out);
	writer.seekg(0);
	writer << cols << ' ' << rows << '\n';
	for (int i = 0; i < rows; ++i)
	{
		for (int j = 0; j < cols; ++j)
		{
			writer << mat[i * cols + j] << ' ';
		}
		writer << "\n";
	}
	writer.close();
}

// Extract the diagonal into a new matrix and zero it in *this (CPU, in place).
Matrix Matrix::separateDiagonal()
{
	if (cols != rows) throw "Matrix is not square";
	auto ret = Zero(cols, rows);
	for (int i = 0; i < cols; ++i)
	{
		ret.mat[i * cols + i] = mat[i * cols + i];
		mat[i * cols + i] = 0.0f;
	}
	return ret;
}

// Copy of the diagonal part only (this matrix is left unchanged).
Matrix Matrix::diagonalCPU() const
{
	if (cols != rows) throw "Matrix is not square";
	auto ret = Zero(cols, rows);
	for (int i = 0; i < cols; ++i)
	{
		ret.mat[i * cols + i] = mat[i * cols + i];
	}
	return ret;
}

// Copy of the strictly-lower triangle.
Matrix Matrix::lowerCPU() const
{
	if (cols != rows) throw "Matrix is not square";
	auto ret = Zero(cols, rows);
	for (int j = 0; j < cols; ++j)
	{
		for (int i = 0; i < j; ++i)
		{
			ret.mat[j * cols + i] = mat[j * cols + i];
		}
	}
	return ret;
}

// Copy of the strictly-upper triangle.
Matrix Matrix::upperCPU() const
{
	if (cols != rows) throw "Matrix is not square";
	auto ret = Zero(cols, rows);
	for (int j = 0; j < cols; ++j)
	{
		for (int i = j + 1; i < cols; ++i)
		{
			ret.mat[j * cols + i] = mat[j * cols + i];
		}
	}
	return ret;
}

// Replace each diagonal element with its reciprocal (throws on zero).
void Matrix::inverseDiagonalInPlaceCPU()
{
	if (cols != rows) throw "Matrix is not square";
	for (int i = 0; i < cols; ++i)
	{
		if (mat[i * cols + i] == 0) throw "0 on diagonal";
		mat[i * cols + i] = 1 / mat[i * cols + i];
	}
}

// Swap the dimension labels of a row/column vector (data untouched).
void Matrix::transposeVectorInPlace()
{
	unsigned int tmp = cols;
	cols = rows;
	rows = tmp;
}

// Euclidean (L2) norm; valid only for vectors (one dimension == 1).
double Matrix::vectorEuclideanNorm()
{
	if (cols != 1 && rows != 1) throw "Matrix is not a vector";
	double sum = 0;
	for (int i = 0; i < cols * rows; ++i)
	{
		sum += mat[i] * mat[i];
	}
	return sqrt(sum);
}

Matrix Matrix::lu()
{
	throw "Not implemented";
}

// Print row-major contents to stdout.
void Matrix::print() const
{
	for (int i = 0; i < rows; ++i)
	{
		for (int j = 0; j < cols; ++j)
		{
			printf("%f ", mat[i * cols + j]);
		}
		printf("\n");
	}
	printf("\n");
}

// Releases the managed buffer owned by this matrix.
Matrix::~Matrix()
{
	if (PRINT_LOG) printf("Matrix destructor\n");
	cudaFree(mat);
	//free(mat);
}

// Naive dense multiply: C = A * B, one output element per grid-stride step.
// commonDim = A.cols = B.rows; cols = B.cols; n = total elements of C.
__global__ void mulKernel(const int commonDim, const int cols, const int n, double* A, double* B, double* C)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		int row = j / cols;
		int col = j % cols;
		C[j] = 0;
		for (int i = 0; i < commonDim; i++)
		{
			C[j] += A[row * commonDim + i] * B[i * cols + col];
		}
	}
}

// result <- a * b using a preallocated result buffer (no allocation per call).
void Matrix::refMul(Matrix& result, const Matrix& a, const Matrix& b)
{
	int blockCount = (a.rows * b.cols + BLOCK_SIZE - 1) / BLOCK_SIZE;
	mulKernel <<< blockCount, BLOCK_SIZE >>>(a.cols, b.cols, b.cols * a.rows, a.mat, b.mat, result.mat);
	cudaDeviceSynchronize();
}
// Multiply a diagonal matrix A by a dense matrix B: C[row,col] =
// A[row,row] * B[row,col]. Skips the O(n) inner product of the general kernel.
__global__ void mulDiagKernel(const int commonDim, const int cols, const int n, double* A, double* B, double* C)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		int row = j / cols;
		int col = j % cols;
		C[j] = A[row * commonDim + row] * B[row * commonDim + col];
	}
}

// result <- a * b where a is diagonal; preallocated result, blocking call.
void Matrix::refMulDiag(Matrix& result, const Matrix& a, const Matrix& b)
{
	int blockCount = (a.rows * b.cols + BLOCK_SIZE - 1) / BLOCK_SIZE;
	mulDiagKernel << < blockCount, BLOCK_SIZE >> >(a.cols, b.cols, b.cols * a.rows, a.mat, b.mat, result.mat);
	cudaDeviceSynchronize();
}

// General matrix product; allocates the result buffer and hands ownership to
// the returned Matrix (adopting constructor).
Matrix operator*(const Matrix& a, const Matrix& b)
{
	if (a.cols != b.rows) throw "wrong dimensions for multiplication";
	double* mat;
	cudaMallocManaged(&mat, b.cols * a.rows * sizeof(double));
	int blockCount = (a.rows * b.cols + BLOCK_SIZE - 1) / BLOCK_SIZE;
	if (PRINT_LOG) printf("Matrix multiplication on %d blocks x %d threads\n", blockCount, BLOCK_SIZE);
	mulKernel <<< blockCount, BLOCK_SIZE >>>(a.cols, b.cols, b.cols * a.rows, a.mat, b.mat, mat);
	cudaDeviceSynchronize();
	return Matrix(b.cols, a.rows, mat);
}

// Element-wise sum C = A + B (grid-stride).
__global__ void addKernel(const int n, double* A, double* B, double* C)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		C[j] = A[j] + B[j];
	}
}

// result <- a + b into a preallocated buffer; blocking call.
void Matrix::refAdd(Matrix& result, const Matrix& a, const Matrix& b)
{
	int blockCount = (a.cols * a.rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
	addKernel <<< blockCount, BLOCK_SIZE >>>(a.cols * a.rows, a.mat, b.mat, result.mat);
	cudaDeviceSynchronize();
}

// Element-wise sum; allocates and returns a new matrix.
Matrix operator+(const Matrix& a, const Matrix& b)
{
	if (a.cols != b.cols || a.rows != b.rows) throw "dimensions must equal for addition";
	double* mat;
	cudaMallocManaged(&mat, a.cols * a.rows * sizeof(double));
	int blockCount = (a.cols * a.rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
	if (PRINT_LOG) printf("Matrix addition on %d blocks x %d threads\n", blockCount, BLOCK_SIZE);
	addKernel <<< blockCount, BLOCK_SIZE >>>(a.cols * a.rows, a.mat, b.mat, mat);
	cudaDeviceSynchronize();
	return Matrix(a.cols, a.rows, mat);
}

// Element-wise difference C = A - B (grid-stride).
__global__ void subKernel(const int n, double* A, double* B, double* C)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		C[j] = A[j] - B[j];
	}
}

// result <- a - b into a preallocated buffer; blocking call.
void Matrix::refSub(Matrix& result, const Matrix& a, const Matrix& b)
{
	int blockCount = (a.cols * a.rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
	subKernel <<< blockCount, BLOCK_SIZE >> >(a.cols * a.rows, a.mat, b.mat, result.mat);
	cudaDeviceSynchronize();
}

// Element-wise difference; allocates and returns a new matrix.
// NOTE(review): the exception text says "addition" — copied from operator+.
Matrix operator-(const Matrix& a, const Matrix& b)
{
	if (a.cols != b.cols || a.rows != b.rows) throw "dimensions must equal for addition";
	double* mat;
	cudaMallocManaged(&mat, a.cols * a.rows * sizeof(double));
	int blockCount = (a.cols * a.rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
	if (PRINT_LOG) printf("Matrix addition on %d blocks x %d threads\n", blockCount, BLOCK_SIZE);
	subKernel <<< blockCount, BLOCK_SIZE >>>(a.cols * a.rows, a.mat, b.mat, mat);
	cudaDeviceSynchronize();
	return Matrix(a.cols, a.rows, mat);
}

// A[j] = -B[j] (grid-stride negated copy).
__global__ void additiveInverseKernel(const int n, double* A, double* B)
{
	int index = blockIdx.x * blockDim.x + threadIdx.x;
	int stride = blockDim.x * gridDim.x;
	for (int j = index; j < n; j += stride)
	{
		A[j] = -B[j];
	}
}

// Unary minus: returns a freshly-allocated element-wise negation of a.
Matrix operator-(const Matrix& a)
{
	double* mat;
	cudaMallocManaged(&mat, a.cols * a.rows * sizeof(double));
	int blockCount = (a.cols * a.rows + BLOCK_SIZE - 1) / BLOCK_SIZE;
	additiveInverseKernel <<<blockCount, BLOCK_SIZE >>>(a.cols * a.rows, mat, a.mat);
	cudaDeviceSynchronize();
	return Matrix(a.cols, a.rows, mat);
}
16
#include "includes.h" __global__ void multiply_by_itself_training_util_kernel( const float4 * __restrict input_buf, float4 * __restrict output_buf, int elem_count) { int elem_id = blockDim.x * blockIdx.x + threadIdx.x; if (elem_id < elem_count) { float4 val = input_buf[elem_id]; val.x *= val.x; val.y *= val.y; val.z *= val.z; val.w *= val.w; output_buf[elem_id] = val; } }
17
#include <algorithm>
#include <iostream>
#include <vector>

typedef unsigned long long data_t;

// Abort with a readable message when a CUDA call fails.
static inline void check(cudaError_t err, const char* context) {
	if (err != cudaSuccess) {
		std::cerr << "CUDA error: " << context << ": "
			<< cudaGetErrorString(err) << std::endl;
		std::exit(EXIT_FAILURE);
	}
}

#define CHECK(x) check(x, #x)

// Typed wrapper around cudaMemcpy (counts elements, not bytes).
template <class T>
void cuda_memcpy(T* target, const T* source, std::size_t num, cudaMemcpyKind direction) {
	CHECK(cudaMemcpy(target, source, num * sizeof(T), direction));
}

// Ceiling division.
static inline int divup(int a, int b) {
	return (a + b - 1) / b;
}

// Extract one bit from each key, sequentially over a [len]-sized slice per thread.
// For element i: bit = (d_in[i] >> bit_shift) & 1; d_out[i] = bit when One != 0,
// otherwise the complement (1 - bit).
// Example: number 10001, bit_shift 1, One 1 -> checks whether the second bit is set.
__global__ void getMask(data_t *d_in, unsigned int *d_out, const int len,
                        const unsigned int n, data_t bit_shift, unsigned int One) {
	const unsigned int tid = threadIdx.x + blockDim.x * blockIdx.x;
	const unsigned int first = tid * len;
	if (first >= n) return;

	const data_t probe = (data_t)1 << bit_shift;
	const unsigned int last = first + len;
	for (unsigned int i = first; i < last && i < n; i++) {
		const unsigned int isSet = (d_in[i] & probe) != 0 ? 1u : 0u;
		d_out[i] = One ? isSet : 1u - isSet;
	}
}

// Compute each element's destination slot for the scatter step.
// Elements whose mask bit is set go to d_sum[i] (their rank among set elements);
// the rest go after the `total_pre` block at i - d_sum[i] + total_pre.
__global__ void getIndex(unsigned int *d_index, unsigned int *d_sum, unsigned int* d_mask,
                         const int len, const unsigned int n, unsigned int total_pre) {
	const unsigned int tid = threadIdx.x + blockDim.x * blockIdx.x;
	const unsigned int first = tid * len;
	if (first >= n) return;

	const unsigned int last = first + len;
	for (unsigned int i = first; i < last && i < n; i++) {
		d_index[i] = d_mask[i] ? d_sum[i] : i - d_sum[i] + total_pre;
		if (d_index[i] >= n) {
			// diagnostic: a destination outside [0, n) means the scan went wrong
			printf(" d_sum[i] : %d, total_pre : %d, d_mask[i] : %d \n", d_sum[i], total_pre, d_mask[i]);
		}
		// if(d_mask[i]==1){
		//   d_index[i]=total_pre+d_sum[i];
		// }
	}
}

// Move each key to its computed slot: d_out[d_index[i]] = d_in[i].
__global__ void scatter(data_t *d_in, unsigned int *d_index, data_t *d_out,
                        const int len, const unsigned int n) {
	const unsigned int tid = threadIdx.x + blockDim.x * blockIdx.x;
	const unsigned int first = tid * len;
	if (first >= n) return;

	const unsigned int last = first + len;
	for (unsigned int i = first; i < last && i < n; i++) {
		d_out[d_index[i]] = d_in[i];
	}
}

// idea to do exclusive prefix is similar to my ppc course https://www.youtube.com/watch?v=HVhCtl96gUs
// I will use y,z,s to specify which step I am in.
// in particular, I split the whole array into multiple smaller array. each small array has [len] numbers
// Thread level y: each thread will do addition sequentially. threads are working independently, dealing with [len] numbers.
// Thread level z: each threads in the same block will do sequentially. threads are working independently, dealing with one block.
// Thread level s: each thread will add the result from its previous thread. threads are working independently, dealing with [len] numbers.
// Block level y: this will get prefix sum in block level.
// Block level z: only one block and one thread are used here, do addition sequentially.
// Block level s: each threads will add the result from its previous block.
// Per-thread exclusive prefix sum over each [len]-sized slice of `mask`,
// written into `output`. Assumes `output` was zeroed by the caller.
__global__ void prefixsum(unsigned int* mask, unsigned int* output, const int len, const unsigned int n)
{
	unsigned int index = threadIdx.x + blockDim.x * blockIdx.x;
	int step = len;
	int start = index * len + 1; //exclusive
	if (start > n) return; //exclusive, could equal to n
	int end = start + step;
	output[start] = mask[start - 1];
	for (unsigned int i = start + 1; i < end && i < n; i++) {
		output[i] += output[i - 1] + mask[i - 1]; //exclusive, therefore mask[i-1]
	}
}

// Chain the slice totals across the threads of each block: every [step]-th
// element accumulates its predecessor's total, serially per thread.
__global__ void serialsum_accrossthread(unsigned int* sum, const int len, const unsigned int n)
{
	unsigned int index = threadIdx.x + blockDim.x * blockIdx.x;
	int step = len;
	int offset = 2 * step;
	unsigned int start = step * blockDim.x * index + offset;
	unsigned int end = step * blockDim.x * (index + 1) + 1;
	for (unsigned int i = start; i < end && i < n; i += step) {
		sum[i] += sum[i - step];
	}
}

// Add each slice's carried-in base (last element of the previous slice) to the
// rest of the slice. Thread 0 of each block has no predecessor and skips.
__global__ void mergethread(unsigned int* sum, const int len, const unsigned int n)
{
	if (threadIdx.x == 0) return;
	unsigned int index = threadIdx.x + blockDim.x * blockIdx.x;
	int step = len;
	unsigned int start = index * step + 1; //exclusive
	unsigned int end = start + step - 1; // -1 is important, this position has been added in serial sum
	unsigned int base = sum[start - 1];
	for (unsigned int i = start; i < end && i < n; i++) {
		sum[i] += base;
	}
}

// void serialsum_accrossblock(unsigned int* sum,const int len, const unsigned int n, const int block_size){
//   int step=len*block_size;//each block has step number
//   int start=2*step;
//   for(unsigned int i=start; i<n; i+=step){
//     sum[i]+=sum[i-step];
//   }
// }

// Chain the block totals serially; launched with <<<1,1>>> on purpose —
// a single thread walks the block boundaries.
__global__ void serialsum_accrossblock(unsigned int* sum, const int len, const unsigned int n, const int block_size)
{
	//only one block and one thread
	int step = len * block_size; //each block has step number
	int start = 2 * step;
	for (unsigned int i = start; i < n; i += step) {
		sum[i] += sum[i - step];
	}
}

// __global__ void mergeblock(unsigned int* sum,const int len, const unsigned int n){
//   unsigned int index = threadIdx.x + blockDim.x * blockIdx.x;
//   if (index==0) return; //the first block is not needed to merge
//   int step=len*blockDim.x;
//   int start=index*step+1; //exclusive
//   int end=start+step-1;// -1 is important, this position has been added in serial sum
//   int base=sum[start-1];//last element at last block
//   for(int i=start; i<end && i<n; i++){
//     sum[i]+=base;
//   }
// }

// Add each block's carried-in base (first element of the block, already fixed
// up by serialsum_accrossblock) to every other element the block owns.
__global__ void mergeblock(unsigned int* sum, const int len, const unsigned int n)
{
	if (blockIdx.x == 0) return; //the first block is not needed to merge
	unsigned int index = threadIdx.x + blockDim.x * blockIdx.x;
	int step = len;
	unsigned int base_index = blockIdx.x * step * blockDim.x;
	unsigned int base = sum[base_index];
	int start = index * step;
	//only the first thread in block should excluded the first element
	int end = start + step;
	start = (start == base_index) ? start + 1 : start;
	// int base=sum[start-1];//last element at last block
	for (int i = start; i < end && i < n; i++) {
		sum[i] += base;
	}
}

// Least-significant-digit radix sort of `data` (n 64-bit keys) on the GPU:
// one pass per bit — build a 0-bit mask, exclusive-scan it (three kernels:
// per-thread, cross-thread, cross-block), derive destination indices, scatter,
// then swap input/output buffers for the next bit.
void psort(int n, data_t *data)
{
	if (n <= 0) return;
	// FIXME: Implement a more efficient parallel sorting algorithm for the GPU.
	const int block_size = 256; //64 threads per block;
	const int len = 2000; // add 1000 prefix sum per thread;
	data_t *d_temp;
	data_t *d_in = NULL;
	CHECK(cudaMalloc((void**)&d_in, n * sizeof(data_t)));
	data_t *d_out_long = NULL;
	CHECK(cudaMalloc((void**)&d_out_long, n * sizeof(data_t)));
	unsigned int *d_out = NULL;
	CHECK(cudaMalloc((void**)&d_out, n * sizeof(unsigned int)));
	unsigned int *d_sum = NULL;
	CHECK(cudaMalloc((void**)&d_sum, n * sizeof(unsigned int)));
	unsigned int *d_index = NULL;
	CHECK(cudaMalloc((void**)&d_index, n * sizeof(unsigned int)));
	// std::vector<unsigned int> inter_sum(n);
	// unsigned int inter_sum[n];
	cuda_memcpy(d_in, data, n, cudaMemcpyHostToDevice);
	data_t bits = sizeof(data_t) * 8;
	// unsigned int out[n];
	// unsigned int sum[n];
	unsigned int total_zeros, mask_last;
	//one pass here
	for (data_t i = 0; i < bits; i++) {
		// prefixsum accumulates into d_sum, so it must start zeroed
		CHECK(cudaMemset(d_sum, 0, n * sizeof(unsigned int)));
		// mask of keys whose i-th bit is 0 (they sort first)
		getMask<<<divup(n, block_size * len), block_size>>>(d_in, d_out, len, n, i, 0);
		CHECK(cudaGetLastError());
		// CHECK(cudaMemcpy(out, d_out, n * sizeof(unsigned int), cudaMemcpyDeviceToHost));
		// std::cout<<"out "<<std::endl;
		// for(int j=0;j<n;j++){
		//   std::cout<<out[j]<<" ";
		// }
		// std::cout<<std::endl;
		//inclusive prefix sum
		prefixsum<<<divup(n, block_size * len), block_size>>>(d_out, d_sum, len, n);
		CHECK(cudaGetLastError());
		serialsum_accrossthread<<<divup(n, block_size * len * block_size), block_size>>>(d_sum, len, n);
		CHECK(cudaGetLastError());
		mergethread<<<divup(n, block_size * len), block_size>>>(d_sum, len, n);
		CHECK(cudaGetLastError());
		serialsum_accrossblock<<<1, 1>>>(d_sum, len, n, block_size);
		CHECK(cudaGetLastError());
		// CHECK(cudaMemcpy(inter_sum.data(), d_sum, n * sizeof(unsigned int), cudaMemcpyDeviceToHost));
		// serialsum_accrossblock(inter_sum.data(), len, n, block_size);
		// CHECK(cudaMemcpy(d_sum, inter_sum.data(),n * sizeof(unsigned int), cudaMemcpyHostToDevice));
		// CHECK(cudaGetLastError());
		mergeblock<<<divup(n, block_size * len), block_size>>>(d_sum, len, n);
		CHECK(cudaGetLastError());
		// CHECK(cudaMemcpy(sum, d_sum, n * sizeof(unsigned int), cudaMemcpyDeviceToHost));
		// std::cout<<"sum "<<std::endl;
		// for(int j=0;j<n;j++){
		//   std::cout<<sum[j]<<" ";
		// }
		// std::cout<<std::endl;
		// total number of zero-bit keys = exclusive sum at n-1, plus the last mask bit
		CHECK(cudaMemcpy(&total_zeros, d_sum + n - 1, sizeof(unsigned int), cudaMemcpyDeviceToHost));
		CHECK(cudaMemcpy(&mask_last, d_out + n - 1, sizeof(unsigned int), cudaMemcpyDeviceToHost));
		total_zeros += (mask_last == 1) ? 1 : 0;
		getIndex<<<divup(n, block_size * len), block_size>>>(d_index, d_sum, d_out, len, n, total_zeros);
		// std::cout<<"index "<<std::endl;
		// CHECK(cudaMemcpy(sum, d_index, n * sizeof(unsigned int), cudaMemcpyDeviceToHost));
		// for(int j=0;j<n;j++){
		//   std::cout<<sum[j]<<" ";
		// }
		// std::cout<<std::endl;
		CHECK(cudaGetLastError());
		scatter<<<divup(n, block_size * len), block_size>>>(d_in, d_index, d_out_long, len, n);
		CHECK(cudaGetLastError());
		//must swap pointers
		d_temp = d_in;
		d_in = d_out_long;
		d_out_long = d_temp;
	}
	cuda_memcpy(data, d_in, n, cudaMemcpyDeviceToHost);
	CHECK(cudaFree(d_in));
	CHECK(cudaFree(d_out_long));
	CHECK(cudaFree(d_out));
	CHECK(cudaFree(d_sum));
	CHECK(cudaFree(d_index));
	// std::sort(data, data + n);
}
18
#include <iostream> using namespace std; #define CUDA_CALL(ans) { gpuAssert((ans), __FILE__, __LINE__); } inline void gpuAssert(cudaError_t code, const char *file, int line, bool abort=true) { if (code != cudaSuccess) { fprintf(stderr,"GPU assert: %s %s %d\n", cudaGetErrorString(code), file, line); if (abort) exit(code); } } __global__ void square(float *d_out, float *d_in){ int idx = threadIdx.x; float f = d_in[idx]; d_out[idx] = f*f; } int main(){ const int ARRAY_SIZE = 64; const int ARRAY_BYTES = ARRAY_SIZE * sizeof(float); float h_in[ARRAY_SIZE]; for(int i=0; i < ARRAY_SIZE; i++){ h_in[i] = float(i); } float h_out[ARRAY_SIZE]; float *d_in; float *d_out; CUDA_CALL(cudaMalloc((void**) &d_in, ARRAY_BYTES)); CUDA_CALL(cudaMalloc((void**) &d_out, ARRAY_BYTES)); CUDA_CALL(cudaMemcpy(d_in, h_in, ARRAY_BYTES, cudaMemcpyHostToDevice)); square<<<1, ARRAY_SIZE>>>(d_out, d_in); CUDA_CALL(cudaMemcpy(h_out, d_out, ARRAY_BYTES, cudaMemcpyDeviceToHost)); for(int i=0; i< ARRAY_SIZE; i++){ cout << h_out[i]; if(i%4!=3) cout << "\t"; else cout << endl; } }
19
extern "C" __global__ void cuAdd(int n, float *a, float *b, float *result) { int i = blockIdx.x * blockDim.x + threadIdx.x; if (i<n) { result[i] = a[i] + b[i]; } } extern "C" __global__ void cuMult(int n, float *a, float *b, float *result) { int i = blockIdx.x * blockDim.x + threadIdx.x; if (i<n) { result[i] = a[i] * b[i]; } } extern "C" __global__ void cuDiv(int n, float *a, float *b, float *result) { int i = blockIdx.x * blockDim.x + threadIdx.x; if (i<n) { result[i] = a[i] / b[i]; } } extern "C" __global__ void cuExp(int n, float *a, float *result) { int i = blockIdx.x * blockDim.x + threadIdx.x; if (i<n) { result[i] = expf(a[i]); } }
20
#include<bits/stdc++.h>
using namespace std;

// Element-wise vector add: C[i] = A[i] + B[i]. One thread per element,
// guarded so the last (partial) block does not write out of bounds.
__global__ void vec_add(int N, int *A, int *B, int *C){
    int i = threadIdx.x + blockIdx.x * blockDim.x;
    // assert( i<N );
    if(i < N) C[i] = A[i] + B[i];
}

int main(int argc, char *argv[]){
    srand(0);
    int N = 10000, block_size = 256;
    if(argc>1) N = stoi(argv[1]);
    if(argc>2) block_size = stoi(argv[2]);
    int n_block = (N+block_size-1)/block_size;   // ceil(N / block_size)

    int *A = new int [N], *B = new int [N], *C = new int [N];
    for(int i=0;i<N;++i) A[i] = rand()%50;
    for(int i=0;i<N;++i) B[i] = rand()%50;

    clock_t start_time, mid_time1, mid_time2, end_time;
    // Record the starting time
    start_time = clock();

    int *dA, *dB, *dC;
    cudaMalloc((void **)&dA, N*sizeof(int));
    cudaMalloc((void **)&dB, N*sizeof(int));
    cudaMalloc((void **)&dC, N*sizeof(int));

    // Copy data to device
    cudaMemcpy(dA, A, N*sizeof(int), cudaMemcpyHostToDevice);
    cudaMemcpy(dB, B, N*sizeof(int), cudaMemcpyHostToDevice);
    mid_time1 = clock();

    // Running code on GPUs
    vec_add<<<n_block, block_size>>>(N, dA, dB, dC);
    // BUG FIX: kernel launches are asynchronous. Without synchronizing here,
    // mid_time2 captured only the launch overhead and the kernel's runtime
    // was silently folded into the "transfer" figure.
    cudaDeviceSynchronize();
    mid_time2 = clock();

    cudaMemcpy(C, dC, N*sizeof(int), cudaMemcpyDeviceToHost);
    cudaFree(dA);
    cudaFree(dB);
    cudaFree(dC);
    // Record the ending time
    end_time = clock();

    double dt = double(end_time - start_time)/CLOCKS_PER_SEC;
    double dt_trans = double(mid_time1 + end_time - start_time - mid_time2)/CLOCKS_PER_SEC;
    cout<<"Data Transfer Time Usage: "<<dt_trans<<"s"<<endl;
    cout<<"Total Time Usage: "<<dt<<"s\nResults:\n";
    // BUG FIX: for N < 10 the old stride of N/10 was 0, looping forever.
    int stride = max(N/10, 1);
    for(int i=0;i<N;i+=stride) cout<<C[i]<<' ';
    cout<<endl;

    delete [] A;
    delete [] B;
    delete [] C;
    return 0;
}
21
/* Multiply two matrices on the GPU using an array-of-row-pointers layout. */
#include<iostream>
#include<stdlib.h>
#include<sys/time.h>
#include<math.h>
#include"cuda_runtime.h"
using namespace std;

#define cols 1024
#define rows 1024

// C = A * B, one thread per output element. The kernel accumulates into
// Cd with +=, so Cd's storage must be zeroed before the launch.
__global__ void multiply(float**Ad,float**Bd,float**Cd)
{
    int x = blockDim.x*blockIdx.x+threadIdx.x;
    int y = blockDim.y*blockIdx.y+threadIdx.y;
    if(x<rows && y<cols)
    {
        for(int i=0;i<cols;i++)
        {
            Cd[y][x]+=Ad[y][i]*Bd[i][x];
        }
    }
}

int main()
{
    struct timeval start, end;
    int n=cols*rows;
    float **A,**B,**C,**Ad,**Bd,**Cd;   // row-pointer tables (host copies / device)
    float *a,*b,*c,*ad,*bd,*cd;         // flat element storage (host / device)
    A=new float* [cols];
    B=new float* [cols];
    C=new float* [cols];
    a=new float [n];
    b=new float [n];
    c=new float [n];
    cudaMalloc((void**)&Ad,sizeof(float*)*cols);
    cudaMalloc((void**)&Bd,sizeof(float*)*cols);
    cudaMalloc((void**)&Cd,sizeof(float*)*cols);
    cudaMalloc((void**)&ad,sizeof(float)*n);
    cudaMalloc((void**)&bd,sizeof(float)*n);
    cudaMalloc((void**)&cd,sizeof(float)*n);
    for(int i=0;i<n;i++)
    {
        a[i]=2;
        b[i]=2;
    }
    // Host-side tables holding *device* row pointers; copied to Ad/Bd/Cd below.
    for(int i=0;i<cols;i++)
    {
        A[i]=ad+i*rows;
        B[i]=bd+i*rows;
        C[i]=cd+i*rows;
    }
    gettimeofday( &start, NULL); // timing starts with the host-to-device copies
    cudaMemcpy(Ad,A,sizeof(float*)*cols,cudaMemcpyHostToDevice);
    cudaMemcpy(Bd,B,sizeof(float*)*cols,cudaMemcpyHostToDevice);
    cudaMemcpy(Cd,C,sizeof(float*)*cols,cudaMemcpyHostToDevice);
    cudaMemcpy(ad,a,sizeof(float)*n,cudaMemcpyHostToDevice);
    cudaMemcpy(bd,b,sizeof(float)*n,cudaMemcpyHostToDevice);
    // BUG FIX: the kernel accumulates with +=, but cd was never initialized,
    // so products were added onto whatever garbage cudaMalloc returned.
    cudaMemset(cd,0,sizeof(float)*n);
    dim3 dimBlock(16,16);
    dim3 dimGrid(cols/16+1,rows/16+1);
    multiply<<<dimGrid,dimBlock>>>(Ad,Bd,Cd);
    cudaMemcpy(c,cd,sizeof(float)*n,cudaMemcpyDeviceToHost);
    gettimeofday( &end, NULL ); // timing ends once results are back on the host
    // Every element of the product should be 2*2*1024 = 4096.
    float target=4096;
    float error=0.0;
    for(int i=0;i<n;i++)
    {
        error+=abs(c[i]-target);
    }
    cout<<"error is "<<error<<endl;
    int timeuse = 1000000 * ( end.tv_sec - start.tv_sec ) + end.tv_usec - start.tv_usec;
    cout << "total time is " << timeuse/1000 << "ms" <<endl;
    delete [] a;
    delete [] b;
    delete [] c;
    delete [] A;
    delete [] B;
    delete [] C;
    cudaFree(Ad);
    cudaFree(Bd);
    cudaFree(Cd);
    cudaFree(ad);
    cudaFree(bd);
    cudaFree(cd);
    return 0;
}
22
#include <stdio.h>

// Prints one line from every thread of the launch (5 blocks x 5 threads = 25 lines).
__global__ void firstParallel()
{
    printf("This is running in parallel.\n");
}

int main()
{
    firstParallel<<<5, 5>>>();
    // Device printf output is flushed when the host synchronizes; without
    // this the process could exit before the kernel runs.
    cudaDeviceSynchronize();
}
23
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <sys/time.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

// Computes one output element output(i,j): a full convolution summed across
// all input channels, at stride `stride` with zero padding `pad`.
__global__ void conv2(float *A, float *kernel,int inputSize, int depth, int kernelSize , int stride, int pad, float *B, int outputSize)
{
    int i = threadIdx.x + blockDim.x * blockIdx.x;
    int j = threadIdx.y + blockDim.y * blockIdx.y;
    if( !(i < outputSize) || !(j < outputSize) ) return;
    int Ai = i*stride;
    int Aj = j*stride;
    // Clip the kernel window so the zero padding is skipped instead of read.
    int startk = (pad-Ai) < 0? 0 : pad-Ai;
    int endk = kernelSize < (inputSize + pad - Ai) ? kernelSize : (inputSize + pad - Ai);
    int startl = (pad-Aj) < 0? 0 : pad-Aj;
    int endl = kernelSize < (inputSize + pad - Aj) ? kernelSize : (inputSize + pad - Aj);
    float sum = 0;
    for(int d = 0; d < depth; d++) {
        for( int k = startk ; k < endk; k++) {
            for( int l = startl; l < endl; l++) {
                sum += A[d*inputSize*inputSize + (Ai+k-pad)*inputSize + Aj+l-pad]*kernel[d*kernelSize*kernelSize + k*kernelSize+l];
            }
        }
    }
    // BUG FIX: the original also stored B[d*outputSize*outputSize + ...]
    // inside the depth loop, but d_B holds a single outputSize^2 plane, so
    // every store with d > 0 was out of bounds. Write the summed result once.
    B[i*outputSize + j] = sum;
}

int main(int argc, char * argv[] )
{
    // input:  inputSize*inputSize*depth
    // kernel: kernelSize*kernelSize*depth (one per stride configuration)
    // output: outputSize*outputSize (one plane per kernel)
    int inputSize = 7;
    int depth = 3;
    int kernelSize = 3;
    int kernelNum = 3;
    int stride[3] = {1 , 2 , 3 };
    int pad[3] = {0,0,0};
    int outputSize[3];
    // Derive per-stride padding and output size.
    for(int i = 0; i < kernelNum; i++) {
        if((inputSize - kernelSize)%stride[i] != 0) {
            pad[i] = (stride[i] - ((inputSize - kernelSize)%stride[i])) / 2;
        }
        outputSize[i] = (inputSize - kernelSize + 2*pad[i] ) / stride[i] + 1;
    }

    // ===================== host buffers =====================
    float *A, *kernel[3], *B[3];
    A = (float *)malloc(sizeof(float)*inputSize*inputSize*depth);
    for(int i = 0; i < 3; i++) {
        kernel[i] = (float *)malloc(sizeof(float)*kernelSize*kernelSize*depth);
        // Only a single outputSize^2 plane is produced per kernel (matches d_B below).
        B[i] = (float *)malloc(sizeof(float)*outputSize[i]*outputSize[i]);
    }
    // Initialize input A: each channel holds 0..inputSize^2-1.
    for(int d = 0; d < depth; d++) {
        for(int i=0; i<inputSize*inputSize; i++) {
            A[d*inputSize*inputSize + i] = i;
        }
    }
    // Initialize kernels with all ones.
    for(int i = 0; i < 3; i++){
        for(int j = 0; j < kernelSize*kernelSize*depth; j++) {
            kernel[i][j] = 1;
        }
    }

    // ===================== device buffers =====================
    float *d_A, *d_kernel[3], *d_B[3];
    cudaMalloc((void**)&d_A,sizeof(float)*inputSize*inputSize*depth);
    for(int i = 0; i < 3; i++) {
        cudaMalloc((void**)&d_kernel[i], sizeof(float)*kernelSize*kernelSize*depth);
        cudaMalloc((void**)&d_B[i],sizeof(float)*outputSize[i]*outputSize[i]);
    }
    cudaMemcpy(d_A,A,sizeof(float)*inputSize*inputSize*depth,cudaMemcpyHostToDevice);
    for(int i = 0; i < 3; i++) {
        cudaMemcpy(d_kernel[i],kernel[i],sizeof(float)*kernelSize*kernelSize*depth,cudaMemcpyHostToDevice);
    }

    // ===================== launch kernels =====================
    struct timeval start, end;
    gettimeofday( &start, NULL );
    for( int i = 0; i < 3; i++ ) {
        int blockx = (int) (log2(outputSize[i])+ 1);
        int blocky = blockx;
        dim3 Block(blockx,blocky);
        // BUG FIX: the grid must tile the *output* plane; sizing it by
        // inputSize launched extra blocks that only burned occupancy.
        dim3 Grid((outputSize[i]+Block.x-1) / Block.x,(outputSize[i]+Block.y-1) / Block.y );
        conv2 <<< Grid, Block >>> (d_A,d_kernel[i],inputSize,depth,kernelSize,stride[i],pad[i],d_B[i],outputSize[i]);
    }
    // Copy results back (cudaMemcpy synchronizes with the preceding launches).
    for( int i = 0; i < 3; i++ ) {
        cudaMemcpy(B[i],d_B[i],sizeof(float)*outputSize[i]*outputSize[i],cudaMemcpyDeviceToHost);
    }
    gettimeofday( &end, NULL );
    int timeuse = 1000000 * ( end.tv_sec - start.tv_sec ) + end.tv_usec - start.tv_usec;
    printf("total time is %f ms\n", timeuse/(float)1000);

    // ===================== write results as MATLAB matrices =====================
    FILE *b[3];
    b[0] = fopen("matrixB11.m", "wb");
    b[1] = fopen("matrixB12.m", "wb");
    b[2] = fopen("matrixB13.m", "wb");
    for(int k = 0; k < 3; k++ ) {
        fprintf(b[k], "B = [ \n");
        for (int i = 0; i < outputSize[k]; i++) {
            for (int j = 0; j < outputSize[k]; j++)
                fprintf(b[k], "%f ", B[k][i * outputSize[k] + j]);
            fprintf(b[k], "\n");
        }
        fprintf(b[k], "];");
    }

    // ===================== cleanup =====================
    free(A);
    cudaFree(d_A);
    for(int i = 0; i < 3; i++) {
        free(kernel[i]);
        free(B[i]);
        cudaFree(d_B[i]);
        cudaFree(d_kernel[i]);
        fclose(b[i]);
    }
    return 0;
}
24
#include "includes.h"

// Per-block stratum histogram. Each block processes an even share of `a`
// (istart..iend) in SBIGBLK-sized chunks. For every chunk it classifies each
// element by descending the complete binary tree of stratum boundaries held
// in `strata` (heap order), tallies per-stratum counts in shared memory, and
// writes the summed counts for that chunk into `bi`.
// NOTE(review): the eight unrolled descent steps imply a tree of depth 8,
// i.e. SNDVALS == 256 — confirm against the definition in includes.h.
// Expects blockDim.x == SNDVALS-compatible and blockDim.x*blockDim.y == SNTHREADS.
__global__ void __stratifycounts(double *strata, int n, double *a, unsigned int *bi)
{
  __shared__ unsigned int ic[SNDVALS][SNDGRPS];  // per-chunk counts, one column per y-group
  __shared__ double ss[SNDVALS];                 // stratum boundary tree (heap order)
  // Even partition of the n elements across gridDim.x blocks (64-bit math
  // avoids overflow in blockIdx.x * n).
  int istart = (int)(((long long)blockIdx.x) * n / gridDim.x);
  int iend = (int)(((long long)(blockIdx.x+1)) * n / gridDim.x);
  // Output base: one SNDVALS-wide row per (block, SBIGBLK-chunk) pair.
  int bibase = SNDVALS * (blockIdx.x + istart / SBIGBLK);
  int tid = threadIdx.x + threadIdx.y * blockDim.x;
  // First row of threads stages the boundary tree into shared memory.
  if (threadIdx.y == 0) {
    ss[threadIdx.x] = strata[threadIdx.x];
  }
  for (int i = istart; i < iend; i += SBIGBLK) {
    // Barrier also covers the initial ss[] load on the first iteration.
    __syncthreads();
    if (threadIdx.y < SNDGRPS) {
      ic[threadIdx.x][threadIdx.y] = 0;
    }
    __syncthreads();
    // Each thread strides through this chunk, classifying one element per step.
    for (int k = i + tid; k < min(iend, i + tid + SBIGBLK); k += SNTHREADS) {
      double v = a[k];
      int j = 0;
      // Unrolled binary-tree descent: child 2j+2 if v is above the boundary,
      // else 2j+1; after 8 levels j lands on a leaf.
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      j = (v > ss[j]) ? 2*j+2 : 2*j+1;
      // Map leaf heap index to stratum index 0..SNDVALS-1.
      j = j - SNDVALS + 1;
      atomicInc(&ic[j][threadIdx.y], 65536*32767);
    }
    __syncthreads();
    // Row 0 folds the per-group counts and publishes this chunk's histogram.
    // NOTE(review): the explicit 4-term sum assumes SNDGRPS == 4 — confirm.
    if (threadIdx.y == 0) {
      bi[bibase + threadIdx.x] = ic[threadIdx.x][0] + ic[threadIdx.x][1] + ic[threadIdx.x][2] + ic[threadIdx.x][3];
    }
    bibase += SNDVALS;
  }
}
25
//#include <hayai/hayai.hpp> // //#include "btree.cuh" // //#include "concurrent-xfasttrie-fixture.cu" // //using BTREE = gpu::BTree<key_type, mapped_type>; //using BTreeInsertionFixture = XTrieInsertionFixture<BTREE, Structure::BTREE>; //using BTreeGetThreadFixture = XTrieGetThreadFixture<BTREE, Structure::BTREE>; //using BTreeGetWarpFixture = XTrieGetWarpFixture<BTREE, Structure::BTREE>; //using BTreePredecessorFixture = XTriePredecessorFixture<BTREE, Structure::BTREE, true>; //using BTreeSuccessorFixture = XTrieSuccessorFixture<BTREE, Structure::BTREE, true>; // //BENCHMARK_F(BTreeInsertionFixture, InsertionBtree, NUMBER_OF_RUNS, NUMBER_OF_ITERATIONS) //{ // insert(); //} ///* //BENCHMARK_F(BTreeGetThreadFixture, GetThreadBtree, NUMBER_OF_RUNS, NUMBER_OF_ITERATIONS) //{ // get_thread(); //} // //BENCHMARK_F(BTreeGetWarpFixture, GetWarpBtree, NUMBER_OF_RUNS, NUMBER_OF_ITERATIONS) //{ // get_warp(); //} // //BENCHMARK_F(BTreePredecessorFixture, PredecessorBtree, NUMBER_OF_RUNS, NUMBER_OF_ITERATIONS) //{ // predecessor(); //}*/ ///* //BENCHMARK_F(BTreeSuccessorFixture, SuccessorBtree, NUMBER_OF_RUNS, NUMBER_OF_ITERATIONS) //{ // successor(); //}*/
26
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include <stdlib.h>

#define MAX 65535
#define imin(a,b) (a<b?a:b)

const int arr_size =8;
const int threadsPerBlock = 256;
const int blocksPerGrid = imin(32,(arr_size +threadsPerBlock -1)/threadsPerBlock);

// Computes per-block partial dot products of arrA and arrB into arrC.
// Each thread accumulates a grid-stride partial sum, the block then reduces
// the partials in shared memory, and thread 0 writes the block's total.
__global__ void kernel(float*arrA , float* arrB, float* arrC)
{
    __shared__ float cache[threadsPerBlock];
    int tid = threadIdx.x + blockIdx.x * blockDim.x;
    int cacheIndex = threadIdx.x;

    float temp = 0;
    while (tid < arr_size) {
        temp += arrA[tid] * arrB[tid];
        // BUG FIX: the grid-stride must be the total thread count. The
        // original advanced by blockIdx.x * blockDim.x, which is 0 for
        // block 0 — its in-range threads looped forever on one element.
        tid += gridDim.x * blockDim.x;
    }

    // Set cache values for this block's reduction.
    cache[cacheIndex] = temp;
    __syncthreads();

    // Tree reduction; requires threadsPerBlock to be a power of two.
    int i = blockDim.x / 2;
    while (i != 0) {
        if (cacheIndex < i) {
            cache[cacheIndex] += cache[cacheIndex + i];
        }
        __syncthreads();
        i /= 2;
    }

    if (cacheIndex == 0) {
        arrC[blockIdx.x] = cache[0];
    }
}

int main(int argc, char **argv)
{
    const int arr_bytes = arr_size * sizeof(float);
    float arr_a[MAX];
    float arr_b[MAX];
    float partial_c[MAX];
    float* dev_a;
    float* dev_b;
    float* partialdev_c;
    int i;
    float j = 1.0;
    for (i = 0; i < arr_size; i++) {
        arr_a[i] = j;
        arr_b[i] = j * j;
    }

    cudaMalloc((void**)&dev_a, arr_bytes);
    cudaMalloc((void**)&dev_b, arr_bytes);
    cudaMalloc((void**)&partialdev_c, blocksPerGrid * sizeof(float));

    cudaMemcpy(dev_a, arr_a, arr_bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(dev_b, arr_b, arr_bytes, cudaMemcpyHostToDevice);

    kernel <<<blocksPerGrid,threadsPerBlock >>>(dev_a,dev_b,partialdev_c);

    cudaMemcpy(partial_c, partialdev_c, blocksPerGrid*sizeof(float), cudaMemcpyDeviceToHost);

    // Calculate the final dot product on the CPU side from the block partials.
    float c = 0;
    for (i = 0; i < blocksPerGrid; i++) {
        c += partial_c[i];
    }
    printf("The value of Dot product is : %f\n", c);

    cudaFree(dev_a);
    cudaFree(dev_b);
    cudaFree(partialdev_c);
    return 0;
}
27
#include <stdio.h>
#include <stdlib.h>

#define N 5
// Print helpers: a blank line, and a string followed by a newline.
#define BR() printf("\n")
#define BRS(str) printf("%s\n",str)

// Fixed-capacity stack backed by a device-heap array.
// NOTE(review): despite the name FIFO, push/pop below operate last-in
// first-out on `top` — confirm the intended semantics.
typedef struct {
    int top;        // index of the next free slot (== element count)
    int* data;      // backing storage, allocated in initialize_stack
    int stack_size; // capacity of `data`
}FIFO;

void exec();
void initialize_array(int*);
void print_array(int*);

int main(int argc, char const *argv[]) {
    exec();
    return 0;
}

// __device__ int i,j,k;

// Push new_data; returns -1 when full, 1 on success.
// NOTE(review): the guard permits a write at index == stack_size (one past
// the allocation) — looks off-by-one; verify against the capacity.
__device__ int push(int new_data,FIFO* stack_t){
    if(stack_t->top > stack_t->stack_size){
        return -1;
    }
    stack_t->data[stack_t->top] = new_data;
    stack_t->top++;
    return 1;
}

// Pop the top element; returns -1 when empty, 1 on success.
__device__ int pop(FIFO* stack_t){
    if(stack_t->top == 0){
        return -1;
    }
    stack_t->top--;
    return 1;
}

// Allocate the backing array from the device heap; -1 on allocation failure.
__device__ int initialize_stack(FIFO* stack_t,int stack_size){
    stack_t->top = 0;
    stack_t->stack_size = stack_size;
    stack_t->data = (int*) malloc(stack_size*sizeof(int));
    if(stack_t->data == NULL){
        return -1;
    }
    return 1;
}

// Peek at the top element; returns -1 when empty (indistinguishable from a
// stored -1 — acceptable here since the test only pushes positive values).
__device__ int top(FIFO* stack_t){
    if(stack_t->top == 0){
        return -1;
    }
    return stack_t->data[stack_t->top-1];
}

// 1 when the stack holds no elements, 0 otherwise.
__device__ int isEmpty(FIFO* stack_t){
    if(stack_t->top == 0)
        return 1;
    else
        return 0;
}

// Swap two ints through a temporary.
__device__ void swap(int *x, int *y)
{
    int tmp;
    tmp = *x;
    *x = *y;
    *y = tmp;
}

// Device-side dump of the shared input array, tagged with block/thread ids.
__device__ void print_d_array(int *array){
    int i;
    BRS(__func__);
    printf("blockIdx.x %d , threadIdx.x %d\n", blockIdx.x, threadIdx.x);
    for (i = 0; i < N; i++) {
        printf("%d ",array[i]);
    }//for
    BR();
}

// Exercises swap() and the per-thread device-heap stack: pushes 1..N-1,
// then pops them back, printing state transitions throughout.
__global__ void kernel_test_stack(int *d_array){
    int status;
    int i, x = 3, y = 6;
    FIFO stack1;   // each thread gets its own private stack
    print_d_array(d_array);
    // verify swap
    printf("x: %d y: %d\n", x, y);
    swap(&x,&y);
    printf("x: %d y: %d\n", x, y);
    // verify the stack
    if ((status = initialize_stack(&stack1, N)) == -1) {
        printf("initialize_stack error LINE:%d \n", __LINE__);
    }
    printf("blockIdx.x %d , threadIdx.x %d stack address %p x %p y%p \n", blockIdx.x, threadIdx.x, &stack1, &x, &y);
    if(isEmpty(&stack1)){
        BRS("Empty");
    }//if
    else{
        BRS("NOT Empty");
    }//else
    for(i = 1 ; i < N ; i++){
        push(i, &stack1);
        printf("push: %d\n",i);
        if(isEmpty(&stack1)){
            BRS("Empty");
            // printf("top: %d \n",top(&stack1));
        }//if
        else{
            BRS("NOT Empty");
            // printf("top: %d \n",top(&stack1));
        }//else
    }//for
    for(i = 1 ; i < N ; i++){
        pop(&stack1);
        BRS("pop");
        if(isEmpty(&stack1)){
            BRS("Empty");
            printf("top: %d \n",top(&stack1));
        }//if
        else{
            BRS("NOT Empty");
            printf("top: %d \n",top(&stack1));
        }//else
    }//for
}//Kernel

// Host driver: initializes the input array, copies it to the device, runs
// the stack test on 1 block x 2 threads, and copies the (unmodified) array back.
void exec(){
    int array[N];
    int *d_array;
    int iDev = 0;
    dim3 grid, block;
    cudaDeviceProp iProp;
    cudaSetDevice(iDev);
    cudaGetDeviceProperties(&iProp, iDev);
    printf("Device %d: %s\n", iDev, iProp.name);
    initialize_array(array);
    print_array(array);
    cudaMalloc((int**)&d_array, sizeof(array));
    cudaMemcpy(d_array, array, sizeof(array), cudaMemcpyHostToDevice);
    grid.x = 1;
    block.x = 2;
    kernel_test_stack<<<grid, block>>>(d_array);
    // Blocking copy also synchronizes with the kernel above.
    cudaMemcpy(array, d_array, sizeof(array), cudaMemcpyDeviceToHost);
    print_array(array);
    cudaFree(d_array);
    cudaDeviceReset();
}

// Fill with pseudo-random even-ish values in [0, 2N).
void initialize_array(int* array){
    int i;
    for (i = 0; i < N; i++) {
        array[i] = rand() % N * 2;
    }//for
}//function

// Host-side dump of the array, prefixed with the function name.
void print_array(int* array){
    int i;
    BRS(__func__);
    for (i = 0; i < N; i++) {
        printf("%d ",array[i]);
    }//for
    BR();
}//function
28
// nvcc -arch sm_21 -o test -run --keep --ptxas-options="-v" test.cu
#include <cuda.h>
#include <stdlib.h>
#include <stdio.h>

// Matrix transpose kernel — body intentionally empty (skeleton awaiting
// implementation; Input/Output layout and launch geometry not yet defined).
__global__ void transpose (int* Input, int* Output)
{
}
29
#include <stdio.h>
#include <math.h>
#include <stdlib.h>

// Note that any functions that want to be called from the kernel must be preceded with __device__

// Function we are integrating: f(x) = x^4.
__device__ float myFunction(float x){
    return pow(x,4);
}

// Area of a single trapezoid on [a, b].
__device__ float trapezoidal(float a, float b){
    return (b-a)*((myFunction(a)+myFunction(b))/2);
}

// Composite trapezoidal rule: n equal subintervals on [a, b].
__device__ float composite_trapezoidal(float a, float b, int n){
    float h=(b-a)/(n);
    float total=0;
    int i;
    for (i=0;i<n;i++){
        total=total+trapezoidal(a+i*h,a+(i+1)*h);
    }
    return total;
}

// Each of the P threads integrates its own equal slice of [A, B] with N
// trapezoids and stores its partial sum in arr[id]. Assumes the launch
// provides exactly P threads, so no bounds check is needed.
__global__ void kernel(float* arr, float A, float B, int P, int N){
    int id = blockIdx.x * blockDim.x + threadIdx.x;
    float intervalWidth = (B-A)/(P);
    float intervalStart = A+(intervalWidth)*(id);
    float intervalEnd = intervalStart+intervalWidth;
    arr[id] = composite_trapezoidal(intervalStart,intervalEnd,N);
}

int main(int argc, char** argv){
    // BUG FIX: the program reads argv[1..3], so it needs argc >= 4. The old
    // check (argc < 3) let "./a 1 2" fall through and pass argv[3] == NULL
    // to atoi, crashing instead of printing the usage message.
    if (argc<4){
        printf("Please enter a,b,N\n");
        return 1;
    }
    float A=atof(argv[1]);
    float B=atof(argv[2]);
    int N=atoi(argv[3]);
    printf("Integrating x^4 from %.3f to %.3f with %d points\n", A, B, N);

    // How many threads will we use and how much data is in each thread?
    int elements = 512;
    int bytes = elements * sizeof(float);

    // Create the partial-sum array on the host and on the GPU.
    float *hostArray = 0;
    float *deviceArray = 0;
    hostArray = (float*) malloc(bytes);
    cudaMalloc((void**)&deviceArray, bytes);

    int blockSize = 128;
    int gridSize = elements / blockSize;   // exact: elements is a multiple of blockSize

    // Each GPU thread integrates its own sub-interval.
    kernel<<<gridSize,blockSize>>>(deviceArray, A, B, elements, N);

    // Gather all the partial sums (blocking copy also syncs with the kernel).
    cudaMemcpy(hostArray, deviceArray, bytes, cudaMemcpyDeviceToHost);

    // Reduce the partial sums to a single integral.
    float sum = 0;
    for(int i=0; i < elements; ++i){
        sum += hostArray[i];
    }
    printf("Integrating x^4 from %.3f to %.3f with %d points is: %.3f\n", A, B, N, sum);

    free(hostArray);
    cudaFree(deviceArray);
    return 0;
}
30
#include "cuda_runtime.h"
#include "stdio.h"
#include "stdlib.h"
#include "string.h"
#include "time.h"

#define A_w 50
#define A_h 50
#define B_w 32
#define B_h 32

// Dense row-major matrix.
typedef struct{
    int width;
    int height;
    float * elements;
}Matrix;

// Kronecker product C = A (x) B, iterating over C's cells and mapping each
// back to its (A, B) factor pair. Assumes C is A.height*B.height by
// A.width*B.width.
void rightKronecker1(Matrix A, Matrix B, Matrix C){
    for(int c_row=0; c_row<C.height; c_row++){
        for(int c_col=0; c_col<C.width; c_col++){
            C.elements[c_col + c_row*C.width] =
                A.elements[c_col/B.width + c_row/B.height * A.width] *
                B.elements[c_col%B.width + c_row%B.height*B.width];
        }
    }
}

// Kronecker product C = A (x) B, iterating over every (A cell, B cell) pair
// and scattering each product to its place in C. Same contract as above.
void rightKronecker2(Matrix A, Matrix B, Matrix C){
    for(int a_row=0; a_row<A.height; a_row++){
        for(int a_col=0; a_col<A.width; a_col++){
            for(int b_row=0; b_row<B.height; b_row++){
                for(int b_col=0; b_col<B.width; b_col++){
                    C.elements[(b_col+a_col*B.width)+(b_row+a_row*B.height)*A.width*B.width] =
                        A.elements[a_col+a_row*A.width] * B.elements[b_col+b_row*B.width];
                }
            }
        }
    }
}

// Fill `array` with `num` pseudo-random values in [0, 5).
void generatorNum(float* array, int num)
{
    for(int i=0;i<num;i++)
    {
        array[i]=rand()%5;
    }
}

// Command-line help (kept for the commented-out argument parsing).
void printUsage(void)
{
    printf("\n");
    printf("The program aims to calculate the product of matrix A and B\n");
    printf("-h matrix A row num\n");
    printf("-w matrix A col num\n");
    printf("-H matrix B row num\n");
    printf("-W matrix B col num\n");
}

int main(int argc,char** argv){
    Matrix A, B, C1, C2;
    A.width=A_w;A.height=A_h;
    B.width=B_w;B.height=B_h;
    C1.width=A_w*B_w;C1.height=A_h*B_h;
    C2.width=A_w*B_w;C2.height=A_h*B_h;
    A.elements=(float *)malloc(A.width*A.height*sizeof(float));
    B.elements=(float *)malloc(B.width*B.height*sizeof(float));
    C1.elements=(float *)malloc(C1.width*C1.height*sizeof(float));
    C2.elements=(float *)malloc(C2.width*C2.height*sizeof(float));

    generatorNum(A.elements,A.width*A.height);
    generatorNum(B.elements,B.width*B.height);
    memset(C1.elements,0,C1.width*sizeof(float)*C1.height);
    memset(C2.elements,0,C2.width*sizeof(float)*C2.height);

    // Time both implementations back to back.
    clock_t start,finish1, finish2;
    start=clock();
    rightKronecker1(A, B, C1);
    finish1=clock();
    rightKronecker2(A, B, C2);
    finish2=clock();

    printf("Difference between 2 method:\n");
    // BUG FIX: the original overwrote `diff` on every cell and so reported
    // only the last element's difference; accumulate absolute differences
    // over the whole matrix instead.
    float diff = 0;
    for(int i=0;i<C2.height;i++){
        for(int j=0;j<C2.width;j++){
            float d = C2.elements[j+i*C2.width] - C1.elements[j+i*C2.width];
            diff += (d < 0) ? -d : d;
        }
    }
    printf("%f\n", diff);
    printf("method1 cost time %f ms\n",(finish1-start)*1000.0/CLOCKS_PER_SEC);
    printf("method2 cost time %f ms\n",(finish2-finish1)*1000.0/CLOCKS_PER_SEC);

    // BUG FIX: the original leaked all four host buffers.
    free(A.elements);
    free(B.elements);
    free(C1.elements);
    free(C2.elements);
    return 0;
}
31
/* Block size X: 32 */
// One block per horizontal element (column); a single warp of 32 threads
// strides down that column's vertical levels. Rescales fct_plus/fct_minus
// in place, clamping the correction factor to at most 1.0.
__global__ void fct_ale_b2(const int maxLevels, const double dt, const double fluxEpsilon, const int * __restrict__ nLevels, const double * __restrict__ area_inv, const double * __restrict__ fct_ttf_max, const double * __restrict__ fct_ttf_min, double * __restrict__ fct_plus, double * __restrict__ fct_minus)
{
    const int column = blockIdx.x;
    const int levelCount = nLevels[column] - 1;

    for ( int level = threadIdx.x; level < levelCount; level += 32 )
    {
        const int item = (column * maxLevels) + level;
        // area_inv carries one extra entry per preceding column, hence the offset.
        const double invArea = area_inv[item + column];
        const double plusScale = fct_ttf_max[item] / (fct_plus[item] * dt * invArea + fluxEpsilon);
        const double minusScale = fct_ttf_min[item] / (fct_minus[item] * dt * invArea - fluxEpsilon);
        fct_plus[item] = fmin(1.0, plusScale);
        fct_minus[item] = fmin(1.0, minusScale);
    }
}
32
#include "includes.h"
using namespace std;

// Stores `value` at data[idx]. Only thread 0 of each block performs the
// write, so a multi-thread launch still results in a single logical store.
__global__ void setValue(float *data, int idx, float value)
{
    if (threadIdx.x != 0)
        return;
    data[idx] = value;
}
33
#include "includes.h"

// Logistic sigmoid 1 / (1 + e^-x), using the fast device exponential.
__device__ float sigmoid(float x)
{
    return 1.0f / (1 + __expf(-x));
}

// Applies the sigmoid element-wise, A = sigmoid(Z), over a Z_x_dim x Z_y_dim
// matrix stored contiguously. One thread per element with a tail guard.
__global__ void sigmoidActivationForward(float* Z, float* A, int Z_x_dim, int Z_y_dim)
{
    int index = blockIdx.x * blockDim.x + threadIdx.x;
    int elementCount = Z_x_dim * Z_y_dim;
    if (index >= elementCount)
        return;
    A[index] = sigmoid(Z[index]);
}
34
#include <stdio.h>
#include <time.h>
#include <stdlib.h>
#include <sys/time.h>

// #define NUM_PARTICLES 10000
// #define NUM_ITERATIONS 10000
// int TPB = 16;
#define SEED 10
#define EPSILON 1e-5   // tolerance for CPU-vs-GPU position comparison

// A simulated particle: 3D position and 3D velocity.
typedef struct {
    float3 position;
    float3 velocity;
} Particle;

// Deterministically generates a "random" float, provided a seed and 3 integers.
// Callable from both host and device so CPU and GPU runs stay comparable.
__host__ __device__ float gen_random(int seed, int a, int b, int c) {
    return (float)((seed * a + b) % c) / c;
}

// Given an array of particles and an index, print that particle.
void printParticle(Particle* particles, int index){
    printf("%f %f %f %f %f %f\n", particles[index].position.x, particles[index].position.y, particles[index].position.z, particles[index].velocity.x, particles[index].velocity.y, particles[index].velocity.z);
}

// Compare two arrays of Particles. If their position coordinates are all within EPSILON of each other,
// return true, else false. (Velocities are deliberately not compared.)
__host__ bool arraysMatch(Particle* arr1, Particle* arr2, int num_particles)
{
    for (int i = 0; i < num_particles; i++) {
        if (fabs(arr1[i].position.x - arr2[i].position.x) > EPSILON ||
            fabs(arr1[i].position.y - arr2[i].position.y) > EPSILON ||
            fabs(arr1[i].position.z - arr2[i].position.z) > EPSILON)
            return false;
    }
    return true;
}

// Get the current wall-clock time in seconds (microsecond resolution).
double cpuSecond() {
    struct timeval tp;
    gettimeofday(&tp,NULL);
    return ((double)tp.tv_sec + (double)tp.tv_usec*1.e-6);
}

// Replaces the x, y and z values in a float3 with random values between 0 and 1.
void randomizeFloat3(float3* f3) {
    f3->x = (float) rand() / RAND_MAX;
    f3->y = (float) rand() / RAND_MAX;
    f3->z = (float) rand() / RAND_MAX;
}

// Randomizes the position and velocity of all Particles in an array.
void randomizeParticles(Particle* particles, int num_particles) {
    srand(0);   // fixed seed: every run starts from the same initial state
    for (int i = 0; i < num_particles; i++) {
        randomizeFloat3(&particles[i].position);
        randomizeFloat3(&particles[i].velocity);
    }
}

// Updates a particle's position by its velocity, then updates its velocity
__host__ __device__ void updateParticle(Particle* particle, int id, int iter, int num_particles) {
    int dt = 1;   // fixed unit time step

    // update position
    particle->position.x += dt * particle->velocity.x;
    particle->position.y += dt * particle->velocity.y;
    particle->position.z += dt * particle->velocity.z;

    // update the velocity randomly
    // NOTE(review): all three components receive the same pseudo-random
    // increment, so velocity drifts along (1,1,1) — confirm this is intended.
    particle->velocity.x += gen_random(SEED, id, iter, num_particles);
    particle->velocity.y += gen_random(SEED, id, iter, num_particles);
    particle->velocity.z += gen_random(SEED, id, iter, num_particles);
}

// CPU function that updates a given particle.
void cpu_updatePositionAndVelocity(Particle* particle, int id, int iter, int num_particles) {
    updateParticle(particle, id, iter, num_particles);
}

// Kernel that finds a given Particle's ID then updates it if within range.
__global__ void gpu_updatePositionAndVelocity(Particle* particles, int iter, int num_particles) {
    const int id = blockIdx.x * blockDim.x + threadIdx.x;
    if (id >= num_particles) // If out of bounds, ignore the Particle.
        return;
    else
        updateParticle(&particles[id], id, iter, num_particles);
}

// Perform the update step for all Particles in the array on CPU with a for loop.
void cpu_updateParticles(Particle* particles, int iter, int num_particles) {
    // srand(time(NULL))
    for (int i = 0; i < num_particles; i++) {
        cpu_updatePositionAndVelocity(&particles[i], i, iter, num_particles);
    }
}

// Perform the update step for all Particles in the array by launching GPU kernels.
void gpu_updateParticles(Particle* particles, int iter, int num_particles, int tpb) {
    // Ceil-divide so a partial final block covers the array tail.
    gpu_updatePositionAndVelocity<<<(num_particles + tpb - 1)/tpb, tpb>>>(particles, iter, num_particles);
}

// Runs the particle simulation on the GPU (and optionally the CPU for
// verification), timing both and comparing the final positions.
// Usage: ./a NUM_PARTICLES NUM_ITERATIONS TPB INCLUDE_CPU
int main(int argc, char** argv)
{
    printf("Running the simulations with the following params:\n");
    if (argc < 5) {
        printf("Usage: ./a NUM_PARTICLES NUM_ITERATIONS TPB INCLUDE_CPU\nExample usage: ./a 10000 10000 32 include_cpu\n");
        return -1;
    }
    // reading the command line arguments, without any kind of error checking
    const int num_particles = (int) strtol(argv[1], NULL, 10); // e.g. 10000 - NULL is the endpointer and 10 is the base
    const int num_iterations = (int) strtol(argv[2], NULL, 10); // e.g. 10000
    const int tpb = (int) strtol(argv[3], NULL, 10); // e.g. 32
    const char* include_cpu = argv[4];
    printf("======== %s: %d, %s: %d, %s: %d\n\n", "num_particles", num_particles, "num_iterations", num_iterations, "tpb", tpb);

    // Declare variables
    // NOTE(review): g_result is only referenced by commented-out code.
    Particle *c_particles, *g_particles, *g_result;
    double iStart, iElaps;

    // Initialize array for CPU
    c_particles = (Particle*) malloc(num_particles*sizeof(Particle));
    randomizeParticles(c_particles, num_particles);

    // Initialize array for GPU - particle positions/velocities in device memory are a copy of those in host memory
    // g_result = (Particle*) malloc(num_particles*sizeof(Particle)); // Used to store the result of GPU simulation
    // cudaMallocHost(&g_result, num_particles*sizeof(Particle));
    // cudaMalloc(&g_particles, num_particles*sizeof(Particle));
    cudaMallocManaged(&g_particles, num_particles*sizeof(Particle));
    iStart = cpuSecond();
    memcpy(g_particles, c_particles, num_particles*sizeof(Particle));
    double copy_time = cpuSecond() - iStart;

    // CPU Version
    if (strcmp(include_cpu, "include_cpu") == 0) { // perform CPU version if wanted by the user
        printf("CPU simulation started...\n");
        fflush(stdout);
        iStart = cpuSecond();
        for (int i = 0; i < num_iterations; i++) {
            cpu_updateParticles(c_particles, i, num_particles);
        }
        iElaps = cpuSecond() - iStart;
        printf("Done in %f!\n\n", iElaps);
        fflush(stdout);
    }
    else printf("Excluded the CPU experiment...\n\n");

    // GPU Version
    printf("GPU simulation started...\n");
    fflush(stdout);
    iStart = cpuSecond();
    for (int i = 0; i < num_iterations; i++) {
        // cudaMemcpy(g_particles, g_result, num_particles*sizeof(Particle), cudaMemcpyHostToDevice);
        gpu_updateParticles(g_particles, i, num_particles, tpb);
        // Each iteration depends on the previous one, so synchronize per step.
        cudaDeviceSynchronize();
        // cudaMemcpy(g_result, g_particles, num_particles*sizeof(Particle), cudaMemcpyDeviceToHost);
    }
    iElaps = cpuSecond() - iStart;
    printf("Done in %f!\n\n", iElaps + copy_time);
    fflush(stdout);

    // copying the result back from the GPU memory to the CPU memory
    // cudaMemcpy(g_result, g_particles, num_particles*sizeof(Particle), cudaMemcpyDeviceToHost);

    // if CPU version is performed, then compare it with GPU version
    // (managed memory: g_particles is directly readable on the host here)
    if (strcmp(include_cpu, "include_cpu") == 0) printf(arraysMatch(g_particles, c_particles, num_particles) ? "Results match!\n" : "Results are wrong!\n");
    // printf(arraysMatch(g_result, c_particles, num_particles) ? "Results match!\n" : "Results are wrong!\n");
    printf("========================================================== \n\n\n");

    // Free arrays
    free(c_particles);
    cudaFree(g_particles);
}
35
// Rotate `vec` about the x axis by `angle` radians.
__device__ void rot_x(float3 *vec, float angle)
{
    float c = cosf(angle);
    float s = sinf(angle);
    float y0 = vec->y;
    vec->y = y0 * c - vec->z * s;
    vec->z = y0 * s + vec->z * c;
}

// Rotate `vec` about the y axis by `angle` radians.
__device__ void rot_y(float3 *vec, float angle)
{
    float c = cosf(angle);
    float s = sinf(angle);
    float x0 = vec->x;
    vec->x = x0 * c + vec->z * s;
    vec->z = vec->z * c - x0 * s;
}

// Rotate `vec` about the z axis by `angle` radians.
__device__ void rot_z(float3 *vec, float angle)
{
    float c = cosf(angle);
    float s = sinf(angle);
    float x0 = vec->x;
    vec->x = x0 * c - vec->y * s;
    vec->y = x0 * s + vec->y * c;
}

// Apply the three axis rotations in x, y, z order.
__device__ void rot_vec(float3 *vec, float3 angle)
{
    rot_x(vec, angle.x);
    rot_y(vec, angle.y);
    rot_z(vec, angle.z);
}
36
#include "includes.h"

// Fills r_filter_ with weighted-sinc interpolation filter coefficients:
// a sinc centered at r_soff_ (scaled by r_beta_ and r_decfactor_inverse_),
// optionally multiplied by a raised-cosine window when i_weight_ == 1.
// One thread per coefficient.
// NOTE(review): r_relfiltlen_inverse_ is unused here; kept for ABI stability.
__global__ void cuSetupSincKernel_kernel(float *r_filter_, const int i_filtercoef_, const float r_soff_, const float r_wgthgt_, const int i_weight_, const float r_soff_inverse_, const float r_beta_, const float r_decfactor_inverse_, const float r_relfiltlen_inverse_)
{
    int i = threadIdx.x + blockDim.x*blockIdx.x;
    // NOTE(review): the guard admits i == i_filtercoef_, i.e. the filter is
    // assumed to hold i_filtercoef_ + 1 taps — confirm against the host allocation.
    if(i > i_filtercoef_) return;
    float r_wa = i - r_soff_;
    // Raised-cosine window centered on the symmetry offset r_soff_.
    // BUG FIX: the original called double-precision cos()/sin() on float
    // operands, silently promoting the whole expression to double inside a
    // float kernel; cosf()/sinf() keep the math single precision.
    float r_wgt = (1.0f - r_wgthgt_) + r_wgthgt_*cosf(PI*r_wa*r_soff_inverse_);
    float r_s = r_wa*r_beta_*r_decfactor_inverse_*PI;
    float r_fct;
    // sinc(r_s) with the removable singularity at zero handled explicitly.
    if(r_s != 0.0f) {
        r_fct = sinf(r_s)/r_s;
    }
    else {
        r_fct = 1.0f;
    }
    if(i_weight_ == 1) {
        r_filter_[i] = r_fct*r_wgt;
    }
    else {
        r_filter_[i] = r_fct;
    }
    //printf("kernel %d %f\n", i, r_filter_[i]);
}
37
#include "includes.h"
using namespace std;

// Edge storage in compressed-sparse-column form, plus per-vertex index ranges.
struct compressed_sparse_column {
    int* data;
    int* row;
    int* column;
    int* index_column;
    int* index_row_start;
    int* index_row_end;
};

// DAG with precomputed per-vertex root/leaf/singleton flags.
struct graph {
    compressed_sparse_column* dataset;
    bool* roots;
    bool* leaves;
    bool* singletons;
    int vertices;
    int edges;
};

// Propagates pre/post order numbers level by level, starting from the roots.
// NOTE(review): pre/post are computed into device-heap scratch that is freed
// on exit, so the kernel currently has no observable output — confirm whether
// the results were meant to be written back through a parameter.
__global__ void pre_post_order(int* depth, int* zeta, int* zeta_tilde, graph* dataset_graph)
{
    int* pre = new int[dataset_graph->vertices];
    int* post = new int[dataset_graph->vertices];
    memset(pre, 0, dataset_graph->vertices * sizeof(int));
    memset(post, 0, dataset_graph->vertices * sizeof(int));

    // Marks edges whose source has already been processed.
    bool* incoming_edges = new bool[dataset_graph->edges];
    memset(incoming_edges, false, dataset_graph->edges * sizeof(bool));

    // Current frontier: start from the roots.
    bool* q = new bool[dataset_graph->vertices];
    // FIX: q and roots are bool arrays; the original copied
    // sizeof(int) * vertices bytes — a 4x out-of-bounds read and write.
    memcpy(q, dataset_graph->roots, sizeof(bool) * dataset_graph->vertices);

    while (true) {
        bool* p = new bool[dataset_graph->vertices];
        memset(p, false, dataset_graph->vertices * sizeof(bool));
        bool global_check = false;
        for (int i = 0; i < dataset_graph->vertices; i++) {
            if (q[i]) {
                int pre_node = pre[i];
                int post_node = post[i];
                // Walk the outgoing edges of i (consecutive CSC entries whose
                // column equals i).
                for (int j = dataset_graph->dataset->index_column[i]; dataset_graph->dataset->column[j] == i; j++) {
                    int neighbor_vertex = dataset_graph->dataset->row[j];
                    // zeta[i] = undefined!
                    pre[neighbor_vertex] = pre_node + zeta_tilde[neighbor_vertex];
                    post[neighbor_vertex] = post_node + zeta_tilde[neighbor_vertex];
                    incoming_edges[j] = true;
                    // Neighbor joins the next frontier only after all of its
                    // incoming edges have been visited.
                    bool flag = true;
                    for (int k = 0; k < dataset_graph->edges; k++) {
                        if (dataset_graph->dataset->row[k] == neighbor_vertex && !incoming_edges[k]) {
                            flag = false;
                            break;
                        }
                    }
                    if (flag) {
                        global_check = true;
                        p[neighbor_vertex] = true;
                    }
                }
                pre[i] = pre_node + depth[i];
                post[i] = post_node + (zeta[i] - 1);
            }
        }
        // FIX: release the old frontier before advancing; the original leaked
        // one vertices-sized array per level.
        delete[] q;
        q = p;
        if (!global_check) {
            break;
        }
    }

    // FIX: free the remaining device-heap allocations (originally leaked).
    delete[] q;
    delete[] pre;
    delete[] post;
    delete[] incoming_edges;
}
38
#include "includes.h"

// Naive out-of-place transpose: Resultat[j*height + i] = Source[i*width + j].
// One thread per source pixel; out-of-range threads do nothing.
__global__ void gpu_transpo_kernel_naive(u_char *Source, u_char *Resultat, unsigned width, unsigned height) {
    int col = blockIdx.x * blockDim.x + threadIdx.x;
    int row = blockIdx.y * blockDim.y + threadIdx.y;
    // Guard clause instead of the original empty-if/else.
    if ((row < 0) || (row >= height) || (col < 0) || (col >= width))
        return;
    Resultat[col * height + row] = Source[row * width + col];
}
39
#include <cstdio>
#include <cstdlib>
#include <time.h>
#include "cuda_timer.cuh"

#define SafeTimerCall(err) __safeTimerCall(err, __FILE__, __LINE__)

// Abort with a diagnostic if a CUDA call made by the timer failed.
inline void __safeTimerCall(cudaError err, const char *file, const int line) {
#pragma warning(push)
#pragma warning(disable: 4127) // Prevent warning on do-while(0);  (FIX: the original had this text outside a comment)
    do {
        if (cudaSuccess != err) {
            fprintf(stderr, "CudaTimer failed at %s:%i : %s\n", file, line, cudaGetErrorString(err));
            exit(-1);
        }
    } while (0);
#pragma warning(pop)
    return;
}

// Create the begin/end events once per timer instance.
CudaTimer::CudaTimer() {
    SafeTimerCall(cudaEventCreate(&_begEvent));
    SafeTimerCall(cudaEventCreate(&_endEvent));
    return;
}

CudaTimer::~CudaTimer() {
    SafeTimerCall(cudaEventDestroy(_begEvent));
    SafeTimerCall(cudaEventDestroy(_endEvent));
    return;
}

// Record the start event on the default stream.
void CudaTimer::start() {
    SafeTimerCall(cudaEventRecord(_begEvent, 0));
    return;
}

// Record the stop event on the default stream.
void CudaTimer::stop() {
    SafeTimerCall(cudaEventRecord(_endEvent, 0));
    return;
}

// Return the elapsed time between start() and stop(), in seconds.
float CudaTimer::value() {
    // Block until the stop event has actually completed on the device.
    SafeTimerCall(cudaEventSynchronize(_endEvent));
    float timeVal;
    SafeTimerCall(cudaEventElapsedTime(&timeVal, _begEvent, _endEvent));
    // FIX: cudaEventElapsedTime reports milliseconds. The original divided by
    // CLOCKS_PER_SEC, which is 1000 only under MSVC; on POSIX it is 1000000,
    // making the result off by 1000x. Convert ms -> s explicitly.
    return timeVal / 1000.0f;
}
40
#include "cuda.h"

typedef long long int64;

// Forward gather: one thread per receiver. Receiver i copies its nt-sample
// time series out of the wavefield selected by rcvtype[i]
// (0=vx, 1=vy, 2=sigmaxx, 3=sigmayy, 4=sigmaxy) into out[nt*i .. nt*i+nt-1].
// Each field is stored as nt stacked (NX+2) x (NY+2) snapshots; rcvi/rcvj
// are 1-based grid indices.
__global__ void ReceiveFun(double *out, const double*vx, const double*vy, const double*sigmaxx, const double*sigmayy, const double*sigmaxy, int64 nt, const int64 *rcvi, const int64 *rcvj, const int64 *rcvtype, int64 nrcv, int64 NX, int64 NY){
    int i = blockIdx.x*blockDim.x + threadIdx.x;
    if (i>=nrcv) return;
    // Flattened offset of receiver i within one snapshot (1-based -> 0-based).
    int idx = (rcvi[i]-1)*(NY+2) + rcvj[i]-1;
    switch (rcvtype[i]) {
        case 0:
            for(int k=0;k<nt;k++) out[nt*i+k] = vx[k*(NX+2)*(NY+2)+idx];
            break;
        case 1:
            for(int k=0;k<nt;k++) out[nt*i+k] = vy[k*(NX+2)*(NY+2)+idx];
            break;
        case 2:
            for(int k=0;k<nt;k++) out[nt*i+k] = sigmaxx[k*(NX+2)*(NY+2)+idx];
            break;
        case 3:
            for(int k=0;k<nt;k++) out[nt*i+k] = sigmayy[k*(NX+2)*(NY+2)+idx];
            break;
        case 4:
            for(int k=0;k<nt;k++) out[nt*i+k] = sigmaxy[k*(NX+2)*(NY+2)+idx];
            break;
        default:
            break;
    }
}

// Host wrapper for ReceiveFun. nx/ny live in device memory, so their values
// are fetched to the host before computing the launch configuration.
void forwardGPU(double *out, const double*vx, const double*vy, const double*sigmaxx, const double*sigmayy, const double*sigmaxy, int64 nt, const int64 *rcvi, const int64 *rcvj, const int64 *rcvtype, int64 nrcv, const int64* nx, const int64* ny){
    long long NX, NY;
    cudaMemcpy(&NX, nx, sizeof(long long), cudaMemcpyDeviceToHost);
    cudaMemcpy(&NY, ny, sizeof(long long), cudaMemcpyDeviceToHost);
    cudaDeviceSynchronize();
    // One thread per receiver, 256 threads per block, rounded up.
    ReceiveFun<<<(nrcv+255)/256, 256>>>(out, vx, vy, sigmaxx, sigmayy, sigmaxy, nt, rcvi, rcvj, rcvtype, nrcv, NX, NY);
}

// Fill out[0..size) with zeros.
__global__ void Zero(const long long size, double* out) {
    int i = blockIdx.x*blockDim.x + threadIdx.x;
    if(i<size) out[i] = 0.0;
}

// Backward scatter: accumulates the output gradients d_out back into the
// gradient field selected by rcvtype[i] — the adjoint of ReceiveFun's gather.
__global__ void ReceiveGrad( double*d_vx, double*d_vy, double*d_sigmaxx, double*d_sigmayy, double*d_sigmaxy, const double *d_out, int64 nt, const int64 *rcvi, const int64 *rcvj, const int64 *rcvtype, int64 nrcv, int64 NX, int64 NY) {
    int i = blockIdx.x*blockDim.x + threadIdx.x;
    if(i>=nrcv) return;
    int idx = (rcvi[i]-1)*(NY+2) + rcvj[i]-1;
    switch (rcvtype[i]) {
        case 0:
            for(int k=0;k<nt;k++) d_vx[k*(NX+2)*(NY+2)+idx] += d_out[nt*i+k];
            break;
        case 1:
            for(int k=0;k<nt;k++){
                // printf("Top gradients: %f\n", d_out[nt*i+k]);
                d_vy[k*(NX+2)*(NY+2)+idx] += d_out[nt*i+k];
            }
            break;
        case 2:
            for(int k=0;k<nt;k++) d_sigmaxx[k*(NX+2)*(NY+2)+idx] += d_out[nt*i+k];
            break;
        case 3:
            for(int k=0;k<nt;k++) d_sigmayy[k*(NX+2)*(NY+2)+idx] += d_out[nt*i+k];
            break;
        case 4:
            for(int k=0;k<nt;k++) d_sigmaxy[k*(NX+2)*(NY+2)+idx] += d_out[nt*i+k];
            break;
        default:
            break;
    }
}

// Host wrapper for the backward pass: zeroes every gradient field
// (nt full snapshots each) and then scatter-adds the receiver gradients.
void backwardGPU( double*d_vx, double*d_vy, double*d_sigmaxx, double*d_sigmayy, double*d_sigmaxy, const double *d_out, int64 nt, const int64 *rcvi, const int64 *rcvj, const int64 *rcvtype, int64 nrcv, const int64* nx, const int64* ny){
    long long NX, NY;
    cudaMemcpy(&NX, nx, sizeof(long long), cudaMemcpyDeviceToHost);
    cudaMemcpy(&NY, ny, sizeof(long long), cudaMemcpyDeviceToHost);
    cudaDeviceSynchronize();
    Zero<<<(nt*(NX+2)*(NY+2)+255)/256, 256>>>(nt*(NX+2)*(NY+2), d_vx);
    Zero<<<(nt*(NX+2)*(NY+2)+255)/256, 256>>>(nt*(NX+2)*(NY+2), d_vy);
    Zero<<<(nt*(NX+2)*(NY+2)+255)/256, 256>>>(nt*(NX+2)*(NY+2), d_sigmaxx);
    Zero<<<(nt*(NX+2)*(NY+2)+255)/256, 256>>>(nt*(NX+2)*(NY+2), d_sigmayy);
    Zero<<<(nt*(NX+2)*(NY+2)+255)/256, 256>>>(nt*(NX+2)*(NY+2), d_sigmaxy);
    ReceiveGrad<<<(nrcv+255)/256, 256>>>(d_vx, d_vy, d_sigmaxx, d_sigmayy, d_sigmaxy, d_out, nt, rcvi, rcvj, rcvtype, nrcv, NX, NY);
}
41
#include <cuda_runtime.h>
#include <device_launch_parameters.h>
#include <time.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>

// Grid dimensions, thread-block shape and number of simulated timesteps.
#define X_SIZE 10240
#define Y_SIZE 16384
#define ARRAY_SIZE (X_SIZE*Y_SIZE)
#define BLOCK_SIZE_X 32
#define BLOCK_SIZE_Y 32
#define TIMESTEPS 1000

const char* input_file_name = "input.dat";
const char* output_file_name = "output.dat";

void prtdat(int nx, int ny, float *current, const char *fnam);
void inidat(int nx, int ny, float *u);

// Dump the main properties of a CUDA device to stdout.
void printDevProp(cudaDeviceProp devProp)
{
    printf("Major revision number: %d\n", devProp.major);
    printf("Minor revision number: %d\n", devProp.minor);
    printf("Name: %s\n", devProp.name);
    // FIX: the original format string had three conversions for four
    // arguments, and used %u for size_t fields; size_t takes %zu.
    printf("Total global memory: %zu or %zuKB or %zuMB or %zuGB\n",
           devProp.totalGlobalMem,
           devProp.totalGlobalMem / 1024,
           devProp.totalGlobalMem / (1024*1024),
           devProp.totalGlobalMem / 1024 / 1024 / 1024);
    printf("Total shared memory per block: %zu\n", devProp.sharedMemPerBlock);
    printf("Total registers per block: %d\n", devProp.regsPerBlock);
    printf("Warp size: %d\n", devProp.warpSize);
    printf("Maximum memory pitch: %zu\n", devProp.memPitch);
    printf("Maximum threads per block: %d\n", devProp.maxThreadsPerBlock);
    for (int i = 0; i < 3; ++i)
        printf("Maximum dimension %d of block: %d\n", i, devProp.maxThreadsDim[i]);
    for (int i = 0; i < 3; ++i)
        printf("Maximum dimension %d of grid: %d\n", i, devProp.maxGridSize[i]);
    printf("Clock rate: %d\n", devProp.clockRate);
    printf("Total constant memory: %zu\n", devProp.totalConstMem);
    printf("Texture alignment: %zu\n", devProp.textureAlignment);
    printf("Concurrent copy and execution: %s\n", (devProp.deviceOverlap ? "Yes" : "No"));
    printf("Number of multiprocessors: %d\n", devProp.multiProcessorCount);
    printf("Kernel execution timeout: %s\n", (devProp.kernelExecTimeoutEnabled ? "Yes" : "No"));
    return;
}

// One explicit heat-diffusion step. Interior-of-block points read their
// neighbours from a shared-memory tile; block-border points (whose
// neighbours live in another block) fall back to global memory.
// Grid borders are left untouched (fixed boundary).
__global__ void kernelCalculateNewGenerationWithSharedMemory(float* current, float* next, int ny, int nx)
{
    int ix = threadIdx.x + blockIdx.x * blockDim.x;
    int iy = threadIdx.y + blockIdx.y * blockDim.y;
    const float cx = 0.1;
    const float cy = 0.1;
    int me = ix + iy * nx,
        east = ix + 1 + iy * nx,
        west = ix - 1 + iy * nx,
        north = ix + (iy - 1) * nx,
        south = ix + (iy + 1) * nx;

    // Stage this block's tile of `current` in shared memory.
    __shared__ float dev_sharedMem[BLOCK_SIZE_Y][BLOCK_SIZE_X];
    dev_sharedMem[threadIdx.y][threadIdx.x] = current[me];
    __syncthreads();

    /* The point to update doesn't need an element that's "included" in this block */
    if ((threadIdx.x > 0) && (threadIdx.x < (BLOCK_SIZE_X - 1)) &&
        (threadIdx.y > 0) && (threadIdx.y < (BLOCK_SIZE_Y - 1))) {
        next[me] = cx * (dev_sharedMem[threadIdx.y][threadIdx.x-1] + dev_sharedMem[threadIdx.y][threadIdx.x+1]
                         - 2.0f * dev_sharedMem[threadIdx.y][threadIdx.x])
                 + cy * (dev_sharedMem[threadIdx.y - 1][threadIdx.x] + dev_sharedMem[threadIdx.y + 1][threadIdx.x]
                         - 2.0f * dev_sharedMem[threadIdx.y][threadIdx.x])
                 + dev_sharedMem[threadIdx.y][threadIdx.x];
    } else if (ix > 0 && ix < X_SIZE - 1 && iy > 0 && iy < Y_SIZE - 1) {
        next[me] = cx * (current[east] + current[west] - 2.0f * current[me])
                 + cy * (current[south] + current[north] - 2.0f * current[me])
                 + current[me];
    }
}

// Plain global-memory version of the same update (kept for comparison).
__global__ void kernelCalculateNewGeneration(float* current, float* next, int ny, int nx)
{
    int ix = threadIdx.x + blockIdx.x * blockDim.x;
    int iy = threadIdx.y + blockIdx.y * blockDim.y;
    const float cx = 0.1;
    const float cy = 0.1;
    int me = ix + iy * nx,
        east = ix + 1 + iy * nx,
        west = ix - 1 + iy * nx,
        north = ix + (iy - 1) * nx,
        south = ix + (iy + 1) * nx;
    if (ix > 0 && ix < X_SIZE-1 && iy > 0 && iy < Y_SIZE-1) {
        next[me] = cx * (current[east] + current[west] - 2.0f * current[me])
                 + cy * (current[south] + current[north] - 2.0f * current[me])
                 + current[me];
    }
}

#define CEILDIV(a,b) (((a)+(b)-1)/(b))
#define gpuErrchk(ans) { gpuAssert((ans), __FILE__, __LINE__); }
// Print a diagnostic and optionally abort when a CUDA call fails.
inline void gpuAssert(cudaError_t code, const char *file, int line, bool abort = true)
{
    if (code != cudaSuccess) {
        fprintf(stderr, "GPUassert: %s %s %d\n", cudaGetErrorString(code), file, line);
        if (abort) exit(code);
    }
}

int main()
{
    float *dev_heatmap, *heatmap;
    float *dev_current_map, *dev_next_map;
    int iz;
    float duration = 0;
    cudaEvent_t startEvent, endEvent;
    gpuErrchk(cudaEventCreate(&startEvent));
    gpuErrchk(cudaEventCreate(&endEvent));
    heatmap = (float*)malloc(ARRAY_SIZE*sizeof(float));
    printf("Grid is %dx%d and block is %dx%d\n", CEILDIV(X_SIZE, BLOCK_SIZE_X), CEILDIV(Y_SIZE, BLOCK_SIZE_Y), BLOCK_SIZE_X, BLOCK_SIZE_Y);

    // KERNEL CALL PARAMETRES INIT
    dim3 blockDim(BLOCK_SIZE_X, BLOCK_SIZE_Y);
    dim3 gridDim(CEILDIV(X_SIZE, BLOCK_SIZE_X), CEILDIV(Y_SIZE, BLOCK_SIZE_Y));

    // CPU ARRAY INITIALIZATION
    inidat(X_SIZE, Y_SIZE, heatmap);
    prtdat(X_SIZE, Y_SIZE, heatmap, input_file_name);

    // GPU INIT
    gpuErrchk(cudaSetDevice(0));
    cudaDeviceProp prop;
    gpuErrchk(cudaGetDeviceProperties(&prop, 0));

    // Init timer to count the GPU processing time
    // (host->device copy + main loop + device->host copy).
    cudaEventRecord(startEvent);

    // GPU MEMORY INIT: one allocation holding both ping-pong buffers.
    // FIX: the original was missing the statement-terminating semicolon here.
    gpuErrchk(cudaMalloc(&dev_heatmap, 2 * sizeof(float)*ARRAY_SIZE));
    gpuErrchk(cudaMemcpy(dev_heatmap, heatmap, sizeof(float)*ARRAY_SIZE, cudaMemcpyHostToDevice));
    memset(heatmap, '\0', sizeof(float)*ARRAY_SIZE);

    // PRE LOOP INITIALIZATIONS
    iz = 0;
    dev_current_map = dev_heatmap;
    dev_next_map = dev_heatmap + ARRAY_SIZE;

    // MAIN LOOP: ping-pong between the two halves of dev_heatmap.
    for (int t = 0 ; t < TIMESTEPS ; t++) {
        dev_current_map = dev_heatmap + ARRAY_SIZE * iz;
        dev_next_map = dev_heatmap + ARRAY_SIZE * (1 - iz);
        // FIX: the first launch-configuration argument is the GRID, the
        // second the BLOCK. The original passed <<<blockDim, gridDim>>>,
        // requesting 320x512-thread blocks — an invalid configuration, so
        // the kernel never ran.
        //kernelCalculateNewGeneration<<<gridDim,blockDim>>>(dev_current_map,dev_next_map,Y_SIZE,X_SIZE);
        kernelCalculateNewGenerationWithSharedMemory<<<gridDim, blockDim>>>(dev_current_map, dev_next_map, Y_SIZE, X_SIZE);
        iz = 1 - iz;
    }
    // Surface any launch-configuration error from the loop.
    gpuErrchk(cudaGetLastError());

    // dev_next_map still points at the buffer written by the final step.
    gpuErrchk(cudaMemcpy(heatmap, dev_next_map, sizeof(float)*ARRAY_SIZE, cudaMemcpyDeviceToHost));
    gpuErrchk(cudaEventRecord(endEvent));
    cudaDeviceSynchronize();
    prtdat(X_SIZE, Y_SIZE, heatmap, output_file_name);
    gpuErrchk(cudaEventElapsedTime(&duration, startEvent, endEvent));
    printf("GPU elapsed time: %f\n", duration);

    // FIX: release host and device memory (originally leaked).
    gpuErrchk(cudaFree(dev_heatmap));
    free(heatmap);
    return 0;
}

// Fill u with the classic "hot plate" initial condition.
void inidat(int nx, int ny, float *u)
{
    int ix, iy;
    for (ix = 0; ix <= nx - 1; ix++)
        for (iy = 0; iy <= ny - 1; iy++)
            *(u + ix + nx * iy) = (float)(ix * (nx - ix - 1) * iy * (ny - iy - 1));
}

// Write the grid to `fnam` as space-separated rows.
void prtdat(int nx, int ny, float *current, const char *fnam)
{
    int ix, iy;
    FILE *fp;
    fp = fopen(fnam, "w");
    for (iy = 0; iy < Y_SIZE; iy++) {
        for (ix = 0; ix < nx; ix++) {
            fprintf(fp, "%6.1f", *(current + ix + nx*iy));
            if (ix != nx - 1)
                fprintf(fp, " ");
            else
                fprintf(fp, "\n");
        }
    }
    fclose(fp);
}
42
#include "includes.h"

// Element-wise float addition: out[i] = in1[i] + in2[i] for i in [0, size).
// Grid-stride loop, so any launch configuration covers the whole range.
__global__ void elementwise_1D_1D_add(float* in1, float* in2, float* out, int size) {
    int tid = blockIdx.x * blockDim.x + threadIdx.x;
    int stride = gridDim.x * blockDim.x;
    // FIX: dropped the redundant inner `if (tid < size)`; the loop condition
    // already guarantees it.
    for (; tid < size; tid += stride)
        out[tid] = in1[tid] + in2[tid];
}
43
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <math.h>
#include "cuda.h"

// device function: element-wise vector addition c = a + b.
__global__ void kernelAddVectors(int N, double *a, double *b, double *c) {

    int threadid = threadIdx.x; //thread number
    int blockid = blockIdx.x;   //block number
    int Nblock = blockDim.x;    //number of threads in a block

    int id = threadid + blockid*Nblock;

    if (id < N) {
        c[id] = a[id] + b[id];
    }
}

int main(int argc, char **argv) {

    // FIX: both the vector size and the thread count come from the command
    // line; fail fast instead of dereferencing missing argv entries.
    if (argc < 3) {
        fprintf(stderr, "usage: %s N threadsPerBlock\n", argv[0]);
        return EXIT_FAILURE;
    }

    // get vector size from command line argument
    int N = atoi(argv[1]);

    //seed RNG
    double seed = clock();
    srand48(seed);

    double *h_a, *h_b, *h_c; //host vectors

    // allocate storage
    h_a = (double *) malloc(N*sizeof(double));
    h_b = (double *) malloc(N*sizeof(double));
    h_c = (double *) malloc(N*sizeof(double));

    //populate a and b
    for (int n=0;n<N;n++) {
        h_a[n] = drand48();
        h_b[n] = drand48();
    }

    double hostStart = clock();

    // c = a + b on the host, for timing comparison
    for (int n=0;n<N;n++) {
        h_c[n] = h_a[n] + h_b[n];
    }

    double hostEnd = clock();
    double hostTime = (hostEnd - hostStart)/(double) CLOCKS_PER_SEC;

    size_t inputMem = 2*N*sizeof(double); //number of bytes the operation inputs
    size_t outMem = 1*N*sizeof(double);   //number of bytes the operation outputs
    size_t totalMem = (inputMem+outMem);

    printf("The host took %f seconds to add a and b \n", hostTime);
    printf("The efective bandwidth of the host was: %f GB/s\n", totalMem/(1E9*hostTime));

    //Device arrays
    double *d_a, *d_b, *d_c;

    //allocate memory on the Device with cudaMalloc
    cudaMalloc(&d_a,N*sizeof(double));
    cudaMalloc(&d_b,N*sizeof(double));
    cudaMalloc(&d_c,N*sizeof(double));

    double copyStart = clock();

    //copy data from the host to the device
    cudaMemcpy(d_a,h_a,N*sizeof(double),cudaMemcpyHostToDevice);
    cudaMemcpy(d_b,h_b,N*sizeof(double),cudaMemcpyHostToDevice);

    double copyEnd = clock();
    double copyTime = (copyEnd-copyStart)/(double)CLOCKS_PER_SEC;

    printf("It took %f seconds to copy the data to device. \n",copyTime);
    printf("The efective bandwidth of the copy was: %f GB/s\n", inputMem/(1E9*copyTime));

    //at this point the data is allocated and populated on the device
    int Nthreads = atoi(argv[2]); //get the number of threads per block from command line
    int Nblocks = (N+Nthreads-1)/Nthreads;  // ceil-div so the grid covers N

    double deviceStart = clock();

    kernelAddVectors <<<Nblocks ,Nthreads >>>(N, d_a, d_b, d_c);
    // kernel launches are asynchronous; wait before reading the clock
    cudaDeviceSynchronize();

    double deviceEnd = clock();
    double deviceTime = (deviceEnd-deviceStart)/(double) CLOCKS_PER_SEC;

    printf("The device took %f seconds to add a and b \n", deviceTime);
    printf("The efective bandwidth of the device was: %f GB/s\n", totalMem/(1E9*deviceTime));
    printf("The device was %f times faster\n", hostTime/deviceTime);

    copyStart = clock();
    cudaMemcpy(h_c,d_c,N*sizeof(double),cudaMemcpyDeviceToHost);
    copyEnd = clock();
    copyTime = (copyEnd-copyStart)/(double) CLOCKS_PER_SEC;

    printf("It took %f seconds to copy the data back to the host. \n",copyTime);
    printf("The efective bandwidth of the copy was: %f GB/s\n", outMem/(1E9*copyTime));

    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);

    free(h_a);
    free(h_b);
    free(h_c);
}
44
#include <stdio.h>

/*
 * Initialize array values on the host.
 */
void init(int *a, int N)
{
  int i;
  for (i = 0; i < N; ++i)
  {
    a[i] = i;
  }
}

/*
 * Double each array element in parallel on the GPU.
 */
__global__
void doubleElements(int *a, int N)
{
  int i;
  i = blockIdx.x * blockDim.x + threadIdx.x;
  if (i < N)
  {
    a[i] *= 2;
  }
}

/*
 * Verify on the host that every element was doubled.
 */
bool checkElementsAreDoubled(int *a, int N)
{
  int i;
  for (i = 0; i < N; ++i)
  {
    if (a[i] != i*2) return false;
  }
  return true;
}

int main()
{
  int N = 100;
  int *a;

  size_t size = N * sizeof(int);

  /*
   * FIX: the original allocated `a` with malloc, so the kernel received a
   * host-only pointer and the elements were never doubled. Unified (managed)
   * memory makes `a` accessible from both host and device.
   */
  cudaMallocManaged(&a, size);

  init(a, N);

  size_t threads_per_block = 10;
  size_t number_of_blocks = 10;

  doubleElements<<<number_of_blocks, threads_per_block>>>(a, N);
  /* Wait for the asynchronous kernel before reading the results. */
  cudaDeviceSynchronize();

  bool areDoubled = checkElementsAreDoubled(a, N);
  printf("All elements were doubled? %s\n", areDoubled ? "TRUE" : "FALSE");

  /* Managed memory is released with cudaFree, not free. */
  cudaFree(a);
}
45
/*
 ============================================================================
 Name        : LAB3.cu
 Author      : Kineibe
 Version     :
 Copyright   : Your copyright notice
 Description : CUDA compute reciprocals
 ============================================================================
 */

#include <iostream>
#include <numeric>
#include <stdlib.h>
#include <string>
#include <fstream>
#include <sstream>

using namespace std;

static void CheckCudaErrorAux (const char *, unsigned, const char *, cudaError_t);
#define CUDA_CHECK_RETURN(value) CheckCudaErrorAux(__FILE__,__LINE__, #value, value)

// Time step, spatial step and simulated duration of the 1-D heat equation.
#define H_T 0.0001
#define H_X 0.5
#define TOTAL_TIME 10
// Convergence threshold for the iterative solver (L1 norm of the update).
#define EPSILON 0.001
// Boundary conditions: RIGHT_COND drives the right edge, LEFT_COND pins the left.
#define RIGHT_COND 1
#define LEFT_COND 0
#define BLOCK_SIZE_AMOUNT 256

// Finite-difference coefficients: A = dt/dx^2, B = the main-diagonal term.
const double A = H_T / (H_X * H_X);
const double B = 2 * A + 1;

// Row k of the system applied to the current iterate t (boundary rows special-cased).
double countSum(int k, double* t, int size) {
    if (k == 0) {
        return t[k] * 1;
    } else if (k == size - 1) {
        return -1 * t[k - 1] / H_X + t[k] / H_X;
    } else {
        return -1 * A * t[k - 1] + t[k] / B - A * t[k + 1];
    }
}

// One relaxation update of a single component with diagonal `multiplier`.
double iterationPart(double prev, double multiplier, double f, double sum) {
    return prev + (f - sum) / multiplier;
}

// One full sweep: every component of t_result updated from t_prev.
void iteration(double* t_prev, int size, double* f, double* t_result) {
    for (int i = 0; i < size; ++i) {
        // Diagonal coefficient depends on whether i is a boundary row.
        double a;
        if (i == 0)
            a = 1;
        else if (i == size - 1)
            a = 1 / H_X;
        else
            a = B;
        double sum = countSum(i, t_prev, size);
        double newT = iterationPart(t_prev[i], a, f[i], sum);
        t_result[i] = newT;
    }
}

// Convergence test: L1 distance between successive iterates below EPSILON.
bool condition(double* t_prev, double* t_result, int size) {
    double result = 0;
    for (int i = 0; i < size; ++i) {
        result += abs(t_prev[i] - t_result[i]);
    }
    return result < EPSILON;
}

// Iterate until convergence, then copy the converged state into t_target.
// NOTE(review): t_prev's buffer is used as scratch via pointer swapping.
void iterationManager(double* t_prev, int size, double* f, double* t_target) {
    bool check = true;
    double* t_result = new double[size];
    do {
        iteration(t_prev, size, f, t_result);
        check = condition(t_prev, t_result, size);
        // Ping-pong the buffers instead of copying element-wise.
        double* temp = t_result;
        t_result = t_prev;
        t_prev = temp;
    } while(!check);
    for (int i = 0; i < size; ++i) {
        t_target[i] = t_prev[i];
    }
    delete[] t_result;
}

// Print a space-separated array to stdout.
void printMas(double* arr, int size) {
    for (int i = 0; i < size; ++i) {
        cout << arr[i] << ' ';
    }
    cout << endl;
}

// CPU reference model: run a fixed number of implicit time steps on a grid
// of `size` points, printing each intermediate state.
void model(int size) {
    double* t = new double[size];
    for (int i = 0; i < size; ++i) {
        t[i] = 0;
    }
    double* t_next = new double[size];
    double* f = new double[size];
    f[0] = LEFT_COND;
    f[size - 1] = RIGHT_COND;
    // int iterationAmount = TOTAL_TIME / H_T;
    int iterationAmount = 10;  // NOTE(review): truncated for debugging
    for (int i = 0; i < iterationAmount; ++i) {
        cout << "Iteration num " << i << endl;
        // Right-hand side for interior points is the previous state.
        for (int i = 1; i < size - 1; ++i) {
            f[i] = t[i];
        }
        cout << "F array" << endl;
        printMas(f, size);
        iterationManager(t, size, f, t_next);
        printMas(t_next, size);
        double* temp = t_next;
        t_next = t;
        t = temp;
    }
    delete[] t_next;
    delete[] f;
    delete[] t;
}

/**
 * CUDA kernel that computes one explicit finite-difference time step of the
 * 1-D heat equation. (The "reciprocal" name is template leftover.)
 */
__global__ void reciprocalKernel(float *data, float *newData, unsigned vectorSize) {
    unsigned idx = blockIdx.x*blockDim.x+threadIdx.x;
    if (idx < vectorSize) {
        if (idx == vectorSize - 1) {
            // Right boundary: driven condition.
            newData[idx] = RIGHT_COND * H_T + data[idx];
        } else if (idx == 0) {
            // Left boundary: fixed value.
            newData[idx] = LEFT_COND;
        } else {
            // Interior: explicit Euler step of u_t = u_xx.
            newData[idx] = data[idx] + (data[idx - 1] - 2 * data[idx] + data[idx + 1]) * H_T / (H_X * H_X);
        }
    }
}

/**
 * Host function that copies the data and launches the work on GPU.
 * Ping-pongs between two device buffers, copies the current state back after
 * every step, and reports the accumulated kernel time.
 * NOTE(review): `rc` is never freed here.
 */
void gpuReciprocal(float *data, unsigned size)
{
    cudaEvent_t GPUstart, GPUstop;
    float GPUtime = 0.0f;
    float *rc = new float[size];
    float *gpuOldData;
    float *gpuNewData;
    int iterationAmount = TOTAL_TIME / H_T;

    static const int BLOCK_SIZE = BLOCK_SIZE_AMOUNT;
    const int blockCount = 1000;

    CUDA_CHECK_RETURN(cudaMalloc((void **)&gpuOldData, sizeof(float)*size));
    CUDA_CHECK_RETURN(cudaMalloc((void **)&gpuNewData, sizeof(float)*size));
    CUDA_CHECK_RETURN(cudaMemcpy(gpuOldData, data, sizeof(float)*size, cudaMemcpyHostToDevice));

    cudaEventCreate(&GPUstart);
    cudaEventCreate(&GPUstop);

    for (int i = 0; i < iterationAmount; ++i) {
        cudaEventRecord(GPUstart, 0);
        // Alternate source/destination buffers each step.
        if (i % 2 == 0) {
            reciprocalKernel<<<blockCount, BLOCK_SIZE>>> (gpuOldData, gpuNewData, size);
            cudaEventRecord(GPUstop, 0);
            CUDA_CHECK_RETURN(cudaMemcpy(rc, gpuNewData, sizeof(float)*size, cudaMemcpyDeviceToHost));
        } else {
            reciprocalKernel<<<blockCount, BLOCK_SIZE>>> (gpuNewData, gpuOldData, size);
            cudaEventRecord(GPUstop, 0);
            CUDA_CHECK_RETURN(cudaMemcpy(rc, gpuOldData, sizeof(float)*size, cudaMemcpyDeviceToHost));
        }
        cudaEventSynchronize(GPUstop);
        float temp;
        cudaEventElapsedTime(&temp, GPUstart, GPUstop);
        GPUtime += temp;
        //
        // for (int i = 0; i < size; ++i) {
        //     std::cout << "t[" << i << "] = " << rc[i] << std::endl;
        // }
        // std::cout << std::endl;
    }
    printf("GPU time : %.3f ms\n", GPUtime);

    CUDA_CHECK_RETURN(cudaFree(gpuOldData));
    CUDA_CHECK_RETURN(cudaFree(gpuNewData));
}

// Zero-fill the host buffer used as the initial state.
void initialize(float *data, unsigned size)
{
    for (unsigned i = 0; i < size; ++i)
        data[i] = 0;
}

// CPU equivalent of reciprocalKernel: one explicit time step over the array.
void cpuIteration(float *data, float *newData, unsigned vectorSize) {
    for (int idx = 0; idx < vectorSize; ++idx) {
        if (idx == vectorSize - 1) {
            newData[idx] = RIGHT_COND * H_T + data[idx];
        } else if (idx == 0) {
            newData[idx] = LEFT_COND;
        } else {
            newData[idx] = data[idx] + (data[idx - 1] - 2 * data[idx] + data[idx + 1]) * H_T / (H_X * H_X);
        }
    }
}

// CPU benchmark counterpart of gpuReciprocal.
// NOTE(review): `oldData` is read uninitialized on the first pass, `data` is
// unused, and rc/oldData are never freed — confirm intended behavior.
void cpuReciprocal(float *data, unsigned size)
{
    float *rc = new float[size];
    float *oldData = new float[size];
    float* result;
    float CPUstart, CPUstop;
    float CPUtime = 0.0f;
    int iterationAmount = TOTAL_TIME / H_T;
    for (int i = 0; i < iterationAmount; ++i) {
        CPUstart = clock();
        if (i % 2 == 0) {
            cpuIteration(oldData, rc, size);
            result = rc;
        } else {
            cpuIteration(rc, oldData, size);
            result = oldData;
        }
        CPUstop = clock();
        CPUtime += 1000.*(CPUstop - CPUstart) / CLOCKS_PER_SEC;
        //
        // for (int i = 0; i < size; ++i) {
        //     std::cout << "t[" << i << "] = " << result[i] << std::endl;
        // }
        // std::cout << std::endl;
    }
    printf("CPU time : %.3f ms\n", CPUtime);
}

// Stub ("skhodimost" = convergence); always reports success.
bool checkShodimost() {
    return true;
}

int main(void)
{
    static const int WORK_SIZE = 256000;
    float *data = new float[WORK_SIZE];
    // NOTE(review): only the CPU model is exercised here; `data`,
    // gpuReciprocal and cpuReciprocal are currently unused.
    model(5);

    /* Free memory */
    delete[] data;

    return 0;
}

/**
 * Check the return value of the CUDA runtime API call and exit
 * the application if the call has failed.
 */
static void CheckCudaErrorAux (const char *file, unsigned line, const char *statement, cudaError_t err) {
    if (err == cudaSuccess)
        return;
    std::cerr << statement<<" returned " << cudaGetErrorString(err) << "("<<err<< ") at "<<file<<":"<<line << std::endl;
    exit (1);
}
46
// includes, system
#include <cuda_runtime.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <float.h>

// includes, kernels
#include "vector_reduction_kernel.cu"

// For simplicity, just to get the idea in this MP, we're fixing the problem size to 512 elements.
#define NUM_ELEMENTS 512*1

////////////////////////////////////////////////////////////////////////////////
// declaration, forward
void runTest( int argc, char** argv);
float computeOnDevice(float* h_data, int array_mem_size);
extern "C" void computeGold( float* reference, float* idata, const unsigned int len);

////////////////////////////////////////////////////////////////////////////////
// Program main: select device 0 and run the reduction test.
////////////////////////////////////////////////////////////////////////////////
int main( int argc, char** argv)
{
    cudaSetDevice(0);
    runTest( argc, argv);
    return EXIT_SUCCESS;
}

////////////////////////////////////////////////////////////////////////////////
//! Run naive scan test: random integral input, CPU reference vs GPU result.
////////////////////////////////////////////////////////////////////////////////
void runTest( int argc, char** argv)
{
    int num_elements = NUM_ELEMENTS;
    const unsigned int array_mem_size = sizeof( float) * num_elements;

    // Allocate host memory to store the input data
    float* h_data = (float*) malloc( array_mem_size);

    // initialize the input data on the host to be integer values
    // between 0 and 1000
    for( unsigned int i = 0; i < num_elements; ++i)
        h_data[i] = floorf(1000*(rand()/(float)RAND_MAX));

    // Reference solution computed sequentially on the CPU
    // (see "vector_reduction_gold.cpp"; the Makefile compiles it too).
    float reference = 0.0f;
    computeGold(&reference , h_data, num_elements);

    // GPU solution (kernel in "vector_reduction_kernel.cu").
    float result = computeOnDevice(h_data, num_elements);

    // We can use an epsilon of 0 since values are integral and in a range that can be exactly represented
    float epsilon = 0.0f;
    unsigned int result_regtest = (abs(result - reference) <= epsilon);
    printf( "Test %s\n", (1 == result_regtest) ? "CORRECTO: Coinciden los resultados de la CPU y la GPU" : "INCORRECTO: Los resultados calculados en paralelo en la GPU no coinciden con los obtenidos secuencialmente en la CPU");
    printf( "device: %f host: %f\n", result, reference);

    // cleanup memory
    free( h_data);
}

// Copies h_data to the device, runs the reduction kernel, and returns the
// reduced value (element 0 of the device array).
// Note: h_data is both the input and the output of this function.
float computeOnDevice(float* h_data, int num_elements)
{
    float* d_data = NULL;
    float result;

    // FIX: the original passed `num_elements` (an ELEMENT count) as the BYTE
    // count to cudaMalloc/cudaMemcpy, so only a quarter of the data was
    // allocated and transferred.
    const size_t mem_size = num_elements * sizeof(float);

    // Memory allocation on device side
    cudaMalloc((void**)&d_data, mem_size);

    // Copy from host memory to device memory
    cudaMemcpy(d_data, h_data, mem_size, cudaMemcpyHostToDevice);

    // One thread handles two elements; round up for odd sizes.
    int threads = (num_elements/2) + num_elements%2;

    // Invoke the kernel
    reduction<<<1,threads>>>(d_data,num_elements);

    // Copy the reduced value from device memory back to host memory
    cudaMemcpy(&result, d_data, sizeof(float), cudaMemcpyDeviceToHost );

    cudaFree(d_data);
    // NOTE(review): cudaDeviceReset() tears down the whole context on every
    // call — acceptable here because the program exits right after.
    cudaDeviceReset();
    return result;
}
47
/*
 * Element-wise (Hadamard) product C = A .* B for m x n matrices stored
 * column-major with leading dimensions lda / ldb / ldc.
 * One thread per element; x indexes rows, y indexes columns.
 */
extern "C" {
__global__ void hadamard(int m, int n, double *A, int lda, double *B, int ldb, double *C, int ldc) {
    int row = blockIdx.x * blockDim.x + threadIdx.x;
    int col = blockIdx.y * blockDim.y + threadIdx.y;
    if (row < m && col < n) {
        C[row + col * ldc] = A[row + col * lda] * B[row + col * ldb];
    }
}
}

/*
 * Element-wise matrix sum C = A + B, parameters as above.
 */
extern "C" {
__global__ void matrix_sum(int m, int n, double *A, int lda, double *B, int ldb, double *C, int ldc) {
    int row = blockIdx.x * blockDim.x + threadIdx.x;
    int col = blockIdx.y * blockDim.y + threadIdx.y;
    if (row < m && col < n) {
        C[row + col * ldc] = A[row + col * lda] + B[row + col * ldb];
    }
}
}

/*
 * Submatrix copy dst = src for an m x n region; the independent leading
 * dimensions let callers move around pieces of larger matrices.
 */
extern "C" {
__global__ void copy(int m, int n, double *dst, int lddst, double *src, int ldsrc) {
    int row = blockIdx.x * blockDim.x + threadIdx.x;
    int col = blockIdx.y * blockDim.y + threadIdx.y;
    if (row < m && col < n) {
        dst[row + col * lddst] = src[row + col * ldsrc];
    }
}
}
48
#include <cuda_runtime.h>
#include <stdio.h>
#include <assert.h>
#include <iostream>
#include <stdlib.h>
#include <unistd.h>

// Hand-rolled device-to-device copy kernel: each thread copies 8 floats
// spaced 0x400 bytes apart, so one 256-thread block moves one contiguous
// 8 KiB (1<<13 byte) chunk of the buffer.
// NOTE(review): assumes blockDim.x == 256 and a buffer size that is a
// multiple of 8 KiB — there is no bounds check; confirm at the launch site.
extern "C" __global__ void memcpy_kernel(unsigned char* __restrict__ output, const unsigned char* __restrict__ input){
    output += (blockIdx.x<<13)|(threadIdx.x<<2);
    input += (blockIdx.x<<13)|(threadIdx.x<<2);
    *((float* )&output[0]) = *((float* )&input[0]);
    *((float* )&output[0x400]) = *((float* )&input[0x400]);
    *((float* )&output[0x800]) = *((float* )&input[0x800]);
    *((float* )&output[0xc00]) = *((float* )&input[0xc00]);
    *((float* )&output[0x1000]) = *((float* )&input[0x1000]);
    *((float* )&output[0x1400]) = *((float* )&input[0x1400]);
    *((float* )&output[0x1800]) = *((float* )&input[0x1800]);
    *((float* )&output[0x1c00]) = *((float* )&input[0x1c00]);
}

// Abort with file/line context when a CUDA API call fails.
#define CALL(cmd) \
do {\
cudaError_t cuda_error = cmd;\
if (cuda_error != cudaSuccess) { \
std::cout<<"'"<<cudaGetErrorString(cuda_error)<<"'("<<cuda_error<<")"<<" at "<<__FILE__<<":"<<__LINE__<<std::endl;\
exit(EXIT_FAILURE);\
}\
} while(0)

#define WARMUP 20   /* untimed iterations before measuring */
#define LOOP 100    /* timed iterations; reported times are averages */

// Pretty-print a byte count into str as B/KB/MB/GB.
static inline void b2s(size_t bytes, char * str){
    if(bytes<1024){
        sprintf(str, "%luB", bytes);
    }else if(bytes<(1024*1024)){
        double b= (double)bytes/1024.0;
        sprintf(str, "%.2fKB", b);
    }else if(bytes<(1024*1024*1024)){
        double b= (double)bytes/(1024.0*1024);
        sprintf(str, "%.2fMB", b);
    }else{
        double b= (double)bytes/(1024.0*1024*1024);
        sprintf(str, "%.2fGB", b);
    }
}

// Read an integer from the environment; fall back to def_v when unset.
static inline int env_get_int(const char * var_name, int def_v)
{
    char * v = getenv(var_name);
    int r = def_v;
    if(v)
        r = atoi(v);
    return r;
}

// Pseudo-random float in (0, 1]; seeds the C RNG once on first use.
static inline float get_rand(){
    static int inited = 0;
    float v;
    if(!inited){
        srand(time(NULL));
        inited = 1;
    }
    v = rand() % 1000 + 1;
    return v / 1000.0f;
}

// Count element-wise mismatches between two float buffers.
static inline int valid_vec(const float * vec_a, const float * vec_b, int num)
{
    int err_cnt = 0;
    for(int i=0;i<num;i++){
        if(vec_a[i] != vec_b[i])
            err_cnt++;
    }
    return err_cnt;
}

// Benchmark the copy kernel against cudaMemcpy (device-to-device) and
// report both effective bandwidths.
int main() {
    cudaSetDevice(0);
    unsigned char *A, *B;
    // Buffer length in 32-bit words, overridable via the DWORDS env var.
    const int dwords =
env_get_int("DWORDS",64*3*224*224);
    float * h_A = (float*)malloc(dwords*sizeof(float));
    float * h_B = (float*)malloc(dwords*sizeof(float));
    for (int i = 0; i < dwords; ++i) h_A[i] = get_rand();
    CALL(cudaMalloc(&A, dwords * sizeof(float)));
    CALL(cudaMalloc(&B, dwords * sizeof(float)));
    CALL(cudaMemcpy(A, h_A, dwords * sizeof(float), cudaMemcpyHostToDevice));

    // benchmark kernel
    // Each 256-thread block moves 2048 floats (8 per thread), hence >>11.
    // NOTE(review): for DWORDS not a multiple of 2048 the grid and buffer
    // do not line up exactly and the kernel has no bounds check; the
    // default 64*3*224*224 divides evenly — confirm for other values.
    int bx = 256;
    int gx = (dwords+255)>>11;
    assert(dwords/(bx*8*4));
    cudaEvent_t start_ev, stop_ev;
    CALL(cudaEventCreate(&start_ev));
    CALL(cudaEventCreate(&stop_ev));
    for(int i=0;i<WARMUP;i++)
        memcpy_kernel<<<gx, bx>>>(B, A);
    CALL(cudaEventRecord(start_ev, 0));
    for(int i=0;i<LOOP;i++)
        memcpy_kernel<<<gx, bx>>>(B, A);
    CALL(cudaEventRecord( stop_ev, 0 ));
    CALL(cudaEventSynchronize(stop_ev));
    float ms;
    CALL(cudaEventElapsedTime(&ms,start_ev, stop_ev));
    ms/=LOOP;  // average per-iteration kernel time
    CALL(cudaMemcpy(h_B, B, dwords * sizeof(float), cudaMemcpyDeviceToHost));
    //if(valid_vec(h_A, h_B, dwords) != 0) printf("not valid copy!\n");
    sleep(1);

    // benchmark memcpy api
    for(int i=0;i<WARMUP;i++)
        CALL(cudaMemcpy(B, A, dwords * sizeof(float), cudaMemcpyDeviceToDevice));
    CALL(cudaEventRecord( start_ev, 0));
    for(int i=0;i<LOOP;i++)
        CALL(cudaMemcpy(B, A, dwords * sizeof(float), cudaMemcpyDeviceToDevice));
    CALL(cudaEventRecord( stop_ev, 0 ));
    CALL(cudaEventSynchronize(stop_ev));
    float ms_api;
    CALL(cudaEventElapsedTime(&ms_api,start_ev, stop_ev));
    ms_api/=LOOP;  // average per-iteration API time

    // Report bandwidth; the factor 2 accounts for one read plus one write.
    char str[64];
    b2s(dwords*sizeof(float), str);
    printf("%s, bandwidth_kernel:%.3f(GB/s), bandwidth_api:%.3f(GB/s)\n", str,
        ((double)dwords*sizeof(float)*2)/((double)ms/1000)/1000000000.0,
        ((double)dwords*sizeof(float)*2)/((double)ms_api/1000)/1000000000.0 );
    free(h_A);
    free(h_B);
    CALL(cudaFree(A));
    CALL(cudaFree(B));
}
49
#include <stdio.h>

// Kernel: every launched thread prints one greeting line.
__global__ void mykernel()
{
    printf("hello world for GPU\n");
}

int main()
{
    // Single block of ten threads -> ten greeting lines.
    const int blocks = 1;
    const int threadsPerBlock = 10;
    mykernel<<<blocks, threadsPerBlock>>>();

    // Block until the device finishes so the buffered device-side printf
    // output is flushed before the process exits.
    cudaDeviceSynchronize();
    return 0;
}
50
#include "cuda_runtime.h"              // A small gpu volumetric path tracer in 200 lines
#include "device_launch_parameters.h"  // Jerry Guo (c) CGV TU Delft
#include "math_constants.h"            // Based on smallvpt and cu-smallpt
#include "curand_kernel.h"             // Compile: nvcc
#include <stdlib.h>                    // Usage: cusmallvpt [#SPP]
#include <stdio.h>                     // Result: image.ppm

// Surface reflection types: diffuse, specular (mirror), refractive (glass).
enum Refl_t { DIFF, SPEC, REFR };

// Abort the process with a readable message when a CUDA API call fails.
inline void HandleError(cudaError_t err) {
    if (cudaSuccess != err) {
        printf("%s\n", cudaGetErrorString(err));
        exit(EXIT_FAILURE);
    }
}

struct Vec { // position, also color (r,g,b)
    float x, y, z;
    __host__ __device__ explicit Vec() { x = 0.f; y = 0.f; z = 0.f; }
    __host__ __device__ explicit Vec(float v) { x = v; y = v; z = v; }
    // NOTE(review): this all-defaulted constructor overlaps the two above
    // for 0- and 1-argument calls; only 2- and 3-argument calls are
    // unambiguous — confirm intended.
    __host__ __device__ explicit Vec(float x_ = 0.f, float y_ = 0.f, float z_ = 0.f) { x = x_; y = y_; z = z_; }
    Vec(const Vec& vec) noexcept = default;
    Vec(Vec&& vec) noexcept = default;
    ~Vec() = default;
    __device__ Vec& operator=(const Vec& b) { this->x = b.x; this->y = b.y; this->z = b.z; return *this; }
    __device__ const Vec operator+(const Vec& b) const { return Vec(x + b.x, y + b.y, z + b.z); }
    __device__ const Vec operator-(const Vec& b) const { return Vec(x - b.x, y - b.y, z - b.z); }
    __host__ __device__ const Vec operator*(float b) const { return Vec(x * b, y * b, z * b); }
    // Element-wise (component) product.
    __device__ const Vec mult(const Vec& b) const { return Vec(x * b.x, y * b.y, z * b.z); }
    __device__ float len() const { return sqrt(x * x + y * y + z * z); }
    // In-place normalization; returns *this for chaining.
    __device__ Vec& norm() { float inv_len = 1.f / len(); this->x *= inv_len; this->y *= inv_len; this->z *= inv_len; return *this; }
    __device__ float dot(const Vec& b) const { return x * b.x + y * b.y + z * b.z; }
    // cross:
    __device__ Vec operator%(Vec& b) { return Vec(y * b.z - z * b.y, z * b.x - x * b.z, x * b.y - y * b.x); }
    __device__ Vec operator%(const Vec& b) { return Vec(y * b.z - z * b.y, z * b.x - x * b.z, x * b.y - y * b.x); }
};

// Free-function length/normalize helpers (value-returning variants).
__device__ inline float len(const Vec& v) { return sqrt(v.x*v.x + v.y*v.y + v.z*v.z); }
__device__ inline Vec
norm(const Vec& v) { float inv_len = 1.f / len(v); return Vec(v.x * inv_len, v.y * inv_len, v.z * inv_len); }

// A ray: origin o and direction d.
struct Ray {
    Vec o, d;
    __host__ __device__ explicit Ray() : o(Vec(0.f, 0.f, 0.f)), d(Vec(0.f, 0.f, 0.f)) {}
    __host__ __device__ explicit Ray(Vec o_, Vec d_) noexcept : o(o_), d(d_) {}
    Ray(const Ray& ray) noexcept = default;
    Ray(Ray&& ray) noexcept = default;
    ~Ray() = default;
    __device__ Ray& operator=(const Ray& r) { this->o = r.o; this->d = r.d; return *this; }
};

// Sphere primitive: radius, position, emission, color, reflection type.
struct Sphere {
    float rad;
    Vec p, e, c;
    Refl_t refl;
    __host__ __device__ explicit Sphere(float rad_, Vec p_, Vec e_, Vec c_, Refl_t refl_) : rad(rad_), p(p_), e(e_), c(c_), refl(refl_) {}
    // Ray/sphere intersection; returns hit distance t (0 on miss). When
    // tin/tout are supplied, also reports the entry/exit distances of the
    // interval the ray spends inside the sphere (entry clamped to 0 when
    // the ray origin is already inside).
    __device__ float intersect(const Ray& r, float* tin = NULL, float* tout = NULL) const {
        Vec op = p - r.o;
        float t, eps = 1e-4, b = op.dot(r.d), det = b * b - op.dot(op) + rad * rad;
        if (det < 0.f) return 0; else det = sqrt(det);
        if (tin && tout) { *tin = (b - det <= 0.f) ? 0.f : b - det; *tout = b + det; }
        return (t = b - det) > eps ? t : ((t = b + det) > eps ? t : 0.f);
    }
};

// Clamp a value to [0,1].
__host__ __device__ inline float clamp(float x) { return x < 0.f ? 0.f : x>1.f ?
1.f : x; }

// Gamma-correct (2.2) and quantize a [0,1] color channel to 0..255.
__host__ __device__ inline int toInt(float x) { return int(pow(clamp(x), 1.f / 2.2f) * 255.f + .5f); }

// Closest-hit query over the scene array; on a hit, writes distance t and
// sphere index id and returns true.
__device__ inline bool intersect(const Sphere* spheres, size_t n_sphere, const Ray& r, float& t, int& id, float tmax = 1e20) {
    float d, inf = t = tmax;
    for (int i = int(n_sphere); i--;) if ((d = spheres[i].intersect(r)) && d < t) { t = d; id = i; }
    return t < inf;
}

// Sample a free-flight distance within a segment of length smax, for
// coefficient sigma, normalized to the segment (epsilon is uniform in [0,1)).
__device__ inline float sampleSegment(float epsilon, float sigma, float smax) { return -log(1.f - epsilon * (1.f - exp(-sigma * smax))) / sigma; }

// Uniform direction on the unit sphere from two uniform random numbers.
__device__ inline Vec sampleSphere(float e1, float e2) {
    float z = 1.f - 2.f * e1, sint = sqrt(1.f - z * z);
    return Vec(cos(2.f * CUDART_PI_F * e2) * sint, sin(2.f * CUDART_PI_F * e2) * sint, z);
}

// Henyey-Greenstein phase function sampling with asymmetry parameter g.
__device__ inline Vec sampleHG(float g, float e1, float e2) {
    float s = 1.f-2.f*e1,cost=(s+2.f*g*g*g*(-1.0+e1)*e1+g*g*s+2.f*g*(1.f-e1+e1*e1))/((1.f+g*s)*(1.f+g*s)),sint=sqrt(1.f-cost*cost);
    return Vec(cos(2.f * CUDART_PI_F * e2) * sint, sin(2.f * CUDART_PI_F * e2) * sint, cost);
}

// Build an orthonormal basis (u, v, w) around the direction w.
__device__ inline void generateOrthoBasis(Vec& u, Vec& v, Vec w) {
    Vec coVec = w;
    if (fabs(w.x) <= fabs(w.y))
        if (fabs(w.x) <= fabs(w.z)) coVec = Vec(0.f, -w.z, w.y);
        else coVec = Vec(-w.y, w.x, 0.f);
    else if (fabs(w.y) <= fabs(w.z)) coVec = Vec(-w.z, 0.f, w.x);
    else coVec = Vec(-w.y, w.x, 0.f);
    coVec.norm();
    u = w % coVec, v = w % u;
}

// Sample a scattering event inside the medium along r between tin and tout.
// Writes the scattered ray to *sRay and the sampled distance to s; returns
// the scattering probability of the segment.
__device__ inline float scatter(const Ray& r, Ray* sRay, float tin, float tout, float& s, const float& sigma_s, curandState_t* rand_state) {
    s = sampleSegment(curand_uniform(rand_state), sigma_s, tout - tin);
    Vec x = r.o + r.d * tin + r.d * s;
    // NOTE(review): phase-function asymmetry is hard-coded to -0.5 here.
    Vec dir = sampleHG(-0.5f, curand_uniform(rand_state), curand_uniform(rand_state));
    Vec u(0.f, 0.f, 0.f), v(0.f, 0.f, 0.f);
    generateOrthoBasis(u, v, r.d);
    dir = u * dir.x + v * dir.y + r.d * dir.z;
    if (sRay) *sRay = Ray(x, dir);
    return (1.0f - exp(-sigma_s * (tout - tin)));
}

// Path-traced radiance estimate for ray r through the scene plus one
// homogeneous participating medium (a large sphere enclosing the scene).
__device__ Vec radiance(const Sphere* spheres, size_t n_sphere, const Ray& r, int _depth, curandState_t* rand_state) {
    Ray ray = r;
    Vec L(0.f, 0.f, 0.f);  // accumulated radiance
    Vec B(1.f, 1.f, 1.f);  // path throughput
    int depth = _depth;
    float tnear, tfar, scaleBy = 1.f, absorption = 1.f;
    // Homogeneous medium bounding sphere and its scattering/absorption coefficients.
    const Sphere homoMedium(300.f, Vec(50.f, 50.f, 80.f), Vec(0.f, 0.f, 0.f), Vec(0.f, 0.f, 0.f), DIFF);
    const float sigma_s = 0.009f, sigma_a = 0.006f, sigma_t = sigma_s + sigma_a;
    while (1) {
        float t;     // distance to intersection
        int id = 0;  // id of intersected object
        if (homoMedium.intersect(ray, &tnear, &tfar) > 0) {
            Ray sRay;
            float s, ms = scatter(ray, &sRay, tnear, tfar, s, sigma_s, rand_state), prob_s = ms;
            scaleBy = 1.f / (1.f - prob_s);
            if (curand_uniform(rand_state) <= prob_s) {// Sample surface or volume?
                if (!intersect(spheres, n_sphere, ray, t, id, tnear + s)) {
                    // Scatter inside the medium and continue the walk.
                    B = B * ms * (1.f - prob_s);
                    ray = sRay;
                    ++depth;
                    continue;
                }
                scaleBy = 1.f;
            } else if (!intersect(spheres, n_sphere, ray, t, id)) return L;
            if (t >= tnear) {
                // Beer-Lambert attenuation over the in-medium span.
                float dist = (t > tfar ? tfar - tnear : t - tnear);
                absorption = exp(-sigma_t * dist);
            }
        } else if (!intersect(spheres, n_sphere, ray, t, id)) return L;
        const Sphere& obj = spheres[id];
        Vec x = r.o + r.d * t, n = Vec(x - obj.p).norm(), nl = n.dot(ray.d) < 0 ? n : n * -1, f = obj.c, Le = obj.e;
        float p = f.x > f.y && f.x > f.z ? f.x : f.y > f.z ? f.y : f.z;  // max reflectance channel
        // Russian roulette termination after depth 5.
        if (++depth > 5) if (curand_uniform(rand_state) < p) B = B * (1 / p); else return L;
        if (n.dot(nl) > 0 || obj.refl != REFR) { B = B * absorption; Le = obj.e * absorption; }
        else scaleBy = 1.f;
        // Accumulate luminance and throughtput
        L = L + B.mult(Le);
        B = B.mult(f * scaleBy);
        ++depth;
        switch (obj.refl) {
        case SPEC: {
            // Perfect mirror reflection.
            ray = Ray(x, r.d - n * 2 * n.dot(r.d));
            break;
        }
        case REFR: {
            // Glass: reflect, then decide refraction vs. total internal reflection.
            ray = Ray(x, r.d - n * 2 * n.dot(r.d));
            bool into = n.dot(nl) > 0;
            float nc = 1, nt = 1.5, nnt = into ? nc / nt : nt / nc, ddn = r.d.dot(nl), cos2t;
            if ((cos2t = 1 - nnt * nnt * (1 - ddn * ddn)) < 0) break;
            Vec tdir = Vec(r.d*nnt-n*((into?1:-1)*(ddn*nnt+sqrt(cos2t)))).norm();
            // Schlick's Fresnel approximation.
            float a=nt-nc,b=nt+nc,R0=a*a/(b*b),c = 1 - (into ?
-ddn : tdir.dot(n));
            float Re=R0+(1-R0)*c*c*c*c*c, Tr=1-Re,P=.25+.5*Re,RP=Re/P,TP = Tr / (1 - P);
            // Probabilistically follow reflection or transmission.
            if (curand_uniform(rand_state) < P) B=B*RP; else { ray=Ray(x,tdir); B=B*TP; }
            break;
        }
        default: {
            // Diffuse: cosine-weighted hemisphere sample around nl.
            float r1=2*CUDART_PI_F*curand_uniform(rand_state),r2=curand_uniform(rand_state),r2s = sqrt(r2);
            Vec w = nl, u = Vec((fabs(w.x) > .1 ? Vec(0, 1) : Vec(1.f, 1.f, 1.f)) % w).norm(), v = w % u;
            Vec d = Vec(u * cos(r1) * r2s + v * sin(r1) * r2s + w * sqrt(1 - r2)).norm();
            ray = Ray(x, d);
        }
        }
    }
}

// One thread per pixel: accumulates spp samples for each of the 2x2
// sub-pixels and adds the tone-weighted sums into the film buffer Ls.
__global__ void render_kernel(const Sphere* spheres, const size_t n_sphere, Vec* Ls, size_t w, size_t h, int spp) {
    const size_t x = threadIdx.x + blockIdx.x * blockDim.x;
    const size_t y = threadIdx.y + blockIdx.y * blockDim.y;
    const size_t offset = x + y * blockDim.x * gridDim.x;
    const float inv_spp = 1.0f / float(spp);
    if (x >= w || y >= h) return;
    // Per-pixel RNG seeded with the flattened pixel offset.
    curandState rand_state;
    curand_init(offset, 0u, 0u, &rand_state);
    Ray cam(Vec(50.f, 52.f, 285.f), norm(Vec(0.f, -0.042612f, -1.f)));
    const float fov = 0.5135f;
    Vec cx = Vec(w * fov / h, 0.0f, 0.0f);
    Vec cy = norm(Vec(cx % cam.d)) * fov;
    size_t i = (h - 1u - y) * w + x;  // film row order is bottom-up
    for (size_t sy = 0u; sy < 2u; ++sy)
        for (size_t sx = 0u; sx < 2u; ++sx) {
            Vec L(0.f, 0.f, 0.f);
            for (size_t s = 0u; s < spp; ++s) {
                // Tent-filter jitter inside the sub-pixel.
                float u1 = 2.f * curand_uniform(&rand_state);
                float u2 = 2.f * curand_uniform(&rand_state);
                float dx = (u1 < 1.f) ? sqrt(u1) - 1.f : 1.f - sqrt(2.f - u1);
                float dy = (u2 < 1.f) ?
sqrt(u2) - 1.f : 1.f - sqrt(2.f - u2);
                Vec d = cx * (((sx+0.5+dx)*0.5+x)/w-0.5)+cy*(((sy+0.5+dy)*0.5+y)/h-0.5)+cam.d;
                Ray pRay(cam.o + d * 140.f, d.norm());
                L = L + radiance(spheres, n_sphere, pRay, 0, &rand_state) * inv_spp;
            }
            // Each of the four sub-pixels contributes a quarter of the pixel.
            Ls[i] = Ls[i] + Vec(0.25f * clamp(L.x), 0.25f * clamp(L.y), 0.25f * clamp(L.z));
        }
}

// Host driver: upload the scene, render on the GPU, write image.ppm.
cudaError_t Render(int w, int h, unsigned int spp = 100) {
    const size_t n_sphere = 4;
    Sphere spheres[n_sphere] = {//Scene: radius, position, emission, color, material
        Sphere(26.5f, Vec(27.f, 18.5f, 78.f),Vec(0.f, 0.f, 0.f),Vec(1.f,1.f,1.f)*.75f,SPEC),//Mirr
        Sphere(12.f, Vec(70.f, 43.f, 78.f), Vec(0.f, 0.f, 0.f), Vec(0.27f,0.8f,0.8f), REFR),//Glas
        Sphere(8.f, Vec(55.f, 87.f, 78.f), Vec(0.f, 0.f, 0.f), Vec(1,1,1) * .75f, DIFF), //Lite
        Sphere(4.f, Vec(55.f, 80.f, 78.f), Vec(10.f,10.f,10.f), Vec(0.f, 0.f, 0.f), DIFF) //Lite
    };
    HandleError(cudaSetDevice(0));
    const size_t n_pixels = size_t(w * h);
    Sphere* spheres_device;
    HandleError(cudaMalloc((void**)&spheres_device, sizeof(spheres)));
    HandleError(cudaMemcpy(spheres_device, spheres, sizeof(spheres), cudaMemcpyHostToDevice));
    Vec* film_device;
    HandleError(cudaMalloc((void**)&film_device, sizeof(Vec) * n_pixels));
    HandleError(cudaMemset(film_device, 0, sizeof(Vec) * n_pixels));
    // NOTE(review): assumes w and h are multiples of 16 (no rounding up).
    const dim3 nblocks(w / 16, h / 16);
    const dim3 nthreads(16, 16);
    render_kernel <<< nblocks, nthreads >>> (spheres_device, n_sphere, film_device, w, h, spp);
    Vec* film = (Vec*)malloc(n_pixels * sizeof(Vec));
    // This blocking copy also synchronizes with the kernel above.
    HandleError(cudaMemcpy(film, film_device, sizeof(Vec) * n_pixels, cudaMemcpyDeviceToHost));
    HandleError(cudaFree(spheres_device));
    HandleError(cudaFree(film_device));
    FILE* f = fopen("image.ppm", "w"); // Write image to PPM file.
    fprintf(f, "P3\n%d %d\n%d\n", w, h, 255);
    for (int i=0;i<w*h;i++) fprintf(f,"%d %d %d ",toInt(film[i].x),toInt(film[i].y),toInt(film[i].z));
    free(film);
    return cudaSuccess;
}

// Entry point: optional argv[1] is total samples per pixel (divided by 4
// because each pixel is sampled as 2x2 sub-pixels).
int main(int argc, char* argv[]) {
    int w = 1024, h = 768, spp = argc == 2 ? atoi(argv[1]) / 4 : 100;
    Render(w, h, spp);
    return 0;
}
51
#include "includes.h"

// Element-wise vector addition on the GPU: d_C[i] = d_A[i] + d_B[i]
// for every index i < size. One thread per element; threads beyond the
// array tail return immediately.
__global__ void addVectors( float *d_A, float *d_B, float *d_C, int size)
{
    const int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) {
        return;  // guard the grid tail when size is not a block multiple
    }
    d_C[idx] = d_A[idx] + d_B[idx];
}
52
// Per-GPU-block pass: for one tree block, compute each point's squared
// distance to the block's average position and stage (distance, point id)
// pairs in shared memory for a later furthest-point reduction.
//
// Dynamic shared memory layout (3rd launch parameter must cover it):
//   int   array[5]             - block metadata (offset, start0, n0, start1, n1)
//   float posAVGBlock[3]       - average position of the block
//   float dMaxPt[blockDim.x]   - per-thread squared distance (-1 when unused)
//   int   iMaxPt[blockDim.x]   - per-thread point index      (-1 when unused)
extern "C" __global__ void calcDir(// Dots props
float* pX, float* pY, float* pZ,
//Tree specs
// per Block
int* dotIndexes, int* stBl0, int* nPtBl0, int* stBl1, int* nPtBl1, float* avgPX, float* avgPY, float* avgPZ,
// per GPU Block
int* idBl, int* offsBl,
// output values, per block
int* idFurthest, float* dMax
) {
    extern __shared__ int array[];
    float* posAVGBlock = (float*)&array[5];
    float* dMaxPt = (float*)&posAVGBlock[3];
    int* iMaxPt = (int*)&dMaxPt[blockDim.x];

    // Fetch block data: thread 0 loads the metadata, everyone reads it
    // after the barrier below.
    int iGPUBlock=blockIdx.x;
    int iThread=threadIdx.x;
    int idBloc;
    if (iThread==0) {
        idBloc=idBl[iGPUBlock];
        array[0]=offsBl[iGPUBlock];
        array[1]=stBl0[idBloc];
        array[2]=nPtBl0[idBloc];
        array[3]=stBl1[idBloc];
        array[4]=nPtBl1[idBloc];
        posAVGBlock[0]=avgPX[idBloc];
        posAVGBlock[1]=avgPY[idBloc];
        posAVGBlock[2]=avgPZ[idBloc];
    }
    __syncthreads();

    int offsPt = array[0];
    int startIndexBl0 = array[1];
    int nPtBlock0 = array[2];
    int startIndexBl1 = array[3]; // useless in fact
    int nPtBlock1 = array[4];
    int nPts = nPtBlock0 + nPtBlock1;
    int ptToBeComputed = iThread+offsPt;

    // BUG FIX: the block averages are floats; the original stored them in
    // `int` locals, truncating the fractional part and skewing every
    // distance computed below.
    float mx=posAVGBlock[0];
    float my=posAVGBlock[1];
    float mz=posAVGBlock[2];

    if (ptToBeComputed<nPts) {
        int id_pt=dotIndexes[startIndexBl0+ptToBeComputed];
        float xval=(pX[id_pt]-mx);
        float yval=(pY[id_pt]-my);
        float zval=(pZ[id_pt]-mz);
        dMaxPt[iThread]=xval*xval+yval*yval+zval*zval;  // squared distance to average
        iMaxPt[iThread]=id_pt;
    } else {
        // Pad inactive threads so the later reduction can skip them.
        dMaxPt[iThread]=-1;
        iMaxPt[iThread]=-1;
    }
    __syncthreads();
    // All data copied to shared Mem
}
53
#include<stdio.h>
#include<stdlib.h>
#include<math.h>

// Element-wise vector addition: h_c[id] = h_a[id] + h_b[id] for id < n.
__global__ void vecAdd(float* h_a, float* h_b, float* h_c, int n)
{
    int id = blockIdx.x*blockDim.x+threadIdx.x;
    // check if it is in bound
    if(id<n)
        h_c[id] = h_a[id]+ h_b[id];
}

// Adds sin^2(i) + cos^2(i) per element on the GPU; the printed average of
// the result vector should be 1.0.
int main(int argc, char* argv[])
{
    // size of vectors
    int n= 1000;

    float *h_a; // host input
    float *h_b; // host input
    float *h_c; // host output
    float *d_a; // device input
    float *d_b; // device input
    float *d_c; // device output

    int size = n * sizeof(float);

    // allocating memory on host
    h_a = (float*)malloc(size);
    h_b = (float*)malloc(size);
    h_c = (float*)malloc(size);

    // allocating memory for each vector on GPU
    cudaMalloc((void **) &d_a, size);
    cudaMalloc((void **) &d_b, size);
    cudaMalloc((void **) &d_c, size);

    // initialize vectors on host: sin^2(i) + cos^2(i) == 1 for every i
    int i;
    for(i = 0; i<n; i++) {
        h_a[i] = sin(i)*sin(i);
        h_b[i] = cos(i)*cos(i);
    }

    // copy host vectors to device
    cudaMemcpy(d_a, h_a, size, cudaMemcpyHostToDevice);
    cudaMemcpy(d_b, h_b, size, cudaMemcpyHostToDevice);

    int threadPerBlocks, blockCount;
    // block size
    threadPerBlocks = 1024;
    // grid size (ceil-divide n by the block size)
    blockCount = (int)ceil((float)n/threadPerBlocks);

    // executing kernel
    // BUG FIX: the launch configuration is <<<gridDim, blockDim>>>; the
    // original passed <<<threadPerBlocks, blockCount>>> (arguments
    // swapped), which only produced correct indices by accident while
    // n <= 1024 and would break for larger inputs.
    vecAdd<<<blockCount, threadPerBlocks>>>(d_a, d_b, d_c, n);

    // copy result array back to host
    cudaMemcpy(h_c, d_c, size, cudaMemcpyDeviceToHost);

    float sum = 0;
    for(i=0; i<n; i++)
        sum += h_c[i];
    printf("Final result is: %f\n", sum/n);

    // release device memory
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);

    // releasing host memory
    free(h_a);
    free(h_b);
    free(h_c);

    return 0;
}
54
#include "includes.h"

// Block-level minimum reduction: finds the smallest value in this block's
// slice of global_input_data together with the block-local thread index it
// came from. Writes the minimum to global_output_data[0] and its index to
// global_output_data[1] (stored as a float).
// NOTE(review): the shared buffers are fixed at 10 entries, so the kernel
// assumes blockDim.x <= 10, and the tree reduction assumes a power-of-two
// block size — confirm against the launch site.
__global__ void vectorReduce(const float *global_input_data, float *global_output_data, const int numElements)
{
    __shared__ float sdata[10];
    __shared__ int sindice[10];

    int tid = threadIdx.x;
    int i = blockIdx.x * (blockDim.x ) + threadIdx.x;

    // Stage this thread's element (and its local index) in shared memory.
    sdata[tid] = global_input_data[i];
    sindice[tid] = tid;
    __syncthreads();

    // Pairwise tree reduction keeping the smaller value of each pair.
    for (unsigned int s = blockDim.x / 2; s > 0; s >>= 1) {
        if (tid < s ) {
            if (sdata[tid] > sdata[tid + s]) {
                sdata[tid] = sdata[tid + s];
                sindice[tid] = sindice[tid + s];
            }
        }
        // BUG FIX: this barrier sat inside `if (tid < s)` in the original,
        // i.e. inside divergent control flow, which is undefined behavior.
        // Every thread of the block must reach it on every iteration.
        __syncthreads();
    }

    if (tid == 0) {
        global_output_data[0] = sdata[0];
    }
    if (tid == 1) {
        global_output_data[1] = sindice[0];
    }
}
55
#include <stdio.h> #include <math.h> #include <sys/time.h> __global__ void convertToFloat(float *d_out, int *d_in){ d_out[threadIdx.x] = (float)d_in[threadIdx.x]; } double time_diff(struct timeval x , struct timeval y){ double x_ms , y_ms , diff; x_ms = (double)x.tv_sec*1000000 + (double)x.tv_usec; y_ms = (double)y.tv_sec*1000000 + (double)y.tv_usec; diff = (double)y_ms - (double)x_ms; return diff; } int main(int argc, char ** argv) { int lenInts = 2000; int ints[2000] = {4, 9, 6, 7, 7, 5, 7, 0, 6, 0, 0, 9, 7, 8, 1, 2, 7, 7, 3, 9, 4, 5, 9, 3, 6, 7, 5, 6, 0, 4, 0, 5, 4, 6, 9, 1, 3, 4, 2, 9, 5, 6, 2, 5, 7, 1, 5, 8, 9, 8, 9, 9, 2, 7, 5, 0, 7, 6, 2, 8, 7, 0, 1, 1, 2, 5, 9, 2, 8, 7, 0, 3, 9, 2, 8, 6, 0, 4, 3, 6, 4, 9, 3, 8, 9, 4, 0, 6, 1, 6, 7, 0, 8, 6, 5, 2, 1, 8, 9, 3, 0, 4, 4, 5, 6, 0, 0, 0, 4, 5, 1, 1, 0, 8, 7, 8, 9, 1, 3, 0, 3, 3, 8, 1, 0, 4, 6, 0, 7, 3, 5, 3, 5, 3, 7, 6, 2, 7, 9, 7, 9, 6, 9, 0, 1, 0, 5, 0, 7, 2, 8, 3, 4, 0, 6, 1, 6, 3, 5, 4, 0, 6, 1, 3, 1, 9, 5, 4, 3, 3, 9, 8, 0, 6, 6, 6, 7, 2, 8, 5, 6, 8, 8, 1, 5, 0, 7, 0, 6, 7, 9, 4, 2, 2, 6, 2, 0, 9, 3, 6, 5, 0, 3, 3, 8, 2, 2, 9, 1, 3, 4, 5, 9, 8, 4, 7, 2, 1, 7, 2, 3, 3, 3, 4, 3, 6, 5, 5, 0, 6, 5, 0, 1, 4, 0, 2, 9, 7, 3, 2, 6, 3, 0, 7, 7, 1, 1, 4, 2, 3, 0, 7, 9, 7, 8, 0, 0, 5, 0, 6, 4, 7, 5, 4, 1, 3, 3, 5, 0, 1, 2, 9, 4, 4, 2, 8, 8, 7, 1, 2, 9, 4, 6, 6, 2, 0, 4, 8, 6, 1, 7, 9, 1, 4, 5, 9, 8, 3, 0, 6, 2, 8, 3, 0, 6, 2, 6, 1, 3, 6, 0, 2, 9, 9, 1, 5, 0, 8, 7, 4, 5, 4, 3, 8, 0, 2, 2, 0, 1, 0, 5, 3, 6, 4, 4, 9, 0, 7, 5, 7, 1, 9, 0, 5, 2, 9, 6, 2, 7, 9, 0, 8, 0, 8, 9, 7, 8, 8, 6, 8, 1, 0, 3, 5, 3, 0, 8, 3, 2, 1, 2, 3, 3, 9, 9, 4, 8, 6, 1, 1, 0, 7, 1, 9, 0, 4, 1, 3, 7, 0, 8, 3, 7, 2, 0, 8, 9, 1, 6, 1, 0, 5, 2, 1, 5, 5, 7, 7, 2, 8, 5, 1, 5, 9, 7, 0, 9, 6, 4, 6, 3, 1, 9, 6, 4, 7, 2, 4, 2, 2, 2, 7, 9, 1, 0, 5, 9, 0, 6, 1, 9, 5, 5, 2, 9, 9, 3, 3, 7, 7, 9, 5, 5, 1, 7, 6, 0, 1, 7, 0, 7, 3, 1, 4, 1, 9, 4, 0, 0, 5, 1, 3, 7, 8, 7, 3, 7, 8, 8, 8, 9, 0, 1, 0, 9, 5, 3, 5, 0, 1, 2, 4, 7, 0, 9, 9, 3, 2, 6, 4, 7, 0, 7, 8, 1, 3, 3, 
2, 6, 0, 2, 2, 0, 6, 0, 4, 5, 1, 4, 7, 4, 3, 6, 5, 3, 8, 3, 3, 7, 5, 4, 9, 4, 4, 2, 1, 9, 7, 9, 1, 4, 4, 3, 5, 9, 2, 0, 1, 1, 3, 5, 1, 0, 0, 8, 8, 0, 6, 9, 9, 5, 2, 5, 6, 0, 7, 7, 4, 5, 0, 7, 0, 3, 2, 4, 2, 6, 7, 7, 5, 6, 4, 3, 2, 5, 3, 2, 5, 8, 0, 1, 2, 1, 4, 3, 4, 7, 4, 2, 2, 8, 5, 4, 1, 4, 2, 1, 4, 7, 1, 4, 7, 0, 1, 3, 0, 2, 7, 9, 2, 8, 7, 9, 7, 9, 2, 1, 7, 8, 0, 6, 9, 5, 8, 7, 0, 5, 2, 3, 2, 3, 1, 7, 8, 9, 7, 2, 6, 3, 1, 3, 2, 9, 5, 8, 2, 4, 1, 3, 5, 4, 4, 0, 9, 1, 6, 7, 0, 3, 9, 4, 7, 7, 5, 4, 4, 9, 6, 2, 2, 3, 9, 3, 1, 2, 3, 5, 1, 1, 2, 1, 7, 4, 3, 3, 7, 4, 8, 1, 4, 2, 0, 0, 3, 2, 2, 5, 7, 3, 0, 7, 9, 9, 0, 7, 1, 0, 0, 9, 5, 9, 6, 7, 4, 5, 2, 9, 8, 4, 4, 1, 6, 6, 3, 9, 1, 4, 7, 4, 6, 2, 5, 1, 8, 3, 2, 5, 8, 3, 3, 4, 1, 2, 4, 0, 9, 9, 0, 1, 4, 4, 0, 2, 2, 7, 8, 7, 3, 5, 3, 1, 5, 1, 1, 8, 8, 2, 6, 6, 7, 9, 1, 6, 4, 2, 6, 7, 3, 9, 7, 1, 2, 1, 7, 1, 7, 7, 2, 7, 2, 5, 7, 6, 8, 7, 2, 8, 1, 8, 6, 5, 1, 2, 4, 0, 4, 4, 3, 7, 6, 7, 1, 8, 7, 5, 2, 3, 5, 4, 8, 7, 8, 8, 7, 0, 5, 9, 2, 7, 7, 8, 6, 4, 3, 5, 7, 0, 0, 9, 5, 5, 4, 8, 1, 9, 4, 2, 6, 6, 3, 3, 7, 6, 1, 5, 1, 5, 8, 7, 8, 5, 2, 4, 4, 9, 4, 5, 6, 1, 0, 5, 4, 8, 2, 1, 7, 5, 5, 5, 8, 0, 8, 7, 4, 9, 1, 5, 9, 3, 2, 7, 6, 6, 2, 4, 9, 2, 7, 2, 8, 4, 1, 5, 1, 1, 0, 6, 1, 3, 0, 7, 1, 4, 0, 3, 3, 6, 1, 0, 3, 6, 2, 7, 5, 2, 0, 9, 1, 8, 8, 9, 1, 3, 9, 4, 4, 1, 8, 3, 9, 5, 3, 9, 4, 1, 1, 9, 2, 9, 2, 4, 3, 4, 7, 1, 0, 9, 4, 4, 6, 2, 8, 7, 3, 7, 9, 5, 7, 4, 6, 3, 3, 4, 5, 5, 6, 5, 1, 6, 8, 6, 2, 8, 1, 6, 9, 6, 0, 3, 6,4, 9, 6, 7, 7, 5, 7, 0, 6, 0, 0, 9, 7, 8, 1, 2, 7, 7, 3, 9, 4, 5, 9, 3, 6, 7, 5, 6, 0, 4, 0, 5, 4, 6, 9, 1, 3, 4, 2, 9, 5, 6, 2, 5, 7, 1, 5, 8, 9, 8, 9, 9, 2, 7, 5, 0, 7, 6, 2, 8, 7, 0, 1, 1, 2, 5, 9, 2, 8, 7, 0, 3, 9, 2, 8, 6, 0, 4, 3, 6, 4, 9, 3, 8, 9, 4, 0, 6, 1, 6, 7, 0, 8, 6, 5, 2, 1, 8, 9, 3, 0, 4, 4, 5, 6, 0, 0, 0, 4, 5, 1, 1, 0, 8, 7, 8, 9, 1, 3, 0, 3, 3, 8, 1, 0, 4, 6, 0, 7, 3, 5, 3, 5, 3, 7, 6, 2, 7, 9, 7, 9, 6, 9, 0, 1, 0, 5, 0, 7, 2, 8, 3, 4, 0, 6, 1, 6, 3, 5, 4, 0, 6, 1, 3, 1, 9, 5, 4, 3, 3, 9, 8, 0, 6, 
6, 6, 7, 2, 8, 5, 6, 8, 8, 1, 5, 0, 7, 0, 6, 7, 9, 4, 2, 2, 6, 2, 0, 9, 3, 6, 5, 0, 3, 3, 8, 2, 2, 9, 1, 3, 4, 5, 9, 8, 4, 7, 2, 1, 7, 2, 3, 3, 3, 4, 3, 6, 5, 5, 0, 6, 5, 0, 1, 4, 0, 2, 9, 7, 3, 2, 6, 3, 0, 7, 7, 1, 1, 4, 2, 3, 0, 7, 9, 7, 8, 0, 0, 5, 0, 6, 4, 7, 5, 4, 1, 3, 3, 5, 0, 1, 2, 9, 4, 4, 2, 8, 8, 7, 1, 2, 9, 4, 6, 6, 2, 0, 4, 8, 6, 1, 7, 9, 1, 4, 5, 9, 8, 3, 0, 6, 2, 8, 3, 0, 6, 2, 6, 1, 3, 6, 0, 2, 9, 9, 1, 5, 0, 8, 7, 4, 5, 4, 3, 8, 0, 2, 2, 0, 1, 0, 5, 3, 6, 4, 4, 9, 0, 7, 5, 7, 1, 9, 0, 5, 2, 9, 6, 2, 7, 9, 0, 8, 0, 8, 9, 7, 8, 8, 6, 8, 1, 0, 3, 5, 3, 0, 8, 3, 2, 1, 2, 3, 3, 9, 9, 4, 8, 6, 1, 1, 0, 7, 1, 9, 0, 4, 1, 3, 7, 0, 8, 3, 7, 2, 0, 8, 9, 1, 6, 1, 0, 5, 2, 1, 5, 5, 7, 7, 2, 8, 5, 1, 5, 9, 7, 0, 9, 6, 4, 6, 3, 1, 9, 6, 4, 7, 2, 4, 2, 2, 2, 7, 9, 1, 0, 5, 9, 0, 6, 1, 9, 5, 5, 2, 9, 9, 3, 3, 7, 7, 9, 5, 5, 1, 7, 6, 0, 1, 7, 0, 7, 3, 1, 4, 1, 9, 4, 0, 0, 5, 1, 3, 7, 8, 7, 3, 7, 8, 8, 8, 9, 0, 1, 0, 9, 5, 3, 5, 0, 1, 2, 4, 7, 0, 9, 9, 3, 2, 6, 4, 7, 0, 7, 8, 1, 3, 3, 2, 6, 0, 2, 2, 0, 6, 0, 4, 5, 1, 4, 7, 4, 3, 6, 5, 3, 8, 3, 3, 7, 5, 4, 9, 4, 4, 2, 1, 9, 7, 9, 1, 4, 4, 3, 5, 9, 2, 0, 1, 1, 3, 5, 1, 0, 0, 8, 8, 0, 6, 9, 9, 5, 2, 5, 6, 0, 7, 7, 4, 5, 0, 7, 0, 3, 2, 4, 2, 6, 7, 7, 5, 6, 4, 3, 2, 5, 3, 2, 5, 8, 0, 1, 2, 1, 4, 3, 4, 7, 4, 2, 2, 8, 5, 4, 1, 4, 2, 1, 4, 7, 1, 4, 7, 0, 1, 3, 0, 2, 7, 9, 2, 8, 7, 9, 7, 9, 2, 1, 7, 8, 0, 6, 9, 5, 8, 7, 0, 5, 2, 3, 2, 3, 1, 7, 8, 9, 7, 2, 6, 3, 1, 3, 2, 9, 5, 8, 2, 4, 1, 3, 5, 4, 4, 0, 9, 1, 6, 7, 0, 3, 9, 4, 7, 7, 5, 4, 4, 9, 6, 2, 2, 3, 9, 3, 1, 2, 3, 5, 1, 1, 2, 1, 7, 4, 3, 3, 7, 4, 8, 1, 4, 2, 0, 0, 3, 2, 2, 5, 7, 3, 0, 7, 9, 9, 0, 7, 1, 0, 0, 9, 5, 9, 6, 7, 4, 5, 2, 9, 8, 4, 4, 1, 6, 6, 3, 9, 1, 4, 7, 4, 6, 2, 5, 1, 8, 3, 2, 5, 8, 3, 3, 4, 1, 2, 4, 0, 9, 9, 0, 1, 4, 4, 0, 2, 2, 7, 8, 7, 3, 5, 3, 1, 5, 1, 1, 8, 8, 2, 6, 6, 7, 9, 1, 6, 4, 2, 6, 7, 3, 9, 7, 1, 2, 1, 7, 1, 7, 7, 2, 7, 2, 5, 7, 6, 8, 7, 2, 8, 1, 8, 6, 5, 1, 2, 4, 0, 4, 4, 3, 7, 6, 7, 1, 8, 7, 5, 2, 3, 5, 4, 8, 7, 8, 8, 7, 0, 5, 9, 2, 7, 7, 
8, 6, 4, 3, 5, 7, 0, 0, 9, 5, 5, 4, 8, 1, 9, 4, 2, 6, 6, 3, 3, 7, 6, 1, 5, 1, 5, 8, 7, 8, 5, 2, 4, 4, 9, 4, 5, 6, 1, 0, 5, 4, 8, 2, 1, 7, 5, 5, 5, 8, 0, 8, 7, 4, 9, 1, 5, 9, 3, 2, 7, 6, 6, 2, 4, 9, 2, 7, 2, 8, 4, 1, 5, 1, 1, 0, 6, 1, 3, 0, 7, 1, 4, 0, 3, 3, 6, 1, 0, 3, 6, 2, 7, 5, 2, 0, 9, 1, 8, 8, 9, 1, 3, 9, 4, 4, 1, 8, 3, 9, 5, 3, 9, 4, 1, 1, 9, 2, 9, 2, 4, 3, 4, 7, 1, 0, 9, 4, 4, 6, 2, 8, 7, 3, 7, 9, 5, 7, 4, 6, 3, 3, 4, 5, 5, 6, 5, 1, 6, 8, 6, 2, 8, 1, 6, 9, 6, 0, 3, 6}; float h_intsAsFloats[lenInts]; float *d_intsAsFloats; int * d_ints; float serial_intsAsFloats[lenInts]; struct timeval start, before , after; gettimeofday(&before , NULL); for (int i = 0; i < lenInts; i++){ serial_intsAsFloats[i] = (float) ints[i]; } gettimeofday(&after , NULL); printf("Serial time : %.0lf us\n\n" , time_diff(before , after) ); start = before; gettimeofday(&before , NULL); cudaMalloc((void **) &d_intsAsFloats, lenInts*sizeof(float)); gettimeofday(&after , NULL); printf("Parallel cudaMalloc : %.0lf us\n" , time_diff(before , after) ); gettimeofday(&before , NULL); cudaMalloc((void **) &d_ints, lenInts*sizeof(int)); gettimeofday(&after , NULL); printf("Parallel cudaMalloc : %.0lf us\n" , time_diff(before , after) ); gettimeofday(&before , NULL); cudaMemcpy(d_ints, ints, lenInts*sizeof(int), cudaMemcpyHostToDevice); gettimeofday(&after , NULL); printf("Parallel cudaMemcpy : %.0lf us\n" , time_diff(before , after) ); gettimeofday(&before , NULL); convertToFloat<<<1,lenInts>>>(d_intsAsFloats, d_ints); gettimeofday(&after , NULL); printf("Parallel calling kernal : %.0lf us\n" , time_diff(before , after) ); gettimeofday(&before , NULL); cudaMemcpy(h_intsAsFloats, d_intsAsFloats, lenInts*sizeof(float), cudaMemcpyDeviceToHost); gettimeofday(&after , NULL); printf("Parallel cudaMemcpy : %.0lf us\n" , time_diff(before , after) ); gettimeofday(&before , NULL); cudaFree(d_ints); gettimeofday(&after , NULL); printf("Parallel cudaFree : %.0lf us\n" , time_diff(before , after) ); 
// Time freeing the device output buffer.
gettimeofday(&before , NULL);
    cudaFree(d_intsAsFloats);
    gettimeofday(&after , NULL);
    printf("Parallel cudaFree : %.0lf us\n" , time_diff(before , after) );

    // NOTE(review): 'start' was copied from the timestamp taken BEFORE the
    // serial conversion loop, so this "Parallel total" also includes the
    // serial loop and its printf — confirm that is intended.
    printf("Parallel total: %.0lf us\n" , time_diff(start , after) );
    return 0;
}
56
#include <stdio.h>

// Number of threads
#define NT 1024

// Structure to hold the 2D Points
typedef struct {
    double x;
    double y;
} point;

// Structure to store the metric center result
typedef struct {
    double distance;   // distance value being compared (a max, then a min of maxes)
    int pointIndex;    // index of the candidate center point
} result;

// Function to calculate (Euclidean) distance between two points
__device__ double pointDistance(point *aPoint, point *bPoint) {
    double distance;
    distance = sqrt(((aPoint->x - bPoint->x) * (aPoint->x - bPoint->x)) + ((aPoint->y - bPoint->y) * (aPoint->y - bPoint->y)));
    return distance;
}

// Compare two distances (three-way: -1 if a < b, 1 if a > b, 0 if equal)
__device__ int compareDistance(double a, double b) {
    if(a < b) return -1;
    if(a > b) return 1;
    return 0;
}

// Assign the values of one result struct to another result struct
__device__ void assignResult(result *a, result *b) {
    a->pointIndex = b->pointIndex;
    a->distance = b->distance;
}

// Function to reduce the block's result
__device__ void reduceBlockResult(result *blockResult, result *newResult) {
    // Store this block's result in the devResult array at this block's index only if the new result
    // is better than the old result of this block.
    // (distance == -100.00 with pointIndex == -1 is treated as the
    // "no result yet" sentinel — presumably initialized host-side; confirm.)
    if((blockResult->distance == -100.00 && blockResult->pointIndex == -1) || (compareDistance(blockResult->distance, newResult->distance) == 1)) {
        assignResult(blockResult, newResult);
    }
}

// Array holding the result of each thread in a block
__shared__ result shrResult [NT];

// Kernel function to calculate the metric center: the point whose maximum
// distance to any other point is smallest. Blocks stride over candidate
// points i; threads within a block stride over the opposing points j.
// NOTE(review): the tree reduction below assumes blockDim.x == NT (1024).
extern "C" __global__ void metricCenter(point *pts, result *devResult, int n) {
    int thr, size, block, noOfBlocks;
    result thrResult, tempResult;
    block = blockIdx.x;
    noOfBlocks = gridDim.x;
    thr = threadIdx.x;
    size = NT;

    // Calculate the distance from this block's points to one of the other points.
    for(int i = block; i < n; i += noOfBlocks) {
        thrResult.distance = -1.0;  // "no distance yet" sentinel
        for(int j = thr; j < n; j += size) {
            tempResult.distance = pointDistance(&pts[i], &pts[j]);
            // Keep only the point whose distance is maximum from this block's point
            if(compareDistance(tempResult.distance, thrResult.distance) == 1) {
                tempResult.pointIndex = i;  // candidate center is point i itself
                assignResult(&thrResult, &tempResult);
            }
        }
        assignResult(&shrResult[thr], &thrResult);

        // Reduce the results of all threads in this block (keep the maximum distance)
        __syncthreads();
        for(int m = NT/2; m > 0 ; m >>= 1) {
            if(thr < m) {
                if(compareDistance(shrResult[thr].distance, shrResult[thr+m].distance) == -1) {
                    assignResult(&shrResult[thr], &shrResult[thr+m]);
                }
            }
            __syncthreads();
        }
        // If this is the 1st thread of the block, it will now have the reduced result of this block.
        if (thr == 0) {
            reduceBlockResult(&devResult[blockIdx.x], &shrResult[0]);
        }
    }
}
57
#include <stdio.h>
#include <stdlib.h>
#include <cuda_runtime.h>
#include <time.h>

// GPU kernel: element-wise C[i] = A[i] + B[i] for i < num_elements.
__global__ void vAdd(int* A, int* B, int* C, int num_elements){
    // global thread position
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if(i < num_elements){
        C[i] = A[i] + B[i];
    }
}

// CPU reference: element-wise C[i] = A[i] + B[i].
void sumarVectores(int* A, int* B, int* C, int num_elements){
    for(int i=0; i<num_elements; i++){
        C[i] = A[i] + B[i];
    }
}

// Times the same vector addition on the GPU and on the CPU (both measured
// with CUDA events) and prints both durations in milliseconds.
int main(){
    int num_elements = 100000;

    // Allocate host memory
    int * h_A = (int*)malloc(num_elements * sizeof(int));
    int * h_B = (int*)malloc(num_elements * sizeof(int));
    int * h_C = (int*)malloc(num_elements * sizeof(int));

    // Initialize the input vectors
    for(int i=0; i<num_elements; i++){
        h_A[i] = 1;
        h_B[i] = i;
    }

    cudaError_t err;
    int size = num_elements * sizeof(int);
    int * d_A = NULL;
    err = cudaMalloc((void **)&d_A, size);
    int * d_B = NULL;
    err = cudaMalloc((void **)&d_B, size);
    int * d_C = NULL;
    err = cudaMalloc((void **)&d_C, size);

    // Copy the inputs to the GPU.
    // FIX: the original also copied the uninitialized h_C buffer to d_C;
    // the kernel overwrites d_C entirely, so that copy only read
    // uninitialized host memory and has been dropped.
    err = cudaMemcpy(d_A, h_A, size, cudaMemcpyHostToDevice);
    err = cudaMemcpy(d_B, h_B, size, cudaMemcpyHostToDevice);

    int HilosPorBloque = 512;
    int BloquesPorGrid = (num_elements + HilosPorBloque -1) / HilosPorBloque;

    // Launch the kernel and time it with CUDA events
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start, 0);
    vAdd<<<BloquesPorGrid, HilosPorBloque>>>(d_A, d_B, d_C, num_elements);
    cudaEventRecord(stop,0);
    cudaEventSynchronize(stop);
    float tiempo_reserva_host;
    cudaEventElapsedTime(&tiempo_reserva_host, start, stop);
    printf("Tiempo de suma vectores DEVICE: %f\n", tiempo_reserva_host);
    cudaEventDestroy(start);
    cudaEventDestroy(stop);

    // Copy the result vector C back to the CPU
    err = cudaMemcpy(h_C, d_C, size, cudaMemcpyDeviceToHost);
    (void)err;  // error codes intentionally unchecked in this benchmark

    // Repeat the sum on the CPU and time it the same way
    cudaEvent_t start1, stop1;
    cudaEventCreate(&start1);
    cudaEventCreate(&stop1);
    cudaEventRecord(start1, 0);
    sumarVectores(h_A, h_B, h_C, num_elements);
    cudaEventRecord(stop1,0);
    cudaEventSynchronize(stop1);
    float tiempo_reserva_host1;
    cudaEventElapsedTime(&tiempo_reserva_host1, start1, stop1);
    printf("Tiempo de suma vectores HOST: %f\n", tiempo_reserva_host1);
    cudaEventDestroy(start1);
    cudaEventDestroy(stop1);

    // FIX: release device and host memory (the original leaked all six
    // allocations) and return an explicit exit status.
    cudaFree(d_A);
    cudaFree(d_B);
    cudaFree(d_C);
    free(h_A);
    free(h_B);
    free(h_C);
    return 0;
}
58
#include <stdio.h>
#include <stdint.h>

// Element-wise addition kernel: y[i] = x[i] + y[i].
// NOTE: despite the name there is no scalar factor 'a' — this is a plain
// vector add, not a true SAXPY.
__global__ void saxpy(int n, float *x, float *y)
{
    int i = blockIdx.x*blockDim.x + threadIdx.x;
    if (i < n)                   // guard the grid tail
        y[i] = x[i] + y[i];
}

// Adds x to y element-wise on the GPU; the result is written back into y.
// Blocking: the final cudaMemcpy synchronizes before return.
void cuda_array_culc_add_float(float* x, float* y, int32_t N)
{
    float *d_x, *d_y;
    cudaMalloc(&d_x, N*sizeof(float));
    cudaMalloc(&d_y, N*sizeof(float));

    cudaMemcpy(d_x, x, N*sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_y, y, N*sizeof(float), cudaMemcpyHostToDevice);

    // Ceil-divide N into 256-thread blocks
    saxpy<<<(N+255)/256, 256>>>(N, d_x, d_y);

    cudaMemcpy(y, d_y, N*sizeof(float), cudaMemcpyDeviceToHost);

    // The original leaked both device buffers on every call.
    cudaFree(d_x);
    cudaFree(d_y);
}
59
//
// Created by songzeceng on 2020/11/26.
//
#include "cuda_runtime.h"
#include "stdio.h"

#define N 64
#define TPB 32

// Map index i in [0, n) onto the unit interval [0, 1].
float scale(int i, int n) {
    return ((float ) i) / (n - 1);
}

// Absolute distance between two scalars. sqrtf keeps the computation in
// single precision (the original called the double-precision sqrt).
__device__ float distance(float x1, float x2) {
    return sqrtf((x2 - x1) * (x2 - x1));
}

// One thread per element: d_out[i] = |d_in[i] - ref|.
// Precondition: the launch supplies exactly N threads (N divisible by TPB);
// the kernel has no bounds guard of its own.
__global__ void distanceKernel(float *d_out, float *d_in, float ref) {
    int i = blockDim.x * blockIdx.x + threadIdx.x;
    float x = d_in[i];
    d_out[i] = distance(x, ref);
}

int main() {
    float ref = 0.5f;
    float *in;
    float *out;

    // Unified (managed) memory — accessible from both host and device.
    cudaMallocManaged(&in, N * sizeof(float ));
    cudaMallocManaged(&out, N * sizeof(float ));

    for (int i = 0; i < N; ++i) {
        in[i] = scale(i, N);
    }

    distanceKernel<<<N / TPB, TPB>>>(out, in, ref);
    // Managed memory must not be read on the host before the kernel finishes.
    cudaDeviceSynchronize();

    for (int i = 0; i < N; ++i) {
        printf("%.2f\t", out[i]);
    }
    printf("\n");

    cudaFree(in);
    cudaFree(out);
    return 0;
}
60
#include <iostream>
#include <ctime>
#include <cstdlib>

// Naive dense matrix multiply C = A * B for SQUARE matrices.
// One thread per output element. The inner loop runs over `rows`, which
// equals the shared dimension only because main() launches it with
// rows == cols — document rather than silently rely on it.
__global__ void matMulKernel(float* matA, float* matB, float* matC, int rows, int cols)
{
    dim3 gIdx;
    gIdx.y = blockIdx.y * blockDim.y + threadIdx.y;
    gIdx.x = blockIdx.x * blockDim.x + threadIdx.x;

    float sum = 0;
    if (gIdx.x < cols && gIdx.y < rows) {     // guard the grid tail
        for (int i = 0; i < rows; ++i) {
            sum += matA[gIdx.y * cols + i] * matB[i * cols + gIdx.x];
        }
        matC[gIdx.y * cols + gIdx.x] = sum;
    }
}

// Print a rows x cols matrix to stdout, one row per line.
void printMat(float* mat, int rows, int cols)
{
    for (int i = 0; i < rows; ++i) {
        for (int j = 0; j < cols; ++j) {
            int index = i * cols + j;
            std::cout << mat[index] << " ";
        }
        std::cout << "\n";
    }
}

int main(int argc, char** argv)
{
    if (argc != 2) {
        std::cout << "Usage: " << argv[0] << " <DIM>" << std::endl;
        exit(1);
    }
    int matDim = atoi(argv[1]);
    const int NUM_COLS = matDim;
    const int NUM_ROWS = matDim;

    // Host inputs: A = running index pattern, B = 2 * identity,
    // so the product should be exactly 2 * A.
    float* matA_h = new float[NUM_ROWS * NUM_COLS];
    float* matB_h = new float[NUM_ROWS * NUM_COLS];
    for (int i = 0; i < NUM_ROWS; ++i) {
        for (int j = 0; j < NUM_COLS; ++j) {
            int index = i * NUM_COLS + j;
            matA_h[index] = index;
            //scale matrix (factor 2)
            matB_h[index] = (i == j) ? 2 : 0;
        }
    }

    // Device input matrices
    float* matA_d;
    float* matB_d;
    int matSize = NUM_ROWS * NUM_COLS * sizeof(float);
    cudaMalloc(&matA_d, matSize);
    cudaMalloc(&matB_d, matSize);
    cudaMemcpy(matA_d, matA_h, matSize, cudaMemcpyHostToDevice);
    cudaMemcpy(matB_d, matB_h, matSize, cudaMemcpyHostToDevice);

    // Device output matrix, zero-initialized
    float* matC_d;
    cudaMalloc(&matC_d, matSize);
    cudaMemset(matC_d, 0, matSize);

    // 16x16 thread blocks, ceil-divided over the (square) matrix
    dim3 bDim(16, 16);
    dim3 gDim;
    gDim.x = (NUM_ROWS + 16 - 1) / 16; //ceil(num_rows/16)
    gDim.y = (NUM_ROWS + 16 - 1) / 16;

    // Time the kernel with CUDA events
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start, 0);
    matMulKernel<<<gDim, bDim>>>(matA_d, matB_d, matC_d, NUM_ROWS, NUM_COLS);
    cudaEventRecord(stop, 0);
    cudaEventSynchronize(stop);
    float elapsed;
    cudaEventElapsedTime(&elapsed, start, stop);

    // Copy the result back and print it
    float* matC_h = new float[NUM_ROWS * NUM_COLS];
    cudaMemcpy(matC_h, matC_d, matSize, cudaMemcpyDeviceToHost);
    printMat(matC_h, NUM_ROWS, NUM_COLS);
    std::cout << std::endl << "Compute time: " << elapsed << "ms" << std::endl;

    // Release all resources (the original leaked every host array, every
    // device buffer and both events).
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    cudaFree(matA_d);
    cudaFree(matB_d);
    cudaFree(matC_d);
    delete[] matA_h;
    delete[] matB_h;
    delete[] matC_h;
    return 0;
}
61
/**
 * Copyright 1993-2015 NVIDIA Corporation. All rights reserved.
 *
 * Please refer to the NVIDIA end user license agreement (EULA) associated
 * with this source code for terms and conditions that govern your use of
 * this software. Any use, reproduction, disclosure, or distribution of
 * this software and related documentation outside the terms of the EULA
 * is strictly prohibited.
 *
 */

/**
 * Collatz conjecture verifier (adapted from the CUDA vectorAdd sample;
 * the sample's original "vector addition" comments no longer applied).
 *
 * Each thread iterates the Collatz map (3n+1 for odd n, n/2 for even n) on
 * a set of odd numbers below maxNumber and stops once the trajectory drops
 * below its starting value. If the conjecture failed for some number, that
 * thread would spin forever — a hang is the failure signal.
 */

#include <stdio.h>

// For the CUDA runtime routines (prefixed with "cuda_")
#include <cuda_runtime.h>

/**
 * CUDA kernel: verify the Collatz conjecture for all odd numbers in
 * (2, maxNumber). The fixed-size grid is logically re-run timesToRunGrid
 * times so it covers the whole range.
 */
__global__ void verifyCollatz(int64_t maxNumber)
{
    // Number of grid passes needed to cover the range.
    // NOTE(review): the int64 quotient is truncated into a 32-bit int;
    // fine for the maxNumber used in main(), but confirm for larger ranges.
    int timesToRunGrid = maxNumber / (blockDim.x * gridDim.x) + 1;
    int64_t number = 0;
    int64_t i = 0;
    for (int64_t gridRunNumber = 0; gridRunNumber < timesToRunGrid; ++gridRunNumber)
    {
        // odd numbers only — this thread's candidate for the current pass
        number = 2 * (blockDim.x * gridDim.x * gridRunNumber +
                      blockDim.x * blockIdx.x + threadIdx.x) + 1;
        i = number;
        if (number > 2 && number < maxNumber)
        {
            // Iterate until the trajectory falls below the start value;
            // all smaller starting values are covered by other candidates.
            while (i >= number)
            {
                if (i & 0x1)
                {
                    /* odd case */
                    i = i * 3 + 1;
                }
                else
                {
                    /* even case */
                    i = i >> 1;
                }
            }
        }
    }
}

/**
 * Host main routine: pick a launch configuration via the occupancy API and
 * run the verifier over all numbers below 2^32.
 */
int main()
{
    // Error code to check return values for CUDA calls
    cudaError_t err = cudaSuccess;

    int64_t maxNumber = 256ll * 256ll * 256ll * 256ll;   // 2^32

    // Defaults; overwritten by the occupancy heuristic below.
    int threadsPerBlock = 256;
    int blocksPerGrid = 256;

    // use CUDA builtin heuristics to get max performance
    cudaOccupancyMaxPotentialBlockSize(
        &blocksPerGrid,
        &threadsPerBlock,
        (void*) verifyCollatz,
        0,
        0);

    printf("CUDA kernel launch with %d blocks of %d threads\n", blocksPerGrid, threadsPerBlock);
    verifyCollatz<<<blocksPerGrid, threadsPerBlock>>>(maxNumber);
    err = cudaGetLastError();   // catches launch-configuration errors
    cudaDeviceSynchronize();    // wait for the kernel to finish

    if (err != cudaSuccess)
    {
        fprintf(stderr, "Failed to launch collatz kernel (error code %s)!\n", cudaGetErrorString(err));
        exit(EXIT_FAILURE);
    }

    printf("Done\n");
    return 0;
}
62
extern "C" {
// Accumulate a logistic-map style term into z:
//   z[i] += t[i] * x[i] * (1 - x[i])   for i < lengthX.
// One thread per element; the guard handles the grid tail.
__global__ void tx1mx_32(const int lengthX, const float *t, const float *x, float *z)
{
    int i = threadIdx.x + blockIdx.x * blockDim.x;
    if (i < lengthX) {
        // 1.0f keeps the arithmetic in single precision; the original's
        // bare 1.0 promoted the whole expression to double.
        z[i] += t[i]*x[i]*(1.0f-x[i]);
    }
}
}
63
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <pthread.h>
#include <unistd.h>
#include <ctype.h>

// Arguments shared by all CPU worker threads. All threads deliberately
// operate on the SAME buffers — this is a load generator, not a
// correctness test.
struct ThreadStruct {
    float *a, *b, *c;        // input/output vectors (shared)
    int size, elapsed_time;  // vector length; run duration in seconds
};

// Grid-stride element-wise product: c[i] = a[i] * b[i].
__global__ void vectorMultGPU(float *a, float *b, float *c, int n)
{
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    while (i < n) {
        c[i] = a[i] * b[i];
        i += blockDim.x * gridDim.x;
    }
}

// CPU version of the same element-wise product.
void vectorMultCPU(float *a, float *b, float *c, int n)
{
    int i;
    for (i = 0; i < n; ++i) {
        c[i] = a[i] * b[i];
    }
}

// Thread body: repeatedly run the CPU multiply until elapsed_time seconds pass.
void *threadCPU(void *threadarg)
{
    time_t curTime, baseTime;
    struct ThreadStruct *data;
    data = (struct ThreadStruct*) threadarg;
    baseTime = curTime = time(NULL);
    while (curTime < baseTime + data->elapsed_time)
    {
        vectorMultCPU(data->a, data->b, data->c, data->size);
        curTime = time(NULL);
    }
    return NULL;
}

// CPU+GPU stress test: -s vector size, -t duration (s), -c CPU threads.
int main(int argc, char **argv)
{
    int cores = 4;
    int size = 100000;
    int elapsed_time = 10;
    int option;

    while ((option = getopt (argc, argv, "s:t:c:")) != -1)
    {
        switch (option)
        {
        case 's': size = atoi(optarg); break;
        case 't': elapsed_time = atoi(optarg); break;
        case 'c': cores = atoi(optarg); break;
        case '?':
            if (optopt == 's' || optopt == 't' || optopt == 'c')
                fprintf (stderr, "Option -%c requires an argument.\n", optopt);
            else if (isprint (optopt))
                fprintf (stderr, "Unknown option `-%c'.\n", optopt);
            else
                fprintf (stderr, "Unknown option character `\\x%x'.\n", optopt);
            return 1;
        default:
            abort ();
        }
    }

    pthread_t *thread_arr = (pthread_t*)malloc(cores*sizeof(pthread_t));
    float *a, *b, *c, *GPUout;
    float *d_a, *d_b, *d_c;
    int i;

    a = (float*)malloc(size*sizeof(float));
    b = (float*)malloc(size*sizeof(float));
    c = (float*)malloc(size*sizeof(float));
    GPUout = (float*)malloc(size*sizeof(float));
    cudaMalloc(&d_a, size*sizeof(float));
    cudaMalloc(&d_b, size*sizeof(float));
    cudaMalloc(&d_c, size*sizeof(float));

    for (i = 0; i < size; ++i)
    {
        a[i] = b[i] = i;
        c[i] = 0;
    }

    cudaMemcpy(d_a, a, size*sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_b, b, size*sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_c, c, size*sizeof(float), cudaMemcpyHostToDevice);

    time_t curTime, baseTime;
    struct ThreadStruct Threaddata = {a, b, c, size, elapsed_time};

    // Spin up the CPU stress threads...
    for (i = 0; i < cores; ++i)
        pthread_create(&thread_arr[i], NULL, threadCPU, (void *) &Threaddata);

    // ...while the host keeps the GPU busy for the same duration.
    baseTime = curTime = time(NULL);
    while (curTime < baseTime + elapsed_time)
    {
        cudaDeviceSynchronize();   // wait for the previous launch before re-launching
        vectorMultGPU<<< (size+511)/512, 512 >>>(d_a, d_b, d_c, size);
        curTime = time(NULL);
    }

    for (i = 0; i < cores; ++i)
        pthread_join(thread_arr[i], NULL);

    cudaMemcpy(GPUout, d_c, size*sizeof(float), cudaMemcpyDeviceToHost);

    // Release everything (the original leaked GPUout and thread_arr).
    free(a);
    free(b);
    free(c);
    free(GPUout);
    free(thread_arr);
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);
    printf("Test Complete\n");
    return 0;
}
64
#include <curand.h>
#include <curand_kernel.h>

#define DIM 1600
#define PI 3.14159265

// Rotate an RGB image by angle `a` (radians) about the centre and pack the
// result into the uchar4 framebuffer `ptr`.
// R_input/G_input/B_input are per-channel planes; `i_size` is the row pitch
// in bytes and `col`/`row` are the source image dimensions.
// One thread per output pixel; assumes a 2-D launch covering the output.
// NOTE(review): ximg/yimg can go negative near the edges and are only
// checked against the upper bounds; also cos/sin here are the
// double-precision overloads — confirm whether cosf/sinf were intended.
__global__ void Rotate(uchar4 *ptr, unsigned char *R_input, unsigned char *G_input, unsigned char *B_input, size_t i_size, float a, unsigned long col, unsigned long row)
{
    // Output pixel coordinates and linear framebuffer offset.
    int x = threadIdx.x + (blockIdx.x * blockDim.x);
    int y = threadIdx.y + (blockIdx.y * blockDim.y);
    int offset = x + y * blockDim.x * gridDim.x;

    // Re-centre so the rotation happens about the middle of the output.
    x = x - (blockDim.x * gridDim.x / 2);
    y = y - (blockDim.y * gridDim.y / 2);

    unsigned char* f_r, *f_g, *f_b;
    // Inverse-rotate the output coordinate back into source-image space.
    int ximg = (x*cos(a) + y*sin(a)) + (col/2), yimg = (y*cos(a) - x*sin(a)) + (row/2);

    if (ximg < col && yimg < row) {
        // Row base pointers computed via the byte pitch.
        f_r = (unsigned char*)((char*)R_input + yimg*i_size);
        f_g = (unsigned char*)((char*)G_input + yimg*i_size);
        f_b = (unsigned char*)((char*)B_input + yimg*i_size);
        ptr[offset].x = f_r[ximg];
        ptr[offset].y = f_g[ximg];
        ptr[offset].z = f_b[ximg];
        ptr[offset].w = 255;   // opaque alpha
    }
    else{
        // Source pixel falls outside the image: paint black.
        ptr[offset].x = 0;
        ptr[offset].y = 0;
        ptr[offset].z = 0;
        ptr[offset].w = 255;
    }
}

// Scale (zoom) the RGB image by factor `s` about the centre with bilinear
// interpolation, writing per-channel planes R/G/B_output (row pitch `pitch2`
// in elements).
// NOTE(review): the "next row" sample is addressed as `ximg + i_size`, i.e.
// the byte pitch is used as an element stride — consistent only because each
// plane element is one byte; verify before changing element types.
__global__ void Scale(unsigned char *R_input, unsigned char *G_input,unsigned char *B_input, unsigned char *R_output, unsigned char *G_output,unsigned char *B_output, size_t i_size, size_t pitch2, float s, unsigned long col, unsigned long row){
    // Output coordinates kept as float so the inverse scale preserves
    // sub-pixel position for the interpolation weights.
    float x = threadIdx.x + (blockIdx.x * blockDim.x);
    float y = threadIdx.y + (blockIdx.y * blockDim.y);
    int offset = x + y * pitch2;

    // Re-centre on the DIM x DIM output, then invert the zoom.
    x = x - (DIM / 2);
    y = y - (DIM / 2);
    unsigned char* f_r, *f_g, *f_b;
    x /= s;
    y /= s;

    // Source-space coordinates of the sample point.
    int ximg = x + (col/2), yimg = y + (row/2);
    if (ximg < (col - 1) && yimg < (row - 1)) {
        f_r = (unsigned char*)((char*)R_input + yimg*i_size);
        f_g = (unsigned char*)((char*)G_input + yimg*i_size);
        f_b = (unsigned char*)((char*)B_input + yimg*i_size);

        // Fractional offsets -> bilinear weights.
        float cx = x - floor(x);
        float cy = y - floor(y);

        // Red: blend horizontally within two rows, then vertically.
        float R1 = f_r[ximg]*(1 - cx) + f_r[ximg + 1]*(cx);
        float R2 = f_r[ximg + i_size]*(1 - cx) + f_r[ximg + 1 + i_size]*(cx);
        R_output[offset] = R1*(1 - cy) + R2*(cy);
        // Green channel, same scheme.
        R1 = f_g[ximg]*(1 - cx) + f_g[ximg + 1]*(cx);
        R2 = f_g[ximg + i_size]*(1 - cx) + f_g[ximg + 1 + i_size]*(cx);
        G_output[offset] = R1*(1 - cy) + R2*(cy);
        // Blue channel.
        R1 = f_b[ximg]*(1 - cx) + f_b[ximg + 1]*(cx);
        R2 = f_b[ximg + i_size]*(1 - cx) + f_b[ximg + 1 + i_size]*(cx);
        B_output[offset] = R1*(1 - cy) + R2*(cy);
    }else{
        // Outside the source image: black.
        R_output[offset] = 0;
        G_output[offset] = 0;
        B_output[offset] = 0;
    }
}
65
// Minimal float3 vector algebra helpers for device code.

// Dot product of two vectors.
inline __device__ float dot(float3 a, float3 b) {
    return a.x * b.x + a.y * b.y + a.z * b.z;
}

// `u * v` on two float3s is also the dot product (operator form kept for
// callers that prefer it).
inline __device__ float operator*(float3 a, float3 b) {
    return dot(a, b);
}

// Uniform scaling by a scalar, both operand orders.
inline __device__ float3 operator*(float3 a, float b) {
    return make_float3(a.x * b, a.y * b, a.z * b);
}
inline __device__ float3 operator*(float b, float3 a) {
    return make_float3(a.x * b, a.y * b, a.z * b);
}

// Component-wise division by a scalar.
inline __device__ float3 operator/(float3 a, float b) {
    return make_float3(a.x / b, a.y / b, a.z / b);
}

// Component-wise addition; the scalar overloads broadcast the scalar.
inline __device__ float3 operator+(float3 a, float3 b) {
    return make_float3(a.x + b.x, a.y + b.y, a.z + b.z);
}
inline __device__ float3 operator+(float3 a, float b) {
    return make_float3(a.x + b, a.y + b, a.z + b);
}
inline __device__ float3 operator+(float b, float3 a) {
    return make_float3(a.x + b, a.y + b, a.z + b);
}

// Component-wise subtraction; the scalar overload subtracts from each
// component.
inline __device__ float3 operator-(float3 a, float3 b) {
    return make_float3(a.x - b.x, a.y - b.y, a.z - b.z);
}
inline __device__ float3 operator-(float3 a, float b) {
    return make_float3(a.x - b, a.y - b, a.z - b);
}
/*inline __device__ float3 operator-(float b, float3 a){
    return make_float3(a.x-b,a.y-b,a.z-b);
}*/

// Euclidean norm via the numerically robust norm3df intrinsic.
inline __device__ float length(float3 a) {
    return norm3df(a.x, a.y, a.z);
}

// Euclidean distance between two points.
inline __device__ float distance(float3 a, float3 b) {
    return norm3df(a.x - b.x, a.y - b.y, a.z - b.z);
}

// Clamp x into the closed interval [a, b].
inline __device__ float clamp(float x, float a, float b) {
    return fmaxf(a, fminf(b, x));
}
66
/***************************************************************************//**
 * \file LHS1.cu
 * \author Christopher Minar (minarc@oregonstate.edu)
 * \brief kernels to generate the left hand side for the intermediate velocity solve
 */

#include "LHS1.h"

namespace kernels
{
// Assemble the interior rows of the sparse LHS (COO triplets row/col/val)
// for the u-velocity nodes of the implicit diffusion solve.
// One thread per u-node on the (nx-1) x ny staggered grid; boundary nodes
// return early (their rows are filled elsewhere). dx/dy are cell widths,
// dt the time step, nu the viscosity. ghostTagsUV is unused here.
__global__ void LHS1_mid_luo_X(int *row, int *col, double *val, int *ghostTagsUV, double *dx, double *dy, double dt, double nu, int nx, int ny)
{
    if (threadIdx.x + blockDim.x * blockIdx.x >= (nx-1)*ny)
        return;
    int i   = threadIdx.x + blockDim.x * blockIdx.x,
        I   = i % (nx-1),   // x index of this u-node
        J   = i / (nx-1);   // y index of this u-node
    // Skip domain-boundary nodes.
    if (I == 0 || I == nx-2 || J == 0 || J == ny-1)
        return;

    //int numE = i*5;
    // Offset of this row's first entry in the COO arrays:
    // top row - corner mid sides current row
    int numE = (nx-1)*4 - 2 + (J-1)*(5*(nx-1) - 2) + I*5 - 1;
    double temp = 1;   // running diagonal coefficient (identity + neighbor terms)

    //EAST
    row[numE] = i;
    col[numE] = i+1;
    val[numE] = -0.5*dt*nu*(1/(dx[I+1]*(dx[I+1]+dx[I])*0.5));
    temp      += 0.5*dt*nu*(1/(dx[I+1]*(dx[I+1]+dx[I])*0.5));
    numE++;

    //WEST
    row[numE] = i;
    col[numE] = i-1;
    val[numE] = -0.5*dt*nu*(1/(dx[I]*(dx[I+1]+dx[I])*0.5));
    temp      += 0.5*dt*nu*(1/(dx[I]*(dx[I+1]+dx[I])*0.5));
    numE++;

    //NORTH
    row[numE] = i;
    col[numE] = i+(nx-1);   // u-node directly above
    val[numE] = -0.5*dt*nu*(1/(dy[J]*(dy[J+1]+dy[J])*0.5));
    temp      += 0.5*dt*nu*(1/(dy[J]*(dy[J+1]+dy[J])*0.5));
    numE++;

    //SOUTH
    row[numE] = i;
    col[numE] = i-(nx-1);   // u-node directly below
    val[numE] = -0.5*dt*nu*(1/(dy[J]*(dy[J-1]+dy[J])*0.5));
    temp      += 0.5*dt*nu*(1/(dy[J]*(dy[J-1]+dy[J])*0.5));
    numE++;

    //CENTER
    row[numE] = i;
    col[numE] = i;
    val[numE] = temp;
    numE++;
}

// Same assembly for the v-velocity nodes on the nx x (ny-1) grid; their
// global indices are offset by (nx-1)*ny so they follow the u-nodes.
// NOTE(review): the SOUTH coefficient uses dy[J]*(dy[J]+dy[J+1]) while
// NORTH uses dy[J+1]*(dy[J]+dy[J+1]) — asymmetric vs. the X kernel's
// dy[J-1] south term; confirm this is intended for the staggered v-grid.
__global__ void LHS1_mid_luo_Y(int *row, int *col, double *val, int *ghostTagsUV, double *dx, double *dy, double dt, double nu, int nx, int ny)
{
    if (threadIdx.x + blockDim.x * blockIdx.x >= nx*(ny-1))
        return;
    int ip  = threadIdx.x + blockDim.x * blockIdx.x,
        I   = ip % nx,            // x index of this v-node
        J   = ip / nx,            // y index of this v-node
        i   = ip + (nx-1)*ny;     // global matrix row (v-nodes after u-nodes)
    // Skip domain-boundary nodes.
    if (I == 0 || I == nx-1 || J == 0 || J == ny-2)
        return;

    // Offset past all u-rows, then past this block's preceding v-rows.
    int numE = (nx-1)*ny*5 - 2*ny-2*(nx-1) + nx*4-2 + (J-1)*(nx*5 - 2) + I*5 - 1;
    double temp = 1;   // running diagonal coefficient

    //EAST
    row[numE] = i;
    col[numE] = i+1;
    val[numE] = -0.5*dt*nu*(1/(dx[I]*(dx[I]+dx[I+1])*0.5));
    temp      += 0.5*dt*nu*(1/(dx[I]*(dx[I]+dx[I+1])*0.5));
    numE++;

    //WEST
    row[numE] = i;
    col[numE] = i-1;
    val[numE] = -0.5*dt*nu*(1/(dx[I]*(dx[I]+dx[I-1])*0.5));
    temp      += 0.5*dt*nu*(1/(dx[I]*(dx[I]+dx[I-1])*0.5));
    numE++;

    //NORTH
    row[numE] = i;
    col[numE] = i + nx;   // v-node directly above
    val[numE] = -0.5*dt*nu*(1/(dy[J+1]*(dy[J]+dy[J+1])*0.5));
    temp      += 0.5*dt*nu*(1/(dy[J+1]*(dy[J]+dy[J+1])*0.5));
    numE++;

    //SOUTH
    row[numE] = i;
    col[numE] = i-nx;     // v-node directly below
    val[numE] = -0.5*dt*nu*(1/(dy[J]*(dy[J]+dy[J+1])*0.5));
    temp      += 0.5*dt*nu*(1/(dy[J]*(dy[J]+dy[J+1])*0.5));
    numE++;

    //CENTER
    row[numE] = i;
    col[numE] = i;
    val[numE] = temp;
    numE++;
}
}//end kernel
67
#include <iostream>
#include <math.h>
#include <time.h>
#include <stdlib.h>
#include <random>
#include <vector>
#include <chrono>
#include <deque>
#include <algorithm>
#include <iterator>
#include <curand.h>
#include <curand_kernel.h>

#define BLOCK_SIZE 1024

// One reduction step: fold the mirror half of arr[0..n) into the lower half
// by taking pairwise minima, so min(arr[0..n)) survives in the first
// (n+1)/2 slots. Threads i and n-1-i touch each other's slots concurrently,
// but both only ever write min() of the two original values, so either
// interleaving preserves the minimum.
__global__ void min_reduce(int *arr, const int n) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        int j = n - i - 1;     // mirror index
        int x = arr[i];
        int y = arr[j];
        arr[i] = x < y ? x : y;
    }
}

// Repeatedly ceil-halve the active prefix until arr[0] holds the global
// minimum. arr must be managed (or host-accessible) memory.
int get_min_val(int *min_arr, int n) {
    while (n > 1) {
        min_reduce<<<(n + BLOCK_SIZE - 1)/BLOCK_SIZE, BLOCK_SIZE>>>(min_arr, n);
        n = (n + 1) / 2;       // ceil-halve so odd prefixes keep the middle element
    }
    cudaDeviceSynchronize();   // required before touching managed memory on the host
    return min_arr[0];
}

// Fill arr with n uniform random ints in [min_val, max_val].
// (Defaults were double literals 0.0/1000.0 on int parameters; use int.)
void random_vector(int *arr, const int n, const int min_val = 0, const int max_val = 1000) {
    static std::random_device rd;
    static std::mt19937 mte(rd());
    std::uniform_int_distribution<int> dist(min_val, max_val);
    for (int i = 0; i < n; i++) {
        arr[i] = dist(mte);
    }
}

// CPU reference check: does pred equal the true minimum of arr[0..n)?
bool check_correctness(int *arr, int pred, int n) {
    int min_el = 1 << 30;      // larger than any value random_vector produces
    for (int i = 0; i < n; i++) {
        if (arr[i] < min_el) {
            min_el = arr[i];
        }
    }
    return pred == min_el;
}

int main(void) {
    int n = 1 << 25;
    int *arr, *temp;
    cudaMallocManaged(&arr, n*sizeof(int));
    random_vector(arr, n, 0, 10000);

    // Keep an untouched host copy for verification — the reduction
    // destroys arr in place.
    temp = new int[n];
    std::copy(arr, arr+n, temp);

    // Time the GPU reduction.
    auto t1 = std::chrono::high_resolution_clock::now();
    int min_el = get_min_val(arr, n);
    auto t2 = std::chrono::high_resolution_clock::now();
    auto duration = std::chrono::duration_cast<std::chrono::milliseconds>( t2 - t1 ).count();
    std::cout << duration << std::endl;

    // Time and print the CPU verification (1 = correct).
    t1 = std::chrono::high_resolution_clock::now();
    std::cout << check_correctness(temp, min_el, n) << std::endl;
    t2 = std::chrono::high_resolution_clock::now();
    duration = std::chrono::duration_cast<std::chrono::milliseconds>( t2 - t1 ).count();
    std::cout << duration << std::endl;

    delete[] temp;   // the original leaked this 128 MB buffer
    cudaFree(arr);
    return 0;
}
68
//put C:/Users/molly/Desktop/289Q/project/main.cu
//nvcc -std=c++11 main.cu
//
// Benes-network routing simulator: a pipeline of thread blocks, one block
// per network stage, hand-synchronized through the `valid` flag array in
// managed memory. Each block spin-waits on its neighbors, so ALL blocks
// must be resident simultaneously — the launch configuration is part of
// the correctness contract.

// includes, system
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <fstream>
#include <cooperative_groups.h>
#include <cooperative_groups.h>

// includes, project
#include <cuda.h>
#include <cuda_runtime.h>

using namespace cooperative_groups;
namespace cg = cooperative_groups;

// #define FILESIZE_CHAR 1048576
#define FILESIZE_CHAR 1048576
#define FILESIZE_INT FILESIZE_CHAR/4

// Build the per-stage permutation lookup table for an N-input Benes network.
// First half: butterfly-style even/odd splits with halving sub-block size M;
// second half: the mirrored inverse permutations derived from the first half.
__host__ void makeLUT(int N, int* LUT){
    int M = N;
    int even = 0;
    int odd = 1;
    int LUTsize = N*(log2((double)N)*2 - 2);
    for (int i =0; i < LUTsize/2; i+=N){
        for (int j=0; j<N; j+=M){
            // Lower half of each sub-block gets the even sources...
            for (int k =0; k<M/2; k++){
                LUT[i+j+k] = even;
                even+=2;
            }
            // ...upper half gets the odd sources.
            for (int k =M/2; k<M; k++){
                LUT[i+j+k] = odd;
                odd+=2;
            }
        }
        even=0;
        odd=1;
        M = M/2;   // sub-blocks halve at every stage
    }
    // Mirror stages: invert the corresponding forward permutation.
    for (int x=LUTsize-N, i=LUTsize/2; i<LUTsize;i+=N, x-=N){
        for(int j=0; j<N; j++){
            int newIndex = LUT[x+j-LUTsize/2];
            LUT[newIndex + i] = j;
        }
    }
    return;
}

// Mask with the low n bits set (the bits the sort keys live in).
int createMask(int n) {
    int r = 0;
    for (int i=0; i<n; i++)
        r |= 1 << i;
    return r;
}

// Pipeline kernel. Block 0 (and block `block`) feed data in; interior
// blocks apply their stage's LUT permutation and a compare-exchange; the
// last block of each pipeline writes to `output`. `valid` slots are the
// producer/consumer handshake flags (volatile + spin-wait).
// NOTE(review): `network` and `output` are char*, but int values from
// `data` are stored through them — values are truncated to 8 bits; the
// host-side check only compares masked low bits, which is presumably why
// this passes. Confirm intended.
__global__ void benes(int N, int block, char* network, int* LUT, volatile int* valid, int mask, int* data, char* output){
    int idx = threadIdx.x;                 // one thread per 2x2 switch
    int in1, in2, in1_index, in2_index;
    int readOffset=0;
    int fileSize = FILESIZE_INT/2;         // first pipeline handles the first half
    int readOffsetSecondNet=fileSize;      // second pipeline starts at the second half
    thread_group g = tiled_partition(this_thread_block(), 2); //stops working after 32?
    if(blockIdx.x == 0){
        // Input stage of the first pipeline: read pairs, compare-exchange,
        // publish into stage 1's slice of `network`.
        while(readOffset < fileSize){
            in1 = data[idx*2 + readOffset];
            in2 = data[idx*2+1 + readOffset];
            readOffset+=N;
            // Wait until the consumer has drained the previous batch.
            while((valid[idx + (blockIdx.x+1)*(N/2)])==1);
            if ((in1 & mask) < (in2 & mask)){
                network[idx*2 + (blockIdx.x+1)*N] = in1;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in2;
            }
            else{
                network[idx*2 + (blockIdx.x+1)*N] = in2;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in1;
            }
            g.sync(); // __syncthreads();
            valid[idx + (blockIdx.x+1)*(N/2)]=1;// valid[idx*2 + 1 + (blockIdx.x+1)*N]=1;
        }
    }
    else if ( blockIdx.x < block) {
        // Interior stage of the first pipeline: consume from the previous
        // stage via this stage's LUT, compare-exchange, publish onward
        // (or to `output` if this is the final stage).
        while(readOffset < fileSize){
            while((valid[idx + (blockIdx.x)*(N/2)])==0);
            in1_index = LUT[idx*2 + (blockIdx.x-1)*N];
            in2_index = LUT[idx*2 + (blockIdx.x-1)*N + 1];
            in1 = network[in1_index+(blockIdx.x)*N];
            in2 = network[in2_index+(blockIdx.x)*N];
            valid[idx + (blockIdx.x)*(N/2)] = 0;// valid[idx*2 + 1 + (blockIdx.x)*N] = 0;
            while((valid[idx + (blockIdx.x+1)*(N/2)])==1);
            if ((in1 & mask) < (in2 & mask)){
                network[idx*2 + (blockIdx.x+1)*N] = in1;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in2;
            }
            else{
                network[idx*2 + (blockIdx.x+1)*N] = in2;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in1;
            }
            if (blockIdx.x != gridDim.x - 1 && blockIdx.x != block-1){
                valid[idx + (blockIdx.x+1)*(N/2)]=1;// valid[idx*2 + 1 + (blockIdx.x+1)*N]=1;
                g.sync(); // __syncthreads();
            }
            else {
                // Final stage: emit the sorted pair.
                output[idx*2 + readOffset] = network[idx*2 + (blockIdx.x+1)*N];
                output[idx*2+1 + readOffset] = network[idx*2 + (blockIdx.x+1)*N + 1];
            }
            readOffset += N;
        }
    }
    else if(blockIdx.x == block){
        // Input stage of the second pipeline (second half of the file).
        while(readOffsetSecondNet < FILESIZE_INT){
            in1 = data[idx*2 + readOffsetSecondNet];
            in2 = data[idx*2+1 + readOffsetSecondNet];
            readOffsetSecondNet+=N;
            while((valid[idx + (blockIdx.x+1)*(N/2)])==1);
            if ((in1 & mask) < (in2 & mask)){
                network[idx*2 + (blockIdx.x+1)*N] = in1;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in2;
            }
            else{
                network[idx*2 + (blockIdx.x+1)*N] = in2;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in1;
            }
            valid[idx + (blockIdx.x+1)*(N/2)]=1;// valid[idx*2 + 1 + (blockIdx.x+1)*N]=1;
            // __syncthreads();
            g.sync();
        }
    }
    else{
        // Interior stage of the second pipeline; LUT stage index is taken
        // modulo `block` so both pipelines share one LUT.
        while(readOffsetSecondNet < FILESIZE_INT){
            // printf("waiting for previous block %d to produce\n", blockIdx.x - 1);
            while((valid[idx + (blockIdx.x)*(N/2)])==0);
            // printf("waiting for previous block %d to produce\n", blockIdx.x - 1);
            in1_index = LUT[idx*2 + ((blockIdx.x%block)-1)*N];
            in2_index = LUT[idx*2 + ((blockIdx.x%block)-1)*N + 1];
            in1 = network[in1_index+(blockIdx.x)*N];
            in2 = network[in2_index+(blockIdx.x)*N];
            // printf("Block %d thread %d consumed %d %d\n", blockIdx.x,threadIdx.x, in1, in2);
            valid[idx + (blockIdx.x)*(N/2)] = 0; //valid[idx*2 + 1 + (blockIdx.x)*N] = 0;
            //printf("waiting for next block %d to consume\n", blockIdx.x + 1);
            while((valid[idx + (blockIdx.x+1)*(N/2)])==1);
            if ((in1 & mask) < (in2 & mask)){
                network[idx*2 + (blockIdx.x+1)*N] = in1;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in2;
                // printf("Block %d produced %d %d\n", blockIdx.x, in1, in2);
            }
            else{
                network[idx*2 + (blockIdx.x+1)*N] = in2;
                network[idx*2 + (blockIdx.x+1)*N + 1] = in1;
            }
            //printf("Block %d produced %d %d\n", blockIdx.x, in1, in2);
            if (blockIdx.x != gridDim.x - 1){
                valid[idx + (blockIdx.x+1)*(N/2)]=1; //valid[idx*2 + 1 + (blockIdx.x+1)*N]=1;
                // __syncthreads();
                g.sync();
                //printf("valid:%d index:%d\n",valid[idx + (blockIdx.x+1)*N],idx + (blockIdx.x+1)*N);
            }
            else {
                output[idx*2 + readOffsetSecondNet] = network[idx*2 + (blockIdx.x+1)*N];
                output[idx*2+1 + readOffsetSecondNet] = network[idx*2 + (blockIdx.x+1)*N + 1];
            }
            readOffsetSecondNet += N;
        }
    }
}

// Host driver: read a binary input file, build the LUT, launch the pipeline
// grid, then verify that each N-element window of the output is sorted on
// the masked key bits.
int main(int argc, char *argv[]){
    if (argc != 3){
        printf("Usage: %s <input.txt> <size>\n", argv[0]);
        return 1;
    }
    std::ifstream file(argv[1], std::ios::binary);
    if (!file) {
        printf("Could not open input file\n");
        return 1;
    }
    int N = atoi(argv[2]);
    if (FILESIZE_INT<N)
        N = FILESIZE_INT;
    int blockSize = N/2;                       // one thread per 2x2 switch
    int blocks = 2*log2((double)N)-1;          // stages in one Benes network
    int b = 2*log2((double)N)-1;
    int LUTsize = N*(log2((double)N)*2 - 2);
    int numBlocks;
    // Two parallel pipelines if the file is larger than one network load.
    if (FILESIZE_INT <= N)
        numBlocks = blocks;
    else
        numBlocks = 2*blocks;
    char* network;
    cudaMallocManaged(&network,N*(numBlocks+1)*sizeof(char));
    memset(network,0,N*(numBlocks+1)*sizeof(char));
    int* LUT;
    cudaMallocManaged(&LUT,LUTsize*sizeof(int));
    makeLUT(N,LUT);
    int mask = createMask(log2((double)N));
    int *valid;
    // NOTE(review): allocated for numBlocks slices but memset (and the
    // kernel's (blockIdx.x+1) indexing) touch numBlocks+1 slices — this
    // writes past the allocation; confirm and size as numBlocks+1.
    cudaMallocManaged(&valid,(N/2)*(numBlocks)*sizeof(int));
    memset(valid,0,(N/2)*(numBlocks+1)*sizeof(int));
    // Stage 0 starts ready-to-consume.
    for(int i = 0; i < N/2; i++)
        valid[i] = 1;
    char* data;
    cudaMallocManaged(&data,FILESIZE_CHAR*sizeof(char));
    memset(data,0,FILESIZE_CHAR*sizeof(char));
    file.read(data, FILESIZE_CHAR*sizeof(char));
    file.close();
    // Reinterpret the raw bytes as ints for the kernel's `data` input.
    int* idata;
    cudaMallocManaged(&idata,FILESIZE_CHAR*sizeof(char));
    memcpy(idata, data, FILESIZE_CHAR*sizeof(char));
    char* output;
    cudaMallocManaged(&output,FILESIZE_CHAR*sizeof(char));
    memset(output,0,FILESIZE_CHAR*sizeof(char));
    benes<<<numBlocks,blockSize>>>(N, blocks, network, LUT, valid, mask, idata, output);
    cudaDeviceSynchronize();
    // printf("The input is:");
    // for (int i = 0; i < FILESIZE_INT; i++){
    //     if (i%N == 0) printf("\n");
    //     printf("%d ", idata[i]);
    // }
    // printf("\n\n");
    // Verify: within each window, masked keys must be non-decreasing
    // (zero bytes are treated as padding and skipped).
    for (int i = 0; i < FILESIZE_INT-1; i++){
        if ((i%N != N-1) && (output[i+1]!=0)) {
            if((mask & output[i+1]) < (mask & output[i])){
                printf("ERROR in routing at output %d %d %d\n",i ,mask & output[i+1],mask &output[i] );
                return 1;
            }
        }
    }
    printf("Routing was successful!\n");
    cudaFree(valid);
    cudaFree(LUT);
    cudaFree(network);
    cudaFree(data);
    cudaFree(idata);
    cudaFree(output);
}
69
#include "includes.h"

// Element-wise product result[i] = x[i] * y[i] over a 2-D grid:
// blockIdx.y selects a row of `rowsz` elements, blockIdx.x/threadIdx.x
// walk within the row; `len` bounds the flattened index.
__global__ void vxy_kernel_large(const float* x, float* y, float* result, unsigned int len, unsigned int rowsz)
{
    const unsigned int within_row = blockIdx.x * blockDim.x + threadIdx.x;
    const unsigned int idx = within_row + rowsz * blockIdx.y;
    if (idx >= len)
        return;   // tail guard: grid may overshoot len
    result[idx] = x[idx] * y[idx];
}
70
#include <stdio.h>
#include <string.h>

// Abort with location info whenever a CUDA call fails.
#define CSC(call)                                               \
do {                                                            \
    cudaError_t res = call;                                     \
    if (res != cudaSuccess) {                                   \
        fprintf(stderr, "ERROR in %s:%d. Message: %s\n",        \
                __FILE__, __LINE__, cudaGetErrorString(res));   \
        exit(0);                                                \
    }                                                           \
} while(0)

#define THREADS_PER_BLOCK 20
#define BLOCKS_PER_GRID 20

// Dot product: grid-stride per-thread accumulation followed by a
// shared-memory tree reduction. Each block writes ONE partial sum to
// res[blockIdx.x]; the host adds the per-block partials.
// The reduction handles non-power-of-two block sizes: the original halved
// with i /= 2, which silently dropped cache[4] when i went 5 -> 2.
__global__ void scalar(const int* arr1, const int* arr2, const int size, int* res) {
    __shared__ int cache[THREADS_PER_BLOCK];
    int offsetx = blockDim.x * gridDim.x;
    int tid = blockIdx.x * blockDim.x + threadIdx.x;

    // Grid-stride partial dot product.
    int temp = 0;
    while (tid < size) {
        temp += arr1[tid] * arr2[tid];
        tid += offsetx;
    }
    cache[threadIdx.x] = temp;
    __syncthreads();

    // Tree reduction valid for ANY block size: each round folds the top
    // (active - half) elements onto the bottom of the array.
    int active = blockDim.x;
    while (active > 1) {
        int half = (active + 1) / 2;
        if (threadIdx.x < active - half)
            cache[threadIdx.x] += cache[threadIdx.x + half];
        __syncthreads();
        active = half;
    }

    if (threadIdx.x == 0) {
        res[blockIdx.x] = cache[0];
    }
}

int main() {
    int size;
    scanf("%d", &size);

    int* arr1 = (int*) malloc(size * sizeof(int));
    int* arr2 = (int*) malloc(size * sizeof(int));
    for (int i = 0; i < size; ++i) {
        scanf("%d", &arr1[i]);
    }
    for (int i = 0; i < size; ++i) {
        scanf("%d", &arr2[i]);
    }

    int* dev_arr1;
    int* dev_arr2;
    int* dev_res;
    CSC(cudaMalloc(&dev_arr1, sizeof(int) * size));
    CSC(cudaMalloc(&dev_arr2, sizeof(int) * size));
    // The kernel produces exactly one partial per block, so only
    // BLOCKS_PER_GRID ints are needed (the original allocated `size` ints
    // and then summed all of them, reading uninitialized device memory).
    CSC(cudaMalloc(&dev_res, sizeof(int) * BLOCKS_PER_GRID));

    CSC(cudaMemcpy(dev_arr1, arr1, sizeof(int) * size, cudaMemcpyHostToDevice));
    CSC(cudaMemcpy(dev_arr2, arr2, sizeof(int) * size, cudaMemcpyHostToDevice));

    // Launch with the macros instead of repeating the literals.
    scalar<<<BLOCKS_PER_GRID, THREADS_PER_BLOCK>>>(dev_arr1, dev_arr2, size, dev_res);
    CSC(cudaGetLastError());

    int res[BLOCKS_PER_GRID];
    CSC(cudaMemcpy(res, dev_res, sizeof(int) * BLOCKS_PER_GRID, cudaMemcpyDeviceToHost));

    // Final reduction of the per-block partials on the host.
    long long int scalar_mult = 0;
    for (int i = 0; i < BLOCKS_PER_GRID; ++i) {
        scalar_mult += res[i];
    }
    printf("%lld\n", scalar_mult);   // %lld: the value is a signed long long

    free(arr1);
    free(arr2);
    CSC(cudaFree(dev_arr1));
    CSC(cudaFree(dev_arr2));
    CSC(cudaFree(dev_res));
    return 0;
}
71
#include <stdlib.h>
#include <stdio.h>

// This will output the proper CUDA error strings in the event that a CUDA host call returns an error
#define checkCudaErrors(err)  __checkCudaErrors (err, __FILE__, __LINE__)

inline void __checkCudaErrors(cudaError err, const char *file, const int line)
{
    if (cudaSuccess != err)
    {
        fprintf(stderr, "%s(%i) : CUDA Runtime API error %d: %s.\n",
                file, line, (int)err, cudaGetErrorString(err));
        exit(EXIT_FAILURE);
    }
}

// Peer-to-peer bandwidth micro-benchmark: 100 outer iterations, each doing
// 100 ping-pong cudaMemcpy transfers of a 64 MB buffer between two GPUs,
// timed with blocking-sync CUDA events.
int main(int argc, char **argv)
{
    printf("[%s] - Starting...\n", argv[0]);

    // NOTE(review): device ids are hard-coded; this assumes the machine
    // exposes GPUs 1 and 2 with P2P capability — confirm on the target box.
    int gpuid[2] = {1, 2}; // we want to find the first two GPU's that can support P2P
    float total_time = 0.0;
    bool enable_p2p = true;

    if (enable_p2p)
    {
        // Enable peer access
        printf("Enabling peer access between GPU%d and GPU%d...\n", gpuid[0], gpuid[1]);
        checkCudaErrors(cudaSetDevice(gpuid[0]));
        checkCudaErrors(cudaDeviceEnablePeerAccess(gpuid[1], 0));
        checkCudaErrors(cudaSetDevice(gpuid[1]));
        checkCudaErrors(cudaDeviceEnablePeerAccess(gpuid[0], 0));
    }

    for (int loop = 0; loop < 100; loop++)
    {
        // Allocate buffers
        // NOTE(review): buffers and events are re-created and destroyed on
        // every outer iteration; results are unaffected, but allocation
        // overhead is inside the loop by design here.
        const size_t buf_size = 1024 * 1024 * 16 * sizeof(float);
        printf("Allocating buffers (%iMB on GPU%d, GPU%d and CPU Host)...\n",
               int(buf_size / 1024 / 1024), gpuid[0], gpuid[1]);
        checkCudaErrors(cudaSetDevice(gpuid[0]));
        float *g0;
        checkCudaErrors(cudaMalloc(&g0, buf_size));
        checkCudaErrors(cudaSetDevice(gpuid[1]));
        float *g1;
        checkCudaErrors(cudaMalloc(&g1, buf_size));

        // Create CUDA event handles (blocking sync so the host sleeps
        // instead of spinning while waiting).
        cudaEvent_t start_event, stop_event;
        float time_memcpy;
        int eventflags = cudaEventBlockingSync;
        checkCudaErrors(cudaEventCreateWithFlags(&start_event, eventflags));
        checkCudaErrors(cudaEventCreateWithFlags(&stop_event, eventflags));

        // P2P memcopy() benchmark
        checkCudaErrors(cudaEventRecord(start_event, 0));

        for (int i = 0; i < 100; i++)
        {
            // With UVA we don't need to specify source and target devices, the
            // runtime figures this out by itself from the pointers
            // Ping-pong copy between GPUs
            if (i % 2 == 0)
            {
                checkCudaErrors(cudaMemcpy(g1, g0, buf_size, cudaMemcpyDefault));
            }
            else
            {
                checkCudaErrors(cudaMemcpy(g0, g1, buf_size, cudaMemcpyDefault));
            }
        }

        checkCudaErrors(cudaEventRecord(stop_event, 0));
        checkCudaErrors(cudaEventSynchronize(stop_event));
        checkCudaErrors(cudaEventElapsedTime(&time_memcpy, start_event, stop_event));
        total_time += time_memcpy;
        // Bandwidth: 100 copies of buf_size bytes over time_memcpy ms.
        printf("cudaMemcpyPeer / cudaMemcpy (%f ms) between GPU%d and GPU%d: %.2fGB/s\n",
               time_memcpy, gpuid[0], gpuid[1],
               (1.0f / (time_memcpy / 1000.0f)) * ((100.0f * buf_size)) / 1024.0f / 1024.0f / 1024.0f);

        // Free resources
        checkCudaErrors(cudaEventDestroy(start_event));
        checkCudaErrors(cudaEventDestroy(stop_event));
        checkCudaErrors(cudaSetDevice(gpuid[0]));
        checkCudaErrors(cudaFree(g0));
        checkCudaErrors(cudaSetDevice(gpuid[1]));
        checkCudaErrors(cudaFree(g1));
    }

    if (enable_p2p)
    {
        // Disable peer access (also unregisters memory for non-UVA cases)
        printf("Disabling peer access...\n");
        checkCudaErrors(cudaSetDevice(gpuid[0]));
        checkCudaErrors(cudaDeviceDisablePeerAccess(gpuid[1]));
        checkCudaErrors(cudaSetDevice(gpuid[1]));
        checkCudaErrors(cudaDeviceDisablePeerAccess(gpuid[0]));
    }

    printf("Total time is %.2fs\n", total_time / 1000);

    //delete device_handler;
    return (EXIT_SUCCESS);
}
72
#include <iostream>
#include <cmath>
#include <algorithm>
#include <iomanip>

typedef double Real;

// Grid-stride element-wise sum: y[k] <- x[k] + y[k] for every k in [0, n).
__global__ void add(int n, Real* x, Real* y){
    int first = blockIdx.x * blockDim.x + threadIdx.x;  // this thread's start
    int step = blockDim.x * gridDim.x;                  // total threads in grid
    for (int k = first; k < n; k += step) {
        y[k] = x[k] + y[k];
    }
}

int main(){
    std::cout << std::fixed << std::setprecision(20);

    const int n = 1 << 20;   // ~1M elements

    // Managed buffers, visible from both host and device.
    Real *x, *y;
    cudaMallocManaged(&x, n * sizeof(Real));
    cudaMallocManaged(&y, n * sizeof(Real));

    // x = 1 and y = 2 everywhere, so the kernel should leave 3 everywhere.
    for (int i = 0; i < n; i++) {
        x[i] = static_cast<Real>(1);
        y[i] = static_cast<Real>(2);
    }

    const int blocksize = 32 * 8;                        // 256 threads/block
    const int numBlock = (n + blocksize - 1) / blocksize; // ceil-div
    add<<<numBlock, blocksize>>>(n, x, y);
    cudaDeviceSynchronize();   // host must not read y before the kernel finishes

    // Largest deviation from the expected value 3.
    Real maxError = static_cast<Real>(0);
    for (int i = 0; i < n; ++i) {
        maxError = std::max(maxError, std::fabs(y[i] - static_cast<Real>(3)));
    }
    std::cout << "Max Error: " << maxError << std::endl;

    cudaFree(x);
    cudaFree(y);
}
73
#include <stdlib.h>
#include <cuda.h>
#include <stdio.h>
#include <malloc.h>

// Fill V with len random floats in [0, 5].
// rand() is never seeded, so every run produces the same sequence —
// deterministic output, presumably intentional for reproducibility.
__host__ void fill_vector(float *V, int len){
    float aux = 5.0;
    for (int i = 0; i < len; i++) {
        V[i] = ((float)rand() / (float)(RAND_MAX)) * aux ;
    }
}

// Print a vector with two decimals per element on a single line.
__host__ void print(float *V, int len){
    for (int i = 0; i < len; i++) {
        printf("%.2f ", V[i]);
    }
    printf("\n");
}

// d_R[i] = d_A[i] + d_B[i]; one thread per element with a tail guard.
__global__ void AddVector(float* d_A, float* d_B, float* d_R, int n){
    //calculate row index of element
    int i = threadIdx.x + blockDim.x * blockIdx.x;
    if (i < n)
        d_R[i] = d_A[i] + d_B[i];
    return;
}

int main(){
    int n = 100;
    // Byte count of each buffer. The original declared this as `float`,
    // which only worked through implicit float->size_t conversions.
    size_t size = n * sizeof(float);

    // CUDA error handling
    cudaError_t error = cudaSuccess;

    // Host buffers
    float *h_A, *h_B, *h_R;
    h_A = (float*)malloc(size);
    h_B = (float*)malloc(size);
    h_R = (float*)malloc(size);

    // Device buffers, each allocation checked
    float *d_A, *d_B, *d_R;
    error = cudaMalloc((void**)&d_A, size);
    if (error != cudaSuccess){
        printf("Error solicitando memoria en la GPU para d_A\n");
        exit(-1);
    }
    error = cudaMalloc((void**)&d_B, size);
    if (error != cudaSuccess){
        printf("Error solicitando memoria en la GPU para d_B\n");
        exit(-1);
    }
    error = cudaMalloc((void**)&d_R, size);
    if (error != cudaSuccess){
        printf("Error solicitando memoria en la GPU para d_R\n");
        exit(-1);
    }

    // Fill and display the inputs
    fill_vector(h_A, n);
    fill_vector(h_B, n);
    print(h_A, n);
    printf("---------------------------------\n");
    print(h_B, n);
    printf("---------------------------------\n");

    // Copy from CPU to GPU
    cudaMemcpy(d_A, h_A, size, cudaMemcpyHostToDevice);
    cudaMemcpy(d_B, h_B, size, cudaMemcpyHostToDevice);

    // 10-thread blocks, ceil-divided over n elements
    dim3 dimGrid(ceil(n/10.0), 1, 1);
    dim3 dimBlock(10,1,1);
    AddVector<<<dimGrid, dimBlock>>>(d_A, d_B, d_R, n);

    // Blocking copy back also synchronizes with the kernel
    cudaMemcpy(h_R, d_R, size, cudaMemcpyDeviceToHost);
    print(h_R, n);

    free(h_A);
    free(h_B);
    free(h_R);
    cudaFree(d_A);
    cudaFree(d_B);
    cudaFree(d_R);
    return 0;
}
74
#include <iostream>
#include <cstring>
using namespace std;

// Find an occurrence of *key in str. One thread per character;
// a matching thread writes its own index into *res.
//
// NOTE: the original kernel unconditionally wrote *res = -1 from
// every thread, racing with the matching thread's write and able
// to clobber a found index. The "not found" sentinel is now set
// once on the host before the launch; the kernel only writes on a
// match. (If the key occurs more than once, which thread's index
// survives is unspecified — same as the original.)
__global__ void fnSearch(char *str, char *key, int *res)
{
    if (str[threadIdx.x] == *key)
        *res = threadIdx.x;
}

int main(int argc, char *argv[])
{
    if (argc != 3) {
        cout << "Usage: charSearch.out STRING KEY" << endl;
        exit(1);
    }

    size_t len = strlen(argv[1]);

    char *dStr, *dKey;
    int *dRes, *hRes;
    cudaMalloc((void**)&dStr, sizeof(char) * len);
    cudaMalloc((void**)&dKey, sizeof(char));
    cudaMalloc((void**)&dRes, sizeof(int));
    hRes = new(int);

    // Seed the result with the "not found" sentinel before launch.
    *hRes = -1;
    cudaMemcpy(dRes, hRes, sizeof(int), cudaMemcpyHostToDevice);
    cudaMemcpy(dStr, argv[1], sizeof(char) * len, cudaMemcpyHostToDevice);
    cudaMemcpy(dKey, argv[2], sizeof(char), cudaMemcpyHostToDevice);

    // One block, one thread per character (limited to strings short
    // enough to fit in a single block, as in the original).
    fnSearch<<<1, len>>>(dStr, dKey, dRes);

    // Blocking copy back synchronizes with the kernel.
    cudaMemcpy(hRes, dRes, sizeof(int), cudaMemcpyDeviceToHost);
    cout << "Result: " << *hRes << endl;

    // Release device and host allocations (the original leaked all of
    // these; harmless at exit but bad hygiene).
    cudaFree(dStr);
    cudaFree(dKey);
    cudaFree(dRes);
    delete hRes;
    return 0;
}
75
float h_A[]= { 0.646300533086186, 0.6891034119322159, 0.5468255896007155, 0.6042228186164886, 0.8659380581803113, 0.6300291449865434, 0.6636944471272259, 0.9882951548595007, 0.6352107108241554, 0.5790636985735749, 0.8804145795069749, 0.9456035439132031, 0.6321246094793169, 0.5520083637849034, 0.8193643662644936, 0.948699220113753, 0.6755087191072062, 0.8452024670159349, 0.5158472479991425, 0.7454278577521886, 0.8203518918008311, 0.8306414037192553, 0.9102755274193095, 0.8049150489951427, 0.6634987536615461, 0.5516742816892066, 0.6842642708230713, 0.7483998039947184, 0.8051003412268876, 0.5649583199862422, 0.8121027556323586, 0.5450967323115479, 0.6219450160218438, 0.5105097521704045, 0.9137357556898562, 0.5150533504856335, 0.9355026026464295, 0.710832721093494, 0.9629822013245587, 0.5863652172884737, 0.9265505203829214, 0.5420760069497614, 0.6783567622586935, 0.8976679836225981, 0.5509090210473756, 0.6604391659811224, 0.999366552142813, 0.9348916843328499, 0.713477120025813, 0.7305105281555632, 0.5508255633550583, 0.5329064212395214, 0.6742118985756717, 0.689140376023022, 0.7270457963615451, 0.7209463549118231, 0.7283645311972975, 0.7472317688709345, 0.8427026709428014, 0.8917006197702075, 0.7860696907282438, 0.8998261806382524, 0.774140322305406, 0.7407395850512472, 0.7016144680644383, 0.9513347541186932, 0.9539583490820657, 0.8055369082163983, 0.66731931499848, 0.9269077839786752, 0.8036882303399886, 0.8353559289667416, 0.7487273959642642, 0.95231413311441, 0.52936400623473, 0.6622843724305907, 0.7865911951337959, 0.8490486059918574, 0.5654667231844523, 0.902222672460675, 0.7377938242893363, 0.845162901466018, 0.6178930176516815, 0.8820423918233746, 0.819658695927562, 0.7297992824653494, 0.8608408011644345, 0.7814267405834245, 0.5451303358395813, 0.8364497176294705, 0.9476071729161337, 0.8683920694866987, 0.6120014563881961, 0.7324781077435785, 0.952295205463342, 0.6058991359641921, 0.8291900507261571, 0.5226152595564822, 0.5947825439255605, 0.8782021043314617, 
0.7569168372950734, 0.5797010072157455, 0.6594462100662504, 0.8337467492618065, 0.9914618549442806, 0.9321282144535272, 0.7246478245290418, 0.7161212795026455, 0.6016851675753103, 0.5125244716188995, 0.9299996842565255, 0.9903492897808992, 0.6237940900552181, 0.8205895991703788, 0.9884414871779998, 0.5866948961638769, 0.998033951487114, 0.5295990006307705, 0.5611045875923815, 0.961245424281093, 0.7226827601352674, 0.9640509189881881, 0.6519866100403702, 0.7892687497473407, 0.8167014390840873, 0.6765396258366096, 0.8449615379127254, 0.596268393959178, 0.7939249923913629, 0.5168816989873475, 0.9345299250253124, 0.6309463095185301, 0.8986805331645678, 0.7523313838450412, 0.82531344079664, 0.7627569522371702, 0.9031553492515547, 0.9494476034448109, 0.6288373163385834, 0.7924580168091493, 0.721495346147775, 0.834300321380002, 0.6469884013241809, 0.6472322377172521, 0.7984745212053365, 0.962294073457627, 0.7886130868301862, 0.8776482969543955, 0.6478837103419697, 0.9882165119301165, 0.8740486150400817, 0.5056143331065995, 0.6618047410295208, 0.7610666592760644, 0.5702625328895041, 0.9369221577593562, 0.9494164874124904, 0.59238444587363, 0.8780965038804809, 0.5218639139258541, 0.6812401728541819, 0.9328253167831007, 0.5161381775199221, 0.9100393851884749, 0.9728293591126462, 0.811344502001907, 0.782823841572214, 0.8658958032470887, 0.663719411218762, 0.9692889930153497, 0.6467599014694876, 0.589304535120637, 0.5094633344034718, 0.7025147226816439, 0.6598503212467648, 0.7020746471976945, 0.8830918473238974, 0.7866485844831004, 0.6345775079170256, 0.5615367280865449, 0.8802227833065868, 0.8582600706563485, 0.9155482170781064, 0.9530293740421751, 0.8182483372937428, 0.8524389803132264, 0.7241233536334677, 0.5151834741836199, 0.7385812199918054, 0.7943893265125952, 0.9051641860383268, 0.619534572253894, 0.8242822542479566, 0.6413536058059588, 0.9807819497947537, 0.9898101031902062, 0.8415733555438634, 0.9867989644513635, 0.9373926397421499, 0.8237322958318012, 
0.9271544178576562, 0.8356995743720048, 0.5658178563673646, 0.9705983473416786, 0.6736511025432669, 0.7830998587352098, 0.7045935293009279, 0.6834898270240016, 0.6499489142941706, 0.8808467511064504, 0.6642293957183821, 0.8653745173498084, 0.6267646816753698, 0.8074151052755552, 0.6799619044150402, 0.9942692009440288, 0.8625681533776105, 0.9257538399244084, 0.9914011126522407, 0.7917287747201345, 0.6046048508747064, 0.532450046417468, 0.6437265828207415, 0.8897046260829842, 0.9224802213438084, 0.6057855632559244, 0.9499914588921554, 0.611727504863876, 0.7587968793908222, 0.67209262512403, 0.9950600556677005, 0.7501592342573983, 0.8822302791460712, 0.562604449598759, 0.9938232800091651, 0.9950277163985779, 0.7850563426271466, 0.6291752867355491, 0.5837153379176236, 0.7493907741017607, 0.6658782563135046, 0.6476146082689006, 0.5974297752374016, 0.7034458626620241, 0.5161588039335538, 0.7186483385553928, 0.9061181053411442, 0.6296220803731616, 0.8344587501610189, 0.7658368512919322, 0.5193911487477161, 0.5254419830916216, 0.7504808479462405, 0.8552544477499019, 0.8289137948682707, 0.5426242175782335, 0.858845508474556, 0.8252356216134121, 0.8866364015504669, 0.5073032774858128, 0.74148567685559, 0.5809190407335006, 0.8382147766638192, 0.5021179405425454, 0.5612965191774761, 0.521704780892861, 0.9620418862791433, 0.6967427399939414, 0.6533446492141379, 0.7147919014055153, 0.7887651897338765, 0.8217711569279046, 0.5366901108437196, 0.9734989556630906, 0.5485794158722644, 0.5458920483132449, 0.9415910985632716, 0.6905162757784671, 0.8177732442367671, 0.5193192818261485, 0.904439960839182, 0.577484626417917, 0.5023125567163751, 0.5351238363408092, 0.5506641464567381, 0.9097008547341774, 0.5028782459886247, 0.9775999371965542, 0.8480896041264325, 0.9524433141692397, 0.8790167956693373, 0.6918264294189349, 0.6610097567204785, 0.5590353481221483, 0.9055975628804205, 0.6238987671821737, 0.9890972864990741, 0.6749305158850749, 0.6388066974704508, 0.9249096968119721, 
0.9237097208162639, 0.9956186647783947, 0.7502845085261427, 0.9157536785718855, 0.6367148161459021, 0.8914383120371315, 0.7754815852778648, 0.7442070581925427, 0.7168533964646541, 0.8035208845828656, 0.9058793058946397, 0.5506057302703941, 0.5610586777236432, 0.6198991192228714, 0.6759727566157296, 0.6521536736152977, 0.8911054170392861, 0.8730066061369885, 0.9052165427830005, 0.6290577933163359, 0.6266432294048905, 0.5833044339268814, 0.542572680556954, 0.709871771808865, 0.9961238310508744, 0.5220897050603603, 0.8772828170016069, 0.8770640256265352, 0.6734715416008624, 0.66448493340621, 0.711886014564672, 0.8948545491540754, 0.742454808358824, 0.5031309948396989, 0.998105761408189, 0.7416349611897435, 0.9833498748501672, 0.9434160912644086, 0.9287361899928851, 0.5668358498590604, 0.5516216715871469, 0.9180811238230364, 0.5003209498989232, 0.9919408995756567, 0.6098195086655246, 0.8529329865006654, 0.5483923087170157, 0.5108091169433435, 0.9310974593281147, 0.5131543331047703, 0.5522897530875988, 0.6135861087993936, 0.942225130594302, 0.7883109048664032, 0.980358430652991, 0.7427552974292404, 0.9008592468210845, 0.8330182916915136, 0.5116266438107839, 0.7155765952485353, 0.5586222664249273, 0.8094091386607725, 0.8137341760476213, 0.596062013591621, 0.6233668771354632, 0.6220904258017181, 0.7729922623950242, 0.9242654659438391, 0.9918232862707279, 0.7038756613345727, 0.6955031237560236, 0.7330651825711396, 0.9814429451532296, 0.6637713448493832, 0.6273600806376864, 0.7920615560597659, 0.7560570649825917, 0.8667792441759616, 0.9311245995511708, 0.7390681612865757, 0.8622775259815559, 0.6482748715498996, 0.599471939542426, 0.5779101969326355, 0.801001618896622, 0.7871239215733595, 0.8926484924268543, 0.7436028538658448, 0.5052795513219919, 0.963326129083858, 0.6198395865526083, 0.7441529681401635, 0.9601662315681015, 0.7548356711809994, 0.9954837830129526, 0.6759079213898986, 0.9621800756631611, 0.89322570586708, 0.7735070667526001, 0.6915282225910707, 
0.9341423848626801, 0.6211207388152615, 0.6819132804430946, 0.9004959920486495, 0.6869188767615144, 0.8235250113200907, 0.9166578872951766, 0.9013150686850961, 0.8380839858962025, 0.9073157003461221, 0.9543454184467036, 0.9055858124509073, 0.8918637925715706, 0.9903134675953051, 0.5127986114584449, 0.696692956130758, 0.5370523981116533, 0.7001724629171293, 0.9916611642074329, 0.9857366515184061, 0.924747395057823, 0.9445137331888381, 0.7766195394457958, 0.6195252022646915, 0.9546686663614874, 0.9844751038378801, 0.844027852820263, 0.7447830762001548, 0.6791783356204665, 0.5337497689443171, 0.7495252125656233, 0.7131514817215079, 0.877101450045404, 0.945751398349572, 0.8683084143497066, 0.8372134741459116, 0.6646904302471112, 0.8462850394417225, 0.5136995543425608, 0.7093735655361126, 0.8695010981991498, 0.7935643355642961, 0.5975979145080428, 0.6512819838748201, 0.5073914397414905, 0.7782127409874147, 0.6518832024877972, 0.9932076929834982, 0.5595762061403449, 0.9922246720679966, 0.5687109423220303, 0.838105358258217, 0.969625314094954, 0.7190620443864618, 0.8664081309733891, 0.667132130714411, 0.6353479152159843, 0.9881038444464245, 0.9749456914050801, 0.6056862445310439, 0.7651350765547513, 0.7111211950747746, 0.7268386451680536, 0.6345617335482233, 0.520889051962435, 0.9564077666058417, 0.5944431116457711, 0.7802461524486353, 0.9072284896017788, 0.7054058059340611, 0.5258608417539039, 0.9747301898084519, 0.8116918485805862, 0.5947262934805981, 0.835470561834067, 0.8289930620314478, 0.5932868967061289, 0.712644147073302, 0.6305110950712968, 0.8225485490400115, 0.8741217768921016, 0.6296711447821639, 0.706668544980442, 0.9063993574246532, 0.6289844799422348, 0.9689829115352273, 0.8146775882388788, 0.9554286901233602, 0.5417569350671105, 0.805744277346577, 0.6248071416806387, 0.6357804381341573, 0.5849076492790561, 0.9432718005552067, 0.8528011364197232, 0.9393180145037578, 0.9055210552178747, 0.6218196701805875, 0.5630041801771573, 0.9057041149454138, 
0.9699522473736628, 0.6410947775990148, 0.9467625040225263, 0.8152812695819185, 0.7962021187861228, 0.943125023708686, 0.593552831309881, 0.8989205954589661, 0.5432046020734231, 0.8479840409511219, 0.8508701651831969, 0.5299197849521209, 0.5325591497756108, 0.8793828608768641, 0.9369757353802319, 0.5594073029268327, 0.5286711607741894, 0.8548877866552773, 0.5686180685915301, 0.7391274856651364, 0.7926810868411738, 0.8954541553960164, 0.5655129506186884, 0.9109372852850768, 0.8388292670887891, 0.6047967180900435, 0.6235155630673881, 0.8682310872676411, 0.9662344277289328, 0.6665840484803136, 0.9948095299771151, 0.7204924530281454, 0.6999509968934565, 0.5766348487013278, 0.7367778800867257, 0.6837518469566681, 0.7770362604491128, 0.7587593463886848, 0.6310152500067889, 0.9919350377155323, 0.5287101907299846, 0.6341323580528309, 0.6416557527648712, 0.5883926591073251, 0.631425571085183, 0.8420540781161465, 0.6143637234888837, 0.8104077645102102, 0.9959475858013923, 0.9714705072445354, 0.6585921059526243, 0.8276218042252581, 0.5236214010776717, 0.8033802033078954, 0.7285054761100016, 0.5029381516906044, 0.9234000025643223, 0.6620917674867863, 0.561503064154029, 0.5771265064791333, 0.8742754298885033, 0.7971833382563311, 0.7199981465522782, 0.7252244800011279, 0.5969156324580289, 0.7381505609258481, 0.5680191990851209, 0.5729645938848937, 0.7039117180288094, 0.9133157526199824, 0.5354540604279023, 0.5974301685518403, 0.5038891564217153, 0.8157942322558649, 0.7859410990553226, 0.8056504129361373, 0.6700358006832727, 0.6973785780151314, 0.5778551791276771, 0.5271360770943279, 0.8813298017380735, 0.5704358736540642, 0.7085428617195324, 0.9275680222175162, 0.5098610056870094, 0.9834861477586689, 0.5938433356175594, 0.6385932490945254, 0.6407794033101999, 0.9894985463088162, 0.6947984656850965, 0.6908835745820855, 0.9434479881043976, 0.6400819937464182, 0.6227332389056729, 0.9317305409527508, 0.7490244259085336, 0.7080759286228255, 0.7261418110356387, 0.9849611732531696, 
0.8275907799859956, 0.7877709069472671, 0.6635787742348993, 0.9566498365513382, 0.9748628490405655, 0.6920318310713454, 0.9891349039024118, 0.6207854977713125, 0.5846708458760457, 0.638580615990205, 0.6107995937311764, 0.6273644935634082, 0.8791650679890575, 0.548205927966527, 0.9068404986513314, 0.7206877308174136, 0.8159804032398001, 0.7058470702694106, 0.7782382475467775, 0.554249281387349, 0.9329381142030011, 0.5511034131330732, 0.7350473494414951, 0.9682932885624354, 0.7124734961916888, 0.9885473291650108, 0.6051389970719803, 0.6532977034077603, 0.7299236953606625, 0.7255637558553876, 0.7654754536758199, 0.5954706519063397, 0.5918197380827519, 0.9673961499233998, 0.8059289887491525, 0.9788048055672876, 0.6030938291157355, 0.7844111512535903, 0.5347588790075453, 0.7080473758010537, 0.6515005741162019, 0.7964049788091725, 0.8410201353420025, 0.6714313442320763, 0.5910490044351887, 0.8995951419154249, 0.5184526686036783, 0.7925424835796833, 0.7679126300955836, 0.9590203996334217, 0.9903091706139668, 0.7216147584909365, 0.9976929437673969, 0.9704877980644011, 0.5470410188986154, 0.5186496911750648, 0.7275990372036649, 0.6748393260356575, 0.7191963173930802, 0.7788636130564965, 0.7043338505652308, 0.9406509282040869, 0.52560660969902, 0.9616897082844407, 0.5269839631434159, 0.6345471970871202, 0.7327621134132786, 0.5424223826722891, 0.6168325444359117, 0.6391903370210928, 0.8675913802283597, 0.7232739802939148, 0.6507550219605913, 0.9393934233789198, 0.8263999233020167, 0.7485394947870259, 0.5132102800989462, 0.625481521007652, 0.7739774047696706, 0.8857528228584339, 0.9238937325350356, 0.6833594764663675, 0.5085098423318805, 0.60230946300701, 0.9636284306289673, 0.7561520464326359, 0.5270883050805939, 0.7334739362727096, 0.812623690762135, 0.7789196515887583, 0.664852634521558, 0.879214888978636, 0.7926990751198626, 0.5970731570931582, 0.5355408806670174, 0.8625834422501488, 0.8591529471731963, 0.8343916360589914, 0.5093196955880548, 0.9616363595445148, 
0.5602164363946163, 0.7172587103636402, 0.8135567537750678, 0.5115644623041465, 0.6116482269828576, 0.6993011884296083, 0.6353127926219422, 0.6707370605525017, 0.6775098986182565, 0.5641301865518293, 0.9806327958605061, 0.6565364439300072, 0.9204489360294551, 0.8897248477441302, 0.5042206987979128, 0.6867821143085405, 0.8326952648842246, 0.8133394645759977, 0.631755831093546, 0.7168534543337715, 0.553815544401435, 0.9210250470711574, 0.8693782939314261, 0.707530539955956, 0.8280387791335548, 0.9898684171960364, 0.9106199707727254, 0.8917367641107529, 0.9175156167204768, 0.6570701428088337, 0.8850069006100854, 0.9016916422038186, 0.6217220978517903, 0.732174065703552, 0.9311739233989099, 0.7568504517733975, 0.8992529268458326, 0.8349936529456221, 0.7594875346301262, 0.9825630141201046, 0.8352988135145627, 0.9576918526608564, 0.7281027500440791, 0.7017470713526053, 0.8891857370680291, 0.5002872327542913, 0.94839759897853, 0.5867946200920484, 0.7109245157620054, 0.7900136686828181, 0.7454451494369139, 0.5634953251365167, 0.780902394602301, 0.7360328369367453, 0.9592633173685323, 0.6611483647614069, 0.6183278808848897, 0.5651687423987624, 0.9233209866077092, 0.8569320658136195, 0.5587178571907216, 0.9146668880118916, 0.5304406517948714, 0.7397743026504903, 0.5984947276197737, 0.7800889432751461, 0.9562199258305095, 0.6432837513122842, 0.5550505863181256, 0.9235852230924513, 0.733640926191228, 0.776085602341642, 0.9524542079848878, 0.847250075104929, 0.7383027441400019, 0.7390817430532659, 0.5934418555980321, 0.939762183617721, 0.7243231328197473, 0.8502483578899043, 0.5133099121559599, 0.7053783530245934, 0.8376415716976677, 0.6373816462943671, 0.8866852726164371, 0.6866767594367265, 0.9713340705964297, 0.8351969079006495, 0.5776190440980556, 0.5764313107857315, 0.5384416761182539, 0.53363574776288, 0.5034229345108567, 0.5980528030498259, 0.5110302205892395, 0.8948816982126281, 0.8736692914770265, 0.6978317046133657, 0.99010958406235, 0.7502160632818542, 
0.6153720826043493, 0.5256339570888131, 0.9980145093725308, 0.7755385200777405, 0.569001590154208, 0.9176724785790076, 0.8181647947535928, 0.7374480098332676, 0.6909665020438541, 0.8142226548304321, 0.9742959309490044, 0.7369373381019042, 0.5827434624790624, 0.9271939313775199, 0.9317717788450606, 0.7216238185386424, 0.6985416016974761, 0.5807711852783854, 0.5361875519031234, 0.5535722388142401, 0.9228446765858227, 0.9872259771598613, 0.7961943683173853, 0.7509096841824182, 0.8920629777940434, 0.7196990780883037, 0.9465682735955805, 0.7023041340593035, 0.5003835914835418, 0.9551501012151069, 0.567378116214371, 0.6789007117283913, 0.8131317297967569, 0.5157030155040458, 0.8903269497329267, 0.9698394707342216, 0.880610216618767, 0.5638125233271751, 0.9439176534406095, 0.6617599887583265, 0.5971275743875617, 0.8433006006231972, 0.6361631341613987, 0.9314640172014954, 0.6003004748354532, 0.9335735031331582, 0.6512360228208864, 0.5086987276822852, 0.9289508271411779, 0.7764175949496209, 0.7217065396368492, 0.524910731412012, 0.5528080380926884, 0.6064559602195418, 0.5868470659969289, 0.6635625851282483, 0.9960085972417181, 0.9708803572636373, 0.7949211055726071, 0.9311976496496084, 0.5058496039334064, 0.9849933979285113, 0.754603430565185, 0.6020953284255771, 0.8881080637470343, 0.5444215224718545, 0.9790234088872531, 0.9507974984957426, 0.6060991653926988, 0.5812583351180527, 0.9352654402216442, 0.6986846296293825, 0.9168548851724783, 0.5943415536972141, 0.7972851114931117, 0.9238458252041004, 0.7785439940736236, 0.8027079420177673, 0.625666199588418, 0.9878265887939884, 0.8684623032413582, 0.9657038529429582, 0.6748846472509984, 0.6763728035482574, 0.863897236201751, 0.8878137025210504, 0.882000334699977, 0.9874521190492971, 0.9968497286219995, 0.7085143622349079, 0.7502563166466953, 0.5100596894405709, 0.7221069810109304, 0.5119915802691737, 0.6772169647506838, 0.7944568379199017, 0.8985029224042136, 0.6440184616163815, 0.637449236824535, 0.8830754347167523, 
0.7009911817687886, 0.7345796154807748, 0.8674924373519134, 0.5915263587943979, 0.6006873299642819, 0.935101364309366, 0.5916840133813797, 0.7025417008931176, 0.8738420055138969, 0.8515053284533634, 0.532849192641057, 0.8372844013505963, 0.6928445088878702, 0.5000481438995283, 0.5884335221398116, 0.5995670630808899, 0.9888530084238121, 0.5438025285085197, 0.5095021646983758, 0.8894768148169612, 0.6466027312844618, 0.5842592246182025, 0.6407079406106068, 0.9032205059340827, 0.8851738069269643, 0.7745756049713829, 0.6454741220170227, 0.6795858748123376, 0.6269660238146295, 0.5474855981954658, 0.5414390310120987, 0.9741726869608864, 0.5810304537700886, 0.7099185038655249, 0.5934671282518265, 0.6398698522874915, 0.5002762153443079, 0.5859937767770008, 0.6670655649237398, 0.9185268232951198, 0.9105261178683872, 0.572102526223022, 0.6327333283895704, 0.9320586638263583, 0.9026164478514013, 0.7180313118623922, 0.660405559974598, 0.5149313797841719, 0.7054232144392834, 0.5740223050858865, 0.6275890571803284, 0.9267748250226557, 0.9800441654951677, 0.507829382203053, 0.6419830505612427, 0.7088084545395703, 0.5335691036102399, 0.9141766177125922, 0.6341179915055504, 0.5884302188764479, 0.9210203431912694, 0.7434708823921008, 0.5987877133257116, 0.6492463885149824, 0.9479386335243074, 0.5840048168805851, 0.6965573314801549, 0.8494532049049655, 0.6078019087261085, 0.7384128836762065, 0.5950362018635402, 0.9937877541408366, 0.780514396725051, 0.7698684040692368, 0.976694951920666, 0.7459299494984142, 0.8168788335134516, 0.6234539464714894, 0.6198843520593409, 0.9697927309373819, 0.8316396562914714, 0.7592195918822549, 0.5077289855037546, 0.5098327402585117, 0.8774091203916243, 0.9091281027415556, 0.614181245033647, 0.5810847215641792, 0.7892407118312466, 0.7772499067663932, 0.984650564958578, 0.9091021548873972, 0.6740446906572493, 0.8293863877721068, 0.6226249360311578, 0.6547289407526626, 0.6042360601159997, 0.65963962983253, 0.8538765541782942, 0.6417756251984567, 
0.7496272766906809, 0.6157990105244564, 0.6963683703890142, 0.8878627934136596, 0.9434391660816033, 0.9286000386024276, 0.6547148468210243, 0.9386945753296556, 0.7776746037920286, 0.5410738256834172, 0.5541537887220906, 0.566096810007787, 0.7358519830139285, 0.7612404901406558, 0.6523714579294894, 0.5605225243157361, 0.6525721726253293, 0.6322496631504118, 0.8822743815096532, 0.6608110422999461, 0.5329385729201039, 0.580515496665571, 0.5779912079140113, 0.87997203223194, 0.9898542697442663, 0.8837662614684308, 0.8544232781478822, 0.5022842180548204, 0.5773064792910672, 0.7000346056691509, 0.718178137054416, 0.5619155200597288, 0.7235558961907185, 0.9406133465773018, 0.6031276692009929, 0.7910496614542846, 0.5308397615651719, 0.7846357213502575, 0.732955045034702, 0.660689603613501, 0.624289347522082, 0.8362875255492506, 0.7137812227450171, 0.5647727715887381, 0.9218689726515423, 0.9018539978672373, 0.6124883956060718, 0.5444309741330681, 0.5961502031060293, 0.6645214096905985, 0.9141330615507679, 0.7775357416815067, 0.5995049883443608, 0.8604483111182484, 0.5541032546241027, 0.8089363999340876, 0.9964760805106907, 0.506888214034129, 0.6832218426476613, 0.8301052263479682, 0.5965132647446971, 0.7986420428667425, 0.8468177817819622, 0.8562725128157398, 0.9310770114279951, 0.6744064769329418, 0.6243212131028824, 0.6708758667597288, 0.7834429795058873, 0.9432635554966639, 0.5957322054153149, 0.852522052426096, 0.9840865714593958, 0.6284795713964796, 0.8577619768166257, 0.7834187556268557, 0.7598640599146151, 0.9503367574361032, 0.8626399322323969, 0.64924784869908, 0.8446978433049003, 0.8411085966844416, 0.8925151032605001, 0.7091735431702804, 0.5597521801285236, 0.5502308175904501, 0.5254614794048132, 0.9605476270430058, 0.951637702110574, 0.8604801880713773, 0.990710364114952, 0.6655172596096852, 0.8501810642029749, 0.6001422674077659, 0.7603824968044223, 0.6406260944029176, 0.5169377858960995, 0.8967587379119223, 0.7394718814613863, 0.6484652171756973, 
0.5438339034064437, 0.9935483396714682, 0.5849071752608563, 0.627438105749589, 0.9748620047026393, 0.9257332841564356, 0.8878997598941276, 0.5256609082863422, 0.9064511396772408, 0.9587194048007492, 0.5081764783610119, 0.8249905737305162, 0.6496397513801804, 0.7195955682880676, 0.7359959541681667, 0.9703071744057263, 0.8722490060693451, 0.9769402537669138, 0.5094019439460136, 0.6126200264330806, 0.8684780898967995, 0.5956058159270011, 0.634553677925165, 0.8848127447141709, 0.5484706883921411, 0.8679089029485725, 0.9546972653518806, 0.8794620109801896, 0.5279504419038505, 0.7126180146048886, 0.9102055908771131, 0.8644315606962762, 0.8247123860600846, 0.5921112363602581, 0.6083673824929476, 0.5141519057771338, 0.9760424747773796, 0.7388198003770832, 0.9030473608551833, 0.904359911597262, 0.8638327120164218, 0.7382776034258496, 0.7094500429092381, 0.8287307558957766, 0.8447362311752091, 0.9785323302753333, 0.736732262464013, 0.7646723240894204, 0.7237446545726518, 0.5312982298304042, 0.7374050603540891, 0.8661083699118752, 0.9404874446075214, 0.6414711394173191, 0.6645194727047316, 0.7493180113974431, 0.7034505260472272, 0.6518367147635957, 0.6108364199269274, 0.7823246082941627, 0.7544465339385489, 0.5395698291431562, 0.6508190074543254, 0.5253117886085376, 0.8826920181853335, 0.9128581699322444, 0.8775904303667588, 0.9530524152776587, 0.757540024053555, 0.7970528714870762, 0.8405165121084591, 0.7283701977135559, 0.8671076989061002, 0.7822913892834322, 0.9427750325895521, 0.9600203869932834, 0.5396420948196136, 0.7307336987601604, 0.5959344008466603, 0.5776775177983159, 0.819142990425868, 0.6944146798811895, 0.7832859900074962, 0.6801847757931612, 0.9562701633899062, 0.7073889663331205, 0.5371517686907452, 0.5848038209178932, 0.6938954583876427, 0.9675153814133772, 0.9106809336078343, 0.7797237871405178, 0.9076233577730968, 0.6225333434628848, 0.9360833778123654, 0.755369345876056, 0.8287246287544336, 0.7832527602513946, 0.6830294948575468, 0.8217939616674648, 
0.9727466352953125, 0.7098410595636506, 0.5030784525369147, 0.5099524752704464, 0.699598836281111, 0.555056886140102, 0.7944902259103886, 0.5355412850764948, 0.8045953576527118, 0.6490594403989591, 0.5023053789482935, 0.5481470344054418, 0.6399705982155802, 0.7844154284709528, 0.99014092490572, 0.9394735331043571, 0.6345423902977484, 0.6345885277603347, 0.6059020120817644, 0.6162847990429718, 0.9938693460348769, 0.9328400015594591, 0.6165395760986283, 0.9594682926040896, 0.6622919370118638, 0.9726975942649864, 0.5879807594666935, 0.5088247547089808, 0.6965997150463837, 0.9811608422948641, 0.8840675507065754, 0.9461812951681865, 0.8351248119731944, 0.5258843901761987, 0.6435590303497869, 0.8202875258008769, 0.7188447487617302, 0.7537645472558716, 0.8602731300035864, 0.8213784766089811, 0.6233752684188447, 0.7483878865072116, 0.50831758545153, 0.6575499844823798, 0.5380055033765019, 0.973742952448867, 0.8789710918614744, 0.7859915308789611, 0.7795509374148415, 0.5649060990073578, 0.7342098766891465, 0.564522965051357, 0.8676055501459201, 0.8472994826441227, 0.541154935013825, 0.8442759512833237, 0.5504082923093758, 0.6013676037224776, 0.9132968095667775, 0.9090459752866911, 0.7523346988693529, 0.5744768965300744, 0.9071978714393114, 0.995205231231019, 0.7015666490876219, 0.6860956478368885, 0.9301163684069222, 0.5976847519892772, 0.8298652469795309, 0.7348524460887784, 0.742158085690625, 0.6770698920585714, 0.860925612589416, 0.8873602465364756, 0.8437063814421497, 0.6209881732403102, 0.829286545633026, 0.5194834129005317, 0.9906188454842377, 0.5093668333896422, 0.5288771726621817, 0.6753817717410937, 0.5058557013786278, 0.8566538681674771, 0.8723433921961546, 0.986852463115012, 0.7027213269100716, 0.8460417142725801, 0.8369293454511714, 0.8996028741129123, 0.8070300480397428, 0.6395918389393982, 0.6247988069825273, 0.5217237552768497, 0.9342462247028214, 0.5253919575192875, 0.9815169780012409, 0.7333905685896838, 0.9754139904718684, 0.7543786396478807, 
0.5553422065483762, 0.8605854785466969, 0.7134281059264829, 0.8283217475427667, 0.7679318837676344, 0.9064655279993168, 0.5340062039068005, 0.5967458090600182, 0.7449451895754696, 0.7009630626585777, 0.8229852180533714, 0.7710466568238832, 0.7893926797078262, 0.817593298383644, 0.6617725579564899, 0.8374289808524042, 0.8111016365128783, 0.533743690882045, 0.5921943670802837, 0.8660611424836043, 0.6305624362282244, 0.9549253702967202, 0.6785916275513784, 0.756979539974934, 0.8996012053945419, 0.8146714034375477, 0.6805102217565409, 0.9352072622350791, 0.6090949891172568, 0.5912686591571317, 0.6971505625008763, 0.5071837255359728, 0.7374163333106882, 0.8523875481946477, 0.56776876698454, 0.6248777695410921, 0.8450663188772065, 0.6095041964288364, 0.8442595393557512, 0.9643969221663399, 0.8917021871612274, 0.7489457698695341, 0.5814112596780918, 0.7915501323598708, 0.8864148194083357, 0.6043682732411422, 0.7757927970030094, 0.8414617032413634, 0.5127137823903312, 0.8567724441958259, 0.6096130293164774, 0.6764684849289472, 0.8460181956771147, 0.8696529308729957, 0.7251476480008732, 0.5954690025886057, 0.6568670955123213, 0.6944252959426949, 0.5745244305618826, 0.8282796889683786, 0.5631043299831184, 0.7496262715990387, 0.9874906496118887, 0.8966278353205451, 0.5386878772027497, 0.7192172797212988, 0.9524368515562635, 0.948137475377858, 0.5836522477410689, 0.6172856380623571, 0.5136821094715727, 0.5776015301502713, 0.9325720853696757, 0.9358075816466761, 0.6922749689595328, 0.7852498892451859, 0.912267458881405, 0.6742255335694698, 0.7825304761528185, 0.8650554793338581, 0.616915032070197, 0.7810817168499768, 0.9473839014056948, 0.833115484908957, 0.8620544746165284, 0.5446811432723602, 0.6800025829987993, 0.5371002649252101, 0.8073457868335473, 0.5203839902214875, 0.8879935715586273, 0.9050492337238809, 0.6244224538063633, 0.8954581886045854, 0.6749155672495473, 0.5327180582267492, 0.6068500446531124, 0.6181917388588856, 0.928827225520807, 0.8706357918505327, 
0.7569962458361807, 0.632191131482521, 0.8804432451958644, 0.9068103574280871, 0.7156312683798343, 0.551462738681055, 0.7527167245345066, 0.6769902031144203, 0.8491450711886029, 0.8869128730515499, 0.6857741949438243, 0.5183713894200075, 0.6597834749918241, 0.649392365841013, 0.6327994903390479, 0.5499856606413038, 0.8876766433343859, 0.7804487525561608, 0.7796656778818778, 0.6484136414898243, 0.8160956501029327, 0.6423040468014047, 0.5671073335602459, 0.5526960361178777, 0.5553425527673415, 0.9755499600629666, 0.9928124220227508, 0.6058651779313444, 0.6747882667387863, 0.8467797893684955, 0.6685222062966949, 0.5294215942221532, 0.8551626982140346, 0.6960009500385091, 0.7945150136104191, 0.8325705370376602, 0.5777289361329725, 0.5814658204500128, 0.8327616354819651, 0.7958688395679077, 0.7701728647558552, 0.5924808022506777, 0.5462275854987788, 0.522210665552151, 0.6056395856981897, 0.5178370985212434, 0.8943275807428943, 0.9657874897859372, 0.9632183094600104, 0.8669319389396521, 0.8203026553770668, 0.9379869353251327, 0.9423806547201279, 0.5125447251955291, 0.6336199700406894, 0.6832034183414766, 0.607363621009056, 0.7319400487880183, 0.6655850057385082, 0.9688448447358833, 0.5665282114140241, 0.7453251265824534, 0.7960961578288981, 0.7742151704064646, 0.9511709473649064, 0.9252329964190595, 0.6539343559829813, 0.5391208097912679, 0.7302206845223498, 0.5358925784953117, 0.6105242434897172, 0.903830102265514, 0.6541822195524265, 0.7166172233936803, 0.9812827898923311, 0.9014105163125912, 0.7833210136095907, 0.7524349487505273, 0.5892425376933675, 0.8321436417314974, 0.5669995368466987, 0.7981550018744118, 0.6163970300775787, 0.6163245398802475, 0.6851213037980425, 0.6221799795635743, 0.8200215587543256, 0.8800228163434116, 0.6252504767446851, 0.9388372708142068, 0.6474065066154271, 0.5585177486673933, 0.9414591324677744, 0.7494190285321578, 0.8665670048851938, 0.6441625882369895, 0.5500864534070471, 0.5645315317349273, 0.9101297690499928, 0.914060329266259, 
0.5370183630537323, 0.6062367316655717, 0.8123459107196331, 0.9139763783020962, 0.8401469997958583, 0.7680045167064222, 0.5236781655543681, 0.5127182912740973, 0.9844074043200753, 0.7028101781298418, 0.7750288273436847, 0.6028738265986948, 0.8017024253856293, 0.8217832925465804, 0.8669114038313149, 0.552438085395886, 0.7851719354908234, 0.9854974630650922, 0.5631888515722597, 0.8077333668559901, 0.805110839528868, 0.6808573491308427, 0.6210993576490422, 0.5582073155176236, 0.8886806979987446, 0.6346451473353911, 0.7047633982508874, 0.9828449970100417, 0.7779544679141983, 0.7244238444737225, 0.6602807303982986, 0.8489727051208034, 0.736280056727493, 0.6973148694446145, 0.9986240268748925, 0.5807384198079513, 0.9810149587070336, 0.6360983801168032, 0.7890882768283014, 0.7180638337133805, 0.5018512003036433, 0.803478032443756, 0.5179078761901904, 0.947212930362237, 0.8911588293935601, 0.9223407798106737, 0.6356717171145961, 0.9034008222528855, 0.8291715261646946, 0.9933956589015248, 0.5245271086617016, 0.8747290432021282, 0.975061617448933, 0.7408085622198496, 0.773796871774915, 0.5394209648105619, 0.5451148488755475, 0.6391951931191318, 0.776788243909841, 0.8261706644507478, 0.9047868831325021, 0.5481106526128778, 0.8047905630122272, 0.6934334265616842, 0.9951449361533032, 0.9008723073222187, 0.8303249530582817, 0.9614719279223345, 0.5939099457870023, 0.855212835446811, 0.5736026167526582, 0.5337544065275819, 0.5276587875341385, 0.9201768486160865, 0.921438288641341, 0.6116135180314728, 0.8887019191529395, 0.9724180433247355, 0.7894497591403742, 0.8836457043741688, 0.5130707105222323, 0.9008478611980439, 0.6465700465092636, 0.7981920691567168, 0.7860991150167704, 0.5188982865064249, 0.8121762235466801, 0.6930998638912795, 0.968866891125206, 0.7991567009751961, 0.6979732658740968, 0.7754261892340776, 0.8912489381979298, 0.9531509455817969, 0.675440420635226, 0.9249797978245234, 0.7000756166411202, 0.7932128659110149, 0.7441123206676281, 0.6853047031566151, 
0.9982201398072461, 0.8100448187441223, 0.9654304013952628, 0.8436742915516208, 0.7608565272353498, 0.7387329789121503, 0.6881608908337034, 0.7293064229397865, 0.6288318800765664, 0.6452694573946312, 0.745596481377305, 0.7052388202751451, 0.9398021384725406, 0.9936044527953776, 0.5053367455693586, 0.9397217810177876, 0.8727579917614927, 0.6305339096359578, 0.9701398564611007, 0.9036349319877326, 0.7360448214963455, 0.7667628545404437, 0.8908001704442619, 0.8709745942139613, 0.9829927471669158, 0.6638527760593609, 0.5323941673051724, 0.8632625240863816, 0.9488306849931083, 0.9625398788421153, 0.6234109570123262, 0.7058963191092794, 0.7956753117484123, 0.9979352047169103, 0.7582837579218544, 0.9767289083196253, 0.9777560204220709, 0.868573185281674, 0.5198583096881311, 0.7598794783061432, 0.9416989693185505, 0.9593428687847696, 0.969980158218346, 0.9130102007756591, 0.6332307042735479, 0.7803254954082977, 0.7295677033961345, 0.5794764109158461, 0.7712186890082855, 0.7029614180618526, 0.5771843279854566, 0.9381772811249327, 0.6503719945852242, 0.577763031307815, 0.7471884797307842, 0.7378569028354156, 0.9016599419918686, 0.588184676927477, 0.7508085875758775, 0.9413754730045316, 0.8830156722865798, 0.7183488445510054, 0.8345149441436648, 0.7196573979021692, 0.6598014882326657, 0.5753303679412307, 0.5979103739218312, 0.7487044684446251, 0.6941474295156114, 0.7083503731318006, 0.5839368425388733, 0.7544146040739399, 0.5850350519375349, 0.9592364248777274, 0.5388398060977094, 0.5691401513402823, 0.9789920570884498, 0.6208860235227177, 0.697479624994208, 0.9097855066223199, 0.8412782684620281, 0.6634511613869263, 0.6714209893235005, 0.833730302160608, 0.7431177031941008, 0.8940578270865556, 0.8466025019568351, 0.8390888910449469, 0.6518282611776997, 0.650333591307825, 0.8613617511772558, 0.6758351232236768, 0.6877408117881705, 0.5401824116330747, 0.7555265991133051, 0.7748705234614401, 0.9722725604800513, 0.573648991905642, 0.8900860682195646, 0.5151775873549511, 
0.9613551364001022, 0.7821282303822312, 0.9179684193360822, 0.954375487520639, 0.6375117026710135, 0.9969647853257733, 0.8753013808757907, 0.8767860462910737, 0.710778000695732, 0.5742968745965789, 0.8162380259980637, 0.9158520010548006, 0.7807470860740728, 0.8979230455024267, 0.9936572275271202, 0.7083052038787085, 0.7902521609007434, 0.6434419180679427, 0.7357963130984683, 0.6065238357981039, 0.9800933376603832, 0.7692620281134402, 0.7579853441464142, 0.6683577412390953, 0.7543696139052103, 0.6638397343036877, 0.5727361366627709, 0.5332073131414556, 0.9311153557504835, 0.6679040016288984, 0.5278687396235189, 0.7877730191756949, 0.9039152602573776, 0.7982597461978624, 0.5589322450917251, 0.6458198255012435, 0.9025203932036321, 0.7319232465980791, 0.70737607160659, 0.5574214483185417, 0.7216494100920376, 0.7647229285542485, 0.5899485792738064, 0.7673460156090166, 0.9875782996529305, 0.98618450010204, 0.6010520895145822, 0.5526428261395229, 0.8709623139063545, 0.8710733729535005, 0.5064902455823086, 0.604306883624546, 0.7007514757499618, 0.8361132049149278, 0.8996417257909157, 0.7520941596671157, 0.5528428164716568, 0.8768181301526347, 0.8305087292664315, 0.7446329381927888, 0.7001460895625479, 0.5835099802245496, 0.8502155045387971, 0.6252382019572444, 0.6063793722532907, 0.8413295403100353, 0.914332814387289, 0.6816095934237016, 0.5534579898743051, 0.972733984563328, 0.5850269979411842, 0.7214791777950158, 0.5757249107479367, 0.5364819227699389, 0.7672358014717838, 0.6567572953392699, 0.5275593403499311, 0.5350875316422268, 0.7573155608493425, 0.9160726940427522, 0.9544130961581814, 0.9077428702601527, 0.5449241445444004, 0.7823543751102859, 0.8966934734508378, 0.5880343360531544, 0.6688908267964186, 0.7003690216784018, 0.6180258104395422, 0.5460745121723951, 0.6952546253838328, 0.9476619625512321, 0.6699261023972627, 0.854368404283271, 0.9812526164066645, 0.6185533158716691, 0.7189393189370077, 0.7301822144253631, 0.5276956853595252, 0.7486260567960925, 
0.7979547086510521, 0.5505542475907209, 0.5067118481511903, 0.6802081783472471, 0.5826107562880349, 0.9973960968257876, 0.5442625662027267, 0.5933856100950915, 0.8004133938603625, 0.6501603018478066, 0.9136498268435296, 0.973223525863065, 0.5340426222197758, 0.8334922180033413, 0.5680340101986648, 0.8483948479170726, 0.6976033081306539, 0.944263324299244, 0.6092948520069132, 0.9479844073486698, 0.7658755727841363, 0.9366904211777938, 0.8237204435444285, 0.6648709396861319, 0.6071892963529493, 0.9023091319716239, 0.6593248585704261, 0.7696253717257049, 0.8621859509734695, 0.8541978830798401, 0.7989712613329538, 0.822594641126195, 0.7849292712487492, 0.6826915901698933, 0.9625566265883636, 0.8747900625701517, 0.786501169606096, 0.5611417539844026, 0.7335236065041818, 0.7012644465242514, 0.8997867341425713, 0.5497858689040093, 0.56753509286922, 0.5545375466589157, 0.8377569370145683, 0.6003668191791804, 0.5692010022069849, 0.7787286890731056, 0.6696248960097018, 0.7004274113592284, 0.9058876092872112, 0.7550524871368689, 0.9491336146987548, 0.7638181162883693, 0.6683507302712779, 0.7563238899322937, 0.8181436562956627, 0.721340716306976, 0.7563906068387825, 0.5658492377708144, 0.6628779787012683, 0.5197302897117531, 0.7040943788443303, 0.7954878739004267, 0.8543947776280933, 0.7154412310828782, 0.8203192889255231, 0.7215129650498571, 0.5467154663022733, 0.9572352265178746, 0.7429350000460719, 0.9806098094503721, 0.9170709116151697, 0.9858593894620771, 0.6673810311766473, 0.6501687179182749, 0.8266081311834439, 0.5306604446954801, 0.6716790080972266, 0.7774885192180188, 0.9345563707193072, 0.6746297844713616, 0.9164054466096763, 0.8030842107816305, 0.5202398620730274, 0.6534579779571236, 0.7731552883763311, 0.8394929989774749, 0.9285368502247304, 0.8416179063204652, 0.572874709828308, 0.8422790217454043, 0.9170645353878004, 0.7914905149517789, 0.6183538300151361, 0.535795338483187, 0.943304840316853, 0.8005439159146318, 0.7112905374991182, 0.7443576891671735, 
0.7710353683658455, 0.5921313216562778, 0.9566716460998531, 0.7661440351783944, 0.8224724469269697, 0.591354294756712, 0.6470669947876853, 0.5992991607180007, 0.7960556235589228, 0.8900459170531745, 0.9208182611947631, 0.7460974983454941, 0.9433293109296301, 0.9137401385463397, 0.51672588425317, 0.6108843491768661, 0.938590669229814, 0.5062922604442429, 0.9979074647141006, 0.7445964931827995, 0.6486125891994764, 0.9458553589203578, 0.6362973443860189, 0.550696422561825, 0.677478613167537, 0.7770316211587592, 0.5121534214133081, 0.5209739824821442, 0.8021146091983891, 0.9149970984341234, 0.900621548585335, 0.581359362457005, 0.6829404947328868, 0.6538135938967646, 0.6031098722170811, 0.5997375306113115, 0.7659769529842986, 0.7253929493668845, 0.7720305679776065, 0.5660331843801046, 0.9207277424036889, 0.5217060734655707, 0.9327566887731054, 0.7744635363223176, 0.8030699714017145, 0.5705856962071845, 0.9242749124292089, 0.7583590442944816, 0.902113197251282, 0.7348835333342476, 0.9333935326712411, 0.7667268289618221, 0.7560830480587016, 0.7058091809979673, 0.8152982252443272, 0.9929260438158979, 0.939069166399255, 0.8397961524188727, 0.8857540839785139, 0.7617290243853512, 0.9036188146321261, 0.8140801657215706, 0.9035303391780497, 0.5467023636254116, 0.9827284504198194, 0.9019867528844698, 0.7851386858698965, 0.891608871059943, 0.6174531632188842, 0.8900252379026531, 0.861549894770399, 0.7768943405300803, 0.934528355613536, 0.6441735201545122, 0.8388281831649465, 0.9770759599730278, 0.781010404395656, 0.8671511065214479, 0.6969580503977257, 0.7135742293138223, 0.6293419320786222, 0.7918855875656434, 0.5607649307356393, 0.6175899417005879, 0.6094739708223893, 0.515101135854747, 0.5443531279216514, 0.5537161990710757, 0.9989882345402354, 0.8428042677641336, 0.8189260499995997, 0.85777781553411, 0.8290312512273856, 0.7460253275425582, 0.542399986198209, 0.510260344703211, 0.823531965249229, 0.8414759630516149, 0.7989750593387412, 0.9138964041779954, 0.9153969286541697, 
0.9720058922426311, 0.9071481959427632, 0.6728902463793919, 0.730625471398446, 0.9005255159632273, 0.770034625745865, 0.9214041906779316, 0.8341368186084965, 0.846986104149309, 0.8391202149537814, 0.8005140216882831, 0.9095236492176553, 0.785325810916474, 0.5489304452867346, 0.792587351679225, 0.5293739123157402, 0.8270659452138094, 0.7514625588864067, 0.8202507187125352, 0.817561435301652, 0.7467330550147773, 0.9277406245496785, 0.752885451613533, 0.6484528604224584, 0.5542351877021723, 0.9954285111739991, 0.8116745128136178, 0.6533920953399586, 0.7281161122485529, 0.7324037817436879, 0.8838107854590571, 0.9118347124806095, 0.9196970680584746, 0.9611010623607499, 0.7739342010009924, 0.7441658962211481, 0.5651958978313366, 0.5873684319377303, 0.9284159627352556, 0.6346351906027625, 0.671606134138911, 0.6610032738031324, 0.5429595131303434, 0.7504688934505459, 0.7032971483216719, 0.6731852664133458, 0.8066455590689368, 0.8382349784506453, 0.5483299859843886, 0.5293054391149836, 0.8457193158083962, 0.8690082028655831, 0.705650003032489, 0.8086137506139984, 0.8273093899057027, 0.6907198371155754, 0.8545508898689513, 0.7073294982752711, 0.5572109504914221, 0.8754942214317016, 0.6715919119662541, 0.9903331411493443, 0.6204501247263183, 0.9473818186619448, 0.685159600255169, 0.9330679448884298, 0.9890905848649757, 0.5861603771812822, 0.7225951152606965, 0.5231181671218508, 0.6195905568078993, 0.8943872210741156, 0.8350570405630343, 0.7752029062354104, 0.9896247165165813, 0.7553432155180784, 0.768291212312699, 0.7521423346222214, 0.8637565757601864, 0.8082660014510369, 0.6078965201018187, 0.749710619165771, 0.5156610705791325, 0.5769700920105281, 0.807745415766798, 0.5932623989049783, 0.7196948628959172, 0.7864965525231593, 0.7489442150358967, 0.614675169026283, 0.8264502384613146, 0.5224662845843853, 0.9624405536426044, 0.5903287545609569, 0.6253542175541514, 0.5980550970355402, 0.9135905955780861, 0.9186593526992112, 0.9741657915359625, 0.9804192433752115, 
0.605254972879977, 0.9075233293234832, 0.7723452615061284, 0.9603993162389632, 0.9493959944789986, 0.5971390898517117, 0.6684630958599818, 0.8253076311191903, 0.5061247385704244, 0.5181063613012312, 0.617573262151663, 0.9123165920730871, 0.892528260586356, 0.8796924221722413, 0.5604717861796283, 0.7120610872394912, 0.7471513994320598, 0.9729386907784328, 0.5994833256549406, 0.8954767169224921, 0.7864761705897556, 0.6477691102954963, 0.71664572458387, 0.6272676403149164, 0.5743462806317712, 0.9679693801471798, 0.5046917393992689, 0.7715937702297035, 0.6316605339232974, 0.5845468925124607, 0.9776573595186898, 0.6847486281023281, 0.8378915597059021, 0.9333256439809108, 0.8735461081647014, 0.7830294053644733, 0.5051969124367539, 0.5417345550664883, 0.9668244465385268, 0.6399789463298933, 0.7788904521198569, 0.5605660406907831, 0.512295152419642, 0.9676386332591524, 0.6803892954850141, 0.887713485351433, 0.9477586222264694, 0.7822455691943133, 0.7676570623958365, 0.5549812163456553, 0.9049934574089542, 0.7005695789577748, 0.7801350696720641, 0.5554540825773526, 0.6277682552189735, 0.9416706129794157, 0.581110406746204, 0.9998795779380831, 0.5071531498280116, 0.6509534128833041, 0.6067852794486263, 0.7384474831602019, 0.8151115376246312, 0.9296540574134302, 0.510953035283791, 0.9706166906530969, 0.9333234214105983, 0.6435319363317172, 0.70530909691761, 0.7233121331592953, 0.9515165826723813, 0.7691306327763093, 0.5546574974070104, 0.725567082663143, 0.8590032689410457, 0.7896482674839831, 0.514549077463718, 0.9567610257809955, 0.7068365275677078, 0.753496519622916, 0.8777238064777778, 0.6610077151679061, 0.5089035225013274, 0.6626949464231799, 0.6114582437987941, 0.7819345887698279, 0.9089890050359167, 0.6062580040903423, 0.688161429412204, 0.6596286572056427, 0.7614275290108808, 0.5692654273605788, 0.874484136443246, 0.6309931318539849, 0.6649364549409689, 0.8643315637600362, 0.7978171182778633, 0.682262225740459, 0.8247772665775043, 0.5191093195713357, 
0.8332145820313304, 0.7527318190273292, 0.7591958004315374, 0.8971370237457637, 0.6605427289661666, 0.706996608440069, 0.7476155106276041, 0.7961045588703075, 0.7251426775297496, 0.9237054566370994, 0.8636958484883313, 0.8531755443049407, 0.6766576759781902, 0.8906338143674815, 0.727860749212329, 0.9436405643444785, 0.7932765559480011, 0.8096944839988567, 0.8770889006395786, 0.9320152107742977, 0.5529168603160912, 0.7229712145846612, 0.9731433496675921, 0.9897250068618613, 0.7727447522659878, 0.6724110273846232, 0.7319352486760768, 0.9530533465031759, 0.6590286594396747, 0.9369588083347946, 0.832879954190183, 0.5927141407708627, 0.7447633910861983, 0.8950945541851583, 0.90578923744337, 0.6066790960173287, 0.9871076762463247, 0.7306218015326649, 0.6600472091752305, 0.8636243118331752, 0.5329980997587327, 0.5674797478322691, 0.8056050333728324, 0.975299755054505, 0.6799768655548581, 0.9487663115412681, 0.5836749986687946, 0.8884754182827872, 0.6787739338007341, 0.6951765689914934, 0.9073007185067242, 0.5134861220336069, 0.9810025897139225, 0.8308775611299204, 0.6335431782637555, 0.8714570561751791, 0.5488842286345786, 0.5768300370859633, 0.5204718756717979, 0.7953812422624443, 0.662900508897597, 0.8369158235480018, 0.9822863051645975, 0.7938374748267347, 0.6588597952663638, 0.6161893439852649, 0.5546863378652598, 0.9893054987702137, 0.9520460972034095, 0.5829792492698787, 0.5637006315632925, 0.7223580573625948, 0.724684393965551, 0.5693430208400079, 0.836961887286368, 0.6824498749397339, 0.6437860844684828, 0.7557532408520562, 0.6974266986586238, 0.9792910815407916, 0.8741350965891597, 0.5708852199310624, 0.8799291502610319, 0.8233852043619405, 0.8171978338258923, 0.978865215805058, 0.5529219712500684, 0.8803881276563228, 0.8581786307250284, 0.87200441007383, 0.7080231337489273, 0.7317361029353358, 0.6775152808067225, 0.9837716837573258, 0.9428325852019912, 0.7881054949340741, 0.6604412680923197, 0.6123692497857589, 0.8279384531631737, 0.760512428402689, 
0.5972164710035721, 0.6479183484564794, 0.5762187023869051, 0.54758366562878, 0.9115206731689018, 0.8832833866600073, 0.7325905421051581, 0.8091193299835803, 0.839218824437634, 0.9787852124836454, 0.8053339152700596, 0.8356852532425556, 0.8240507869938187, 0.9740237752640535, 0.9931761245615565, 0.5950118822625011, 0.5289375126691105, 0.8559781842765348, 0.6198372847777663, 0.5490619246624096, 0.6048304538025988, 0.8343607340914828, 0.9566811589713188, 0.7696567609185765, 0.7828540736789713, 0.6809236749253211, 0.9799225203251257, 0.9821625866090269, 0.6737628581156196, 0.9340461352902343, 0.742056025076328, 0.9507728944675005, 0.5587624479893024, 0.7457438426699534, 0.9470554148049211, 0.9933152408253991, 0.7069231524303603, 0.889057519778365, 0.5097479559011764, 0.6545222685367909, 0.6271453375362639, 0.5818624862242291, 0.5690300348310493, 0.5181433736063241, 0.5451720251797354, 0.8371573837955228, 0.6699579486628621, 0.9761444889402684, 0.7365151551551907, 0.8382031221102908, 0.7970965926296717, 0.7011588850450396, 0.5822560955915597, 0.7900619160887581, 0.7989130984535968, 0.8065475006843086, 0.6581855563522824, 0.5018047446237279, 0.7569519823323595, 0.5147426583559769, 0.9786745996656923, 0.8156139183305978, 0.7356477244192167, 0.7541804835470702, 0.734757208003097, 0.5045587299945418, 0.5303455895669095, 0.5713755070179968, 0.6372861417531028, 0.9627696140651487, 0.9973125164045592, 0.8869422560109821, 0.8065892369376447, 0.8126287976684519, 0.8669461386946946, 0.5256207364711027, 0.9120951454103885, 0.5263114869658219, 0.8866379730033018, 0.6833837778724448, 0.7994810952293212, 0.8623676753394862, 0.8066674767874644, 0.5092868411919669, 0.6689057761528561, 0.7649170552457144, 0.5051594478259663, 0.5598542495452838, 0.8634237398019453, 0.8154254228316611, 0.9695870547033658, 0.5342552018407609, 0.6869483631022941, 0.8797981981981702, 0.6230704386581206, 0.6166376633018309, 0.5186378750513103, 0.6265359989878394, 0.8546445377605627, 0.5947277275861709, 
0.9194109160605044, 0.5521957790993656, 0.9481596704170852, 0.6346443784157285, 0.902720305805383, 0.8798004121069422, 0.5490523252301799, 0.9543544676678031, 0.9931011629862341, 0.777460798459517, 0.5655936415419727, 0.7069151465705608, 0.8640018719038445, 0.7375912152650734, 0.9630426941457727, 0.5800725176796135, 0.7067429130726242, 0.5146532146504491, 0.6435235663647425, 0.5471384627308264, 0.7105161356710972, 0.958679319050755, 0.8367101807249165, 0.7070243753339476, 0.7229045329606405, 0.7972691951544764, 0.6798233598993134, 0.7176235919063468, 0.91170447398034, 0.736465366477916, 0.9022858658245363, 0.5512810534115868, 0.7789875815921998, 0.783733420400949, 0.9759962432645646, 0.9464786383038353, 0.7169211327302041, 0.7936240875860956, 0.7901108840341563, 0.8903502875435869, 0.9144223752598559, 0.7941804934130069, 0.7864643503028335, 0.6562524526404936, 0.9525407875730805, 0.9178651810410354, 0.9844773720148667, 0.7428850534569553, 0.9259735016821641, 0.9310059241538635, 0.5745888946490565, 0.9680115523000683, 0.8783056637959048, 0.9748459952839612, 0.5048248469836063, 0.6900185306083388, 0.6809436344827771, 0.8603493764130179, 0.6040203265327851, 0.724434059439697, 0.8744526760103659, 0.8959109158829978, 0.8633346161056987, 0.5168716993171554, 0.5863811811012163, 0.7361499793485369, 0.8679139925211152, 0.6337635401464927, 0.9028995554897881, 0.8993560319581759, 0.8465668761919004, 0.9101853799598072, 0.6242008060943864, 0.9907173990737392, 0.8466624833129868, 0.7814457499248777, 0.8801828320798788, 0.5777127154023846, 0.8912307235422683, 0.5955918883919609, 0.5439038370871672, 0.8380609814805291, 0.689369915728844, 0.6116051484545539, 0.8768057782314285, 0.6730714909100632, 0.978129673361231, 0.8680020507823825, 0.8188962649611309, 0.6342455228119152, 0.9265904641638976, 0.5396357177335984, 0.7805362185041311, 0.7666336630606617, 0.8477902985070589, 0.7685461610970374, 0.9244424645284983, 0.6564172428911061, 0.700061548600162, 0.9542737293493, 
0.6587555416589623, 0.807877736582622, 0.5490998315671063, 0.6732663742778978, 0.9272557593484276, 0.8492664615678696, 0.6527384739289815, 0.5789324406344215, 0.8595752544462529, 0.7199049967515996, 0.9985635174173131, 0.9693653293979474, 0.6860893255624885, 0.9580263119461534, 0.5969360287138548, 0.7770003017609765, 0.8089293082810078, 0.5146350564179565, 0.7228140987150389, 0.5949030597265885, 0.8094936795387526, 0.7596294652669495, 0.7542210783324407, 0.5326594788760274, 0.7699999140622817, 0.5919471535024157, 0.8686730016877595, 0.6362015381767266, 0.538337631615097, 0.8343494614105527, 0.6756806600243378, 0.5756201682482686, 0.7491075021429778, 0.9815118077059359, 0.9395633546614336, 0.5956239069759983, 0.7425720690787192, 0.7154305443662787, 0.9968382502660822, 0.8981249912967668, 0.7631937763790451, 0.8237530318349429, 0.7704249714564485, 0.8120336479093986, 0.9509689306344252, 0.7322956776295946, 0.6941067520580373, 0.6665363339867199, 0.7711806098103131, 0.8457683611430785, 0.7583070012806803, 0.5069869038150167, 0.7180279413534858, 0.6030709990528049, 0.6911238352269893, 0.6732640762654942, 0.8742941663241869, 0.6499021865080621, 0.6936150371355578, 0.9732765132731132, 0.6987618545716321, 0.7874824292693328, 0.7954977566993784, 0.7322148904092525, 0.8857213600048387, 0.620660636875375, 0.734998084701467, 0.7469864488538172, 0.5922974245450295, 0.7567669374534955, 0.8217452846132344, 0.5171538297641295, 0.6274506760878458, 0.5352762722426246, 0.6441919252895842, 0.7643891481051122, 0.655923680364914, 0.886130697520665, 0.705279483649611, 0.9818570281155916, 0.6142002444406094, 0.605648154807536, 0.8269870485087334, 0.887489980653451, 0.9328236488725601, 0.7657944523881715, 0.9156619629829084, 0.7577306265038513, 0.5584383874842338, 0.8714261335992777, 0.7313762972170608, 0.8589050303191069, 0.9286434649706492, 0.5664020281358664, 0.9802593059768752, 0.6323747465045446, 0.9868530100953928, 0.6412094106059348, 0.9515638805601974, 0.5781493926003649, 
0.8752831106883118, 0.9005257805109437, 0.7431371636778721, 0.5775509823933744, 0.981761118342831, 0.7273972304373442, 0.5856572199992669, 0.9549697612498349, 0.7338728086464137, 0.7460960000186192, 0.8738883292627051, 0.7184570670290089, 0.7894762901135266, 0.6484698284503627, 0.7021811743370342, 0.6671495327232173, 0.7109590691741912, 0.735793837626517, 0.9520369292813727, 0.9368124013178012, 0.7011508992779548, 0.9644674091159212, 0.9930851310438813, 0.9628599473343858, 0.9771180645076885, 0.8311646659613939, 0.9093548689193252, 0.9844447754975272, 0.5180229700419133, 0.5830273197114607, 0.5727743005966157, 0.8470941450330498, 0.6408640082631372, 0.9347396973669702, 0.7552507321465418, 0.5280512426857488, 0.8679893575086652, 0.7031834116245146, 0.9999728660875287, 0.8028559474112216, 0.8345880264591095, 0.5402199393068219, 0.5671105727456287, 0.796304015318111, 0.5407857408765387, 0.9884319762394158, 0.9043779451422389, 0.8022965108889071, 0.8236880988076585, 0.7507299674643768, 0.8152673871444787, 0.9270064191521026, 0.9990834099311146, 0.6656452983711956, 0.8635584436024839, 0.9383011381179119, 0.7912127667046729, 0.6257386764408103, 0.9812754327329374, 0.617003191643571, 0.9218170184367902, 0.8259013291318453, 0.681465407627226, 0.8376392239787434, 0.5882225432120576, 0.7290551371901529, 0.9041454390975521, 0.9861050830856393, 0.7914331636113184, 0.6636479437494521, 0.5778125779063306, 0.5186231066985278, 0.9890878737411377, 0.6275106175980223, 0.5758846237890598, 0.5808122141634511, 0.8795260394759081, 0.6427466114882376, 0.6364578295744638, 0.5662334711144658, 0.887498948547137, 0.6367025310575114, 0.7374418014910966, 0.6558064521544521, 0.5098887775421721, 0.7041687933642886, 0.8315753606671847, 0.6176636425624564, 0.9209163927534108, 0.9128771114681451, 0.8245762516849193, 0.7065620625876674, 0.6288317225786085, 0.877175204648489, 0.8761680345614649, 0.554608048213807, 0.8381300165721286, 0.501558645329331, 0.748445132108857, 0.9298802995336695, 
0.8724780864642443, 0.9288990667712753, 0.7156604187725404, 0.781087990251971, 0.9235982628380839, 0.8134755225456809, 0.9348047742259575, 0.916640026365166, 0.9164582484883936, 0.5567793533624232, 0.8098510299985918, 0.6331936574214978, 0.5476240241078518, 0.9993718673619383, 0.754133011811504, 0.6598927729760993, 0.948291858279573, 0.7690700415926701, 0.5477038994489942, 0.7260116056545639, 0.9549052178595907, 0.7231979598968336, 0.6208852995712937, 0.6841627497487581, 0.7625954119820191, 0.9896578092179137, 0.7298919955436527, 0.9245976754745124, 0.7917930645614574, 0.9331379420674093, 0.6347274351988547, 0.7638102505408855, 0.9149387642088896, 0.880339704359326, 0.6322539669911438, 0.8249827733167769, 0.6315705515873904, 0.9491516734204624, 0.6114884284480185, 0.5910510516556442, 0.520224421106239, 0.8497498605060272, 0.8095044790220168, 0.9217699632374752, 0.6235921911073896, 0.7947277118678993, 0.6054092256584153, 0.7144545948111161, 0.9198234226599118, 0.9377742726253322, 0.523967084634474, 0.7508308725479722, 0.6099129238255967, 0.9899167351070215, 0.5387228842776299, 0.8616417008046178, 0.6283461282469319, 0.7038784339452411, 0.7262584812630147, 0.8559436255019984, 0.9333244081005722, 0.8684241808601367, 0.9735236774547937, 0.7634734226747131, 0.7070722217570924, 0.7868115576795063, 0.6514313640781826, 0.5510085621127909, 0.5528774605750735, 0.9462516815341069, 0.713381775953112, 0.8576945003873562, 0.7184572334267079, 0.7123423856799495, 0.9229596618394008, 0.9960311686251071, 0.7275633309403311, 0.7997294678764446, 0.720502747646887, 0.9131331784300525, 0.8807644856929224, 0.9698916050350079, 0.6798356738593773, 0.6217116369418392, 0.6447914409371804, 0.5859399947768993, 0.7465943563795356, 0.6885755419362358, 0.640144162092532, 0.965534846121196, 0.8625278057086292, 0.556617006226183, 0.8524144897116295, 0.8182895104182293, 0.8680952952592913, 0.5951411185876139, 0.5698907301684174, 0.9965195547204829, 0.5845090760067999, 0.7973168732427041, 
0.7159297239065208, 0.9587418087742497, 0.6298805441904416, 0.8422190710013395, 0.8863777734152868, 0.5715672485349452, 0.748368371407697, 0.8318585809964365, 0.5013859547358902, 0.8673809950442855, 0.7744223108616175, 0.7555840604549188, 0.5373812348395822, 0.5857326877046667, 0.5144902226155832, 0.8053972779516523, 0.9725703869895006, 0.7424875104264131, 0.5067951607846526, 0.6428089647315594, 0.695222581239705, 0.5522497964621969, 0.5783687677846919, 0.6520303801481416, 0.9127341849478945, 0.5065082530209337, 0.7989402845174212, 0.7724124603236244, 0.8970213171822643, 0.6861530521017105, 0.6617661553003059, 0.891922749393079, 0.733509908039073, 0.8936646334099936, 0.6938047893768016, 0.9676046669119243, 0.6053888226376785, 0.9244752338014608, 0.8863946224542801, 0.7681190163262692, 0.5166947749448361, 0.8823969910345482, 0.7383638469614456, 0.8856475430353085, 0.910778632984675, 0.8724518573926602, 0.6365800889850816, 0.7109834802253054, 0.9121069073505311, 0.5121735893319554, 0.8207268630591772, 0.7069732023838176, 0.6805295629318702, 0.9444179059990174, 0.9990157375404339, 0.6019415284493639, 0.5055515817018845, 0.536144605456282, 0.8257571912478197, 0.7720660031765909, 0.5634141980704449, 0.7247427112535103, 0.9106268795079893, 0.8662500256401682, 0.5540867588109475, 0.6407855086930864, 0.655697770775693, 0.9748494648665471, 0.6757783127468109, 0.966283689337771, 0.6328087613147584, 0.6108118821958863, 0.934415943823405, 0.7525769685894936, 0.551788839128966, 0.8532773998929755, 0.9986651178314444, 0.532006708390215, 0.6575820481818266, 0.8451469746374114, 0.9053523752343307, 0.9467457868478049, 0.547255901486583, 0.6601299774805685, 0.9384494518703321, 0.5529869182720059, 0.8650132847003973, 0.5809219314126022, 0.6250242419417851, 0.6509530864830715, 0.8451843740667544, 0.5883170393737849, 0.6972266332587103, 0.7347631307366861, 0.6476330925662354, 0.687745763438558, 0.6644372633265089, 0.5108479762049543, 0.6974638128847298, 0.573480872087137, 
0.6212509567621658, 0.8159635626271795, 0.7564560370959083, 0.8267236878082831, 0.8289178368696328, 0.8769370987234637, 0.6083276834302205, 0.5958331246320492, 0.5470784764086558, 0.6630917509729672, 0.5443216363292709, 0.7721469362791709, 0.9960646202870738, 0.8422677929623476, 0.7633137803128115, 0.9782915097954004, 0.5427112210990855, 0.9542208691935687, 0.6886269194743009, 0.8105798525699153, 0.6137464414142324, 0.6552488338929965, 0.5647010523353941, 0.641936031843235, 0.6946078755853831, 0.8832321884231562, 0.5878894492302429, 0.7507466215449854, 0.5389001185389588, 0.8326984893569314, 0.777693951596017, 0.6257982561432613, 0.7286296627464895, 0.5799615308375898, 0.9330435950536866, 0.9343156849920031, 0.8510445372813951, 0.824281477928634, 0.7474531460213685, 0.6460310921610075, 0.652562288958389, 0.7770021478623697, 0.9791559593393566, 0.7066570907433651, 0.7404642967511094, 0.915598832298995, 0.6549000882336721, 0.644576087636844, 0.6584895861371702, 0.7494330517996384, 0.6384452459070249, 0.971658386436084, 0.7784608753416428, 0.9087118100048681, 0.9659569028015902, 0.8223280752871056, 0.7990285585419121, 0.8610747028121333, 0.6720114557459784, 0.9117327998312855, 0.8170186171430238, 0.9110856060391874, 0.6064471536017452, 0.7318648284038936, 0.6064927975779224, 0.6974981862164347, 0.8304882132341257, 0.9887585375018222, 0.5582847346892855, 0.9434522189453984, 0.6100870505331326, 0.7895830861033359, 0.7403434727692486, 0.9348475147874735, 0.8390849887207219, 0.508307576282874, 0.827740446923393, 0.9899963669794161, 0.5268115418799011, 0.5495696025510243, 0.8174793874293795, 0.5239272526889847, 0.7124269781624796, 0.9400258342865182, 0.5410714952632729, 0.9792741636356486, 0.7680762763018079, 0.5803305528157401, 0.8585520216969782, 0.5352943254131031, 0.6943019587081634, 0.514877785345754, 0.6260799151252154, 0.6392316083054342, 0.8574710530745562, 0.5940103327229413, 0.8685870597275481, 0.6207446085227611, 0.9000712369129003, 0.9483270496899965, 
0.7908493301894062, 0.6856660172438929, 0.9765804305811796, 0.9188213211500387, 0.6983703456471102, 0.6263355897574212, 0.535219857361493, 0.809866948201803, 0.8996749260850472, 0.6774785312280267, 0.5093973987875141, 0.6641533525729038, 0.5553036975634971, 0.7658066463113495, 0.6738018809679437, 0.8980896873691, 0.8212924576560029, 0.5687448258716743, 0.5899457443038727, 0.6396537843001906, 0.8916991223144561, 0.6176986551222454, 0.6724900677038548, 0.6697095217351017, 0.885010470102074, 0.8825754772666274, 0.9972816383247338, 0.9614515966791926, 0.5179428698013692, 0.6458716426985314, 0.6992178586158968, 0.6212178268685749, 0.6653059060064983, 0.521874857685959, 0.6929554354654772, 0.6716975259471956, 0.8906210865306023, 0.674515423440637, 0.842225665911988, 0.5555304367008632, 0.6705745188721852, 0.7938126095000784, 0.9657930963034723, 0.6878603430895267, 0.6529771357421242, 0.5553179552443095, 0.6736146870011457, 0.5201879417895816, 0.8805175070723285, 0.7167347139117826, 0.6189476820718061, 0.5109611303914623, 0.5626061824271404, 0.5671178917684991, 0.5822523636546284, 0.7769161048414299, 0.9358702680709181, 0.5577732493075203, 0.6497998162451883, 0.7638871352984999, 0.6329629385979526, 0.6858768342422081, 0.9213109776817622, 0.9389299508293628, 0.7742933274111782, 0.6041845124241805, 0.799180299192398, 0.5420165263444549, 0.9715216748632493, 0.5311495597629001, 0.5025477077161122, 0.5569836544383839, 0.9686297452692627, 0.8763849116611057, 0.6887883432004733, 0.6516708429999932, 0.7854834293860016, 0.6421068057942882, 0.5587133904421753, 0.5173411464119198, 0.5593825587742427, 0.5501117817756719, 0.6423051188938742, 0.5934408022037668, 0.972904788461578, 0.8905737211692906, 0.8061552168579224, 0.7242356084435998, 0.8215895561970789, 0.7954729579510433, 0.6192574656272241, 0.9593568597716282, 0.9102017028341398, 0.6983819704313335, 0.9063768034474678, 0.740280817081831, 0.6172778944007216, 0.6808367966634448, 0.8975908794035696, 0.572147079286146, 
0.619520884473197, 0.5271721894235619, 0.9384557646773504, 0.7541992581932402, 0.5379752023271718, 0.7272150426242809, 0.9596032809000303, 0.9780024947400857, 0.8308810030179176, 0.5229640526271784, 0.9785657756807455, 0.9862666240834506, 0.9297264049447922, 0.614750870509662, 0.9206426608195313, 0.9892126447973957, 0.5835563579995111, 0.8231009771168398, 0.5814460839297517, 0.654337494638498, 0.6130351588628368, 0.8645624983591264, 0.7715220542196328, 0.9843596912923173, 0.5838913897750408, 0.948424818012455, 0.6691096277164525, 0.7375075133186222, 0.5699809144264969, 0.8509199837212886, 0.8726708065959075, 0.6023393170796506, 0.9453937330776331, 0.9692719920336377, 0.8619385747637986, 0.8598620685145281, 0.7621018190148284, 0.5829649564299816, 0.6544936620780588, 0.9318163334603144, 0.7888903498409228, 0.919268724679065, 0.6466017196928089, 0.9517965555713161, 0.9066443383210316, 0.5823760864435448, 0.8253687875543247, 0.5309485317971765, 0.8813231022417395, 0.8684764650516927, 0.5305354586572975, 0.8010120230590927, 0.6515963757541372, 0.6939462880629641, 0.7779077507068506, 0.6519176084458396, 0.9973354734229636, 0.69558604652423, 0.7403033279716278, 0.6266660730956128, 0.7845683134145913, 0.8924369975243188, 0.7451005377277966, 0.6678729044858713, 0.9878756018421113, 0.9982380305385424, 0.8205530714859642, 0.9005060023080076, 0.5448344326685193, 0.7467928169124635, 0.5645954847799738, 0.6852258078651019, 0.8136399919667003, 0.6535867888445563, 0.6388520939202006, 0.9413234727487838, 0.8251148351271446, 0.8587997393910023, 0.9821604471167107, 0.6502002430488762, 0.6372546735528262, 0.6119269096585362, 0.714386370109676, 0.6729654340814765, 0.8853657302938767, 0.8620592846906303, 0.9531539744562754, 0.5690723447707369, 0.9915848383240959, 0.8610858535435809, 0.9279540015302579, 0.7606387614690593, 0.8742952704314373, 0.9021353314200078, 0.8424591314971492, 0.8324801243650101, 0.841959673974177, 0.812586526516916, 0.8860448194426664, 0.9447546847670494, 
0.8905482347149242, 0.5854598983395367, 0.6595675887977281, 0.9595705630517264, 0.8205729162580688, 0.7332826656119501, 0.8277780416768445, 0.853843206619471, 0.9697040199869242, 0.6431313316453253, 0.940966536616924, 0.5039207888323194, 0.8118809192251186, 0.9730143159765838, 0.69030063909415, 0.5633663960784416, 0.5625041430064642, 0.8695394521232119, 0.5677623715507125, 0.9253237037450515, 0.5827611555854666, 0.9746065481922803, 0.8740554336476879, 0.5937445475176004, 0.8004831997113873, 0.9853951231390151, 0.6303598266878185, 0.7134655731217103, 0.6712005244808303, 0.5047961229459063, 0.6782522988502386, 0.5580027423848344, 0.6516174661110543, 0.7609535162716914, 0.6255064277778388, 0.9995630858087493, 0.7777337675455454, 0.9028381210197616, 0.7809058442629301, 0.6955546837083117, 0.5971413324759302, 0.5193984700234073, 0.9264914213660417, 0.998878042183359, 0.8152123365338648, 0.645200567109361, 0.5913849043820043, 0.9281195087704646, 0.6183482148548842, 0.7928355485988103, 0.8680121824295421, 0.9114863315953611, 0.911829853697415, 0.9961262159577486, 0.6890721975318735, 0.550814326035635, 0.84040862878113, 0.9095906059579562, 0.8794522661348058, 0.5086950883882884, 0.73738305066707, 0.9934896617607336, 0.5066266052835346, 0.6402215166262903, 0.5634933346039623, 0.8357575275744097, 0.7471629845648465, 0.7299624133096554, 0.5849862221241143, 0.5209731055290912, 0.939702131983081, 0.6883736337071296, 0.6144965115436358, 0.6522437862620436, 0.8587349310147792, 0.7489200595859344, 0.7803984758338863, 0.7924887558033862, 0.7770623788861637, 0.7734271293910542, 0.5357752757576888, 0.6676981608404828, 0.6358526424424067, 0.711994247478485, 0.7417364091085609, 0.9385334603858646, 0.9523955644382778, 0.6781011377975206, 0.9387831576516374, 0.649493513566189, 0.7184956816150068, 0.9036961007365656, 0.8886483250802912, 0.6884995181000493, 0.7190578598827985, 0.8873462399457571, 0.8316872307288017, 0.6271125166458418, 0.9332436335987297, 0.8412943236284491, 
0.5610915585409105, 0.693198728377997, 0.8545646526463988, 0.9928640098155328, 0.55431265881697, 0.7917879446070553, 0.7397863120674625, 0.9808948294563449, 0.7419201242974098, 0.7257797507430891, 0.5721795056946823, 0.5226225787379777, 0.7831831264924121, 0.6222563741397473, 0.7766273368807064, 0.8722073867581213, 0.7433245269330808, 0.8913910533279896, 0.6478375943186332, 0.7434445303410112, 0.6691654331177973, 0.6217763777961304, 0.8516899497994386, 0.9143731377592739, 0.5253663463722491, 0.8318676359991238, 0.9787516087158492, 0.7043413837019878, 0.7270903256589749, 0.5486240670067759, 0.7126419461088204, 0.6433841365541946, 0.6610627774958524, 0.5718330747742512, 0.6261614705006793, 0.643915249198313, 0.88748867237881, 0.5585392292949019, 0.6375532106136279, 0.6198353712487953, 0.6046431667361039, 0.6302500749138311, 0.9494072167849648, 0.643463107922541, 0.539465316561905, 0.9528051679697753, 0.506808946502495, 0.8818546143802247, 0.7027518384800191, 0.8132983497521293, 0.696685658250286, 0.6459113678970879, 0.8702312758853779, 0.7189615058998694, 0.6422523303444169, 0.8957794933365832, 0.811413862241635, 0.7737164260147078, 0.6497752393392638, 0.949237280834248, 0.9218137565876985, 0.9927123453771043, 0.6834136065285386, 0.863836896274043, 0.718911385277518, 0.598707502155756, 0.545678788802366, 0.7149112558166684, 0.7915979854834762, 0.9889895519816244, 0.6345932938051326, 0.5559852168300599, 0.8315255518419433, 0.8212844721435784, 0.8877230900315228, 0.7874484757994056, 0.9535522374581296, 0.7381312100951917, 0.7683138170765311, 0.7827418465364089, 0.6598400835485346, 0.8907769734205615, 0.5605026504138684, 0.9612194258152587, 0.8340129071536857, 0.7896083344726624, 0.6258567815772469, 0.8552619394493164, 0.8896459374169016, 0.690712163024782, 0.7979027364651596, 0.6545876441604241, 0.7535592100755548, 0.6203593033408575, 0.7709617407100469, 0.8720633626275407, 0.5479727347643444, 0.7931270743702402, 0.8209373358010248, 0.567455533372637, 
0.6295983247762753, 0.607280305830612, 0.9452167439478709, 0.9689547657586115, 0.5926939461233975, 0.9590242923684239, 0.5790650203377397, 0.5349212446720722, 0.5091261332246073, 0.9090029369972192, 0.9579979350485391, 0.672307624160464, 0.6453499328881792, 0.6752872043237532, 0.5348799636107799, 0.8738551246897349, 0.7160314395438616, 0.7056245392249119, 0.8034086430505435, 0.7730835435779225, 0.7527149397205569, 0.7099617673092162, 0.8316703187897805, 0.8559023894833847, 0.55620165136069, 0.7406839702345319, 0.9489835323257468, 0.9093067758524173, 0.9247783282230195, 0.6452162212309418, 0.5696045966356812, 0.7928415269672435, 0.8676674676721278, 0.6521659896865557, 0.9165253882736794, 0.5983450368442071, 0.77095780377614, 0.9291368970376137, 0.7532520431114305, 0.6789064078951697, 0.9585396475041217, 0.5937881234265951, 0.7945986797467082, 0.8815127166017247, 0.6026300418495191, 0.903247705484133, 0.8788680081992724, 0.9306219434986991, 0.8810212648859881, 0.617528652079323, 0.5393958187499712, 0.6126737347663227, 0.6327019713697709, 0.5948628670576597, 0.7606420939320937, 0.6703508535004945, 0.5951364184508888, 0.5418505181819386, 0.7791488789521541, 0.8220851687747881, 0.6308149540632034, 0.7077149402870209, 0.8459072426932202, 0.688008839677668, 0.9949243549052196, 0.7567844404528272, 0.6286413724651778, 0.957858061644219, 0.5609264416694368, 0.5488079769705481, 0.8061463251727707, 0.6377668171631314, 0.8867945725346138, 0.7869394284897333, 0.813201364835874, 0.7420702202156905, 0.9370397588634805, 0.6799184570547614, 0.6383520509170746, 0.7805647570998111, 0.6531414859237896, 0.6358850434480706, 0.8193336280209982, 0.5260035459772704, 0.9515176360284558, 0.7157444993779563, 0.7731324258168022, 0.7488448710270095, 0.5923295548592449, 0.9173438146923893, 0.5392018391463578, 0.6617031408059719, 0.602587809030132, 0.753604828344475, 0.9082223516752721, 0.55743202563428, 0.9254203820146125, 0.7799102599921897, 0.8405400690659512, 0.698241200546821, 
0.7869651025522404, 0.6190496886915977, 0.7673797681995875, 0.7458212073108065, 0.7830935659745806, 0.9715059495734295, 0.6459235359749544, 0.8725287362243865, 0.5547278918634684, 0.7078702878103338, 0.7084763024275992, 0.624031111600611, 0.8452512097882166, 0.6775843596273977, 0.9843693178420654, 0.9265939372483094, 0.9867341457680598, 0.6240033735608352, 0.6521777037779948, 0.608276429520284, 0.9109713817247029, 0.7154254503173342, 0.8019158510486082, 0.5096962362538366, 0.8177089563007522, 0.7108628791242275, 0.7155861144562817, 0.9311217922799493, 0.6941686593453158, 0.5581812843833018, 0.5815568417487507, 0.6512028545484427, 0.8194260376062752, 0.5734598270964976, 0.8156177416217079, 0.8689998526756706, 0.7435404452433698, 0.5055232265511538, 0.6063597514743844, 0.9755761576841162, 0.5421758991578574, 0.8277280565809857, 0.5160524120908312, 0.6199390878129909, 0.7996430877393228, 0.7963537442002, 0.7569930537500249, 0.9577015062679295, 0.8201875104556684, 0.7672144651596682, 0.8554917749112714, 0.7368079073064119, 0.9726932414615075, 0.8500582087996447, 0.7917842600463328, 0.7771065969010073, 0.8841445641639687, 0.7084568956111985, 0.8791482213764787, 0.946078636023286, 0.5136610727575222, 0.6436875577965118, 0.9287817129407083, 0.9557826845721052, 0.7064629504181233, 0.7732242648464418, 0.624534298564204, 0.5786064145931726, 0.9196767371657896, 0.9975977956827418, 0.71973310894555, 0.8955195100118065, 0.9512358947663333, 0.980114315449454, 0.8162875676729238, 0.6742574295375401, 0.6970840122387553, 0.9170377302538466, 0.7424754645668379, 0.8311038060414927, 0.7997472586262004, 0.5738482988796733, 0.5710208367548929, 0.5244760338945305, 0.6049067954838865, 0.8324884540830526, 0.587493097411732, 0.8242676379200653, 0.6813119976619324, 0.9916915258479801, 0.5856001231177388, 0.7873007575293056, 0.581550798582791, 0.9624389078003763, 0.847563918695218, 0.645331780426017, 0.5834630804432425, 0.6935235607134487, 0.7286623174171017, 0.8878313729275369, 
0.6939120892604103, 0.9881775734389289, 0.9458884632299343, 0.8638753425885227, 0.5799015114763102, 0.7954435427638655, 0.9021326689483955, 0.7139117714052743, 0.6500157467358381, 0.8102646992299348, 0.7450189341136846, 0.6820087693332869, 0.5233681686869455, 0.5191990695290827, 0.7574246430887979, 0.9077053538869346, 0.6117007933725858, 0.5445485088494841, 0.6697045844783496, 0.7992498890570828, 0.9081407312887695, 0.8276896542604137, 0.5613381584216895, 0.7301444691568819, 0.9571224261693565, 0.7182064467495601, 0.583924638811383, 0.9552815459279431, 0.625785796139117, 0.9688982087596112, 0.8136252438113549, 0.9557468788134917, 0.5615872808084174, 0.5312945404510725, 0.7942063949804652, 0.912881126929669, 0.9664721245713612, 0.7627492298069771, 0.7762953326408714, 0.9466036997905837, 0.857157863551866, 0.6739699954341467, 0.6325637688355272, 0.8753099941637541, 0.5371341166036516, 0.5437715704621935, 0.8993780773979673, 0.7009792160710844, 0.8940641965748185, 0.9888080101055712, 0.5391517742872183, 0.515123413625564, 0.6764126909992434, 0.8263265512526189, 0.5843585251060119, 0.9382908119772059, 0.7980076598750045, 0.9597282754654074, 0.5054249769693624, 0.9943847460958914, 0.8502777016919079, 0.7434970537236489, 0.7247926239630486, 0.796656354005673, 0.971683482369424, 0.8090917685366033, 0.9612995765671701, 0.5571830640219726, 0.9788257786862173, 0.8003921349032442, 0.8544270898364673, 0.8014299647466352, 0.6393921558928591, 0.5312097701245597, 0.7554520090329353, 0.6629401596083269, 0.8722878715133413, 0.5317065054487198, 0.8326439465847859, 0.9243730072559788, 0.8170886835452208, 0.8072167144611968, 0.5724840282565273, 0.9258492091288794, 0.557820797068928, 0.5065154198608162, 0.822010813240383, 0.8941161705799804, 0.601239908447723, 0.8677696656361946, 0.7332052309217382, 0.7726990540125216, 0.8522218827766268, 0.841346976456111, 0.7673712107837727, 0.6572134298166652, 0.6692103430541971, 0.931179481372892, 0.6707227703059945, 0.9278623447203607, 
0.7114052151854906, 0.5318774234651663, 0.7302896471658002, 0.7260709299887957, 0.8434558513390727, 0.9339343290592527, 0.7256228467661592, 0.7664974167771713, 0.9954358445789393, 0.8592696512227952, 0.7609630677847377, 0.7003489234798793, 0.6035522793345249, 0.5065413105460304, 0.6614354570224472, 0.5441316942210639, 0.5319726959814496, 0.5026186337305418, 0.9455465846661896, 0.7918942891724192, 0.8793036141120224, 0.9848488278127272, 0.6852087100870523, 0.9355796064038278, 0.7396980557578998, 0.5503400624640208, 0.8401619014365923, 0.7079790450996849, 0.9636805907366446, 0.6148770210183507, 0.5804328184882694, 0.9192041804909354, 0.6670378401901227, 0.8231344780973455, 0.8534275126256647, 0.5210637899335233, 0.6330795788815231, 0.9592082833426989, 0.8885168888483177, 0.8643588047018562, 0.79501489138546, 0.684716962614645, 0.6811785520466094, 0.9026336812382265, 0.5615573811679293, 0.7328893633552775, 0.8166702747288044, 0.7369316715736981, 0.6985707007629697, 0.7856809411144526, 0.7484439887517627, 0.5329049725998627, 0.9863789117970325, 0.8717081657368656, 0.6780711632093408, 0.9935171185866303, 0.8820999204986406, 0.8186887870089998, 0.5090454511611939, 0.6784640026968956, 0.6546884441922959, 0.829655012533842, 0.5526039129588501, 0.5940918174079493, 0.6324667241414657, 0.5500611118302707, 0.7470319141312312, 0.6024338102400357, 0.7870668433978628, 0.8933278879358957, 0.5042887551819166, 0.6015473929561779, 0.9537591284792527, 0.528670935480388, 0.7691729965815337, 0.704770301574143, 0.9956583818416309, 0.816918525404061, 0.858726683379873, 0.5661414785048022, 0.6928789687265098, 0.503389707321987, 0.5547296235437986, 0.6118134360316878, 0.6188162653202343, 0.5669853507822556, 0.5582296610031758, 0.6372578866436718, 0.7656390181741654, 0.5822412857786028, 0.882864624114071, 0.5142782232434921, 0.6778854487595252, 0.5987112730293984, 0.8230199029339251, 0.6038758072719975, 0.9981408470938775, 0.6870075103332252, 0.8589727315151138, 0.6329422786410714, 
0.9136139585600513, 0.7111296342194271, 0.8864856777928148, 0.9210128891394684, 0.7272568830162269, 0.8189284084467376, 0.7242804633348967, 0.5730336762497968, 0.6698101827436966, 0.5204515985292524, 0.9102551354442964, 0.5416205294732693, 0.7173904800770623, 0.8453685361868895, 0.6796536367183759, 0.5092169682430465, 0.7021842621488219, 0.7393334924186158, 0.9362154472037767, 0.9175006676525574, 0.7149858770791998, 0.682891006672204, 0.6450145072943625, 0.6687187130889057, 0.7628093097740092, 0.5011291743111654, 0.819676421411458, 0.6361421966364851, 0.6064141203009172, 0.9895785374816427, 0.5974159400688883, 0.8904718738395601, 0.7797455782748164, 0.592923915927807, 0.849310357579185, 0.5050958196390294, 0.8485137733079822, 0.9607709821579519, 0.8831982962816691, 0.6729625427540922, 0.9681282183483451, 0.993414662706744, 0.7897667987169767, 0.557891664716643, 0.5775619662848736, 0.6512402443325611, 0.5707089592284371, 0.9014033277583067, 0.7395855114739904, 0.8605974221409114, 0.836639768888213, 0.7565351349210164, 0.5982640002513511, 0.8124806121624889, 0.8993340527395236, 0.9916763122921861, 0.5468170836321744, 0.8357500286936452, 0.5620298678634142, 0.5197986930769143, 0.9864587652485126, 0.6754738942325857, 0.8566673859798264, 0.8163899571540126, 0.5656880252758867, 0.7680538908002641, 0.8449255642532012, 0.6670434764219642, 0.9035499118473491, 0.6793801331282379, 0.9677290352747815, 0.917372343470783, 0.6309613412790787, 0.7640653932166399, 0.7157744196066911, 0.8817713604446832, 0.9764088218173926, 0.6465847897676495, 0.958743110380376, 0.7611273601020474, 0.602525806955866, 0.9999148619848277, 0.942393023132452, 0.5665935441663009, 0.9575035698165404, 0.8469278897328135, 0.7009303322871345, 0.5563903608442924, 0.650134718936613, 0.6936309704648581, 0.5068212493500892, 0.5667841083086648, 0.8730977658347476, 0.8410574889930859, 0.9982359159757487, 0.6531874705281489, 0.8309254048718318, 0.9434633669084433, 0.7981172618962339, 0.5987566775338533, 
0.6100897293316787, 0.9960298474300473, 0.7765133018671321, 0.5799956531707086, 0.8304121662388477, 0.9260123447888606, 0.8208219496649777, 0.8640084736614946, 0.6137671097639377, 0.5344935055655694, 0.7713061794337774, 0.6068911727030901, 0.6639795570176098, 0.7337364498220698, 0.6212133471522083, 0.581610615959643, 0.6534586375129603, 0.9164638627178077, 0.5574991746725858, 0.5147399877463121, 0.9138046475611028, 0.5925357172355872, 0.5611840319700676, 0.5255692938336589, 0.6380433689124168, 0.8682354533362517, 0.544855459434477, 0.833235093585187, 0.7487825533698623, 0.9440666783564186, 0.6621870803877137, 0.7314147025288857, 0.7321861609052341, 0.9477216830592485, 0.8511050563201316, 0.7189576805497994, 0.7002019718412824, 0.7744575406020626, 0.8801281342684931, 0.7688208853330227, 0.932444367738331, 0.5186891682793808, 0.9589703702223153, 0.9265566489381707, 0.83615556150078, 0.5682256902268517, 0.6576899289276235, 0.5393028926783151, 0.6771221845844255, 0.6167968831770942, 0.7355570907820095, 0.9830399557858535, 0.6526847703456047, 0.7382091511683249, 0.6640644402140274, 0.6480315642245522, 0.6841829791619647, 0.9692215734243179, 0.9676905589954694, 0.741934062202609, 0.9144500135606437, 0.6608471587989674, 0.7374901458705366, 0.6976868411452279, 0.5957651810819602, 0.8315700946037611, 0.6295869588452225, 0.5428152320491866, 0.5712372955608487, 0.7495267710680142, 0.9038243545304325, 0.6570050291117601, 0.9234695501073802, 0.5303288783108595, 0.5844852847661716, 0.7584880924558421, 0.9653510497207571, 0.5263372564760783, 0.8323914841867697, 0.7683550789476248, 0.5359041663880585, 0.5048768443862367, 0.8848286911786718, 0.6467039420750584, 0.6734309105238592, 0.8434881285308087, 0.7407761212798936, 0.6794761541878416, 0.70593883228417, 0.6411812498667444, 0.7658940751198802, 0.7340786186812144, 0.8269624046615084, 0.761277079378587, 0.6715274923152821, 0.6800819851912574, 0.8698624849915333, 0.8449324677683518, 0.9988375817902537, 0.6807079530900053, 
0.5772086360624057, 0.6339026709639966, 0.9366528735945245, 0.9216004363453842, 0.5386141553624999, 0.7949779355341388, 0.6733334759545241, 0.6337737318676749, 0.9503845490945169, 0.8947982229383484, 0.6891546452928713, 0.5829450307610553, 0.532441889879574, 0.6466424258510267, 0.5720859530537848, 0.6630369326671308, 0.5870181403923631, 0.9804860216956797, 0.6710634789301442, 0.9480770654184532, 0.8072949594754686, 0.882543026336918, 0.7282246981248792, 0.7429791247812471, 0.6590821813674898, 0.9572948356939357, 0.7989719219028398, 0.9188349847700179, 0.6656548012730772, 0.6094634841623002, 0.7646778224993602, 0.5833440266296736, 0.7418442682075158, 0.5435092810977257, 0.7389553567889235, 0.566080747253845, 0.515498505580895, 0.6694824977154976, 0.8859146998494019, 0.6474507392860643, 0.7583181117156567, 0.8395132749873349, 0.8421563210798101, 0.7615442386378874, 0.6867596253410417, 0.6662154658553658, 0.9408779554856246, 0.9984995509467864, 0.6769774429766968, 0.9585123428568754, 0.9969714923976143, 0.9734783882890401, 0.5666401778505616, 0.8123083414841522, 0.8885633699526383, 0.9572025569239236, 0.6169785371759298, 0.5106157697341235, 0.657184633619085, 0.8371219306232669, 0.5003396811423793, 0.6947679817632985, 0.607158823167471, 0.8068508717768978, 0.6307154224687177, 0.62038691848973, 0.7771629470776246, 0.9891597513328989, 0.8665260714909688, 0.98637174108884, 0.8195062694215338, 0.5694686706112337, 0.5991837199562903, 0.7506516419783535, 0.698259439504132, 0.5125753137452125, 0.8957238629819819, 0.9104936206423208, 0.7853570450271141, 0.8905140859888988, 0.5701409204224014, 0.5657158556408055, 0.518557072388959, 0.8740406468725668, 0.6645938638399376, 0.7155173246535531, 0.5432298593995508, 0.5461103481072734, 0.6265613490772608, 0.8680121880628084, 0.6021134560718792, 0.7751054228711158, 0.9562917486489062, 0.7794080472234545, 0.6303766895583742, 0.7750811446737986, 0.8692828727530821, 0.873291943675436, 0.8724663325553061, 0.9194837616333418, 
0.5106560618272171, 0.5147012046277386, 0.6386692224765844, 0.6116947170543141, 0.9322375423002343, 0.9510138482142512, 0.5694758480847406, 0.9642979704252896, 0.518420453375281, 0.806122240269282, 0.7003074832644395, 0.5778291782003334, 0.9832326905623061, 0.5740839970304883, 0.5752743525869655, 0.9324897523018479, 0.927453476816596, 0.7132332763119531, 0.8141695232790711, 0.7693713829058235, 0.5490531407479449, 0.5293322780642298, 0.6663198962426441, 0.9002553128003958, 0.5314106689414464, 0.8800186299780169, 0.5834557905221731, 0.5209828024852274, 0.5869724804638142, 0.8540492965086786, 0.964600042060846, 0.6678994317646645, 0.9403755282970285, 0.640366982027853, 0.8648396807142804, 0.8852168611884617, 0.7932609305904013, 0.5000058069356004, 0.8783663361389134, 0.545478207748705, 0.8689498361910496, 0.6357183997080752, 0.7237000469036434, 0.571802601621621, 0.643716584580313, 0.5754999320033336, 0.8125283567133252, 0.5196753238336698, 0.8257083816023767, 0.8301249004193336, 0.6338430614681396, 0.8446058107961882, 0.6976421935721422, 0.9124127120758313, 0.6190771364891969, 0.7200668119283998, 0.6542945244115069, 0.7633893204444755, 0.8641388595100432, 0.5520997379499468, 0.9656237118875759, 0.955836978877487, 0.8809713199855504, 0.5718749062350967, 0.8183727540886943, 0.5642095624875023, 0.8383393418827026, 0.5622619592808484, 0.778297259794763, 0.6808486752091714, 0.6011553493069326, 0.7383487705277274, 0.5480376698059135, 0.7045802052144723, 0.9596182272053837, 0.8957051660430186, 0.7964367114365456, 0.5722751784807805, 0.5242659456377172, 0.8896044067090096, 0.7603798642710892, 0.5957407593796249, 0.8102849392348339, 0.8337914193100725, 0.7272561066318319, 0.6638280292496291, 0.6966953715151525, 0.6999515792767945, 0.532415997669985, 0.7388852024445376, 0.6554105080243475, 0.7522097752952477, 0.7551907052631905, 0.5570149811600222, 0.6312111549128626, 0.509097648574182, 0.9733942554993267, 0.92649909836852, 0.7822538439036006, 0.77570936925348, 
0.8347176616667091, 0.7302361304543254, 0.9989645300124596, 0.7683345820321004, 0.7197023319335154, 0.5967578521351464, 0.6777813309933147, 0.80333063945048, 0.6299424052014534, 0.70109985952833, 0.6960206153714118, 0.9337081333999288, 0.5043253122078875, 0.5201698613830248, 0.65022107731901, 0.5161877970020062, 0.92402568020914, 0.9560461413471488, 0.9410793066542592, 0.8200658762856166, 0.9644581009889854, 0.7642600884738648, 0.8007652908985127, 0.5603761787667784, 0.8031438739761867, 0.6169326806798776, 0.8708480102327775, 0.9425768876920138, 0.7265730665647927, 0.8521036120403186, 0.7465197721761688, 0.7961671585680661, 0.5287108283521518, 0.5387567567653542, 0.6603964423526814, 0.8241074424044703, 0.584820556479368, 0.7944129155125247, 0.6897766853291547, 0.8653979635116928, 0.6414926524572317, 0.543228757464977, 0.5441807058171211, 0.7940746364553134, 0.7892549650625567, 0.8088430103999313, 0.8738846420815032, 0.9069153582911532, 0.9131356942596419, 0.7214931503764366, 0.9180839242237961, 0.5898538794111412, 0.6859770756514207, 0.7926624558334525, 0.7077110878138801, 0.8087125442189511, 0.6841352457134595, 0.8426976029416047, 0.5760505051735223, 0.5876755546636621, 0.9095127957770642, 0.5730854618942245, 0.6500782858712222, 0.6184155454466136, 0.7033277077546618, 0.885900898482743, 0.956010121041853, 0.5430346490930387, 0.7021981794720178, 0.8349998060936681, 0.9518186752138404, 0.689314079636953, 0.7411112688831284, 0.5699656654850984, 0.703580627080387, 0.672568851821226, 0.9870216815011864, 0.7493836752752427, 0.7349627803465353, 0.8347264541141706, 0.62494619271138, 0.598609868870011, 0.9314622578150027, 0.5299525734767938, 0.6974818449883845, 0.9788091527058718, 0.6326204847461595, 0.9226294482445663, 0.881983700176592, 0.9190340423687833, 0.9746802267847279, 0.9033679878963403, 0.8180012983119787, 0.6390339059186692, 0.665285647033, 0.534596447642389, 0.5237997864871153, 0.8315919171944963, 0.7080958110328355, 0.8253458028314571, 0.9026934946468455, 
0.9341155137080166, 0.7762046457469692, 0.7136663216095916, 0.7645020542569076, 0.5499554213610942, 0.7742772421172878, 0.9109674120056599, 0.5677356334791448, 0.8619718111544717, 0.8837447117186533, 0.798152334434139, 0.9510852244614009, 0.9391624761184056, 0.7252255581514284, 0.7557801382530468, 0.7714410815540117, 0.7214649062949225, 0.6132770113673156, 0.9020515094994529, 0.984273593979863, 0.6665769057972586, 0.5130073001211634, 0.8587676725299223, 0.7574468432979367, 0.5008961562901291, 0.9437592937005722, 0.9535714323186884, 0.8656115296404863, 0.8361605025048535, 0.6759785569273185, 0.7744065450638811, 0.6918840474771344, 0.9116137631868243, 0.5109371181991671, 0.5074166027008746, 0.7994755338219182, 0.5914561417574447, 0.7708520256734908, 0.7219357585480064, 0.5696735590930131, 0.8348107269909326, 0.7333429227824955, 0.5412966668367738, 0.6220701766480571, 0.5274620212911437, 0.7570654491251468, 0.6693175013604735, 0.8969885681405452, 0.7969923493870732, 0.6314384936037383, 0.778292626163283, 0.8465408693631262, 0.5123935968048314, 0.5139250246727514, 0.960475625572737, 0.8957948678468645, 0.7502827313056744, 0.8948610030796551, 0.5685483362507204, 0.8705636429672062, 0.8810424124786556, 0.5089006289387963, 0.9407446181148191, 0.8325077592632077, 0.5338392196505475, 0.6944405143944403, 0.8783123920142356, 0.5054053971309349, 0.6411647795655461, 0.7171404016567708, 0.5011877887394671, 0.9156493234738379, 0.8920951578475997, 0.7453501062219658, 0.7328310259609825, 0.622928435209141, 0.9113913248980074, 0.9526614958653113, 0.7613701846650887, 0.882810808032283, 0.9286746433774523, 0.5843805269469209, 0.7822578096688348, 0.5798086348346037, 0.7627948460516543, 0.9759483585316773, 0.5734805271903853, 0.8960247781430969, 0.612157057236264, 0.7067966734137503, 0.61838480287867, 0.6311439482853578, 0.6653151767520196, 0.8951488738400091, 0.5131020759123713, 0.5945511529550429, 0.6842835460840468, 0.7595356749533781, 0.7811218909728554, 0.7850971985458973, 
0.9149877130323446, 0.5613645541716035, 0.8842588367022449, 0.8552301779168274, 0.9258518524561191, 0.6759451410685073, 0.7418227103311832, 0.5151297221604283, 0.6831006508827469, 0.7473236453440704, 0.675331635917438, 0.8136679331248764, 0.6312998911217114, 0.8528034870466061, 0.913759381467317, 0.5742871637391994, 0.8738168653308233, 0.6356336846931386, 0.7303866233989145, 0.7677606975417203, 0.7810147388569528, 0.7072083886215599, 0.7642070465253954, 0.9988711827833896, 0.5753152382281933, 0.840586134517336, 0.7815841885596546, 0.6583625369758598, 0.9438413061732989, 0.8347018301717503, 0.8253438178174447, 0.9500157088982555, 0.5188278625099285, 0.544153688237175, 0.672368953614815, 0.5718442386125245, 0.671552339611757, 0.7633951521651976, 0.6097840772831067, 0.981082764456042, 0.820469720972372, 0.8618552064667606, 0.935799508562552, 0.7115836422706601, 0.5864412079775658, 0.6581350405651933, 0.7485555694336558, 0.7004639536401864, 0.8321156507314456, 0.7761848688849782, 0.8241348823975753, 0.8845036224023013, 0.8343446771566043, 0.6882072078975485, 0.5509421057567443, 0.9678211976701672, 0.8604249945711597, 0.5381600407866463, 0.8614049592019696, 0.6578246141185837, 0.6543865382033379, 0.71995128751772, 0.7320551164401925, 0.6658306534473848, 0.8233534074245743, 0.7184747685497447, 0.5374982713590863, 0.8066207803228286, 0.8964833331386177, 0.7349827330309026, 0.7404439943259014, 0.6095780823833523, 0.7084744742411935, 0.9770420965796427, 0.9333551639598039, 0.6166561998298261, 0.8117160228358999, 0.9687900960393783, 0.7778041777698022, 0.9104507084245063, 0.7487645882521281, 0.5463598994245631, 0.6239028681498036, 0.8752892843045276, 0.7452589309198996, 0.9402298904389718, 0.5584559236811011, 0.8211481824631822, 0.6812482338093159, 0.522724292012774, 0.5934190429482458, 0.9699262112881231, 0.9985618341479784, 0.5272351665772099, 0.9032032426122593, 0.6865701436704157, 0.6154559552562626, 0.8579656515160645, 0.8918778054157227, 0.8286773031985666, 
0.6574417542668115, 0.8864959886334347, 0.7476183342851597, 0.7000070252952126, 0.7293188721289562, 0.9793583039125586, 0.6836022256167696, 0.7421064255074348, 0.656335242508051, 0.5949851945181044, 0.9614559720743296, 0.6788196372122397, 0.7207783868143132, 0.754997909968957, 0.7355637782167364, 0.9760181429824475, 0.79481673236159, 0.6543270099624543, 0.9242180941032984, 0.9554303591665239, 0.754930588808185, 0.6830383461447994, 0.9252279937165782, 0.540540130517633, 0.6755624254092565, 0.9974515672872952, 0.9524148408561037, 0.9238675477401828, 0.9131358024753227, 0.6103431463262127, 0.6162802907770288, 0.9040008437471867, 0.8677952861468807, 0.8176068920227844, 0.9821973985704648, 0.6659827805611458, 0.8288310479610299, 0.8168969045309622, 0.5916271266110253, 0.9532172259751979, 0.9294455454787773, 0.8685091185646399, 0.7164776068397839, 0.5341177400883543, 0.6687242564064899, 0.9121561373151799, 0.5191306573559509, 0.7100067399365613, 0.6660841148791972, 0.829534945470392, 0.7025144613715623, 0.8730002388480345, 0.5959035226233262, 0.564412112813724, 0.6539121769975078, 0.5726483637521085, 0.5939007742425575, 0.5527643267279283, 0.6427082469538203, 0.9186513489858849, 0.8399292729038601, 0.9465947657958218, 0.8604909214998393, 0.8438457408835476, 0.8072989723430226, 0.608264834108954, 0.6739189233236519, 0.5260977643244609, 0.8684168795909704, 0.723430103135951, 0.9885231250904746, 0.5393633299736686, 0.9965072564339282, 0.6055891946156189, 0.6821919364415744, 0.9293812316412557, 0.9842416376946038, 0.7665017202671227, 0.9857777041401417, 0.8880020494812835, 0.5104274601296117, 0.6636807138344175, 0.8977369595641145, 0.670711732951939, 0.7101194809973523, 0.5935002250023373, 0.7085682591681464, 0.6260697658896572, 0.5115719594782069, 0.978597404025569, 0.9711412780388528, 0.5323463186836713, 0.7443673718638817, 0.5534759230084786, 0.5320585519363925, 0.5549329635831545, 0.5457584491030174, 0.8856809557080141, 0.6971766576615703, 0.9534369858722478, 
0.7075979683769842, 0.5186234517986679, 0.7460625014903279, 0.7121570291575405, 0.568867023785018, 0.9219906914836531, 0.7483642948521463, 0.8566159348860984, 0.6610289330697701, 0.6072952278732857, 0.6893717118806366, 0.9889030215181692, 0.5676680358886557, 0.9035170099538545, 0.9973123248652106, 0.9103414506871877, 0.5692582194538067, 0.7477913163089939, 0.9346456437228037, 0.9803655898875825, 0.7036695855409989, 0.5867458900847056, 0.887575696459987, 0.7522539684843835, 0.736946681704536, 0.7933727235827319, 0.8136852391666874, 0.9942498362471706, 0.8665527033984071, 0.9501379255441257, 0.7156746655851214, 0.7841013142769957, 0.7390315864725435, 0.5156428464282075, 0.7386079562567796, 0.8775200896386206, 0.8957082599358825, 0.5059902142367407, 0.502550910021164, 0.8642224567707346, 0.5776575090699293, 0.6326819176327442, 0.6607039093099374, 0.9064906483272657, 0.5480572572671709, 0.5398923822057775, 0.7674651816129506, 0.8310322111801978, 0.9576449964580247, 0.7214845984629337, 0.5055966576048021, 0.8364429014559547, 0.8982441753996935, 0.8561866938769425, 0.9121594337316888, 0.711217267601604, 0.8268529022838278, 0.5996116314617441, 0.675959430812749, 0.7778624616582475, 0.7427233797355075, 0.6256184058009254, 0.5080799852863116, 0.8996295681706485, 0.8114081929537738, 0.5858332593202784, 0.9162351559479616, 0.5639018365498003, 0.6500645176929221, 0.5826681288791782, 0.7954344319006095, 0.7234519772609602, 0.7003029762242992, 0.9020343511632415, 0.6357025324314911, 0.6713717746942747, 0.8647648079365163, 0.7845220086867348, 0.9062427823730064, 0.7235819152763311, 0.9489553995135045, 0.9282358177913306, 0.7016315753945147, 0.6949452691735609, 0.8265645162110082, 0.8548632173635953, 0.6688230863141941, 0.8398355983340178, 0.9784915536951264, 0.9536922186163591, 0.6208845502814971, 0.7238466222316613, 0.9821530338779665, 0.5041573231981352, 0.6555274657852574, 0.7927521027951518, 0.7036047285111762, 0.8356879624819646, 0.7397411983648284, 0.9139157810000794, 
0.8604452232977858, 0.665413060210704, 0.6363748612142833, 0.6443454225155846, 0.6105836412009449, 0.5758189765092213, 0.5255362938128991, 0.9304495638475007, 0.6366591749889869, 0.7130991531182121, 0.742345107255981, 0.8396368474515439, 0.9917681604211397, 0.7516860532631663, 0.7244921343101527, 0.6686355090064794, 0.6928632004123215, 0.6976304606241784, 0.6612489073738339, 0.6809033094866476, 0.7619591933832485, 0.698833013729103, 0.9585557580537929, 0.9797920054979969, 0.9855547506887493, 0.9214248060039312, 0.9186914935631555, 0.5843561858425479, 0.8010985488255218, 0.8440664648744154, 0.6018087291883145, 0.9012563199501973, 0.8374199648737702, 0.795661406330584, 0.7960622714437549, 0.8552094691470538, 0.8544717574694283, 0.5629658498768744, 0.6071386655153468, 0.6151117073751593, 0.8180557640914754, 0.9478717872263751, 0.5252096677993874, 0.6838835358442397, 0.7366854530263358, 0.6144063742344184, 0.8778855722955683, 0.8363282637810704, 0.9287257745608704, 0.7587985867748129, 0.656073183992102, 0.5785837877872285, 0.883226159806636, 0.6491367515159512, 0.7753889172822628, 0.9916210892190954, 0.7650454984572947, 0.8420904265694902, 0.9582282142838426, 0.9961385020148816, 0.6456006659432508, 0.6389447523509337, 0.5144791725545079, 0.6479251122879925, 0.515886404666275, 0.7854346530952321, 0.5192985116967572, 0.5590782518856463, 0.9094716486672645, 0.64157757165627, 0.9262581850399487, 0.9964114398883375, 0.8206019931991638, 0.9662159804212183, 0.5061272723218497, 0.7618026120943746, 0.9410795795971356, 0.9617134576099076, 0.5846478678979898, 0.767864490716794, 0.8021694925285706, 0.8333555431790537, 0.9385973913809338, 0.9793107255439716, 0.648973724454168, 0.9396080906128588, 0.9168326135275411, 0.9042202101780509, 0.5887014247155781, 0.6817711573565666, 0.779952811679819, 0.6783648996566026, 0.9481520209337795, 0.6919404085348397, 0.7181547543053408, 0.6089496088979642, 0.5479043501981455, 0.7651984015570508, 0.6600288330354326, 0.6039525894026194, 
0.6375343301229046, 0.5894704933255116, 0.9097531733684207, 0.7026355727232999, 0.9047534417321893, 0.505556923461971, 0.6620824949546744, 0.6904099701483333, 0.8000567564957775, 0.7387391480576346, 0.6524935719082019, 0.8134089427346523, 0.5263174996821618, 0.8896399740307409, 0.9763187107667436, 0.9892097833255319, 0.9700506210998167, 0.5300987058787058, 0.8296996776171313, 0.634872420232828, 0.5402091529883839, 0.6372476846286199, 0.6936789909507765, 0.8776598822428939, 0.5556111393930533, 0.835237501367988, 0.6941716627132704, 0.934483719630499, 0.958326007083752, 0.6018999510742888, 0.9129302137945171, 0.7838810133795702, 0.9773790523783594, 0.7592129588466809, 0.9494533522086162, 0.8621195420557352, 0.9150536508721089, 0.5001862319308534, 0.893442665855245, 0.8382127640403796, 0.5328512515456705, 0.6385744576633918, 0.7539374253912282, 0.9873618722171875, 0.9894300185496903, 0.6516634086161293, 0.6760481679596156, 0.9860429921028746, 0.6720491468766756, 0.676194732755744, 0.6266010386467017, 0.9656720958783187, 0.6653606751278193, 0.5530121266326611, 0.7870716378546273, 0.5338784067571696, 0.865088549304875, 0.673577776892917, 0.9827081665507307, 0.6524150580745054, 0.632399063924097, 0.7300131898867317, 0.8327004460796393, 0.5434621965675879, 0.6748887713413922, 0.7205421895221704, 0.8395616837833538, 0.7840112694878065, 0.7495487791854549, 0.8455326681130642, 0.5324256040620687, 0.9144297801909465, 0.5282219804014493, 0.5549625970599592, 0.9662310323354126, 0.6447346727049663, 0.8333133813525198, 0.7247826415871037, 0.8065072993815536, 0.506163120588488, 0.9963226682286757, 0.7156742886447665, 0.9107778017087054, 0.5502902420264376, 0.7854479898697108, 0.9969025319096945, 0.5519972480362527, 0.8536891303278593, 0.6375372092282958, 0.674027719904182, 0.6303209325286557, 0.6433571881970991, 0.6988802127388056, 0.8860296406889692, 0.7922703680927962, 0.7572808820757377, 0.9922440807436588, 0.5573709158904556, 0.9789396585596872, 0.6689039772877672, 
0.7686341997049408, 0.9677306607413034, 0.745070731027007, 0.7413647263271838, 0.8312641881796591, 0.6410111513009584, 0.6704960873691872, 0.8622257573471338, 0.75381910677669, 0.5700469587577306, 0.9557219977769258, 0.6954874873345098, 0.5957814470636679, 0.5423522226396933, 0.9353542225557778, 0.7599610296502375, 0.5128871666278307, 0.5651774142610106, 0.5537307166324317, 0.8739058446977492, 0.9421079314958218, 0.6912739065487253, 0.9967267939674567, 0.5518928551394332, 0.9267284792652294, 0.5789747830722392, 0.6626976349908329, 0.6163203781733855, 0.9363244203226314, 0.8948714975018945, 0.5214783985245284, 0.9168860539894772, 0.9052932006167165, 0.6774335970225241, 0.6630973780482265, 0.9930775094404001, 0.8129198214276246, 0.8455114461051217, 0.5164534605937872, 0.6050806044849881, 0.6682224791968006, 0.8647628565774225, 0.5810229912673434, 0.6447832253172068, 0.8545954073584852, 0.5736692407153106, 0.7653818935849277, 0.538308789142607, 0.5572240262266513, 0.5097945065493701, 0.948398610357437, 0.5171745868454434, 0.7881033007845146, 0.7309215400073392, 0.8286242438608158, 0.6044326315616171, 0.9719792998742722, 0.5492842371603607, 0.6258540198349027, 0.9128996039921438, 0.5213644970355132, 0.772455381964787, 0.8908564273300332, 0.618066068039179, 0.7850026566593354, 0.8984844055972834, 0.8894705887592418, 0.8863700782951249, 0.6542095841620074, 0.6455303469473022, 0.7823715739634269, 0.7979141116388988, 0.7644610906192374, 0.8025277264462468, 0.80745199109308, 0.8490458642581238, 0.8683469717840697, 0.5541381919932256, 0.666574513804582, 0.975011405157963, 0.875042314111878, 0.6989097969007545, 0.6482755714852344, 0.720438934925844, 0.5045830321043804, 0.5996077821537482, 0.8942552110584305, 0.8079362934551291, 0.8708580847725722, 0.9655677030106309, 0.7205319790130653, 0.9373132937076454, 0.888371479394419, 0.7936034659112817, 0.8111978509682978, 0.8465209033172204, 0.9725726717871929, 0.8110614396192282, 0.5967338661243291, 0.6207388035043866, 
0.987583557643821, 0.5272236431695583, 0.6222067978763364, 0.8460710313413496, 0.7910523449333242, 0.6862238871442647, 0.5056749752705407, 0.6046016304515754, 0.9812902187925922, 0.988550399579386, 0.805386537678998, 0.6972713064253814, 0.915147884907915, 0.8837750426480389, 0.5612741338425564, 0.895080356587941, 0.6377695485394768, 0.5446805925421283, 0.5007266461144655, 0.8684685076628353, 0.5653054916759821, 0.7009384748068157, 0.8732893815031064, 0.7523736302870523, 0.9242940860478683, 0.8169784583178696, 0.6086746475674403, 0.8927098083323383, 0.7604628436861527, 0.5726414466839644, 0.6998925619700167, 0.7605235794676684, 0.708191068677783, 0.5178391866521537, 0.7775334355596157, 0.5685030827343135, 0.8432905815397527, 0.7554271294148058, 0.9419232221710917, 0.9046279933351717, 0.7112322642628832, 0.8731272181391675, 0.7091951687682443, 0.590523463136732, 0.6722722352928301, 0.5157093554285511, 0.8115541634662325, 0.8774551064111218, 0.882748650381509, 0.9792856914216577, 0.6448967411770633, 0.7996157247328766, 0.5718594677083753, 0.9798719810024641, 0.6666267885514123, 0.5240318363238934, 0.7624552202938945, 0.8678053858676439, 0.8297798063041102, 0.7993135680821599, 0.8670615655199345, 0.5617580223731742, 0.5103460006762935, 0.71569208559022, 0.9364169294563183, 0.6449850207789598, 0.590563994001492, 0.6242535496081378, 0.6179751360390937, 0.8345621896286477, 0.6088965013339616, 0.612616990499441, 0.6497769389521777, 0.6151845809732843, 0.6217169080809714, 0.5758173482437918, 0.741999583255877, 0.8876529066642673, 0.9684639314776782, 0.9440127958628906, 0.5798993706280596, 0.7539656857359934, 0.6059253523625864, 0.70372180060122, 0.7480830986138708, 0.8055245271609341, 0.6499593975067659, 0.5564261169013638, 0.9784838364467618, 0.8857767922074817, 0.5536956320676218, 0.8869616638030391, 0.8580835079603728, 0.7396156176861628, 0.9610523077671722, 0.7251860103706547, 0.9864032004899759, 0.884241564870379, 0.900769591042476, 0.6948585987495902, 
0.9790568458645679, 0.8199310403177209, 0.9434198099989655, 0.8617708400583362, 0.7569618756740477, 0.7477771874030309, 0.915100218415511, 0.9200585341128239, 0.6694981859778837, 0.8674568394086739, 0.9701611613689733, 0.759791175405224, 0.6391564425593448, 0.585762618410348, 0.8530707111359189, 0.8619129677351934, 0.8922530419752425, 0.8337760254731911, 0.8052787534060222, 0.9727653235114824, 0.994656636180288, 0.9303010429780785, 0.7408786104035472, 0.8985205605395863, 0.7519587184721476, 0.7258881510666814, 0.5081311499569656, 0.814529170383263, 0.5234585526552524, 0.9752012045352194, 0.9093942118088721, 0.5145055499209501, 0.7498530481485208, 0.8326149591173078, 0.9438811465166068, 0.799767630101196, 0.5106275157910719, 0.5129107162369075, 0.5812060167687076, 0.977412361852132, 0.948798284493662, 0.626706102722419, 0.8553085808036411, 0.6590759512801778, 0.9212824474725669, 0.8167985004798506, 0.5167588369156132, 0.7562820756987764, 0.5237127583781183, 0.5327005398826717, 0.5114894429111333, 0.5452077556984849, 0.989701362751594, 0.8963502917148533, 0.6206919943103992, 0.8828758411664133, 0.6040659062537469, 0.5606449099596892, 0.9304726708451986, 0.6667980159209577, 0.5284368909809218, 0.8586424193346593, 0.8406884810498844, 0.767338019870124, 0.8139338077618663, 0.9485968122205686, 0.5108413344359136, 0.8225778592113587, 0.8925925588191661, 0.7239061164788698, 0.9409420679563103, 0.6166441822046764, 0.8333851650014761, 0.8725780310241862, 0.8724609955315272, 0.6412746475947531, 0.7507160870084728, 0.6855605908982398, 0.6762086439172391, 0.9946360546939546, 0.8895557214538586, 0.7968787314538355, 0.6705484581317189, 0.750963780969884, 0.5692483094684155, 0.7559875259936252, 0.818728674429207, 0.8295800037176841, 0.7444454271304493, 0.9524202813356795, 0.8588896597167563, 0.8342995884543927, 0.9161102117415443, 0.998331905932066, 0.9066585252269295, 0.9298018970053079, 0.6764629725418927, 0.7688219165422201, 0.8876403101439907, 0.5714713064039865, 
0.8115471474842331, 0.8397706970997907, 0.9651048141567546, 0.5304659066947781, 0.5078995689397696, 0.8177137654654869, 0.7032081737369679, 0.7759603524681997, 0.6360366381639903, 0.9084883373570118, 0.6280887362601106, 0.9742906929331399, 0.7820801138138798, 0.6552859682134242, 0.9956836467562704, 0.8206115612687664, 0.831244613900549, 0.749181609555085, 0.6428790910240353, 0.9703870509388732, 0.8003280349250845, 0.9346010015960775, 0.7321561760797317, 0.850643556433561, 0.8790562583161815, 0.937094040180904, 0.5778541633949055, 0.9320716206607246, 0.5755066534989784, 0.6981796578212742, 0.86804327865494, 0.9313109389226089, 0.5860421136726794, 0.6979123572334596, 0.5562115157312126, 0.6544038601673974, 0.9545930379067129, 0.7901642320455392, 0.9911908326117609, 0.9093283017501468, 0.6912548281890023, 0.8934881587055545, 0.6673018399168282, 0.9481531991056924, 0.8116404538812667, 0.5770754248899665, 0.5196117212832351, 0.6979862196154123, 0.9245485013813307, 0.9917188220948336, 0.610076792018597, 0.5499141185003689, 0.955130421009397, 0.6246213241222536, 0.731071981838161, 0.9719961374373023, 0.6560272229543945, 0.6341377238834913, 0.7540249462609622, 0.6786389195926681, 0.6745884051607864, 0.6294019630179234, 0.6060127236375559, 0.6663154563897371, 0.58079534741119, 0.7620100270432207, 0.7513447071612716, 0.9585109744380339, 0.6009890744655164, 0.9121535759308554, 0.9566285666553154, 0.6824405287556708, 0.8639903982818585, 0.8529525637219864, 0.6681915170856614, 0.8723931114241338, 0.5422347987520577, 0.9348408509193347, 0.5895058141497216, 0.6533248819793822, 0.8591311585183602, 0.7533034441939205, 0.7397525129753619, 0.8449103857413255, 0.9504865702120677, 0.5871515120192112, 0.5125716253608092, 0.5321440807463691, 0.6318359214082465, 0.7738158985772965, 0.6208270749831529, 0.5723376132508678, 0.6295520426619294, 0.6079321789461327, 0.6307962236594624, 0.8720668013612612, 0.8573411159038253, 0.7994830479560654, 0.5291129366698315, 0.730299898341132, 
0.9992615214712324, 0.6489138667889941, 0.5570532966529903, 0.7987420251300448, 0.5148767588015648, 0.5527355044801634, 0.924065301217761, 0.8314834555166328, 0.8765994018850569, 0.9177234086707626, 0.57744477014974, 0.9328207268559405, 0.6129153592088881, 0.5706522486999459, 0.8143160171643082, 0.7820681460886361, 0.9065851481515177, 0.6747397129193216, 0.662307905265836, 0.5394189610744446, 0.7157743243330196, 0.9039485508480232, 0.9667686813609421, 0.6656391845140814, 0.5484272229592088, 0.9623074549208026, 0.7812868313896815, 0.8905651914898415, 0.8343217898773115, 0.7224740160666783, 0.6575307055122546, 0.9799199071174773, 0.8594535075791576, 0.6683701936870281, 0.9313869844106115, 0.7656408799377834, 0.883508213446771, 0.6456353267342998, 0.8409905388214477, 0.6871639260185411, 0.6587994136517332, 0.6425464747042164, 0.6553159285025008, 0.747979223736001, 0.7692181538101066, 0.743110228910683, 0.7214353977774258, 0.6200631941956483, 0.6875547424660127, 0.7438073924911068, 0.9899620830289777, 0.831224918590273, 0.7242530387656467, 0.6378274276205242, 0.530447116746884, 0.8124503711694029, 0.9982278806039738, 0.5212795472509764, 0.9438474297895023, 0.7861639295705878, 0.8329548336005874, 0.6712733462473335, 0.5964346733084825, 0.8489478062827802, 0.787153868650061, 0.952430226281932, 0.8299829707311365, 0.9011520767420665, 0.6810330481933665, 0.7959215924779854, 0.9722465038605195, 0.7826533281930301, 0.9366391191315367, 0.7479530558240454, 0.8400515875519181, 0.5946967229333473, 0.9460118570216752, 0.8453284970022399, 0.8251354974527325, 0.6168331129517055, 0.6346175932775842, 0.5674624961393282, 0.7279324391880804, 0.9427375806116307, 0.9453012988849482, 0.566598313039319, 0.5470043532956596, 0.5506119789704131, 0.5552027739065204, 0.8798668426857461, 0.7814265117057642, 0.9105978810568618, 0.7551641220745543, 0.7437903608140974, 0.8332810931866148, 0.5476016993855162, 0.9006731853398722, 0.5130279235575103, 0.5301623726086515, 0.6872910075239711, 
0.7465196035742174, 0.5236905595675623, 0.579334991486078, 0.9101331620921582, 0.5025037859152981, 0.7623963808924552, 0.8587356036158069, 0.8836943878390509, 0.624908748798299, 0.8689501034086986, 0.6383179754719102, 0.6425498652060226, 0.6865549932687112, 0.7412463734676329, 0.6280305951979734, 0.6437810921796641, 0.664301809566634, 0.9982913378202819, 0.6835782479013142, 0.9298308701742145, 0.7381133754037414, 0.602769739861282, 0.6755225753091366, 0.5413389702294492, 0.8481471109755765, 0.7439782030925761, 0.8138407649855817, 0.5598508062584108, 0.6050458116401012, 0.516861323675953, 0.8895397779871683, 0.6518058261117071, 0.5725061949757606, 0.623547182902894, 0.9919124403847397, 0.659000456026009, 0.8083253554612775, 0.6451740727197146, 0.6282817066348705, 0.5639453402867562, 0.5946570805200544, 0.5080975294363962, 0.5524821751962263, 0.9259315526655564, 0.6973665614471751, 0.617131134865859, 0.5194055776007778, 0.7390835637360076, 0.6479371583420295, 0.901291366850015, 0.9416712931538062, 0.8702742810410187, 0.5234046261809551, 0.779562077366474, 0.5046784748198361, 0.8325822926406503, 0.9648522581365508, 0.919124956626611, 0.5788926264152097, 0.9697145713945676, 0.5926024252350969, 0.7196987400088733, 0.5650184378653482, 0.8986896930685337, 0.5887093116647508, 0.8317607004520826, 0.9000910444385397, 0.9704087779681649, 0.9822206676047478, 0.9390015404380947, 0.7350260278778347, 0.9777580044780979, 0.8209574602624066, 0.5456521126082413, 0.6690582116818913, 0.7106763566968786, 0.5358275300663002, 0.5175184644545454, 0.9671598442824778, 0.8434477370108506, 0.5336708151203241, 0.5491230678879331, 0.7052364681589574, 0.8980568381314566, 0.8490511202073877, 0.9963586262301354, 0.7150740891105958, 0.9154839500867846, 0.5416947623775938, 0.8258943743094442, 0.8831062675985984, 0.9996308901716864, 0.8606535904520898, 0.7485940629171184, 0.8348248664365192, 0.6568738720798364, 0.7059718916191766, 0.8191499316839674, 0.5355428450253126, 0.994181873315425, 
0.7192744543246001, 0.911784555144449, 0.7361051525610574, 0.9461471624844567, 0.9919364509529531, 0.7647576243968814, 0.9939217810877989, 0.7923205022211934, 0.5917206228464386, 0.7540679084528483, 0.6380451416340747, 0.90876941200116, 0.9109647990255668, 0.8762237801742128, 0.8718376708948153, 0.6089466601474736, 0.9422088278828535, 0.6992615667696045, 0.7428203481317598, 0.5037763202746968, 0.7581342581140671, 0.7428133483437884, 0.9507251888770272, 0.505767854923442, 0.7636171563857431, 0.7186992225390344, 0.5345903069888588, 0.8721687992114349, 0.6745420846651248, 0.9626044992106007, 0.7701081217624797, 0.9169916420117684, 0.6294399343689318, 0.8010339290208828, 0.8325697982122198, 0.6690063737377006, 0.5758870749139879, 0.9308131521451991, 0.9741957498204521, 0.8426989858673751, 0.6721335472374792, 0.9840082004175503, 0.6398082832468853, 0.7146739203313057, 0.6388539708213767, 0.5518810251899979, 0.6086908267141273, 0.9853312240410674, 0.7695695937780771, 0.5236407077102752, 0.8050335400548572, 0.9590852344348106, 0.6047981891703997, 0.7082475900864009, 0.8126924379507021, 0.6555187245669541, 0.5477763460078676, 0.6767294576614296, 0.7567072811039409, 0.929177106706844, 0.8297560141639826, 0.9084981060472459, 0.7295145406424864, 0.9576025106244299, 0.6031987365854332, 0.7400211830018926, 0.9131839791103595, 0.7564706896145135, 0.9069179695939111, 0.745852089723429, 0.5693640220477191, 0.530967997180285, 0.5878471579495275, 0.5272256477221992, 0.5541266028270933, 0.9054360457380093, 0.8451492738151849, 0.6276794665682606, 0.6393005769479679, 0.9156127329190741, 0.5732111073062887, 0.6820696140948646, 0.5487604696872912, 0.6638210307197243, 0.5357885664849409, 0.5574079664671834, 0.7520949373170231, 0.9310433927684811, 0.9611895583051681, 0.5508748444696392, 0.571856751833049, 0.8083324369894647, 0.6401666279076357, 0.5149783616224346, 0.712951402389226, 0.9697771418618912, 0.9598937218479184, 0.9315437048818521, 0.8780095209303238, 0.8307292114659354, 
0.8632091632818641, 0.6631836697892819, 0.9032255101652717, 0.6208718076132246, 0.7981130510134825, 0.7619133161493655, 0.8393621474624626, 0.5142109244042133, 0.6853662583631417, 0.9174443774617522, 0.6264996016755534, 0.8142093949560223, 0.6107622286363483, 0.7631103323289437, 0.9595058972873932, 0.7480961120622356, 0.7870747650761089, 0.6563860545810211, 0.974933332862683, 0.8790500055690837, 0.5173268655970613, 0.9077615245351769, 0.5649220280946787, 0.9250425181197677, 0.515768911176496, 0.9861365498919419, 0.6204874195655736, 0.8856072237824193, 0.9696742394850757, 0.5601860727169075, 0.5566053171793514, 0.8148928789997116, 0.9276377366796138, 0.7620261840941702, 0.583685460339791, 0.5012766658992078, 0.658912385309783, 0.997645539852714, 0.8691218141782133, 0.740275226906329, 0.8604454937652504, 0.9714989365548432, 0.8680033424336302, 0.8591432115153267, 0.8818996997172206, 0.851022579128784, 0.8889644380070153, 0.5579733654774679, 0.7015377441221222, 0.5765878562999958, 0.848423838051402, 0.6794217040789039, 0.620849406482332, 0.9365532854758876, 0.9804626419221518, 0.933896126925242, 0.9638704780744998, 0.6932434659020816, 0.624355925829668, 0.9914349538704588, 0.5973919147172617, 0.9904960431312819, 0.6871952043194742, 0.5919116200084016, 0.5975622089389194, 0.7210960692113648, 0.8489350246926728, 0.5913121600241779, 0.827416809809387, 0.808243543542049, 0.916582822950619, 0.7827504972902646, 0.8035431848607406, 0.7420322889687868, 0.8738482420566027, 0.6719095823717143, 0.9413923383732696, 0.8894892870199551, 0.9286226397585142, 0.8480164759616844, 0.5499974689635071, 0.682134780883203, 0.9106390015767272, 0.5415436187482721, 0.6253802185391212, 0.9071158302684275, 0.6646717036149808, 0.9701109219243478, 0.6426738804352837, 0.6713109598327993, 0.6822901646962827, 0.6609658523183088, 0.5259440187730062, 0.9996007269133378, 0.6542960807197811, 0.9604380888060101, 0.7939003493320789, 0.6710603868510411, 0.9143222686394259, 0.7011769611265453, 
0.6984193396617862, 0.7835829108338777, 0.9029053761138512, 0.5185301212420292, 0.9768748983913915, 0.9415011457014427, 0.605667165162392, 0.5807487221579383, 0.5229972519469499, 0.8996238300777222, 0.5333523175178193, 0.6338312707718852, 0.7906595502622555, 0.7394776958855113, 0.8822647031012276, 0.5792456726207447, 0.6060289752084239, 0.59738722505868, 0.8036987054884588, 0.6318838918780572, 0.7542769303287236, 0.7371766288707337, 0.5317398004401181, 0.9859062415675864, 0.5090268555795172, 0.5953014524822569, 0.6845006038992987, 0.618116678501057, 0.8397584495301553, 0.8911417998315636, 0.7574370040360896, 0.8022049029347608, 0.7412617112073377, 0.8521415328975266, 0.7847448973135493, 0.9361824326521333, 0.6141933606996414, 0.5579393041666796, 0.743446264721986, 0.8218650490572034, 0.7638250078143927, 0.6037231985775295, 0.5646252241428036, 0.8413183953870893, 0.8763059540536184, 0.7659744179680156, 0.5764257574449114, 0.9540026281628655, 0.9459591208912934, 0.6740129097806586, 0.8174383407713175, 0.929506104553306, 0.8332831904528146, 0.663263566225297, 0.5013466267031965, 0.8810081628878481, 0.961015739295236, 0.7202279995184866, 0.8956031104786478, 0.6629094231665342, 0.6499454852891273, 0.5635704829501369, 0.949100252346629, 0.801409983664389, 0.8499217548992319, 0.7102412441342927, 0.5659466859462172, 0.8310819671728318, 0.64919690062132, 0.9745646407105648, 0.5353443784099708, 0.7807915025186756, 0.8944976796764404, 0.7322101169379684, 0.8929598254934342, 0.8225385148406966, 0.9877194806062892, 0.8519158779828959, 0.7490921416150285, 0.9910482718179279, 0.8629317263921277, 0.9324597259320305, 0.7523593035203828, 0.5337732995406582, 0.9381318318201499, 0.9534112778188844, 0.9901212873349984, 0.8453933032595844, 0.5665860762199117, 0.848745001468165, 0.7302542818847466, 0.5806048673755082, 0.8892291005016375, 0.5600656515812532, 0.6621505183869394, 0.9882551078284239, 0.6184017365398122, 0.7034795141286356, 0.5713806747775991, 0.9793210788772707, 
0.518739591676302, 0.9770530336462055, 0.6238009551441674, 0.769552498182572, 0.9021421508179768, 0.5864536992558724, 0.6650409805719825, 0.7029192050307664, 0.9247051787419717, 0.630844010055782, 0.8251133014313583, 0.7433441661396432, 0.5007663927200829, 0.5657765378652244, 0.8560847084618797, 0.532407529824069, 0.6203428995684561, 0.8657224988368766, 0.9414847678534359, 0.8128096812842516, 0.9415149845042652, 0.9968675477337037, 0.6910928344764198, 0.862197167781477, 0.7900355818901039, 0.8144255043885911, 0.5715349855162275, 0.6451606351791037, 0.906487817882614, 0.7955509442071145, 0.6138461827193851, 0.8672158586412106, 0.9821305854356762, 0.6490146156074252, 0.7497819861332757, 0.6233209219296154, 0.9961367778355639, 0.8268389494579773, 0.5911445306477774, 0.8692570468595011, 0.606225822878868, 0.9071164031009722, 0.9763444065528841, 0.8330365267963592, 0.5816252915967688, 0.8034701004585916, 0.9815171685338404, 0.6407791119865044, 0.6705284390014794, 0.9895017351388478, 0.8043267796899647, 0.9053671578191433, 0.8501588407732036, 0.9299602133962361, 0.7797121335804715, 0.7580661891275962, 0.757995553462457, 0.9207085675003752, 0.6603775693497382, 0.9203211401929199, 0.8063039460173962, 0.5072220115588004, 0.5834564258945258, 0.926672586643553, 0.5733840626417026, 0.6725181326603895, 0.7627139978493577, 0.9188483425695912, 0.6588471441608188, 0.5489400419829952, 0.5530431436732157, 0.6926795995203132, 0.8336761107772268, 0.5353857019029865, 0.9135668443449176, 0.9404195846644339, 0.6045938732687836, 0.832102712481127, 0.7246067899538654, 0.5797261865561318, 0.9086097530184468, 0.6794967576079536, 0.7279788100183431, 0.58352781439535, 0.9132851253243481, 0.7916021707578611, 0.9197166061178488, 0.6628595991537716, 0.8751491715264126, 0.6358378980923691, 0.6961644191741689, 0.8791648323764291, 0.7255280967969631, 0.8463327300462983, 0.6462634564166616, 0.654412280900885, 0.5028359673078285, 0.7320363872494591, 0.6738780657907539, 0.553314417855775, 
0.8056666401066834, 0.5144258586110524, 0.620400666159552, 0.9618885420852021, 0.7212572393134558, 0.5921208623210962, 0.7307119192881724, 0.5658365851952072, 0.8744866290286397, 0.9094057340288264, 0.8727496131607384, 0.7963817783094735, 0.9445730122412959, 0.545107747796955, 0.7785372942342575, 0.5510461234613888, 0.6704647554854897, 0.6297175253271474, 0.9476322025069388, 0.6382585177372411, 0.6858951473332586, 0.506924472348355, 0.9711299642876424, 0.8416808932653932, 0.5171392145582858, 0.6070454066199347, 0.8181799947568047, 0.5949269235194905, 0.7011254287996236, 0.8851795644865501, 0.9336508387720961, 0.8114802756022452, 0.5335668795271751, 0.8477596579754565, 0.7839979889210541, 0.96357153760001, 0.7491378700375544, 0.8409461201072477, 0.5182573778086701, 0.983121330872997, 0.5727923494724874, 0.8733073479444509, 0.873732730809655, 0.5476747736734127, 0.8775909512349692, 0.6955266788524741, 0.6173700379279927, 0.5658567754437628, 0.6583144046193574, 0.5692106979715361, 0.8098959753159554, 0.9453743233580805, 0.5651749684071069, 0.940926259648261, 0.685107147700752, 0.9944575322647939, 0.8289494905652102, 0.809748986515362, 0.7199759114344715, 0.6477977122734841, 0.5008459124374651, 0.5458201066945627, 0.6673267427553381, 0.8082324602413753, 0.9251185181115764, 0.6593063601022977, 0.8616616372119741, 0.6480564029317369, 0.9928492532923685, 0.5858379800429006, 0.6648109554655944, 0.5843321664540182, 0.6036045936172416, 0.6213941508282957, 0.770544857122542, 0.6881063153758744, 0.9727727046133338, 0.5755144719462586, 0.7771927164667769, 0.547975611687048, 0.9100790258106277, 0.8693354264924416, 0.6873658692536446, 0.9375406398921834, 0.7125903578414812, 0.9670200877604898, 0.8996804982842057, 0.8270947704818606, 0.5220927459316825, 0.8331401696859071, 0.9262220868971276, 0.7662790279540495, 0.7277118081156889, 0.5429633465109894, 0.6499297467997416, 0.7774402106926288, 0.8690704463178767, 0.8746366409400838, 0.5732673225574458, 0.9005027649739645, 
0.5942022846172836, 0.5016184431267685, 0.5724667566027446, 0.5679170853475457, 0.5571346161915087, 0.5359664606916478, 0.9042250327422112, 0.6119169740813302, 0.9537720500114317, 0.6052013758886547, 0.5649458789910851, 0.6698502872027516, 0.5943665694122892, 0.8420214281960239, 0.8922024667608199, 0.8834439172075004, 0.8585943863956393, 0.6545870971585632, 0.511864462734207, 0.7104262024285828, 0.8471447300162056, 0.5798181072903852, 0.9768848182634415, 0.7655062438199809, 0.9043508886239935, 0.5525912837584785, 0.8742135676270333, 0.5924430532702758, 0.7835322160675412, 0.9694952627970288, 0.6790714392487864, 0.7404036568514782, 0.6136874138894772, 0.7372237934320325, 0.5994036089057619, 0.9782860121655232, 0.6905422385653593, 0.7586769845549562, 0.5644560016279353, 0.5531692185323454, 0.9685674417118189, 0.7008834836166964, 0.8997264544497989, 0.8718475745698481, 0.7226049685942544, 0.9042912484634908, 0.6513439857000194, 0.5700677759815922, 0.5497150381648168, 0.6227497770911719, 0.7503174533864772, 0.9106822756873143, 0.9840962079114687, 0.7300989295293154, 0.6965864075687607, 0.7605939906386932, 0.6274927950607967, 0.7280673714160244, 0.5789315699577973, 0.7844095538149041, 0.6364644021146414, 0.7439610644815688, 0.862522773149345, 0.8124127148999893, 0.7329070540894043, 0.5881468559225811, 0.5711736600977816, 0.6516721351146957, 0.9034669670756992, 0.9756058714140338, 0.5105552274536298, 0.7901895171755392, 0.6576826102206815, 0.5153097142342173, 0.990789303225108, 0.5088438668768593, 0.7995434358221443, 0.6931214716326113, 0.7634814188405696, 0.73911987430203, 0.5298346603877444, 0.671802121252893, 0.5833694748626387, 0.7760111993931386, 0.5629325953234372, 0.5625252355339437, 0.7731941368835478, 0.9959076126478873, 0.508411347241783, 0.8637280487632221, 0.849528047564218, 0.6153752783385149, 0.737419052442398, 0.9961788216051635, 0.9308677218503676, 0.8703340877994614, 0.962764480661111, 0.7766341278383857, 0.969699634462984, 0.9249277200849835, 
0.8428072199301783, 0.6542399343112852, 0.7788041204479893, 0.5149808868552206, 0.7703701196956748, 0.6656170615685806, 0.5014842724418124, 0.7339314124414604, 0.7084390098495446, 0.8612823595873873, 0.5534892031891943, 0.5983864941820476, 0.5052883209214856, 0.850654267767163, 0.6037475957376495, 0.7281082593814012, 0.9544564991410622, 0.9605432076438106, 0.8343915022026079, 0.9921839173575588, 0.8659412169494761, 0.7741720891365971, 0.7472776350435302, 0.9417336132983741, 0.7625703370421142, 0.9702957075734349, 0.6520372094621814, 0.5853461211600434, 0.8312698947732468, 0.9517634394178764, 0.793792661047068, 0.9203309022242423, 0.7526004833442819, 0.8126399077384473, 0.5468337059865118, 0.6910297688184622, 0.7602136431901315, 0.8941452215693861, 0.6971265675618927, 0.9405182724430502, 0.6533103886769077, 0.9814364566749274, 0.8507623924593727, 0.7565945531783052, 0.6281461594141244, 0.5528693137681613, 0.9288626428132976, 0.9603078392148165, 0.6981603402405614, 0.7032064823913237, 0.7208791863083243, 0.7321398921344049, 0.982615419205818, 0.8889257198841766, 0.7967556972668262, 0.5800975864571819, 0.5675293292341701, 0.6014051578786626, 0.9793672301564219, 0.5889080862383254, 0.9485731740737803, 0.8619029917296794, 0.953946219999102, 0.9184200168732022, 0.8599682805617193, 0.8671002933337709, 0.9461143436862698, 0.7598611511349622, 0.922132352683315, 0.6421955777614081, 0.8356584099843797, 0.632279882794637, 0.831633128237651, 0.6855347294184113, 0.7310709300373774, 0.7526302259840572, 0.947448154782603, 0.8772582935443404, 0.9482422778697546, 0.7940726000511904, 0.7450961864831841, 0.7727237420772337, 0.6558486134198822, 0.7896300882656345, 0.8169735607765589, 0.9255731208067093, 0.7515772237941648, 0.5285892025573582, 0.9325548935754131, 0.6467259116353458, 0.5527276411995121, 0.6938334564574848, 0.9020166972932648, 0.9511910759048087, 0.850539562392973, 0.9830790204678698, 0.6114751978110489, 0.5696758379184861, 0.6073976614326303, 0.9835859482457012, 
0.6815113601183531, 0.7149422699609673, 0.668469500576615, 0.8134721582485482, 0.9523961505524883, 0.7036764527741294, 0.940936459619827, 0.9203568050849684, 0.6047497360139672, 0.8909711892624385, 0.9276841192761078, 0.8159069790078146, 0.6510312797058044, 0.96457022368362, 0.8197751949050621, 0.6492127877581992, 0.8174404523827372, 0.6456050955745896, 0.5537637854404754, 0.803423481452022, 0.5935711113967153, 0.5656568526537833, 0.6946488551082611, 0.9208290721609491, 0.8285723699677446, 0.5039348227860196, 0.83807770884522, 0.94941060981521, 0.9255519929388483, 0.83151446768875, 0.704261165107587, 0.6705925131448701, 0.7954414904634906, 0.9102417497439719, 0.8973814848083963, 0.7027823156685774, 0.6809743161338941, 0.8400930961410447, 0.6544825206912691, 0.5936191121475589, 0.6098432715716356, 0.6187910356982614, 0.5468658771089432, 0.6264343715214674, 0.5041356670127476, 0.9674962226078226, 0.9050277737885746, 0.7936593582349388, 0.6096407380274126, 0.613611000053517, 0.6238373968969477, 0.7029527730992118, 0.9370604268068581, 0.8806417952241865, 0.5360856084300725, 0.8656844068638787, 0.6668690507771546, 0.6417667250502159, 0.7738938003977714, 0.9426986965128328, 0.6056344566130712, 0.5230112096206755, 0.9159236840030894, 0.9937432348751171, 0.9345038135971047, 0.7683750727524779, 0.5775777960988799, 0.988611692752493, 0.9887296420955052, 0.536327618330819, 0.756566974451988, 0.8366075335427013, 0.9107928771245182, 0.5161937138360806, 0.6413721752920131, 0.8178308551283758, 0.5253290804143698, 0.8248561064031583, 0.5192251763088735, 0.7881903435787168, 0.8067590265203075, 0.8367853891313793, 0.900248399087032, 0.9629618712131891, 0.8197196871984488, 0.9571480519343173, 0.8696243147053886, 0.6024004004835963, 0.9940597346652376, 0.9152422533551574, 0.7880486411629548, 0.9414990986979996, 0.8970066882784551, 0.5453874358818824, 0.6969838763223108, 0.862193537591254, 0.7781637822417875, 0.5200707261894677, 0.9611839472602827, 0.8880188053232222, 
0.8108581395730916, 0.9283254825625256, 0.8449422127229824, 0.6762651829236576, 0.8498937136682188, 0.8167577210771826, 0.979465973456586, 0.7685849046161162, 0.8311605190210216, 0.754767555670858, 0.6669684184801851, 0.9205544972427047, 0.6737882991367233, 0.6472369512866004, 0.8404396384940804, 0.7556380512059366, 0.943057687436327, 0.9452457419539588, 0.7540793253934455, 0.8134577598617327, 0.6870691929818225, 0.951508655185747, 0.6844400623811586, 0.5861785558268452, 0.5406831513303881, 0.7746811900699899, 0.5068925077969787, 0.5977354603726354, 0.9377364336913849, 0.7409974300096525, 0.6037342773254262, 0.8832145645567583, 0.5175076757744124, 0.7662699207204287, 0.9616248496438993, 0.9407397480028867, 0.8995356409653562, 0.6819297687111362, 0.6827526137152915, 0.5003349963411161, 0.7609182947481975, 0.788044318677101, 0.9109547547272163, 0.7778506670147598, 0.9214191628760735, 0.6461387854501006, 0.7102858055994863, 0.954014839734733, 0.6196056386352573, 0.5681670402891323, 0.5133303087650926, 0.5426087875466159, 0.5895786915594983, 0.7746487471172482, 0.8774653772073235, 0.5702753385572272, 0.7142300140535749, 0.6845494399315013, 0.7802740964462768, 0.5895505683992917, 0.7630675455299742, 0.9128899976811513, 0.9836318671591682, 0.685072030278494, 0.9928829912429464, 0.9107919622238143, 0.6295716059777767, 0.761727332751532, 0.7262962855497962, 0.7283901717213126, 0.766932388131865, 0.6357798368712051, 0.604507285867328, 0.6763687771279823, 0.6721865311114175, 0.5186769465341295, 0.8066526417101189, 0.737600245117265, 0.7574946253389045, 0.9822925363868376, 0.6400476720230552, 0.6269319627821257, 0.6952627196645965, 0.9166843147863557, 0.8773734656332947, 0.9900767689051748, 0.6135923163376094, 0.660832847432361, 0.8564925625261977, 0.968731827898049, 0.9807762483112663, 0.9727796581255344, 0.7561000628256942, 0.8481712049796453, 0.5565347548037309, 0.7459688846947418, 0.82472607104824, 0.8199137975632552, 0.9516790158451226, 0.9402416471917567, 
0.9840108406153008, 0.8947776522944455, 0.774628391694426, 0.8986312324925527, 0.5314981070528024, 0.9625744961090141, 0.8911141130487985, 0.7132441127140182, 0.512990257284528, 0.579316618736835, 0.8620983300791822, 0.9833976751025184, 0.9527207830301339, 0.6298273625868904, 0.815495241343476, 0.5881015291527121, 0.7913188640624692, 0.7099210530947955, 0.7522925591640094, 0.5944220340185769, 0.6988632483908601, 0.637266770148003, 0.9576553890178743, 0.9007946341426135, 0.7417935527913493, 0.591436046108186, 0.9998238071596031, 0.5506011167893692, 0.88525712334556, 0.80754877405888, 0.7699961638591879, 0.803929094757599, 0.5663042473531694, 0.8214355169222599, 0.5391587489341252, 0.7605221834560651, 0.5304718300472717, 0.717559255198535, 0.9443316979473941, 0.7943145054172224, 0.9050678036083267, 0.6151612639198532, 0.5602507667394581, 0.8042916469699162, 0.5655426798382674, 0.652548419767767, 0.7713290554797467, 0.7899289360152547, 0.637770175257261, 0.7233317545761551, 0.9089221483811096, 0.5117995326940902, 0.8324344031561803, 0.6153085617230847, 0.9083125477844938, 0.8324152383943755, 0.8090296580483545, 0.5001745848730008, 0.9962346890679281, 0.7373417662116579, 0.9951703386629501, 0.7395931315663136, 0.5696377912019076, 0.5917488874097298, 0.6748573579327317, 0.8769432891857535, 0.5691026729976516, 0.6761289211131405, 0.8314405814992777, 0.9134457819583013, 0.6225968020866421, 0.6135759445656412, 0.8311070898123274, 0.7283674419712469, 0.9486284002644159, 0.6844657630718356, 0.5707254250317484, 0.6909127166143363, 0.6445000342932923, 0.7456531283124668, 0.7859649317468724, 0.6010633225460422, 0.7406827147584976, 0.8724638258710198, 0.5153928851581455, 0.9988760099479499, 0.5669304755233022, 0.8095205407625105, 0.792622485041846, 0.7913910846377656, 0.5535494736315726, 0.6919053993657291, 0.6536061016632948, 0.7464073118075729, 0.6396588396613225, 0.5794470834528156, 0.6490326893473868, 0.690088561595623, 0.874372007474997, 0.8838552498683668, 
0.9743723992673781, 0.7362385941938411, 0.852415921151637, 0.804573500790353, 0.7144546658608908, 0.9113633838322783, 0.8485944806382635, 0.9714298649279068, 0.909170510926882, 0.7429611394694472, 0.5803296224511241, 0.5976068794202294, 0.8940634121658386, 0.7269907695543304, 0.6643535658670378, 0.7882089909695664, 0.8682977316935927, 0.8771195191775105, 0.6633989309201578, 0.740784654049776, 0.7534958910455833, 0.5707755124321667, 0.7274099527553044, 0.5249795181708543, 0.8138896329575629, 0.6967900973580026, 0.5932684545176272, 0.7327506945953604, 0.5553576267162681, 0.77581738471256, 0.6526446923467705, 0.672476438486218, 0.8794851882331634, 0.5546299276591804, 0.6728883641211967, 0.6187927615823623, 0.9692284082564657, 0.9675316647841373, 0.6465906066702043, 0.8603072997320135, 0.6933337633834702, 0.9473599492476242, 0.7345802044687432, 0.8675181044301365, 0.8694198932878385, 0.7770807119429932, 0.9952964264644264, 0.775219941157125, 0.862614356753628, 0.604606272156893, 0.8709402513683127, 0.9857742099084161, 0.8360069393803965, 0.6566118588365555, 0.6170684800499598, 0.5363769203703678, 0.900759320763458, 0.6005496919475729, 0.5691091550441016, 0.7656958835970669, 0.8801159654922822, 0.9441994008703869, 0.798699052743302, 0.9364585577258434, 0.9681879163365615, 0.6611034011875769, 0.5679327352358461, 0.6501808973341667, 0.521372901249659, 0.5458059553407324, 0.6497833035818329, 0.7280676266542376, 0.7208274603999099, 0.5629405338872506, 0.8236189303830068, 0.8832237715362599, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 
50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0}; int h_B[]= { 1, 3, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250, 252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 287, 289, 291, 293, 295, 297, 299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343, 345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 373, 375, 377, 379, 381, 383, 385, 387, 389, 391, 393, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434, 436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480, 482, 484, 486, 488, 490, 493, 495, 497, 499, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526, 528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 559, 561, 563, 565, 567, 569, 572, 574, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618, 620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664, 666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 699, 701, 704, 706, 708, 710, 712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 
740, 742, 744, 746, 748, 750, 752, 754, 756, 758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802, 804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 845, 847, 849, 851, 853, 855, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894, 896, 898, 900, 902, 904, 906, 908, 910, 912, 915, 917, 919, 921, 923, 925, 927, 929, 931, 933, 935, 937, 939, 941, 943, 945, 949, 951, 953, 955, 957, 959, 961, 963, 965, 967, 969, 971, 973, 975, 977, 979, 981, 983, 985, 987, 989, 991, 993, 995, 997, 999, 1001, 1003, 1005, 1007, 1009, 1011, 1013, 1015, 1017, 1019, 1022, 1024, 1026, 1028, 1030, 1032, 1034, 1036, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1062, 1064, 1069, 1071, 1073, 1075, 1077, 1079, 1081, 1083, 1085, 1087, 1090, 1092, 1094, 1096, 1098, 1100, 1102, 1104, 1106, 1108, 1111, 1113, 1115, 1117, 1119, 1121, 1123, 1125, 1127, 1129, 1132, 1134, 1136, 1138, 1141, 1143, 1145, 1147, 1150, 1152, 1156, 1158, 1161, 1163, 1167, 1169, 1171, 1173, 1175, 1177, 1179, 1181, 1184, 1186, 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, 1207, 1210, 1212, 1215, 1217, 1220, 1222, 1225, 1227, 1230, 1232, 1238, 1240, 1243, 1245, 1248, 1250, 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1277, 1279, 1281, 1283, 1285, 1287, 1289, 1291, 1293, 1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1325, 1327, 1329, 1331, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1357, 1359, 1361, 1363, 1367, 1369, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1393, 1395, 1399, 1401, 1404, 1406, 1409, 1411, 1414, 1416, 1419, 1421, 1424, 1426, 1429, 1431, 1434, 1436, 1439, 1441, 1443, 1445, 1447, 1449, 1452, 1454, 1458, 1460, 1462, 1464, 1469, 1471, 1473, 1475, 1479, 1481, 1483, 1485, 1487, 1489, 1491, 1493, 1495, 1497, 1499, 1501, 1503, 1505, 1507, 1509, 
1511, 1513, 1515, 1517, 1519, 1521, 1523, 1525, 1527, 1529, 1531, 1533, 1535, 1537, 1539, 1541, 1543, 1545, 1547, 1549, 1551, 1553, 1556, 1558, 1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1581, 1583, 1585, 1587, 1589, 1591, 1593, 1595, 1597, 1599, 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, 1633, 1635, 1637, 1639, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1661, 1663, 1665, 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1695, 1697, 1701, 1703, 1705, 1707, 1709, 1711, 1713, 1715, 1718, 1720, 1722, 1724, 1726, 1728, 1732, 1734, 1740, 1742, 1744, 1746, 1748, 1750, 1753, 1755, 1758, 1760, 1762, 1764, 1766, 1768, 1771, 1773, 1776, 1778, 1781, 1783, 1786, 1788, 1791, 1793, 1796, 1798, 1800, 1802, 1804, 1806, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824, 1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1855, 1857, 1859, 1861, 1865, 1867, 1869, 1871, 1873, 1875, 1877, 1864, 1877, 1864, 1877, 1864, 1921, 1923, 1925, 1927, 1929, 1931, 1731, 1580, 1580, 1237, 1235, 1468, 1468, 1739, 1418, 1423, 1234, 1209, 1237, 1235, 1790, 1237, 1235, 1739, 1737, 1739, 1737, 1736, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 1555, 1237, 1235, 1641, 1555, 1237, 1235, 1237, 1235, 1752, 1694, 1731, 1237, 1235, 1224, 1229, 1224, 1229, 1237, 1235, 1757, 1752, 1643, 1877, 1209, 1237, 1235, 1234, 1237, 1235, 1234, 1209, 1237, 1235, 1224, 1229, 1224, 1229, 1209, 1234, 1237, 1235, 1224, 1229, 1224, 1229, 1209, 1234, 1237, 1235, 1061, 1060, 1808, 1643, 1641, 1643, 1790, 1736, 1808, 1641, 1237, 1235, 1757, 1694, 2285, 2287, 2289, 2291, 2294, 2296, 2298, 2300, 2303, 2305, 2307, 2309, 2312, 2314, 2316, 2318, 2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2349, 2351, 2353, 2355, 2357, 2359, 1456, 1451, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394, 1237, 1235, 1224, 1229, 1224, 1229, 
1237, 1235, 1237, 1235, 1061, 1060, 1237, 1235, 1418, 1423, 1451, 1451, 1717, 1775, 1752, 1757, 1757, 1752, 1785, 1785, 1757, 1752, 1757, 1752, 1775, 1757, 1752, 1737, 1737, 1757, 1752, 1717, 2615, 2617, 2619, 2621, 2623, 2625, 2627, 2629, 2631, 2633, 2635, 2637, 2639, 2641, 2644, 2646, 2649, 2651, 2653, 2655, 1061, 1060, 1214, 1219, 1229, 1224, 1237, 1235, 1214, 1219, 1229, 1224, 1149, 1229, 1224, 1237, 1235, 1149, 1155, 1237, 1235, 1237, 1235, 1456, 1451, 1438, 1456, 1451, 1467, 1423, 1418, 1423, 1433, 1418, 1433, 1438, 1456, 1451, 1457, 1398, 1398, 1457, 1467, 1877, 1643, 1641, 1770, 1770, 1739, 1737, 1739, 1737, 1877, 1864, 1877, 1864, 1877, 1864, 1877, 1864, 1864, 1864, 2979, 2981, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002, 3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040, 3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3065, 3067, 3070, 3072, 3074, 3076, 3078, 3080, 3083, 3085, 3089, 3091, 3094, 3096, 3100, 3102, 3104, 3106, 3108, 3110, 3113, 3115, 3119, 3121, 3124, 3126, 3130, 3132, 3134, 3136, 3139, 3141, 3098, 3093, 3146, 3144, 3098, 3093, 3128, 3123, 3128, 3123, 3146, 3144, 2983, 2983, 3098, 3093, 3064, 3146, 3144, 3098, 3093, 3143, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3128, 3123, 3128, 3123, 3128, 3123, 3098, 3093, 3098, 3093, 3098, 3093, 3128, 3123, 3128, 3123, 3146, 3144, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3098, 3093, 3151, 3680, 3682, 3688, 3690, 3149, 3147, 3149, 3147, 3149, 3147, 2658, 3707, 3709, 3098, 3093, 3064, 3098, 3093, 3098, 3093, 3143, 2658, 2972, 2972, 4018, 4020, 3146, 3144, 4053, 4055, 4057, 4059, 4062, 4064, 3146, 3144, 3146, 3144, 3149, 3147, 3082, 3088, 3112, 3118, 3144, 3146, 3146, 3144, 3149, 3147, 3151, 4140, 4142, 4145, 4147, 4152, 4154, 4157, 4159, 4162, 4164, 4166, 4168, 4171, 4173, 4175, 4177, 4156, 4061, 4161, 4156, 4181, 4179, 4161, 4156, 4181, 4179, 4181, 4179, 4151, 4161, 4061, 4181, 4179, 
4151, 4179, 4181, 4181, 4179, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 6656, 6658, 6660, 6662, 6664, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688, 6690, 6692, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6718, 6720, 6722, 6724, 6726, 6728, 6730, 6732, 6734, 6736, 6738, 6740, 6742, 6744, 6746, 6748, 6750, 6752, 6754, 6756, 6758, 6760, 6762, 6764, 6766, 6768, 6770, 6772, 6774, 6776, 6778, 6780, 6782, 6784, 6786, 6788, 6790, 6792, 6794, 6796, 6798, 6800, 6802, 6804, 6806, 6808, 6810, 6812, 6814, 6816, 6818, 6820, 6822, 6824, 6826, 6828, 6830, 6832, 6834, 6836, 6838, 6840, 6842, 6844, 6846, 6848, 6850, 6852, 6854, 6856, 6858, 6860, 6862, 6864, 6866, 6868, 6870, 6872, 6874, 6876, 6878, 6880, 6882, 6884, 6886, 6888, 6890, 6892, 6894, 6896, 6898, 6900, 6902, 6904, 6906, 6908, 6910, 6912, 6914, 6916, 6918, 6920, 6922, 6924, 6926, 6928, 6930, 6932, 6934, 6936, 6938, 6940, 6942, 6944, 6946, 6948, 6950, 6952, 6954, 6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992, 6994, 6996, 6998, 7000, 7002, 7004, 7006, 7008, 7010, 7012, 7014, 7016, 7018, 7020, 7022, 7024, 7026, 7028, 7030, 7032, 7034, 7036, 7038, 7040, 7042, 7044, 7046, 7048, 7050, 7052, 7054, 7056, 7058, 7060, 7062, 7064, 7066, 7068, 7070, 7072, 7074, 7076, 7078, 7080, 7082, 7084, 7086, 7088, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106, 7108, 7110, 7112, 7114, 7116, 7118, 7120, 7122, 7124, 7126, 7128, 7130, 7132, 7134, 7136, 7138, 7140, 7142, 7144, 7146, 7148, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7178, 7180, 7182, 7184, 7186, 7188, 7190, 7192, 7194, 7196, 7198, 7200, 7202, 7204, 7206, 7208, 7210, 7212, 7214, 7216, 7218, 7220, 7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258, 7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 
7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296, 7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334, 7336, 7338, 7340, 7342, 7344, 7346, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372, 7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410, 7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448, 7450, 7452, 7454, 7456, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7474, 7476, 7478, 7480, 7482, 7484, 7486, 7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7516, 7518, 7520, 7522, 7524, 7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7542, 7543, 7544, 7545, 7546, 7547, 7548, 7550, 7552, 7554, 7555, 7556, 7557, 7558, 7559, 7560, 7561, 7562, 7563, 7564, 7565, 7566, 7567, 7568, 7569, 7570, 7571, 7572, 7573, 7574, 7575, 7576, 7577, 7578, 7579, 7580, 7581, 7582, 7583, 7584, 7585, 7586, 7587, 7588, 7589, 7590, 7591, 7592, 7593, 7594, 7595, 7596, 7597, 7598, 7599, 7600, 7601, 7602, 7603, 7604, 7605, 7606, 7607, 7608, 7609, 7610, 7611, 7612, 7613, 7614, 7615, 7616, 7617, 7618, 7619, 7620, 7621, 7622, 7623, 7624, 7625, 7626, 7627, 7628, 7629, 7630, 7631, 7632, 7633, 7634, 7635, 7636, 7637, 7638, 7639, 7640, 7641, 7642, 7643, 7644, 7645, 7646, 7647, 7649, 7651, 7653, 7655, 7657, 7659, 7661, 7663, 7665, 7667, 7669, 7671, 7673, 7675, 7677, 7679, 7681, 7683, 7684, 7685, 7687, 7689, 7691, 7693, 7695, 7697, 7699, 7701, 7702, 7703, 7704, 7705, 7706, 7707, 7708, 7709, 7710, 7711, 7712, 7713, 7714, 7715, 7716, 7717, 7718, 7719, 7720, 7721, 7722, 7723, 7724, 7725, 7726, 7727, 7728, 7729, 7730, 7731, 7732, 7733, 7734, 7735, 7736, 7737, 7738, 7739, 7741, 7743, 7745, 7747, 7749, 7751, 7753, 7755, 7757, 7759, 7760, 7761, 7762, 7763, 7764, 7765, 7766, 7767, 7768, 7769, 7770, 7771, 7772, 7773, 7774, 7775, 7776, 7777, 7778, 7779, 7780, 7781, 7782, 
7783, 7784, 7785, 7786, 7787, 7788, 7789, 7790, 7791, 7792, 7793, 7794, 7795, 7796, 7797, 7798, 7799, 7800, 7801, 7802, 7803, 7804, 7805, 7806, 7807, 7808, 7809, 7810, 7811, 7812, 7813, 7814, 7815, 7816, 7817, 7818, 7819, 7820, 7821, 7823, 7825, 7827, 7829, 7831, 7833, 7835, 7837, 7839, 7841, 7843, 7845, 7847, 7849, 7851, 7853, 7855, 7857, 7859, 7861, 7863, 7865, 7867, 7869, 7871, 7873, 7875, 7877, 7879, 7881, 7883, 7885, 7887, 7889, 7891, 7893, 7895, 7896, 7897, 7898, 7899, 7900, 7901, 7902, 7903, 7904, 7905, 7906, 7907, 7908, 7909, 7910, 7911, 7912, 7913, 7914, 7915, 7916, 7917, 7918, 7919, 7920, 7921, 7922, 7923, 7924, 7925, 7926, 7927, 7928, 7929, 7930, 7931, 7932, 7933, 7934, 7935, 7936, 7937, 7938, 7939, 7940, 7941, 7942, 7943, 7944, 7945, 7946, 7947, 7948, 7949, 7950, 7951, 7952, 7953, 7954, 7955, 7956, 7958, 7960, 7961, 7962, 7963, 7964, 7965, 7966, 7967, 7969, 7970, 7971, 7972, 7973, 7974, 7975, 7976, 7977, 7978, 7979, 7980, 7982, 7983, 7984, 7986, 7988, 7990, 7991, 7992, 7993, 7994, 7995, 7996, 7997, 7998, 7999, 8000, 8001, 8002, 8003, 8004, 8005, 8006, 8007, 8009, 8011, 8013, 8015, 8017, 8019, 8021, 8023, 8024, 8025, 8026, 8027, 8028, 8029, 8030, 8031, 8032, 8033, 8034, 8035, 8036, 8037, 8038, 8039, 8040, 8041, 8042, 8043, 8044, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 8321, 8323, 1808, 8415, 8495, 8507, 8458, 8503, 8505, 1877, 8321, 8323, 1808, 8415, 8497, 8509, 8499, 8511, 8458, 8503, 8505, 1877, 1408, 1403, 1408, 1403, 1413, 1423, 1418, 1433, 1428, 1438, 8065, 1456, 1451, 1438, 8065, 1456, 1451, 8067, 576, 576, 576, 576, 1580, 1580, 1580, 8069, 1188, 1183, 8073, 1188, 1183, 1209, 1234, 8519, 8077, 1188, 1183, 698, 698, 698, 1785, 1785, 8426, 576, 1790, 1770, 1790, 1468, 1468, 1770, 8524, 8082, 1699, 1699, 1699, 1699, 1699, 1214, 1229, 1224, 8526, 8528, 1731, 1736, 1736, 1731, 1736, 1731, 1736, 1731, 
1736, 1731, 1739, 698, 8096, 8450, 1736, 1731, 8099, 1752, 1736, 1731, 1739, 1737, 8099, 1752, 8097, 1752, 8099, 1752, 8426, 8102, 1188, 1183, 8106, 1188, 1183, 8110, 1757, 1752, 1775, 8113, 8115, 1808, 8117, 8119, 8121, 8123, 1757, 1209, 1234, 8531, 1438, 8127, 576, 1736, 1731, 8533, 1736, 1731, 8535, 576, 576, 576, 576, 8132, 8134, 1790, 1790, 1790, 1790, 1790, 1739, 1737, 8538, 8540, 8542, 8135, 8137, 1188, 1183, 8141, 1188, 1183, 1209, 1234, 8546, 1780, 1785, 1785, 1785, 1785, 8148, 1757, 1752, 1785, 1785, 1785, 1785, 1641, 1234, 1209, 8550, 1209, 1234, 8552, 698, 1752, 8156, 8158, 1752, 8159, 8160, 698, 1752, 1214, 1224, 1229, 1234, 1209, 8557, 1219, 8559, 1219, 8561, 8330, 1188, 1183, 1234, 1209, 8563, 8314, 8565, 1699, 1694, 1643, 1877, 8172, 1188, 1183, 1214, 1224, 1229, 8570, 1214, 1224, 1229, 8573, 1214, 1224, 1229, 8575, 8577, 1219, 8579, 1219, 8581, 8583, 8585, 1219, 8587, 1219, 8589, 8591, 8593, 8188, 1188, 1183, 8595, 1188, 1183, 1408, 1403, 1408, 1403, 1371, 1438, 844, 8200, 857, 576, 8426, 8598, 576, 1555, 1643, 1641, 1580, 1580, 1580, 1580, 1580, 1468, 1468, 1468, 1468, 576, 8450, 576, 8426, 576, 576, 1209, 1234, 8605, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 1214, 1229, 1224, 1219, 1229, 1224, 1731, 1736, 1737, 1739, 698, 1757, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 698, 698, 698, 8224, 8226, 8228, 8230, 8232, 698, 1757, 1736, 1731, 1739, 1737, 698, 1757, 8237, 8238, 1864, 8408, 8627, 1165, 1160, 1165, 1160, 1165, 1160, 8347, 8247, 1188, 1183, 1214, 1224, 1229, 1234, 1209, 8637, 1165, 1160, 1165, 1160, 1165, 1160, 8347, 8247, 1188, 1183, 1219, 8639, 1219, 8641, 1209, 1234, 8643, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 8257, 1188, 1183, 1165, 1160, 8335, 1188, 1183, 1214, 1229, 1224, 1234, 1209, 8645, 1165, 1160, 8647, 1188, 1183, 1219, 1229, 1224, 1234, 1209, 8649, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1418, 1423, 1428, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1408, 1403, 1408, 1403, 1408, 1403, 
1413, 1418, 1423, 1408, 1403, 8651, 8270, 844, 8273, 857, 8276, 8278, 8280, 8282, 8284, 8286, 1456, 1456, 8297, 8426, 1736, 1731, 1736, 1731, 1739, 8305, 1757, 1752, 1699, 1694, 8469, 1717, 8291, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8294, 8296, 1736, 1731, 8318, 8659, 1699, 1694, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8297, 8663, 1736, 1731, 8318, 8665, 8469, 1717, 8299, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8305, 1757, 1752, 8481, 1775, 1770, 1780, 8307, 1736, 1731, 1736, 1731, 1736, 1731, 1739, 8314, 8668, 1736, 1731, 1739, 8318, 8672, 1699, 1694, 1790, 8321, 8323, 8458, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 8685, 1188, 1183, 1165, 1160, 8335, 1188, 1183, 8687, 8689, 1234, 1209, 8691, 8693, 8695, 1165, 1160, 1165, 1160, 1165, 1160, 1155, 8330, 1188, 1183, 1165, 1160, 1165, 1160, 1155, 8335, 1188, 1183, 1219, 1214, 8698, 1234, 1209, 8700, 1165, 1160, 1165, 1160, 1165, 1160, 8347, 8349, 1188, 1183, 1219, 1214, 1229, 1224, 1209, 8704, 1219, 1214, 1229, 1224, 1234, 8706, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 8408, 8412, 8362, 8708, 8412, 1408, 1403, 1408, 1403, 1413, 8408, 8711, 8412, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1408, 1403, 1408, 1403, 1413, 8715, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1418, 1423, 1428, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1418, 1423, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 8391, 8721, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1423, 1418, 1433, 1428, 1371, 8394, 1456, 1451, 8412, 1408, 1403, 1408, 1403, 1408, 1403, 1413, 1423, 1418, 1433, 1428, 1438, 8408, 1456, 1451, 8412, 1468, 1468, 1468, 1468, 1468, 1468, 8413, 8414, 8495, 8455, 8458, 8503, 8505, 1478, 8415, 1739, 1737, 8419, 1757, 1752, 1699, 1694, 1717, 8426, 8428, 8430, 8432, 1555, 1643, 1641, 1580, 1580, 1580, 1580, 1580, 1580, 1580, 1736, 1731, 1739, 1737, 8477, 1757, 1752, 1694, 1699, 1770, 8469, 1775, 1717, 8450, 8481, 1775, 1770, 1780, 8452, 8454, 8729, 8493, 8495, 8455, 1877, 1864, 8458, 8503, 8505, 1736, 1731, 1737, 1736, 1731, 1739, 8477, 
1757, 1752, 1699, 1694, 1699, 1694, 8469, 1775, 1717, 1790, 1785, 1736, 1731, 8733, 1736, 1731, 8735, 8477, 1757, 1752, 8481, 1775, 1770, 1780, 1790, 1785, 1795, 8489, 1808, 8491, 8493, 8737, 8495, 8739, 8497, 8741, 8499, 8743, 8501, 8503, 8505, 1877, 8514, 8784, 3128, 3123, 3128, 3123, 3128, 3123, 8679, 8786, 8681, 8788, 8790, 8792, 8515, 8794, 8681, 8671, 8670, 8671, 8670, 8671, 8670, 8671, 8670, 8732, 8746, 8745, 8746, 8745, 8670, 2983, 8746, 8745, 8746, 8745, 8530, 8662, 8671, 8670, 8671, 8670, 8671, 8548, 8548, 8732, 8731, 8671, 8604, 2983, 2983, 8601, 8670, 8661, 8604, 8662, 8661, 8731, 8798, 3128, 3123, 3128, 3123, 3128, 3123, 8679, 8801, 8610, 8758, 8803, 3128, 3123, 3128, 3123, 3128, 3123, 8781, 8626, 8616, 8806, 3128, 3123, 3128, 3123, 3128, 3123, 8781, 8626, 8684, 8808, 8810, 8812, 8612, 8814, 8816, 8818, 8613, 8820, 8822, 8824, 8614, 8826, 8828, 8615, 8830, 8630, 8758, 3128, 3123, 8626, 8616, 8832, 3128, 3123, 3128, 3123, 8622, 3143, 8834, 8836, 8838, 8840, 8842, 3143, 8626, 8630, 3098, 3093, 3098, 3093, 3098, 3093, 8635, 8636, 8847, 8849, 8851, 8723, 8713, 8713, 8723, 8662, 8661, 2983, 8855, 3128, 3123, 3128, 3123, 3128, 3123, 8679, 8681, 8858, 8860, 3128, 3123, 8684, 8719, 8719, 2983, 2983, 2983, 3098, 3093, 3064, 3098, 3093, 8775, 3093, 3098, 3098, 3093, 3128, 3123, 3128, 3123, 3128, 3123, 8781, 3064, 3064, 3064, 8867, 8758, 3093, 3098, 3128, 3123, 3128, 3123, 3128, 3123, 8781, 3143, 8769, 3098, 3093, 8763, 3098, 3093, 3093, 3098, 3128, 3123, 3128, 3123, 3128, 3123, 8781, 3064, 3064, 3064, 8872, 8758, 3093, 3098, 3098, 3093, 3098, 3093, 8763, 3128, 3123, 3064, 3064, 3064, 8874, 8876, 3093, 3098, 3128, 3123, 3128, 3123, 3128, 3123, 8781, 3143, 8769, 3093, 3098, 3098, 3093, 3098, 3093, 8775, 3128, 3123, 3128, 3123, 3128, 3123, 8781, 3143, 3143, 8884, 8886, 8883, 8882, 8883, 8882, 8883, 8882, 8883, 8882, 8899, 8894, 8896, 8883, 8882, 8883, 8882, 8883, 8882, 8883, 8882, 8883, 8882, 4149, 4144, 8845, 8894, 8896, 8901, 4149, 4144, 8903, 8894, 8896, 8905, 
4149, 4144, 8845, 8896, 8907, 4149, 4144, 8845, 8894, 8896, 4061, 4061, 4151, 8883, 8882, 8883, 8882, 4149, 4144, 4156, 4156, 4156, 4149, 4144, 4161, 4161, 4161, 8912, 4149, 4144, 4161, 4156, 8894, 8896, 8917, 8916, 8915, 8916, 8915, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 8960, 8961, 8962, 8963, 8964, 8966, 8967, 8968, 8969, 8970, 8971, 8972, 8973, 8974, 8976, 8978, 8979, 8980, 8981, 8982, 8983, 8984, 8985, 8986, 8987, 8988, 8989, 8990, 8991, 8992, 8993, 8994, 8995, 8996, 8997, 8998, 8999, 9000, 9001, 9002, 9003, 9004, 9005, 9006, 9007, 9008, 9009, 9010, 9011, 9012, 9013, 9014, 9016, 9017, 9018, 9019, 9020, 9021, 9022, 9023, 9024, 9025, 9026, 9027, 9028, 9029, 9030, 9031, 9033, 9034, 9035, 9036, 9037, 9038, 9039, 9040, 9041, 9044, 9045, 9046, 9047, 9048, 9049, 9050, 9051, 9052, 9053, 9054, 9055, 9056, 9057, 9058, 9059, 9060, 9061, 9062, 9063, 9064, 9065, 9066, 9067, 9068, 9069, 9070, 9071, 9072, 9073, 9074, 9075, 9076, 9077, 9078, 9079, 9080, 9081, 9082, 9083, 9084, 9085, 9086, 9087, 9088, 9089, 9090, 9091, 9092, 9094, 9095, 9096, 9097, 9098, 9100, 9101, 9103, 9104, 9105, 9106, 9107, 9108, 9109, 9110, 9111, 9112, 9113, 9114, 9115, 9119, 9120, 9121, 9122, 9123, 9124, 9125, 9126, 9127, 9129, 9130, 9131, 9132, 9133, 9134, 9135, 9136, 9137, 9138, 9139, 9140, 9141, 9142, 9143, 9145, 9146, 9148, 9149, 9150, 9151, 9152, 9153, 9154, 9155, 9156, 9157, 9158, 9159, 9160, 9161, 9163, 9165, 9167, 9168, 9169, 9170, 9171, 9173, 9175, 9176, 9177, 9178, 9179, 9180, 9181, 9182, 9183, 9184, 9186, 9187, 9188, 9190, 9191, 9192, 9195, 9197, 9201, 9203, 9207, 9208, 9209, 9211, 9212, 9213, 9214, 9215, 9216, 9217, 9218, 9219, 9220, 9221, 9222, 9223, 9225, 9226, 9227, 9228, 9229, 9230, 9231, 9232, 9233, 9234, 9235, 9236, 9237, 9238, 9239, 9240, 9241, 9242, 9243, 9244, 9245, 9247, 9248, 9249, 9250, 9251, 9252, 9253, 9254, 9255, 9256, 9257, 9258, 9259, 9260, 9261, 9262, 9263, 9264, 9265, 9266, 9267, 9268, 9269, 9270, 9271, 9272, 9273, 9274, 
9275, 9276, 9277, 9278, 9279, 9280, 9281, 9282, 9283, 9284, 9285, 9286, 9287, 9288, 9289, 9290, 9291, 9292, 9294, 9295, 9296, 9297, 9298, 9299, 9300, 9301, 9302, 9303, 9304, 9305, 9306, 9307, 9308, 9310, 9311, 9312, 9313, 9314, 9315, 9316, 9317, 9318, 9319, 9320, 9322, 9324, 9325, 9327, 9328, 9329, 9330, 9331, 9332, 9333, 9334, 9335, 9336, 9337, 9338, 9339, 9340, 9341, 9342, 9343, 9344, 9345, 9346, 9348, 9349, 9351, 9352, 9353, 9354, 9355, 9356, 9357, 9359, 9360, 9361, 9362, 9363, 9364, 9365, 9366, 9367, 9368, 9369, 9370, 9371, 9372, 9373, 9374, 9375, 9376, 9377, 9378, 9379, 9380, 9381, 9382, 9383, 9384, 9385, 9386, 9387, 9388, 9389, 9390, 9391, 9392, 9393, 9395, 9396, 9397, 9398, 9399, 9400, 9401, 9402, 9403, 9404, 9405, 9406, 9407, 9408, 9409, 9410, 9411, 9412, 9413, 9414, 9415, 9416, 9417, 9418, 9419, 9420, 9421, 9422, 9423, 9424, 9425, 9426, 9427, 9428, 9429, 9430, 9431, 9432, 9433, 9435, 9436, 9437, 9438, 9439, 9440, 9441, 9442, 9443, 9444, 9446, 9447, 9448, 9450, 9451, 9452, 9453, 9454, 9455, 9456, 9457, 9458, 9459, 9460, 9461, 9462, 9463, 9464, 9465, 9466, 9467, 9468, 9469, 9470, 9471, 9472, 9473, 9474, 9475, 9477, 9478, 9479, 9480, 9482, 9483, 9484, 9485, 9486, 9487, 9488, 9489, 9490, 9491, 9492, 9493, 9494, 9496, 9497, 9498, 9499, 9500, 9501, 9502, 9505, 9506, 9510, 9511, 9512, 9513, 9514, 9515, 9516, 9517, 9518, 9519, 9520, 9521, 9522, 9523, 9524, 9525, 9526, 9527, 9528, 9529, 9531, 9532, 9534, 9535, 9536, 9537, 9538, 9539, 9540, 9541, 9542, 9543, 9544, 9545, 9546, 9547, 9548, 9550, 9551, 9552, 9553, 9554, 9556, 9557, 9558, 9559, 9560, 9561, 9562, 9563, 9564, 9565, 9567, 9568, 9569, 9570, 9571, 9572, 9573, 9575, 9576, 9577, 9578, 9579, 9580, 9581, 9582, 9583, 9584, 9585, 9586, 9587, 9589, 9590, 9591, 9592, 9593, 9594, 9595, 9596, 9597, 9598, 9599, 9600, 9601, 9602, 9603, 9604, 9605, 9606, 9607, 9608, 9609, 9610, 9611, 9612, 9613, 9614, 9615, 9617, 9618, 9619, 9620, 9621, 9622, 9623, 9624, 9625, 9626, 9627, 9628, 9629, 9630, 9631, 9632, 9633, 9634, 9635, 
9636, 9637, 9638, 9639, 9640, 9641, 9642, 9643, 9644, 9645, 9646, 9647, 9648, 9649, 9650, 9651, 9652, 9653, 9654, 9655, 9656, 9657, 9658, 9659, 9660, 9661, 9662, 9663, 9664, 9665, 9666, 9667, 9668, 9669, 9670, 9671, 9672, 9673, 9674, 9675, 9676, 9677, 9678, 9679, 9680, 9681, 9682, 9683, 9684, 9685, 9686, 9687, 9688, 9689, 9690, 9691, 9692, 9693, 9694, 9695, 9696, 9697, 9698, 9699, 9700, 9701, 9702, 9703, 9704, 9705, 9707, 9708, 9709, 9710, 9711, 9712, 9713, 9714, 9715, 9716, 9717, 9718, 9719, 9720, 9721, 9722, 9723, 9724, 9725, 9726, 9727, 9728, 9729, 9730, 9731, 9732, 9733, 9734, 9736, 9737, 9739, 9740, 9741, 9742, 9743, 9744, 9745, 9746, 9747, 9748, 9749, 9750, 9751, 9752, 9754, 9756, 9758, 9760, 9761, 9762, 9763, 9764, 9766, 9767, 9768, 9769, 9770, 9771, 9772, 9774, 9778, 9780, 9781, 9782, 9783, 9784, 9785, 9786, 9787, 9788, 9789, 9790, 9791, 9792, 9793, 9794, 9795, 9796, 9797, 9798, 9799, 8523, 8523, 8523, 8719, 9043, 9800, 9801, 9802, 9803, 9804, 9805, 8544, 8544, 8544, 9806, 9807, 9808, 9809, 9810, 9811, 9185, 9189, 9194, 9200, 9206, 9812, 9813, 9814, 9815, 9816, 9817, 9818, 9819, 9820, 9821, 9823, 9824, 9825, 9826, 9827, 9828, 9829, 9831, 9832, 9834, 9835, 9836, 9837, 9838, 9839, 9840, 9841, 9842, 9844, 9845, 9846, 9847, 9848, 9849, 9850, 9851, 9852, 9856, 9860, 9864, 9867, 9869, 9870, 9871, 9872, 9873, 9874, 9876, 9877, 9878, 9879, 9880, 9881, 9887, 9888, 8723, 9889, 9890, 9891, 9892, 9893, 9894, 9895, 9896, 9897, 8719, 9901, 9902, 9903, 9904, 9905, 9906, 9907, 9909, 9910, 9911, 9912, 9913, 9914, 9915, 9916, 9919, 9920, 9921, 9504, 9509, 8723, 8723, 9922, 8719, 9923, 8723, 9924, 9925, 9926, 9927, 9928, 9929, 9930, 9931, 9932, 9933, 9934, 9935, 9936, 9937, 9938, 9939, 9940, 9941, 9942, 9943, 9944, 9945, 9946, 9948, 9949, 9950, 9951, 9952, 9953, 9954, 9955, 9956, 9957, 9958, 9959, 9960, 9961, 9962, 9963, 9964, 9965, 9966, 9967, 9968, 9969, 9970, 9971, 9972, 9973, 9974, 9975, 9976, 9978, 9979, 9980, 9981, 9982, 9983, 9984, 9985, 9986, 9987, 9988, 9989, 9990, 
9993, 9994, 9995, 9996, 9997, 9998, 9999, 10000, 10001, 10002, 10003, 10004, 10005, 10006, 10007, 10008, 10009, 10010, 10011, 10012, 10013, 10014, 10015, 10016, 10017, 10018, 10019, 8879, 8878, 9773, 10022, 10023, 8879, 8878, 8881, 8880, 8879, 8878, 8881, 8880, 9779, 10024, 10025, 9947, 10026, 10027, 10020, 10028, 10029, 10031, 10032, 8879, 8878, 9830, 10033, 10034, 8879, 8878, 10020, 10035, 10036, 8879, 8878, 10020, 10037, 10038, 8879, 8878, 8881, 8880, 8879, 8878, 8881, 8880, 9868, 10039, 10040, 8879, 8878, 10020, 10041, 10042, 10043, 10044, 10045, 10046, 10047, 8878, 8879, 10049, 10050, 10052, 10053, 8878, 8879, 8879, 8878, 10055, 10056, 10057, 10058, 8878, 8879, 8879, 8878, 10060, 10061, 10062, 10063, 10064, 10065, 10066, 8853, 8853, 8853, 10067, 8879, 8878, 9947, 10068, 10069, 8879, 8878, 10020, 10070, 10071, 10072, 10073, 10074, 10075, 10076, 10077, 10078, 10079, 10080, 10081, 8888, 8888, 10083, 10084, 10085, 10086, 10087, 10088, 8915, 10089, 10090, 10091, 8915, 8915, 10089, 10092, 10093, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 10131, 10133, 10136, 10138, 10142, 10146, 10157, 10160, 10162, 10165, 10187, 10189, 10191, 10193, 10195, 10197, 10203, 10207, 10209, 10219, 10222, 10225, 10236, 10241, 10243, 10256, 10260, 10263, 10265, 10273, 10280, 10282, 10294, 10296, 10301, 10303, 10306, 10311, 10314, 10317, 10320, 10327, 10329, 10331, 10333, 10335, 10344, 10361, 10363, 10365, 10367, 10371, 10374, 10376, 10378, 10382, 10384, 10386, 10399, 10401, 10409, 10411, 10413, 10417, 10420, 10422, 10424, 10426, 10428, 10432, 10436, 10438, 10440, 10442, 10446, 10448, 10451, 10454, 10456, 10458, 10460, 10463, 10465, 10467, 10469, 10471, 10474, 10476, 10478, 10481, 10484, 10486, 10488, 10491, 10493, 10495, 10498, 10500, 10516, 10518, 10522, 10524, 10529, 10531, 10533, 10538, 
10541, 10543, 10545, 10547, 10551, 10557, 10559, 10561, 10565, 10568, 10572, 10574, 10576, 10580, 10584, 10590, 10592, 10594, 10597, 10599, 10602, 10604, 10606, 10608, 10610, 10614, 10616, 10618, 10622, 10624, 10626, 10628, 10630, 10632, 10636, 10638, 10640, 10643, 10645, 10648, 10650, 10652, 10659, 10661, 10666, 10668, 10670, 10673, 10675, 10678, 10680, 10682, 10685, 10688, 10690, 10692, 10695, 10697, 10699, 10701, 10705, 10707, 10709, 10712, 10714, 10718, 10721, 10723, 10725, 10728, 10730, 10734, 10752, 10755, 10757, 10765, 10774, 10776, 10779, 10781, 10789, 10797, 10802, 10805, 10809, 10811, 10813, 10818, 10820, 10822, 10825, 10828, 10831, 10113, 10115, 8965, 9753, 10120, 8746, 8745, 10122, 10124, 8977, 8975, 10130, 8746, 8745, 10141, 8727, 10145, 8727, 10846, 10848, 10850, 10582, 10582, 10578, 10582, 8656, 10785, 10785, 10785, 10292, 8657, 8554, 10285, 10288, 10292, 10407, 10865, 8746, 8745, 8746, 8745, 10407, 10867, 10206, 8657, 8554, 10216, 10214, 8554, 8657, 8554, 10170, 10179, 10170, 10171, 8746, 8745, 10358, 10763, 8604, 8746, 8745, 8746, 8745, 8746, 8745, 10172, 10356, 8554, 8554, 8554, 8554, 8554, 8554, 8554, 8554, 10179, 10174, 10407, 10871, 10175, 10176, 8746, 8745, 10179, 10407, 10873, 10875, 10876, 10877, 10878, 10783, 8658, 8657, 8731, 8732, 8731, 10783, 10783, 10879, 8657, 8657, 8554, 8657, 8554, 10288, 10202, 10206, 10217, 10212, 10217, 10214, 10216, 10217, 10227, 8746, 8745, 10227, 10229, 10231, 8746, 8745, 8658, 8658, 8607, 10398, 8658, 8607, 10235, 10398, 10239, 10404, 10381, 8658, 8607, 8658, 8607, 8746, 8745, 10398, 10586, 10586, 10251, 10252, 10252, 10253, 10254, 10255, 10578, 10582, 10886, 10887, 10888, 10258, 10279, 10275, 10276, 10268, 10269, 10270, 10271, 10275, 10276, 10277, 10278, 10279, 8657, 10285, 8657, 8554, 8746, 8745, 8746, 8745, 10288, 10783, 10783, 8746, 8745, 10292, 10783, 9166, 9164, 9174, 8568, 8728, 8568, 8728, 10844, 8568, 8728, 10309, 10844, 10308, 8568, 8728, 10309, 10844, 8568, 8728, 10309, 10844, 8567, 8568, 8728, 
10309, 10844, 10895, 10896, 10897, 9198, 9196, 10898, 9204, 9202, 10899, 10339, 8657, 10763, 8746, 8745, 8746, 8745, 8746, 8745, 8657, 10341, 10763, 9224, 8746, 8745, 8657, 10358, 8657, 10392, 8600, 8746, 8745, 8746, 8745, 8746, 8745, 8600, 8746, 8745, 8746, 8745, 8657, 10356, 8657, 10358, 8657, 8657, 10763, 8746, 8745, 8746, 8745, 8746, 8745, 10407, 8746, 8745, 10381, 8746, 8745, 10407, 8746, 8745, 8658, 10907, 8658, 8607, 10392, 8746, 8745, 8746, 8745, 8746, 8745, 10398, 8746, 8745, 10407, 8746, 8745, 10404, 8746, 8745, 10407, 8746, 8745, 10910, 10912, 10914, 10919, 10921, 10923, 10928, 10930, 10932, 10943, 10947, 10949, 10408, 10955, 10957, 10959, 10961, 9323, 9321, 10965, 10505, 10503, 10507, 10509, 10968, 10511, 8713, 8723, 9476, 10515, 8656, 10528, 8658, 8657, 9434, 10970, 9445, 9449, 8667, 10556, 10571, 9476, 9481, 10586, 10588, 10751, 9755, 9753, 9759, 9757, 10844, 8746, 8745, 10973, 10975, 10977, 10981, 10984, 10985, 9549, 9555, 10655, 8713, 10717, 8713, 10986, 10664, 8713, 10987, 10989, 10704, 10991, 10717, 8727, 10733, 8727, 9755, 9753, 10844, 8746, 8745, 10785, 10760, 10761, 10763, 9706, 9755, 8728, 8745, 10844, 8746, 8745, 10763, 10751, 9755, 9753, 9759, 9757, 10844, 8746, 8745, 10785, 10760, 10761, 10763, 9755, 8728, 8745, 10844, 8746, 8745, 10785, 10787, 10792, 10834, 9706, 9755, 9753, 10844, 8746, 8745, 10816, 10834, 10836, 9755, 9753, 9759, 9757, 10844, 8746, 8745, 10995, 10998, 11001, 11003, 11005, 11007, 11009, 11016, 11018, 11020, 11022, 11027, 11030, 11032, 11034, 11036, 11038, 11045, 11047, 11049, 11052, 11057, 11059, 11061, 11063, 11068, 11070, 11072, 11075, 11077, 11079, 8844, 8863, 8844, 8844, 11084, 11085, 11074, 11086, 8863, 11089, 11090, 10854, 10940, 11091, 11092, 11093, 11094, 10854, 10940, 11095, 11096, 11097, 8863, 11100, 11103, 10880, 10881, 10903, 10905, 11074, 11108, 11109, 11110, 8844, 8863, 11074, 11113, 11114, 11115, 8863, 8844, 11074, 11118, 11119, 11120, 8863, 8844, 10937, 11123, 11124, 10938, 11125, 11126, 10939, 11127, 
11128, 10940, 11129, 11130, 11131, 8844, 8863, 11074, 11134, 11135, 11136, 8863, 8844, 11139, 11144, 11145, 11000, 8883, 8882, 9947, 8844, 8863, 11146, 11150, 11151, 11000, 8883, 8882, 9947, 8844, 8863, 11074, 11152, 11153, 10020, 8883, 8882, 8863, 8844, 11154, 11158, 11159, 11000, 8844, 8863, 11074, 11160, 11161, 8863, 8844, 11162, 9947, 8883, 8882, 8844, 8863, 10020, 8883, 8882, 8863, 8844, 8853, 8853, 8853, 8853, 11169, 11170, 11171, 11173, 11174, 11000, 11175, 8863, 11074, 11178, 11179, 11180, 8863, 8883, 8882, 9947, 11183, 9947, 8883, 8882, 8888, 10020, 8883, 8882, 8888, 11188, 9977, 8883, 8882, 8888, 9991, 8883, 8882, 11193, 10020, 8883, 8882, 8888, 10020, 8883, 8882, 11194, 11195, 11197, 10082, 8916, 8915, 10089, 8916, 11201, 11202, 10089, 8916, 10089, 8916, 11205, 10048, 8916, 8915, 10054, 8916, 11206, 10059, 8916, 8915, 10082, 8916, 8915, 10089, 8916, 8915, 10082, 8916, 8915, 10089, 8916, 11207, 10082, 8916, 8915, 10082, 8916, 8915, 10089, 8916, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 11463, 11464, 11465, 11466, 11467, 11468, 11469, 11470, 11471, 11472, 11473, 11474, 11475, 11476, 10135, 8725, 8724, 11267, 11477, 11478, 8726, 11479, 11480, 8726, 11484, 11289, 11447, 11318, 11485, 11486, 11487, 11442, 11488, 11489, 11490, 11491, 11492, 11493, 11494, 11495, 11496, 11497, 11498, 11500, 11501, 11502, 11503, 11504, 11270, 11271, 9015, 11273, 11506, 10199, 8671, 8670, 11507, 11508, 11509, 11510, 11511, 10199, 8671, 8670, 11512, 10582, 8671, 8670, 11513, 11514, 11515, 11516, 11517, 11518, 11519, 11520, 11521, 11522, 11523, 11524, 11525, 11526, 11527, 11528, 11529, 11530, 10582, 8671, 8670, 11531, 11532, 10582, 8671, 8670, 11533, 11534, 10582, 8671, 8670, 11535, 11536, 11537, 11538, 11539, 11540, 11541, 11543, 11544, 11545, 11546, 11547, 11548, 11554, 11555, 11556, 11557, 11558, 11559, 11560, 11561, 11274, 11563, 10582, 8671, 
8670, 10199, 8671, 8670, 11564, 11565, 11566, 11567, 11568, 11569, 11289, 11570, 11571, 11282, 11572, 11573, 11574, 11442, 11575, 11576, 11283, 11284, 11285, 11577, 11578, 11579, 11580, 11581, 11582, 11583, 11584, 11585, 11586, 11587, 11588, 11589, 11590, 11591, 11592, 9093, 11593, 11594, 9102, 9099, 11595, 11596, 11597, 11598, 11599, 11600, 11601, 11602, 11603, 11604, 11605, 11606, 11607, 11608, 11609, 11610, 11318, 11447, 11442, 11289, 11611, 11612, 11616, 11617, 11290, 11291, 9128, 11618, 11619, 11620, 11621, 11622, 11623, 11293, 11624, 11625, 11626, 11627, 11628, 9144, 9147, 11629, 11630, 11631, 11632, 11633, 11634, 11635, 11636, 11637, 11638, 11639, 11640, 11641, 11642, 11643, 11296, 9162, 11644, 11645, 11298, 9172, 11646, 8731, 11647, 11648, 11649, 11650, 11651, 11652, 11653, 11654, 11655, 11656, 11657, 11658, 11659, 11660, 11661, 11662, 11663, 11664, 11665, 11666, 11667, 11668, 11669, 11301, 11302, 11303, 11304, 11673, 11674, 11676, 11677, 11305, 11306, 10677, 8725, 8724, 11679, 10337, 11680, 11681, 11682, 11683, 11684, 11685, 11686, 11687, 11688, 11689, 11690, 11691, 11692, 11693, 11694, 11695, 11310, 11696, 11697, 11698, 11699, 11700, 11701, 11702, 11703, 11704, 11705, 11706, 11707, 11708, 11709, 11710, 11711, 11712, 11713, 11714, 11715, 11716, 11717, 11718, 11719, 11720, 9246, 10369, 8697, 8702, 11315, 11316, 11721, 11722, 11723, 11724, 11725, 11318, 11726, 11727, 11728, 11729, 11730, 11731, 10388, 8671, 8670, 11732, 11734, 11735, 11736, 11737, 11738, 11739, 11740, 11741, 11742, 11743, 11744, 11745, 11746, 11747, 11748, 11323, 11749, 11750, 11751, 11752, 11753, 11754, 11767, 10415, 8703, 8702, 11327, 11328, 9309, 10430, 8703, 8702, 11333, 11772, 11773, 9326, 10444, 8697, 8702, 11338, 10620, 8697, 8702, 11340, 11341, 9347, 10612, 8697, 8702, 11344, 11345, 9358, 10473, 8725, 8724, 10480, 8725, 8724, 10483, 10490, 8725, 8724, 10497, 8725, 8724, 8717, 10677, 8725, 8724, 11775, 11776, 11777, 11778, 11780, 11781, 11782, 11783, 11784, 10520, 8671, 8670, 11364, 
11785, 8732, 11786, 10535, 8671, 8670, 11787, 11788, 10582, 8671, 8670, 11789, 8731, 10549, 8671, 8670, 11791, 10582, 8671, 8670, 11792, 11793, 11794, 10563, 8671, 8670, 11378, 11379, 11795, 10578, 8671, 8670, 11796, 10582, 8671, 8670, 11797, 8731, 11798, 11799, 11800, 11801, 11802, 11803, 11804, 11805, 11806, 11807, 10596, 8697, 8702, 11388, 10620, 8697, 8702, 11390, 9507, 10612, 8697, 8702, 11395, 10620, 8697, 8702, 11398, 9530, 9533, 10634, 8703, 8702, 11404, 11406, 11814, 11408, 11815, 10654, 8725, 8724, 11816, 11817, 11818, 11819, 10663, 8725, 8724, 11821, 11822, 10672, 8725, 8724, 10677, 8725, 8724, 10684, 8725, 8724, 10687, 10694, 8725, 8724, 8717, 10703, 8725, 8724, 11825, 10711, 8725, 8724, 11434, 11827, 11828, 8726, 10727, 8725, 8724, 11440, 11829, 11830, 8726, 11445, 11831, 11832, 11451, 11833, 11834, 11835, 11442, 11443, 11836, 10783, 11837, 11450, 11838, 11839, 11840, 11841, 11842, 11843, 11451, 11844, 11845, 11846, 11847, 11848, 11849, 11850, 11851, 11852, 11853, 11854, 11855, 11442, 11443, 11856, 10783, 11857, 11450, 11858, 11859, 11445, 11860, 11861, 11862, 11451, 11863, 11864, 11865, 11447, 11448, 11866, 10783, 11867, 11450, 11868, 11869, 11870, 11871, 11872, 11451, 11873, 11874, 11875, 10807, 10804, 11454, 11876, 8732, 8731, 11457, 9738, 9735, 11460, 11461, 11462, 11877, 11878, 11879, 11880, 11881, 11882, 11883, 11884, 11885, 11917, 11918, 11919, 11920, 11923, 11921, 10852, 8881, 8880, 11924, 11925, 11928, 11926, 11929, 11934, 11932, 11935, 11938, 11939, 11940, 11941, 11733, 10993, 8864, 10993, 8864, 10993, 8864, 10993, 8864, 11942, 11943, 11733, 11733, 11944, 11945, 11733, 11946, 10916, 8881, 8880, 11949, 11950, 11951, 11952, 10925, 8881, 8880, 11955, 11956, 11957, 11958, 10934, 8881, 8880, 11961, 11962, 11963, 11964, 11967, 11970, 11973, 11976, 11977, 11978, 11979, 11081, 8881, 8880, 11982, 11983, 11984, 11988, 11986, 10979, 8881, 8880, 11989, 11990, 11991, 11992, 11993, 11997, 11995, 10979, 8881, 8880, 11998, 11999, 12000, 12001, 12002, 12003, 
11024, 8881, 8880, 12006, 12007, 12008, 12009, 12010, 12014, 12012, 10979, 8881, 8880, 12015, 12016, 12017, 12020, 12021, 12023, 12024, 12025, 12026, 12027, 12028, 12029, 12030, 12031, 12032, 12033, 12034, 12035, 10963, 8879, 8878, 12036, 12042, 12040, 10979, 8881, 8880, 12043, 12044, 12045, 11024, 8881, 8880, 12048, 12049, 8879, 8878, 11000, 11011, 8881, 8880, 12050, 12051, 12052, 8879, 8878, 11000, 11011, 8881, 8880, 12054, 12055, 12056, 12057, 11074, 8879, 8878, 11024, 8881, 8880, 12058, 12059, 12060, 12061, 8878, 8879, 11029, 11040, 8881, 8880, 12063, 12064, 12065, 12066, 11051, 8879, 8878, 11081, 8881, 8880, 12067, 12068, 12069, 11074, 8879, 8878, 11065, 8881, 8880, 12071, 12072, 12073, 12074, 11074, 8879, 8878, 11081, 8881, 8880, 12075, 12076, 12077, 11167, 12081, 12082, 12083, 8914, 12084, 12085, 8914, 12087, 8898, 8898, 8909, 12088, 12089, 8915, 12090, 12091, 8914, 12093, 12094, 12095, 8909, 12096, 12097, 8914, 12099, 12100, 12101, 8909, 12102, 12103, 12104, 8914, 12105, 12106, 12107, 11167, 12108, 12109, 12110, 11168, 11172, 12111, 12112, 8915, 8914, 12113, 8911, 12114, 12115, 12116, 8911, 12117, 12118, 12119, 8914, 12120, 12121, 8915, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 12162, 12164, 12169, 12171, 12174, 12175, 12176, 12177, 12180, 12183, 12184, 12185, 12186, 12187, 12188, 12189, 12190, 12191, 12203, 12205, 12208, 12209, 12210, 12211, 12213, 12214, 12215, 12221, 12222, 12223, 12225, 12226, 12227, 12233, 12238, 12240, 12242, 12246, 12247, 12248, 12251, 12252, 12253, 12256, 12257, 12258, 12268, 12273, 12280, 12282, 12283, 12284, 12285, 12286, 12287, 12294, 12297, 12301, 12304, 12305, 12306, 12308, 12313, 12323, 12326, 12327, 12333, 12344, 12345, 12346, 12347, 12348, 12349, 12352, 12353, 12354, 12361, 12367, 12368, 12373, 12375, 12380, 12384, 12385, 12386, 12388, 12389, 12391, 12392, 12394, 12397, 12402, 12406, 12411, 12415, 12416, 12417, 12418, 12419, 12421, 12423, 12424, 12425, 12426, 12427, 12429, 12432, 12434, 12436, 12442, 12446, 12450, 
12452, 12454, 12457, 12459, 12468, 12470, 12472, 12473, 12474, 12475, 12476, 12477, 12478, 12481, 12483, 12485, 12488, 12490, 12491, 12492, 12497, 12499, 12501, 12504, 12507, 12509, 12511, 12514, 12517, 12518, 12519, 12520, 12521, 12522, 12523, 12524, 12525, 12526, 12527, 12529, 12530, 12531, 12532, 12533, 12534, 12535, 12536, 12537, 12538, 12539, 12540, 12541, 12542, 12543, 12544, 12545, 12546, 12547, 12548, 12549, 12550, 12551, 12552, 12553, 12554, 12555, 12556, 12557, 12558, 12559, 12560, 12561, 12562, 12563, 12568, 12572, 12573, 12574, 12575, 12577, 12579, 12580, 12581, 12582, 12584, 12585, 12586, 12588, 12589, 12590, 12591, 12593, 12594, 12595, 12599, 12600, 12601, 12602, 12603, 12605, 12606, 12607, 12609, 12610, 12611, 12613, 12617, 12619, 12621, 12624, 12625, 12626, 12627, 12628, 12629, 12630, 12631, 12632, 12633, 12634, 12635, 12636, 12637, 12638, 12639, 12640, 12641, 12642, 12643, 12644, 12645, 12646, 12647, 12649, 12651, 12652, 12653, 12657, 12658, 12659, 12660, 12662, 12663, 12664, 12665, 12666, 12667, 12668, 12669, 12670, 12671, 12672, 12673, 12674, 12675, 12676, 12677, 12678, 12679, 12681, 12682, 12683, 12684, 12687, 12688, 12689, 12690, 12691, 12694, 12695, 12696, 12698, 12699, 12702, 12703, 12705, 12707, 12711, 12714, 12715, 12720, 12722, 12724, 12727, 12728, 12730, 12732, 12735, 12736, 12739, 12740, 12743, 12744, 12746, 12748, 12752, 12754, 12755, 12758, 12759, 12760, 12762, 12763, 12764, 12765, 12766, 12767, 12768, 12769, 12772, 12774, 12776, 12161, 12168, 12779, 12784, 12785, 12786, 12787, 12788, 12791, 12792, 12794, 12795, 12796, 12798, 12799, 11779, 10967, 12439, 12441, 12445, 8865, 10900, 12449, 11779, 10967, 12578, 12598, 12312, 12706, 12710, 8865, 12731, 8865, 12747, 11779, 10967, 12706, 12710, 8865, 12800, 12578, 12312, 12731, 8865, 12578, 12731, 12706, 12747, 11779, 10967, 12801, 8865, 12802, 12264, 12267, 12616, 12803, 8865, 12804, 11779, 10967, 12439, 12441, 12449, 12445, 8865, 12237, 11779, 10967, 12229, 12710, 8865, 12231, 8865, 12232, 
12616, 12365, 11779, 10967, 12463, 12466, 12444, 12445, 8865, 12438, 12439, 12441, 12447, 12449, 12461, 12465, 12430, 10900, 12235, 12237, 12244, 12245, 11779, 10967, 10900, 12449, 12445, 8865, 12439, 12441, 12264, 12805, 8865, 12806, 12267, 12616, 12343, 12807, 8865, 12808, 12463, 12447, 12466, 12444, 12465, 12430, 12461, 12438, 11779, 10967, 12731, 8865, 12614, 11733, 12578, 12571, 12312, 12706, 12710, 8865, 12747, 12293, 12296, 12299, 12303, 12733, 12749, 12708, 8865, 12604, 12312, 11779, 10967, 12706, 12710, 8865, 12731, 8865, 12811, 12616, 10891, 12337, 10900, 12447, 12461, 12463, 12430, 12444, 12438, 12465, 12466, 11779, 10967, 12430, 10900, 12438, 12447, 12449, 12444, 12445, 12461, 12465, 12466, 12463, 10906, 12351, 10890, 10890, 12351, 11779, 10967, 12366, 10891, 12358, 10900, 10972, 10891, 10993, 8865, 12366, 12614, 12812, 12578, 12571, 12747, 12706, 8865, 12731, 11779, 10967, 12447, 12449, 12438, 12439, 12441, 12463, 12444, 12445, 8865, 12430, 10900, 12465, 12466, 12461, 12598, 12456, 10972, 12401, 10992, 8865, 12401, 10993, 8865, 12410, 10993, 8865, 12410, 10993, 8865, 11779, 10967, 12430, 10900, 12438, 12439, 12441, 12444, 12445, 8865, 12447, 12449, 12456, 12461, 12462, 12463, 12464, 8865, 12465, 12466, 10906, 11779, 10967, 8865, 12731, 8865, 12614, 12815, 12571, 12616, 12747, 8865, 12706, 12710, 8865, 12816, 12817, 12818, 12819, 12820, 12821, 12823, 12824, 12825, 12826, 12827, 12828, 12830, 12831, 12832, 12833, 12834, 12835, 12837, 12838, 12839, 12840, 12841, 12842, 12844, 12845, 12846, 12847, 12848, 12849, 12852, 12853, 12854, 12855, 12856, 12859, 12862, 12863, 12864, 12865, 12866, 12869, 12871, 12872, 12873, 12874, 12875, 12878, 12881, 12882, 12883, 12884, 12885, 12887, 12888, 12890, 12893, 12895, 12898, 11768, 12903, 12904, 12905, 11779, 10967, 12598, 12616, 12908, 12909, 12910, 12911, 12912, 12914, 12915, 12916, 12917, 12918, 12655, 11826, 12710, 12719, 12751, 12771, 12920, 12921, 12922, 12923, 12924, 12925, 12926, 12929, 12930, 12931, 12932, 
12933, 12934, 12935, 12939, 12940, 12941, 12942, 12943, 12944, 12945, 12949, 12950, 12951, 12952, 12953, 12954, 12955, 12959, 12960, 12961, 12962, 12963, 12964, 12965, 12968, 12969, 12970, 12971, 12972, 12973, 12974, 12978, 12979, 12980, 12981, 12982, 12983, 12984, 12987, 12988, 12991, 12992, 12994, 12995, 12996, 12997, 12998, 13001, 12999, 13002, 13004, 13005, 13008, 13009, 13011, 13012, 13015, 13016, 13019, 13020, 13023, 13024, 13027, 13028, 13031, 13029, 13032, 13033, 13034, 13035, 13038, 13039, 13042, 13045, 13043, 122, 123, 124, 125, 126, 127, 13057, 13059, 13060, 12179, 12182, 13066, 13070, 13071, 13072, 13080, 13083, 13086, 13093, 13096, 13099, 13105, 13108, 13120, 13127, 13128, 13158, 12428, 13175, 13185, 13196, 13202, 13208, 13212, 13218, 13224, 13227, 13231, 13234, 13238, 13243, 12576, 13248, 13252, 13256, 13259, 13262, 13267, 13270, 13276, 13277, 13281, 13286, 13290, 13296, 13302, 13306, 13310, 13313, 13316, 13320, 13324, 13327, 12686, 13332, 12693, 13340, 12704, 13345, 13347, 13350, 12729, 13356, 13358, 12745, 13365, 13366, 12761, 13372, 13379, 13380, 8864, 13381, 8865, 13384, 13389, 13391, 11672, 11671, 11670, 11678, 11675, 13242, 13395, 13396, 12438, 13397, 13398, 8796, 8864, 12461, 12444, 13399, 13166, 10992, 13400, 8864, 13401, 8864, 10901, 8865, 13402, 8864, 10902, 8865, 12463, 13139, 13131, 13142, 13223, 13242, 13403, 13404, 10966, 13405, 13406, 13407, 8865, 8864, 10972, 13132, 13408, 13409, 10993, 13410, 8864, 13116, 13411, 13355, 10992, 13412, 8864, 13413, 13133, 13134, 13142, 13223, 13242, 13414, 13415, 12196, 13416, 13417, 10993, 13418, 8864, 13420, 13421, 8864, 10972, 8865, 12200, 12201, 13422, 13355, 10992, 13423, 8864, 13201, 13424, 13425, 13426, 13427, 13139, 13142, 13223, 13242, 13428, 13429, 10966, 13431, 13433, 13434, 13435, 10972, 8865, 8864, 13437, 11672, 11671, 11670, 13078, 13217, 13223, 13242, 13439, 13440, 12212, 13441, 13442, 8796, 8864, 13443, 8864, 10902, 8865, 12218, 13444, 13355, 10992, 13445, 8864, 13446, 8864, 10870, 8865, 
13139, 13131, 13142, 13223, 13242, 13447, 13448, 10966, 13132, 13449, 13450, 10993, 13451, 8864, 13116, 13452, 13166, 10992, 13453, 8864, 13454, 13455, 10972, 8865, 8864, 13456, 11672, 11671, 11670, 11678, 11675, 13217, 13242, 13457, 13458, 13459, 13460, 13461, 13462, 13166, 10992, 13463, 8864, 13464, 13465, 13466, 8796, 8864, 13467, 13468, 8864, 10902, 8865, 13469, 13470, 13471, 13472, 8864, 10901, 8865, 13473, 13474, 8864, 10870, 8865, 13475, 13476, 11672, 11671, 11670, 11678, 11675, 13217, 13223, 13242, 13477, 13478, 13479, 8864, 10901, 8865, 12250, 13480, 8864, 10902, 8865, 12260, 13481, 13166, 10992, 13482, 8864, 12261, 13483, 13484, 8796, 8864, 12262, 13485, 13487, 13489, 13490, 10972, 8865, 8864, 13491, 13493, 13495, 13496, 13497, 13498, 13499, 13500, 13501, 13502, 13133, 13134, 13142, 13223, 13242, 13503, 13504, 10966, 13352, 13505, 13355, 10992, 13506, 8864, 13507, 13508, 13509, 13510, 13511, 8865, 8864, 10972, 13342, 13512, 13513, 10993, 13514, 8864, 13515, 11672, 11562, 12292, 13516, 12295, 13517, 12298, 13518, 12300, 12302, 13519, 13116, 13520, 13521, 13522, 10993, 13523, 8864, 13524, 13525, 8865, 8864, 10972, 12321, 12321, 12322, 13174, 13119, 13217, 13223, 13242, 13526, 13527, 12325, 13528, 13529, 10993, 13530, 8864, 13531, 13355, 10992, 13532, 8864, 13534, 8864, 8865, 10972, 12335, 12341, 13535, 13536, 12338, 12339, 13537, 12341, 12343, 12444, 12461, 12463, 12438, 13538, 13539, 13540, 13541, 13542, 13543, 13544, 13545, 11672, 11671, 11670, 11678, 11675, 13217, 13223, 13242, 13546, 13547, 13548, 13549, 8864, 10901, 8865, 13550, 13551, 13552, 8864, 10902, 8865, 13553, 13554, 13166, 13555, 13556, 13557, 13558, 13131, 12350, 13559, 13560, 13132, 13561, 13562, 13563, 13139, 13131, 13142, 13223, 13242, 13564, 13565, 10966, 12356, 13566, 13567, 12357, 13568, 11790, 12360, 13569, 13570, 8865, 8864, 13132, 12363, 13571, 13572, 13573, 8864, 12365, 13574, 13133, 13134, 13575, 13577, 13578, 8865, 8864, 10972, 12377, 13579, 12382, 13580, 10993, 13581, 8864, 
12382, 13582, 11672, 11671, 11670, 11678, 11675, 13217, 13223, 13242, 13583, 13584, 13585, 13586, 8864, 10902, 8865, 13587, 13588, 13589, 8796, 8864, 13590, 13591, 13592, 13166, 10992, 13593, 8864, 13594, 13595, 8864, 10901, 8865, 13596, 13597, 13598, 13139, 13207, 13142, 11790, 12604, 13599, 13600, 13601, 8865, 8864, 13352, 12733, 13602, 13603, 13604, 8864, 13605, 13606, 13607, 8864, 13342, 12708, 13608, 13609, 13610, 8864, 13611, 13612, 13613, 8864, 11672, 11671, 11670, 11678, 11675, 13217, 13223, 13242, 13614, 13615, 13616, 13617, 8864, 10901, 8865, 13618, 13619, 13620, 8796, 8864, 13621, 13622, 13166, 10992, 13623, 8864, 13624, 13625, 8864, 10902, 8865, 13626, 8864, 10993, 8865, 13627, 13628, 13629, 13630, 8797, 13631, 13632, 13633, 13634, 8864, 10993, 8865, 13174, 13207, 13217, 13223, 13242, 13635, 13636, 10966, 10993, 13637, 8864, 12484, 13638, 13355, 10992, 13639, 8864, 13640, 13642, 13643, 8864, 10972, 8865, 12503, 13644, 10993, 13645, 8864, 12510, 13646, 13647, 10993, 13648, 8864, 13649, 13650, 13655, 13656, 13661, 13662, 13667, 13668, 13669, 13670, 13673, 13674, 13680, 13683, 13686, 13689, 13691, 13692, 13695, 13698, 13702, 13704, 13706, 13708, 13709, 13201, 13207, 13217, 13223, 13242, 13712, 13713, 10966, 12571, 11790, 13714, 12604, 12614, 13715, 8865, 8864, 13717, 13721, 13722, 13285, 13295, 13295, 12650, 12648, 13726, 13309, 13305, 13309, 13727, 13337, 8865, 8864, 13342, 12708, 13728, 8865, 13729, 8865, 8864, 13352, 12733, 13355, 8865, 13360, 12749, 13730, 8865, 8864, 13376, 13731, 8865, 8864, 13732, 13735, 13738, 13739, 13742, 13745, 13746, 13749, 13752, 13753, 13756, 13759, 13760, 13763, 13766, 13767, 13770, 13773, 13774, 13777, 13780, 13782, 12781, 12782, 13784, 12789, 12797, 12919, 13791, 13705, 13707, 13792, 13654, 13660, 13666, 13672, 13678, 13794, 13796, 13798, 13800, 13802, 13804, 13808, 12913, 12919, 13812, 13814, 13817, 13786, 13810, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 
37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 13826, 13833, 13834, 13835, 13836, 13837, 13838, 13839, 13840, 13844, 13846, 13847, 13848, 13849, 13850, 13851, 13852, 13853, 13854, 13855, 13856, 13857, 13858, 13860, 13861, 13862, 13863, 13864, 13865, 13866, 13868, 13869, 13870, 13871, 13872, 13873, 13874, 13875, 13876, 13877, 13878, 13879, 13880, 13882, 13895, 8796, 13899, 8797, 13901, 13827, 13828, 13902, 13905, 13906, 13907, 13908, 13909, 13910, 10966, 12466, 13913, 13916, 13917, 13918, 13919, 13921, 13922, 13924, 12430, 13926, 13927, 13928, 12465, 12447, 13930, 13931, 13932, 13933, 13934, 13935, 13936, 13937, 13883, 13938, 13941, 13945, 13946, 13947, 13948, 13951, 13953, 13954, 13956, 13957, 13959, 13961, 13962, 13963, 13964, 13883, 13965, 10966, 13968, 13971, 13973, 13976, 13977, 13978, 13979, 13980, 13982, 13983, 13985, 13986, 13991, 13992, 13993, 13883, 13994, 13997, 13430, 14002, 14003, 14004, 13436, 14006, 14007, 14008, 14009, 14010, 14011, 13883, 14012, 10966, 14015, 14018, 14019, 14021, 14022, 14023, 14024, 12219, 14026, 14027, 14029, 14031, 14032, 14033, 14034, 14035, 14036, 14037, 13883, 14038, 14041, 14042, 14045, 14047, 14048, 14050, 14051, 14053, 14056, 14057, 14058, 14060, 14061, 14062, 14063, 14064, 14065, 13883, 14066, 10966, 14073, 14074, 14076, 14080, 14081, 14084, 14085, 14086, 14091, 14092, 14093, 14096, 14097, 14098, 14101, 14102, 14103, 14104, 14105, 14106, 14107, 13883, 14108, 10966, 14112, 14113, 14114, 14115, 14117, 14118, 14119, 14120, 14122, 14123, 14125, 14126, 14129, 14130, 14131, 13486, 14136, 14137, 14138, 13492, 14149, 14150, 14151, 14152, 13883, 14153, 14156, 14157, 14159, 14160, 14162, 14168, 
14169, 14170, 14171, 14174, 14176, 14178, 14179, 14180, 14182, 14184, 14186, 14187, 14189, 14193, 14195, 14198, 14199, 14200, 14201, 14202, 14203, 14204, 14205, 14206, 14207, 13883, 14208, 10966, 14211, 14214, 14216, 12328, 14218, 14219, 14221, 14223, 14224, 14225, 14226, 14227, 12340, 12336, 14230, 14231, 14233, 12340, 14234, 12342, 12447, 14235, 14236, 14237, 12466, 14238, 12465, 12430, 14247, 14248, 14249, 14250, 14251, 14252, 14253, 14254, 10966, 14259, 14260, 14261, 14265, 14266, 14267, 14270, 14275, 14276, 14279, 14283, 14284, 14285, 14286, 13883, 14287, 14290, 14291, 12355, 14294, 14296, 12359, 14297, 14300, 14301, 14302, 14303, 12362, 14307, 14305, 14308, 12364, 14310, 14311, 14315, 14316, 14317, 14318, 14320, 14322, 14324, 14325, 14327, 14328, 14329, 14330, 14331, 14332, 14333, 14334, 10966, 14339, 14340, 14341, 14345, 14346, 14350, 14351, 14353, 14356, 14357, 14358, 14362, 14363, 14364, 14365, 14366, 12578, 14370, 14371, 14372, 14373, 12731, 14377, 14375, 14381, 14379, 14382, 14383, 12706, 14387, 14385, 14391, 14389, 14392, 14393, 14394, 14395, 14396, 14397, 14398, 14399, 10966, 14404, 14405, 14406, 14410, 14411, 14414, 14415, 14417, 14420, 14421, 14422, 14424, 14425, 14426, 14431, 14436, 14437, 14438, 14439, 14440, 14441, 14442, 13883, 14443, 14446, 14447, 14449, 14450, 14452, 14453, 14455, 14459, 14460, 14461, 14462, 14464, 14466, 14467, 14470, 14472, 14474, 14476, 14478, 14484, 14485, 14487, 14490, 14492, 14497, 14498, 14499, 14500, 14501, 14502, 14505, 14506, 12578, 14507, 14509, 14510, 10972, 14512, 14513, 14514, 14516, 14517, 14518, 14519, 14520, 14521, 14523, 14524, 14525, 13881, 13883, 14527, 10993, 14528, 14529, 14530, 14531, 12706, 10993, 14533, 8864, 10993, 14535, 14536, 14537, 14538, 12731, 14539, 10992, 14540, 8864, 14541, 14542, 12747, 10993, 14544, 14545, 13368, 13374, 14546, 10994, 14548, 14549, 14550, 14551, 14553, 14554, 14556, 14557, 14559, 14560, 14562, 14563, 14565, 14566, 14568, 14569, 13382, 13703, 14572, 14573, 14575, 13904, 13903, 
14576, 14577, 12958, 12070, 12977, 12078, 14579, 14580, 14430, 12814, 12813, 14072, 14078, 12814, 12813, 14428, 12814, 12813, 14430, 12814, 12813, 12814, 12813, 14269, 12814, 12813, 14428, 14408, 12814, 12813, 14428, 14430, 12814, 12813, 14269, 14408, 12814, 12813, 12814, 12813, 14408, 12814, 12813, 14269, 14428, 12814, 12813, 14430, 12814, 12813, 14343, 14430, 14349, 12814, 12813, 12814, 12813, 14428, 12814, 12813, 14408, 14413, 12814, 12813, 14428, 14430, 12814, 12813, 14582, 14583, 14584, 14482, 14480, 14585, 14586, 13684, 13690, 13696, 13701, 13703, 13705, 13707, 12900, 12901, 12902, 12906, 12037, 12038, 12039, 14594, 14595, 12938, 12938, 12948, 12958, 12070, 12977, 12078, 14571, 14574, 14599, 14596, 14597, 14578, 14581, 14587, 14588, 14589, 14590, 14591, 14592, 14597, 14593, 14600, 14596, 14597, 14598, 121, 122, 123, 124, 125, 126, 127, 14765, 14767, 13063, 14769, 14770, 13199, 14772, 13205, 14775, 13237, 10990, 11774, 10988, 13230, 14778, 14777, 14779, 14781, 14786, 14788, 14789, 14792, 14793, 14794, 13114, 13115, 13215, 13141, 13293, 13221, 13335, 14802, 13237, 11774, 10990, 13230, 10988, 14803, 13940, 13246, 12596, 12592, 14805, 14809, 14813, 13114, 13115, 13215, 13141, 13293, 13221, 13335, 14819, 13230, 10990, 10988, 11553, 13237, 14821, 14820, 14823, 12198, 12197, 12199, 12255, 12465, 14825, 14831, 13076, 13215, 13141, 13293, 13221, 13335, 14837, 13237, 10988, 11774, 13230, 10990, 14838, 13996, 14840, 13265, 14841, 14844, 13076, 14845, 13077, 13215, 13079, 13293, 13221, 13335, 14851, 11774, 13237, 13230, 10988, 10990, 14853, 14852, 14855, 12217, 12216, 14857, 14861, 14863, 12220, 12288, 12228, 12224, 14865, 13129, 13130, 13215, 13141, 13293, 13221, 13335, 14872, 10988, 13237, 11774, 10990, 13230, 14873, 14040, 14876, 14880, 13265, 14882, 13150, 14885, 13205, 14888, 13215, 13156, 13335, 14891, 10988, 11774, 13237, 10990, 13230, 14893, 14892, 14895, 14897, 14899, 14902, 14905, 13199, 14908, 13205, 14911, 13215, 13156, 13293, 13157, 13335, 14915, 13230, 
13237, 11553, 10990, 10988, 14917, 14916, 12249, 14918, 12254, 12255, 12259, 14922, 14927, 14930, 14933, 14934, 14937, 13114, 13115, 13215, 13141, 13293, 13221, 13335, 14942, 13237, 10988, 10990, 13230, 11553, 14943, 14155, 14947, 12587, 13103, 13246, 14949, 14953, 14955, 12291, 12281, 12289, 12288, 12466, 12291, 12290, 13114, 13115, 14963, 14965, 12320, 12315, 12317, 12316, 12318, 12320, 12319, 13129, 13130, 13215, 13211, 13293, 13221, 13335, 14975, 11774, 10988, 10990, 13237, 13230, 14977, 14976, 14979, 14981, 14983, 12330, 12329, 12332, 12331, 14985, 13129, 13130, 13215, 13141, 14990, 14991, 14995, 14997, 14998, 15002, 15004, 15005, 15006, 15009, 13215, 13156, 13293, 13157, 10990, 11774, 13237, 10988, 13230, 15014, 15013, 15015, 15018, 13215, 13211, 13129, 13130, 13215, 13141, 13293, 13221, 13335, 15029, 10988, 10990, 13230, 13237, 11774, 15030, 14289, 15033, 13265, 12596, 12592, 12587, 12390, 13246, 15036, 14299, 15042, 15044, 15046, 12372, 12369, 12370, 12372, 12371, 15049, 15054, 13199, 15057, 13205, 15060, 13215, 13156, 13293, 13157, 10990, 13237, 13230, 10988, 11774, 15065, 15064, 15066, 15069, 15072, 15074, 13215, 13141, 12587, 12390, 13265, 12596, 12592, 13246, 15082, 14369, 15087, 15089, 15091, 15094, 15096, 15098, 13150, 15099, 13205, 15102, 13215, 13156, 13293, 13157, 10990, 10988, 11774, 13230, 13237, 15107, 15106, 15108, 15111, 15114, 15116, 15119, 15122, 15123, 13199, 13205, 13215, 13211, 13293, 13221, 13335, 15130, 10988, 10990, 11774, 13237, 13230, 15131, 14445, 15133, 15137, 12495, 12493, 12495, 12494, 15139, 15143, 15146, 13199, 13205, 13215, 13211, 13293, 13221, 10988, 10990, 13237, 13230, 11774, 15161, 14504, 12612, 12570, 13246, 15164, 12587, 13251, 12596, 12592, 13265, 12612, 12608, 15168, 13284, 13280, 13293, 13289, 13299, 15176, 13319, 10988, 11824, 13323, 10990, 11824, 10988, 13319, 13323, 10990, 13319, 10990, 13323, 10988, 11824, 13330, 15181, 13335, 15182, 15184, 15189, 15190, 15192, 15193, 15198, 15200, 15202, 15205, 15206, 15209, 
13371, 15210, 15212, 15216, 15229, 15230, 15171, 15172, 14771, 15234, 15235, 15172, 15222, 15238, 15224, 15239, 15226, 15240, 15228, 15241, 13914, 14428, 13920, 14430, 13949, 13955, 13960, 13969, 14319, 13981, 13988, 13989, 13990, 13999, 14016, 14430, 14428, 14043, 14049, 14059, 15244, 15245, 15246, 15247, 15248, 15249, 15250, 15251, 15252, 15253, 14094, 14094, 14099, 14100, 14430, 14121, 14127, 14428, 14132, 14139, 15254, 15255, 15256, 15257, 15258, 15259, 15260, 15261, 15262, 15263, 14158, 14172, 14177, 14181, 14185, 14183, 14188, 14188, 14190, 14191, 14192, 14326, 14321, 14319, 14212, 14463, 14269, 14428, 14430, 14408, 15264, 15265, 15266, 15267, 15268, 15269, 15270, 15271, 15272, 15273, 15274, 15275, 15276, 15277, 15278, 15279, 15280, 15281, 15282, 15283, 14319, 14321, 14326, 15284, 15285, 15286, 15287, 15288, 15289, 15290, 15291, 15292, 15293, 15294, 15295, 15296, 15297, 15298, 15299, 15300, 15301, 15302, 15303, 14451, 14463, 14468, 15148, 15149, 15150, 15307, 15308, 15151, 15152, 15311, 15153, 15312, 15154, 15313, 15155, 15314, 15220, 15315, 15171, 15316, 15172, 15317, 15216, 15318, 15319, 15320, 15222, 15321, 15224, 15322, 15226, 15323, 15228, 15324, 15171, 15172, 15216, 15327, 15218, 15328, 15220, 15329, 15222, 15330, 15224, 15331, 15226, 15332, 15228, 15333, 15334, 15335, 15337, 15338, 15339, 15340, 15341, 15342, 15343, 15344, 15345, 15346, 15347, 15348, 15350, 15351, 15352, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 15360, 15361, 15362, 15365, 15366, 15367, 15369, 15370, 15371, 15372, 15373, 13912, 
15378, 15380, 15383, 15384, 15385, 15386, 15387, 15388, 15389, 15390, 15392, 15393, 15394, 15395, 15396, 15397, 15399, 15400, 15401, 15402, 15403, 15404, 15405, 15406, 15407, 15408, 15409, 15410, 15411, 15413, 15414, 15415, 15416, 15417, 13967, 15420, 15421, 15422, 15423, 15424, 15425, 15426, 15427, 15428, 15429, 15430, 15431, 15432, 15433, 15435, 15436, 15437, 15438, 15439, 15440, 15443, 15444, 15446, 15447, 15448, 15449, 15450, 15451, 15452, 15453, 15455, 15456, 15457, 15458, 15459, 14014, 15463, 15464, 15465, 15467, 15468, 15469, 15470, 15471, 15472, 15473, 15474, 15475, 15476, 15477, 15478, 15479, 15481, 15482, 15483, 15484, 15485, 15486, 15488, 15489, 15490, 15491, 15492, 15493, 15494, 15496, 15497, 15498, 15500, 15501, 15502, 15503, 15504, 14068, 15507, 15509, 15510, 15511, 15512, 15513, 15514, 15516, 15517, 15518, 15519, 15520, 15522, 15523, 15524, 15525, 15526, 14110, 15529, 15530, 15531, 15532, 15533, 15534, 15535, 15538, 15540, 15541, 15542, 15543, 15544, 15545, 15546, 15548, 15549, 15550, 15551, 15552, 15553, 15555, 15556, 15557, 15558, 15559, 15560, 15562, 15563, 15564, 15565, 15566, 15567, 15568, 15569, 15570, 15571, 15572, 15573, 15574, 15575, 15576, 15577, 15578, 15579, 15580, 15581, 15582, 15583, 15584, 15585, 15586, 15588, 15589, 15590, 15591, 15592, 14210, 15595, 15597, 15598, 15599, 15600, 15601, 15602, 15603, 15604, 15605, 15606, 14989, 14994, 14996, 15615, 15617, 15618, 15619, 15620, 15621, 15622, 15623, 15624, 15625, 14256, 15628, 15629, 15630, 15631, 15632, 15633, 15634, 15635, 15636, 15637, 15638, 15640, 15641, 15642, 15643, 15644, 15645, 15032, 15648, 15649, 15650, 15651, 15652, 15653, 15655, 15041, 15045, 15659, 15660, 15661, 15662, 15663, 15664, 15665, 15666, 15667, 15668, 15670, 15671, 15672, 15673, 15674, 15675, 15676, 15677, 15678, 14336, 15681, 15683, 15684, 15685, 15686, 15687, 15688, 15689, 15690, 15691, 15692, 15694, 15086, 15093, 15701, 15702, 15703, 15705, 15706, 15707, 15708, 15709, 15710, 15711, 15712, 15713, 14401, 15716, 
15718, 15719, 15720, 15722, 15723, 15724, 15725, 15726, 15727, 15728, 15729, 15731, 15732, 15733, 15734, 15735, 15736, 15738, 15739, 15740, 15741, 15742, 15743, 15744, 15745, 15746, 15747, 15748, 15749, 15750, 15751, 15752, 15753, 15754, 15755, 15756, 15757, 15758, 15760, 15761, 15762, 15764, 15765, 15766, 15767, 15768, 15769, 15770, 15771, 15772, 15773, 15774, 15775, 15776, 15778, 15779, 15780, 15781, 15782, 15783, 15784, 15785, 15786, 15787, 15788, 15789, 15790, 15791, 15792, 15793, 15795, 15797, 15188, 15799, 15801, 15197, 15803, 15204, 15806, 15808, 15810, 15811, 15814, 15815, 15816, 15817, 15819, 15211, 15820, 15822, 15824, 15826, 12814, 12813, 15828, 15377, 15829, 15830, 12814, 12813, 12814, 12813, 15831, 15832, 15833, 15834, 15835, 15836, 15837, 15838, 15839, 15840, 15442, 15841, 15445, 15842, 15462, 15843, 15844, 14025, 15845, 15846, 15847, 15849, 15508, 15853, 15856, 15858, 15859, 15860, 15861, 15862, 15863, 15864, 15536, 15865, 15866, 15537, 15867, 15539, 15869, 15871, 15874, 15878, 15879, 15880, 15881, 15882, 15883, 15884, 15885, 15886, 15887, 15888, 15889, 15890, 15891, 15892, 14217, 15893, 15699, 12814, 12813, 15894, 15895, 15896, 12814, 12813, 15897, 12814, 12813, 15898, 15902, 15906, 15908, 15911, 15915, 15696, 15699, 15700, 15697, 15696, 15700, 15657, 15697, 15918, 15919, 15920, 15921, 15682, 15926, 15928, 15696, 15697, 15699, 15700, 15931, 15717, 15935, 15721, 15939, 15941, 15942, 15943, 15944, 15945, 15946, 15947, 15949, 15950, 15952, 15954, 15956, 15958, 15960, 15962, 15211, 15964, 15968, 15970, 15972, 15974, 15976, 15977, 15211, 15978, 15980, 15982, 15984, 15986, 15988, 15990, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 
108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 16134, 16136, 15375, 16145, 16147, 16150, 16152, 16157, 16164, 16166, 16169, 16171, 15419, 16176, 16179, 16184, 16186, 16189, 16191, 16200, 16202, 16205, 16207, 15461, 16211, 16215, 16217, 16222, 16224, 16227, 16229, 16240, 16243, 16245, 15506, 16256, 16258, 16261, 16263, 15528, 16277, 16279, 16282, 16284, 16289, 16294, 16296, 16299, 16305, 16307, 16310, 16314, 16316, 16319, 16321, 15594, 16327, 16329, 16334, 16340, 16342, 16344, 16346, 15627, 16352, 16356, 16358, 16361, 16363, 16369, 16371, 16377, 16380, 16387, 16389, 16391, 16393, 15680, 16400, 16402, 16405, 16414, 16416, 16418, 16420, 15715, 16431, 16433, 16436, 16438, 16444, 16446, 16453, 16455, 16457, 16459, 16463, 16466, 16468, 16471, 16473, 16474, 16476, 16479, 16481, 16484, 16486, 16489, 16491, 16496, 16498, 16499, 16501, 16503, 16505, 15777, 15794, 15796, 15166, 15763, 16129, 16128, 16497, 16500, 15158, 15157, 15794, 15364, 15363, 16500, 15763, 15166, 16497, 15158, 15157, 15794, 15796, 16497, 15763, 15166, 16500, 15777, 15794, 15796, 16512, 16504, 15368, 16132, 15794, 15730, 16517, 16518, 16520, 16140, 16523, 16524, 16141, 16525, 16526, 16142, 14799, 14798, 15794, 15391, 13942, 16159, 16160, 16161, 14816, 14815, 15794, 15412, 16175, 13974, 16181, 16182, 15078, 14833, 13987, 15022, 14834, 15794, 15434, 16537, 14000, 16196, 16539, 14848, 16198, 15794, 15454, 16541, 16213, 16544, 16214, 16219, 14869, 14868, 15794, 15480, 16233, 16234, 14054, 16236, 15495, 16238, 15794, 15499, 16249, 16549, 16250, 16251, 16252, 15515, 16254, 15794, 15521, 12814, 12813, 16268, 12814, 12813, 12814, 12813, 16272, 16273, 16559, 15022, 15077, 16562, 14134, 16274, 16564, 14939, 14938, 15794, 15547, 16288, 14165, 16292, 16293, 15127, 15561, 16572, 15022, 15077, 16303, 14196, 16304, 14313, 14972, 14971, 15794, 15587, 16325, 16583, 16326, 16331, 15022, 15077, 16336, 16585, 14992, 15608, 16408, 16337, 16338, 16586, 16587, 16591, 16592, 
16594, 16595, 15616, 16339, 15794, 15730, 16350, 16351, 16443, 15022, 15077, 15693, 15023, 16408, 16602, 16410, 16603, 16604, 16605, 15026, 15025, 15794, 15639, 16367, 16606, 16607, 15034, 15654, 16374, 16375, 16608, 16376, 16609, 15048, 15047, 14313, 16382, 16383, 15669, 16385, 15794, 15730, 16397, 16614, 16398, 16399, 15078, 15077, 15693, 15081, 16408, 16409, 16617, 16618, 16410, 16619, 16620, 15704, 16412, 15794, 15730, 16424, 16622, 16425, 16426, 16427, 16624, 16428, 15127, 15126, 15794, 15730, 16442, 16443, 16448, 16449, 16450, 15158, 15157, 15794, 15796, 16500, 15763, 15166, 16497, 15777, 15794, 15796, 16500, 16497, 16641, 16504, 15158, 15157, 15794, 15796, 16497, 16500, 15166, 15763, 15777, 15794, 15796, 16497, 16500, 16502, 16649, 16504, 15813, 15812, 15813, 15957, 15232, 15231, 15237, 15236, 15233, 15827, 15825, 15823, 15821, 15243, 15242, 15305, 15306, 15304, 15310, 15309, 15959, 15951, 15955, 15953, 15959, 15957, 15963, 15961, 15967, 15965, 15967, 15966, 15975, 15973, 15971, 15969, 15326, 15325, 15983, 15979, 15983, 15981, 15991, 15989, 15987, 15985, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 16768, 16773, 16778, 16785, 16789, 16797, 16800, 16805, 16810, 16814, 16821, 16829, 16835, 16843, 16851, 16856, 16862, 16871, 16873, 16875, 16883, 15160, 15159, 16884, 16885, 16886, 15165, 15167, 15163, 16887, 14508, 16888, 16889, 16890, 16878, 16891, 16880, 15160, 15159, 16892, 16893, 16894, 16895, 16896, 16897, 16880, 15163, 15167, 16898, 15165, 16899, 14508, 16868, 16900, 16878, 15160, 15159, 16901, 16902, 16903, 16904, 16905, 16878, 15167, 15165, 15163, 16906, 14508, 16907, 16868, 16908, 16880, 16909, 15175, 15174, 15173, 16910, 16911, 
16913, 16882, 15012, 15011, 16914, 16915, 16916, 16917, 16918, 16921, 16922, 16924, 16925, 16927, 14801, 14800, 16928, 16929, 16930, 16931, 13943, 16932, 16933, 16934, 16935, 14818, 14817, 16936, 16937, 16938, 16939, 16940, 14314, 16941, 13419, 14312, 16942, 16943, 15129, 15079, 16944, 16945, 16946, 14836, 14835, 16947, 16948, 16949, 16950, 16952, 16953, 14850, 14849, 16955, 16956, 16957, 16958, 12814, 12813, 16960, 16962, 12814, 12813, 12814, 12813, 16963, 14871, 14870, 16964, 16965, 16966, 16967, 16968, 16969, 16970, 16971, 15012, 14890, 16972, 16973, 16974, 16975, 16976, 16978, 16979, 16980, 14099, 14914, 14913, 16981, 16982, 16983, 16984, 16985, 16986, 16987, 16988, 16989, 16990, 16991, 16992, 16993, 15129, 15079, 16995, 16996, 16998, 16999, 14941, 14940, 17001, 17002, 17003, 17004, 17005, 14166, 17006, 14164, 14163, 17007, 17008, 15012, 15128, 17009, 17010, 14185, 12809, 15129, 15079, 17012, 17013, 17014, 17015, 17016, 14314, 17017, 13576, 14312, 14974, 14973, 17018, 17019, 17020, 17021, 17022, 17024, 14457, 13533, 14456, 17025, 15129, 15079, 17026, 17027, 17028, 14993, 17030, 15167, 14229, 17031, 17032, 17033, 17034, 17035, 17037, 17039, 15012, 15011, 17041, 17042, 17043, 17044, 17045, 17046, 17047, 15129, 15079, 17048, 17049, 17050, 14367, 17051, 15163, 15167, 15080, 17052, 17054, 15028, 15027, 17058, 17059, 17060, 17061, 17062, 17065, 15035, 15037, 14295, 17066, 17067, 17068, 17070, 15129, 15079, 17072, 17073, 14314, 17074, 13576, 14312, 17075, 17076, 15063, 15062, 17077, 17078, 17079, 17080, 17081, 17083, 17084, 15129, 15079, 17085, 17086, 17087, 14367, 15163, 17088, 15167, 15080, 17089, 17090, 17093, 15105, 15104, 17096, 17097, 17098, 17099, 17100, 17102, 17103, 17104, 17106, 15129, 15128, 17107, 17108, 17109, 17110, 17111, 17112, 14457, 13641, 14456, 17113, 17114, 17115, 15160, 15159, 17116, 17117, 17118, 17119, 17120, 16880, 17121, 14508, 17122, 15165, 15163, 15167, 16868, 17123, 16878, 17124, 15175, 15174, 15173, 17125, 17126, 17127, 16880, 17128, 
16878, 17130, 16882, 15160, 15159, 17131, 17132, 17133, 17134, 17135, 16878, 17136, 16880, 15165, 17137, 14508, 15167, 17138, 15163, 16868, 17139, 15175, 15174, 15173, 17140, 17141, 16877, 17142, 16878, 16879, 17143, 16880, 17144, 16881, 17146, 16882, 17147, 17148, 17149, 17150, 17151, 17152, 17153, 17154, 17155, 17156, 17157, 17158, 17159, 17160, 17161, 16920, 17105, 16954, 16951, 17105, 16959, 17105, 16977, 17105, 17101, 17105, 16994, 17000, 16997, 17101, 17105, 17101, 17105, 17101, 17105, 17105, 17101, 17101, 17105, 17105, 17082, 17105, 17101, 17162, 17163, 17164, 17165, 17166, 17167, 17168, 17169, 17170, 17171, 17172, 17173, 17174, 17175, 17176, 17177, 17178, 17179, 17180, 17181, 17182, 17183, 17184, 17185, 17186, 17187, 17188, 17189, 17190, 17191, 17192, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 17280, 17281, 17282, 17283, 17284, 17285, 17286, 17287, 17288, 17290, 17291, 17292, 17293, 17294, 17295, 17296, 17297, 17298, 17299, 17301, 17302, 17306, 17307, 17308, 17310, 17311, 17314, 17316, 17317, 17318, 17319, 17322, 17325, 17326, 17327, 17329, 17331, 17332, 17334, 17335, 17336, 17337, 17342, 17343, 17344, 17345, 17347, 17349, 17351, 17353, 17354, 17355, 16912, 17359, 17360, 17361, 17362, 17372, 17373, 17374, 17378, 17383, 17384, 17385, 17390, 17392, 17393, 17396, 17397, 17398, 17401, 17402, 17403, 17409, 17410, 17411, 17415, 17416, 17419, 17420, 17421, 17422, 17424, 17425, 17426, 17434, 17435, 17436, 17444, 17445, 17446, 17447, 17451, 17454, 17456, 17460, 17461, 17462, 17466, 17467, 17468, 17473, 17475, 17476, 17479, 17480, 17481, 17483, 12810, 17484, 17485, 17486, 17487, 17492, 17494, 17495, 17496, 17497, 17498, 17504, 17505, 17506, 17508, 17509, 17510, 17513, 17515, 17516, 17524, 17525, 17526, 17533, 17534, 17535, 17538, 17540, 17541, 17542, 17545, 17546, 17547, 17553, 17554, 17555, 17560, 17561, 17562, 17564, 17566, 
17567, 17570, 17571, 17572, 17579, 17580, 17581, 17584, 17585, 17587, 17588, 17592, 17593, 17594, 17603, 17604, 17605, 17611, 17612, 17613, 17617, 17618, 17619, 17624, 17626, 17628, 17629, 17630, 17631, 17633, 17635, 17636, 17637, 17641, 17643, 17129, 17645, 17646, 17647, 17648, 17653, 17655, 17656, 17658, 17659, 17661, 17662, 17664, 17665, 17666, 17669, 17671, 17672, 17674, 17676, 17145, 17678, 17679, 17681, 17683, 17685, 17688, 17690, 17692, 17369, 17367, 17609, 17602, 17615, 17694, 17695, 17371, 17601, 17382, 17615, 17609, 17381, 17609, 17389, 17615, 17395, 17568, 17569, 17609, 17610, 17615, 17696, 17408, 17610, 17697, 17698, 17615, 17699, 17418, 17609, 17431, 17615, 17433, 17609, 17430, 17609, 17700, 17601, 17615, 17441, 17442, 17440, 17602, 17701, 17702, 17703, 17610, 17609, 17615, 17704, 17459, 17609, 17705, 17615, 17706, 17465, 17615, 17707, 17602, 17531, 17708, 17532, 17609, 17615, 17530, 17709, 17601, 17609, 17472, 17615, 17478, 17615, 17710, 17711, 17609, 17610, 17615, 17610, 17491, 17489, 17609, 17615, 17569, 17610, 17609, 17502, 17615, 17503, 17609, 17029, 17064, 17071, 17063, 17712, 17601, 17609, 17530, 17532, 17531, 17615, 17713, 17602, 17601, 17714, 17531, 17615, 17530, 17609, 17715, 17602, 17532, 17716, 17609, 17532, 17602, 17531, 17530, 17717, 17601, 17615, 17057, 17055, 17056, 17053, 17071, 17063, 17064, 17069, 17569, 17615, 17610, 17609, 17578, 17601, 17577, 17576, 17718, 17615, 17719, 17602, 17609, 17094, 17095, 17092, 17091, 17600, 17615, 17602, 17599, 17601, 17720, 17721, 17598, 17609, 17610, 17609, 17615, 17616, 17722, 17724, 17727, 17729, 17731, 17733, 17735, 17737, 17739, 17741, 17743, 17745, 17747, 17749, 17751, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 
88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 17300, 16462, 17305, 17814, 17309, 17820, 16462, 17825, 17328, 17330, 17831, 16462, 17835, 17837, 17838, 17352, 17842, 15178, 15180, 14522, 14496, 15179, 17846, 16770, 17849, 16155, 17852, 17853, 16780, 17856, 17857, 17859, 17862, 16194, 17865, 16791, 17868, 17870, 17872, 17874, 16232, 17877, 16802, 17881, 16807, 17887, 17890, 16287, 17893, 17894, 17896, 17900, 17902, 17905, 17906, 17908, 16823, 17911, 17914, 17917, 17918, 17920, 16831, 17923, 17537, 17539, 17928, 17930, 16366, 17552, 17934, 17936, 17939, 17940, 17942, 16845, 17945, 17583, 17949, 17950, 17952, 16853, 17955, 16441, 17958, 17961, 16462, 17625, 17627, 17967, 17634, 17972, 15179, 14496, 15180, 15178, 14522, 17978, 16462, 17983, 17984, 17660, 17663, 17989, 14522, 15180, 15179, 15178, 14526, 17819, 17995, 17818, 17991, 17995, 17830, 17991, 17824, 17834, 17995, 17840, 17991, 18001, 17993, 17970, 17991, 17964, 17995, 17845, 18002, 18005, 18006, 18007, 18008, 18009, 18012, 18013, 18014, 18015, 18016, 18017, 18018, 18019, 18020, 18021, 18022, 18023, 18024, 18025, 18026, 18028, 18029, 18032, 18034, 18035, 18036, 18037, 18038, 18039, 18040, 18041, 18043, 18044, 18045, 18046, 18047, 18048, 17601, 17443, 18052, 18053, 18054, 17531, 18050, 17602, 17458, 18056, 18057, 18059, 17453, 17601, 18061, 18062, 18064, 18065, 18067, 18068, 18069, 18070, 18072, 18073, 18074, 18075, 18076, 18077, 18080, 18081, 18078, 18082, 18083, 18084, 18085, 18086, 18087, 18088, 18089, 18090, 18091, 18092, 18093, 18094, 18095, 18096, 18097, 18098, 18100, 18101, 18102, 18103, 18104, 18105, 18107, 18108, 18110, 18111, 18112, 18113, 18115, 18116, 18118, 18119, 18120, 18121, 18122, 18124, 18125, 18126, 18127, 18128, 18129, 18130, 18131, 18132, 18133, 18134, 18135, 18136, 18137, 18138, 18139, 18140, 18141, 18143, 18145, 18146, 18147, 18148, 18149, 18150, 
18151, 18152, 18153, 18154, 18155, 18158, 18159, 18160, 18161, 18162, 18163, 18164, 17995, 17970, 17991, 17964, 17975, 17993, 17995, 17974, 17977, 17991, 18172, 17991, 17995, 17982, 17981, 17992, 17991, 17993, 17995, 17997, 17994, 18177, 15996, 16004, 15992, 15993, 15995, 15994, 15997, 16001, 16000, 15999, 16002, 16004, 16003, 15349, 16007, 16006, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 18304, 18305, 18306, 18309, 18310, 18311, 18314, 18315, 18316, 18319, 18321, 18322, 18323, 18324, 18325, 18326, 18327, 18328, 18329, 18331, 18332, 18333, 18335, 18336, 18337, 18338, 18339, 18343, 18344, 18345, 18346, 18347, 18348, 18349, 18350, 18351, 18352, 18354, 17899, 18356, 18357, 18359, 18360, 18361, 18362, 18363, 18365, 18366, 18367, 18368, 18371, 18372, 18373, 18375, 18376, 18378, 18379, 18380, 18381, 18384, 18385, 18386, 18387, 18388, 18389, 18390, 18391, 18394, 18396, 18397, 18398, 18399, 18400, 18401, 18402, 18403, 18406, 18408, 18409, 18410, 18411, 18412, 18413, 18414, 18415, 18416, 18417, 18418, 18419, 18420, 18421, 18422, 18423, 18424, 18426, 18427, 18428, 18429, 18430, 18431, 18433, 18435, 18437, 18011, 17380, 18440, 18442, 18445, 18448, 18450, 18027, 18454, 17601, 17417, 17423, 18031, 18456, 18458, 18460, 18463, 18464, 18466, 18468, 18470, 18471, 18475, 18472, 18477, 18478, 18482, 18483, 18480, 18060, 18485, 18486, 18066, 18489, 18491, 18494, 18498, 18501, 18503, 18507, 18511, 18515, 18099, 18519, 18521, 18523, 18525, 18526, 18528, 18114, 18117, 18533, 18535, 18123, 18540, 18544, 18547, 18551, 18553, 18142, 18144, 18558, 18562, 18564, 18566, 18157, 18570, 18573, 18574, 18575, 18576, 18577, 18578, 18579, 18580, 18581, 18582, 18583, 18585, 18586, 18587, 18588, 18589, 18590, 18591, 
18592, 18593, 18594, 18596, 18597, 18598, 18599, 15336, 15996, 18600, 18601, 18602, 18603, 18604, 18605, 18606, 16005, 18607, 18608, 18609, 16008, 18610, 18611, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 18689, 18690, 18692, 18693, 18695, 18696, 18698, 18699, 18701, 18704, 18706, 18708, 18712, 18714, 18716, 18718, 18720, 18723, 18726, 18730, 18733, 18735, 18737, 18739, 18740, 18744, 18746, 18748, 18750, 18753, 18754, 18756, 18757, 17639, 18762, 18763, 18765, 18767, 17667, 18770, 18772, 18774, 18776, 18778, 18782, 18784, 18786, 18788, 18790, 18792, 18793, 17394, 18796, 18798, 18800, 18801, 18802, 18805, 18807, 18809, 18811, 18474, 18815, 18816, 18481, 18820, 18822, 18824, 17477, 18828, 17568, 17507, 18833, 18835, 18837, 18839, 18841, 18843, 17568, 18848, 18850, 18853, 18855, 17614, 18860, 18863, 18865, 18867, 18870, 18873, 18875, 18877, 18880, 18883, 18884, 15998, 18888, 18892, 18896, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 18944, 18946, 18948, 18950, 18952, 18953, 18954, 18955, 18956, 18957, 18958, 18959, 18960, 18961, 18963, 18965, 18967, 18969, 18971, 18972, 18973, 18975, 18977, 18978, 18980, 18982, 17817, 18983, 17829, 18985, 17839, 18988, 18991, 18994, 18995, 18996, 18998, 18999, 19001, 19002, 18476, 18814, 19006, 18819, 19010, 19012, 17614, 19013, 19014, 19015, 17518, 19016, 19018, 19020, 17543, 17557, 19022, 19023, 17589, 19025, 19027, 17969, 19029, 17987, 19033, 19038, 19039, 19041, 19042, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19075, 19093, 19096, 19072, 19098, 19073, 
19100, 19074, 19102, 19103, 19077, 19104, 19078, 19079, 18444, 19091, 19080, 19081, 18803, 18804, 19082, 19083, 19111, 19112, 19084, 19114, 19091, 19116, 19085, 18493, 19087, 19118, 19091, 18506, 19086, 18510, 19091, 19122, 19123, 19124, 19087, 19125, 19091, 19126, 19088, 19127, 19091, 19128, 19089, 19129, 19091, 19130, 19090, 19131, 19091, 18569, 19092, 19133, 19134, 19095, 19135, 19136, 19137, 18890, 19139, 19140, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19203, 19099, 19205, 19101, 19207, 18780, 19200, 19210, 19212, 19213, 19214, 19215, 19216, 19217, 19218, 19220, 19221, 19224, 19225, 19226, 19228, 19229, 19230, 18497, 19232, 19233, 19234, 19235, 19236, 18514, 19240, 19242, 19243, 19244, 19245, 19246, 18847, 19248, 19250, 18560, 19252, 19254, 19255, 19256, 18859, 19201, 19259, 19260, 19202, 19040, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 18987, 19334, 19338, 19349, 19351, 19353, 19355, 19357, 19360, 19362, 19364, 18852, 19370, 19372, 19373, 19375, 19376, 19329, 19331, 19241, 19211, 19227, 19249, 19253, 19009, 19105, 19110, 19346, 19223, 19238, 19239, 19119, 19222, 19107, 19342, 18997, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 
121, 122, 123, 124, 125, 126, 127, 19456, 19460, 19463, 19464, 19465, 19467, 19469, 19471, 19473, 19474, 19209, 19475, 19476, 19477, 19461, 19478, 19479, 19480, 19481, 19458, 19468, 19482, 19483, 19484, 19459, 19466, 19485, 19486, 19462, 19487, 19488, 19489, 19490, 19491, 19258, 19261, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19584, 19594, 19598, 19603, 19588, 19604, 19608, 19586, 19587, 19589, 19609, 19612, 19585, 19595, 19599, 19601, 19606, 19613, 19616, 19590, 19618, 19591, 19619, 18882, 19036, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19712, 19716, 19719, 19720, 19721, 19724, 19597, 19717, 19722, 19611, 19726, 19731, 19733, 19265, 19735, 19736, 19264, 19262, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19715, 19718, 19843, 19845, 19725, 19849, 19853, 18879, 18887, 18891, 18895, 19856, 19037, 19857, 14, 15, 
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19968, 19728, 19970, 19971, 19972, 19975, 19976, 19977, 19978, 19980, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20096, 20098, 19974, 20102, 19855, 20104, 20105, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20100, 20225, 20226, 20228, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 
97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20352, 20354, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19377, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20608, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20230, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 
62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20481, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127}; int h_C[]= { 2, 4, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113, 115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 288, 290, 292, 294, 296, 298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342, 344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388, 390, 392, 394, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435, 437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 475, 477, 479, 481, 483, 485, 487, 
489, 491, 494, 496, 498, 500, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527, 529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 560, 562, 564, 566, 568, 570, 573, 575, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619, 621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 641, 643, 645, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665, 667, 669, 671, 673, 675, 677, 679, 681, 683, 685, 687, 689, 691, 693, 695, 697, 700, 702, 705, 707, 709, 711, 713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757, 759, 761, 763, 765, 767, 769, 771, 773, 775, 777, 779, 781, 783, 785, 787, 789, 791, 793, 795, 797, 799, 801, 803, 805, 807, 809, 811, 813, 815, 817, 819, 821, 823, 825, 827, 829, 831, 833, 835, 837, 839, 841, 843, 846, 848, 850, 852, 854, 856, 859, 861, 863, 865, 867, 869, 871, 873, 875, 877, 879, 881, 883, 885, 887, 889, 891, 893, 895, 897, 899, 901, 903, 905, 907, 909, 911, 913, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940, 942, 944, 946, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986, 988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1023, 1025, 1027, 1029, 1031, 1033, 1035, 1037, 1041, 1043, 1045, 1047, 1049, 1051, 1053, 1055, 1057, 1059, 1063, 1065, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1091, 1093, 1095, 1097, 1099, 1101, 1103, 1105, 1107, 1109, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1133, 1135, 1137, 1139, 1142, 1144, 1146, 1148, 1151, 1153, 1157, 1159, 1162, 1164, 1168, 1170, 1172, 1174, 1176, 1178, 1180, 1182, 1185, 1187, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1211, 1213, 1216, 1218, 1221, 1223, 1226, 1228, 1231, 1233, 1239, 1241, 1244, 1246, 1249, 1251, 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1278, 1280, 
1282, 1284, 1286, 1288, 1290, 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330, 1332, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1358, 1360, 1362, 1364, 1368, 1370, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1387, 1389, 1391, 1394, 1396, 1400, 1402, 1405, 1407, 1410, 1412, 1415, 1417, 1420, 1422, 1425, 1427, 1430, 1432, 1435, 1437, 1440, 1442, 1444, 1446, 1448, 1450, 1453, 1455, 1459, 1461, 1463, 1465, 1470, 1472, 1474, 1476, 1480, 1482, 1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 1520, 1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1557, 1559, 1561, 1563, 1565, 1567, 1569, 1571, 1573, 1575, 1577, 1579, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596, 1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634, 1636, 1638, 1640, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1662, 1664, 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1696, 1698, 1702, 1704, 1706, 1708, 1710, 1712, 1714, 1716, 1719, 1721, 1723, 1725, 1727, 1729, 1733, 1735, 1741, 1743, 1745, 1747, 1749, 1751, 1754, 1756, 1759, 1761, 1763, 1765, 1767, 1769, 1772, 1774, 1777, 1779, 1782, 1784, 1787, 1789, 1792, 1794, 1797, 1799, 1801, 1803, 1805, 1807, 1811, 1813, 1815, 1817, 1819, 1821, 1823, 1825, 1827, 1829, 1831, 1833, 1835, 1837, 1839, 1841, 1843, 1845, 1847, 1849, 1851, 1853, 1856, 1858, 1860, 1862, 1866, 1868, 1870, 1872, 1874, 1876, 1863, 1863, 1854, 1854, 1863, 1863, 1922, 1924, 1926, 1928, 1930, 1932, 286, 1477, 1660, 1236, 1236, 1477, 1660, 1730, 1276, 1276, 492, 492, 1236, 1236, 948, 1236, 1236, 1730, 1730, 1738, 1738, 286, 571, 571, 571, 571, 571, 571, 1738, 577, 1236, 1236, 1642, 558, 1236, 1236, 1236, 1236, 1038, 395, 571, 1236, 1236, 1068, 1068, 1131, 1131, 1140, 1140, 1021, 1021, 1642, 1863, 492, 
1236, 1236, 492, 1236, 1236, 501, 501, 1236, 1236, 1068, 1068, 1131, 1131, 492, 492, 1236, 1236, 1068, 1068, 1131, 1131, 501, 501, 1236, 1236, 1089, 1089, 558, 1642, 1642, 1809, 947, 571, 577, 1809, 1236, 1236, 1038, 703, 2286, 2288, 2290, 2292, 2295, 2297, 2299, 2301, 2304, 2306, 2308, 2310, 2313, 2315, 2317, 2319, 2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 2343, 2345, 2347, 2350, 2352, 2354, 2356, 2358, 2360, 1365, 1365, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395, 1236, 1236, 1068, 1068, 1131, 1131, 1236, 1236, 1140, 1140, 1089, 1089, 1140, 1140, 1276, 1276, 1365, 1242, 914, 1700, 1021, 1021, 1038, 1038, 947, 948, 1021, 1021, 1038, 1038, 1693, 1021, 1021, 1730, 1738, 1038, 1038, 1039, 2616, 2618, 2620, 2622, 2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2645, 2647, 2650, 2652, 2654, 2656, 1089, 1089, 1066, 1066, 1068, 1068, 1140, 1140, 1067, 1067, 1068, 1068, 1166, 1131, 1131, 1140, 1140, 1154, 1154, 1236, 1236, 1236, 1236, 1242, 1242, 1247, 1365, 1365, 1366, 1354, 1276, 1276, 1333, 1354, 1355, 1356, 1365, 1365, 1366, 1392, 1397, 1466, 1466, 1854, 1642, 1642, 1693, 1700, 1730, 1730, 1738, 1738, 1854, 1854, 1863, 1863, 1854, 1854, 1863, 1863, 1854, 1863, 2980, 2982, 2985, 2987, 2989, 2991, 2993, 2995, 2997, 2999, 3001, 3003, 3005, 3007, 3009, 3011, 3013, 3015, 3017, 3019, 3021, 3023, 3025, 3027, 3029, 3031, 3033, 3035, 3037, 3039, 3041, 3043, 3045, 3047, 3049, 3051, 3053, 3055, 3057, 3059, 3061, 3063, 3066, 3068, 3071, 3073, 3075, 3077, 3079, 3081, 3084, 3086, 3090, 3092, 3095, 3097, 3101, 3103, 3105, 3107, 3109, 3111, 3114, 3116, 3120, 3122, 3125, 3127, 3131, 3133, 3135, 3137, 3140, 3142, 2643, 2643, 3145, 3145, 2643, 2643, 2302, 2302, 2302, 2302, 3145, 3145, 2964, 2971, 2348, 2348, 2293, 3145, 3145, 2348, 2348, 2293, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2302, 2302, 2302, 2302, 2302, 2302, 2348, 2348, 2348, 2348, 2348, 2348, 2311, 2311, 2311, 2311, 3145, 3145, 
2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2348, 2657, 3681, 3683, 3689, 3691, 3148, 3148, 3069, 3069, 3148, 3148, 3150, 3708, 3710, 2643, 2643, 2648, 2643, 2643, 2643, 2643, 2648, 2657, 2964, 2971, 4019, 4021, 3145, 3145, 4054, 4056, 4058, 4060, 4063, 4065, 3145, 3145, 3145, 3145, 3148, 3148, 3087, 3087, 3117, 3117, 3138, 3138, 3145, 3145, 3148, 3148, 3150, 4141, 4143, 4146, 4148, 4153, 4155, 4158, 4160, 4163, 4165, 4167, 4169, 4172, 4174, 4176, 4178, 3846, 3676, 3846, 3846, 4180, 4180, 3846, 3846, 4180, 4180, 4180, 4180, 3676, 3846, 4150, 4180, 4180, 4150, 4170, 4170, 4180, 4180, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 6657, 6659, 6661, 6663, 6665, 6667, 6669, 6671, 6673, 6675, 6677, 6679, 6681, 6683, 6685, 6687, 6689, 6691, 6693, 6695, 6697, 6699, 6701, 6703, 6705, 6707, 6709, 6711, 6713, 6715, 6717, 6719, 6721, 6723, 6725, 6727, 6729, 6731, 6733, 6735, 6737, 6739, 6741, 6743, 6745, 6747, 6749, 6751, 6753, 6755, 6757, 6759, 6761, 6763, 6765, 6767, 6769, 6771, 6773, 6775, 6777, 6779, 6781, 6783, 6785, 6787, 6789, 6791, 6793, 6795, 6797, 6799, 6801, 6803, 6805, 6807, 6809, 6811, 6813, 6815, 6817, 6819, 6821, 6823, 6825, 6827, 6829, 6831, 6833, 6835, 6837, 6839, 6841, 6843, 6845, 6847, 6849, 6851, 6853, 6855, 6857, 6859, 6861, 6863, 6865, 6867, 6869, 6871, 6873, 6875, 6877, 6879, 6881, 6883, 6885, 6887, 6889, 6891, 6893, 6895, 6897, 6899, 6901, 6903, 6905, 6907, 6909, 6911, 6913, 6915, 6917, 6919, 6921, 6923, 6925, 6927, 6929, 6931, 6933, 6935, 6937, 6939, 6941, 6943, 6945, 6947, 6949, 6951, 6953, 6955, 6957, 6959, 6961, 6963, 6965, 6967, 6969, 6971, 6973, 6975, 6977, 6979, 6981, 6983, 6985, 6987, 6989, 6991, 6993, 6995, 6997, 6999, 7001, 7003, 7005, 7007, 7009, 7011, 7013, 7015, 7017, 7019, 7021, 7023, 7025, 7027, 7029, 7031, 7033, 7035, 7037, 7039, 7041, 7043, 7045, 7047, 7049, 7051, 7053, 7055, 7057, 7059, 7061, 7063, 7065, 7067, 7069, 7071, 7073, 7075, 7077, 7079, 7081, 7083, 7085, 
7087, 7089, 7091, 7093, 7095, 7097, 7099, 7101, 7103, 7105, 7107, 7109, 7111, 7113, 7115, 7117, 7119, 7121, 7123, 7125, 7127, 7129, 7131, 7133, 7135, 7137, 7139, 7141, 7143, 7145, 7147, 7149, 7151, 7153, 7155, 7157, 7159, 7161, 7163, 7165, 7167, 7169, 7171, 7173, 7175, 7177, 7179, 7181, 7183, 7185, 7187, 7189, 7191, 7193, 7195, 7197, 7199, 7201, 7203, 7205, 7207, 7209, 7211, 7213, 7215, 7217, 7219, 7221, 7223, 7225, 7227, 7229, 7231, 7233, 7235, 7237, 7239, 7241, 7243, 7245, 7247, 7249, 7251, 7253, 7255, 7257, 7259, 7261, 7263, 7265, 7267, 7269, 7271, 7273, 7275, 7277, 7279, 7281, 7283, 7285, 7287, 7289, 7291, 7293, 7295, 7297, 7299, 7301, 7303, 7305, 7307, 7309, 7311, 7313, 7315, 7317, 7319, 7321, 7323, 7325, 7327, 7329, 7331, 7333, 7335, 7337, 7339, 7341, 7343, 7345, 7347, 7349, 7351, 7353, 7355, 7357, 7359, 7361, 7363, 7365, 7367, 7369, 7371, 7373, 7375, 7377, 7379, 7381, 7383, 7385, 7387, 7389, 7391, 7393, 7395, 7397, 7399, 7401, 7403, 7405, 7407, 7409, 7411, 7413, 7415, 7417, 7419, 7421, 7423, 7425, 7427, 7429, 7431, 7433, 7435, 7437, 7439, 7441, 7443, 7445, 7447, 7449, 7451, 7453, 7455, 7457, 7459, 7461, 7463, 7465, 7467, 7469, 7471, 7473, 7475, 7477, 7479, 7481, 7483, 7485, 7487, 7489, 7491, 7493, 7495, 7497, 7499, 7501, 7503, 7505, 7507, 7509, 7511, 7513, 7515, 7517, 7519, 7521, 7523, 7525, 7527, 7529, 7531, 7533, 7535, 7537, 7539, 7541, 1883, 1884, 1894, 1895, 1897, 1898, 7549, 7551, 7553, 1933, 1938, 1942, 1951, 1952, 1964, 1970, 1971, 1972, 1973, 1983, 1984, 1985, 1986, 1987, 2037, 2038, 2044, 2045, 2048, 2049, 2063, 2064, 2065, 2066, 2067, 2068, 2069, 2070, 2072, 2081, 2082, 2093, 2096, 2100, 2101, 2104, 2105, 2108, 2116, 2117, 2123, 2124, 2126, 2127, 2129, 2130, 2136, 2137, 2139, 2140, 2144, 2145, 2153, 2154, 2155, 2159, 2160, 2161, 2165, 2166, 2167, 2168, 2170, 2171, 2173, 2174, 2175, 2176, 2177, 2178, 2180, 2181, 2183, 2184, 2185, 2186, 2187, 2188, 2192, 2193, 2205, 2208, 2209, 2219, 2229, 2230, 2232, 2233, 2236, 2237, 2267, 2281, 7648, 7650, 7652, 
7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676, 7678, 7680, 7682, 2362, 2363, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 2411, 2412, 2424, 2425, 2427, 2428, 2431, 2432, 2453, 2454, 2457, 2458, 2466, 2467, 2503, 2504, 2511, 2514, 2520, 2533, 2544, 2546, 2550, 2551, 2554, 2555, 2564, 2565, 2569, 2570, 2572, 2598, 2599, 2602, 2603, 2606, 2607, 2610, 7740, 7742, 7744, 7746, 7748, 7750, 7752, 7754, 7756, 7758, 2666, 2667, 2675, 2676, 2677, 2678, 2681, 2682, 2683, 2684, 2685, 2686, 2699, 2708, 2709, 2712, 2713, 2716, 2719, 2731, 2732, 2738, 2739, 2750, 2751, 2758, 2760, 2761, 2763, 2771, 2777, 2778, 2798, 2806, 2807, 2808, 2810, 2811, 2812, 2831, 2834, 2846, 2848, 2881, 2907, 2908, 2928, 2931, 2939, 2940, 2943, 2944, 2959, 2960, 2962, 2963, 2966, 2967, 2969, 2970, 2974, 2976, 7822, 7824, 7826, 7828, 7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866, 7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 3177, 3178, 3186, 3187, 3189, 3190, 3191, 3192, 3193, 3194, 3196, 3197, 3505, 3528, 3587, 3588, 3596, 3597, 3598, 3601, 3602, 3610, 3613, 3614, 3624, 3625, 3626, 3627, 3628, 3629, 3631, 3632, 3633, 3634, 3635, 3636, 3638, 3639, 3640, 3641, 3642, 3643, 3645, 3646, 3647, 3648, 3650, 3651, 3658, 3659, 3666, 3667, 3668, 3669, 3670, 3671, 3672, 3673, 3674, 3675, 3679, 7957, 7959, 3700, 3701, 3702, 3703, 3704, 3705, 3706, 7968, 3826, 3827, 3835, 3837, 3838, 3839, 3840, 3843, 3845, 4007, 4010, 7981, 4039, 4040, 7985, 7987, 7989, 4083, 4084, 4098, 4099, 4100, 4101, 4115, 4118, 4124, 4127, 4132, 4133, 4135, 4136, 4137, 4138, 4139, 8008, 8010, 8012, 8014, 8016, 8018, 8020, 8022, 4221, 4222, 4229, 4230, 5163, 5164, 5178, 5179, 5182, 5183, 5210, 5211, 5227, 5314, 5422, 5426, 5427, 5470, 5473, 5475, 5477, 5478, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 
121, 122, 123, 124, 125, 126, 127, 8320, 8322, 8490, 1809, 8494, 8508, 8439, 8502, 8504, 8506, 8320, 8322, 8490, 1809, 8496, 8510, 8498, 8512, 8439, 8502, 8504, 8506, 8397, 8392, 8400, 8399, 8401, 8403, 8402, 8405, 8404, 8064, 0, 8288, 8066, 8064, 5, 8288, 8066, 8288, 8154, 8317, 8444, 8418, 8496, 8498, 8439, 8068, 8071, 8070, 8072, 8075, 8074, 8076, 8152, 8520, 1089, 8079, 8078, 8204, 8215, 8095, 8080, 8080, 947, 8095, 8080, 8482, 8427, 8496, 8498, 8482, 8525, 8081, 8423, 8423, 8423, 8423, 8445, 8083, 8085, 8084, 8527, 8529, 8087, 8086, 8475, 8088, 8090, 8089, 8475, 8091, 8475, 8092, 8093, 8094, 8095, 948, 8441, 8440, 8235, 8100, 8441, 8440, 8417, 8416, 8235, 8100, 8215, 8098, 8235, 8100, 948, 8101, 8104, 8103, 8105, 8108, 8107, 8109, 8150, 8111, 8483, 8112, 8114, 8116, 1809, 8118, 8120, 8122, 8421, 8124, 8152, 8532, 8125, 8126, 8235, 8129, 8128, 8534, 8475, 8130, 8536, 8215, 8222, 8317, 8317, 8131, 8133, 8146, 8425, 8427, 8449, 8451, 8417, 8416, 8539, 8541, 8543, 8146, 8136, 8139, 8138, 8140, 8143, 8142, 8144, 8152, 8547, 8145, 8146, 8319, 8319, 8425, 8147, 8150, 8149, 8425, 8427, 8449, 8451, 8434, 8152, 8151, 8551, 8153, 8152, 8553, 8154, 8478, 8155, 8157, 8478, 1477, 1477, 8418, 8161, 8162, 8164, 8163, 8166, 8165, 8558, 8167, 8560, 8168, 8562, 1089, 8332, 8331, 8341, 8340, 8564, 8313, 8566, 8170, 8169, 8435, 8457, 8171, 8174, 8173, 8175, 8177, 8176, 8571, 8178, 8180, 8179, 8574, 8181, 8183, 8182, 8576, 8578, 8184, 8580, 8185, 8582, 8584, 8586, 8186, 8588, 8187, 8590, 8592, 8594, 1089, 8190, 8189, 8596, 8192, 8191, 8194, 8193, 8400, 8195, 8197, 8196, 8198, 8199, 8201, 8235, 947, 8599, 8215, 8202, 8435, 8203, 8225, 8227, 8229, 8231, 8492, 8225, 8227, 8229, 8231, 8444, 947, 8204, 947, 8313, 8205, 8207, 8206, 8606, 8326, 8208, 8346, 8327, 8346, 8328, 8329, 8338, 8210, 8209, 8339, 8210, 8209, 8212, 8211, 8214, 8213, 8215, 8216, 8218, 8217, 8475, 8219, 8475, 8220, 8221, 8222, 8313, 8317, 8223, 8225, 8227, 8229, 8231, 8444, 8479, 8475, 8233, 8316, 8234, 8235, 8236, 
1477, 1477, 8456, 8407, 8628, 8343, 8239, 8346, 8344, 8346, 8345, 1166, 8246, 8351, 8350, 8240, 8242, 8241, 8244, 8243, 8638, 8343, 8245, 8346, 8344, 8346, 8345, 1166, 8246, 8351, 8350, 8248, 8640, 8249, 8642, 8251, 8250, 8644, 8253, 8252, 8346, 8254, 8346, 8255, 8256, 1089, 8259, 8258, 8346, 8333, 1110, 8337, 8336, 8260, 8262, 8261, 8264, 8263, 8646, 8326, 8265, 8648, 8332, 8331, 8266, 8268, 8267, 8341, 8340, 8650, 8386, 8385, 8400, 8387, 8400, 8388, 8389, 8372, 8371, 8400, 8373, 8400, 8374, 8375, 8384, 8376, 8377, 8365, 8363, 8400, 8366, 8400, 8367, 8368, 8379, 8378, 8400, 8380, 8400, 8381, 8382, 8384, 8383, 8400, 8369, 8652, 8269, 8271, 8272, 8274, 8275, 8277, 8279, 8281, 8283, 8285, 8287, 8288, 8313, 8425, 8301, 8289, 8475, 8303, 8304, 8444, 8479, 8478, 8423, 8290, 8468, 8448, 8319, 8309, 8292, 8475, 8310, 8475, 8311, 8312, 8293, 8295, 8475, 8315, 8317, 8660, 8423, 8422, 8309, 8308, 8475, 8310, 8475, 8311, 8312, 8313, 8664, 8475, 8315, 8317, 8666, 8468, 8298, 8319, 8301, 8300, 8475, 8302, 8475, 8303, 8304, 8444, 8479, 8306, 8480, 8483, 8482, 8484, 8427, 8309, 8308, 8475, 8310, 8475, 8311, 8312, 8313, 8669, 8475, 8315, 8316, 8317, 8673, 8423, 8422, 8319, 8320, 8322, 8439, 8326, 8325, 8346, 8327, 8346, 8328, 8329, 8686, 8332, 8324, 8346, 8333, 1110, 8337, 8336, 8688, 8690, 8341, 8340, 8692, 8694, 8696, 8326, 8325, 8346, 8327, 8346, 8328, 8329, 1089, 8332, 8331, 8346, 8333, 8346, 8346, 8334, 1110, 8337, 8336, 8339, 8338, 8699, 8341, 8340, 8701, 8343, 8342, 8346, 8344, 8346, 8345, 1166, 8348, 8351, 8350, 8353, 8352, 8355, 8354, 8356, 8705, 8358, 8357, 8360, 8359, 8361, 8707, 8365, 8364, 8400, 8366, 8400, 8367, 8368, 8407, 1365, 8406, 8709, 1242, 8365, 8363, 8400, 8367, 8368, 8407, 8712, 1365, 8365, 8364, 8400, 8366, 8400, 8367, 8368, 8400, 8369, 8400, 8400, 8370, 8716, 8372, 8371, 8400, 8373, 8400, 8374, 8375, 8384, 8376, 8377, 8379, 8378, 8400, 8380, 8400, 8381, 8382, 8384, 8383, 8386, 8385, 8400, 8387, 8400, 8388, 8389, 8390, 8722, 8397, 8392, 8400, 8398, 8400, 
8399, 8401, 8403, 8402, 8405, 8404, 8406, 8393, 8410, 8395, 8411, 8397, 8396, 8400, 8398, 8400, 8399, 8401, 8403, 8402, 8405, 8404, 8406, 8407, 8410, 8409, 8411, 8492, 8494, 8438, 8439, 8502, 8504, 1477, 1477, 1477, 1477, 1477, 1477, 1477, 8453, 1809, 8417, 8416, 8418, 8421, 8420, 8423, 8422, 8424, 8425, 8427, 8429, 8431, 8433, 8435, 8434, 8436, 8437, 8494, 8438, 8439, 8502, 8504, 8441, 8440, 8443, 8442, 8444, 8479, 8478, 8446, 8445, 8447, 8468, 8470, 8448, 8449, 8480, 8483, 8482, 8484, 8451, 8453, 8730, 1660, 1660, 1660, 8457, 8456, 1660, 1660, 1660, 8460, 8459, 8461, 8475, 8462, 8463, 8476, 8479, 8464, 8466, 8465, 8467, 8467, 8468, 8470, 8471, 8473, 8472, 8475, 8474, 8734, 8475, 8475, 8736, 8476, 8479, 8478, 8480, 8483, 8482, 8484, 8486, 8485, 8487, 8488, 8490, 1809, 8492, 8738, 8494, 8740, 8496, 8742, 8498, 8744, 8500, 8502, 8504, 8506, 8513, 8785, 8676, 8609, 8780, 8677, 8780, 8678, 3129, 8787, 3069, 8789, 8791, 8793, 3099, 8795, 3148, 8516, 8516, 8516, 8516, 8516, 8516, 8516, 8516, 8608, 8517, 8517, 8518, 8518, 8556, 8747, 8521, 8521, 8522, 8522, 8674, 8674, 8537, 8537, 8537, 8537, 8602, 8603, 8597, 8555, 8555, 8556, 8597, 8747, 8747, 8674, 8602, 8674, 8603, 8674, 8674, 8608, 8799, 8676, 8609, 8780, 8677, 8780, 8678, 3129, 8802, 8629, 8755, 8804, 8754, 8753, 8780, 8778, 8780, 8779, 3129, 8625, 8683, 8807, 8754, 8611, 8780, 8778, 8780, 8779, 3129, 3069, 3069, 8809, 8811, 8813, 3099, 8815, 8817, 8819, 3129, 8821, 8823, 8825, 3099, 8827, 8829, 3129, 8831, 8629, 8755, 8754, 8753, 3148, 3148, 8833, 8618, 8617, 8620, 8619, 8621, 8623, 8835, 8837, 8839, 8841, 8843, 8624, 8625, 8629, 8632, 8631, 8774, 8633, 8774, 8634, 3099, 3069, 8848, 8850, 8852, 8653, 8653, 8654, 8654, 8674, 8674, 8747, 8856, 8676, 8675, 8780, 8677, 8780, 8678, 3129, 8680, 8859, 8861, 8777, 8682, 8683, 8714, 8718, 8747, 8747, 8747, 8749, 8748, 8750, 8774, 8773, 3099, 8751, 8770, 8774, 8772, 8777, 8752, 8780, 8778, 8780, 8779, 3129, 8765, 8766, 8757, 8868, 8755, 8771, 8770, 8754, 8753, 8780, 8778, 
8780, 8779, 3129, 8782, 8755, 8774, 8762, 3099, 8774, 8761, 8760, 8759, 8777, 8756, 8780, 8778, 8780, 8779, 3129, 8765, 8766, 8757, 8873, 3069, 8760, 8759, 8774, 8761, 8774, 8762, 3099, 8777, 8764, 8765, 8766, 8767, 8875, 8877, 8771, 8770, 8777, 8768, 8780, 8778, 8780, 8779, 3129, 8782, 3069, 8771, 8770, 8774, 8772, 8774, 8773, 3099, 8777, 8776, 8780, 8778, 8780, 8779, 3129, 8782, 8783, 8885, 8887, 8857, 8857, 8857, 8857, 8800, 8800, 8805, 8805, 8900, 3846, 3846, 8800, 8800, 8805, 8805, 8862, 8862, 8857, 8857, 8862, 8862, 8890, 8889, 3846, 3846, 3846, 8902, 8890, 8889, 8904, 3846, 3846, 8906, 8890, 8889, 3684, 3684, 8908, 8890, 8889, 3684, 3684, 3684, 8846, 8846, 8854, 8857, 8857, 8862, 8862, 8870, 8869, 8866, 8893, 8895, 8870, 8869, 8871, 8893, 8895, 8913, 8890, 8889, 8892, 8891, 8893, 8895, 8918, 8897, 8897, 8910, 8910, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 1878, 1879, 1880, 1881, 1882, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1896, 1899, 1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1934, 1935, 1936, 1937, 1939, 1940, 1941, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1953, 1954, 1955, 1956, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1965, 1966, 1967, 1968, 1969, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2036, 2039, 2040, 2041, 2042, 2043, 2046, 2047, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2071, 2073, 2074, 2075, 2076, 2077, 2078, 2079, 2080, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2090, 2091, 2092, 2094, 2095, 2097, 2098, 2099, 2102, 2103, 2106, 2107, 2109, 2110, 2111, 2112, 2113, 
2114, 2115, 2118, 2119, 2120, 2121, 2122, 2125, 2128, 2131, 2132, 2133, 2134, 2135, 2138, 2141, 2142, 2143, 2146, 2147, 2148, 2149, 2150, 2151, 2152, 2156, 2157, 2158, 2162, 2163, 2164, 2169, 2172, 2179, 2182, 2189, 2190, 2191, 2194, 2195, 2196, 2197, 2198, 2199, 2200, 2201, 2202, 2203, 2204, 2206, 2207, 2210, 2211, 2212, 2213, 2214, 2215, 2216, 2217, 2218, 2220, 2221, 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2231, 2234, 2235, 2238, 2239, 2240, 2241, 2242, 2243, 2244, 2245, 2246, 2247, 2248, 2249, 2250, 2251, 2252, 2253, 2254, 2255, 2256, 2257, 2258, 2259, 2260, 2261, 2262, 2263, 2264, 2265, 2266, 2268, 2269, 2270, 2271, 2272, 2273, 2274, 2275, 2276, 2277, 2278, 2279, 2280, 2282, 2283, 2284, 2361, 2396, 2397, 2398, 2399, 2400, 2401, 2402, 2403, 2404, 2405, 2406, 2407, 2408, 2409, 2410, 2413, 2414, 2415, 2416, 2417, 2418, 2419, 2420, 2421, 2422, 2423, 2426, 2429, 2430, 2433, 2434, 2435, 2436, 2437, 2438, 2439, 2440, 2441, 2442, 2443, 2444, 2445, 2446, 2447, 2448, 2449, 2450, 2451, 2452, 2455, 2456, 2459, 2460, 2461, 2462, 2463, 2464, 2465, 2468, 2469, 2470, 2471, 2472, 2473, 2474, 2475, 2476, 2477, 2478, 2479, 2480, 2481, 2482, 2483, 2484, 2485, 2486, 2487, 2488, 2489, 2490, 2491, 2492, 2493, 2494, 2495, 2496, 2497, 2498, 2499, 2500, 2501, 2502, 2505, 2506, 2507, 2508, 2509, 2510, 2512, 2513, 2515, 2516, 2517, 2518, 2519, 2521, 2522, 2523, 2524, 2525, 2526, 2527, 2528, 2529, 2530, 2531, 2532, 2534, 2535, 2536, 2537, 2538, 2539, 2540, 2541, 2542, 2543, 2545, 2547, 2548, 2549, 2552, 2553, 2556, 2557, 2558, 2559, 2560, 2561, 2562, 2563, 2566, 2567, 2568, 2571, 2573, 2574, 2575, 2576, 2577, 2578, 2579, 2580, 2581, 2582, 2583, 2584, 2585, 2586, 2587, 2588, 2589, 2590, 2591, 2592, 2593, 2594, 2595, 2596, 2597, 2600, 2601, 2604, 2605, 2608, 2609, 2611, 2612, 2613, 2614, 2659, 2660, 2661, 2662, 2663, 2664, 2665, 2668, 2669, 2670, 2671, 2672, 2673, 2674, 2679, 2680, 2687, 2688, 2689, 2690, 2691, 2692, 2693, 2694, 2695, 2696, 2697, 2698, 2700, 2701, 2702, 2703, 2704, 2705, 
2706, 2707, 2710, 2711, 2714, 2715, 2717, 2718, 2720, 2721, 2722, 2723, 2724, 2725, 2726, 2727, 2728, 2729, 2730, 2733, 2734, 2735, 2736, 2737, 2740, 2741, 2742, 2743, 2744, 2745, 2746, 2747, 2748, 2749, 2752, 2753, 2754, 2755, 2756, 2757, 2759, 2762, 2764, 2765, 2766, 2767, 2768, 2769, 2770, 2772, 2773, 2774, 2775, 2776, 2779, 2780, 2781, 2782, 2783, 2784, 2785, 2786, 2787, 2788, 2789, 2790, 2791, 2792, 2793, 2794, 2795, 2796, 2797, 2799, 2800, 2801, 2802, 2803, 2804, 2805, 2809, 2813, 2814, 2815, 2816, 2817, 2818, 2819, 2820, 2821, 2822, 2823, 2824, 2825, 2826, 2827, 2828, 2829, 2830, 2832, 2833, 2835, 2836, 2837, 2838, 2839, 2840, 2841, 2842, 2843, 2844, 2845, 2847, 2849, 2850, 2851, 2852, 2853, 2854, 2855, 2856, 2857, 2858, 2859, 2860, 2861, 2862, 2863, 2864, 2865, 2866, 2867, 2868, 2869, 2870, 2871, 2872, 2873, 2874, 2875, 2876, 2877, 2878, 2879, 2880, 2882, 2883, 2884, 2885, 2886, 2887, 2888, 2889, 2890, 2891, 2892, 2893, 2894, 2895, 2896, 2897, 2898, 2899, 2900, 2901, 2902, 2903, 2904, 2905, 2906, 2909, 2910, 2911, 2912, 2913, 2914, 2915, 2916, 2917, 2918, 2919, 2920, 2921, 2922, 2923, 2924, 2925, 2926, 2927, 2929, 2930, 2932, 2933, 2934, 2935, 2936, 2937, 2938, 2941, 2942, 2945, 2946, 2947, 2948, 2949, 2950, 2951, 2952, 2953, 2954, 2955, 2956, 2957, 2958, 2961, 2965, 2968, 2973, 2975, 2977, 2978, 3176, 3179, 3180, 3181, 3182, 3183, 3184, 3185, 3188, 3195, 3198, 3200, 3201, 3206, 3207, 3209, 3210, 3212, 3213, 3225, 3227, 3228, 3234, 3235, 3263, 3271, 3296, 3297, 3304, 3305, 9116, 9118, 9117, 9032, 9042, 3321, 3330, 3390, 3391, 3393, 3394, 9116, 9117, 9118, 3398, 3400, 3414, 3422, 3425, 3437, 8569, 8572, 9193, 9199, 9205, 3491, 3496, 3516, 3530, 3531, 3533, 3535, 3562, 3563, 3581, 3589, 3590, 3591, 3592, 3593, 3594, 3595, 3599, 3600, 3603, 3604, 3605, 3606, 3607, 3608, 3609, 3611, 3612, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3630, 3637, 3644, 3649, 3652, 3653, 3654, 3655, 3656, 3657, 3660, 3661, 3662, 3663, 3664, 3665, 3677, 3678, 9293, 3687, 
3692, 3693, 3694, 3695, 3696, 3697, 3698, 3699, 9394, 3760, 3762, 3764, 3765, 3788, 3789, 3825, 3828, 3829, 3830, 3831, 3832, 3833, 3834, 3836, 3841, 3842, 3844, 9503, 9508, 9566, 9574, 3893, 9588, 3909, 9616, 3974, 3990, 4014, 4015, 4016, 4017, 4022, 4023, 4024, 4025, 4026, 4027, 4028, 4029, 4030, 4031, 4032, 4033, 4034, 4035, 4036, 4037, 4038, 4041, 4042, 4043, 4044, 4045, 4046, 4047, 4048, 4049, 4050, 4051, 4052, 4066, 4067, 4068, 4069, 4070, 4071, 4072, 4073, 4074, 4075, 4076, 4077, 4078, 4079, 4080, 4081, 4082, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095, 4096, 4097, 4102, 4103, 4104, 4105, 4106, 4107, 4108, 4109, 4110, 4111, 4112, 4113, 4114, 4116, 4117, 4119, 4120, 4121, 4122, 4123, 4125, 4126, 4128, 4129, 4130, 4131, 4134, 9918, 9765, 8857, 4202, 4203, 9918, 9775, 9777, 9776, 9918, 9918, 9866, 9866, 8857, 4218, 4219, 8800, 4224, 4225, 8805, 4227, 4228, 4231, 4232, 9886, 9822, 8800, 5104, 5105, 9886, 9833, 8805, 5115, 5116, 9886, 9843, 8862, 5126, 5127, 9854, 9853, 9858, 9857, 9862, 9861, 9866, 9865, 8857, 5143, 5144, 9886, 9886, 8862, 5154, 5155, 5158, 5159, 5160, 5161, 5162, 9875, 9883, 5176, 5177, 5180, 5181, 9884, 9883, 9886, 9885, 5206, 5207, 5208, 5209, 9884, 9883, 9886, 9885, 5225, 5226, 5238, 5239, 5240, 5243, 5246, 9898, 9899, 9900, 5254, 9918, 9908, 8857, 5301, 5302, 9918, 9917, 8862, 5311, 5312, 5395, 5396, 5397, 5398, 5399, 5420, 5421, 5423, 5424, 5425, 9992, 10021, 5468, 5469, 5471, 5472, 5474, 5476, 10030, 8897, 5558, 5559, 10030, 10051, 8910, 6140, 6141, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 10132, 10134, 10137, 10139, 10143, 10147, 10158, 10161, 10163, 10166, 10188, 10190, 10192, 10194, 10196, 10198, 10204, 10208, 10210, 10220, 10223, 10226, 10237, 10242, 10244, 10257, 10261, 10264, 10266, 10274, 10281, 10283, 10295, 
10297, 10302, 10304, 10307, 10312, 10315, 10318, 10321, 10328, 10330, 10332, 10334, 10336, 10345, 10362, 10364, 10366, 10368, 10372, 10375, 10377, 10379, 10383, 10385, 10387, 10400, 10402, 10410, 10412, 10414, 10418, 10421, 10423, 10425, 10427, 10429, 10433, 10437, 10439, 10441, 10443, 10447, 10449, 10452, 10455, 10457, 10459, 10461, 10464, 10466, 10468, 10470, 10472, 10475, 10477, 10479, 10482, 10485, 10487, 10489, 10492, 10494, 10496, 10499, 10501, 10517, 10519, 10523, 10525, 10530, 10532, 10534, 10539, 10542, 10544, 10546, 10548, 10552, 10558, 10560, 10562, 10566, 10569, 10573, 10575, 10577, 10581, 10585, 10591, 10593, 10595, 10598, 10600, 10603, 10605, 10607, 10609, 10611, 10615, 10617, 10619, 10623, 10625, 10627, 10629, 10631, 10633, 10637, 10639, 10641, 10644, 10646, 10649, 10651, 10653, 10660, 10662, 10667, 10669, 10671, 10674, 10676, 10679, 10681, 10683, 10686, 10689, 10691, 10693, 10696, 10698, 10700, 10702, 10706, 10708, 10710, 10713, 10715, 10719, 10722, 10724, 10726, 10729, 10731, 10735, 10753, 10756, 10758, 10766, 10775, 10777, 10780, 10782, 10790, 10798, 10803, 10806, 10810, 10812, 10814, 10819, 10821, 10823, 10826, 10829, 10832, 10112, 10114, 10116, 10837, 10119, 10118, 10117, 10121, 10123, 10126, 10125, 10129, 10128, 10127, 10140, 10148, 10144, 10148, 10847, 10849, 10851, 8516, 8516, 8516, 8516, 10526, 10784, 10784, 10784, 10152, 10149, 10150, 10151, 10355, 10152, 8517, 10866, 10154, 10153, 10772, 10155, 8518, 10868, 10167, 10200, 10169, 10403, 10168, 10169, 10200, 10169, 10791, 10788, 10791, 10570, 10842, 10589, 8655, 10762, 10835, 10394, 10393, 10396, 10395, 10838, 10837, 8655, 8655, 10173, 10357, 10173, 10248, 10173, 10342, 10340, 10355, 10788, 10791, 8521, 10872, 10567, 10570, 10178, 10177, 10788, 8522, 10874, 3306, 3307, 3308, 3309, 10184, 10234, 10180, 10181, 10182, 10183, 10184, 10185, 3319, 10579, 10200, 10201, 10514, 10583, 10778, 10786, 10205, 8674, 10211, 8655, 10213, 10215, 10786, 10788, 10745, 10743, 10567, 10228, 10230, 10233, 10232, 
10579, 10234, 10540, 10521, 10514, 10583, 10754, 10778, 10238, 10240, 10245, 10246, 10247, 10359, 10248, 10250, 10249, 10355, 10527, 10555, 10267, 8655, 10786, 10791, 10786, 10791, 8537, 8537, 3395, 3396, 3397, 10267, 8545, 10786, 10791, 10267, 10555, 10527, 8655, 10759, 10791, 10786, 10791, 8549, 10284, 10397, 10390, 10391, 10287, 10286, 10840, 10839, 10397, 8555, 8555, 10290, 10289, 10291, 8555, 10299, 10298, 10305, 10838, 10837, 10840, 10839, 10843, 10769, 10768, 10770, 10773, 10764, 10738, 10737, 10739, 10742, 10745, 10744, 10746, 10749, 10835, 10795, 10794, 10796, 10801, 3471, 3473, 3475, 10323, 10322, 3478, 10325, 10324, 3481, 10338, 10360, 10793, 10394, 10393, 10396, 10395, 10838, 10837, 10340, 8674, 10793, 10835, 10406, 10405, 10342, 8674, 10360, 10793, 10835, 10347, 10346, 10349, 10348, 10769, 10350, 10835, 10352, 10351, 10354, 10353, 10355, 8674, 10357, 8674, 10359, 10360, 10762, 10394, 10393, 10838, 10837, 10795, 10794, 10796, 10800, 10799, 10380, 10769, 10767, 10770, 10772, 10771, 10389, 10908, 10390, 10391, 10793, 10394, 10393, 10396, 10395, 10838, 10837, 10397, 10738, 10737, 10739, 10741, 10740, 10403, 10406, 10405, 10746, 10748, 10747, 10911, 10913, 10915, 10920, 10922, 10924, 10929, 10931, 10933, 10944, 10948, 10950, 8720, 3686, 10958, 10960, 10962, 10435, 10434, 3757, 10504, 10502, 10506, 10508, 10969, 10510, 10513, 10512, 10514, 8655, 10526, 10527, 10537, 10536, 10540, 10971, 10550, 10553, 10554, 10555, 10570, 10579, 10583, 8674, 10587, 10835, 10838, 10837, 10840, 10839, 10843, 10842, 10589, 10974, 10976, 10978, 10982, 3855, 3857, 10642, 10647, 8720, 10656, 10657, 10658, 3883, 8710, 10665, 3889, 3897, 8720, 3911, 10716, 10720, 10732, 10736, 10738, 10737, 10742, 10741, 10740, 10784, 10759, 10791, 10793, 10835, 10745, 10744, 10743, 10749, 10748, 10747, 10750, 10835, 10838, 10837, 10840, 10839, 10843, 10842, 10841, 10784, 10759, 10791, 10762, 10769, 10768, 10767, 10773, 10772, 10771, 10784, 10786, 10791, 10793, 10835, 10795, 10794, 10801, 10800, 
10799, 10815, 10833, 10835, 10838, 10837, 10840, 10839, 10843, 10842, 10841, 10996, 10999, 11002, 11004, 11006, 11008, 11010, 11017, 11019, 11021, 11023, 11028, 11031, 11033, 11035, 11037, 11039, 11046, 11048, 11050, 11053, 11058, 11060, 11062, 11064, 11069, 11071, 11073, 11076, 11078, 11080, 11015, 10956, 10845, 10954, 4195, 4196, 9918, 4201, 10853, 4205, 4206, 9918, 9777, 4209, 4210, 4211, 4212, 9918, 9866, 4215, 4216, 4217, 10855, 4223, 4226, 10909, 10909, 10909, 10909, 9886, 5098, 5099, 5103, 10918, 10917, 9886, 5109, 5110, 5114, 10927, 10926, 9886, 5120, 5121, 5125, 10936, 10935, 9855, 5131, 5132, 9859, 5134, 5135, 9863, 5137, 5138, 9866, 5140, 5141, 5142, 10942, 10941, 9886, 5148, 5149, 5153, 10946, 10945, 11140, 5165, 5166, 9882, 11013, 11012, 10997, 11015, 10956, 11147, 5184, 5185, 9882, 11013, 11012, 10997, 11015, 10951, 9886, 5196, 5197, 11083, 11083, 10952, 10983, 11026, 11155, 5212, 5213, 9882, 11015, 10956, 9886, 5221, 5222, 10983, 11026, 11163, 11014, 11013, 11012, 11015, 10956, 11083, 11083, 10953, 10983, 10954, 10956, 10956, 10983, 10964, 5251, 5252, 5253, 5294, 5295, 9918, 5300, 10980, 9918, 5305, 5306, 5310, 10983, 11013, 11012, 10997, 11184, 11014, 11013, 11012, 11015, 11083, 11083, 11025, 11026, 11189, 11043, 11042, 11041, 11044, 11056, 11055, 11054, 5447, 11083, 11083, 11066, 11067, 11083, 11083, 11082, 5467, 11196, 11198, 11187, 11186, 11185, 11107, 11106, 5530, 5557, 11200, 11199, 11107, 11106, 5592, 11143, 11142, 11141, 11149, 11148, 6034, 11157, 11165, 11156, 11166, 11165, 11164, 11166, 11165, 11164, 11187, 11186, 11185, 11200, 11199, 6139, 11187, 11186, 11185, 11192, 11191, 11190, 11200, 11199, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160, 3161, 3162, 3163, 3164, 3165, 11265, 11431, 11264, 11266, 3170, 3171, 11268, 3173, 3174, 11269, 3199, 8516, 8516, 
8516, 3205, 3208, 3211, 8516, 3215, 3216, 3217, 3218, 3219, 3220, 3221, 3222, 3223, 3224, 3226, 3229, 3230, 3231, 3232, 3233, 10156, 10159, 11272, 10164, 3240, 11279, 11278, 11277, 3244, 3245, 3246, 3247, 3248, 11279, 11278, 11277, 3252, 11459, 11459, 11369, 3256, 3257, 3258, 3259, 3260, 3261, 3262, 3264, 3265, 3266, 3267, 3268, 3269, 3270, 3272, 3273, 3274, 3275, 11459, 11459, 11369, 3279, 3280, 11459, 11459, 11369, 3284, 3285, 11459, 11459, 11369, 3289, 3290, 3291, 3292, 3293, 3294, 3295, 3298, 3299, 3300, 3301, 3302, 3303, 3310, 3311, 3312, 3313, 3314, 3315, 3316, 3317, 10186, 3320, 11459, 11276, 11275, 11279, 11278, 11277, 3328, 3329, 3331, 3332, 3333, 3334, 11280, 3336, 3337, 11281, 3339, 3340, 3341, 11446, 3343, 3344, 10218, 10221, 10224, 3348, 3349, 3350, 3351, 3352, 3353, 3354, 3355, 3356, 3357, 3358, 3359, 3360, 3361, 3362, 3363, 11286, 3365, 3366, 11288, 11287, 3369, 3370, 3371, 3372, 3373, 3374, 3375, 3376, 3377, 3378, 3379, 3380, 3381, 3382, 3383, 3384, 8537, 8537, 8537, 8537, 3389, 3392, 3399, 3401, 10259, 10262, 11292, 3405, 3406, 3407, 3408, 3409, 3410, 10272, 3412, 3413, 3415, 3416, 3417, 11294, 11295, 3420, 3421, 3423, 3424, 3426, 3427, 3428, 3429, 3430, 3431, 3432, 3433, 3434, 3435, 3436, 10293, 11297, 3440, 3441, 10300, 11299, 3444, 11300, 3446, 3447, 3448, 3449, 3450, 3451, 3452, 3453, 3454, 3455, 3456, 3457, 3458, 3459, 3460, 3461, 3462, 3463, 3464, 3465, 3466, 3467, 3468, 10310, 10313, 10316, 10319, 3476, 3477, 3479, 3480, 10326, 9210, 11418, 11308, 11307, 3487, 11309, 3489, 3490, 3492, 3493, 3494, 3495, 3497, 3498, 3499, 3500, 3501, 3502, 3503, 3504, 3506, 3507, 10343, 3509, 3510, 3511, 3512, 3513, 3514, 3515, 3517, 3518, 3519, 3520, 3521, 3522, 3523, 3524, 3525, 3526, 3527, 3529, 3532, 3534, 3536, 3537, 3538, 3539, 11311, 11314, 11313, 11312, 10370, 10373, 3546, 3547, 3548, 3549, 3550, 11317, 3552, 3553, 3554, 3555, 3556, 3557, 11321, 11320, 11319, 3561, 3564, 3565, 3566, 3567, 3568, 3569, 3570, 3571, 3572, 3573, 3574, 3575, 3576, 3577, 
3578, 11322, 3580, 3582, 3583, 3584, 3585, 3586, 3685, 11326, 11325, 11324, 10416, 10419, 11329, 11332, 11331, 11330, 10431, 3721, 3722, 11334, 11337, 11336, 11335, 10445, 11397, 11397, 11339, 10450, 10453, 11342, 11394, 11393, 11343, 9350, 10462, 11346, 11349, 11348, 11347, 11352, 11351, 11350, 11353, 11356, 11355, 11354, 11359, 11358, 11357, 11360, 11418, 11418, 11361, 3758, 3759, 3761, 3763, 3766, 3767, 3768, 3769, 3770, 11363, 11376, 11362, 10521, 3775, 11365, 3777, 11368, 11367, 11366, 3781, 3782, 11459, 11459, 11369, 3786, 11370, 11373, 11372, 11371, 3793, 11459, 11459, 11374, 3797, 3798, 3799, 11377, 11376, 11375, 10564, 10567, 3805, 11382, 11381, 11380, 3809, 11459, 11459, 11383, 3813, 11384, 3815, 3816, 3817, 3818, 3819, 3820, 3821, 3822, 3823, 3824, 11387, 11386, 11385, 9495, 11397, 11397, 11389, 10601, 11391, 11394, 11393, 11392, 10613, 11397, 11397, 11396, 10621, 11399, 11400, 11403, 11402, 11401, 10635, 11405, 3873, 11407, 3875, 11411, 11410, 11409, 3879, 3880, 3881, 3882, 11413, 11415, 11412, 3887, 3888, 11416, 11415, 11414, 11418, 11418, 11417, 11421, 11420, 11419, 11422, 11425, 11424, 11423, 11426, 11429, 11428, 11427, 3910, 11432, 11431, 11430, 11433, 3916, 3917, 11435, 11438, 11437, 11436, 11439, 3923, 3924, 11441, 10764, 3927, 3928, 10739, 3930, 3931, 3932, 11446, 10754, 3935, 11444, 3937, 10788, 3939, 3940, 3941, 3942, 3943, 3944, 10746, 3946, 3947, 3948, 3949, 3950, 3951, 3952, 3953, 3954, 3955, 3956, 3957, 11446, 10754, 3960, 11444, 3962, 10788, 3964, 3965, 10764, 3967, 3968, 3969, 10770, 3971, 3972, 3973, 11446, 10778, 3977, 11449, 3979, 10788, 3981, 3982, 3983, 3984, 3985, 10796, 3987, 3988, 3989, 11453, 11452, 10808, 3994, 11456, 11455, 10817, 11459, 11458, 10824, 10827, 10830, 4003, 4004, 4005, 4006, 4008, 4009, 4011, 4012, 4013, 4188, 4189, 4193, 4194, 4197, 11922, 11483, 11482, 11481, 11087, 4204, 4207, 11927, 4208, 4213, 11933, 4214, 11098, 4220, 11101, 11104, 10893, 11499, 11499, 11505, 11505, 11542, 11542, 11549, 11549, 4660, 4664, 
10909, 10893, 5038, 5040, 10909, 5097, 11757, 11756, 11755, 11111, 5106, 5107, 5108, 11760, 11759, 11758, 11116, 5117, 5118, 5119, 11763, 11762, 11761, 11121, 5128, 5129, 5130, 5133, 5136, 5139, 11132, 5145, 5146, 5147, 11916, 11915, 11764, 11137, 5156, 5157, 5167, 11987, 11810, 11809, 11765, 5171, 5172, 5173, 5174, 5175, 5186, 11996, 11810, 11809, 11766, 5190, 5191, 5192, 5193, 5194, 5195, 11896, 11895, 11894, 5201, 5202, 5203, 5204, 5205, 5214, 12013, 11810, 11809, 11808, 5218, 5219, 5220, 5223, 5224, 5228, 5229, 5230, 5231, 5232, 5233, 5234, 5235, 5236, 5237, 5242, 5244, 5245, 11771, 11770, 11769, 5250, 5296, 12041, 11810, 11809, 11808, 11176, 5303, 5304, 11896, 11895, 11811, 11181, 5313, 11889, 11886, 11887, 11892, 11891, 11890, 5392, 5393, 5394, 11889, 11888, 11887, 11892, 11891, 11890, 5406, 5407, 5408, 5409, 11913, 11912, 11893, 11896, 11895, 11894, 5416, 5417, 5418, 5419, 11899, 11898, 11897, 11902, 11901, 11900, 5434, 5435, 5436, 5437, 11905, 11904, 11903, 11916, 11915, 11906, 5444, 5445, 5446, 11913, 11912, 11907, 11910, 11909, 11908, 5454, 5455, 5456, 5457, 11913, 11912, 11911, 11916, 11915, 11914, 5464, 5465, 5466, 12053, 5500, 5501, 5502, 12079, 5528, 5529, 12079, 11203, 12053, 12062, 12079, 5585, 5586, 12080, 5590, 5591, 11985, 6026, 6027, 6028, 11994, 6032, 6033, 12011, 6040, 6041, 6042, 12022, 6048, 6049, 6050, 12079, 6074, 6075, 6076, 12053, 6098, 6099, 6100, 12062, 12079, 6113, 6114, 12080, 12079, 11208, 12053, 6167, 6168, 6169, 12062, 6175, 6176, 6177, 12079, 6187, 6188, 12080, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 12163, 12165, 12170, 12172, 3166, 3167, 3168, 3169, 3172, 3175, 10856, 3202, 3203, 3204, 10858, 10860, 10862, 3214, 12204, 12206, 3236, 3237, 3238, 3239, 3241, 3242, 3243, 3249, 3250, 3251, 3253, 3254, 3255, 12234, 12239, 12241, 12243, 3276, 3277, 3278, 3281, 3282, 3283, 3286, 3287, 3288, 12269, 12274, 3318, 3322, 3323, 3324, 3325, 3326, 3327, 3335, 3338, 3342, 3345, 3346, 3347, 12309, 12314, 3364, 3367, 3368, 12334, 3385, 
3386, 3387, 3388, 10882, 10884, 3402, 3403, 3404, 3411, 3418, 3419, 12374, 12376, 12381, 3438, 3439, 12387, 3442, 3443, 3445, 12393, 12395, 12398, 12403, 12407, 12412, 3469, 3470, 3472, 3474, 12420, 12422, 3482, 3483, 3484, 3485, 3486, 3488, 12433, 12435, 12437, 12443, 3508, 12451, 12453, 12455, 12458, 12460, 12469, 12471, 3540, 3541, 3542, 3543, 3544, 3545, 12479, 12482, 3551, 12486, 12489, 3558, 3559, 3560, 12498, 12500, 12502, 12505, 12508, 3579, 12512, 12515, 3711, 3712, 3713, 3714, 3715, 3716, 3717, 3718, 3719, 3720, 12528, 3723, 3724, 3725, 3726, 3727, 3728, 3729, 3730, 3731, 3732, 3733, 3734, 3735, 3736, 3737, 3738, 3739, 3740, 3741, 3742, 3743, 3744, 3745, 3746, 3747, 3748, 3749, 3750, 3751, 3752, 3753, 3754, 3755, 3756, 12564, 12569, 3771, 3772, 3773, 3774, 3776, 3778, 3779, 3780, 12583, 3783, 3784, 3785, 3787, 3790, 3791, 3792, 3794, 3795, 3796, 3800, 3801, 3802, 3803, 3804, 3806, 3807, 3808, 3810, 3811, 3812, 3814, 12618, 12620, 12622, 3847, 3848, 3849, 3850, 3851, 3852, 3853, 3854, 3856, 3858, 3859, 3860, 3861, 3862, 3863, 3864, 3865, 3866, 3867, 3868, 3869, 3870, 3871, 3872, 3874, 3876, 3877, 3878, 11820, 3884, 3885, 3886, 11823, 3890, 3891, 3892, 3894, 3895, 3896, 3898, 3899, 3900, 3901, 3902, 3903, 3904, 3905, 3906, 3907, 3908, 3912, 3913, 3914, 3915, 3918, 3919, 3920, 3921, 3922, 3925, 3926, 12697, 3929, 12700, 3933, 3934, 3936, 3938, 12712, 3945, 12716, 12721, 12723, 12725, 3958, 3959, 3961, 3963, 3966, 12737, 3970, 12741, 3975, 3976, 3978, 3980, 12753, 3986, 12756, 3991, 3992, 3993, 3995, 3996, 3997, 3998, 3999, 4000, 4001, 4002, 12773, 12775, 12777, 12160, 12167, 12780, 12783, 4198, 4199, 4200, 11088, 12790, 11930, 12793, 11936, 11099, 11102, 11105, 12566, 12565, 10909, 12440, 10909, 12487, 12431, 12448, 12566, 12565, 12192, 12597, 12311, 12193, 12709, 12513, 12194, 12487, 12195, 12566, 12565, 12379, 12709, 12513, 4346, 10892, 12496, 12383, 12487, 10864, 10864, 10864, 10864, 12566, 12565, 4386, 12202, 4388, 12230, 12266, 12311, 4396, 12207, 4398, 
12566, 12565, 10909, 12440, 12448, 10909, 12487, 12236, 12566, 12565, 12263, 12709, 12513, 12230, 12487, 12266, 12311, 12270, 12566, 12565, 10869, 10869, 10869, 10909, 12487, 10869, 10909, 12440, 10869, 12448, 10869, 10869, 10869, 12431, 10909, 12236, 10909, 10909, 12566, 12565, 12431, 12448, 10909, 12487, 10909, 12440, 12263, 4591, 12265, 4593, 12266, 12311, 12270, 4600, 12271, 4602, 11551, 11551, 11551, 11550, 11551, 11551, 11551, 11552, 12566, 12565, 12272, 12487, 12277, 12275, 12276, 12277, 12311, 12278, 12709, 12513, 12279, 10909, 10909, 10909, 10909, 12307, 12307, 12307, 12513, 12310, 12311, 12566, 12565, 10909, 12709, 12513, 10909, 12487, 4735, 12496, 12709, 12597, 12496, 11615, 11615, 11615, 11615, 11613, 11614, 11615, 11615, 12566, 12565, 10889, 12431, 10889, 10889, 12448, 10889, 10909, 10889, 10889, 10889, 10889, 12496, 12734, 12709, 12750, 12750, 12566, 12565, 12734, 12750, 12597, 12496, 12396, 12709, 12409, 12408, 12750, 10893, 4878, 10892, 10893, 12378, 12379, 12513, 12383, 12566, 12565, 10894, 12448, 10894, 10909, 12440, 10894, 10894, 10909, 12487, 10894, 12431, 10894, 10894, 10894, 12597, 12615, 12396, 12734, 12400, 12399, 12750, 12405, 12404, 12709, 12409, 12408, 12750, 12414, 12413, 12566, 12565, 10904, 12431, 10904, 10909, 12440, 10904, 10909, 12487, 10904, 12448, 12467, 10904, 10909, 10904, 10909, 12513, 10904, 10904, 12467, 12566, 12565, 12480, 10909, 12487, 10909, 5078, 10909, 12496, 10909, 12506, 10909, 12709, 12513, 11947, 5100, 5101, 5102, 11112, 12822, 11953, 5111, 5112, 5113, 11117, 12829, 11959, 5122, 5123, 5124, 11122, 12836, 11965, 11968, 11971, 11974, 11133, 12843, 11980, 5150, 5151, 5152, 11138, 12850, 12851, 5168, 5169, 5170, 12857, 12860, 12861, 5187, 5188, 5189, 12867, 12870, 12004, 5198, 5199, 5200, 12876, 12879, 12880, 5215, 5216, 5217, 12886, 12018, 12889, 12891, 12894, 12896, 12899, 12516, 5247, 5248, 5249, 12566, 12565, 12597, 12615, 12907, 5297, 5298, 5299, 11177, 12046, 5307, 5308, 5309, 11182, 12654, 12680, 12709, 12718, 
12750, 12770, 5386, 5387, 5388, 5389, 5390, 5391, 12927, 5400, 5401, 5402, 5403, 5404, 5405, 12936, 5410, 5411, 5412, 5413, 5414, 5415, 12946, 5428, 5429, 5430, 5431, 5432, 5433, 12956, 5438, 5439, 5440, 5441, 5442, 5443, 12966, 5448, 5449, 5450, 5451, 5452, 5453, 12975, 5458, 5459, 5460, 5461, 5462, 5463, 12985, 5499, 12989, 5527, 12993, 5556, 11204, 5574, 5575, 5584, 5587, 13000, 13003, 6025, 13006, 6031, 13010, 6039, 13013, 6047, 13017, 6073, 13021, 6097, 13025, 6103, 6112, 6115, 13030, 6138, 11209, 6166, 13036, 6174, 13040, 6186, 6189, 13044, 122, 123, 124, 125, 126, 127, 12166, 12173, 13061, 13064, 13065, 10857, 10859, 10861, 10863, 13081, 13084, 13087, 13094, 13097, 13100, 13106, 13109, 13121, 10883, 10885, 13159, 13161, 13176, 13186, 13197, 13203, 13209, 13213, 13219, 13225, 13228, 13232, 13235, 13239, 13244, 13247, 13249, 13253, 13257, 13260, 13263, 13268, 13271, 12623, 13278, 13282, 13287, 13291, 13297, 13303, 13307, 13311, 13314, 13317, 13321, 13325, 13328, 13331, 13333, 13336, 12701, 13343, 12713, 12717, 12726, 13353, 12738, 12742, 13361, 12757, 13367, 13369, 13373, 12778, 4182, 13056, 4185, 13058, 13385, 11931, 11937, 13153, 13152, 13151, 13155, 13154, 12567, 4246, 4247, 13067, 4251, 4252, 13195, 13165, 13068, 13069, 4257, 12734, 13184, 4260, 13183, 4263, 13164, 13163, 13162, 4269, 13169, 13168, 13167, 13073, 13138, 13140, 13178, 13179, 12567, 4292, 4293, 13241, 4296, 4299, 4300, 13136, 13190, 13118, 13341, 4305, 4306, 13195, 4308, 13117, 13351, 4311, 12734, 13184, 4314, 13183, 4316, 13138, 13140, 13178, 13179, 12567, 4335, 4336, 13341, 4339, 4340, 13195, 4342, 13137, 4348, 4351, 13190, 13135, 13188, 13359, 13351, 4357, 12734, 13184, 4360, 13183, 13138, 4363, 4364, 4365, 4366, 13138, 13178, 13179, 12567, 4383, 4384, 13241, 4387, 4389, 4391, 4392, 13075, 13074, 13169, 4397, 13153, 13152, 13151, 13206, 13216, 13222, 12567, 4419, 4420, 13111, 4423, 4424, 13195, 13165, 4429, 13169, 13168, 13167, 13113, 4435, 12734, 13184, 4438, 13183, 4444, 13092, 13091, 
13090, 13138, 13140, 13178, 13179, 12567, 4466, 4467, 13241, 13193, 4470, 4471, 13195, 4473, 13117, 13182, 4476, 12734, 13184, 4479, 13183, 4482, 4483, 13089, 13136, 13190, 4487, 13153, 13152, 13151, 13155, 13154, 13216, 12567, 4506, 4507, 4509, 4510, 4511, 4512, 12734, 13184, 4515, 13183, 4517, 4518, 4519, 13195, 13165, 4522, 4523, 13169, 13168, 13167, 4527, 4528, 4529, 4530, 13164, 13163, 13162, 4534, 4535, 13092, 13091, 13090, 4539, 4540, 13153, 13152, 13151, 13155, 13154, 13216, 13222, 12567, 4562, 4563, 4566, 13164, 13163, 13162, 13113, 4574, 13169, 13168, 13167, 13182, 4579, 12734, 13184, 4582, 13183, 13111, 4585, 4586, 13195, 13165, 13359, 4590, 4592, 4594, 4595, 13192, 13102, 13191, 4599, 4601, 4603, 4604, 4605, 4606, 4607, 4608, 4609, 4610, 13138, 13140, 13178, 13179, 12567, 4629, 4630, 13241, 13351, 4633, 12734, 13184, 4636, 13183, 4638, 4641, 4643, 4644, 4645, 13136, 13190, 13135, 13341, 4650, 4651, 13195, 4653, 13137, 4655, 13153, 13104, 13359, 4668, 13111, 4670, 13112, 4672, 13182, 13113, 4675, 13351, 4679, 4680, 4681, 13195, 4683, 13117, 4685, 4686, 13136, 13190, 13118, 13351, 13341, 13359, 13200, 13206, 13178, 13179, 12567, 4718, 4719, 13341, 4722, 4723, 13195, 4725, 13194, 4728, 12734, 13184, 4731, 13183, 4738, 13190, 13188, 13122, 13359, 13344, 4749, 4751, 13266, 13273, 4754, 13354, 13362, 13123, 13124, 13125, 13126, 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, 13153, 13152, 13151, 13155, 13154, 13216, 13222, 12567, 4792, 4793, 4795, 4796, 13164, 13163, 13162, 4800, 4801, 4802, 13169, 13168, 13167, 4806, 4807, 12734, 4809, 4810, 4811, 4812, 13140, 13266, 4817, 4818, 13341, 4820, 4821, 4822, 13138, 13140, 13178, 13222, 12567, 4841, 4842, 13241, 13354, 4846, 4847, 13266, 4852, 13143, 13273, 4859, 4860, 13145, 13144, 13341, 13344, 4866, 4867, 4868, 13148, 13362, 4872, 13200, 13206, 4875, 4880, 4883, 13136, 13190, 13135, 13359, 4888, 13341, 4890, 13195, 4892, 13137, 13351, 4895, 13153, 13152, 13151, 13155, 13154, 13216, 13222, 12567, 4915, 4916, 
4918, 4919, 13169, 13168, 13167, 4923, 4924, 4925, 13195, 13165, 4928, 4929, 4930, 12734, 13184, 4933, 13183, 4935, 4936, 13164, 13163, 13162, 4940, 4941, 4942, 13138, 13140, 13178, 13143, 13266, 4955, 4958, 4959, 13145, 13144, 13351, 13354, 4965, 4966, 4967, 13146, 4969, 4970, 4971, 13147, 13341, 13344, 4976, 4977, 4978, 13148, 4980, 4981, 4982, 13149, 13153, 13152, 13151, 13155, 13154, 13216, 13222, 12567, 5003, 5004, 5006, 5007, 13164, 13163, 13162, 5011, 5012, 5013, 13195, 13165, 5016, 5017, 12734, 13184, 5020, 13183, 5022, 5023, 13169, 13168, 13167, 5027, 13191, 13171, 13170, 5031, 5032, 5033, 5034, 13195, 5036, 5037, 5039, 5041, 13173, 13189, 13172, 13200, 13206, 13178, 13179, 12567, 5063, 5064, 13241, 13181, 5067, 13180, 13182, 5070, 12734, 13184, 5073, 13183, 5075, 5081, 5082, 13190, 13189, 13188, 13359, 5087, 13192, 5089, 13191, 13193, 5092, 5093, 13195, 5095, 13194, 11948, 13651, 11954, 13657, 11960, 13663, 11966, 11969, 11972, 11975, 11981, 13675, 13681, 12858, 13687, 12868, 12005, 13693, 12877, 13699, 12019, 12892, 12897, 5241, 13710, 13200, 13206, 13216, 13222, 12567, 5271, 5272, 13241, 13273, 13255, 5284, 13266, 13273, 5290, 13275, 13274, 13718, 12047, 13723, 11812, 11813, 13294, 13301, 13300, 5330, 12685, 12656, 12661, 5344, 12750, 13339, 13338, 13341, 13344, 5356, 13346, 5360, 13349, 13348, 13351, 13354, 12734, 13357, 13359, 13362, 5374, 13364, 13363, 13375, 5382, 13378, 13377, 13733, 13736, 12928, 13740, 13743, 12937, 13747, 13750, 12947, 13754, 13757, 12957, 13761, 13764, 12967, 13768, 13771, 12976, 13775, 13778, 12986, 12990, 13720, 13725, 12086, 13387, 13392, 13725, 13790, 13393, 13394, 12092, 13653, 13659, 13665, 13671, 13677, 13007, 12098, 13014, 13018, 13022, 13026, 13807, 13720, 13725, 13037, 13041, 13816, 13785, 13809, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 
65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 13062, 13082, 13085, 13088, 13095, 13098, 13101, 13107, 13110, 13160, 13177, 13187, 13198, 13204, 13210, 13214, 13220, 13226, 13229, 13233, 13236, 13240, 13245, 13250, 13254, 13258, 13261, 13264, 13269, 13272, 13279, 13283, 13288, 13292, 13298, 13304, 13308, 13312, 13315, 13318, 13322, 13326, 13329, 13334, 13370, 13824, 4184, 13825, 4187, 12178, 12181, 13386, 4234, 4235, 4236, 4238, 4239, 4245, 13845, 13829, 4250, 4253, 4254, 4255, 4256, 4258, 4259, 4261, 13830, 4264, 4265, 4266, 13831, 13832, 4270, 4271, 4272, 4273, 4275, 4277, 4280, 4283, 12692, 4291, 4294, 4301, 4302, 4303, 4304, 4307, 4309, 4310, 4312, 4313, 4315, 4318, 4320, 4323, 4326, 12324, 4334, 13845, 4338, 4341, 4343, 4352, 4353, 4354, 4355, 4356, 4358, 4359, 4361, 4362, 4368, 4371, 4374, 12692, 4382, 4385, 13998, 4393, 4394, 4395, 14005, 4400, 4401, 4402, 4404, 4407, 4410, 12692, 4418, 13845, 4422, 4425, 4426, 4430, 4431, 4432, 4433, 13841, 4436, 4437, 4439, 4445, 4446, 4447, 4449, 4451, 4454, 4457, 12692, 4465, 4468, 4469, 4472, 4474, 4475, 4477, 4478, 4480, 4484, 4485, 4486, 4489, 4490, 4491, 4493, 4494, 4497, 12692, 4505, 13845, 4513, 4514, 4516, 4520, 4521, 4524, 4525, 4526, 4531, 4532, 4533, 4536, 4537, 4538, 4542, 4543, 4544, 4546, 4547, 4550, 4553, 12692, 4561, 13845, 4567, 4568, 4569, 4570, 4575, 4576, 4577, 4578, 4580, 4581, 4583, 4584, 4587, 4588, 4589, 14133, 4596, 4597, 4598, 14140, 4612, 4614, 4617, 4620, 12324, 4628, 4631, 4632, 4634, 4635, 4637, 4646, 4647, 4648, 4649, 4652, 4654, 4656, 4657, 4667, 4669, 4671, 4673, 4674, 4678, 4682, 4684, 4687, 4688, 4689, 4690, 4698, 4699, 4701, 4703, 4706, 4709, 12324, 4717, 13845, 4721, 4724, 4726, 13841, 4729, 4730, 4732, 4739, 4740, 4741, 4742, 4747, 13885, 13859, 4752, 
4753, 4755, 13889, 4757, 13892, 13843, 4760, 4761, 4762, 13843, 4764, 13842, 13843, 4775, 4776, 4777, 4778, 4779, 4782, 4785, 4791, 13845, 4797, 4798, 4799, 4803, 4804, 4805, 4808, 4813, 4816, 4819, 4824, 4826, 4829, 4832, 12692, 4840, 4843, 4844, 13889, 4849, 4855, 13859, 4858, 4861, 4862, 4863, 4864, 13885, 4869, 14306, 4870, 13892, 4873, 4874, 4884, 4885, 4886, 4887, 4889, 4891, 4893, 4894, 4897, 4898, 4899, 4901, 4902, 4905, 4908, 4914, 13845, 4920, 4921, 4922, 4926, 4927, 4931, 4932, 4934, 4937, 4938, 4939, 4943, 4944, 4947, 4950, 4952, 13859, 4960, 4961, 4962, 4963, 13889, 4968, 14376, 4972, 14380, 4973, 4974, 13885, 4979, 14386, 4983, 14390, 4985, 4986, 4987, 4989, 4990, 4993, 4996, 5002, 13845, 5008, 5009, 5010, 5014, 5015, 5018, 5019, 5021, 5024, 5025, 5026, 5028, 5029, 5030, 5035, 5042, 5043, 5044, 5046, 5048, 5051, 5054, 12692, 5062, 5065, 5066, 5068, 5069, 5071, 5072, 5074, 5083, 5084, 5085, 5086, 5088, 5090, 5091, 5094, 5096, 13652, 13658, 13664, 13676, 13682, 13688, 13694, 13700, 13711, 5256, 5258, 5261, 5264, 5270, 5273, 5276, 13859, 5281, 5286, 5289, 13867, 5292, 5293, 13719, 13724, 5317, 5318, 5321, 5323, 5324, 5331, 5332, 5338, 12685, 12692, 5349, 13884, 5351, 5352, 5353, 5354, 13885, 13887, 5358, 13886, 13888, 5362, 5363, 5364, 5365, 13889, 5367, 13891, 5369, 13890, 5371, 5372, 13892, 13893, 5376, 5377, 13894, 13896, 5381, 13897, 5384, 5385, 13734, 13737, 13741, 13744, 13748, 13751, 13755, 13758, 13762, 13765, 13769, 13772, 13776, 13779, 14552, 14558, 5524, 5526, 5550, 13390, 13388, 5553, 5555, 14561, 14564, 14567, 14570, 5588, 5589, 14069, 14070, 14088, 14071, 14077, 14082, 14088, 14087, 14089, 14088, 14141, 14142, 14145, 14143, 14145, 14144, 14146, 14145, 14147, 14148, 14239, 14245, 14240, 14241, 14242, 14245, 14243, 14244, 14246, 14245, 14257, 14272, 14262, 14263, 14272, 14268, 14271, 14273, 14272, 14274, 14337, 14359, 14342, 14347, 14348, 14354, 14359, 14360, 14359, 14361, 14402, 14433, 14407, 14412, 14418, 14433, 14427, 14429, 14434, 14433, 
6015, 6017, 6019, 14481, 14479, 6022, 6024, 14486, 14488, 14491, 14555, 14495, 14494, 14495, 14552, 14555, 14558, 14561, 14564, 14567, 14570, 6135, 6137, 14552, 14555, 14558, 14561, 14564, 14567, 14570, 13781, 13783, 6219, 13787, 13788, 13789, 13801, 13793, 13795, 13797, 13799, 13801, 13803, 13805, 13806, 6491, 13811, 13813, 13815, 121, 122, 123, 124, 125, 126, 127, 4183, 4186, 14720, 4191, 4192, 14732, 14773, 14733, 14776, 14740, 14737, 14741, 14729, 14738, 4248, 13911, 4249, 14782, 13923, 4262, 14790, 4267, 4268, 14795, 14732, 14733, 14735, 14730, 14753, 14736, 14763, 4285, 14740, 14741, 14737, 14738, 14739, 13939, 14804, 14742, 14746, 14745, 14806, 13952, 13958, 14732, 14733, 14735, 14730, 14753, 14736, 14763, 4328, 14738, 14737, 14739, 14741, 14740, 4337, 13966, 13972, 14744, 14743, 14742, 14749, 14748, 14826, 13984, 14732, 14735, 14734, 14753, 14736, 14763, 4376, 14740, 14739, 14741, 14738, 14737, 13995, 14839, 13432, 14747, 14842, 13438, 14732, 14846, 14733, 14735, 14730, 14753, 14736, 14763, 4412, 14741, 14740, 14738, 14729, 14737, 4421, 14013, 14856, 14726, 14721, 14858, 4434, 14028, 14725, 14728, 14723, 14722, 14866, 14732, 14733, 14735, 14734, 14753, 14736, 14763, 4459, 14739, 14740, 14741, 14737, 14738, 14039, 14874, 14046, 14052, 14747, 14883, 14732, 14886, 14733, 14889, 14735, 14730, 14763, 4499, 14729, 14741, 14740, 14737, 14738, 4508, 14067, 14075, 14898, 14900, 14903, 14906, 14732, 14909, 14733, 14912, 14735, 14730, 14753, 14736, 14763, 4555, 14738, 14740, 14741, 14737, 14729, 4564, 14109, 14724, 14919, 14725, 14749, 14726, 14923, 14124, 14931, 13488, 14935, 13494, 14732, 14733, 14735, 14730, 14753, 14736, 14763, 4622, 14740, 14739, 14737, 14738, 14741, 14154, 14944, 14161, 14744, 14743, 14742, 14950, 14175, 14956, 14749, 14748, 14744, 14728, 14727, 14749, 14748, 14732, 14733, 14194, 14966, 14749, 14748, 14744, 14743, 14742, 14749, 14748, 14732, 14733, 14735, 14730, 14753, 14736, 14763, 4711, 14741, 14739, 14737, 14740, 14738, 4720, 14209, 14215, 
4727, 14220, 14744, 14731, 14749, 14748, 14986, 14732, 14733, 14735, 14734, 4748, 4750, 4756, 4758, 4759, 4763, 4765, 4766, 15007, 15010, 14735, 14730, 14753, 14736, 14737, 14741, 14740, 14729, 14738, 4794, 14255, 15016, 15019, 14735, 14734, 14732, 14733, 14735, 14734, 14753, 14736, 14763, 4834, 14739, 14737, 14738, 14740, 14741, 14288, 15031, 4845, 14747, 14746, 14745, 14744, 14743, 14742, 4857, 15038, 4865, 15043, 4871, 14744, 14743, 14742, 14749, 14748, 15050, 14323, 14732, 15058, 14733, 15061, 14735, 14730, 14753, 14736, 14737, 14740, 14738, 14729, 14741, 4917, 14335, 15067, 15070, 14352, 15075, 14735, 14730, 14744, 14743, 14747, 14746, 14745, 14742, 4957, 15083, 4964, 15088, 15090, 4975, 15095, 15097, 14732, 15100, 14733, 15103, 14735, 14730, 14753, 14736, 14737, 14729, 14741, 14738, 14740, 5005, 14400, 15109, 15112, 14416, 15117, 15120, 14432, 15124, 14732, 14733, 14735, 14730, 14753, 14736, 14763, 5056, 14739, 14737, 14741, 14740, 14738, 14444, 15132, 14448, 14454, 14744, 14731, 14749, 14748, 15140, 14465, 14471, 14732, 14733, 14735, 14734, 14753, 14736, 14739, 14737, 14740, 14738, 14741, 14503, 15162, 14749, 14748, 14742, 5278, 14744, 14743, 14746, 14745, 14747, 14749, 14748, 5291, 14751, 14750, 14753, 14752, 14754, 15177, 14759, 14755, 14758, 14760, 14761, 14758, 14756, 14759, 14760, 14761, 14759, 14761, 14760, 14757, 14758, 14762, 5346, 14763, 5348, 5350, 5355, 5357, 5359, 5361, 5366, 5368, 5370, 5373, 5375, 5378, 14764, 5380, 5383, 15215, 5498, 5503, 13716, 14515, 13383, 5551, 5552, 14515, 15221, 5577, 15223, 5579, 15225, 5581, 15227, 5583, 14780, 14783, 14784, 14797, 14808, 14811, 15203, 14822, 14828, 14829, 15085, 15092, 15203, 14962, 14854, 14860, 15052, 14875, 14878, 15203, 5708, 5709, 5710, 5711, 5713, 5715, 5716, 5718, 5719, 5720, 14960, 14961, 14959, 14957, 14921, 14925, 14929, 14932, 15024, 15203, 5759, 5760, 5761, 5762, 5763, 5764, 5765, 5766, 5767, 5768, 14945, 14952, 15203, 14957, 14959, 14958, 14960, 14961, 14962, 15203, 15024, 14968, 14969, 
14970, 14978, 14988, 14999, 15000, 15001, 15003, 5855, 5856, 5857, 5858, 5859, 5860, 5861, 5862, 5863, 5864, 5872, 5873, 5875, 5876, 5877, 5879, 5881, 5882, 5883, 5884, 15052, 15053, 15056, 5941, 5942, 5944, 5946, 5947, 5949, 5950, 5952, 5953, 5954, 5979, 5980, 5982, 5984, 5986, 5987, 5990, 5991, 5993, 5994, 15135, 15142, 15145, 14473, 14475, 14477, 6020, 6021, 14483, 13679, 6030, 13685, 6036, 14489, 6038, 13697, 6044, 14493, 6046, 13716, 6070, 14515, 6072, 15215, 6096, 6101, 6102, 15156, 6105, 15223, 6107, 15225, 6109, 15227, 6111, 13716, 14515, 15215, 6165, 15217, 6171, 15219, 6173, 15221, 6179, 15223, 6181, 15225, 6183, 15227, 6185, 6198, 6209, 6227, 6228, 6233, 6236, 6446, 6449, 6452, 6455, 6464, 6474, 6477, 6482, 6501, 6504, 6509, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 14766, 14768, 4190, 4233, 14774, 4237, 4240, 4241, 4242, 4243, 4244, 15374, 14787, 14791, 14796, 4274, 4276, 4278, 4279, 4281, 4282, 4284, 4286, 4287, 4288, 4289, 4290, 15398, 4295, 4297, 4298, 14807, 14810, 14814, 4317, 4319, 4321, 4322, 4324, 4325, 4327, 4329, 4330, 4331, 4332, 4333, 15418, 14824, 4344, 4345, 4347, 4349, 4350, 14827, 14832, 4367, 4369, 4370, 4372, 4373, 4375, 4377, 4378, 4379, 4380, 4381, 15441, 4390, 14843, 4399, 14847, 4403, 4405, 4406, 4408, 4409, 4411, 4413, 4414, 4415, 4416, 4417, 15460, 4427, 4428, 14859, 14864, 4440, 4441, 4442, 4443, 14867, 4448, 4450, 4452, 4453, 4455, 4456, 4458, 4460, 4461, 4462, 4463, 4464, 15487, 14877, 14881, 4481, 14884, 4488, 14887, 4492, 4495, 4496, 4498, 4500, 4501, 4502, 4503, 4504, 
15505, 14896, 14901, 14904, 14907, 4541, 14910, 4545, 4548, 4549, 4551, 4552, 4554, 4556, 4557, 4558, 4559, 4560, 15527, 4565, 14920, 4571, 4572, 4573, 14924, 14928, 14936, 4611, 4613, 4615, 4616, 4618, 4619, 4621, 4623, 4624, 4625, 4626, 4627, 15554, 14948, 4639, 4640, 4642, 14951, 14954, 4658, 4659, 4661, 4662, 4663, 4665, 4666, 4676, 4677, 14964, 14967, 4691, 4692, 4693, 4694, 4695, 4696, 4697, 4700, 4702, 4704, 4705, 4707, 4708, 4710, 4712, 4713, 4714, 4715, 4716, 15593, 14980, 14984, 4733, 4734, 4736, 4737, 14987, 4743, 4744, 4745, 4746, 15607, 15609, 15610, 15008, 4780, 4781, 4783, 4784, 4786, 4787, 4788, 4789, 4790, 15626, 15017, 15020, 4814, 4815, 4823, 4825, 4827, 4828, 4830, 4831, 4833, 4835, 4836, 4837, 4838, 4839, 15646, 15647, 4848, 4850, 4851, 4853, 4854, 4856, 15039, 15656, 15658, 4876, 4877, 4879, 4881, 4882, 15051, 15055, 4896, 15059, 4900, 4903, 4904, 4906, 4907, 4909, 4910, 4911, 4912, 4913, 15679, 15068, 15073, 15076, 4945, 4946, 4948, 4949, 4951, 4953, 4954, 4956, 15084, 15695, 15698, 4984, 15101, 4988, 4991, 4992, 4994, 4995, 4997, 4998, 4999, 5000, 5001, 15714, 15110, 15115, 15118, 15121, 15125, 5045, 5047, 5049, 5050, 5052, 5053, 5055, 5057, 5058, 5059, 5060, 5061, 15737, 15134, 15138, 5076, 5077, 5079, 5080, 15141, 15144, 15147, 5255, 5257, 5259, 5260, 5262, 5263, 5265, 5266, 5267, 5268, 5269, 15759, 5274, 5275, 5277, 5279, 5280, 5282, 5283, 5285, 5287, 5288, 15169, 5315, 5316, 5319, 5320, 5322, 5325, 5326, 5327, 5328, 5329, 5333, 5334, 5335, 5336, 5337, 5339, 5340, 5341, 5342, 5343, 5345, 5347, 15185, 15798, 15191, 15194, 15802, 15201, 15805, 15207, 5379, 15213, 5497, 5523, 5525, 5549, 15818, 5554, 15809, 5576, 5578, 5580, 5582, 15376, 15381, 5602, 13915, 5604, 5605, 15379, 15381, 15382, 15381, 5613, 5624, 5626, 5628, 5636, 5643, 5644, 5651, 5652, 5653, 14469, 5662, 14543, 5673, 14017, 5678, 5679, 15466, 5694, 5696, 5700, 15850, 14079, 15854, 15857, 5722, 5723, 5725, 5727, 5738, 5744, 5746, 14128, 5748, 5753, 14469, 5757, 14543, 15870, 
15872, 15875, 5776, 5783, 5785, 5793, 5794, 5795, 5796, 5797, 5802, 5803, 5804, 5808, 5813, 5814, 5822, 15596, 5830, 14228, 15611, 15613, 5847, 5848, 5849, 15612, 15613, 5852, 15614, 15613, 15899, 15903, 15907, 15909, 15912, 15916, 14278, 14280, 14281, 14282, 14292, 14293, 14304, 14309, 5930, 5931, 5933, 15922, 14344, 15927, 15929, 14374, 14378, 14384, 14388, 15932, 14409, 15936, 14543, 15940, 6004, 6010, 6012, 6014, 6016, 6018, 15948, 6023, 6029, 6035, 6037, 6043, 6045, 6069, 6071, 15809, 6095, 6104, 6106, 6108, 6110, 6134, 6136, 15809, 6164, 6170, 6172, 6178, 6180, 6182, 6184, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 16135, 16137, 16139, 16146, 16148, 16151, 16153, 16158, 16165, 16167, 16170, 16172, 16174, 16177, 16180, 16185, 16187, 16190, 16192, 16201, 16203, 16206, 16208, 16210, 16212, 16216, 16218, 16223, 16225, 16228, 16230, 16241, 16244, 16246, 16248, 16257, 16259, 16262, 16264, 16266, 16278, 16280, 16283, 16285, 16290, 16295, 16297, 16300, 16306, 16308, 16311, 16315, 16317, 16320, 16322, 16324, 16328, 16330, 16335, 16341, 16343, 16345, 16347, 16349, 16353, 16357, 16359, 16362, 16364, 16370, 16372, 16378, 16381, 16388, 16390, 16392, 16394, 16396, 16401, 16403, 16406, 16415, 16417, 16419, 16421, 16423, 16432, 16434, 16437, 16439, 16445, 16447, 16454, 16456, 16458, 16460, 16464, 16467, 16469, 16472, 15170, 16475, 16477, 16480, 16482, 16485, 16487, 16490, 16492, 15186, 15800, 15195, 15804, 15208, 15214, 16478, 16494, 16495, 16470, 16465, 13900, 13898, 15187, 15196, 16452, 16451, 16494, 16494, 16130, 15196, 16465, 16470, 15187, 16452, 
16451, 16494, 16495, 15187, 16465, 16470, 15196, 16478, 16494, 16495, 5571, 15807, 16133, 16131, 16494, 16435, 5600, 5601, 5603, 14785, 5607, 5608, 13925, 5610, 5611, 13929, 16144, 16143, 16494, 16149, 16156, 13944, 13950, 14812, 16163, 16162, 16494, 16168, 13970, 16178, 13975, 14830, 16430, 16429, 16407, 16430, 16183, 16494, 16188, 5661, 16195, 14001, 5665, 16199, 16197, 16494, 16204, 5674, 14020, 5680, 14862, 14030, 16221, 16220, 16494, 16226, 14044, 14879, 16235, 14055, 16239, 16237, 16494, 16242, 14894, 5714, 14083, 14090, 14095, 16255, 16253, 16494, 16260, 16267, 16270, 14111, 16269, 16270, 16271, 16270, 14116, 14926, 5747, 16430, 16429, 5754, 16404, 14135, 5758, 16276, 16275, 16494, 16281, 14946, 16291, 14167, 14173, 16430, 16429, 16573, 16302, 16301, 14469, 16404, 14197, 16309, 16313, 16312, 16494, 16318, 14213, 5824, 14982, 14222, 16333, 16332, 15024, 5836, 16404, 16407, 14232, 15085, 15203, 5845, 5846, 5850, 5851, 5853, 5854, 16430, 16429, 16494, 16435, 14258, 14264, 15021, 16430, 16429, 16407, 16404, 14277, 5896, 15024, 5898, 5899, 5900, 16355, 16354, 16494, 16360, 15085, 5909, 5910, 16368, 16373, 14298, 15040, 5918, 15203, 5920, 16430, 16429, 16379, 14458, 14469, 16386, 16384, 16494, 16435, 14338, 5945, 15071, 14355, 16430, 16429, 16407, 16404, 14368, 15085, 5967, 5968, 15092, 5970, 5971, 16413, 16411, 16494, 16435, 14403, 5983, 15113, 14419, 14423, 5992, 14435, 16430, 16429, 16494, 16435, 14543, 15136, 14458, 15183, 14469, 16452, 16451, 16494, 16495, 15196, 16465, 16470, 15187, 16478, 16494, 16495, 15196, 15187, 6092, 15807, 16452, 16451, 16494, 16495, 15187, 15196, 16470, 16465, 16478, 16494, 16495, 15187, 15196, 15203, 6161, 15807, 16652, 16506, 16652, 16651, 16508, 16507, 16511, 16510, 16509, 16516, 16515, 16514, 16513, 16640, 16639, 16630, 16631, 16629, 16633, 16632, 16638, 16634, 16636, 16635, 16638, 16637, 16640, 16639, 16652, 16642, 16652, 16651, 16646, 16645, 16644, 16643, 16648, 16647, 16652, 16650, 16652, 16651, 16656, 16655, 16654, 16653, 41, 
42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 16769, 16774, 16779, 16786, 16790, 16798, 16801, 16806, 16811, 16298, 16822, 16830, 16836, 16844, 16852, 16857, 16863, 16872, 16874, 16876, 5479, 16861, 16860, 5483, 5484, 5485, 16865, 16867, 16864, 5489, 16866, 5491, 5492, 5493, 14532, 5495, 15199, 16861, 16860, 5506, 5507, 5509, 5510, 5511, 5512, 15199, 16864, 16867, 5516, 16865, 5518, 16866, 14511, 5521, 14532, 16861, 16860, 5533, 5534, 5536, 5537, 5538, 14532, 16867, 16865, 16864, 5543, 16866, 5545, 14511, 5547, 15199, 5560, 16870, 16870, 16869, 5565, 5569, 5572, 14547, 16828, 16827, 5595, 5596, 5598, 5599, 16919, 5606, 16923, 5609, 16926, 5612, 16772, 16771, 5616, 5617, 5619, 5620, 16775, 5622, 5623, 5625, 5627, 16777, 16776, 5631, 5632, 5634, 5635, 5637, 16782, 5639, 16781, 16782, 5642, 5645, 16855, 16846, 5648, 5649, 5650, 16784, 16783, 5656, 5657, 5659, 5660, 5663, 5664, 16788, 16787, 5668, 5669, 5671, 5672, 16792, 16840, 5677, 5681, 16793, 16840, 16794, 16840, 5686, 16796, 16795, 5689, 5690, 5692, 5693, 5695, 5697, 5698, 5699, 16828, 16799, 5703, 5704, 5706, 5707, 5712, 5717, 5721, 5724, 16815, 16804, 16803, 5730, 5731, 5733, 5734, 5735, 5736, 5737, 5739, 5740, 5741, 5742, 5743, 5745, 16855, 16826, 5751, 5752, 5755, 5756, 16809, 16808, 5771, 5772, 5774, 5775, 5777, 16864, 5779, 16812, 16867, 5782, 5784, 16855, 16854, 5788, 5789, 16815, 16813, 16855, 16846, 5800, 5801, 5805, 5806, 5807, 16818, 5810, 16817, 16816, 16820, 16819, 5817, 5818, 5820, 5821, 5823, 5825, 16825, 16824, 16825, 5829, 16855, 16826, 5833, 5834, 5835, 16864, 5838, 16867, 16848, 5841, 5842, 5843, 5844, 17036, 17038, 17040, 16828, 16827, 5867, 5868, 5870, 5871, 5874, 5878, 
5880, 16855, 16832, 5887, 5888, 5889, 16848, 5891, 16864, 16867, 16847, 5895, 5897, 16834, 16833, 5903, 5904, 5906, 5907, 5908, 5911, 16838, 16864, 16837, 5915, 5916, 5917, 5919, 16855, 16846, 5923, 5924, 16840, 5926, 16839, 16840, 5929, 5932, 16842, 16841, 5936, 5937, 5939, 5940, 5943, 5948, 5951, 16855, 16846, 5957, 5958, 5959, 16848, 16864, 5962, 16867, 16847, 5965, 5966, 5969, 16850, 16849, 5974, 5975, 5977, 5978, 5981, 5985, 5988, 5989, 5995, 16855, 16854, 5998, 5999, 6001, 6002, 6003, 6005, 16859, 16858, 16859, 6009, 6011, 6013, 16861, 16860, 6053, 6054, 6056, 6057, 6058, 15199, 6060, 16866, 6062, 16865, 16864, 16867, 14511, 6067, 14532, 6077, 16870, 16870, 16869, 6082, 6085, 6088, 15199, 6090, 14532, 6093, 14547, 16861, 16860, 6118, 6119, 6121, 6122, 6123, 14532, 6125, 15199, 16865, 6128, 16866, 16867, 6131, 16864, 14511, 6142, 16870, 16870, 16869, 6150, 6152, 15183, 6154, 14532, 14534, 6157, 15199, 6159, 14543, 6162, 14547, 6196, 6197, 6199, 6200, 6207, 6208, 6216, 6217, 6218, 6229, 6230, 6231, 6232, 6234, 6235, 16519, 16527, 16563, 16561, 16542, 16540, 15848, 15852, 16553, 16554, 16556, 16558, 16563, 16561, 15877, 15868, 17011, 16575, 16593, 16590, 15901, 15905, 15910, 15917, 15924, 15923, 15938, 15933, 6441, 6442, 6443, 6444, 6445, 6447, 6448, 6450, 6451, 6453, 6454, 6462, 6463, 6472, 6473, 6475, 6476, 6478, 6479, 6480, 6481, 6489, 6490, 6499, 6500, 6502, 6503, 6505, 6506, 6507, 6508, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 16138, 16154, 16173, 16193, 16209, 16231, 16247, 16265, 16286, 16323, 16348, 16365, 16395, 16422, 16440, 16461, 16483, 16488, 16493, 5480, 5481, 5486, 5487, 5488, 5490, 17312, 5494, 5496, 5504, 5505, 17320, 17323, 5513, 5514, 5515, 5517, 5519, 5520, 5522, 5531, 5532, 17338, 5539, 5540, 5541, 5542, 5544, 5546, 5548, 5561, 5562, 5563, 17358, 5573, 5593, 5594, 17363, 5614, 5615, 17375, 5621, 5629, 
5630, 17386, 5638, 5640, 5641, 5646, 5647, 17399, 5654, 5655, 17404, 5666, 5667, 17412, 5675, 5676, 5682, 5683, 5684, 5685, 5687, 5688, 17427, 5701, 5702, 17437, 5726, 5728, 5729, 17448, 17452, 17455, 17457, 5749, 5750, 17463, 5769, 5770, 17469, 5778, 5780, 5781, 5786, 5787, 17482, 5790, 17289, 5792, 5798, 5799, 17488, 5809, 5811, 5812, 5815, 5816, 17499, 5826, 5827, 5828, 5831, 5832, 17511, 5837, 5839, 5840, 5865, 5866, 17527, 5885, 5886, 17536, 5890, 5892, 5893, 5894, 5901, 5902, 17548, 5912, 5913, 5914, 5921, 5922, 17563, 5925, 5927, 5928, 5934, 5935, 17573, 5955, 5956, 17582, 5960, 5961, 5963, 5964, 5972, 5973, 17595, 5996, 5997, 17606, 6006, 6007, 6008, 6051, 6052, 17620, 6059, 6061, 6063, 6064, 6065, 6066, 6068, 6078, 6079, 6080, 6089, 6091, 17644, 6094, 6116, 6117, 17649, 6124, 6126, 6127, 6129, 6130, 6132, 6133, 6143, 6144, 6145, 6153, 6155, 6156, 6158, 6160, 17677, 6163, 17680, 17682, 17684, 17686, 17689, 17691, 17693, 17368, 16522, 16521, 17366, 16521, 6243, 6244, 17370, 17366, 16529, 16530, 16530, 16528, 16532, 16531, 16532, 16533, 17400, 16535, 16536, 16534, 16536, 6266, 17407, 16538, 6269, 6271, 16543, 6274, 16961, 16543, 16546, 16547, 17432, 16547, 16545, 15855, 6287, 16548, 15855, 16550, 16551, 15851, 16548, 6294, 6295, 6296, 16552, 16555, 16555, 6305, 16557, 16560, 6309, 16560, 6314, 17464, 16563, 6317, 16566, 16565, 6320, 15873, 15876, 15876, 16567, 6325, 16566, 16570, 16568, 16570, 16569, 16571, 6336, 6337, 16571, 16574, 16577, 16576, 17490, 16578, 16577, 16581, 16580, 16579, 16581, 16582, 16584, 17023, 16584, 17512, 17520, 17520, 17519, 6363, 17522, 16589, 17523, 16588, 17521, 16589, 6370, 17522, 16598, 6373, 16596, 15900, 16597, 15900, 6378, 16598, 15904, 6382, 15914, 15913, 16601, 16600, 16599, 6388, 16601, 15914, 17675, 17544, 17675, 17590, 17559, 17551, 17559, 17558, 16611, 16610, 16612, 16610, 16615, 16616, 15925, 16613, 6414, 15930, 6416, 16616, 15930, 17591, 17675, 17675, 17590, 16623, 15937, 16625, 15934, 16625, 6431, 6432, 16621, 15937, 
16626, 16627, 16627, 16628, 17723, 17725, 17728, 17730, 17732, 17734, 17736, 17738, 17740, 17742, 17744, 17746, 17748, 17750, 17752, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 17811, 17807, 17813, 17815, 17816, 17821, 17807, 17826, 17827, 17828, 17832, 17807, 17836, 17346, 17348, 17841, 17843, 17809, 17809, 17808, 17810, 17809, 17847, 17792, 17850, 17793, 17379, 17854, 17794, 17391, 17858, 17860, 17863, 17795, 17866, 17796, 17869, 17871, 17873, 17875, 17797, 17878, 17798, 17882, 17799, 17888, 17891, 17800, 17474, 17895, 17897, 5791, 17903, 17493, 17907, 17909, 17801, 17912, 17915, 17514, 17919, 17921, 17802, 17924, 17926, 17927, 17929, 17931, 17803, 17933, 17935, 17937, 17565, 17941, 17943, 17804, 17946, 17948, 17586, 17951, 17953, 17805, 17956, 17806, 17959, 17962, 17807, 17965, 17966, 17968, 17971, 17973, 17809, 17810, 17809, 17809, 17808, 17979, 17807, 17657, 17985, 17986, 17988, 17990, 17808, 17809, 17809, 17809, 17810, 17315, 17675, 17313, 17675, 17675, 17333, 17675, 17324, 17341, 17675, 17350, 17675, 17687, 17675, 17632, 17675, 17623, 17675, 17844, 18003, 6238, 6239, 6240, 6241, 6242, 6245, 6246, 6248, 6249, 6250, 6251, 6254, 6256, 6257, 6258, 6260, 6261, 6262, 6263, 6264, 6267, 6268, 6272, 6275, 6276, 6280, 6281, 6282, 6283, 6284, 6286, 6288, 6289, 6290, 6291, 6292, 6293, 17880, 17880, 6299, 6300, 6301, 17880, 18051, 17885, 17886, 6307, 6308, 6310, 17884, 17885, 6315, 6316, 6318, 6319, 6321, 6322, 6323, 6324, 6326, 6328, 6330, 6331, 6332, 6334, 6338, 6339, 18079, 6341, 6342, 6343, 6344, 
6345, 6346, 6348, 6349, 6350, 6352, 6354, 6355, 6356, 6358, 6360, 6361, 6362, 6364, 6365, 6366, 6367, 6368, 6369, 6371, 6372, 6374, 6375, 6376, 6377, 6379, 6380, 6383, 6384, 6385, 6386, 6387, 6389, 6390, 6393, 6394, 6395, 6396, 6399, 6400, 6401, 6402, 6404, 6405, 6407, 6408, 6410, 6411, 6412, 6413, 6415, 6417, 6418, 6420, 6421, 6422, 6424, 6426, 6427, 6428, 6429, 6430, 6433, 6434, 6436, 6438, 6439, 6440, 18165, 17675, 17632, 17675, 17623, 17642, 17675, 17675, 17640, 17976, 17675, 18173, 17675, 17675, 17654, 17652, 17670, 17675, 17675, 17675, 17996, 17673, 18178, 18169, 17999, 17998, 18000, 18176, 18175, 18004, 18168, 18167, 18166, 18169, 18171, 18170, 18174, 18176, 18175, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 17812, 5482, 18307, 17822, 5508, 18312, 17833, 5535, 18317, 18320, 5564, 5566, 5567, 5568, 5570, 17848, 5597, 17851, 5618, 17855, 5633, 18334, 17861, 17864, 5658, 17867, 5670, 17876, 5691, 17879, 5705, 17883, 5732, 17889, 17892, 5773, 18353, 17898, 18355, 17904, 18358, 17910, 5819, 17913, 17916, 18364, 17922, 5869, 17925, 18369, 17932, 5905, 18374, 17938, 18377, 17944, 5938, 17947, 18382, 17954, 5976, 17957, 6000, 17960, 17963, 6055, 18392, 18395, 6081, 6083, 6084, 6086, 6087, 17980, 6120, 18404, 18407, 6146, 6147, 6148, 6149, 6151, 6191, 6192, 6193, 6194, 6202, 6203, 6204, 6205, 6211, 6212, 6213, 6215, 6221, 6222, 6223, 6224, 6225, 6226, 18434, 18436, 18010, 18438, 18330, 18441, 18443, 18446, 18449, 18451, 18453, 18030, 18341, 18340, 18342, 18455, 18457, 18459, 18461, 18042, 18465, 18467, 18469, 6297, 6298, 6302, 18473, 6304, 6306, 6311, 6312, 18058, 18484, 18063, 18487, 18488, 18490, 18071, 18495, 18499, 18502, 18504, 18508, 18512, 18516, 18518, 18520, 18522, 18106, 
18109, 18527, 18529, 18530, 18532, 18534, 18536, 18537, 18541, 18545, 18548, 18552, 18554, 18555, 18556, 18559, 18563, 18565, 18156, 18567, 18571, 17726, 6457, 6459, 6460, 6461, 6466, 6467, 6468, 6469, 6470, 6471, 6485, 6486, 6487, 6488, 6493, 6494, 6495, 6496, 6497, 6498, 6510, 6512, 6513, 6515, 18425, 18432, 6520, 6521, 6522, 6551, 6552, 6553, 6556, 18584, 6559, 6560, 6562, 18595, 6565, 6566, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 17303, 18308, 17321, 18313, 17339, 18318, 17356, 18700, 17357, 17364, 17376, 17387, 17405, 17413, 17428, 17438, 17449, 17470, 17901, 17500, 17517, 17528, 18370, 17549, 17556, 17574, 18383, 17596, 17607, 17621, 18393, 17638, 18758, 18759, 17650, 18405, 18766, 18768, 18769, 18771, 18773, 18775, 18777, 18779, 18783, 18785, 18787, 18789, 18791, 6252, 18794, 18709, 18797, 18799, 6273, 6277, 6278, 18806, 18808, 18810, 18812, 18813, 18055, 18479, 18817, 18821, 18823, 18825, 18724, 18829, 18728, 18731, 18834, 18836, 18838, 18840, 18842, 18844, 18742, 18849, 18851, 18854, 18856, 18751, 18861, 18864, 18866, 18868, 18871, 18874, 18876, 18878, 18881, 6517, 6519, 18858, 18889, 6558, 6564, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 17304, 17823, 17340, 18951, 18702, 17365, 17377, 17388, 17406, 17414, 17429, 17439, 17450, 17471, 17501, 17529, 17550, 17575, 17597, 17608, 17622, 18976, 18760, 17651, 18981, 17668, 18945, 18984, 18947, 18986, 18949, 18989, 18992, 18993, 6255, 18452, 18033, 19000, 18462, 19003, 19004, 19005, 19007, 19008, 19011, 6329, 18962, 18505, 6347, 6353, 18964, 19017, 19019, 19021, 18966, 18968, 6406, 19024, 18970, 19026, 6437, 18974, 19030, 18979, 19034, 18885, 6554, 18893, 18897, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 
91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19076, 19094, 19097, 18688, 6195, 18691, 6206, 18694, 6214, 18990, 18703, 18439, 18705, 18707, 19106, 18710, 18711, 18713, 19108, 19109, 18715, 18717, 18049, 19113, 18719, 19115, 18721, 18492, 18722, 19117, 18725, 6335, 18727, 19120, 18729, 19121, 18732, 6359, 18524, 18531, 18734, 18538, 18736, 6392, 18738, 6398, 18741, 18549, 18743, 18557, 18745, 6423, 18747, 18568, 18749, 19132, 18752, 6458, 19031, 18761, 6484, 19035, 18886, 19138, 18894, 18898, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 6190, 19204, 6201, 19206, 6210, 19208, 18697, 6237, 6247, 6253, 18795, 6259, 6265, 6270, 19219, 6279, 6285, 6303, 18818, 6313, 6327, 18826, 6333, 19231, 6340, 18830, 6351, 18831, 6357, 19237, 6381, 6391, 18539, 6397, 18543, 6403, 19247, 6409, 6419, 19251, 6425, 6435, 18857, 6456, 19257, 18755, 6483, 18869, 18764, 19263, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19333, 6220, 18447, 18496, 18500, 18509, 18513, 18832, 18845, 18846, 18550, 19367, 18572, 19028, 6465, 19032, 6492, 19328, 19330, 19358, 19335, 19358, 19365, 19368, 19347, 19336, 19343, 19345, 19350, 19358, 19358, 19352, 19344, 19339, 19341, 19340, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 
82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 18781, 18827, 18517, 18542, 18546, 18561, 18862, 18872, 6511, 6514, 19457, 6523, 6524, 6525, 19363, 6527, 6528, 6529, 6530, 19337, 19369, 6534, 6535, 6536, 19348, 19363, 6542, 6543, 19354, 6545, 6546, 6548, 6549, 6550, 19470, 19472, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19332, 6518, 6526, 6531, 19361, 6533, 6537, 19356, 19359, 19366, 6541, 6544, 19350, 19596, 19600, 19602, 19607, 19614, 19617, 19371, 6557, 19374, 6563, 19593, 19592, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 6516, 6532, 6538, 6539, 6540, 6547, 19714, 19605, 19610, 19723, 19727, 6555, 6561, 19734, 6570, 6571, 19732, 19713, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 
121, 122, 123, 124, 125, 126, 127, 19841, 19842, 19844, 19615, 19846, 19729, 6567, 19851, 19851, 19851, 19852, 6574, 19840, 6576, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 19847, 19969, 19848, 19730, 19850, 6568, 6569, 6572, 6573, 6575, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20097, 19973, 20101, 19854, 20103, 19979, 19981, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20224, 20099, 20227, 20229, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20353, 20355, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20480, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 6577, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20736, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 
29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 20864, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127}; bool h_Op[]= { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 
1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 
0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; #define THREADS_PER_BLOCK 128 #define BLOCKS_PER_GRID 1 #define SIZE_OF_IN 6656 #define SIZE_OF_AC 14464 __device__ void ac(float *A, const int *B, const int *C, const bool *Op, int n_iter) { int i= blockDim.x * blockIdx.x + threadIdx.x; __shared__ float R[165*THREADS_PER_BLOCK]; const int t= THREADS_PER_BLOCK; __shared__ float final; final=0; R[i + 0*t] = A[i + 0*t]; R[i + 1*t] = A[i + 1*t]; R[i + 2*t] = A[i + 2*t]; R[i + 3*t] = A[i + 3*t]; R[i + 4*t] = A[i + 4*t]; R[i + 5*t] = A[i + 5*t]; R[i + 6*t] = A[i + 6*t]; R[i + 7*t] = A[i + 7*t]; R[i + 8*t] = A[i + 8*t]; R[i + 9*t] = A[i + 9*t]; R[i + 10*t] = A[i + 10*t]; R[i + 11*t] = A[i + 11*t]; R[i + 12*t] = A[i + 12*t]; R[i + 13*t] = A[i + 13*t]; R[i + 14*t] = A[i + 14*t]; R[i + 15*t] = A[i + 15*t]; R[i + 16*t] = A[i + 16*t]; R[i + 17*t] = A[i + 17*t]; R[i + 18*t] = A[i + 18*t]; R[i + 19*t] = A[i + 19*t]; R[i + 20*t] = A[i + 20*t]; R[i + 21*t] = A[i + 21*t]; R[i + 22*t] = A[i + 22*t]; R[i + 23*t] = A[i + 23*t]; R[i + 24*t] = A[i + 24*t]; R[i + 25*t] = A[i + 25*t]; R[i + 26*t] = A[i + 26*t]; R[i + 27*t] = A[i + 27*t]; R[i + 28*t] = A[i + 28*t]; R[i + 29*t] = A[i + 29*t]; R[i + 30*t] = A[i + 30*t]; R[i + 31*t] = A[i + 31*t]; R[i + 32*t] = A[i + 32*t]; R[i + 33*t] = A[i + 33*t]; R[i + 34*t] = A[i + 34*t]; R[i + 35*t] = A[i + 35*t]; R[i + 36*t] = A[i + 36*t]; R[i + 37*t] = A[i + 37*t]; R[i + 38*t] = A[i + 38*t]; R[i + 39*t] = A[i + 39*t]; R[i + 40*t] = A[i + 40*t]; R[i + 41*t] = A[i + 41*t]; R[i + 42*t] = A[i + 42*t]; R[i + 43*t] = A[i + 43*t]; R[i + 44*t] = A[i + 44*t]; R[i + 45*t] = A[i + 45*t]; R[i + 46*t] = A[i + 
46*t]; R[i + 47*t] = A[i + 47*t]; R[i + 48*t] = A[i + 48*t]; R[i + 49*t] = A[i + 49*t]; R[i + 50*t] = A[i + 50*t]; R[i + 51*t] = A[i + 51*t]; __syncthreads(); for (int iter=0; iter< n_iter; iter++) { R[i + 52*t] = Op[i + 0*t] ? R[B[i + 0*t]] * R[C[i + 0*t]] : R[B[i + 0*t]] + R[C[i + 0*t]]; R[i + 53*t] = Op[i + 1*t] ? R[B[i + 1*t]] * R[C[i + 1*t]] : R[B[i + 1*t]] + R[C[i + 1*t]]; R[i + 54*t] = Op[i + 2*t] ? R[B[i + 2*t]] * R[C[i + 2*t]] : R[B[i + 2*t]] + R[C[i + 2*t]]; R[i + 55*t] = Op[i + 3*t] ? R[B[i + 3*t]] * R[C[i + 3*t]] : R[B[i + 3*t]] + R[C[i + 3*t]]; R[i + 56*t] = Op[i + 4*t] ? R[B[i + 4*t]] * R[C[i + 4*t]] : R[B[i + 4*t]] + R[C[i + 4*t]]; R[i + 57*t] = Op[i + 5*t] ? R[B[i + 5*t]] * R[C[i + 5*t]] : R[B[i + 5*t]] + R[C[i + 5*t]]; R[i + 58*t] = Op[i + 6*t] ? R[B[i + 6*t]] * R[C[i + 6*t]] : R[B[i + 6*t]] + R[C[i + 6*t]]; R[i + 59*t] = Op[i + 7*t] ? R[B[i + 7*t]] * R[C[i + 7*t]] : R[B[i + 7*t]] + R[C[i + 7*t]]; R[i + 60*t] = Op[i + 8*t] ? R[B[i + 8*t]] * R[C[i + 8*t]] : R[B[i + 8*t]] + R[C[i + 8*t]]; R[i + 61*t] = Op[i + 9*t] ? R[B[i + 9*t]] * R[C[i + 9*t]] : R[B[i + 9*t]] + R[C[i + 9*t]]; R[i + 62*t] = Op[i + 10*t] ? R[B[i + 10*t]] * R[C[i + 10*t]] : R[B[i + 10*t]] + R[C[i + 10*t]]; __syncthreads(); R[i + 63*t] = Op[i + 11*t] ? R[B[i + 11*t]] * R[C[i + 11*t]] : R[B[i + 11*t]] + R[C[i + 11*t]]; R[i + 64*t] = Op[i + 12*t] ? R[B[i + 12*t]] * R[C[i + 12*t]] : R[B[i + 12*t]] + R[C[i + 12*t]]; R[i + 65*t] = Op[i + 13*t] ? R[B[i + 13*t]] * R[C[i + 13*t]] : R[B[i + 13*t]] + R[C[i + 13*t]]; R[i + 66*t] = Op[i + 14*t] ? R[B[i + 14*t]] * R[C[i + 14*t]] : R[B[i + 14*t]] + R[C[i + 14*t]]; R[i + 67*t] = Op[i + 15*t] ? R[B[i + 15*t]] * R[C[i + 15*t]] : R[B[i + 15*t]] + R[C[i + 15*t]]; R[i + 68*t] = Op[i + 16*t] ? R[B[i + 16*t]] * R[C[i + 16*t]] : R[B[i + 16*t]] + R[C[i + 16*t]]; R[i + 69*t] = Op[i + 17*t] ? R[B[i + 17*t]] * R[C[i + 17*t]] : R[B[i + 17*t]] + R[C[i + 17*t]]; __syncthreads(); R[i + 70*t] = Op[i + 18*t] ? 
R[B[i + 18*t]] * R[C[i + 18*t]] : R[B[i + 18*t]] + R[C[i + 18*t]]; R[i + 71*t] = Op[i + 19*t] ? R[B[i + 19*t]] * R[C[i + 19*t]] : R[B[i + 19*t]] + R[C[i + 19*t]]; R[i + 72*t] = Op[i + 20*t] ? R[B[i + 20*t]] * R[C[i + 20*t]] : R[B[i + 20*t]] + R[C[i + 20*t]]; R[i + 73*t] = Op[i + 21*t] ? R[B[i + 21*t]] * R[C[i + 21*t]] : R[B[i + 21*t]] + R[C[i + 21*t]]; R[i + 74*t] = Op[i + 22*t] ? R[B[i + 22*t]] * R[C[i + 22*t]] : R[B[i + 22*t]] + R[C[i + 22*t]]; R[i + 75*t] = Op[i + 23*t] ? R[B[i + 23*t]] * R[C[i + 23*t]] : R[B[i + 23*t]] + R[C[i + 23*t]]; R[i + 76*t] = Op[i + 24*t] ? R[B[i + 24*t]] * R[C[i + 24*t]] : R[B[i + 24*t]] + R[C[i + 24*t]]; R[i + 77*t] = Op[i + 25*t] ? R[B[i + 25*t]] * R[C[i + 25*t]] : R[B[i + 25*t]] + R[C[i + 25*t]]; R[i + 78*t] = Op[i + 26*t] ? R[B[i + 26*t]] * R[C[i + 26*t]] : R[B[i + 26*t]] + R[C[i + 26*t]]; __syncthreads(); R[i + 79*t] = Op[i + 27*t] ? R[B[i + 27*t]] * R[C[i + 27*t]] : R[B[i + 27*t]] + R[C[i + 27*t]]; R[i + 80*t] = Op[i + 28*t] ? R[B[i + 28*t]] * R[C[i + 28*t]] : R[B[i + 28*t]] + R[C[i + 28*t]]; R[i + 81*t] = Op[i + 29*t] ? R[B[i + 29*t]] * R[C[i + 29*t]] : R[B[i + 29*t]] + R[C[i + 29*t]]; R[i + 82*t] = Op[i + 30*t] ? R[B[i + 30*t]] * R[C[i + 30*t]] : R[B[i + 30*t]] + R[C[i + 30*t]]; R[i + 83*t] = Op[i + 31*t] ? R[B[i + 31*t]] * R[C[i + 31*t]] : R[B[i + 31*t]] + R[C[i + 31*t]]; R[i + 84*t] = Op[i + 32*t] ? R[B[i + 32*t]] * R[C[i + 32*t]] : R[B[i + 32*t]] + R[C[i + 32*t]]; R[i + 85*t] = Op[i + 33*t] ? R[B[i + 33*t]] * R[C[i + 33*t]] : R[B[i + 33*t]] + R[C[i + 33*t]]; R[i + 86*t] = Op[i + 34*t] ? R[B[i + 34*t]] * R[C[i + 34*t]] : R[B[i + 34*t]] + R[C[i + 34*t]]; R[i + 87*t] = Op[i + 35*t] ? R[B[i + 35*t]] * R[C[i + 35*t]] : R[B[i + 35*t]] + R[C[i + 35*t]]; __syncthreads(); R[i + 88*t] = Op[i + 36*t] ? R[B[i + 36*t]] * R[C[i + 36*t]] : R[B[i + 36*t]] + R[C[i + 36*t]]; R[i + 89*t] = Op[i + 37*t] ? R[B[i + 37*t]] * R[C[i + 37*t]] : R[B[i + 37*t]] + R[C[i + 37*t]]; R[i + 90*t] = Op[i + 38*t] ? 
R[B[i + 38*t]] * R[C[i + 38*t]] : R[B[i + 38*t]] + R[C[i + 38*t]]; R[i + 91*t] = Op[i + 39*t] ? R[B[i + 39*t]] * R[C[i + 39*t]] : R[B[i + 39*t]] + R[C[i + 39*t]]; R[i + 92*t] = Op[i + 40*t] ? R[B[i + 40*t]] * R[C[i + 40*t]] : R[B[i + 40*t]] + R[C[i + 40*t]]; R[i + 93*t] = Op[i + 41*t] ? R[B[i + 41*t]] * R[C[i + 41*t]] : R[B[i + 41*t]] + R[C[i + 41*t]]; R[i + 94*t] = Op[i + 42*t] ? R[B[i + 42*t]] * R[C[i + 42*t]] : R[B[i + 42*t]] + R[C[i + 42*t]]; __syncthreads(); R[i + 95*t] = Op[i + 43*t] ? R[B[i + 43*t]] * R[C[i + 43*t]] : R[B[i + 43*t]] + R[C[i + 43*t]]; R[i + 96*t] = Op[i + 44*t] ? R[B[i + 44*t]] * R[C[i + 44*t]] : R[B[i + 44*t]] + R[C[i + 44*t]]; R[i + 97*t] = Op[i + 45*t] ? R[B[i + 45*t]] * R[C[i + 45*t]] : R[B[i + 45*t]] + R[C[i + 45*t]]; R[i + 98*t] = Op[i + 46*t] ? R[B[i + 46*t]] * R[C[i + 46*t]] : R[B[i + 46*t]] + R[C[i + 46*t]]; R[i + 99*t] = Op[i + 47*t] ? R[B[i + 47*t]] * R[C[i + 47*t]] : R[B[i + 47*t]] + R[C[i + 47*t]]; R[i + 100*t] = Op[i + 48*t] ? R[B[i + 48*t]] * R[C[i + 48*t]] : R[B[i + 48*t]] + R[C[i + 48*t]]; R[i + 101*t] = Op[i + 49*t] ? R[B[i + 49*t]] * R[C[i + 49*t]] : R[B[i + 49*t]] + R[C[i + 49*t]]; __syncthreads(); R[i + 102*t] = Op[i + 50*t] ? R[B[i + 50*t]] * R[C[i + 50*t]] : R[B[i + 50*t]] + R[C[i + 50*t]]; R[i + 103*t] = Op[i + 51*t] ? R[B[i + 51*t]] * R[C[i + 51*t]] : R[B[i + 51*t]] + R[C[i + 51*t]]; R[i + 104*t] = Op[i + 52*t] ? R[B[i + 52*t]] * R[C[i + 52*t]] : R[B[i + 52*t]] + R[C[i + 52*t]]; R[i + 105*t] = Op[i + 53*t] ? R[B[i + 53*t]] * R[C[i + 53*t]] : R[B[i + 53*t]] + R[C[i + 53*t]]; R[i + 106*t] = Op[i + 54*t] ? R[B[i + 54*t]] * R[C[i + 54*t]] : R[B[i + 54*t]] + R[C[i + 54*t]]; R[i + 107*t] = Op[i + 55*t] ? R[B[i + 55*t]] * R[C[i + 55*t]] : R[B[i + 55*t]] + R[C[i + 55*t]]; __syncthreads(); R[i + 108*t] = Op[i + 56*t] ? R[B[i + 56*t]] * R[C[i + 56*t]] : R[B[i + 56*t]] + R[C[i + 56*t]]; R[i + 109*t] = Op[i + 57*t] ? R[B[i + 57*t]] * R[C[i + 57*t]] : R[B[i + 57*t]] + R[C[i + 57*t]]; R[i + 110*t] = Op[i + 58*t] ? 
R[B[i + 58*t]] * R[C[i + 58*t]] : R[B[i + 58*t]] + R[C[i + 58*t]]; R[i + 111*t] = Op[i + 59*t] ? R[B[i + 59*t]] * R[C[i + 59*t]] : R[B[i + 59*t]] + R[C[i + 59*t]]; R[i + 112*t] = Op[i + 60*t] ? R[B[i + 60*t]] * R[C[i + 60*t]] : R[B[i + 60*t]] + R[C[i + 60*t]]; R[i + 113*t] = Op[i + 61*t] ? R[B[i + 61*t]] * R[C[i + 61*t]] : R[B[i + 61*t]] + R[C[i + 61*t]]; R[i + 114*t] = Op[i + 62*t] ? R[B[i + 62*t]] * R[C[i + 62*t]] : R[B[i + 62*t]] + R[C[i + 62*t]]; __syncthreads(); R[i + 115*t] = Op[i + 63*t] ? R[B[i + 63*t]] * R[C[i + 63*t]] : R[B[i + 63*t]] + R[C[i + 63*t]]; R[i + 116*t] = Op[i + 64*t] ? R[B[i + 64*t]] * R[C[i + 64*t]] : R[B[i + 64*t]] + R[C[i + 64*t]]; R[i + 117*t] = Op[i + 65*t] ? R[B[i + 65*t]] * R[C[i + 65*t]] : R[B[i + 65*t]] + R[C[i + 65*t]]; R[i + 118*t] = Op[i + 66*t] ? R[B[i + 66*t]] * R[C[i + 66*t]] : R[B[i + 66*t]] + R[C[i + 66*t]]; R[i + 119*t] = Op[i + 67*t] ? R[B[i + 67*t]] * R[C[i + 67*t]] : R[B[i + 67*t]] + R[C[i + 67*t]]; __syncthreads(); R[i + 120*t] = Op[i + 68*t] ? R[B[i + 68*t]] * R[C[i + 68*t]] : R[B[i + 68*t]] + R[C[i + 68*t]]; R[i + 121*t] = Op[i + 69*t] ? R[B[i + 69*t]] * R[C[i + 69*t]] : R[B[i + 69*t]] + R[C[i + 69*t]]; R[i + 122*t] = Op[i + 70*t] ? R[B[i + 70*t]] * R[C[i + 70*t]] : R[B[i + 70*t]] + R[C[i + 70*t]]; R[i + 123*t] = Op[i + 71*t] ? R[B[i + 71*t]] * R[C[i + 71*t]] : R[B[i + 71*t]] + R[C[i + 71*t]]; R[i + 124*t] = Op[i + 72*t] ? R[B[i + 72*t]] * R[C[i + 72*t]] : R[B[i + 72*t]] + R[C[i + 72*t]]; R[i + 125*t] = Op[i + 73*t] ? R[B[i + 73*t]] * R[C[i + 73*t]] : R[B[i + 73*t]] + R[C[i + 73*t]]; __syncthreads(); R[i + 126*t] = Op[i + 74*t] ? R[B[i + 74*t]] * R[C[i + 74*t]] : R[B[i + 74*t]] + R[C[i + 74*t]]; R[i + 127*t] = Op[i + 75*t] ? R[B[i + 75*t]] * R[C[i + 75*t]] : R[B[i + 75*t]] + R[C[i + 75*t]]; R[i + 128*t] = Op[i + 76*t] ? R[B[i + 76*t]] * R[C[i + 76*t]] : R[B[i + 76*t]] + R[C[i + 76*t]]; R[i + 129*t] = Op[i + 77*t] ? R[B[i + 77*t]] * R[C[i + 77*t]] : R[B[i + 77*t]] + R[C[i + 77*t]]; R[i + 130*t] = Op[i + 78*t] ? 
R[B[i + 78*t]] * R[C[i + 78*t]] : R[B[i + 78*t]] + R[C[i + 78*t]]; __syncthreads(); R[i + 131*t] = Op[i + 79*t] ? R[B[i + 79*t]] * R[C[i + 79*t]] : R[B[i + 79*t]] + R[C[i + 79*t]]; R[i + 132*t] = Op[i + 80*t] ? R[B[i + 80*t]] * R[C[i + 80*t]] : R[B[i + 80*t]] + R[C[i + 80*t]]; R[i + 133*t] = Op[i + 81*t] ? R[B[i + 81*t]] * R[C[i + 81*t]] : R[B[i + 81*t]] + R[C[i + 81*t]]; R[i + 134*t] = Op[i + 82*t] ? R[B[i + 82*t]] * R[C[i + 82*t]] : R[B[i + 82*t]] + R[C[i + 82*t]]; __syncthreads(); R[i + 135*t] = Op[i + 83*t] ? R[B[i + 83*t]] * R[C[i + 83*t]] : R[B[i + 83*t]] + R[C[i + 83*t]]; R[i + 136*t] = Op[i + 84*t] ? R[B[i + 84*t]] * R[C[i + 84*t]] : R[B[i + 84*t]] + R[C[i + 84*t]]; R[i + 137*t] = Op[i + 85*t] ? R[B[i + 85*t]] * R[C[i + 85*t]] : R[B[i + 85*t]] + R[C[i + 85*t]]; R[i + 138*t] = Op[i + 86*t] ? R[B[i + 86*t]] * R[C[i + 86*t]] : R[B[i + 86*t]] + R[C[i + 86*t]]; __syncthreads(); R[i + 139*t] = Op[i + 87*t] ? R[B[i + 87*t]] * R[C[i + 87*t]] : R[B[i + 87*t]] + R[C[i + 87*t]]; R[i + 140*t] = Op[i + 88*t] ? R[B[i + 88*t]] * R[C[i + 88*t]] : R[B[i + 88*t]] + R[C[i + 88*t]]; R[i + 141*t] = Op[i + 89*t] ? R[B[i + 89*t]] * R[C[i + 89*t]] : R[B[i + 89*t]] + R[C[i + 89*t]]; R[i + 142*t] = Op[i + 90*t] ? R[B[i + 90*t]] * R[C[i + 90*t]] : R[B[i + 90*t]] + R[C[i + 90*t]]; __syncthreads(); R[i + 143*t] = Op[i + 91*t] ? R[B[i + 91*t]] * R[C[i + 91*t]] : R[B[i + 91*t]] + R[C[i + 91*t]]; R[i + 144*t] = Op[i + 92*t] ? R[B[i + 92*t]] * R[C[i + 92*t]] : R[B[i + 92*t]] + R[C[i + 92*t]]; R[i + 145*t] = Op[i + 93*t] ? R[B[i + 93*t]] * R[C[i + 93*t]] : R[B[i + 93*t]] + R[C[i + 93*t]]; __syncthreads(); R[i + 146*t] = Op[i + 94*t] ? R[B[i + 94*t]] * R[C[i + 94*t]] : R[B[i + 94*t]] + R[C[i + 94*t]]; R[i + 147*t] = Op[i + 95*t] ? R[B[i + 95*t]] * R[C[i + 95*t]] : R[B[i + 95*t]] + R[C[i + 95*t]]; __syncthreads(); R[i + 148*t] = Op[i + 96*t] ? R[B[i + 96*t]] * R[C[i + 96*t]] : R[B[i + 96*t]] + R[C[i + 96*t]]; __syncthreads(); R[i + 149*t] = Op[i + 97*t] ? 
R[B[i + 97*t]] * R[C[i + 97*t]] : R[B[i + 97*t]] + R[C[i + 97*t]]; __syncthreads(); R[i + 150*t] = Op[i + 98*t] ? R[B[i + 98*t]] * R[C[i + 98*t]] : R[B[i + 98*t]] + R[C[i + 98*t]]; __syncthreads(); R[i + 151*t] = Op[i + 99*t] ? R[B[i + 99*t]] * R[C[i + 99*t]] : R[B[i + 99*t]] + R[C[i + 99*t]]; __syncthreads(); R[i + 152*t] = Op[i + 100*t] ? R[B[i + 100*t]] * R[C[i + 100*t]] : R[B[i + 100*t]] + R[C[i + 100*t]]; __syncthreads(); R[i + 153*t] = Op[i + 101*t] ? R[B[i + 101*t]] * R[C[i + 101*t]] : R[B[i + 101*t]] + R[C[i + 101*t]]; __syncthreads(); R[i + 154*t] = Op[i + 102*t] ? R[B[i + 102*t]] * R[C[i + 102*t]] : R[B[i + 102*t]] + R[C[i + 102*t]]; __syncthreads(); R[i + 155*t] = Op[i + 103*t] ? R[B[i + 103*t]] * R[C[i + 103*t]] : R[B[i + 103*t]] + R[C[i + 103*t]]; __syncthreads(); R[i + 156*t] = Op[i + 104*t] ? R[B[i + 104*t]] * R[C[i + 104*t]] : R[B[i + 104*t]] + R[C[i + 104*t]]; __syncthreads(); R[i + 157*t] = Op[i + 105*t] ? R[B[i + 105*t]] * R[C[i + 105*t]] : R[B[i + 105*t]] + R[C[i + 105*t]]; __syncthreads(); R[i + 158*t] = Op[i + 106*t] ? R[B[i + 106*t]] * R[C[i + 106*t]] : R[B[i + 106*t]] + R[C[i + 106*t]]; __syncthreads(); R[i + 159*t] = Op[i + 107*t] ? R[B[i + 107*t]] * R[C[i + 107*t]] : R[B[i + 107*t]] + R[C[i + 107*t]]; __syncthreads(); R[i + 160*t] = Op[i + 108*t] ? R[B[i + 108*t]] * R[C[i + 108*t]] : R[B[i + 108*t]] + R[C[i + 108*t]]; __syncthreads(); R[i + 161*t] = Op[i + 109*t] ? R[B[i + 109*t]] * R[C[i + 109*t]] : R[B[i + 109*t]] + R[C[i + 109*t]]; __syncthreads(); R[i + 162*t] = Op[i + 110*t] ? R[B[i + 110*t]] * R[C[i + 110*t]] : R[B[i + 110*t]] + R[C[i + 110*t]]; __syncthreads(); R[i + 163*t] = Op[i + 111*t] ? R[B[i + 111*t]] * R[C[i + 111*t]] : R[B[i + 111*t]] + R[C[i + 111*t]]; __syncthreads(); R[i + 164*t] = Op[i + 112*t] ? R[B[i + 112*t]] * R[C[i + 112*t]] : R[B[i + 112*t]] + R[C[i + 112*t]]; if (i==0) { final += R[164*t]; } __syncthreads(); } if (i==0) { A[0]= final;} }
76
#include <stdio.h>

#define ARRAY_SIZE 10000
#define TPB 256

// Single-element SAXPY: returns a*x + y.
__device__ float saxpy(float x, float y, float a)
{
    return a * x + y;
}

// One thread per element of y.
// BUG FIX: the grid is rounded up to a whole number of blocks
// ((10000 + 255) / 256 = 40 blocks -> 10240 threads), so the trailing
// 240 threads previously read and wrote past the end of the arrays.
// The kernel now takes the element count n and guards against it.
__global__ void saxpyKernel(float* x, float* y, float a, int n)
{
    const int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        y[i] = saxpy(x[i], y[i], a);
    }
}

// CPU reference implementation used to validate the GPU result.
__host__ void saxpyCPU(float* x, float* y, float a)
{
    for (int i = 0; i < ARRAY_SIZE; i++) {
        y[i] = a * x[i] + y[i];
    }
}

int main()
{
    // Host-side input/output arrays.
    float x_cpu[ARRAY_SIZE];
    float y_cpu[ARRAY_SIZE];
    float* x_gpu = 0;
    float* y_gpu = 0;
    float y_res[ARRAY_SIZE];
    const float a = 2;
    bool flag;

    // Array initialization: y = [0, 1, 2, ...], x = all ones.
    for (int i = 0; i < ARRAY_SIZE; i++) {
        y_cpu[i] = i;
        x_cpu[i] = 1;
    }

    /* GPU CALCULATION */
    // Allocate device memory.
    cudaMalloc(&x_gpu, ARRAY_SIZE * sizeof(float));
    cudaMalloc(&y_gpu, ARRAY_SIZE * sizeof(float));

    // Copy the arrays from CPU to GPU.
    cudaMemcpy(x_gpu, x_cpu, ARRAY_SIZE * sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(y_gpu, y_cpu, ARRAY_SIZE * sizeof(float), cudaMemcpyHostToDevice);

    // Launch with a rounded-up grid; the in-kernel bounds guard handles
    // the tail threads beyond ARRAY_SIZE.
    saxpyKernel<<<(ARRAY_SIZE + TPB - 1) / TPB, TPB>>>(x_gpu, y_gpu, a, ARRAY_SIZE);

    // Blocking copy back to the host (also synchronizes with the kernel).
    cudaMemcpy(y_res, y_gpu, ARRAY_SIZE * sizeof(float), cudaMemcpyDeviceToHost);
    printf("Computing SAXPY on the GPU… Done!\n");

    /* CPU CALCULATION */
    saxpyCPU(x_cpu, y_cpu, a);
    printf("Computing SAXPY on the CPU… Done!\n");

    /* COMPARE THE RESULTS */
    // Exact float equality is safe here: all values (2*1 + i) are exactly
    // representable and both paths perform the identical operation.
    flag = 1;
    for (int i = 0; i < ARRAY_SIZE; i++) {
        if (y_res[i] != y_cpu[i]) {
            flag = 0;
            break;
        }
    }
    printf("Comparing the output for each implementation… ");
    if (flag) {
        printf("Correct!\n");
    } else {
        printf("Incorrect\n");
    }

    // Free the device memory.
    cudaFree(x_gpu);
    cudaFree(y_gpu);
    return 0;
}
77
#include<iostream>
#include<string>
#include<malloc.h>
#include<fstream>
#include<sstream>
#include<vector>
#include<cmath>
#include<cstdio>
#include<stdlib.h>
#include<cuda.h>
#include<cuda_runtime.h>
#include <map>
#include <iomanip>
#include <sys/time.h>
#include<assert.h>

#define THREADSPERBLOCK 256
#define EPS 0.01

using namespace std;

// Squared difference of the first component of two points.
// Used to accumulate per-dimension contributions one at a time.
template <class T>
__device__ static T distanceComponentGPU(T *elementA, T *elementB)
{
    T dist = 0.0f;
    dist = elementA[0] - elementB[0];
    dist = dist * dist;
    return dist;
}

// Sum n_dim squared components and take the square root (Euclidean norm).
template <class T>
__device__ static T distanceFinalizeGPU(int n_dim, T *components)
{
    T dist = 0.0f;
    for (unsigned int cnt = 0; cnt < n_dim; cnt++)
        dist += components[cnt];
    dist = sqrt(dist);
    return dist;
}

// Full Euclidean distance between two n_dim-dimensional points.
// NOTE(review): appears unused by the kernels below — candidates read
// coordinates in transposed (dimension-major) layout instead.
template <class T>
__device__ static T distanceGPU(int n_dim, T *elementA, T *elementB)
{
    T dist = 0.0f;
    for (unsigned int cnt = 0; cnt < n_dim; cnt++) {
        T di = (elementA[cnt] - elementB[cnt]);
        dist += di * di;
    }
    dist = sqrt(dist);
    return dist;
}

class Internal;

// File/DataIO facade: reads the data set and provides small helpers for
// allocating (optionally zeroed / preset / initialized) device memory.
class DataIO
{
public:
    DataIO();
    ~DataIO();
    float* readData(const char* fileName);
    float* getData();
    const char* getFileName();
    int getNumElements();
    int getNumClusters();
    int getDimensions();
    int getDataSize();
    void setDataSize(int numData);
    void printClusters(int numData, int numClust, int numDim, float *data, float *ctr, int *assign);

    // Allocate memSize bytes on the device and zero them.
    template <typename T>
    T allocZeroedDeviceMemory(int memSize)
    {
        T retVal;
        cudaMalloc((void**) &retVal, memSize);
        cudaMemset(retVal, 0, memSize);
        return retVal;
    }

    // Allocate memSize bytes and byte-fill them with preSet.
    template <typename T>
    T allocInitializedDeviceMemory(int memSize, int preSet)
    {
        T retVal;
        cudaMalloc((void**) &retVal, memSize);
        cudaMemset(retVal, preSet, memSize);
        return retVal;
    }

    // Allocate memSize bytes and copy host buffer 'data' into them.
    template <typename T>
    T allocDeviceMemory(int memSize, T data)
    {
        T retVal;
        cudaMalloc((void**) &retVal, memSize);
        cudaMemcpy(retVal, data, memSize, cudaMemcpyHostToDevice);
        return retVal;
    }

    // Allocate memSize bytes, uninitialized.
    template <typename T>
    T allocDeviceMemory(int memSize)
    {
        T retVal;
        cudaMalloc((void**) &retVal, memSize);
        return retVal;
    }

private:
    Internal* ip;  // pimpl: all state lives in Internal
};

// Implementation detail of DataIO (pimpl). Holds data-set dimensions and
// the raw coordinate buffer.
class Internal
{
private:
    int N;          // number of data elements (conformations)
    int K;          // number of clusters
    int n_dim;      // dimensionality of each element
    int dataSize;   // optional override for number of floats to read (0 = N*n_dim)
    bool deviceCheck;
    bool printTime;
    float* data;    // coordinate buffer, allocated in readFile
    const char* fileName;
    const char* execName;

public:
    Internal()
    {
        N=K= dataSize = 0;
        n_dim=0;
        deviceCheck = true;
        printTime = true;
        data = NULL;
    }

    // NOTE(review): data is allocated with malloc() in readFile but released
    // with delete here — mismatched allocator (and main() also free()s the
    // same buffer, risking a double free). Should be free(data); verify.
    ~ Internal()
    {
        delete data;
    }

    int getNumElements() { return N; };
    int getNumClusters() { return K; };
    int getDimensions() { return n_dim; };
    const char* getExecName() { return execName; }
    const char* getFileName() { return fileName; }
    int getDataSize() { return dataSize; }
    void setExecName(const char* en) { execName = en; }
    void setFileName(const char* fn) { fileName = fn; }
    void setDataSize(int numData) { dataSize = numData; }
    float* getData() { return data; }

    void printParams()
    {
        cout<<"Number of Conformations : "<<N<<endl;
        cout<<"Number of Clusters : "<<K<<endl;
    }

    // Read the data file: first three lines are N, K, n_dim; the remaining
    // lines are one float each. Returns the malloc'd buffer or NULL on a
    // parse error. Exits the process if the file cannot be opened.
    float* readFile(const char* fileName)
    {
        string line;
        ifstream infile;
        float pars[3];
        int numData;
        infile.open(fileName, ios::in);
        if (!infile.is_open()) {
            cout << "Error in readFile(): Unable to find or open file \""
                 << fileName << "\"." << endl;
            exit(1);
        }
        assert(!infile.fail());
        try
        {
            // Header: N, K, n_dim — one value per line.
            for (int i = 0; i < 3; i++)
            {
                getline(infile, line);
                if (infile.eof()) throw 42;
                istringstream buffer(line);
                if (!(buffer >> pars[i])) throw 1337;
            }
            N = (int) pars[0];
            K = (int) pars[1];
            n_dim = (int) pars[2];
            // dataSize == 0 means "read the whole set" (N * n_dim values).
            if ((numData = dataSize) == 0)
            {
                printParams();
                numData = N * n_dim;
            }
            data = (float*) malloc(sizeof(float) * numData);
            memset(data, 0, sizeof(float) * numData);
            // Body: one float per line.
            for (int i = 0; i < numData; i++)
            {
                getline(infile, line);
                if (infile.eof()) throw 42;
                istringstream buffer(line);
                if (!(buffer >> data[i])) throw 1337;
            }
        }
        catch (int e)
        {
            cout << "Error in dataIO::readFile(): ";
            if (e == 42) cout << "reached end of file \"" << fileName << "\" prematurely" << endl;
            else if (e == 1337) cout << "can only read floating point numbers" << endl;
            else cout << "reading file content failed" << endl;
            cout << "   Please check parameters and file format" << endl;
            return NULL;
        }
        infile.close();
        assert(!infile.fail());
        return data;
    }
};

DataIO::DataIO() { ip = new Internal; }

DataIO::~DataIO() { delete ip; }

float* DataIO::readData(const char* fileName)
{
    float* data;
    data = ip->readFile(fileName);
    return data;
}

float* DataIO::getData() { return ip->getData(); }
const char* DataIO::getFileName() { return ip->getFileName(); }
int DataIO::getNumElements() { return ip->getNumElements(); }
int DataIO::getNumClusters() { return ip->getNumClusters(); }
int DataIO::getDimensions() { return ip->getDimensions(); }
int DataIO::getDataSize() { return ip->getDataSize(); }

// Pretty-print each cluster's member vectors and (optionally) its center.
// Uses backspace characters to trim trailing ", " separators.
void DataIO::printClusters(int numData, int numClust, int n_dim, float *data, float *ctr, int *assign)
{
    cout << "Data clusters:" << endl;
    for (int i = 0; i < numClust; i++)
    {
        cout << "Cluster " << i << " (";
        int count = 0;
        for (int j = 0; j < numData; j++)
        {
            if (assign[j] == i)
            {
                // print out vectors
                cout << "{";
                for (int cnt = 0; cnt < n_dim; cnt++)
                    cout << data[n_dim * j + cnt] << ((cnt < n_dim-1) ? ", " : "");
                cout << "}, ";
                count++;
            }
        }
        if (count > 0) cout << "\b\b";
        if (ctr != NULL)
        {
            cout << ") ctr {";
            for (int cnt = 0; cnt < n_dim; cnt++) cout << ctr[n_dim * i + cnt] << ", ";
            cout << "\b\b}" << endl;
        }
        else cout << ")" << endl;
    }
}

// Named stopwatch built on CUDA events; supports reporting timers that are
// still running (measured against "now").
class Timing{
private:
    map<string, cudaEvent_t> startMap;  // timer name -> start event
    map<string, cudaEvent_t> stopMap;   // timer name -> stop event
public:
    Timing();
    ~Timing();
    void start(string timerName);
    void stop(string timerName);
    void report();
    void report(string timerName);
};

Timing::Timing(){ }
Timing::~Timing(){ }

void Timing::start(string timerName){
    cudaEventCreate(&startMap[timerName]);
    cudaEventRecord(startMap[timerName], 0);
}

void Timing::stop(string timerName){
    cudaEventCreate(&stopMap[timerName]);
    cudaEventRecord(stopMap[timerName], 0);
}

// Print all timers; running timers are measured against a freshly recorded
// "current time" event.
void Timing::report(){
    cudaEvent_t currentTime;
    cudaEventCreate(&currentTime);
    cudaEventRecord(currentTime,0);
    float timeMs;
    string status = "";
    cout << "Current Timings:" << endl;
    cout << setw(15) << "Timer" << setw(15) << "Time (ms)" << setw(15) << "Status" << endl;
    for( map<string, cudaEvent_t>::iterator it=startMap.begin(); it!=startMap.end() ; ++it){
        if(stopMap.find((*it).first) != stopMap.end()){
            cudaEventElapsedTime(&timeMs, (*it).second, stopMap[(*it).first]);
            status="done";
        } else {
            cudaEventElapsedTime(&timeMs, (*it).second , currentTime);
            status="running";
        }
        cout << setw(15) << (*it).first << setw(15) << timeMs << setw(15) << status << endl;
    }
}

// Print a single timer by name (running or done).
void Timing::report(string timerName){
    cudaEvent_t currentTime;
    cudaEventCreate(&currentTime);
    cudaEventRecord(currentTime,0);
    float timeMs;
    if(startMap.find(timerName) == startMap.end()){
        cout << "Timer \"" << timerName << "\" was never started." << endl;
        return;
    }
    else if(stopMap.find(timerName) == stopMap.end()){
        cudaEventElapsedTime(&timeMs, startMap[timerName], currentTime);
        cout << timerName << " = " << timeMs << " ms (running)" << endl;
        return;
    }
    cudaEventElapsedTime(&timeMs, startMap[timerName], stopMap[timerName]);
    cout << timerName << " = " << timeMs << " ms" << endl;
}

// Classic shared-memory tree reduction (sum) over one array.
// BLOCKSIZE must equal the launch block size; the final warp-width steps
// rely on warp-synchronous execution (pre-Volta idiom — NOTE(review):
// lacks __syncwarp/volatile, a known hazard on newer architectures).
template <unsigned int BLOCKSIZE, class T>
__device__ static void reduceOne(int tid, T *s_A)
{
    if (BLOCKSIZE >= 1024) { if (tid < 512) { s_A[tid] += s_A[tid + 512]; } __syncthreads(); }
    if (BLOCKSIZE >= 512) { if (tid < 256) { s_A[tid] += s_A[tid + 256]; } __syncthreads(); }
    if (BLOCKSIZE >= 256) { if (tid < 128) { s_A[tid] += s_A[tid + 128]; } __syncthreads(); }
    if (BLOCKSIZE >= 128) { if (tid < 64) { s_A[tid] += s_A[tid + 64]; } __syncthreads(); }
    if (tid < 32)
    {
        if (BLOCKSIZE >= 64) { s_A[tid] += s_A[tid + 32]; }
        if (BLOCKSIZE >= 32) { s_A[tid] += s_A[tid + 16]; }
        if (BLOCKSIZE >= 16) { s_A[tid] += s_A[tid + 8]; }
        if (BLOCKSIZE >= 8) { s_A[tid] += s_A[tid + 4]; }
        if (BLOCKSIZE >= 4) { s_A[tid] += s_A[tid + 2]; }
        if (BLOCKSIZE >= 2) { s_A[tid] += s_A[tid + 1]; }
    }
}

// Same tree reduction applied to two arrays in lockstep (sums both).
template <unsigned int BLOCKSIZE, class T, class U>
__device__ static void reduceTwo(int tid, T *s_A, U *s_B)
{
    if (BLOCKSIZE >= 1024) { if (tid < 512) { s_A[tid] += s_A[tid + 512]; s_B[tid] += s_B[tid + 512]; } __syncthreads(); }
    if (BLOCKSIZE >= 512) { if (tid < 256) { s_A[tid] += s_A[tid + 256]; s_B[tid] += s_B[tid + 256]; } __syncthreads(); }
    if (BLOCKSIZE >= 256) { if (tid < 128) { s_A[tid] += s_A[tid + 128]; s_B[tid] += s_B[tid + 128]; } __syncthreads(); }
    if (BLOCKSIZE >= 128) { if (tid < 64) { s_A[tid] += s_A[tid + 64]; s_B[tid] += s_B[tid + 64]; } __syncthreads(); }
    if (tid < 32)
    {
        if (BLOCKSIZE >= 64) { s_A[tid] += s_A[tid + 32]; s_B[tid] += s_B[tid + 32]; }
        if (BLOCKSIZE >= 32) { s_A[tid] += s_A[tid + 16]; s_B[tid] += s_B[tid + 16]; }
        if (BLOCKSIZE >= 16) { s_A[tid] += s_A[tid + 8]; s_B[tid] += s_B[tid + 8]; }
        if (BLOCKSIZE >= 8) { s_A[tid] += s_A[tid + 4]; s_B[tid] += s_B[tid + 4]; }
        if (BLOCKSIZE >= 4) { s_A[tid] += s_A[tid + 2]; s_B[tid] += s_B[tid + 2]; }
        if (BLOCKSIZE >= 2) { s_A[tid] += s_A[tid + 1]; s_B[tid] += s_B[tid + 1]; }
    }
}

// Assignment step: one thread per data point; centers are staged through
// shared memory blockDim.x dimensions at a time. X is laid out
// dimension-major (X[d * N + point]). Writes the nearest-center index
// into ASSIGN.
__global__ static void assignToClusters_KMCUDA(int N, int K, int n_dim, float *X, float *CTR, int *ASSIGN)
{
    extern __shared__ float array[];
    float *s_center = (float*) array;
    unsigned int t = blockDim.x * blockIdx.x + threadIdx.x;
    unsigned int tid = threadIdx.x;
    if (t < N)
    {
        float minDist = 0.0;
        int minIndex = 0;
        for (unsigned int k = 0; k < K; k++)
        {
            float dist = 0.0;
            unsigned int offsetD = 0;
            // Stream the center's coordinates through shared memory.
            while (offsetD < n_dim)
            {
                if (offsetD + tid < n_dim)
                    s_center[tid] = CTR[k * n_dim + offsetD + tid];
                __syncthreads();
                for (unsigned int d = offsetD; d < min(offsetD + blockDim.x, n_dim); d++)
                {
                    dist += distanceComponentGPU(s_center + (d - offsetD), X + (d * N + t));
                }
                offsetD += blockDim.x;
                __syncthreads();
            }
            dist = distanceFinalizeGPU<float>(1, &dist);
            // k == 0 seeds the running minimum.
            if (dist < minDist || k == 0)
            {
                minDist = dist;
                minIndex = k;
            }
        }
        ASSIGN[t] = minIndex;
    }
}

// Score step: one block per cluster k; each block sums the distances of its
// assigned points and reduces the partial sums to SCORE[k].
__global__ static void calcScore_CUDA(int N, int n_dim, float *X, float *CTR, int *ASSIGN, float *SCORE)
{
    extern __shared__ float array[];
    float *s_scores = (float*) array;                 // blockDim.x partial sums
    float *s_center = (float*) &s_scores[blockDim.x]; // staged center coords
    int k = blockIdx.x;
    int tid = threadIdx.x;
    s_scores[tid] = 0.0;
    unsigned int offsetN = tid;
    while (offsetN < N)
    {
        float dist = 0.0;
        unsigned int offsetD = 0;
        while (offsetD < n_dim)
        {
            if (offsetD + tid < n_dim)
                s_center[tid] = CTR[k * n_dim + offsetD + tid];
            __syncthreads();
            if (ASSIGN[offsetN] == k)
            {
                for (unsigned int d = offsetD; d < min(offsetD + blockDim.x, n_dim); d++)
                {
                    dist += distanceComponentGPU(s_center + (d - offsetD), X + (d * N + offsetN));
                }
            }
            offsetD += blockDim.x;
            __syncthreads();
        }
        s_scores[tid] += distanceFinalizeGPU(1, &dist);
        offsetN += blockDim.x;
    }
    __syncthreads();
    reduceOne<THREADSPERBLOCK>(tid, s_scores);
    if (tid == 0) SCORE[k] = s_scores[tid];
}

// Update step: one block per cluster k; for each dimension, sum the
// coordinates of assigned points (and count them once, at d == 0), then
// write the new center as sum / count. Empty clusters keep their center.
__global__ static void calcCentroids_CUDA(int N, int n_dim, float *X, float *CTR, int *ASSIGN)
{
    extern __shared__ float array[];
    int *s_numElements = (int*) array;
    float *s_centerParts = (float*) &s_numElements[blockDim.x];
    int k = blockIdx.x;
    int tid = threadIdx.x;
    float clusterSize = 0.0;  // only meaningful on tid == 0 after d == 0
    s_numElements[tid] = 0;
    for (unsigned int d = 0; d < n_dim; d++)
    {
        s_centerParts[tid] = 0.0;
        unsigned int offset = tid;
        while (offset < N)
        {
            if (ASSIGN[offset] == k)
            {
                s_centerParts[tid] += X[d * N + offset];
                if (d == 0) s_numElements[tid]++;
            }
            offset += blockDim.x;
        }
        __syncthreads();
        if (d == 0)
        {
            // First dimension also reduces the member count.
            reduceTwo<THREADSPERBLOCK>(tid, s_centerParts, s_numElements);
            if (tid == 0) clusterSize = (float) s_numElements[tid];
        }
        else
        {
            reduceOne<THREADSPERBLOCK>(tid, s_centerParts);
        }
        if (tid == 0)
            if (clusterSize > 0)
                CTR[k * n_dim + d] = s_centerParts[tid] / clusterSize;
    }
}

// Host driver: uploads data/centers, iterates (update centers, assign,
// score) until the squared change in total score drops below EPS or
// maxIter is reached (maxIter == 0 means "unbounded"). Returns the final
// score and copies centers/assignments back into ctr/assign.
float kmeansGPU(int N, int K, int n_dim, float *x, float *ctr, int *assign, unsigned int maxIter, DataIO *data)
{
    dim3 block(THREADSPERBLOCK);
    dim3 gridK(K);
    dim3 gridN((int)ceil((float)N/(float)THREADSPERBLOCK));
    // Dynamic shared-memory sizes for the three kernels.
    int sMemAssign=(sizeof(float)*THREADSPERBLOCK);
    int sMemScore=(sizeof(float)*2*THREADSPERBLOCK);
    int sMemCenters=(sizeof(float)*THREADSPERBLOCK+sizeof(int)*THREADSPERBLOCK);
    float *x_d = data->allocDeviceMemory<float*>(sizeof(float) * N * n_dim, x);
    float *ctr_d = data->allocDeviceMemory<float*>(sizeof(float) * K * n_dim, ctr);
    int *assign_d = data->allocDeviceMemory<int*>(sizeof(int) * N);
    float *s_d = data->allocZeroedDeviceMemory<float*>(sizeof(float) * K);
    float *s = (float*) malloc(sizeof(float) * K);
    float oldscore = -1000.0, score = 0.0;
    if (maxIter < 1) maxIter = INT_MAX;
    unsigned int iter = 0;
    while (iter < maxIter && ((score - oldscore) * (score - oldscore)) > EPS)
    {
        oldscore = score;
        // Skip the centroid update on the first pass (centers are the seeds).
        if (iter > 0)
        {
            calcCentroids_CUDA<<<gridK, block, sMemCenters>>>(N, n_dim, x_d, ctr_d, assign_d);
        }
        iter++;
        assignToClusters_KMCUDA<<<gridN, block, sMemAssign>>>(N, K, n_dim, x_d, ctr_d, assign_d);
        calcScore_CUDA<<<gridK, block, sMemScore>>>(N, n_dim, x_d, ctr_d, assign_d, s_d);
        cudaMemcpy(s, s_d, sizeof(float) * K, cudaMemcpyDeviceToHost);
        score = 0.0;
        for (int i = 0; i < K; i++) score += s[i];
    }
    cout << "Number of iterations: " << iter << endl;
    cudaMemcpy(ctr, ctr_d, sizeof(float) * K * n_dim, cudaMemcpyDeviceToHost);
    cudaMemcpy(assign, assign_d, sizeof(int) * N , cudaMemcpyDeviceToHost);
    cudaFree(x_d);
    cudaFree(ctr_d);
    cudaFree(assign_d);
    cudaFree(s_d);
    free(s);
    return score;
}

int main()
{
    Timing timer;
    cudaSetDevice(0);
    DataIO* data = new DataIO;
    float score = 0.0f;
    float* x = data->readData("alanine_2000MB.dat");
    int N = data->getNumElements();
    int K = data->getNumClusters();
    int n_dim = data->getDimensions();
    float* ctr = (float*) malloc(sizeof(float) * K * n_dim);
    memset(ctr, 0, sizeof(float) * K * n_dim);
    int* assign = (int*) malloc(sizeof(int) * N);
    memset(assign, 0, sizeof(int) * N);
    // Seed centers with the first K data points (data is dimension-major).
    for (unsigned int k = 0; k < K; k++)
    {
        for (unsigned int d = 0; d < n_dim; d++)
        {
            ctr[k * n_dim + d] = x[d * N + k];
        }
    }
    timer.start("kmeansGPU");
    score = kmeansGPU(N, K, n_dim, x, ctr, assign, (unsigned int)0, data);
    timer.stop("kmeansGPU");
    // data->printClusters(N, K, D, x, ctr, assign);
    timer.report();
    // NOTE(review): x is also owned (and delete'd) by DataIO's Internal —
    // freeing it here risks a double free; confirm ownership.
    free(x);
    free(ctr);
    free(assign);
    cout << "Done clustering" << endl;
    return 0;
}
78
#include <cuda.h>
#include <stdio.h>
#include <stdlib.h>   /* FIX: malloc/rand/exit were used without this header */
#include <math.h>     /* FIX: fabs was used without this header */
#include <sys/time.h>

/* Abort with a readable message if a CUDA call fails. */
#define CUDA_CHECK_RETURN(value) \
{ \
    cudaError_t _m_cudaStat = value; \
    if (_m_cudaStat != cudaSuccess) \
    { \
        fprintf(stderr, "Error %s at line %d in file %s\n", \
                cudaGetErrorString(_m_cudaStat), __LINE__, __FILE__); \
        exit(1); \
    } \
}

enum { NELEMS = 1 << 23 };  /* 8M elements */

/* Wall-clock time in seconds. */
double wtime()
{
    struct timeval t;
    gettimeofday(&t, NULL);
    return (double)t.tv_sec + (double)t.tv_usec * 1E-6;
}

/* Element-wise c = a + b; one thread per element with a tail guard. */
__global__ void vadd(const float *a, const float *b, float *c, int n)
{
    int i = blockDim.x * blockIdx.x + threadIdx.x;
    if (i < n)
        c[i] = a[i] + b[i];
}

int main()
{
    size_t size = sizeof(float) * NELEMS;
    double tgpu = 0, tmem = 0;
    float elapsedTime = 0;
    cudaEvent_t start, stop;

    /* Allocate vectors on host */
    float *h_A = (float *)malloc(size);
    float *h_B = (float *)malloc(size);
    float *h_C = (float *)malloc(size);
    if (h_A == NULL || h_B == NULL || h_C == NULL) {
        fprintf(stderr, "Allocation error.\n");
        exit(EXIT_FAILURE);
    }
    for (int i = 0; i < NELEMS; ++i) {
        h_A[i] = rand() / (float)RAND_MAX;
        h_B[i] = rand() / (float)RAND_MAX;
    }

    /* Allocate vectors on device */
    float *d_A = NULL, *d_B = NULL, *d_C = NULL;
    tmem = -wtime();
    CUDA_CHECK_RETURN(cudaMalloc((void **)&d_A, size));
    CUDA_CHECK_RETURN(cudaMalloc((void **)&d_B, size));
    CUDA_CHECK_RETURN(cudaMalloc((void **)&d_C, size));

    /* Copy the host vectors to device */
    CUDA_CHECK_RETURN(cudaMemcpy(d_A, h_A, size, cudaMemcpyHostToDevice))
    CUDA_CHECK_RETURN(cudaMemcpy(d_B, h_B, size, cudaMemcpyHostToDevice))
    tmem += wtime();

    /* Launch the kernel; time it both with events and wall clock */
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    tgpu = -wtime();
    int threadsPerBlock = 1024;
    int blocksPerGrid = (NELEMS + threadsPerBlock - 1) / threadsPerBlock;
    cudaEventRecord(start,0);
    vadd<<<blocksPerGrid, threadsPerBlock>>>(d_A, d_B, d_C, NELEMS);
    cudaEventRecord(stop,0);
    cudaEventSynchronize(stop);
    CUDA_CHECK_RETURN(cudaDeviceSynchronize());
    tgpu += wtime();
    CUDA_CHECK_RETURN(cudaGetLastError());
    cudaEventElapsedTime(&elapsedTime,start,stop);
    cudaEventDestroy(start);
    cudaEventDestroy(stop);

    /* Copy the device vectors to host */
    tmem -= wtime();
    CUDA_CHECK_RETURN(cudaMemcpy(h_C, d_C, size, cudaMemcpyDeviceToHost));
    tmem += wtime();

    /* Verify against the host-side sum with a small tolerance */
    for (int i = 0; i < NELEMS; ++i) {
        if (fabs(h_A[i] + h_B[i] - h_C[i]) > 1e-5) {
            fprintf(stderr, "Result verification failed at element %d!\n", i);
            exit(EXIT_FAILURE);
        }
    }

    cudaFree(d_A);
    cudaFree(d_B);
    cudaFree(d_C);
    free(h_A);
    free(h_B);
    free(h_C);
    cudaDeviceReset();
    printf("GPU version (sec.): %.6lf\n", tgpu);
    printf("Memory ops. (sec.): %.6lf\n", tmem);
    printf("Total time (sec.): %.6lf\n", tgpu + tmem);
    printf("Events Time %.6f\n", elapsedTime);
    return 0;
}
79
#include <stdio.h>

#define N (2048 * 2048)
#define THREADS_PER_BLOCK 512
#define RADIUS 3

// 1D stencil: out[i] = sum of in[i-RADIUS .. i+RADIUS], with zeros past the
// array ends. Each block stages its tile plus RADIUS-wide halos in shared
// memory. Assumes size is a multiple of blockDim.x (true for N here).
__global__ void add(int *in, int *out, int size)
{
    __shared__ int temp[THREADS_PER_BLOCK + (2 * RADIUS)];
    int globalIdx = blockIdx.x * blockDim.x + threadIdx.x;
    int localIdx = threadIdx.x + RADIUS;
    int localSum = 0;

    // Center element of this thread's tile slot.
    temp[localIdx] = in[globalIdx];

    // First RADIUS threads also load the left and right halos.
    if (threadIdx.x < RADIUS)
    {
        // FIX: boundary test was `>= 1`, which would wrongly zero the halo
        // slot for global index 0; index 0 is a valid element.
        if ((globalIdx - RADIUS) >= 0)
        {
            temp[localIdx - RADIUS] = in[globalIdx - RADIUS];
        }
        else
        {
            temp[localIdx - RADIUS] = 0;  // pad past the left edge
        }
        if ((globalIdx + THREADS_PER_BLOCK) < size)
        {
            temp[localIdx + THREADS_PER_BLOCK] = in[globalIdx + THREADS_PER_BLOCK];
        }
        else
        {
            temp[localIdx + THREADS_PER_BLOCK] = 0;  // pad past the right edge
        }
    }

    // All loads must land before any thread reads its neighbours.
    __syncthreads();

    for (int i = -RADIUS; i <= RADIUS; i++)
    {
        localSum = localSum + temp[threadIdx.x + RADIUS + i];
    }
    out[globalIdx] = localSum;
}

int main(void)
{
    int *a, *b;       // host buffers
    int *d_a, *d_b;   // device buffers
    int size = N * sizeof(int);

    cudaMalloc((void **)&d_a, size);
    cudaMalloc((void **)&d_b, size);
    a = (int *)malloc(size);
    b = (int *)malloc(size);

    // All ones: interior outputs should be 2*RADIUS + 1 = 7.
    for (int i = 0; i < N; i++)
    {
        a[i] = 1;
    }

    cudaMemcpy(d_a, a, size, cudaMemcpyHostToDevice);
    add<<<(N + THREADS_PER_BLOCK - 1) / THREADS_PER_BLOCK, THREADS_PER_BLOCK>>>(d_a, d_b, N);
    cudaMemcpy(b, d_b, size, cudaMemcpyDeviceToHost);

    printf("Hello world %d\n", b[120]);

    free(a);
    free(b);
    cudaFree(d_a);
    cudaFree(d_b);
    return 0;
}
80
#include "utils.cuh" namespace Utils { //////////////////////////////////////////////////////////////////////////// inline uint iDivUp(uint a, uint b) { return (a % b != 0) ? (a / b + 1) : (a / b); } //////////////////////////////////////////////////////////////////////////// void computeGridSize( uint n, uint blockSize, uint &numBlocks, uint &numThreads ) { numThreads = min(blockSize, n); numBlocks = iDivUp(n, numThreads); } //////////////////////////////////////////////////////////////////////////// };
81
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

// FIX: was `#define N = 8`, which expands to `= 8` and breaks any use of N.
#define N 8

/* Name: Nathana Facion RA:191079 */
/* Exercise 7 - Matrix Add */
/* Date: 20/04/2017 */

// Element-wise C = A + B over a linhas x colunas matrix stored row-major.
// 2D launch: i indexes rows, j indexes columns; out-of-range threads exit.
__global__ void addMatriz(int *A, int *B, int *C, int linhas, int colunas)
{
    int i = threadIdx.x + blockDim.x * blockIdx.x;  // row
    int j = threadIdx.y + blockDim.y * blockIdx.y;  // column
    if ((i < linhas) && (j < colunas)) {
        C[i * colunas + j] = A[i * colunas + j] + B[i * colunas + j];
    }
}

int main()
{
    int *A, *B, *C;
    int i, j;

    // Device pointers.
    int *A_Cuda;
    int *B_Cuda;
    int *C_Cuda;

    // Input: matrix dimensions.
    int linhas, colunas;
    scanf("%d", &linhas);
    scanf("%d", &colunas);
    size_t size = linhas * colunas * sizeof(int);

    // Allocate host memory.
    A = (int *)malloc(size);
    B = (int *)malloc(size);
    C = (int *)malloc(size);

    // Allocate device memory.
    cudaMalloc(&A_Cuda, size);
    cudaMalloc(&B_Cuda, size);
    cudaMalloc(&C_Cuda, size);

    // Initialize: A[i][j] = B[i][j] = i + j.
    for (i = 0; i < linhas; i++) {
        for (j = 0; j < colunas; j++) {
            A[i * colunas + j] = B[i * colunas + j] = i + j;
        }
    }

    // Copy inputs to the GPU.
    cudaMemcpy(A_Cuda, A, size, cudaMemcpyHostToDevice);
    cudaMemcpy(B_Cuda, B, size, cudaMemcpyHostToDevice);

    dim3 threadPorBloco(8, 8);
    // The number of blocks scales with the input (ceil division per axis).
    dim3 numeroBlocos(
        (int)ceil((float)linhas / threadPorBloco.x),
        (int)ceil((float)colunas / threadPorBloco.y)
    );
    addMatriz<<<numeroBlocos, threadPorBloco>>>(A_Cuda, B_Cuda, C_Cuda, linhas, colunas);

    cudaMemcpy(C, C_Cuda, size, cudaMemcpyDeviceToHost);

    // Keep this accumulation on the CPU.
    long long int somador = 0;
    for (i = 0; i < linhas; i++) {
        for (j = 0; j < colunas; j++) {
            somador += C[i * colunas + j];
        }
    }
    printf("%lli\n", somador);

    free(A);
    free(B);
    free(C);

    // Release GPU memory.
    cudaFree(A_Cuda);
    cudaFree(B_Cuda);
    cudaFree(C_Cuda);
}
82
#include "includes.h" extern "C" { } __global__ void reduce_sum_partial(const float* input, float* output, unsigned int len) { // from http://www.techdarting.com/2014/06/parallel-reduction-in-cuda.html // Load a segment of the input vector into shared memory __shared__ float partialSum[2*256]; int globalThreadId = blockIdx.x*blockDim.x + threadIdx.x; unsigned int t = threadIdx.x; unsigned int start = 2*blockIdx.x*blockDim.x; if ((start + t) < len) { partialSum[t] = input[start + t]; } else { partialSum[t] = 0.0; } if ((start + blockDim.x + t) < len) { partialSum[blockDim.x + t] = input[start + blockDim.x + t]; } else { partialSum[blockDim.x + t] = 0.0; } // Traverse reduction tree for (unsigned int stride = blockDim.x; stride > 0; stride /= 2) { __syncthreads(); if (t < stride) partialSum[t] += partialSum[t + stride]; } __syncthreads(); // Write the computed sum of the block to the output vector at correct index if (t == 0 && (globalThreadId*2) < len) { output[blockIdx.x] = partialSum[t]; } }
83
#include "includes.h" __global__ void adagrad_update_1D_1D(float* x, float* d, float* m, float clip, float lr, int size) { int tid = blockIdx.x * blockDim.x + threadIdx.x; int stride = gridDim.x * blockDim.x; for (; tid < size; tid += stride) { if (d[tid] > clip) d[tid] = clip; if (d[tid] < -clip) d[tid] = -clip; m[tid] += d[tid] * d[tid]; x[tid] -= lr * d[tid] / sqrt(m[tid] + 0.00000001); d[tid] = 0; } }
84
// This program computes the sum of two N-element vectors using unified memory
// By: Nick from CoffeeBeforeArch

#include <stdio.h>
#include <cassert>
#include <iostream>

using std::cout;

// CUDA kernel for vector addition
// No change when using CUDA unified memory
__global__ void vectorAdd(int *a, int *b, int *c, int N) {
  // Calculate global thread thread ID
  int tid = (blockDim.x * blockIdx.x) + threadIdx.x;

  // Boundary check
  if (tid < N) {
    c[tid] = a[tid] + b[tid];
  }
}

int main() {
  // Array size of 2^16 (65536 elements)
  const int N = 1 << 16;
  size_t bytes = N * sizeof(int);

  // Declare unified memory pointers
  int *a, *b, *c;

  // Allocation memory for these pointers
  cudaMallocManaged(&a, bytes);
  cudaMallocManaged(&b, bytes);
  cudaMallocManaged(&c, bytes);

  // Get the device ID for prefetching calls.
  // FIX: the original wrote `int id = cudaGetDevice(&id);`, which first
  // stores the device id into `id` and then overwrites it with the
  // cudaError_t return value (0 on success) — correct only by accident on
  // device 0.
  int id = 0;
  cudaGetDevice(&id);

  // Set some hints about the data and do some prefetching
  cudaMemAdvise(a, bytes, cudaMemAdviseSetPreferredLocation, cudaCpuDeviceId);
  cudaMemAdvise(b, bytes, cudaMemAdviseSetPreferredLocation, cudaCpuDeviceId);
  cudaMemPrefetchAsync(c, bytes, id);

  // Initialize vectors
  for (int i = 0; i < N; i++) {
    a[i] = rand() % 100;
    b[i] = rand() % 100;
  }

  // Pre-fetch 'a' and 'b' arrays to the specified device (GPU)
  cudaMemAdvise(a, bytes, cudaMemAdviseSetReadMostly, id);
  cudaMemAdvise(b, bytes, cudaMemAdviseSetReadMostly, id);
  cudaMemPrefetchAsync(a, bytes, id);
  cudaMemPrefetchAsync(b, bytes, id);

  // Threads per CTA (1024 threads per CTA)
  int BLOCK_SIZE = 1 << 10;

  // CTAs per Grid
  int GRID_SIZE = (N + BLOCK_SIZE - 1) / BLOCK_SIZE;

  // Call CUDA kernel
  vectorAdd<<<GRID_SIZE, BLOCK_SIZE>>>(a, b, c, N);

  // Wait for all previous operations before using values
  // We need this because we don't get the implicit synchronization of
  // cudaMemcpy like in the original example
  cudaDeviceSynchronize();

  // Prefetch to the host (CPU)
  cudaMemPrefetchAsync(a, bytes, cudaCpuDeviceId);
  cudaMemPrefetchAsync(b, bytes, cudaCpuDeviceId);
  cudaMemPrefetchAsync(c, bytes, cudaCpuDeviceId);

  // Verify the result on the CPU
  for (int i = 0; i < N; i++) {
    assert(c[i] == a[i] + b[i]);
  }

  // Free unified memory (same as memory allocated with cudaMalloc)
  cudaFree(a);
  cudaFree(b);
  cudaFree(c);

  cout << "COMPLETED SUCCESSFULLY!\n";

  return 0;
}
85
#include<stdio.h>
#include<stdlib.h>
#include<string.h>   /* FIX: needed for strcmp below */
#include <sys/time.h>

#define imin(a,b) (a<b?a:b)

const int N = 16777216;
const int TH_B = 512;
const int blocksPerGrid = imin( 32, (N+TH_B-1) / TH_B );

/* Microsecond wall-clock timestamp. */
long long start_timer()
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    return tv.tv_sec * 1000000 + tv.tv_usec;
}

/* Stop a timer, print a labelled report, and return the elapsed
 * microseconds. FIX: label dispatch used `name=="Cpu"` etc., which compares
 * pointers, not string contents — it only works if the compiler happens to
 * merge identical literals. Use strcmp. */
long long stop_timer(long long start_time, char *name)
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    long long end_time = tv.tv_sec * 1000000 + tv.tv_usec;
    float total_time = (end_time - start_time)/1000000.0;
    printf("Value of Blocks Per Grid : %d",blocksPerGrid);
    // print execution time for cpu
    if (strcmp(name, "Cpu") == 0)
    {
        printf("\nC) (T%s) Execution Time for Serial Algorithm or %s : %.5f sec\n",name,name,total_time);
    }
    // print execution time for gpu and kernel time
    if (strcmp(name, "Gpu") == 0)
    {
        printf("\nE) Kernel execution Time is %.5f sec\n",total_time);
        printf("\nF) (T%s) Execution Time for Parallel Algorithm or %s : %.5f sec\n",name,name,total_time);
    }
    // print execution time for memory allocation in gpu
    if (strcmp(name, "memalloctgpu") == 0)
    {
        printf("\nB) Memory allocation Time for GPU is : %.5f sec\n",total_time);
    }
    // print execution time for memory allocation in cpu
    if (strcmp(name, "memalloctcpu") == 0)
    {
        printf("\nA) Memory allocation Time for CPU is : %.5f sec\n",total_time);
    }
    // print condition for cpu to gpu time
    if (strcmp(name, "c2g") == 0)
    {
        printf("\nD) Data Transfer from CPU to GPU time is : %.5f sec\n",total_time);
    }
    // print condition for gpu to cpu transfer time
    if (strcmp(name, "g2c") == 0)
    {
        printf("\nG) Data Transfer from GPU to CPU time is : %.5f sec\n",total_time);
    }
    return ((end_time) - (start_time));
}

/* Grid-stride dot product: each block reduces its partial sums in shared
 * memory and writes one value to c[blockIdx.x]; the host sums the partials. */
__global__ void GPU_big_dot( float *a, float *b, float *c )
{
    __shared__ float cache[TH_B];
    int tid = threadIdx.x + blockIdx.x * blockDim.x;
    int cacheIndex = threadIdx.x;
    float temp = 0;
    while (tid < N) {
        temp += a[tid] * b[tid];
        tid += blockDim.x * gridDim.x;
    }
    // assign the cache
    cache[cacheIndex] = temp;
    // synchronize threads in this block
    __syncthreads();
    // Tree reduction over the block's partial sums.
    int i = blockDim.x/2;
    while (i != 0) {
        if (cacheIndex < i)
            cache[cacheIndex] += cache[cacheIndex + i];
        __syncthreads();
        i /= 2;
    }
    if (cacheIndex == 0)
        c[blockIdx.x] = cache[0];
}

/* Serial dot product used as the reference.
 * NOTE: this also (re)fills a and b in place — a[i] = i, b[i] = 2*i — so the
 * GPU afterwards operates on these values, not the ones set in main.
 * FIX: cpu_sum was read uninitialized; start the accumulator at zero. */
float CPU_big_dot(float *a, float *b)
{
    float cpu_sum = 0.0f;
    for (int tid = 0; tid < N; tid++)
    {
        a[tid] = tid;
        b[tid] = tid * 2;
        cpu_sum += a[tid] * b[tid];
    }
    return cpu_sum;
}

int main( void )
{
    long long s_t;
    long long s_tt;
    long long s_tt_g2c;
    long long s_t_c2g;
    long long cpu_i;
    long long gpu_i;
    float spu;
    float CPU_SUM;
    long long s_t_cpu_memalloc;
    long long s_t_gpu_memalloc;
    float *a, *b, c, *partial_c;
    float *d_a, *d_b, *d_partial_c;

    s_t_cpu_memalloc = start_timer();
    // allocate memory on the cpu side
    a = (float*)malloc( N*sizeof(float) );
    b = (float*)malloc( N*sizeof(float) );
    partial_c = (float*)malloc( blocksPerGrid*sizeof(float) );
    stop_timer(s_t_cpu_memalloc,(char*)"memalloctcpu");

    s_t_gpu_memalloc = start_timer();
    // allocate the memory on the GPU
    cudaMalloc( (void**)&d_a, N*sizeof(float) ) ;
    cudaMalloc( (void**)&d_b, N*sizeof(float) ) ;
    cudaMalloc( (void**)&d_partial_c, blocksPerGrid*sizeof(float) ) ;
    stop_timer(s_t_gpu_memalloc,(char*)"memalloctgpu");

    // fill in the host memory with data (overwritten again by CPU_big_dot)
    for (int i=0; i<N; i++) {
        a[i] = i;
        //b[i] = i * 2;
        b[i] = i;
    }

    s_t = start_timer();
    CPU_SUM = CPU_big_dot(a,b);
    cpu_i=stop_timer(s_t,(char*)"Cpu");

    s_t_c2g = start_timer();
    // copy the arrays 'a' and 'b' to the GPU
    cudaMemcpy( d_a, a, N*sizeof(float), cudaMemcpyHostToDevice ) ;
    cudaMemcpy( d_b, b, N*sizeof(float), cudaMemcpyHostToDevice ) ;
    stop_timer(s_t_c2g,(char*)"c2g");

    s_tt = start_timer();
    GPU_big_dot<<<blocksPerGrid,TH_B>>>( d_a, d_b, d_partial_c );
    gpu_i=stop_timer(s_tt, (char*)"Gpu");

    // copy the array 'c' back from the GPU to the CPU
    s_tt_g2c = start_timer();
    cudaMemcpy( partial_c, d_partial_c, blocksPerGrid*sizeof(float), cudaMemcpyDeviceToHost ) ;
    stop_timer(s_tt_g2c, (char*)"g2c");

    spu=(float)((float)cpu_i/(float)gpu_i);
    printf("\nH) Total SpeedUp is : %f \n",spu);

    // finish up on the CPU side: sum the per-block partials
    c = 0;
    for (int i=0; i<blocksPerGrid; i++) {
        c += partial_c[i];
    }
    printf( "\nI) GPU dot-product value is %f = %.6g\n", c,c);
    printf( "\nJ) CPU dot-product value is %f = %.6g\n\n", CPU_SUM,CPU_SUM );

    // free memory on the gpu side
    cudaFree( d_a ) ;
    cudaFree( d_b ) ;
    cudaFree( d_partial_c ) ;
    // free memory on the cpu side
    free( a );
    free( b );
    free( partial_c );
}
86
#include<iostream>

// FIX: this is an ELEMENT count, not a byte count. The original defined it
// as 128*sizeof(int) (= 512) and then used it as the element count of an int
// array, silently allocating 4x the intended shared memory.
const int SHARED_MEM_SIZE = 128;

// Reverse each block's segment in place: element k of a block becomes
// element blockDim.x-1-k of the same block. (This reverses per block, not
// the whole array.) Requires blockDim.x <= SHARED_MEM_SIZE.
__global__ void ReverseFunc(int *a, int *r, int N){
    __shared__ int sh[SHARED_MEM_SIZE];
    int id = threadIdx.x + blockDim.x*blockIdx.x;
    if (id < N)                       // FIX: guard the rounded-up grid tail
        sh[threadIdx.x] = a[id];
    __syncthreads();                  // all loads before any cross-thread read
    if (id < N)
        r[id] = sh[blockDim.x-threadIdx.x-1];
}

int main(){
    int *a, *r;       // host buffers
    int *d_a, *d_r;   // device buffers
    int N = 1024;
    int size = N*sizeof(int);

    a = (int*)malloc(size);
    r = (int*)malloc(size);
    cudaMalloc(&d_a, size);
    cudaMalloc(&d_r, size);

    for(int i = 0; i < N; i++){ a[i] = i; }
    cudaMemcpy(d_a,a,size,cudaMemcpyHostToDevice);

    int threadsPerBlock = 64;  // must stay <= SHARED_MEM_SIZE
    int blocksPerGrid = (N+threadsPerBlock-1)/threadsPerBlock;
    ReverseFunc<<<blocksPerGrid,threadsPerBlock>>>(d_a, d_r, N);
    // cudaThreadSynchronize();
    cudaMemcpy(r,d_r,size,cudaMemcpyDeviceToHost);
    // for(int i = 0; i< N; i++){std::cout << r[i] << std::endl;}

    free(a);
    free(r);
    cudaFree(d_a);
    cudaFree(d_r);
    return 0;
}
87
/*
 * purpose:     just a demo to show how vector addition can be done on
 *              the GPU with just a single thread block
 * compilation: nvcc ./single_thread_block_vector_addition.cu
 * usage:       ./a.out
 */

#include <stdio.h>

#define N 100

/*
 * GPU kernel: single block, one thread per element; thread i computes
 * C[i] = A[i] + B[i].
 */
__global__ void VecAdd(float *A, float *B, float *C)
{
    int i = threadIdx.x;
    C[i] = A[i] + B[i];
}

/*
 * host main
 */
int main()
{
    int i;
    dim3 numBlocks, threadsPerBlock;
    float *A, *B, *C;

    /*
     * Allocate the three vectors in CUDA unified memory, then seed them:
     * A counts up, B counts down, C starts at zero.
     */
    cudaMallocManaged(&A, N * sizeof(float));
    cudaMallocManaged(&B, N * sizeof(float));
    cudaMallocManaged(&C, N * sizeof(float));
    for (i = 0; i < N; i++)
    {
        A[i] = (float) i;
        B[i] = (float) (N - i);
        C[i] = (float) 0;
    }

    /*
     * Fill in an explicit execution configuration (one block of N
     * threads); the launch below passes the same values directly.
     */
    threadsPerBlock.x = N;
    numBlocks.x = 1;

    /* launch the GPU kernel and wait for it to finish */
    VecAdd<<<1, N>>>(A, B, C);
    cudaDeviceSynchronize();

    /* print result: every element should equal N */
    for (i = 0; i < N; i++)
    {
        printf("%d %f\n", i, C[i]);
    }

    /* make clean */
    cudaFree(C);
    cudaFree(B);
    cudaFree(A);

    return(0);
}
88
// moveArrays.cu
//
// demonstrates CUDA interface to data allocation on device (GPU)
// and data movement between host (CPU) and device.

#include <stdio.h>
#include <assert.h>
#include <cuda.h>

int main(void)
{
    const int N = 14;
    const size_t bytes = sizeof(float) * N;

    // host-side arrays
    float *a_h = (float *)malloc(bytes);
    float *b_h = (float *)malloc(bytes);

    // device-side arrays
    float *a_d, *b_d;
    cudaMalloc((void **) &a_d, bytes);
    cudaMalloc((void **) &b_d, bytes);

    // seed the host source array; destination starts at zero
    for (int i = 0; i < N; i++)
    {
        a_h[i] = 10.f + i;
        b_h[i] = 0.f;
    }

    // round trip: host -> device, device -> device, device -> host
    cudaMemcpy(a_d, a_h, bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(b_d, a_d, bytes, cudaMemcpyDeviceToDevice);
    cudaMemcpy(b_h, b_d, bytes, cudaMemcpyDeviceToHost);

    // the data must have survived the round trip unchanged
    for (int i = 0; i < N; i++)
        assert(a_h[i] == b_h[i]);

    // cleanup
    free(a_h);
    free(b_h);
    cudaFree(a_d);
    cudaFree(b_d);
    return 0;
}
89
#include "includes.h" __global__ void convn_valid_kernel(float *output, float *data, float *kernel, const int H, const int W, const int kH, const int kW) { // Matrix index int x = blockIdx.x*blockDim.x + threadIdx.x; int y = blockIdx.y*blockDim.y + threadIdx.y; // vH, vW stands for valid H and valid W const int vH = H - kH + 1; const int vW = W - kW + 1; if (x >= vH || y >= vW) return; x += kH - 1; y += kW - 1; float sum = 0; for (int i = 0; i < kW; ++i) for(int j = 0; j < kH; ++j) sum += kernel[ i * kH + j ] * data[ (y - i) * H + (x - j) ]; x -= kH - 1; y -= kW - 1; output[ y * vH + x ] = sum; }
90
#include "cuda_runtime.h" #include "device_launch_parameters.h" #include <stdio.h> #include <stdlib.h> #include <math.h> #include <time.h> #include <iostream> #define ARRAY_SIZE 1024 #define BLOCK_DIM 1024 using namespace std; __global__ void fill_histrogram(int *dev_out, int *dev_in) { int i = blockIdx.x * blockDim.x + threadIdx.x; atomicAdd(&dev_out[dev_in[i]], 1); } int main() { int a[ARRAY_SIZE], b[100]; int *dev_in, *dev_out; srand(time(NULL)); cudaMalloc((void **)&dev_in, ARRAY_SIZE * sizeof(int)); cudaMalloc((void **)&dev_out, 100 * sizeof(int)); for (int i = 0; i < ARRAY_SIZE; ++i) { a[i] = rand() % 100; } cudaMemcpy(dev_in, a, ARRAY_SIZE * sizeof(int), cudaMemcpyHostToDevice); fill_histrogram <<< ARRAY_SIZE / BLOCK_DIM, BLOCK_DIM >>>(dev_out, dev_in); cudaMemcpy(b, dev_out, 100 * sizeof(int), cudaMemcpyDeviceToHost); for (int i = 0; i < 100; ++i) { cout << "Out[" << i << "]: " << b[i] << endl; } cudaFree(dev_in); cudaFree(dev_out); return 0; }
91
/* ============================================================================ Name : lab_1.cu Author : Boyarskikh_Nikita Version : Copyright : Description : CUDA compute reciprocals ============================================================================ */ #include <iostream> #include <stdlib.h> static void CheckCudaErrorAux (const char *, unsigned, const char *, cudaError_t); #define CUDA_CHECK_RETURN(value) CheckCudaErrorAux(__FILE__,__LINE__, #value, value) #define TIME 100 #define LENGTH 100 #define STEPX 1 #define STEPT 0.5 /** * CUDA kernel that computes reciprocal values for a given vector */ __global__ void reciprocalKernel(float *data, float *new_data, const float time, float step_x, float step_t, const unsigned length) { unsigned idx = blockIdx.x*blockDim.x+threadIdx.x; if(idx%int(length/step_x)) { data[int(length/step_x-1)]=new_data[int(length/step_x-1)]+5*step_t; if(idx%int(length/step_x)!=int(length/step_x-1)) { new_data[idx%int(length/step_x)]=(data[idx%int(length/step_x)+1]-2*data[idx%int(length/step_x)]+data[idx%int(length/step_x)-1])/step_x/step_x*step_t+data[idx%int(length/step_x)]; } } } /** * Host function that copies the data and launches the work on GPU */ float *gpuReciprocal(float *data) { float *gpuData, *new_data; CUDA_CHECK_RETURN(cudaMalloc((void **)&new_data, sizeof(float)*LENGTH/STEPX)); CUDA_CHECK_RETURN(cudaMemcpy(new_data, data, sizeof(float)*LENGTH/STEPX, cudaMemcpyHostToDevice)); CUDA_CHECK_RETURN(cudaMalloc((void **)&gpuData, sizeof(float)*LENGTH/STEPX)); CUDA_CHECK_RETURN(cudaMemcpy(gpuData, data, sizeof(float)*LENGTH/STEPX, cudaMemcpyHostToDevice)); static const int BLOCK_SIZE = 10; const int blockCount = (LENGTH/STEPX)/BLOCK_SIZE; for (unsigned i=0; i < TIME/STEPT; i++) { if(i&1) { reciprocalKernel<<<blockCount, BLOCK_SIZE>>> (gpuData, new_data, TIME, STEPX, STEPT, LENGTH); } else { reciprocalKernel<<<blockCount, BLOCK_SIZE>>> (new_data, gpuData, TIME, STEPX, STEPT, LENGTH); } } if(!int(TIME/STEPT)&1) 
CUDA_CHECK_RETURN(cudaMemcpy(data, gpuData, sizeof(float)*LENGTH/STEPX, cudaMemcpyDeviceToHost)); else CUDA_CHECK_RETURN(cudaMemcpy(data, new_data, sizeof(float)*LENGTH/STEPX, cudaMemcpyDeviceToHost)); CUDA_CHECK_RETURN(cudaFree(gpuData)); CUDA_CHECK_RETURN(cudaFree(new_data)); return data; } void initialize(float *data) { for (unsigned i = 0; i < LENGTH/STEPX; ++i) { data[i] = 0; } } int main(void) { float *data = new float[int(LENGTH/STEPX)]; initialize(data); /* Verify the results */ data=gpuReciprocal(data); for (unsigned long i=0; i<LENGTH/STEPX; i++) { std::cout<<data[i]<<std::endl; } /* Free memory */ delete[] data; return 0; } /** * Check the return value of the CUDA runtime API call and exit * the application if the call has failed. */ static void CheckCudaErrorAux (const char *file, unsigned line, const char *statement, cudaError_t err) { if (err == cudaSuccess) return; std::cerr << statement<<" returned " << cudaGetErrorString(err) << "("<<err<< ") at "<<file<<":"<<line << std::endl; exit (1); }
92
#include "includes.h" __global__ void x33(float* x34, float* x35, float* x36, int x37) { int x38 = gridDim.x * blockDim.x; int x39 = threadIdx.x + blockIdx.x * blockDim.x; while (x39 < x37) { int x40 = x39; x36[x40] = x34[x40] / x35[x40]; x39 = x39 + x38; } }
93
#include <iostream> #include <stdio.h> #include <stdlib.h> #include <math.h> using namespace std; float reduce_cpu(float* data, int* pat){ float sum = 0; int m = pat[0]; int numElement = pat[1]; for(int i = 0; i < numElement; i++) { float prod = 1; for(int j = 2; j < m+2; j++) prod *= data[pat[j]*numElement+i]; sum += prod; } return sum; } __global__ void reduce_kernel1(float* d_out, float* d_in, const int size) { extern __shared__ float s_data[]; const int tid = threadIdx.x; const int i = blockIdx.x*blockDim.x + threadIdx.x; if (i >= size) s_data[tid] = 0; else s_data[tid] = d_in[i]; __syncthreads(); for(unsigned int s = blockDim.x/2; s > 0; s >>= 1) { if(tid < s) { s_data[tid] += s_data[tid + s]; } __syncthreads(); } if(tid == 0) { d_out[blockIdx.x] = s_data[0]; } } inline void swap(float* &p1, float* &p2) { float* tmp = p1; p1 = p2; p2 = tmp; } __global__ void reduce_kernel4(float* d_out, float* d_in, const int size) { extern __shared__ float s_data[]; unsigned int tid = threadIdx.x; unsigned int i = blockIdx.x*(blockDim.x*2) + threadIdx.x; s_data[tid] = 0; __syncthreads(); if (i + blockDim.x < size ) s_data[tid] = d_in[i] + d_in[i + blockDim.x]; else if (i < size) s_data[tid] = d_in[i]; __syncthreads(); for(unsigned int s = blockDim.x/2; s > 0; s >>= 1) { if(tid < s) { s_data[tid] += s_data[tid + s]; } __syncthreads(); } if(tid == 0) {d_out[blockIdx.x] = s_data[0];} } float reduction(float* d_data, float* d_buf, const int numElement, int numThread, void (*reduce_kernel)(float* d_out, float* d_data, const int size), bool isKernel4 = false) { float* d_src = NULL; //always store input data float* d_dst = NULL; //always store output data //for the first level of reduction int n = numElement; d_src = d_data; d_dst = d_buf; int numBlock = numElement/numThread + (numElement%numThread? 1 : 0); int sharedMemSize = sizeof(float)*numThread; while(n > 1) { if(isKernel4) numBlock = numBlock/2 + (numBlock%2 ? 
1 : 0); reduce_kernel<<<numBlock, numThread, sharedMemSize>>>(d_dst, d_src, n); cudaThreadSynchronize(); //for the next level n = numBlock; numBlock = n/numThread+ (n%numThread ? 1 : 0); swap(d_dst, d_src); } cudaDeviceSynchronize(); swap(d_dst, d_src); float result = 0; cudaMemcpy(&result, d_dst, sizeof(float), cudaMemcpyDeviceToHost); return result; } __global__ void dot_kernel(float* dev_data, float* dev_dot, int* dev_pat) { const int index = blockIdx.x*blockDim.x + threadIdx.x; int m = dev_pat[0]; int numElement = dev_pat[1]; if (index < numElement) { float prod = 1; for(int j = 2; j < m+2; j++) prod = prod*dev_data[dev_pat[j]*numElement+index]; dev_dot[index] = prod; __syncthreads(); } } /* ******************************************************************************************* ******************************************************************************************** */ float* setdev_data(float* data, int dSize, int numElement) { float* dev_data; //datadeviceĿ cudaMalloc((void**)&dev_data, sizeof(float)*dSize); cudaMemcpy(dev_data, data, sizeof(float)*dSize, cudaMemcpyHostToDevice); return dev_data; } float* setdev_dot(int numElement) { float* dev_dot; //dev_dotattribute֮dot-product cudaMalloc((void**)&dev_dot, sizeof(float)*numElement); return dev_dot; } float* setdev_out(int numElement) { float* dev_out; cudaMalloc((void**)&dev_out, sizeof(float)*numElement); return dev_out; } int* setdev_pat(int* pat, int m) { int* dev_pat;//patdeviceĿ cudaMalloc((void**)& dev_pat, sizeof(int)*(m+2)); cudaMemcpy(dev_pat, pat, sizeof(int)*(m+2), cudaMemcpyHostToDevice); return dev_pat; } float reduce_gpu(int numElement, int numThread, float* dev_data, float* dev_dot, float* dev_out, int* dev_pat) { dot_kernel<<<numElement/numThread+(numElement%numThread ? 
1 : 0),numThread>>>(dev_data, dev_dot, dev_pat); return reduction(dev_dot, dev_out, numElement, numThread, reduce_kernel4); } void test_reduction() { ////////////////////////////////////////////////////////////////////////Ķ///////////////////////////////////////////// const int numElement = 512*500; const int numAttribute = 100; ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// //const int numElement = 1024*1024; //const int numAttribute = 100; const int dSize = numElement*numAttribute; cudaEvent_t start, stop; cudaEventCreate(&start); cudaEventCreate(&stop); ////////////////////////////////////////////////////////////////////////Ķ///////////////////////////////////////////// //Data Generator for the Table float* data = (float*)malloc(sizeof(float)*dSize); for(int i = 0; i < dSize; i++) { //data[i] = (float)rand()/RAND_MAX; data[i] = 0.5; } ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// const int numThread = 512; float elapsedTime = 0.0f; float result = 0; float* dev_data; //datadeviceĿ cudaMalloc((void**)&dev_data, sizeof(float)*dSize); cudaMemcpy(dev_data, data, sizeof(float)*dSize, cudaMemcpyHostToDevice); float* dev_dot; //dev_dotattribute֮dot-product cudaMalloc((void**)&dev_dot, sizeof(float)*numElement); float* dev_out; cudaMalloc((void**)&dev_out, sizeof(float)*numElement); ////////////////////////////////////////////////////////////////////////Ķ///////////////////////////////////////////// int m = 3; //3attributepattern int* pat = (int *)malloc(sizeof(int)*(m+2)); pat[0] = m; pat[1] = numElement; pat[2] = 1; pat[3] = 2; pat[4] = 3; ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// int* dev_pat;//patdeviceĿ cudaMalloc((void**)& dev_pat, sizeof(int)*(m+2)); cudaMemcpy(dev_pat, pat, sizeof(int)*(m+2), cudaMemcpyHostToDevice); 
//**************************************CPU******************************************* cudaEventRecord(start, 0); printf("numElement = %d\n", numElement); printf("reduce_cpu result: %f\n", reduce_cpu(data, pat)); cudaEventRecord(stop, 0); cudaEventSynchronize(stop); cudaEventElapsedTime(&elapsedTime, start, stop); printf("cpu elapsed time: %.3f ms\n", elapsedTime); printf("---------------------------------\n\n"); //************************************GPU****************************************** elapsedTime = 0.0f; cudaEventRecord(start, 0); dot_kernel<<<numElement/numThread+(numElement%numThread ? 1 : 0),numThread>>>(dev_data, dev_dot, dev_pat); // float* dot = (float*)malloc(sizeof(float)*numElement); // cudaMemcpy(dot, dev_dot, sizeof(float)*numElement, cudaMemcpyDeviceToHost); // float testsum = 0; // for (int i = 0; i < numElement; i++) // testsum += dot[i]; // cout<<"dot result = "<<testsum<<endl; result = reduction(dev_dot, dev_out, numElement, numThread, reduce_kernel4); cudaEventRecord(stop, 0); cudaEventSynchronize(stop); cudaEventElapsedTime(&elapsedTime, start, stop); printf("kernel1 elapsed time: %.3f ms\n", elapsedTime); printf("kernel1 result: %f\n\n", result); cudaFree(dev_data); cudaFree(dev_out); cudaFree(dev_dot); free(data); }
94
/* This is a automatically generated test. Do not modify */ #include <stdio.h> #include <stdlib.h> #include <math.h> __global__ void compute(float comp, int var_1,int var_2,int var_3,float var_4,float var_5,float var_6,float var_7,float var_8,float var_9,float var_10,float var_11,float var_12,float var_13,float* var_14,float var_15,float var_16,float var_17,float var_18,float var_19,float var_20,float var_21,float var_22,float var_23,float var_24,float var_25,float var_26,float var_27,float var_28) { for (int i=0; i < var_1; ++i) { for (int i=0; i < var_2; ++i) { if (comp <= -1.6060E36f / ldexpf(+1.5617E35f, 2)) { comp += (+1.3779E36f - floorf(-1.2482E-43f)); comp = fmodf(-1.5149E36f / var_4, sinhf(-0.0f + (var_5 + var_6 * +1.0563E35f + (-1.6218E-43f + -1.5339E-35f)))); float tmp_1 = acosf(+1.3883E20f * atanf((+0.0f + (var_7 / var_8 / var_9)))); comp = tmp_1 / var_10 + var_11 / var_12 * var_13 - (-0.0f * +1.6618E-41f); for (int i=0; i < var_3; ++i) { var_14[i] = +1.2734E-37f; comp = var_14[i] + logf(+0.0f); comp += (var_15 * var_16 * var_17); comp = (-1.4694E-36f * (+1.7018E3f + +1.7157E-36f - (var_18 / -1.8436E36f))); } if (comp <= (var_19 - +1.4128E34f * (-1.2866E35f - (var_20 - (+1.9375E35f + var_21))))) { comp += (var_22 * +1.8236E34f - var_23 * var_24 - var_25); float tmp_2 = +1.8835E-26f; comp = tmp_2 + (var_26 - -1.8132E-35f); } if (comp < sinf(+1.0853E-44f)) { float tmp_3 = var_27 * var_28; comp += tmp_3 / (-1.6435E36f - -1.6854E-44f); } } } } printf("%.17g\n", comp); } float* initPointer(float v) { float *ret = (float*) malloc(sizeof(float)*10); for(int i=0; i < 10; ++i) ret[i] = v; return ret; } int main(int argc, char** argv) { /* Program variables */ float tmp_1 = atof(argv[1]); int tmp_2 = atoi(argv[2]); int tmp_3 = atoi(argv[3]); int tmp_4 = atoi(argv[4]); float tmp_5 = atof(argv[5]); float tmp_6 = atof(argv[6]); float tmp_7 = atof(argv[7]); float tmp_8 = atof(argv[8]); float tmp_9 = atof(argv[9]); float tmp_10 = atof(argv[10]); float tmp_11 = 
atof(argv[11]); float tmp_12 = atof(argv[12]); float tmp_13 = atof(argv[13]); float tmp_14 = atof(argv[14]); float* tmp_15 = initPointer( atof(argv[15]) ); float tmp_16 = atof(argv[16]); float tmp_17 = atof(argv[17]); float tmp_18 = atof(argv[18]); float tmp_19 = atof(argv[19]); float tmp_20 = atof(argv[20]); float tmp_21 = atof(argv[21]); float tmp_22 = atof(argv[22]); float tmp_23 = atof(argv[23]); float tmp_24 = atof(argv[24]); float tmp_25 = atof(argv[25]); float tmp_26 = atof(argv[26]); float tmp_27 = atof(argv[27]); float tmp_28 = atof(argv[28]); float tmp_29 = atof(argv[29]); compute<<<1,1>>>(tmp_1,tmp_2,tmp_3,tmp_4,tmp_5,tmp_6,tmp_7,tmp_8,tmp_9,tmp_10,tmp_11,tmp_12,tmp_13,tmp_14,tmp_15,tmp_16,tmp_17,tmp_18,tmp_19,tmp_20,tmp_21,tmp_22,tmp_23,tmp_24,tmp_25,tmp_26,tmp_27,tmp_28,tmp_29); cudaDeviceSynchronize(); return 0; }
95
#include<stdio.h> #include<stdlib.h> #include<unistd.h> #include<stdbool.h> #include <cuda.h> #include <cuda_runtime.h> extern "C" void allocateMemory(int **arr, int arraySize) { cudaMallocManaged(arr, ( (arraySize* sizeof(int)))); } extern "C" void callCudaFree(int* local) { cudaFree(local); } //extern void callMPI(int* local,int* arr,int arrSize,int mpi_size,int x_rank); extern "C" void cudaInit( int myrank) { int cE; int cudaDeviceCount = 1; if( (cE = cudaGetDeviceCount( &cudaDeviceCount)) != cudaSuccess ) { printf(" Unable to determine cuda device count, error is %d, count is %d\n", cE, cudaDeviceCount ); exit(-1); } if( (cE = cudaSetDevice( myrank % cudaDeviceCount )) != cudaSuccess ) { printf(" Unable to have rank %d set to cuda device %d, error is %d \n", myrank, (myrank % cudaDeviceCount), cE); exit(-1); } } __global__ void mergeKernel(int j, int mpi_size, int mpi_rank, int *arr, int arrSize, int sizeCompare,int* prev_local, int* next_local) { //nt *prev_local = NULL; //int *next_local = NULL; bool sameVal = false; int i = blockIdx.x*blockDim.x + threadIdx.x; int global_idx = i + arrSize / mpi_size * mpi_rank; int x = global_idx ^ j; int x_rank = x / (arrSize / mpi_size); if ( global_idx >= x ) { if ( mpi_rank == x_rank ) { if(sameVal == false) { sameVal = true; } } else { if ( prev_local == NULL ) { //prev_local = calloc(arrSize / mpi_size, sizeof(int)); //allocateMemory(&prev_local,arrSize/mpi_size); prev_local = arr + arrSize / mpi_size * x_rank; //callMPI(prev_local,arr,arrSize,mpi_size,x_rank); } if ( (sizeCompare & x) == 0 && arr[i] < prev_local[i] ) { arr[i] = prev_local[i]; } if ( (sizeCompare & x) != 0 && arr[i] > prev_local[i] ) { arr[i] = prev_local[i]; } } } else { if ( x_rank == mpi_rank ) { int y = x - arrSize / mpi_size * mpi_rank; if ( (global_idx & sizeCompare) == 0 && arr[i] > arr[y] ) { int temp = arr[i]; arr[i] = arr[y]; arr[y] = temp; } if ( (global_idx & sizeCompare) != 0 && arr[i] < arr[y] ) { int temp = arr[i]; arr[i] = arr[y]; 
arr[y] = temp; } } else { if ( next_local == NULL ) { //next_local = calloc(arrSize / mpi_size, sizeof(int)); //allocateMemory(&next_local,arrSize/mpi_size); next_local = arr + arrSize / mpi_size * x_rank; //callMPI(next_local,arr,arrSize,mpi_size,x_rank); } if ( (global_idx & sizeCompare) == 0 && arr[i] > next_local[i] ) { arr[i] = next_local[i]; } if ( (global_idx & sizeCompare) != 0 && arr[i] < next_local[i] ) { arr[i] = next_local[i]; } } } } extern "C" void mergeKernelLaunch(int blockSize,int threadsCount,int j, int mpi_size, int mpi_rank, int *arr, int arrSize, int sizeCompare,int* prev_local, int* next_local) { mergeKernel<<<blockSize,threadsCount>>>(j, mpi_size, mpi_rank, arr, arrSize, sizeCompare, prev_local, next_local); }
96
#include<stdio.h> #include<stdlib.h> __global__ void print_gpu(void) { printf("Houston, we have a problem in section [%d,%d] \ From Apollo 13\n", threadIdx.x,blockIdx.x); } int main(void) { printf("This is Houston. Say again, please. \ From Base\n"); print_gpu<<<2,2>>>(); cudaDeviceSynchronize(); return 0; }
97
#include <curand_kernel.h> namespace curfil { namespace gpu { __global__ void setup_kernel(int seed, curandState *state) { int id = blockIdx.x * blockDim.x + threadIdx.x; /* Each thread gets same seed, a different sequence number, no offset */ curand_init(seed, id, 0, &state[id]); } __global__ void generate_uniform_kernel(curandState* state, unsigned int* result) { int id = blockIdx.x * blockDim.x + threadIdx.x; unsigned int count = 0; float x; /* Copy state to local memory for efficiency */ curandState localState = state[id]; /* Generate pseudo-random uniforms */ for (int n = 0; n < 10000; n++) { x = curand_uniform(&localState); /* Check if > .5 */ if (x > .5) { count++; } } /* Copy state back to global memory */ state[id] = localState; /* Store results */ result[id] += count; } } }
98
#include <math.h> #include <fstream> #include <iostream> #include <stdio.h> #include <stdlib.h> #include <time.h> #include <cuda_runtime.h> //Se definen los valores fijos a utilizar en el programa #define H 288 //Cada bloque manejara 100 datos correspondientes a 5 minutos de mediciones en intervalos de 3 segundos #define B 2 //Se trabajaran 2 bloques, uno para cada dia #define VUELTAS 28800 //Cantidad de datos por arreglo #define N 30 //Varible utilizada en pruebas using namespace std; __global__ void inversion(float *x, float *y) { int i = blockIdx.x*blockDim.x + threadIdx.x; if (i < N) y[i] = x[N-1-i]; } __global__ void raices(float *x, float *y) { int i = blockIdx.x*blockDim.x + threadIdx.x; if (i < N) y[i] = sqrt (x[i]); } __global__ void potencia3(float *x, float *y) { int i = blockIdx.x*blockDim.x + threadIdx.x; if (i < N) y[i] = pow ((double)x[i],3.0); } __global__ void media(float* arreglo) { float sumatoria = 0; float med = 0; //54 for(int i=0;i<VUELTAS;i++){ sumatoria = sumatoria + arreglo[i]; } med = sumatoria/(float) VUELTAS; sumatoria = med; } //Subrutina que calcula cual fue la mayor medicion en el dia con hora a la que fue medida __global__ void mayor(float* arreglo){ float may=arreglo[0]; for(int i=0;i<VUELTAS;i++) { if(arreglo[i]>may){ may=arreglo[i];} } } //Subrutina que calcula cual fue la menor medicion en el dia con hora a la que fue medida __global__ void menor(float* arreglo){ float men=arreglo[0]; for(int i=0;i<VUELTAS;i++) { if(arreglo[i]<men){ men=arreglo[i];} } } //Subrutina que calcula la prediccion de datos para un dia siguiente a traves de la regresion lineal de un tipo de medicion hecha por cada 5 minutos en intervalos de 3 segundos __global__ void prediccion(float* arreglo, float* salida){ int i = blockIdx.x*blockDim.x + threadIdx.x; int q = 0; float k = 100.0; float m = 0; float sumatoria = 0; float sumasDif = 0; float potencia = 0; float pendiente = 0; //float nueva = 0; q = i*100; for(int j = q; j<q+100; j++){ sumatoria = 
sumatoria + arreglo[j]; } sumatoria = sumatoria/k; for(int j = q; j<q+100; j++){ sumasDif = arreglo[j] - sumatoria; } potencia = (float)pow((double)sumasDif,2.00); pendiente = potencia/k; for(int j = q; j<q+100; j++){ salida[j] = sumatoria + pendiente*m; m = m + 1; } } //Inicio del programa int main(void) { // declaraciones de componentes CUDA, Streams y memoria cudaStream_t stream1, stream2, stream3, stream4, stream5, stream6; cudaStreamCreate(&stream1); cudaStreamCreate(&stream2); cudaStreamCreate(&stream3); cudaStreamCreate(&stream4); cudaStreamCreate(&stream5); cudaStreamCreate(&stream6); //Se abren los archivos y se limpian ofstream ArchivoPrediccion("181113_estCU.csv"); ArchivoPrediccion.close(); ofstream ArchivoPrediccion2("181114_estCU.csv"); ArchivoPrediccion2.close(); //Se crean los vectores que guardaran los string de horas de los archivos .csv string horas[VUELTAS]; string horas2[VUELTAS]; //Se inician las variables que guardaran los tiempos de ejecucion de cada kernel float milliseconds1 = 0; float milliseconds2 = 0; float milliseconds3 = 0; float milliseconds4 = 0; float milliseconds5 = 0; float milliseconds6 = 0; //Se crean las variables de vectores que llevaran datos y compiaran entre el host y el device float *vectorTemperatura1, *vectorHumedad1, *vectorPresion1, *res_stream1, *res_stream2, *res_stream3; float *vectorTemperatura2, *vectorHumedad2, *vectorPresion2, *res_stream4, *res_stream5, *res_stream6; float *dev_res1, *dev_res2, *dev_res3; float *dev_res4, *dev_res5, *dev_res6; // reserva en el host // reserva en el device cudaMalloc( (void**)&dev_res1, VUELTAS*sizeof(float)); cudaMalloc( (void**)&dev_res2, VUELTAS*sizeof(float)); cudaMalloc( (void**)&dev_res3, VUELTAS*sizeof(float)); cudaMalloc( (void**)&dev_res4, VUELTAS*sizeof(float)); cudaMalloc( (void**)&dev_res5, VUELTAS*sizeof(float)); cudaMalloc( (void**)&dev_res6, VUELTAS*sizeof(float)); //Asignacion de memoria al host 
cudaHostAlloc((void**)&vectorTemperatura1,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&vectorHumedad1,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&vectorPresion1,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&vectorTemperatura2,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&vectorHumedad2,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&vectorPresion2,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&res_stream1,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&res_stream2,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&res_stream3,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&res_stream4,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&res_stream5,VUELTAS*sizeof(float),cudaHostAllocDefault); cudaHostAlloc((void**)&res_stream6,VUELTAS*sizeof(float),cudaHostAllocDefault); // se crean los eventos cudaEvent_t start, stop; cudaEventCreate(&start); cudaEventCreate(&stop); ///////////////////////////////////////////////////////////////////////////////////////////////// // Inicializacion de datos por lectura de archivos .csv // Se leen los datos del dia 1 ifstream datos("181113.csv"); string linea; int contadorPosicion = 0; // Se obtienen los datos separados de cada linea guardada while(getline(datos,linea)){ string delimiter = ";"; size_t pos = 0; string token; int cont = 0; while ((pos = linea.find(delimiter)) != std::string::npos) { token = linea.substr(0, pos); linea.erase(0, pos + delimiter.length()); if(cont == 0){ horas[contadorPosicion] = token; } if(cont == 1){ vectorTemperatura1[contadorPosicion] = (float)(::atof(token.c_str())); } if(cont == 2){ vectorHumedad1[contadorPosicion] = (float)(::atof(token.c_str())); } if(cont == 3){ vectorPresion1[contadorPosicion] = (float)(::atof(token.c_str())); } cont = cont + 1; } contadorPosicion = contadorPosicion + 1; } 
//////////////////////////////////////////////////////////////////////////////// //Se ejecutan 3 kernels cada uno en un stream diferente y haciendolo en 288 bloques cada uno, de manera aplicar regresion lineal cada 100 datos equivalente a 5 minutos de mediciones para el dia 1 for(int i=0;i < H;i++){ // copia de datos hacia el device cudaMemcpyAsync(dev_res1, vectorTemperatura1, VUELTAS*sizeof(float), cudaMemcpyHostToDevice,stream1); //Se hace la medicion del tiempo atraves de events cudaEventRecord(start); prediccion<<<1, H>>>(vectorTemperatura1, dev_res1); cudaEventRecord(stop); cudaMemcpyAsync(res_stream1, dev_res1, VUELTAS*sizeof(float), cudaMemcpyDeviceToHost,stream1); cudaEventSynchronize(stop); cudaEventElapsedTime(&milliseconds1, start, stop); ///////////////////////////////////////////////////////////////////////////// cudaMemcpyAsync(dev_res2, vectorHumedad1, VUELTAS*sizeof(float), cudaMemcpyHostToDevice,stream2); //Se hace la medicion del tiempo atraves de events cudaEventRecord(start); prediccion<<<1, H>>>(vectorHumedad1, dev_res2); cudaEventRecord(stop); cudaMemcpyAsync(res_stream2, dev_res2, VUELTAS*sizeof(float), cudaMemcpyDeviceToHost, stream2); cudaEventSynchronize(stop); cudaEventElapsedTime(&milliseconds2, start, stop); //////////////////////////////////////////////////////////////////////////////// cudaMemcpyAsync(dev_res3, vectorPresion1, VUELTAS*sizeof(float), cudaMemcpyHostToDevice,stream3); //Se hace la medicion del tiempo atraves de events cudaEventRecord(start); prediccion<<<1, H>>>(vectorPresion1, dev_res3); cudaEventRecord(stop); cudaMemcpyAsync(res_stream3, dev_res3, VUELTAS*sizeof(float), cudaMemcpyDeviceToHost,stream3); cudaEventSynchronize(stop); cudaEventElapsedTime(&milliseconds3, start, stop); } /////////////////////////////////////////////////////////////////////////////// //Se sincronizan los streams cudaStreamSynchronize(stream1); // wait for stream1 to finish cudaStreamSynchronize(stream2); // wait for stream2 to finish 
cudaStreamSynchronize(stream3); // wait for stream3 to finish ///////////////////////////////////////////////////////////////////////////////////////////////////////////// // Se leen los datos del dia 2 ifstream datos2("181114.csv"); contadorPosicion = 0; // Se obtienen los datos separados de cada linea guardada while(getline(datos2,linea)){ string delimiter = ";"; size_t pos = 0; string token; int cont = 0; while ((pos = linea.find(delimiter)) != std::string::npos) { token = linea.substr(0, pos); linea.erase(0, pos + delimiter.length()); if(cont == 0){ horas2[contadorPosicion] = token; } if(cont == 1){ vectorTemperatura2[contadorPosicion] = (float)(::atof(token.c_str())); } if(cont == 2){ vectorHumedad2[contadorPosicion] = (float)(::atof(token.c_str())); } if(cont == 3){ vectorPresion2[contadorPosicion] = (float)(::atof(token.c_str())); } cont = cont + 1; } contadorPosicion = contadorPosicion + 1; } //////////////////////////////////////////////////////////////////////////////// //Se ejecutan 3 kernels cada uno en un stream diferente y haciendolo en 288 bloques cada uno, de manera aplicar regresion lineal cada 100 datos equivalente a 5 minutos de mediciones para el dia 2 for(int i=0;i < H;i++){ // copia de datos hacia el device cudaMemcpyAsync(dev_res4, vectorTemperatura2, VUELTAS*sizeof(float), cudaMemcpyHostToDevice,stream4); //Se hace la medicion del tiempo atraves de events cudaEventRecord(start); prediccion<<<1, H>>>(vectorTemperatura2, dev_res4); cudaEventRecord(stop); cudaMemcpyAsync(res_stream4, dev_res4, VUELTAS*sizeof(float), cudaMemcpyDeviceToHost,stream4); cudaEventSynchronize(stop); cudaEventElapsedTime(&milliseconds4, start, stop); ///////////////////////////////////////////////////////////////////////////// cudaMemcpyAsync(dev_res5, vectorHumedad2, VUELTAS*sizeof(float), cudaMemcpyHostToDevice,stream5); //Se hace la medicion del tiempo atraves de events cudaEventRecord(start); prediccion<<<1, H>>>(vectorHumedad2, dev_res5); cudaEventRecord(stop); 
cudaMemcpyAsync(res_stream5, dev_res5, VUELTAS*sizeof(float), cudaMemcpyDeviceToHost, stream5); cudaEventSynchronize(stop); cudaEventElapsedTime(&milliseconds5, start, stop); //////////////////////////////////////////////////////////////////////////////// cudaMemcpyAsync(dev_res6, vectorPresion2, VUELTAS*sizeof(float), cudaMemcpyHostToDevice,stream6); //Se hace la medicion del tiempo atraves de events cudaEventRecord(start); prediccion<<<1, H>>>(vectorPresion2, dev_res6); cudaEventRecord(stop); cudaMemcpyAsync(res_stream6, dev_res6, VUELTAS*sizeof(float), cudaMemcpyDeviceToHost,stream6); cudaEventSynchronize(stop); cudaEventElapsedTime(&milliseconds6, start, stop); } /////////////////////////////////////////////////////////////////////////////// //Se sincronizan los streams cudaStreamSynchronize(stream4); // wait for stream1 to finish cudaStreamSynchronize(stream5); // wait for stream2 to finish cudaStreamSynchronize(stream6); // wait for stream3 to finish ///////////////////////////////////////////////////////////////////////////////// //Se guardan los datos predecidos en un archivo csv correspondiente ofstream Archivo("181113_estCU.csv"); for(int i=0;i<VUELTAS;i++){ Archivo << horas[i] << ";" << res_stream1[i] << ";" << res_stream2[i] << ";" << res_stream3[i] << ";" << endl; } Archivo.close(); ofstream Archivo2("181114_estCU.csv"); for(int i=0;i<VUELTAS;i++){ Archivo2 << horas2[i] << ";" << res_stream4[i] << ";" << res_stream5[i] << ";" << res_stream6[i] << ";" << endl; } Archivo2.close(); //Se imprimen los tiempos que tardaron cada uno de los kernels printf("Tiempo del kernel para la prediccion de temperaturas del dia 1: %f milisegundos\n", milliseconds1); printf("Tiempo del kernel para la prediccion de humedades del dia 1: %f milisegundos\n", milliseconds2); printf("Tiempo del kernel para la prediccion de presiones del dia 1: %f milisegundos\n", milliseconds3); printf("Tiempo del kernel para la prediccion de temperaturas del dia 2: %f milisegundos\n", 
milliseconds4); printf("Tiempo del kernel para la prediccion de humedades del dia 2: %f milisegundos\n", milliseconds5); printf("Tiempo del kernel para la prediccion de presiones del dia 2: %f milisegundos\n", milliseconds6); //Se destruyen todos los componentes CUDA y se libera la memoria cudaEventDestroy(start); cudaEventDestroy(stop); cudaStreamDestroy(stream1); cudaStreamDestroy(stream2); cudaStreamDestroy(stream3); cudaStreamDestroy(stream4); cudaStreamDestroy(stream5); cudaStreamDestroy(stream6); cudaFree(dev_res1); cudaFree(dev_res2); cudaFree(dev_res3); cudaFree(dev_res4); cudaFree(dev_res5); cudaFree(dev_res6); // salida printf("\npulsa INTRO para finalizar..."); fflush(stdin); char tecla = getchar(); return 0; }
99
#include "includes.h"

// Tuning limits shared by the template-matching kernels in this file:
// Nthreads = threads per block, maxFR = max spikes handled per batch,
// NrankMax = max PCA rank, nmaxiter = iteration cap, NchanMax = max
// channels per template neighborhood.  (Values are assumed elsewhere —
// e.g. the 81-sample waveform buffers below — TODO confirm against callers.)
const int  Nthreads = 1024, maxFR = 100000, NrankMax = 3, nmaxiter = 500, NchanMax = 32;

//////////////////////////////////////////////////////////////////////////////////////////
// THIS UPDATE DOES NOT UPDATE ELOSS?
//////////////////////////////////////////////////////////////////////////////////////////

// Compute per-spike PC-feature projections for one template (one block per
// template, bid = template id).
//
// Launch layout assumed by the indexing below (not visible here — confirm at
// the call site): blockIdx.x in [0, Nfilt), threadIdx.x in [0, NchanU),
// threadIdx.y in [0, Nrank).
//
// Params  : scalar parameters packed as doubles — [0]=NT (samples per batch),
//           [1]=Nfilt, [4]=nt0 (waveform length, must be <= 81), [6]=Nrank,
//           [9]=Nchan, [10]=NchanU (channels per neighborhood, <= NchanMax)
// counter : counter[0] = number of detected spikes to process
// dataraw : raw data, indexed as [time + NT * channel]
// st, id, x : per-spike sample time, template id, and amplitude
// W, U    : temporal (nt0 x Nfilt x Nrank) and spatial (Nchan x Nfilt x Nrank)
//           template factors
// mu      : unused here except in the commented-out amplitude centering
// iW, iC  : iC[c + NchanU*iW[bid]] maps local channel c of this template to a
//           global channel index
// wPCA    : PCA temporal basis (nt0 x Nrank)
// featPC  : output, [chan + NchanU*rank + ind*NchanU*Nrank] per spike
__global__ void computePCfeatures(const double *Params, const int *counter,
        const float *dataraw, const int *st, const int *id, const float *x,
        const float *W, const float *U, const float *mu, const int *iW,
        const int *iC, const float *wPCA, float *featPC){

    // Shared staging buffers for this template's factors; `volatile` keeps the
    // compiler from caching them in registers across the sync points.
    volatile __shared__ float  sPCA[81 * NrankMax], sW[81 * NrankMax], sU[NchanMax * NrankMax];
    volatile __shared__ int iU[NchanMax];

    int bid, nt0, t, tidx, tidy, k, NT, ind, Nchan, NchanU, Nfilt, Nrank;
    float X = 0.0f, Y = 0.0f;

    // Unpack scalar parameters (stored as doubles in Params).
    NT        = (int) Params[0];
    nt0       = (int) Params[4];
    Nchan     = (int) Params[9];
    Nfilt     = (int) Params[1];
    Nrank     = (int) Params[6];
    NchanU    = (int) Params[10];

    tidx = threadIdx.x;
    tidy = threadIdx.y;
    bid  = blockIdx.x;

    // One row of threads resolves this template's global channel indices.
    if (tidy==0)
        iU[tidx] = iC[tidx + NchanU * iW[bid]];
    __syncthreads();  // iU must be complete before sU is gathered from it

    // Gather the spatial weights for each (channel, rank) pair.
    sU[tidx + tidy*NchanU]= U[iU[tidx] + Nchan * bid + Nchan * Nfilt * tidy];

    // Cooperatively stage the temporal template and the PCA basis; tidx is
    // strided by blockDim.x because nt0 may exceed the x-dimension.
    while (tidx<nt0){
        sW[tidx + tidy*nt0]   = W[tidx + bid*nt0 + Nfilt * nt0 * tidy];
        sPCA[tidx + tidy*nt0] = wPCA[tidx + nt0 * tidy];
        tidx += blockDim.x;
    }
    tidx = threadIdx.x;  // restore this thread's channel index

    __syncthreads();  // staging must finish before the projections below

    // First, compute wPCA projections of the filter: for this thread's
    // (channel tidx, PC tidy), Y = sum_k <W_k, wPCA_tidy> * U_k[tidx].
    Y = 0.0f;
    for (k =0; k<Nrank; k++){
        X = 0.0f;
        for (t=0;t<nt0;t++)
            X += sW[t + k*nt0] * sPCA[t + tidy * nt0];
        Y += X * sU[tidx + k*NchanU];
    }

    // Now, for each spike assigned to this template, project the raw snippet
    // onto the PCA basis and add the template's own contribution scaled by
    // the spike amplitude.  All threads scan the full spike list; only
    // matching ids do work.
    for(ind=0; ind<counter[0];ind++)
        if (id[ind]==bid){
            X = Y * x[ind]; // - mu[bid]);  (amplitude centering disabled)
            for (t=0;t<nt0; t++)
                X  += dataraw[st[ind] + t + NT * iU[tidx]] * sPCA[t + nt0*tidy];
            featPC[tidx + tidy*NchanU + ind * NchanU*Nrank] = X;
        }
}
100
#include <stdio.h>
#include <math.h>
#include <time.h>
#include <unistd.h>
#include <cuda_runtime_api.h>
#include <errno.h>

/*
To compile:
  nvcc -o NishantLinear NishantLinear.cu
  ./NishantLinear

Fits a straight line y = m*x + c to the hard-coded 1000-point data set below
by a steepest-descent search: each sweep evaluates the 8 compass-direction
neighbours of the current (m, c) and moves to the best one, stopping when no
neighbour improves the RMS error.  The per-point squared residuals are
computed on the GPU (one thread per data point).
*/

/* A single (x, y) sample. */
typedef struct point_t {
  double x;
  double y;
} point_t;

int n_data = 1000;              /* number of samples, host side             */
__device__ int d_n_data = 1000; /* same count, visible to the kernel guard  */

point_t data[] = {
  {74.32,120.84},{65.26,121.98},{77.84,134.32},{69.57,135.09},
  {83.26,119.06},{83.05,140.32},{65.65,115.63},{76.75,151.06},
  {84.45,150.47},{69.40,138.40},{82.03,147.61},{27.33,61.48},
  {25.60,51.97},{38.04,88.45},{77.76,145.24},{28.43,81.89},
  {31.71,78.13},{27.36,73.40},{54.04,109.85},{43.06,91.41},
  {36.52,83.97},{43.11,96.10},{ 6.18,29.10},{92.31,165.55},
  {67.81,143.99},{25.45,55.32},{54.02,90.03},{28.22,64.80},
  {62.75,111.23},{36.57,71.48},{94.73,155.45},{79.13,153.33},
  {27.86,72.36},{20.11,65.43},{59.94,114.35},{85.52,155.16},
  {85.37,140.60},{65.97,118.83},{88.21,169.64},{35.79,83.69},
  {21.90,73.79},{56.63,128.76},{91.98,165.94},{ 1.36,37.69},
  {81.26,139.19},{71.64,107.32},{85.98,163.26},{93.96,162.49},
  {79.91,145.04},{ 6.11,39.39},{44.73,73.84},{68.92,139.47},
  {77.18,141.98},{72.13,129.30},{50.28,108.96},{61.16,111.38},
  {66.14,140.93},{44.08,97.81},{16.77,40.34},{16.70,50.86},
  {59.13,112.08},{15.15,59.60},{93.81,143.23},{49.23,92.65},
  {32.74,67.49},{68.25,126.33},{ 9.56,36.93},{31.83,75.94},
  {40.71,87.30},{11.10,40.29},{58.04,126.23},{95.12,134.62},
  { 0.68,28.04},{19.12,60.55},{84.81,155.16},{38.99,88.37},
  {66.88,123.61},{ 6.86,42.43},{93.37,137.53},{15.58,59.18},
  { 0.30,31.59},{88.57,143.82},{87.95,165.66},{40.17,87.81},
  {13.46,58.73},{44.00,76.35},{25.69,68.91},{62.71,112.10},
  {91.10,153.90},{73.52,130.98},{16.42,41.91},{ 5.87,36.39},
  {90.12,161.07},{84.88,138.05},{53.95,106.06},{87.79,154.60},
  {77.27,143.05},{13.49,74.97},{13.14,47.77},{ 9.30,41.92},
  { 0.71,21.96},{26.81,66.82},{42.23,80.13},{ 6.66,27.61},
  {67.69,110.99},{72.84,127.78},{86.78,144.62},{95.84,164.17},
  {67.52,129.59},{48.65,112.08},{18.89,43.44},{51.07,96.06},
  {88.19,173.36},{65.55,130.34},{39.75,62.84},{55.27,106.13},
  {71.83,135.84},{34.28,88.20},{ 8.81,52.14},{93.18,142.03},
  {47.54,73.37},{97.50,166.12},{11.10,42.77},{68.67,127.02},
  {85.34,145.07},{93.61,167.18},{ 1.89,44.21},{86.51,149.85},
  {15.29,40.05},{ 4.21,30.16},{14.52,46.99},{16.92,69.83},
  { 8.20,43.81},{96.34,172.12},{50.00,93.94},{88.14,147.53},
  {85.91,163.13},{26.42,56.73},{97.38,154.94},{ 2.65,16.28},
  { 6.81,39.91},{21.70,75.92},{94.83,168.82},{50.81,102.41},
  {12.90,40.37},{29.38,83.34},{57.93,120.19},{40.92,70.33},
  {38.56,69.62},{77.29,133.60},{33.13,73.56},{99.41,177.31},
  {86.41,148.28},{74.31,131.53},{61.05,103.73},{23.24,59.15},
  {63.39,123.16},{70.53,115.20},{67.13,136.04},{31.30,73.20},
  {95.79,164.82},{58.68,118.67},{71.03,109.72},{93.72,154.28},
  {67.05,132.54},{70.35,124.09},{29.84,71.01},{20.24,59.78},
  { 0.97,41.22},{93.39,149.87},{ 6.22,36.20},{85.37,149.42},
  {94.99,179.85},{ 2.49,36.99},{16.85,74.31},{63.26,109.26},
  {43.93,96.97},{63.80,118.28},{65.35,128.19},{75.85,140.19},
  {78.66,131.83},{31.72,85.87},{36.43,102.70},{57.29,127.00},
  {29.47,67.71},{37.65,66.89},{69.97,119.91},{81.83,148.86},
  {16.01,55.54},{39.07,96.46},{82.40,145.82},{ 3.48,21.48},
  {24.46,49.97},{65.16,120.89},{40.30,61.66},{48.65,100.57},
  {17.35,61.49},{78.57,129.88},{82.11,158.20},{78.76,150.95},
  {40.42,95.94},{15.98,89.37},{58.24,114.69},{30.77,77.66},
  {30.12,66.37},{23.12,59.60},{ 3.97,26.67},{70.79,152.61},
  {31.55,58.79},{71.76,141.49},{65.38,125.36},{ 0.09,40.37},
  {48.35,99.59},{28.35,71.36},{77.63,134.21},{80.16,155.93},
  {57.03,104.57},{73.94,122.89},{69.52,126.04},{69.12,128.15},
  {49.23,92.20},{29.51,75.73},{12.42,35.78},{23.99,68.83},
  {87.57,150.45},{85.75,153.99},{65.77,116.91},{62.66,89.46},
  {94.36,169.09},{71.34,130.37},{26.77,66.00},{84.96,152.25},
  {99.36,171.00},{25.27,65.15},{40.90,83.09},{41.88,87.31},
  {50.28,95.14},{34.46,72.83},{72.08,110.45},{28.18,92.23},
  {55.72,96.12},{24.05,64.82},{11.10,45.74},{33.01,67.92},
  {56.14,89.96},{51.34,92.68},{75.68,124.52},{86.01,138.68},
  {15.52,36.84},{78.84,139.72},{50.60,99.34},{84.86,143.68},
  {33.44,89.45},{95.15,159.16},{98.66,174.63},{98.89,177.31},
  { 0.22,32.76},{65.77,135.07},{62.00,120.58},{45.87,97.80},
  { 4.91,20.11},{67.67,122.38},{42.67,87.39},{97.96,167.90},
  { 7.06,30.63},{83.70,154.09},{86.63,144.36},{32.89,65.96},
  { 5.21,22.98},{ 5.98,26.51},{66.30,137.16},{39.17,77.89},
  { 2.73,28.32},{ 5.91,25.71},{32.78,68.70},{ 5.73,35.92},
  {92.45,166.73},{26.56,69.02},{33.36,67.78},{ 7.42,35.05},
  {31.91,73.37},{57.78,103.77},{18.36,46.33},{ 7.04,40.37},
  {15.50,37.36},{92.22,143.66},{ 0.24,19.94},{72.72,125.55},
  {85.87,150.62},{78.44,122.55},{18.35,46.92},{99.34,162.19},
  {94.40,158.10},{99.60,183.60},{60.63,133.23},{58.63,108.60},
  {81.73,135.89},{78.93,133.47},{34.54,70.76},{93.71,169.14},
  {34.14,70.68},{25.99,70.68},{67.14,96.65},{79.89,137.52},
  {21.90,64.34},{61.94,122.48},{11.52,36.18},{41.74,66.51},
  {18.88,57.27},{35.65,81.68},{16.33,51.15},{93.05,167.55},
  {54.26,83.98},{53.83,102.20},{28.78,82.46},{64.35,110.83},
  {53.51,116.26},{ 2.82,32.22},{53.36,92.03},{71.24,111.89},
  {87.15,163.87},{18.73,64.74},{83.52,153.50},{84.01,154.92},
  {36.49,77.57},{62.86,119.17},{32.24,81.11},{56.85,123.39},
  {75.97,136.19},{58.48,129.23},{44.89,80.77},{91.11,155.67},
  {19.70,56.69},{44.00,88.89},{75.66,138.43},{46.95,91.04},
  {44.01,88.91},{67.16,109.81},{54.44,102.18},{28.62,72.39},
  {43.76,84.11},{84.32,139.89},{31.99,75.00},{20.26,73.70},
  {16.35,51.26},{59.89,111.55},{11.70,49.09},{83.08,159.45},
  {99.58,184.11},{15.73,37.30},{89.59,146.56},{14.12,48.10},
  {27.15,42.01},{28.96,60.06},{ 7.82,31.16},{93.89,160.81},
  {72.27,120.85},{87.42,151.10},{29.47,72.81},{57.93,117.23},
  { 1.70,29.60},{38.55,82.96},{81.79,157.78},{15.70,47.43},
  { 7.32,56.93},{70.99,140.63},{25.80,73.71},{45.59,93.13},
  {24.08,68.93},{14.22,58.86},{90.03,165.33},{47.91,101.52},
  {48.27,93.92},{86.80,142.92},{20.46,53.98},{66.31,115.18},
  {99.92,184.57},{19.87,61.97},{ 9.73,42.02},{45.59,78.71},
  { 8.35,22.65},{91.29,183.42},{38.82,77.83},{14.30,36.17},
  {28.80,68.88},{59.77,111.10},{83.90,149.05},{50.79,110.46},
  {49.95,80.42},{85.09,143.31},{51.53,103.90},{63.44,122.52},
  {72.61,149.96},{65.26,124.07},{31.63,79.17},{14.05,53.27},
  {36.71,83.60},{19.73,63.11},{14.95,45.66},{54.11,100.24},
  {10.18,30.76},{41.49,77.74},{24.38,72.10},{84.09,131.94},
  { 3.57,39.81},{ 2.78,27.64},{24.93,61.37},{95.99,168.54},
  {42.30,92.71},{18.98,59.48},{76.28,121.80},{79.30,151.37},
  { 7.00,37.39},{80.79,135.72},{69.44,120.73},{42.77,92.25},
  {32.20,88.33},{73.59,142.65},{17.59,54.86},{95.83,160.03},
  {11.02,48.32},{ 9.93,42.93},{33.89,76.38},{12.13,40.50},
  {75.94,147.26},{ 9.23,48.75},{ 7.60,37.19},{28.25,74.34},
  {61.68,125.95},{94.42,153.15},{57.66,109.37},{80.98,147.79},
  {56.09,125.36},{84.58,134.57},{10.71,50.68},{65.78,109.09},
  { 5.53,29.65},{21.76,66.76},{29.72,68.32},{30.95,65.11},
  {33.28,76.75},{32.27,76.25},{66.89,125.02},{61.77,130.69},
  {21.02,62.13},{32.91,82.69},{70.89,135.40},{ 8.94,16.91},
  {29.94,65.56},{65.69,123.48},{14.80,33.48},{ 9.57,41.51},
  {89.72,152.22},{64.24,122.87},{91.64,149.30},{46.82,102.14},
  {50.99,110.36},{17.79,38.14},{ 7.90,51.68},{12.78,47.33},
  {27.85,85.08},{67.02,121.10},{62.72,116.87},{61.31,121.87},
  {72.12,124.47},{28.11,76.38},{63.64,123.50},{66.97,107.79},
  { 6.35,42.15},{89.92,165.27},{62.12,113.19},{17.84,45.99},
  {33.67,66.11},{26.25,57.52},{44.71,110.28},{93.14,158.82},
  {54.20,127.63},{46.93,95.42},{67.46,143.27},{79.18,141.36},
  {54.55,110.88},{ 4.95,27.55},{31.70,64.97},{30.73,48.94},
  {27.91,61.66},{75.79,140.06},{38.66,77.44},{90.18,169.84},
  {42.99,97.27},{68.93,124.92},{55.59,117.87},{39.67,81.86},
  {89.35,159.60},{52.51,109.72},{ 8.49,42.26},{21.53,59.50},
  {51.38,83.29},{90.07,151.22},{11.97,49.68},{82.04,152.58},
  {47.71,87.95},{97.42,165.81},{66.17,118.73},{28.23,67.72},
  {70.68,134.71},{15.39,73.56},{43.41,85.49},{71.98,135.77},
  {91.54,166.17},{78.44,131.82},{75.21,140.69},{64.99,121.77},
  {55.80,120.61},{28.26,54.50},{64.89,117.80},{56.68,86.63},
  {95.42,167.13},{97.62,165.29},{37.77,91.08},{33.34,84.16},
  { 4.98,32.59},{28.97,68.62},{58.70,122.63},{79.84,137.92},
  {32.96,71.35},{70.15,116.32},{72.12,134.93},{87.84,145.90},
  {37.58,74.91},{63.70,125.43},{51.04,96.37},{32.89,82.41},
  {31.47,72.95},{65.71,123.19},{96.25,157.46},{33.41,88.58},
  {73.69,124.34},{57.08,124.45},{58.55,107.26},{86.84,161.96},
  { 9.62,28.89},{70.74,132.77},{68.89,129.50},{30.79,66.63},
  {84.18,156.88},{94.64,171.65},{52.65,86.94},{10.52,33.08},
  {38.17,75.30},{98.23,166.00},{ 7.75,35.38},{64.33,121.67},
  {20.65,58.43},{62.53,113.41},{46.49,97.40},{14.85,35.92},
  {74.12,143.61},{ 1.02,15.85},{12.87,42.28},{48.12,91.79},
  {61.07,112.44},{77.01,139.88},{79.93,144.04},{36.84,84.94},
  {33.85,60.73},{83.60,159.64},{12.23,47.55},{45.34,103.84},
  {66.93,117.43},{21.56,69.56},{54.89,108.03},{57.71,116.51},
  {76.57,133.11},{41.43,98.74},{88.17,151.47},{16.57,55.41},
  { 1.30,33.67},{46.81,103.18},{ 0.19,26.49},{17.91,68.60},
  {41.37,97.56},{46.12,92.01},{71.36,145.82},{ 8.14,38.29},
  {39.45,73.01},{20.97,65.88},{49.34,100.46},{21.48,59.98},
  {38.58,90.56},{69.89,149.19},{25.62,63.62},{59.26,126.66},
  {54.69,120.65},{98.54,172.69},{72.37,131.63},{50.46,105.66},
  {10.51,47.47},{86.15,125.82},{29.42,64.20},{71.03,127.79},
  {21.88,63.20},{38.56,73.82},{23.67,63.21},{66.31,123.16},
  {79.91,150.99},{ 1.26,19.88},{34.65,84.60},{ 2.93,36.05},
  {53.99,126.60},{85.32,144.81},{45.63,107.71},{84.45,141.47},
  {19.25,56.39},{ 0.52,31.56},{33.02,67.43},{ 7.00,37.20},
  {82.26,143.53},{ 4.45,15.35},{22.45,75.67},{76.26,137.05},
  {20.22,56.74},{35.92,74.35},{ 0.04,28.10},{83.36,150.05},
  {64.10,121.94},{ 7.78,29.86},{83.17,125.35},{10.58,35.47},
  {65.62,119.24},{72.56,127.30},{37.73,84.99},{93.05,153.36},
  {35.86,81.75},{85.52,131.55},{81.75,143.62},{62.45,109.58},
  {94.79,157.53},{77.74,134.35},{19.22,57.07},{70.78,121.14},
  {99.37,161.95},{ 7.02,27.35},{82.54,124.89},{92.82,162.19},
  {49.32,90.72},{95.46,153.94},{ 4.44,44.30},{52.79,112.48},
  { 0.45,27.60},{59.99,105.48},{61.27,113.11},{36.60,91.98},
  {39.19,62.24},{23.68,74.57},{43.64,101.62},{48.14,109.21},
  {33.56,66.36},{12.16,51.62},{84.84,133.20},{36.73,87.83},
  {77.97,148.53},{25.78,61.35},{ 6.88,19.81},{84.02,150.62},
  {74.04,129.53},{36.17,77.70},{10.10,55.22},{82.12,133.04},
  {65.12,114.23},{26.15,61.14},{55.79,119.04},{ 5.13,26.96},
  { 9.71,39.05},{47.23,86.33},{88.17,140.47},{72.00,136.55},
  {50.19,89.91},{99.03,166.27},{21.80,57.90},{15.84,62.55},
  {97.93,169.82},{74.70,150.72},{62.10,117.73},{88.59,177.33},
  {10.67,32.20},{86.19,139.54},{ 0.86,38.95},{43.94,85.59},
  {65.26,125.30},{ 5.12,36.78},{27.90,70.23},{48.49,95.07},
  {26.33,50.10},{74.26,130.64},{28.17,65.67},{85.53,154.38},
  { 8.81,33.59},{59.30,110.24},{ 8.41,45.21},{86.78,117.81},
  {71.55,108.99},{73.00,128.87},{ 7.57,46.42},{ 2.67,16.23},
  {89.76,160.13},{73.35,128.80},{13.22,47.10},{57.21,117.30},
  {21.69,58.75},{ 1.84,10.87},{74.03,126.89},{32.43,65.31},
  {18.91,36.75},{79.01,137.13},{88.99,130.99},{16.45,59.89},
  { 4.14,34.35},{36.84,83.81},{98.42,154.48},{ 1.50,52.05},
  {92.91,175.37},{89.54,149.20},{65.71,118.76},{83.84,149.47},
  {20.52,73.44},{70.11,128.04},{32.45,74.00},{72.44,123.91},
  {93.91,149.22},{34.12,88.83},{50.65,113.43},{33.81,79.51},
  {12.18,52.09},{30.27,61.00},{69.99,118.16},{56.61,112.51},
  {36.00,90.54},{ 8.47,27.15},{29.54,47.31},{14.50,58.68},
  {79.92,143.07},{78.10,143.79},{98.15,174.48},{30.29,72.34},
  {57.69,101.31},{ 2.09,33.80},{ 5.90,46.20},{58.34,104.23},
  {66.17,141.37},{55.53,110.07},{96.92,167.10},{ 1.50,33.93},
  {26.19,65.67},{23.48,72.74},{90.92,160.50},{91.19,139.91},
  { 3.88,44.28},{62.88,106.53},{56.04,116.06},{10.11,30.51},
  {71.35,138.82},{88.37,157.42},{73.00,147.11},{64.14,111.08},
  {49.26,114.19},{49.88,112.10},{49.18,101.53},{48.13,96.06},
  {33.33,76.01},{94.52,162.99},{78.18,136.67},{51.02,104.82},
  {44.69,108.02},{47.99,106.22},{16.25,49.11},{16.16,50.10},
  {39.00,88.35},{15.85,50.41},{46.26,100.51},{25.21,46.36},
  {45.35,95.33},{39.77,92.25},{28.30,80.66},{75.07,127.22},
  {74.78,129.95},{20.69,64.27},{37.14,93.13},{57.61,107.97},
  { 2.63,45.27},{81.08,152.29},{56.31,107.93},{50.35,94.40},
  {55.35,101.37},{55.53,115.00},{29.57,58.12},{ 1.66,24.24},
  {87.56,147.66},{62.13,117.35},{46.82,104.00},{86.97,147.44},
  {41.02,89.98},{17.06,62.61},{82.41,136.36},{23.22,42.70},
  {18.75,67.28},{71.33,131.04},{69.52,129.33},{82.63,147.12},
  {47.24,90.92},{22.65,65.96},{73.05,139.46},{70.24,128.58},
  {29.19,69.72},{40.67,72.89},{69.21,114.59},{ 4.61,26.99},
  { 8.77,53.62},{93.77,153.76},{90.60,155.79},{87.58,173.84},
  {91.49,158.66},{45.29,110.83},{94.97,166.03},{53.88,102.45},
  {48.87,94.11},{ 0.63,26.45},{67.53,115.30},{58.60,117.09},
  {65.46,130.14},{69.45,139.73},{ 6.01,36.67},{70.72,123.43},
  {39.03,97.08},{24.29,65.89},{ 7.03,35.23},{56.64,114.54},
  {52.23,105.48},{66.33,125.88},{51.49,100.16},{14.78,62.37},
  {23.72,54.24},{90.24,161.83},{66.28,110.00},{ 5.60,45.74},
  {12.64,54.09},{ 7.18,25.05},{56.95,117.69},{69.10,117.20},
  {36.09,91.50},{ 4.58,30.30},{33.13,58.84},{65.16,109.84},
  {31.16,63.92},{57.47,106.93},{32.84,75.74},{26.60,71.48},
  { 9.90,43.94},{94.26,159.14},{90.71,150.67},{19.62,65.93},
  {65.93,136.01},{51.32,105.70},{37.18,78.73},{50.31,88.51},
  {93.10,151.38},{39.46,81.33},{21.54,75.04},{97.69,166.07},
  {79.40,142.16},{14.70,39.74},{94.09,171.81},{43.79,93.80},
  {62.05,110.89},{79.22,134.78},{97.36,168.21},{90.50,166.31},
  {83.33,146.74},{95.86,167.96},{ 0.16,34.61},{42.31,90.83},
  {92.62,151.94},{35.59,82.69},{74.19,135.22},{63.46,128.10},
  {44.86,107.00},{57.32,125.09},{45.04,91.50},{84.27,165.01},
  {57.91,128.78},{85.40,140.95},{48.96,93.90},{74.52,132.30},
  {57.24,116.84},{58.48,102.05},{69.03,126.67},{ 4.38,39.47},
  {51.33,92.33},{19.61,62.98},{59.83,112.01},{70.57,118.57},
  { 5.45,35.48},{28.72,54.61},{22.55,49.25},{69.93,124.02},
  {63.43,117.73},{72.72,133.87},{77.01,140.12},{34.51,71.52},
  {14.37,31.51},{ 3.24,31.74},{ 6.99,46.12},{ 0.44,-4.78},
  {12.34,45.71},{71.62,135.47},{81.04,137.69},{30.62,64.26},
  {23.27,63.97},{95.44,177.74},{19.31,60.92},{67.51,120.81},
  {68.89,136.62},{65.18,128.79},{43.58,103.31},{76.18,152.18},
  {78.16,142.12},{13.17,55.95},{83.40,139.03},{ 0.57,27.11},
  {99.35,162.87},{64.00,102.77},{50.54,107.18},{56.45,117.60},
  {26.03,51.83},{63.38,110.88},{73.76,137.72},{ 1.76,30.03},
  {71.03,131.12},{ 0.53,33.73},{32.10,90.32},{22.91,60.97},
  {61.07,116.18},{11.66,52.86},{22.94,46.74},{38.12,88.13},
  {84.17,142.08},{39.19,72.19},{46.30,81.32},{58.31,100.03},
  {15.84,34.20},{ 8.05,33.07},{46.34,99.75},{66.27,119.29},
  {14.38,37.29},{94.29,165.90},{ 2.14,29.37},{84.18,154.89},
  {24.02,58.82},{89.02,140.67},{78.31,132.86},{14.09,63.99},
  {58.63,137.58},{83.66,156.76},{82.29,129.03},{ 6.96,39.48},
  { 2.73,24.93},{71.83,133.05},{75.65,136.18},{82.53,154.15},
  { 8.62,61.15},{32.22,88.34},{11.56,35.27},{44.96,97.85},
  {99.65,165.85},{60.11,113.71},{ 3.62,24.97},{88.03,138.06},
  {90.15,163.07},{90.64,149.29},{ 5.75,27.35},{51.11,100.58},
  {20.92,43.65},{59.70,109.50},{69.38,138.45},{27.90,78.59},
  {26.52,68.55},{22.67,54.08},{48.17,96.37},{ 0.19,33.79},
  {40.42,80.04},{65.17,120.38},{95.98,162.88},{50.44,99.58},
  {31.94,89.91},{27.18,63.57},{74.36,129.34},{ 5.46,28.48},
  {35.21,81.14},{37.94,70.78},{16.22,53.52},{52.52,115.72}
};

/* Squared residual of one point against the line y = m*x + c. */
double residual_error(double x, double y, double m, double c) {
  double e = (m * x) + c - y;
  return e * e;
}

/* Root-mean-square error of the line (m, c) over the whole data set (CPU
   reference implementation, used to seed the search). */
double rms_error(double m, double c) {
  int i;
  double mean;
  double error_sum = 0;

  for(i=0; i<n_data; i++) {
    error_sum += residual_error(data[i].x, data[i].y, m, c);
  }

  mean = error_sum / n_data;
  return sqrt(mean);
}

/* Store finish - start (nanoseconds) into *difference.
   Returns non-zero when the difference is not positive. */
int time_difference(struct timespec *start, struct timespec *finish,
                    long long int *difference) {
  long long int ds = finish->tv_sec - start->tv_sec;
  long long int dn = finish->tv_nsec - start->tv_nsec;

  if(dn < 0) {
    ds--;
    dn += 1000000000;
  }
  *difference = ds * 1000000000 + dn;
  return !(*difference > 0);
}

/* Device-side copy of residual_error(). */
__device__ double d_residual_error(double x, double y, double m, double c) {
  double e = (m * x) + c - y;
  return e * e;
}

/* One thread per data point: write that point's squared residual into
   error_sum_arr[i].  Launched as <<<100,10>>> (1000 threads) to match the
   1000-point data set; the d_n_data guard keeps any surplus thread from
   writing out of bounds if the launch configuration changes. */
__global__ void d_rms_error(double *m, double *c, double *error_sum_arr,
                            point_t *d_data) {
  int i = threadIdx.x + blockIdx.x * blockDim.x;
  if (i < d_n_data)
    error_sum_arr[i] = d_residual_error(d_data[i].x, d_data[i].y, *m, *c);
}

int main() {
  int i;
  double bm = 1.3;                 /* current best slope                   */
  double bc = 10;                  /* current best intercept               */
  double be;                       /* current best RMS error               */
  double dm[8];                    /* candidate slopes for this sweep      */
  double dc[8];                    /* candidate intercepts for this sweep  */
  double e[8];                     /* candidate RMS errors                 */
  double step = 0.01;
  double best_error;               /* best error within one sweep          */
  int best_error_i = 0;            /* initialised: defined even if no
                                      candidate ever wins (defensive)      */
  int minimum_found = 0;

  /* The 8 compass directions around (m, c). */
  double om[] = {0,1,1, 1, 0,-1,-1,-1};
  double oc[] = {1,1,0,-1,-1,-1, 0, 1};

  struct timespec start, finish;
  long long int time_elapsed;

  clock_gettime(CLOCK_MONOTONIC, &start);

  cudaError_t error;

  double *d_dm;
  double *d_dc;
  double *d_error_sum_arr;
  point_t *d_data;

  be = rms_error(bm, bc);

  error = cudaMalloc(&d_dm, (sizeof(double) * 8));
  if(error){
    fprintf(stderr, "cudaMalloc on d_dm returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  error = cudaMalloc(&d_dc, (sizeof(double) * 8));
  if(error){
    fprintf(stderr, "cudaMalloc on d_dc returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  error = cudaMalloc(&d_error_sum_arr, (sizeof(double) * 1000));
  if(error){
    fprintf(stderr, "cudaMalloc on d_error_sum_arr returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  error = cudaMalloc(&d_data, sizeof(data));
  if(error){
    fprintf(stderr, "cudaMalloc on d_data returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  while(!minimum_found) {
    /* BUG FIX: best_error is reset each sweep.  Previously it persisted
       across sweeps, so when no direction improved, best_error_i still
       indexed the previous sweep's dm/dc and the diagnostic printf below
       reported a stale candidate. */
    best_error = 999999999;

    for(i=0;i<8;i++) {
      dm[i] = bm + (om[i] * step);
      dc[i] = bc + (oc[i] * step);
    }

    error = cudaMemcpy(d_dm, dm, (sizeof(double) * 8), cudaMemcpyHostToDevice);
    if(error){
      fprintf(stderr, "cudaMemcpy to d_dm returned %d %s\n", error,
        cudaGetErrorString(error));
    }

    error = cudaMemcpy(d_dc, dc, (sizeof(double) * 8), cudaMemcpyHostToDevice);
    if(error){
      fprintf(stderr, "cudaMemcpy to d_dc returned %d %s\n", error,
        cudaGetErrorString(error));
    }

    error = cudaMemcpy(d_data, data, sizeof(data), cudaMemcpyHostToDevice);
    if(error){
      fprintf(stderr, "cudaMemcpy to d_data returned %d %s\n", error,
        cudaGetErrorString(error));
    }

    for(i=0;i<8;i++) {
      double h_error_sum_arr[1000];
      /* BUG FIX: was declared uninitialised and then read by '+=' below —
         undefined behaviour on every iteration. */
      double error_sum_total = 0;
      double error_sum_mean;

      d_rms_error <<<100,10>>>(&d_dm[i], &d_dc[i], d_error_sum_arr, d_data);
      /* Kernel launches do not return an error directly; check explicitly. */
      error = cudaGetLastError();
      if(error){
        fprintf(stderr, "d_rms_error launch returned %d %s\n", error,
          cudaGetErrorString(error));
      }
      cudaDeviceSynchronize();

      error = cudaMemcpy(h_error_sum_arr, d_error_sum_arr,
        (sizeof(double) * 1000), cudaMemcpyDeviceToHost);
      if(error){
        fprintf(stderr, "cudaMemcpy to error_sum returned %d %s\n", error,
          cudaGetErrorString(error));
      }

      /* Reduce the per-point squared residuals to an RMS error. */
      for(int j=0; j<n_data; j++) {
        error_sum_total += h_error_sum_arr[j];
      }

      error_sum_mean = error_sum_total / n_data;
      e[i] = sqrt(error_sum_mean);

      if(e[i] < best_error) {
        best_error = e[i];
        best_error_i = i;
      }
    }

    printf("best m,c is %lf,%lf with error %lf in direction %d\n",
      dm[best_error_i], dc[best_error_i], best_error, best_error_i);

    if(best_error < be) {
      be = best_error;
      bm = dm[best_error_i];
      bc = dc[best_error_i];
    } else {
      minimum_found = 1;
    }
  }

  /* Free memory for d_dm */
  error = cudaFree(d_dm);
  if(error){
    fprintf(stderr, "cudaFree on d_dm returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  /* Free memory for d_dc */
  error = cudaFree(d_dc);
  if(error){
    fprintf(stderr, "cudaFree on d_dc returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  error = cudaFree(d_data);
  if(error){
    fprintf(stderr, "cudaFree on d_data returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  error = cudaFree(d_error_sum_arr);
  if(error){
    fprintf(stderr, "cudaFree on d_error_sum_arr returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  printf("minimum m,c is %lf,%lf with error %lf\n", bm, bc, be);

  clock_gettime(CLOCK_MONOTONIC, &finish);
  time_difference(&start, &finish, &time_elapsed);
  printf("Time elapsed was %lldns or %0.9lfs\n", time_elapsed,
    (time_elapsed/1.0e9));

  return 0;
}