serial_no int64 1 24.2k | cuda_source stringlengths 11 9.01M |
|---|---|
14,901 | #include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <pthread.h>
#define MAX_THREAD 24
/*
0123456789
0 oooooooooo
1 o@@@@@@@@o Problem solving example
2 o@@@@@@@@o 8 * 6 matrix
3 o@@@@@@@@o + 1px padding around matrix
4 o@@@@@@@@o
5 o@@@@@@@@o
6 o@@@@@@@@o
7 oooooooooo
*/
int nprocs, display, gen, width, height;
int** arr;
int** tmp;
pthread_barrier_t tbarrier;
struct timespec begin, end;
//CUDA and Thread
void dump();
//single & multi thread
int setPixel(int x, int y);
void* Thread(void *args);
void nextGenPixel(int start, int end, int width);
void copyAndResetData(int start, int end, int width);
typedef struct{
int start;
int end;
} bound;
// Evaluate Conway's rules for one cell of the flattened padded board.
// mem    : flattened (height+2) x (width+2) board of 0/1 cells
// index  : flat offset of the cell (must not lie on the padding ring)
// width  : row stride of the flattened board
// Returns 1 if the cell is alive next generation, 0 otherwise.
__device__ int cudaNeighbor(int *mem, int index, int width){
    int alive = mem[index];
    int up = index - width;
    int down = index + width;
    // Sum of the eight surrounding cells.
    int count = mem[up - 1] + mem[up] + mem[up + 1]
              + mem[index - 1] + mem[index + 1]
              + mem[down - 1] + mem[down] + mem[down + 1];
    // Live cell survives with 2 or 3 neighbours; dead cell revives with 3.
    if((alive == 1 && (count == 2 || count == 3)) || (alive == 0 && count == 3)){
        return 1;
    }
    return 0;
}
// One Game-of-Life generation step, one thread per cell of the padded board.
// Expected launch: <<< height, width >>> where height/width are the PADDED
// dimensions, so blockIdx.x is the row and threadIdx.x is the column.
// `gen` is unused inside the kernel.
__global__ void my_kernel(int *cuda_mem, int *cuda_tmp, int height, int width, int gen){
int index = threadIdx.x + blockIdx.x * blockDim.x;
// Cells on the 1-px padding ring are never recomputed.
if(blockIdx.x == 0 || blockIdx.x == height-1 || threadIdx.x == 0 || threadIdx.x == width-1){
//Do nothing
}else{
cuda_tmp[index] = cudaNeighbor(cuda_mem, index, width);
}
// NOTE(review): CUDA has no grid-wide barrier inside a kernel, so this
// write-back into cuda_mem can race with cudaNeighbor() reads issued by
// blocks that have not finished yet — results may be nondeterministic.
// The usual fix is to end the kernel after writing cuda_tmp and swap the
// two device pointers on the host between launches.
cuda_mem[index] = cuda_tmp[index];
cuda_tmp[index] = 0;
}
/*
 * glife — Conway's Game of Life driver.
 * Usage: ./glife <input file> <display> <nprocs> <# of generation> <width> <height>
 * nprocs == 0 selects the CUDA path; nprocs >= 1 selects the pthread path.
 *
 * Fixes over the original:
 *  - pthread_create was passed the corrupted token `§ion[i]` (mangled
 *    `&section[i]`), which does not compile; restored the address-of form.
 *  - cuda_mem/cuda_tmp are NULL-initialized so the unconditional cudaFree()
 *    calls at the end are safe no-ops when the pthread path was taken.
 *  - the input stream is now fclose()d.
 *  - the per-row allocations of arr/tmp are freed (free(arr) alone leaked
 *    every row).
 */
int main(int argc, char *argv[]){
    pthread_t thread[MAX_THREAD];
    FILE *fp;
    char buffer[20];
    int x, y, size;
    // Flattened 1-D copies of the padded board, used by the CUDA path.
    int *mat_1d, *mat_1d_tmp;
    int *cuda_mem = NULL, *cuda_tmp = NULL;  // device buffers (CUDA path only)
    char *x_map, *y_map;
    if(argc!=7){
        printf("Parameter Error!\n");
        printf("./glife <input file> <display> <nprocs> <# of generation> <width> <height>\n");
        exit(1);
    }
    display = atoi(argv[2]);
    nprocs = atoi(argv[3]);
    gen = atoi(argv[4]);
    width = atoi(argv[5]);
    height = atoi(argv[6]);
    // Allocate the (height+2) x (width+2) board (1-px padding ring) and the
    // scratch board used for the next generation.
    arr = (int**)malloc(sizeof(int*) * (height+2));
    for(int i=0; i<height+2; i++){
        arr[i] = (int*)malloc(sizeof(int) * (width+2));
    }
    tmp = (int**)malloc(sizeof(int*) * (height+2));
    for(int i=0; i<height+2; i++){
        tmp[i] = (int*)malloc(sizeof(int) * (width+2));
    }
    size = (height+2) * (width+2) * sizeof(int);
    mat_1d = (int*)malloc(size);
    mat_1d_tmp = (int*)malloc(size);
    // Zero every cell, padding included.
    for(int a=0; a<height+2; a++){
        for(int b=0; b<width+2; b++){
            arr[a][b] = 0;
            tmp[a][b] = 0;
            mat_1d[a*(width+2)+b] = 0;
            mat_1d_tmp[a*(width+2)+b] = 0;
        }
    }
    if((fp=fopen(argv[1],"r")) == NULL){
        fprintf(stderr, "error");
        exit(2);
    }
    // Each input line holds the coordinates of one live cell.
    while(fgets(buffer, 20, fp) != NULL){
        y_map = strtok(buffer, " ");
        x_map = strtok(NULL, " ");
        y = atoi(y_map);
        x = atoi(x_map);
        arr[x][y] = 1;
        mat_1d[x*(width+2) +y] = 1;
    }
    fclose(fp);  // FIX: the stream was never closed
    if(nprocs == 0){
        // ---- CUDA path ----
        clock_gettime(CLOCK_MONOTONIC, &begin);
        cudaMalloc(&cuda_mem, size);
        cudaMalloc(&cuda_tmp, size);
        cudaMemcpy(cuda_mem, mat_1d, size, cudaMemcpyHostToDevice);
        cudaMemcpy(cuda_tmp, mat_1d_tmp, size, cudaMemcpyHostToDevice);
        for(int i=0; i<gen; i++){
            // One block per padded row, one thread per padded column.
            my_kernel<<< height+2 , width+2 >>>(cuda_mem, cuda_tmp, height+2, width+2, gen);
            cudaDeviceSynchronize();
        }
        cudaMemcpy(mat_1d, cuda_mem, size, cudaMemcpyDeviceToHost);
        cudaMemcpy(mat_1d_tmp, cuda_tmp, size, cudaMemcpyDeviceToHost);
        clock_gettime(CLOCK_MONOTONIC, &end);
        for(int i=0;i<height+2;i++){
            for(int j=0;j<width+2;j++){
                arr[i][j] = mat_1d[i*(width+2) +j ];
            }
        }
    }else{
        // ---- single / multi thread path ----
        // Split the rows into nprocs contiguous slices; the last slice
        // absorbs the remainder when height % nprocs != 0.
        bound section[MAX_THREAD];
        int lo = 0;
        int hi = 0;
        int div = height/nprocs;
        for(int k=0; k<nprocs; k++){
            if(k == (nprocs-1)){
                section[k].start = lo;
                section[k].end = height;
            }else{
                hi += div;
                section[k].start = lo;
                section[k].end = hi;
                lo += div;
            }
        }
        pthread_barrier_init(&tbarrier, NULL, nprocs);
        clock_gettime(CLOCK_MONOTONIC, &begin);
        for(int i=0; i<nprocs; i++){
            // FIX: the original contained the mangled token `§ion[i]`.
            pthread_create(&thread[i], NULL, Thread, &section[i]);
        }
        for(int j=0; j<nprocs; j++){
            pthread_join(thread[j], NULL);
        }
        clock_gettime(CLOCK_MONOTONIC, &end);
        pthread_barrier_destroy(&tbarrier);
    }
    printf("Execution time : %2.3f sec\n",(end.tv_sec - begin.tv_sec)+(end.tv_nsec-begin.tv_nsec)/1000000000.0);
    if(display == 1){
        dump();
    }
    // FIX: free every row before freeing the row-pointer arrays.
    for(int i=0; i<height+2; i++){
        free(arr[i]);
        free(tmp[i]);
    }
    free(arr);
    free(tmp);
    free(mat_1d);
    free(mat_1d_tmp);
    cudaFree(cuda_mem);
    cudaFree(cuda_tmp);
    return 0;
}
/*
 * Worker body for the pthread path. `args` points at this thread's `bound`
 * (its row slice). Two barriers per generation keep the compute phase and
 * the copy-back phase of all workers in lockstep on the shared arr/tmp.
 * Returns NULL (the result is ignored by pthread_join).
 */
void *Thread(void *args){
    bound *section = (bound*)args;
    for(int i=0; i<gen; i++){
        nextGenPixel(section[0].start, section[0].end, width);
        pthread_barrier_wait(&tbarrier);
        copyAndResetData(section[0].start, section[0].end, width);
        pthread_barrier_wait(&tbarrier);
    }
    return NULL;  // FIX: the original fell off the end of a void* function
}
// Compute the next generation for rows [start, end) of this worker's slice
// into the global tmp board. Row 0 is padding, so a slice starting at 0 is
// clamped to 1; the slice that ends at `height` also covers row `height`
// itself (hence end + 1).
void nextGenPixel(int start, int end, int wdth){
    int first = (start == 0) ? 1 : start;
    int last = (end == height) ? end + 1 : end;
    for(int row = first; row < last; row++){
        for(int col = 1; col <= wdth; col++){
            tmp[row][col] = setPixel(row, col);
        }
    }
}
// Publish tmp back into arr for rows [start, end) and zero tmp for the next
// generation. The slice ending at `height` also flushes the last interior
// row and the bottom padding row (hence end + 2). Columns cover the full
// padded width.
void copyAndResetData(int start, int end, int wdth){
    int limit = (end == height) ? end + 2 : end;
    for(int row = start; row < limit; row++){
        for(int col = 0; col < wdth + 2; col++){
            arr[row][col] = tmp[row][col];
            tmp[row][col] = 0;
        }
    }
}
// Apply Conway's rules to cell (x, y) of the global arr board.
// Returns 1 if the cell is alive in the next generation, else 0.
// (x, y) must be an interior cell so all eight neighbours exist.
int setPixel(int x, int y){
    int alive = arr[x][y];
    int count = 0;
    // Sum the eight surrounding cells.
    for(int dx = -1; dx <= 1; dx++){
        for(int dy = -1; dy <= 1; dy++){
            if(dx != 0 || dy != 0){
                count += arr[x+dx][y+dy];
            }
        }
    }
    // Live with 2 or 3 neighbours survives; dead with exactly 3 revives.
    if((alive == 1 && count == 2) || (alive == 1 && count == 3) || (alive == 0 && count == 3)){
        return 1;
    }
    return 0;
}
// Render the interior (unpadded) board to stdout: 'o' = live, '-' = dead.
void dump(){
    printf("%d x %d matrix\n", width, height);
    printf("========================================\n");
    for(int row = 1; row <= height; row++){
        for(int col = 1; col <= width; col++){
            putchar(arr[row][col] == 1 ? 'o' : '-');
        }
        putchar('\n');
    }
    printf("========================================\n");
}
|
14,902 | #include <iostream>
#include <thrust/device_vector.h>
#include <thrust/host_vector.h>
#include <thrust/random/linear_congruential_engine.h>
#include <thrust/random/uniform_real_distribution.h>
// Thrust functor: one Monte-Carlo dart per index. Each copy carries its own
// RNG and distribution state, captured by value at construction, so device
// copies are self-contained.
struct raw_access {
thrust::minstd_rand rng;
thrust::uniform_real_distribution<double> dist;
raw_access (thrust::uniform_real_distribution<double> dist, thrust::minstd_rand rng) : dist(dist), rng(rng) {};
__device__ __host__
double operator()(const int &i) {
// Jump ahead so every index gets a disjoint slice of the stream
// (100 draws reserved per index; only the 2 below are consumed).
rng.discard(i * 100);
double x = dist(rng);
double y = dist(rng);
// 1.0 if (x, y) lands inside the unit quarter-circle, else 0.0.
double power = pow(x, 2) + pow(y, 2);
if (power <= 1)
{
return 1.0;
}
return 0.0;
}
};
// Monte-Carlo estimate of pi on the GPU: throw N darts into the unit
// square and scale the in-circle fraction by 4.
int main(){
int N = 100000;
thrust::minstd_rand rng(100);  // fixed seed for reproducibility
thrust::uniform_real_distribution<double> dist(0.0, 1.0);
thrust::device_vector<double> vetor(N);
thrust::counting_iterator<int> iter(0);
raw_access ra(dist, rng);
// Map each index to a 0/1 hit flag on the device, then sum the hits.
thrust::transform(iter, iter+vetor.size(), vetor.begin(), ra);
double sum = thrust::reduce(vetor.begin(), vetor.end(), 0.0, thrust::plus<double>());
double pi = (double)4 * sum / N;
std::cout << "pi " << pi;
printf("\n");
return 0;
} |
14,903 | #include "includes.h"
// For every (point, voxel) pair, accumulate one entry of cov_dxd_pi:
//   cov_dxd_pi[(row*6+col)][j] = icov(row,:) . point_gradient(:,col)
// Launch layout: grid.x * block.x strides over valid points, blockIdx.y
// selects the covariance row (0..2), blockIdx.z the gradient column (0..5).
// gauss_d1/gauss_d2 and valid_points are unused in this kernel.
// NOTE(review): assumes starting_voxel_id holds valid_points_num + 1
// CSR-style offsets and that every voxel_id[j] < voxel_num — confirm at the
// call site.
__global__ void computeCovDxdPi(int *valid_points, int *starting_voxel_id, int *voxel_id, int valid_points_num, double *inverse_covariance, int voxel_num, double gauss_d1, double gauss_d2, double *point_gradients, double *cov_dxd_pi, int valid_voxel_num)
{
int id = threadIdx.x + blockIdx.x * blockDim.x;
int stride = blockDim.x * gridDim.x;
int row = blockIdx.y;
int col = blockIdx.z;
if (row < 3 && col < 6) {
// The three components of row `row` of the inverse covariance, laid out
// as voxel_num-strided planes.
double *icov0 = inverse_covariance + row * 3 * voxel_num;
double *icov1 = icov0 + voxel_num;
double *icov2 = icov1 + voxel_num;
double *cov_dxd_pi_tmp = cov_dxd_pi + (row * 6 + col) * valid_voxel_num;
// The three components of gradient column `col`, each a
// 6*valid_points_num-strided plane.
double *pg_tmp0 = point_gradients + col * valid_points_num;
double *pg_tmp1 = pg_tmp0 + 6 * valid_points_num;
double *pg_tmp2 = pg_tmp1 + 6 * valid_points_num;
for (int i = id; i < valid_points_num; i += stride) {
double pg0 = pg_tmp0[i];
double pg1 = pg_tmp1[i];
double pg2 = pg_tmp2[i];
// Each point contributes to the voxels listed in its CSR range.
for ( int j = starting_voxel_id[i]; j < starting_voxel_id[i + 1]; j++) {
int vid = voxel_id[j];
cov_dxd_pi_tmp[j] = icov0[vid] * pg0 + icov1[vid] * pg1 + icov2[vid] * pg2;
}
}
}
} |
14,904 | #include <stdio.h>
// Each launched thread prints its global index, emulating the iterations
// of a serial loop across the whole grid.
__global__ void loop()
{
    int globalIdx = blockIdx.x * blockDim.x + threadIdx.x;
    printf("This is iteration number %d\n", globalIdx);
}
int main()
{
  /*
   * When refactoring `loop` to launch as a kernel, be sure
   * to use the execution configuration to control how many
   * "iterations" to perform.
   *
   * For this exercise, be sure to use more than 1 block in
   * the execution configuration.
   */
  int NUM_BLOCKS = 2;
  int NUM_THREADS = 5;
  loop<<<NUM_BLOCKS, NUM_THREADS>>>();
  // A kernel launch returns no status directly; query the launch error
  // explicitly (the original silently ignored launch failures).
  cudaError_t err = cudaGetLastError();
  if (err != cudaSuccess)
    printf("Kernel launch failed: %s\n", cudaGetErrorString(err));
  cudaDeviceSynchronize();
  return 0;
}
|
14,905 |
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
// Enumerate every CUDA device and print a selection of its cudaDeviceProp
// fields: general info, memory sizes/alignment, and multiprocessor limits.
// NOTE(review): cudaGetDeviceCount/cudaGetDeviceProperties return codes are
// not checked; on a machine without a driver `count` may be unreliable.
int main()
{
cudaDeviceProp prop;
int count;
cudaGetDeviceCount(&count);
printf("Number of devices: %d\n", count);
for (int i = 0; i < count; i++) {
cudaGetDeviceProperties(&prop, i);
printf("\n --- General Information for Device %d --- \n", i);
printf("\tName: %s\n", prop.name);
printf("\tCompute capability : %d.%d\n", prop.major, prop.minor);
printf("\tClock Rate : %d\n", prop.clockRate);
printf("\tDevice copy overlap: %s\n", prop.deviceOverlap ? "Enabled" : "Disabled");
printf("\tKernel execution timeout: %s\n", prop.kernelExecTimeoutEnabled ? "Enabled" : "Disabled");
printf("\n --- Memory Information for Device %d --- \n", i);
printf("\tTotal global mem : %ld bytes\n", prop.totalGlobalMem);
printf("\tTotal constant mem : %ld bytes\n", prop.totalConstMem);
printf("\tMax mem pitch : %ld bytes\n", prop.memPitch);
printf("\tTexture alignment : %ld\n", prop.textureAlignment);
printf("\n --- Multiprocessor Information for Device %d --- \n", i);
printf("\tMultiprocessor count: %d\n", prop.multiProcessorCount);
printf("\tShared memory per mp: %ld bytes\n", prop.sharedMemPerMultiprocessor);
printf("\tRegisters per mp : %d\n", prop.regsPerBlock);
printf("\tThreads in warp : %d\n", prop.warpSize);
printf("\tMax threads per block %d \n", prop.maxThreadsPerBlock);
printf("\tMax thread dimensions : (%d, %d, %d)\n", prop.maxThreadsDim[0],
prop.maxThreadsDim[1],
prop.maxThreadsDim[2]);
printf("\tMax grid dimensions : (%d, %d, %d)\n", prop.maxGridSize[0],
prop.maxGridSize[1],
prop.maxGridSize[2]);
printf("\n");
}
return 0;
} |
14,906 | //pass
//--blockDim=32 --gridDim=2
#include <cuda.h>
// GPUVerify test kernel (see the //pass header above): each thread adds
// A[idx+1] + 11 into A[idx]. N is unused.
// NOTE(review): reads A[idx+1], so A must hold at least
// gridDim.x*blockDim.x + 1 elements; thread idx reads A[idx+1] while thread
// idx+1 updates it, with no ordering between them — intentional for the
// verification harness, not a pattern to copy.
__global__ void test_Prog(int *A, int N) {
int tid = threadIdx.x;
int bid = blockIdx.x;
int idx = blockDim.x * bid + tid;
int tmp = A[idx + 1];
tmp = tmp + 11;
A[idx] += tmp;
} |
14,907 | // From Appendix B.16 of the CUDA-C Programming Guide.
#include "stdio.h"
#include "cuda.h"
// Device-side printf demo (CUDA C Programming Guide, Appendix B.16):
// every launched thread prints its own index and the broadcast argument f.
__global__ void helloCUDA(float f) {
printf("Hello thread %d, f=%f\n", threadIdx.x, f);
}
int main() {
  // One block of five threads; cudaDeviceReset tears the context down
  // after the launch.
  const float value = 1.2345f;
  helloCUDA<<<1, 5>>>(value);
  cudaDeviceReset();
  return 0;
}
|
14,908 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <cuda.h>
#define N 10000
#define M 10000
#define K 10000
#define tile_size 16
/*
 * Tiled matrix multiply c = a * b using shared-memory tiles.
 * Expects a 2-D launch with tile_size x tile_size thread blocks; K is the
 * square dimension used for all indexing (N, M and K are equal here).
 *
 * FIX: the shared tiles were declared `int` although a, b and c are float
 * matrices, silently truncating every loaded element. They are now float.
 */
__global__ void matrix_mul_shared(float *a, float *b, float *c) {
    __shared__ float a_tile[tile_size][tile_size]; // shared tile of matrix a
    __shared__ float b_tile[tile_size][tile_size]; // shared tile of matrix b
    int row = blockIdx.y * tile_size + threadIdx.y;
    int col = blockIdx.x * tile_size + threadIdx.x;
    float temp = 0.0f; // running dot product for c[row][col]
    int tileIdx;
    // Walk the tiles along the shared dimension.
    for (int s = 0; s < gridDim.x; s++) {
        tileIdx = row * K + s * tile_size + threadIdx.x;
        if(tileIdx >= K*K)
            a_tile[threadIdx.y][threadIdx.x] = 0; // zero-pad past the matrix
        else
            a_tile[threadIdx.y][threadIdx.x] = a[tileIdx];
        tileIdx = (s * tile_size + threadIdx.y) * K + col;
        if(tileIdx >= K*K)
            b_tile[threadIdx.y][threadIdx.x] = 0; // zero-pad past the matrix
        else
            b_tile[threadIdx.y][threadIdx.x] = b[tileIdx];
        __syncthreads(); // tiles fully loaded before anyone reads them
        for (int j = 0; j < tile_size; j++)
            temp += a_tile[threadIdx.y][j] * b_tile[j][threadIdx.x];
        __syncthreads(); // everyone done reading before the next load
    }
    if(row < K && col < K)
        c[row * K + col] = temp;
}
//Function to initialize matrices with random values
/* Fill a size x size row-major matrix with pseudo-random integer values
 * in [0, 1023], stored as floats. Uses rand(), so results depend on the
 * current srand() seed. */
void randomInit (float *data, int size)
{
    int total = size * size;
    for (int k = 0; k < total; k++)
        data[k] = (float)(rand() % 1024);
}
// Benchmark driver: times (1) host->device copies, (2) the tiled matmul
// kernel, (3) device->host copy, using CUDA events, then prints kernel-only
// and total times. NOTE(review): matrix_size = N*M*sizeof(float) = 4e8,
// which still fits a 32-bit int, but would overflow for larger N/M.
int main(void) {
float *a, *b, *c; //CPU copies
float *d_a, *d_b, *d_c; //GPU copies
int matrix_size = N * M * sizeof(float);
cudaEvent_t start, stop,start1,stop1,start2,stop2;
float time,time1,time2;
//Start the cuda timer
cudaEventCreate(&start);
cudaEventCreate(&start1);
cudaEventCreate(&start2);
cudaEventCreate(&stop);
cudaEventCreate(&stop1);
cudaEventCreate(&stop2);
//Allocate CPU memory
a = (float *) malloc(matrix_size); randomInit(a, N);
b = (float *) malloc(matrix_size); randomInit(b, M);
c = (float *) malloc(matrix_size);
//Allocate GPU memory
cudaMalloc((void **) &d_a, matrix_size);
cudaMalloc((void **) &d_b, matrix_size);
cudaMalloc((void **) &d_c, matrix_size);
//Copy from CPU memory to GPU memory
cudaEventRecord( start1, 0 );
cudaMemcpy( d_a, a, matrix_size, cudaMemcpyHostToDevice);
cudaMemcpy( d_b, b, matrix_size, cudaMemcpyHostToDevice);
cudaEventRecord( stop1, 0 );
cudaEventSynchronize(stop1);
cudaEventElapsedTime( &time1, start1, stop1 );
cudaEventDestroy( start1 );
cudaEventDestroy( stop1 );
//Set thread and grid dimensions
dim3 tBlock(16, 16);
dim3 Grid((N + 16 - 1)/tBlock.x, (M + 16 - 1)/tBlock.y);
cudaEventRecord( start, 0 );
//Call kernels
matrix_mul_shared<<< Grid, tBlock >>> (d_a,d_b,d_c);
// cudaEventSynchronize(stop) below blocks until the kernel has finished,
// so `time` covers the full kernel execution.
cudaEventRecord( stop, 0 );
cudaEventSynchronize(stop);
cudaEventElapsedTime( &time, start, stop );
cudaEventDestroy( start );
cudaEventDestroy( stop );
printf("GPU Execution Time without memory transfer= %f\n",time);
//Copy from device to host
cudaEventRecord( start2, 0 );
cudaMemcpy( c, d_c, matrix_size, cudaMemcpyDeviceToHost);
cudaEventRecord( stop2, 0 );
cudaEventSynchronize(stop2);
cudaEventElapsedTime( &time2, start2, stop2 );
cudaEventDestroy( start2 );
cudaEventDestroy( stop2 );
float tTime=time+time1+time2;
printf("GPU Execution time with memory transfer =%f\n",tTime);
//free cpu and gpu memory
free(a); free(b); free(c);
cudaFree(d_a); cudaFree(d_b); cudaFree(d_c);
return 0;
} |
14,909 | //note: please do not modify this file manually!
// this file has been generated automatically by BOAST version 0.99996
// by: make boast_kernels
/*
!=====================================================================
!
! S p e c f e m 3 D G l o b e V e r s i o n 7 . 0
! --------------------------------------------------
!
! Main historical authors: Dimitri Komatitsch and Jeroen Tromp
! Princeton University, USA
! and CNRS / University of Marseille, France
! (there are currently many more authors!)
! (c) Princeton University and CNRS / University of Marseille, April 2014
!
! This program is free software; you can redistribute it and/or modify
! it under the terms of the GNU General Public License as published by
! the Free Software Foundation; either version 2 of the License, or
! (at your option) any later version.
!
! This program is distributed in the hope that it will be useful,
! but WITHOUT ANY WARRANTY; without even the implied warranty of
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
! GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along
! with this program; if not, write to the Free Software Foundation, Inc.,
! 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
!
!=====================================================================
*/
#ifndef INDEX2
#define INDEX2(isize,i,j) i + isize*j
#endif
#ifndef INDEX3
#define INDEX3(isize,jsize,i,j,k) i + isize*(j + jsize*k)
#endif
#ifndef INDEX4
#define INDEX4(isize,jsize,ksize,i,j,k,x) i + isize*(j + jsize*(k + ksize*x))
#endif
#ifndef INDEX5
#define INDEX5(isize,jsize,ksize,xsize,i,j,k,x,y) i + isize*(j + jsize*(k + ksize*(x + xsize*y)))
#endif
#ifndef NDIM
#define NDIM 3
#endif
#ifndef NGLLX
#define NGLLX 5
#endif
#ifndef NGLL2
#define NGLL2 25
#endif
#ifndef NGLL3
#define NGLL3 125
#endif
#ifndef NGLL3_PADDED
#define NGLL3_PADDED 128
#endif
#ifndef N_SLS
#define N_SLS 3
#endif
#ifndef IREGION_CRUST_MANTLE
#define IREGION_CRUST_MANTLE 1
#endif
#ifndef IREGION_INNER_CORE
#define IREGION_INNER_CORE 3
#endif
#ifndef IFLAG_IN_FICTITIOUS_CUBE
#define IFLAG_IN_FICTITIOUS_CUBE 11
#endif
#ifndef R_EARTH_KM
#define R_EARTH_KM 6371.0f
#endif
#ifndef COLORING_MIN_NSPEC_INNER_CORE
#define COLORING_MIN_NSPEC_INNER_CORE 1000
#endif
#ifndef COLORING_MIN_NSPEC_OUTER_CORE
#define COLORING_MIN_NSPEC_OUTER_CORE 1000
#endif
#ifndef BLOCKSIZE_TRANSFER
#define BLOCKSIZE_TRANSFER 256
#endif
static __device__ void compute_element_strain_undoatt(const int ispec, const int ijk_ispec, const int * d_ibool, const float * s_dummyx_loc, const float * s_dummyy_loc, const float * s_dummyz_loc, const float * d_xix, const float * d_xiy, const float * d_xiz, const float * d_etax, const float * d_etay, const float * d_etaz, const float * d_gammax, const float * d_gammay, const float * d_gammaz, const float * sh_hprime_xx, float * epsilondev_loc, float * epsilon_trace_over_3){
int tx;
int K;
int J;
int I;
int l;
int offset;
float tempx1l;
float tempx2l;
float tempx3l;
float tempy1l;
float tempy2l;
float tempy3l;
float tempz1l;
float tempz2l;
float tempz3l;
float xixl;
float xiyl;
float xizl;
float etaxl;
float etayl;
float etazl;
float gammaxl;
float gammayl;
float gammazl;
float duxdxl;
float duxdyl;
float duxdzl;
float duydxl;
float duydyl;
float duydzl;
float duzdxl;
float duzdyl;
float duzdzl;
float templ;
float fac1;
float fac2;
float fac3;
tx = threadIdx.x;
K = (tx) / (NGLL2);
J = (tx - ((K) * (NGLL2))) / (NGLLX);
I = tx - ((K) * (NGLL2)) - ((J) * (NGLLX));
tempx1l = 0.0f;
tempx2l = 0.0f;
tempx3l = 0.0f;
tempy1l = 0.0f;
tempy2l = 0.0f;
tempy3l = 0.0f;
tempz1l = 0.0f;
tempz2l = 0.0f;
tempz3l = 0.0f;
for (l = 0; l <= NGLLX - (1); l += 1) {
fac1 = sh_hprime_xx[(l) * (NGLLX) + I];
tempx1l = tempx1l + (s_dummyx_loc[(K) * (NGLL2) + (J) * (NGLLX) + l]) * (fac1);
tempy1l = tempy1l + (s_dummyy_loc[(K) * (NGLL2) + (J) * (NGLLX) + l]) * (fac1);
tempz1l = tempz1l + (s_dummyz_loc[(K) * (NGLL2) + (J) * (NGLLX) + l]) * (fac1);
fac2 = sh_hprime_xx[(l) * (NGLLX) + J];
tempx2l = tempx2l + (s_dummyx_loc[(K) * (NGLL2) + (l) * (NGLLX) + I]) * (fac2);
tempy2l = tempy2l + (s_dummyy_loc[(K) * (NGLL2) + (l) * (NGLLX) + I]) * (fac2);
tempz2l = tempz2l + (s_dummyz_loc[(K) * (NGLL2) + (l) * (NGLLX) + I]) * (fac2);
fac3 = sh_hprime_xx[(l) * (NGLLX) + K];
tempx3l = tempx3l + (s_dummyx_loc[(l) * (NGLL2) + (J) * (NGLLX) + I]) * (fac3);
tempy3l = tempy3l + (s_dummyy_loc[(l) * (NGLL2) + (J) * (NGLLX) + I]) * (fac3);
tempz3l = tempz3l + (s_dummyz_loc[(l) * (NGLL2) + (J) * (NGLLX) + I]) * (fac3);
}
offset = (ispec) * (NGLL3_PADDED) + tx;
xixl = d_xix[offset];
etaxl = d_etax[offset];
gammaxl = d_gammax[offset];
xiyl = d_xiy[offset];
etayl = d_etay[offset];
gammayl = d_gammay[offset];
xizl = d_xiz[offset];
etazl = d_etaz[offset];
gammazl = d_gammaz[offset];
duxdxl = (xixl) * (tempx1l) + (etaxl) * (tempx2l) + (gammaxl) * (tempx3l);
duxdyl = (xiyl) * (tempx1l) + (etayl) * (tempx2l) + (gammayl) * (tempx3l);
duxdzl = (xizl) * (tempx1l) + (etazl) * (tempx2l) + (gammazl) * (tempx3l);
duydxl = (xixl) * (tempy1l) + (etaxl) * (tempy2l) + (gammaxl) * (tempy3l);
duydyl = (xiyl) * (tempy1l) + (etayl) * (tempy2l) + (gammayl) * (tempy3l);
duydzl = (xizl) * (tempy1l) + (etazl) * (tempy2l) + (gammazl) * (tempy3l);
duzdxl = (xixl) * (tempz1l) + (etaxl) * (tempz2l) + (gammaxl) * (tempz3l);
duzdyl = (xiyl) * (tempz1l) + (etayl) * (tempz2l) + (gammayl) * (tempz3l);
duzdzl = (xizl) * (tempz1l) + (etazl) * (tempz2l) + (gammazl) * (tempz3l);
templ = (duxdxl + duydyl + duzdzl) * (0.3333333333333333f);
epsilondev_loc[0] = duxdxl - (templ);
epsilondev_loc[1] = duydyl - (templ);
epsilondev_loc[2] = (duxdyl + duydxl) * (0.5f);
epsilondev_loc[3] = (duzdxl + duxdzl) * (0.5f);
epsilondev_loc[4] = (duzdyl + duydzl) * (0.5f);
*(epsilon_trace_over_3) = templ;
}
__global__ void compute_strain_kernel(const float * d_displ, const float * d_veloc, float * epsilondev_xx, float * epsilondev_yy, float * epsilondev_xy, float * epsilondev_xz, float * epsilondev_yz, float * epsilon_trace_over_3, const int NSPEC, const int NSPEC_STRAIN_ONLY, const float deltat, const int * d_ibool, const float * d_xix, const float * d_xiy, const float * d_xiz, const float * d_etax, const float * d_etay, const float * d_etaz, const float * d_gammax, const float * d_gammay, const float * d_gammaz, const float * d_hprime_xx){
int ispec;
int ijk_ispec;
int tx;
int iglob;
float eps_trace_over_3;
float epsdev[(5)];
__shared__ float s_dummyx_loc[(NGLL3)];
__shared__ float s_dummyy_loc[(NGLL3)];
__shared__ float s_dummyz_loc[(NGLL3)];
__shared__ float sh_hprime_xx[(NGLL2)];
ispec = blockIdx.x + (blockIdx.y) * (gridDim.x);
ijk_ispec = threadIdx.x + (NGLL3) * (ispec);
tx = threadIdx.x;
if (tx < NGLL2) {
sh_hprime_xx[tx] = d_hprime_xx[tx];
}
if (ispec < NSPEC) {
iglob = d_ibool[ijk_ispec] - (1);
s_dummyx_loc[tx] = d_displ[0 + (3) * (iglob)] + (deltat) * (d_veloc[0 + (3) * (iglob)]);
s_dummyy_loc[tx] = d_displ[1 + (3) * (iglob)] + (deltat) * (d_veloc[1 + (3) * (iglob)]);
s_dummyz_loc[tx] = d_displ[2 + (3) * (iglob)] + (deltat) * (d_veloc[2 + (3) * (iglob)]);
}
__syncthreads();
if (ispec < NSPEC) {
compute_element_strain_undoatt(ispec, ijk_ispec, d_ibool, s_dummyx_loc, s_dummyy_loc, s_dummyz_loc, d_xix, d_xiy, d_xiz, d_etax, d_etay, d_etaz, d_gammax, d_gammay, d_gammaz, sh_hprime_xx, epsdev, &eps_trace_over_3);
if (NSPEC_STRAIN_ONLY == 1) {
epsilon_trace_over_3[tx] = eps_trace_over_3;
} else {
epsilon_trace_over_3[ijk_ispec] = eps_trace_over_3;
}
epsilondev_xx[ijk_ispec] = epsdev[0];
epsilondev_yy[ijk_ispec] = epsdev[1];
epsilondev_xy[ijk_ispec] = epsdev[2];
epsilondev_xz[ijk_ispec] = epsdev[3];
epsilondev_yz[ijk_ispec] = epsdev[4];
}
}
|
14,910 | #include <cstdio>
#include <cstring>
typedef struct {
int width;
int height;
float* elements;
} Matrix;
#define MATRIX_SIZE 1024
#define BLOCK_SIZE 16
Matrix a, b, c;
/* Allocate the three global MATRIX_SIZE x MATRIX_SIZE matrices and fill
 * a and b with a[i][j] = b[i][j] = i + j; c is zeroed. */
void Init() {
  a.width = a.height = MATRIX_SIZE;
  b.width = b.height = MATRIX_SIZE;
  a.elements = (float*) malloc(MATRIX_SIZE*MATRIX_SIZE*sizeof(float));
  b.elements = (float*) malloc(MATRIX_SIZE*MATRIX_SIZE*sizeof(float));
  for (int i = 0; i < MATRIX_SIZE; ++i)
    for (int j = 0; j < MATRIX_SIZE; ++j)
      a.elements[i*MATRIX_SIZE+j] = b.elements[i*MATRIX_SIZE+j] = i + j;
  c.width = c.height = MATRIX_SIZE;
  c.elements = (float*) malloc(MATRIX_SIZE*MATRIX_SIZE*sizeof(float));
  // FIX: the original passed sizeof(<byte-count expression>) to memset,
  // which zeroed only sizeof(size_t) bytes instead of the whole matrix.
  memset(c.elements, 0, MATRIX_SIZE*MATRIX_SIZE*sizeof(float));
}
/* C = A * B for square MATRIX_SIZE row-major matrices, then prints C[3][3].
 * C must be zero-initialized by the caller (accumulates with +=). */
void MatMul(const Matrix A, const Matrix B, Matrix C) {
  for (int i = 0; i < MATRIX_SIZE; ++i)
    for (int j = 0; j < MATRIX_SIZE; ++j)
      for (int t = 0; t < MATRIX_SIZE; ++t)
        // FIX: B must be indexed B[t][j]; the original read
        // B.elements[i + t*MATRIX_SIZE] (= B[t][i]), which made every
        // column of C identical to column i's result.
        C.elements[i*MATRIX_SIZE+j] += A.elements[i*MATRIX_SIZE+t] * B.elements[t*MATRIX_SIZE+j];
  printf("%f\n", C.elements[3*A.width+3]);
}
int main() {
  // Build the operand matrices, multiply, and report C[3][3].
  Init();
  MatMul(a, b, c);
  return 0;
}
|
14,911 | #include <stdio.h>
# define TPB 256
// Each thread prints its global index across the 1-D launch.
__global__ void hello(){
    const int myId = blockIdx.x * blockDim.x + threadIdx.x;
    printf("Hello World! My threadId is %d\n", myId);
}
int main(){
    // One block of TPB threads; synchronize so device printf output is
    // flushed before the process exits.
    const int numBlocks = 1;
    hello<<<numBlocks, TPB>>>();
    cudaDeviceSynchronize();
    return 0;
}
|
14,912 | /*
STEPS
1. Allocate host memory and initialized host data e.g. malloc
2. Allocate device memory e.g cudaMalloc
3. Transfer input data from host to device memory e.g cudaMemcpy
4. Execute kernels
5. Transfer output from device memory to host
6. Free Host & CUDA memory e.g. free & cudaFree
*/
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <assert.h>
#include <cuda.h>
#include <cuda_runtime.h>
#define N 10000000
#define MAX_ERR 1e-6
/*
 * Element-wise out[i] = a[i] + b[i] over n elements.
 * Designed for a single-block launch: thread k starts at element k and
 * strides by blockDim.x, so any block size covers the whole array (thread k
 * handles k, k+blockDim.x, k+2*blockDim.x, ...).
 */
__global__ void vector_add(float *out, float *a, float *b, int n) {
    int first = threadIdx.x;  // this thread's starting element
    int step = blockDim.x;    // stride = number of threads in the block
    for (int i = first; i < n; i += step) {
        out[i] = a[i] + b[i];
    }
}
int main(){
    float *a, *b, *out;        // host buffers
    float *d_a, *d_b, *d_out;  // device buffers
    const size_t bytes = sizeof(float) * N;
    // 1. Allocate and initialize host memory.
    a = (float*)malloc(bytes);
    b = (float*)malloc(bytes);
    out = (float*)malloc(bytes);
    for(int i = 0; i < N; i++){
        a[i] = 1.0f;
        b[i] = 2.0f;
    }
    // 2. Allocate device memory.
    cudaMalloc((void**)&d_a, bytes);
    cudaMalloc((void**)&d_b, bytes);
    cudaMalloc((void**)&d_out, bytes);
    // 3. Copy the operands to the device.
    cudaMemcpy(d_a, a, bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(d_b, b, bytes, cudaMemcpyHostToDevice);
    // 4. Launch one block of 256 threads; the kernel strides over all N.
    vector_add<<<1,256>>>(d_out, d_a, d_b, N);
    // 5. Blocking copy back (also synchronizes with the kernel).
    cudaMemcpy(out, d_out, bytes, cudaMemcpyDeviceToHost);
    // Verify every element against the host operands.
    for(int i = 0; i < N; i++){
        assert(fabs(out[i] - a[i] - b[i]) < MAX_ERR);
    }
    printf("PASSED\n");
    // 6. Release device, then host, memory.
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_out);
    free(a);
    free(b);
    free(out);
}
|
14,913 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <unistd.h>
#include <sys/time.h>
#include <stdint.h>
#define MAX 10
#define MIN 1
void lu_decomp(float *a, float *u,int dimension);
__global__ void DUKernel(float *D_a, float *D_u,unsigned int size);
uint64_t getTime();
/*
 * LU-decomposition driver. Builds a random dimension x dimension matrix a,
 * u as a copy of a and l as the identity, computes the l multipliers on the
 * CPU, then calls lu_decomp() to finish u on the GPU. The verification and
 * debug-printing code is kept below, commented out, as in the original.
 *
 * Fixes: argv[1] was dereferenced without checking argc; a, l and u were
 * never freed.
 */
int main(int argc, char **argv){
    float *a, *u, *l;
    int dimension;
    // FIX: guard against a missing command-line argument.
    if(argc < 2){
        fprintf(stderr, "Usage: %s <matrix dimension>\n", argv[0]);
        return 1;
    }
    dimension = atoi(argv[1]);
    a= (float*)malloc(sizeof(float) * (dimension*dimension));
    l= (float*)malloc(sizeof(float) * (dimension*dimension));
    u= (float*)malloc(sizeof(float) * (dimension*dimension));
    // Random a in [MIN, MAX); u starts as a copy of a, l as the identity.
    for(int i = 0; i<dimension; i++)
    {
        for(int j = 0; j<dimension; j++)
        {
            a[(i*dimension)+j] = rand() % (MAX - MIN) + MIN;
            u[(i*dimension)+j] = a[(i*dimension)+j];
            if(i == j)
            {
                l[(i*dimension)+j] = 1;
            }
            else
            {
                l[(i*dimension)+j] = 0;
            }
        }
    }
    // CPU pass: Doolittle multipliers into l, zero u's strict lower triangle.
    for(int k = 0; k < dimension-1; k++)
    {
        for(int j=k+1; j < dimension; j++ )
        {
            l[(j*dimension)+k] = a[(j*dimension)+k]/a[(k*dimension)+k];
            u[(j*dimension)+k]=0;
        }
    }
    /*printf("U before\n");
    for(int i = 0; i<dimension; i++)
    {
    for(int j = 0; j<dimension; j++)
    {
    printf("%15f",u[(i*dimension)+j]);
    }
    printf("\n");
    }*/
    float diff_allowed=10;  // tolerance used by the commented-out verifier
    lu_decomp(a,u,dimension);
    float temp =0;          // used by the commented-out verifier below
    float x=0;              // used by the commented-out verifier below
    /* remove this comment for verification
    for(int i =0; i < dimension; i++)
    {
    for(int j=0; j < dimension; j++)
    {
    temp =0;
    for(int k=0; k < dimension; k++)
    {
    temp = temp + l[(i*dimension)+k]* u[(k*dimension)+j];
    temp=a[(i*dimension)+j];
    }
    //printf("%15f",temp);
    if((abs(temp-a[(i*dimension)+j])>diff_allowed))
    {
    x=abs(temp-a[(i*dimension)+j]);
    printf("problem");
    printf("diff: %5f\n",x);
    }
    }
    //printf("\n");
    }
    REMOVE THIS COMMENT FOR VERIFICATION.
    */
    //printf("\n");
    //printf("U Matrix:\n");
    /*
    for(int i = 0; i<dimension; i++)
    {
    for(int j = 0; j<dimension; j++)
    {
    printf("%15f",u[(i*dimension)+j]);
    }
    printf("\n");
    }
    for(int i = 0; i<dimension; i++)
    {
    for(int j = 0; j<dimension; j++)
    {
    printf("%15f",l[(i*dimension)+j]);
    }
    printf("\n");
    }
    printf("\n");
    printf("Original Matrix:\n");
    for(int i = 0; i<dimension; i++)
    {
    for(int j = 0; j<dimension; j++)
    {
    printf("%15f",a[(i*dimension)+j]);
    }
    printf("\n");
    }*/
    // FIX: release the host buffers (the original leaked all three).
    free(a);
    free(l);
    free(u);
    return 0;
}
// Host wrapper for the GPU elimination step: copies a and u to the device,
// launches DUKernel, copies u back, and prints "<dimension>, <seconds>".
// The timing window (getTime) includes both transfers and the kernel; the
// blocking default-stream cudaMemcpy back serializes after the kernel, so no
// explicit cudaDeviceSynchronize is needed for correctness of the copy.
void lu_decomp(float *a,float *u, int dimension)
{
float *d_a ,*d_u;
uint64_t astart, aend;
cudaMalloc(&d_a, (dimension*dimension)*sizeof(float));
cudaMalloc(&d_u, (dimension*dimension)*sizeof(float));
astart = getTime();
//Copying data to device from host
cudaMemcpy(d_a, a, sizeof(float)*dimension*(dimension),cudaMemcpyHostToDevice);
cudaMemcpy(d_u, u, sizeof(float)*dimension*(dimension),cudaMemcpyHostToDevice);
//Kernel call
// One block per row while dimension fits a single block's thread budget;
// otherwise split into blocks of 1000 threads over all elements.
if(dimension<1001)
DUKernel<<<dimension ,dimension>>>(d_a, d_u ,dimension);
else
DUKernel<<<(dimension*dimension/1000),1000>>>(d_a, d_u ,dimension);
//DUKernel<<<1024 ,100,4*dimension*dimension>>>(d_a,d_u, dimension);
//Coping data to host from device
//cudaMemcpy(a,d_a,sizeof(float)*dimension*(dimension),cudaMemcpyDeviceToHost);
//cudaMemcpy(l,d_l,sizeof(float)*dimension*(dimension),cudaMemcpyDeviceToHost);
cudaMemcpy(u,d_u,sizeof(float)*dimension*(dimension),cudaMemcpyDeviceToHost);
aend = getTime();
printf("%d, %f \n",dimension,(aend-astart)/1000000.0);
//Deallocating memory on the device
cudaFree(d_a);
cudaFree(d_u);
}
// GPU elimination step on D_u, updated in place. p is this thread's flat
// element index; k (= threadIdx.x) and j (= blockIdx.x) address its
// column/row within the launch. D_a is unused.
// NOTE(review): i and s both start at 0 and are incremented together, so
// the loop runs min(threadIdx.x, blockIdx.x) iterations; the (j/1000)
// factors are 0 whenever dimension <= 1000. D_u is read and written across
// blocks with no grid-wide ordering — verify results against a CPU LU
// reference before trusting the output.
__global__ void DUKernel(float *D_a,float *D_u, unsigned int dimension)
{
// 10x10 size matrix is for experiment, so argv[1]=10
int k=threadIdx.x;
int j=blockIdx.x;
int p= threadIdx.x+(blockIdx.x*blockDim.x);
__syncthreads();
int i=0;
int s=0;
while(i<threadIdx.x && s< blockIdx.x)
{
D_u[p]=D_u[p]-(D_u[((s%1000)*dimension)+(k*(j/1000))+k] * ((D_u[((j%1000)*dimension)+(i*(j/1000))+i])/D_u[((j%1000)*dimension)+(j*(j/1000))+j]));
i++;
s++;
}
// __syncthreads();
}
/* Current wall-clock time in microseconds since the Unix epoch. */
uint64_t getTime(){
    struct timeval now;
    gettimeofday(&now, NULL);
    uint64_t micros = (uint64_t)now.tv_sec * 1000000 + (uint64_t)now.tv_usec;
    return micros;
}
14,914 | #include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
// One thread per element: intended for a single-block launch, so the
// element index is just threadIdx.x.
__global__
void addArrays(int* a, int* b,int* c)
{
    const int idx = threadIdx.x;
    c[idx] = a[idx] + b[idx];
}
/*
 * Adds two 5-element arrays on the GPU and prints the result.
 * FIX: the device allocations were never released; cudaFree added.
 */
int main()
{
    const int count = 5;
    const int size = count * sizeof(int);
    int ha[] = {1,2,3,4,5};
    int hb[] = {10,20,30,40,50};
    int hc[count];
    int *da, *db, *dc;
    cudaMalloc(&da,size);
    cudaMalloc(&db,size);
    cudaMalloc(&dc,size);
    cudaMemcpy(da,ha,size,cudaMemcpyHostToDevice);
    cudaMemcpy(db,hb,size,cudaMemcpyHostToDevice);
    addArrays <<<1,count>>>(da,db,dc);
    // Blocking copy: also synchronizes with the kernel before reading hc.
    cudaMemcpy(hc,dc,size,cudaMemcpyDeviceToHost);
    for(int i = 0; i < count;i++)
        printf("%d ",hc[i]);
    // FIX: release the device buffers (da/db/dc leaked in the original).
    cudaFree(da);
    cudaFree(db);
    cudaFree(dc);
    return 0;
}
|
14,915 | #include <stdio.h>
int main(int argc, char* argv[]){
    // Minimal smoke test: print a greeting and exit successfully.
    puts("Hello world");
    return 0;
}
|
14,916 | #include<stdio.h>
#include<string.h>
#include<stdlib.h>
#include<math.h>
#include <cuda_runtime.h>
//#include <cutil_inline.h>
using namespace std;
#define SUBMATRIX_SIZE 10000
//#define NUM_BIN 5000
//#define HIST_MIN 0.0
//#define HIST_MAX 3.5
#define NUM_BIN 27 // for log binning
#define HIST_MIN 0.0 // for degrees
#define HIST_MAX 100.0 // for degrees
#define CONV_FACTOR 57.2957795 // 180/pi
//float bin_edges[30] = {0.001000,0.001585,0.002512,0.003981,0.006310,0.010000,0.010000,0.015849,0.025119,0.039811,0.063096,0.100000,0.100000,0.158489,0.251189,0.398107,0.630957,1.000000,1.000000,1.584893,2.511886,3.981072,6.309573,10.000000,10.000000,15.848932,25.118864,39.810717,63.095734,100.000000};
////////////////////////////////////////////////////////////////////////
// Angular-separation histogram kernel for two point sets given as
// (right ascension, declination) in radians. Each thread takes one point of
// set 0 (global index offset by xind) and loops over the SUBMATRIX_SIZE
// points of set 1 starting at yind. The separation is computed with the
// atan2 form of the great-circle distance, converted to degrees, and binned
// into this thread's private (NUM_BIN+2)-wide slice of dev_hist — private
// slices mean no atomics are needed.
// NOTE(review): dev_bin_edges is unused; binning uses the constant
// bin_edges table below. dist >= HIST_MAX lands in overflow bin NUM_BIN+1.
__global__ void distance(float *a0, float *d0, float *a1, float *d1, int xind, int yind, int *dev_hist, float* dev_bin_edges)
{
//float bin_edges[30] = {0.001000,0.001585,0.002512,0.003981,0.006310,0.010000,0.010000,0.015849,0.025119,0.039811,0.063096,0.100000,0.100000,0.158489,0.251189,0.398107,0.630957,1.000000,1.000000,1.584893,2.511886,3.981072,6.309573,10.000000,10.000000,15.848932,25.118864,39.810717,63.095734,100.000000};
// Log-spaced bin edges in degrees (NUM_BIN entries).
float bin_edges[NUM_BIN] = {0.0000,0.001000,0.001585,0.002512,0.003981,0.006310,0.010000,0.015849,0.025119,0.039811,0.063096,0.100000,0.158489,0.251189,0.398107,0.630957,1.000000,1.584893,2.511886,3.981072,6.309573,10.000000,15.848932,25.118864,39.810717,63.095734,100.000000};
int idx = blockIdx.x * blockDim.x + threadIdx.x;
int thread_idx = idx;  // index of this thread's private histogram slice
idx += xind;           // global index of this thread's point in set 0
float alpha = a0[idx], delta0 = d0[idx];
float cos_d0 = cos(delta0), sin_d0 = sin(delta0), dist;
int ymax = yind + SUBMATRIX_SIZE;
int bin_index = 0;
int offset = 0;
float a_diff, sin_a_diff, cos_a_diff;
float cos_d1, sin_d1, numer, denom, mult1, mult2;
for(int i=yind; i<ymax; i++)
{
//if(idx > i) ///////// CHECK THIS
//if(idx >= i)
{
a_diff = a1[i] - alpha;
sin_a_diff = sin(a_diff);
cos_a_diff = cos(a_diff);
sin_d1 = sin(d1[i]);
cos_d1 = cos(d1[i]);
// atan2 form of the great-circle distance: numerically stable for
// both very small and near-antipodal separations.
mult1 = cos_d1 * cos_d1 * sin_a_diff * sin_a_diff;
mult2 = cos_d0 * sin_d1 - sin_d0 * cos_d1 * cos_a_diff;
mult2 = mult2 * mult2;
numer = sqrt(mult1 + mult2);
denom = sin_d0 *sin_d1 + cos_d0 * cos_d1 * cos_a_diff;
//dist = atan(num);
dist = atan2(numer,denom);
dist *= CONV_FACTOR; // Convert to degrees
if(dist < HIST_MIN)
bin_index = 0;
else if(dist >= HIST_MAX)
bin_index = NUM_BIN + 1;
else
{
//bin_index = int(((dist - HIST_MIN) * NUM_BIN / HIST_MAX) +1);
// Linear scan of the log-spaced edges; bin j+1 holds
// [bin_edges[j], bin_edges[j+1]).
bin_index = 0;
for (int j=0;j<NUM_BIN-1;j++)
{
//bin_index = 5;
//if (dist>=0.1*j && dist<0.1*(j+1))
//if (dist>=dev_bin_edges[j] && dist<dev_bin_edges[j+1])
if (dist>=bin_edges[j] && dist<bin_edges[j+1])
{
bin_index = j+1;
break;
}
}
}
offset = ((NUM_BIN+2)*thread_idx);
bin_index += offset;
dev_hist[bin_index]++;
}
}
}
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
// Driver: reads two catalogs of (alpha, delta) angular coordinates,
// tiles the pairwise angular-separation computation over the GPU in
// SUBMATRIX_SIZE chunks, folds the per-thread sub-histograms into one
// log-binned histogram (degrees), and writes it to the output file.
int main(int argc, char **argv)
{
    float *d_alpha0, *d_delta0;   // device copies of catalog 0
    float *h_alpha0, *h_delta0;   // host copies of catalog 0
    float *d_alpha1, *d_delta1;   // device copies of catalog 1
    float *h_alpha1, *h_delta1;   // host copies of catalog 1
    int NUM_PARTICLES0, NUM_PARTICLES1;

    if (argc < 4)
    {
        printf("\nMust pass in cluster_data file on command line!\n");
        // BUG FIX: the original format string had no %s conversion, so
        // argv[0] was silently ignored (and is UB per the C standard).
        printf("\nUsage: %s", argv[0]);
        printf(" <cluster_data file> <distances file> \n\n");
        exit(1);
    }

    FILE *infile0, *infile1, *outfile;
    infile0 = fopen(argv[1], "r");
    infile1 = fopen(argv[2], "r");
    outfile = fopen(argv[3], "w");
    // ROBUSTNESS: fail cleanly rather than crashing inside fscanf/fprintf.
    if (infile0 == NULL || infile1 == NULL || outfile == NULL)
    {
        printf("couldn't open input/output files\n");
        exit(1);
    }

    char axis_titles[256];
    char dummy[256];

    ////////////////////////////////////////////////////////////////////////
    // Read the first file: a 3-token header, a particle count, then one
    // "alpha <sep> delta <sep>" record per particle.
    // Note: array names decay to char*; the original passed &axis_titles
    // (type char(*)[256]) to %s, which is a type mismatch. Field widths
    // (%255s) bound every string read to the 256-byte buffers.
    ////////////////////////////////////////////////////////////////////////
    fscanf(infile0, "%255s %255s %255s", axis_titles, dummy, axis_titles);
    fscanf(infile0, "%d", &NUM_PARTICLES0);
    int size0 = NUM_PARTICLES0 * sizeof(float);
    printf("SIZE0 # particles: %d\n", NUM_PARTICLES0);
    h_alpha0 = (float*)malloc(size0);
    h_delta0 = (float*)malloc(size0);
    for (int i = 0; i < NUM_PARTICLES0; i++)
    {
        fscanf(infile0, "%f %255s %f %255s ", &h_alpha0[i], dummy, &h_delta0[i], dummy);
    }

    ////////////////////////////////////////////////////////////////////////
    // Read the second file. BUG FIX: the original sized these buffers with
    // the FIRST file's particle count, overflowing the heap whenever file 1
    // holds more particles than file 0.
    ////////////////////////////////////////////////////////////////////////
    fscanf(infile1, "%255s %255s %255s", axis_titles, dummy, axis_titles);
    fscanf(infile1, "%d", &NUM_PARTICLES1);
    int size1 = NUM_PARTICLES1 * sizeof(float);
    printf("SIZE1 # particles: %d\n", NUM_PARTICLES1);
    h_alpha1 = (float*)malloc(size1);
    h_delta1 = (float*)malloc(size1);
    for (int i = 0; i < NUM_PARTICLES1; i++)
    {
        fscanf(infile1, "%f %255s %f %255s ", &h_alpha1[i], dummy, &h_delta1[i], dummy);
    }

    ////////////////////////////////////////////////////////////////////////
    // Histogram allocation: NUM_BIN+2 slots per thread (slot 0 = underflow,
    // slot NUM_BIN+1 = overflow) so the kernel needs no atomics.
    ////////////////////////////////////////////////////////////////////////
    int *hist, *dev_hist;
    float h_bin_edges[NUM_BIN] = {0.0000,0.001000,0.001585,0.002512,0.003981,0.006310,0.010000,0.015849,0.025119,0.039811,0.063096,0.100000,0.158489,0.251189,0.398107,0.630957,1.000000,1.584893,2.511886,3.981072,6.309573,10.000000,15.848932,25.118864,39.810717,63.095734,100.000000};
    for (int i = 0; i < NUM_BIN; i++)
    {
        printf("%d %f\n", i, h_bin_edges[i]);
    }
    printf("\n");

    float *dev_bin_edges;
    cudaMalloc((void **) &dev_bin_edges, (NUM_BIN*sizeof(float)));
    // BUG FIX: the original passed NUM_BIN *bytes* (27) instead of NUM_BIN
    // floats (NUM_BIN*sizeof(float)) to cudaMemset/cudaMemcpy, leaving most
    // of the device edge table uninitialized.
    cudaMemset(dev_bin_edges, 0, NUM_BIN*sizeof(float));
    cudaMemcpy(dev_bin_edges, h_bin_edges, NUM_BIN*sizeof(float), cudaMemcpyHostToDevice);

    int size_hist = SUBMATRIX_SIZE * (NUM_BIN+2);
    int size_hist_bytes = size_hist*sizeof(int);
    hist = (int*)malloc(size_hist_bytes);
    memset(hist, 0, size_hist_bytes);
    printf("size_hist: %d\n", size_hist_bytes);
    cudaMalloc((void **) &dev_hist, (size_hist_bytes));
    cudaMemset(dev_hist, 0, size_hist_bytes);

    unsigned long *hist_array;
    hist_array = (unsigned long*)malloc((NUM_BIN+2) * sizeof(unsigned long));
    memset(hist_array, 0, (NUM_BIN+2)*sizeof(unsigned long));

    ////////////////////////////////////////////////////////////////////////
    // Launch configuration: exactly SUBMATRIX_SIZE threads, one per
    // catalog-0 row of each tile (the kernel assumes this).
    ////////////////////////////////////////////////////////////////////////
    dim3 grid, block;
    grid.x = 100;
    block.x = SUBMATRIX_SIZE/grid.x;

    cudaMalloc((void **) &d_alpha0, size0 );
    cudaMalloc((void **) &d_delta0, size0 );
    cudaMalloc((void **) &d_alpha1, size1 );
    cudaMalloc((void **) &d_delta1, size1 );
    // Check to see if we allocated enough memory.
    if (0==d_alpha0 || 0==d_delta0 || 0==d_alpha1 || 0==d_delta1 || 0==dev_hist)
    {
        printf("couldn't allocate memory\n");
        return 1;
    }
    // Initialize arrays to all 0's, then upload the catalogs.
    cudaMemset(d_alpha0, 0, size0);
    cudaMemset(d_delta0, 0, size0);
    cudaMemset(d_alpha1, 0, size1);
    cudaMemset(d_delta1, 0, size1);
    cudaMemcpy(d_alpha0, h_alpha0, size0, cudaMemcpyHostToDevice );
    cudaMemcpy(d_delta0, h_delta0, size0, cudaMemcpyHostToDevice );
    cudaMemcpy(d_alpha1, h_alpha1, size1, cudaMemcpyHostToDevice );
    cudaMemcpy(d_delta1, h_delta1, size1, cudaMemcpyHostToDevice );

    // Tile the (catalog0 x catalog1) pair matrix. BUG FIX: each axis now
    // uses its own file's particle count (identical behavior when the two
    // counts match, which the original silently assumed).
    int x, y;
    int num_submatrices0 = NUM_PARTICLES0 / SUBMATRIX_SIZE;
    int num_submatrices1 = NUM_PARTICLES1 / SUBMATRIX_SIZE;
    int bin_index = 0;
    for (int k = 0; k < num_submatrices1; k++)
    {
        y = k*SUBMATRIX_SIZE;
        for (int j = 0; j < num_submatrices0; j++)
        {
            x = j*SUBMATRIX_SIZE;
            cudaMemset(dev_hist, 0, size_hist_bytes);
            distance<<<grid,block>>>(d_alpha0, d_delta0, d_alpha1, d_delta1, x, y, dev_hist, dev_bin_edges);
            cudaMemcpy(hist, dev_hist, size_hist_bytes, cudaMemcpyDeviceToHost);
            // Fold the per-thread sub-histograms into the global totals.
            for (int m = 0; m < size_hist; m++)
            {
                bin_index = m%(NUM_BIN+2);
                hist_array[bin_index] += hist[m];
            }
        }
    }

    unsigned long total = 0;
    float bins_mid = 0;
    fprintf(outfile, "%s %s\n", "Angular Distance(radians)","Number of Entries");
    for (int k = 0; k < NUM_BIN+1; k++)
    {
        // BUG FIX: the original read h_bin_edges[k] and [k+1] for k up to
        // NUM_BIN -- one and two slots past the end of the array; clamp to
        // the last real edge for the overflow row.
        float lo = (k < NUM_BIN) ? h_bin_edges[k] : h_bin_edges[NUM_BIN-1];
        float hi = (k+1 < NUM_BIN) ? h_bin_edges[k+1] : h_bin_edges[NUM_BIN-1];
        bins_mid = (hi+lo)/2.0;
        fprintf(outfile, "%.3e %s %lu \n", bins_mid, ",", hist_array[k]);
        total += hist_array[k];
    }
    printf("total: %lu \n", total);

    fclose(infile0);
    fclose(infile1);
    fclose(outfile);
    free(h_alpha0);
    free(h_delta0);
    free(h_alpha1);
    free(h_delta1);
    free(hist);
    free(hist_array); // BUG FIX: was leaked in the original
    cudaFree(d_alpha0);
    cudaFree(d_delta0);
    cudaFree(d_alpha1);
    cudaFree(d_delta1);
    cudaFree(dev_hist);
    cudaFree(dev_bin_edges);
    return 0;
}
//////////////////////////////////////////////////////////////////////
|
14,917 | #include <iostream>
#include <math.h>
using namespace std;
// Dump an n*n*n array to stdout: one n-line slab per outermost index,
// values space-separated, slabs separated by two extra blank lines.
void print3DArray(float ***a, int n) {
    for (int slab = 0; slab < n; ++slab) {
        for (int row = 0; row < n; ++row) {
            for (int col = 0; col < n; ++col)
                cout << a[slab][row][col] << ' ';
            cout << endl;
        }
        cout << endl << endl << endl;
    }
}
// Dump an n*n matrix to stdout, one row per line, values space-separated.
void print2DArray(float **a, int n) {
    for (int row = 0; row < n; ++row) {
        for (int col = 0; col < n; ++col)
            cout << a[row][col] << ' ';
        cout << endl;
    }
}
// Computes per-k partial products for C = A*B, where A and B live in plane 0
// of their 3-D buffers: a[i][j][k] = A[j][i] * B[i][k]; add() later reduces
// the planes into plane 0.
// BUG FIX: the global index was computed as threadIdx + blockIdx without
// blockDim, which both misses elements and makes distinct threads collide on
// the same (i,j,k). The launch must still cover [0,n) in every dimension.
// NOTE(review): threads with i==0 overwrite plane 0 while other threads may
// still be reading it as input -- confirm the intended ordering.
__global__ void multiply(float ***a, float ***b, int n) {
    int k = threadIdx.x + blockIdx.x * blockDim.x;
    int j = threadIdx.y + blockIdx.y * blockDim.y;
    int i = threadIdx.z + blockIdx.z * blockDim.z;
    if (i < n && j < n && k < n) {
        a[i][j][k] = a[0][j][i] * b[0][i][k];
    }
}
// Reduces the n partial-product planes into plane 0:
// a[0][j][k] += a[1][j][k] + ... + a[n-1][j][k].
// BUG FIX: in the original, every thread along z (each value of i) performed
// the same accumulation into a[0][j][k], racing with the others and adding
// each plane multiple times; exactly one thread (i == 0) now reduces each
// (j,k) cell. The global index also now includes blockDim (the original
// used threadIdx + blockIdx, the classic missing-blockDim bug).
__global__ void add(float ***a, int n) {
    int k = threadIdx.x + blockIdx.x * blockDim.x;
    int j = threadIdx.y + blockIdx.y * blockDim.y;
    int i = threadIdx.z + blockIdx.z * blockDim.z;
    if (i == 0 && j < n && k < n) {
        for (int l = 1; l < n; l++) {
            a[0][j][k] += a[l][j][k];
        }
    }
}
// Driver: multiplies two random n*n matrices on the GPU using a 3-D
// managed-memory buffer (plane 0 holds the inputs/result, planes 1..n-1
// hold per-k partial products), then prints the operands and the product.
int main(void) {
    int n = 100;
    float ***a, ***b;

    // Build the triple-pointer structure in unified (managed) memory so
    // both host and device can dereference it directly.
    cudaMallocManaged(&a, n*sizeof(float**));
    cudaMallocManaged(&b, n*sizeof(float**));
    for (int i = 0; i < n; i++) {
        cudaMallocManaged(&(a[i]), n*sizeof(float*));
        cudaMallocManaged(&(b[i]), n*sizeof(float*));
    }
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            cudaMallocManaged(&(a[i][j]), n*sizeof(float));
            cudaMallocManaged(&(b[i][j]), n*sizeof(float));
        }
    }

    // Inputs live in plane 0; rand() is unseeded, so runs are repeatable.
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            a[0][i][j] = rand() % 4 + 1;
            b[0][i][j] = rand() % 4 + 1;
        }
    }

    // Print A, B
    cout<<"A:"<<endl;
    print2DArray(*a, n);
    cout<<"B:"<<endl;
    print2DArray(*b, n);

    // Multiply, then reduce the partial-product planes into plane 0.
    // NOTE(review): 10 blocks of 10x10x10 threads do not cover all n=100
    // indices in the y/z dimensions -- confirm the intended launch shape.
    multiply<<< 10, dim3(10, 10, 10) >>>(a, b, n);
    cudaDeviceSynchronize();
    add<<< 1, dim3(10, 10, 10) >>>(a, n);
    cudaDeviceSynchronize();

    // Print A*B
    cout<<"A*B:"<<endl;
    print2DArray(*a, n);
    cout<<"done"<<endl;

    // BUG FIX: the original freed only the two top-level pointer arrays and
    // leaked every row and plane allocation; release innermost-first.
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            cudaFree(a[i][j]);
            cudaFree(b[i][j]);
        }
        cudaFree(a[i]);
        cudaFree(b[i]);
    }
    cudaFree(a);
    cudaFree(b);
    return 0;
} |
14,918 | // takes a graph adjacency matrix for a directed graph, and converts it
// to a 2-column matrix of pairs (i,j), meaning an edge from vertex i to
// vertex j; the output matrix must be in lexicographical order
// not claimed efficient, either in speed or in memory usage
#include <cuda.h>
#include <stdio.h>
// needs -lrt link flag for C++
#include <time.h>
// Elapsed seconds from t1 to t2 (t2 assumed later). Borrows one second
// when t2's nanosecond field is smaller than t1's so the nanosecond
// difference stays non-negative.
float timediff(struct timespec t1, struct timespec t2)
{
    if (t2.tv_nsec < t1.tv_nsec) {
        t2.tv_sec -= 1;
        t2.tv_nsec += 1000000000;
    }
    return t2.tv_sec - t1.tv_sec + 0.000000001 * (t2.tv_nsec - t1.tv_nsec);
}
// tgkernel1() finds the number of 1s to be handled by a thread, used
// to determine where in the output matrix a thread writes its portion
// arguments:
// dadjm: the adjacency matrix (NOT assumed symmetric), 1 for edge, 0
// otherwise; note: matrix is overwritten by the function
// n: number of rows and columns of adjm
// dcounts: output array, counts of 1s
// One thread per matrix row: compacts row `me` of the adjacency matrix in
// place so that its first dcounts[me] entries are the column indices of its
// 1s, and records that count in dcounts[me].
__global__ void tgkernel1(int *dadjm, int n, int *dcounts)
{ int tot1s,j;
  // need to find my thread number among the totality of all
  // threads in all blocks
  int me = blockDim.x * blockIdx.x + threadIdx.x;
  // ROBUSTNESS: an oversized grid would otherwise read and write past
  // row n-1 of the matrix and past dcounts[n-1].
  if (me >= n) return;
  tot1s = 0;
  for (j = 0; j < n; j++) {
    if (dadjm[n*me+j] == 1) {
      // compact: store the column index of this 1 at the row's front
      dadjm[n*me+tot1s++] = j;
    }
  }
  // BUG FIX: the count was stored inside the loop, issuing n redundant
  // global-memory writes per row; only the final value matters.
  dcounts[me] = tot1s;
}
// tgkernel2() has the given thread write its rows into the output
// matrix
// One thread per matrix row: writes this row's edges as (row, column)
// pairs into the slice of the output matrix reserved for it by dstarts.
__global__ void tgkernel2(int *dadjm, int n,
    int *dcounts, int *dstarts, int *doutm)
{
    int me = blockDim.x * blockIdx.x + threadIdx.x;
    int base = dstarts[me];    // first output row owned by this thread
    int nedges = dcounts[me];  // number of 1s found in row `me`
    for (int j = 0; j < nedges; j++) {
        doutm[2*(base + j)]     = me;
        doutm[2*(base + j) + 1] = dadjm[n*me + j];
    }
}
// replaces counts by cumulative counts
// Exclusive prefix sum: s[i] = c[0] + ... + c[i-1], with s[0] = 0.
// BUG FIX: the original wrote s[0] = 0 unconditionally before the loop,
// an out-of-bounds store whenever n == 0.
void cumulcounts(int *c, int *s, int n)
{ int i, running = 0;
  for (i = 0; i < n; i++) {
    s[i] = running;
    running += c[i];
  }
}
// Converts an n x n adjacency matrix into a 2-column edge list (from, to)
// in lexicographic order.
// arguments:
//   hadjm: host adjacency matrix, n*n ints, row-major
//   n:     number of vertices
//   nout:  output, number of edges found
//   gsize, bsize: grid/block sizes; gsize*bsize is assumed to equal n
//                 (one thread per matrix row)
// returns a malloc'd buffer holding 2*(*nout) ints; the caller frees it.
int *transgraph(int *hadjm, int n, int *nout, int gsize, int bsize)
{ int *dadjm;   // device adjacency matrix
  int *houtm;   // host output matrix
  int *doutm;   // device output matrix
  int *hcounts; // host counts vector
  int *dcounts; // device counts vector
  int *hstarts; // host starts vector
  int *dstarts; // device starts vector
  hcounts = (int *) malloc(n*sizeof(int));
  hstarts = (int *) malloc(n*sizeof(int));
  cudaMalloc((void **)&dadjm,n*n*sizeof(int));
  cudaMalloc((void **)&dcounts,n*sizeof(int));
  cudaMalloc((void **)&dstarts,n*sizeof(int));
  // BUG FIX: the graph can hold up to n*(n-1) edges, i.e. 2*n*(n-1) output
  // ints, so the original n*n-int buffers could overflow on dense graphs;
  // size both output buffers for the worst case.
  houtm = (int *) malloc(2*n*n*sizeof(int));
  cudaMalloc((void **)&doutm,2*n*n*sizeof(int));
  cudaMemcpy(dadjm,hadjm,n*n*sizeof(int),cudaMemcpyHostToDevice);
  dim3 dimGrid(gsize,1);
  dim3 dimBlock(bsize,1,1);
  // pass 1: per-row 1-counts (also compacts column indices in place)
  tgkernel1<<<dimGrid,dimBlock>>>(dadjm,n,dcounts);
  cudaMemcpy(hcounts,dcounts,n*sizeof(int),cudaMemcpyDeviceToHost);
  cumulcounts(hcounts,hstarts,n);
  *nout = hstarts[n-1] + hcounts[n-1];
  cudaMemcpy(dstarts,hstarts,n*sizeof(int),cudaMemcpyHostToDevice);
  // pass 2: each row writes its edges at its cumulative offset
  tgkernel2<<<dimGrid,dimBlock>>>(dadjm,n,dcounts,dstarts,doutm);
  cudaMemcpy(houtm,doutm,2*(*nout)*sizeof(int),cudaMemcpyDeviceToHost);
  free(hcounts);
  free(hstarts);
  cudaFree(dadjm);
  cudaFree(dcounts);
  cudaFree(dstarts);
  cudaFree(doutm); // BUG FIX: device output buffer was leaked
  return houtm;
}
// Driver: builds a random directed adjacency matrix (no self-loops),
// converts it to an edge list on the GPU, prints small results, and
// reports wall-clock time. usage: prog n gridsize blocksize, with
// gridsize*blocksize equal to n.
int main(int argc, char **argv)
{ int i,j;
  int *adjm; // host adjacency matrix
  int *outm; // host output matrix
  // ROBUSTNESS: the original indexed argv[1..3] without checking argc.
  if (argc < 4) {
    printf("usage: %s n gridsize blocksize\n", argv[0]);
    return 1;
  }
  int n = atoi(argv[1]);
  int gsize = atoi(argv[2]);
  int bsize = atoi(argv[3]);
  int nout;
  adjm = (int *) malloc(n*n*sizeof(int));
  // random digraph; rand() is unseeded, so runs are repeatable
  for (i = 0; i < n; i++)
    for (j = 0; j < n; j++)
      if (i == j) adjm[n*i+j] = 0;
      else adjm[n*i+j] = rand() % 2;
  if (n < 10) {
    printf("adjacency matrix: \n");
    for (i = 0; i < n; i++) {
      for (j = 0; j < n; j++) printf("%d ",adjm[n*i+j]);
      printf("\n");
    }
  }
  struct timespec bgn,nd;
  clock_gettime(CLOCK_REALTIME, &bgn);
  outm = transgraph(adjm,n,&nout,gsize,bsize);
  printf("num rows in out matrix = %d\n",nout);
  if (nout < 50) {
    printf("out matrix: \n");
    for (i = 0; i < nout; i++)
      printf("%d %d\n",outm[2*i],outm[2*i+1]);
  }
  clock_gettime(CLOCK_REALTIME, &nd);
  printf("%f\n",timediff(bgn,nd));
  // BUG FIX: both matrices were leaked and main fell off the end without
  // an explicit return value.
  free(adjm);
  free(outm);
  return 0;
}
|
14,919 | ///*
//* /brief Implementation of computing Restricted Voronoi Diagram.
//*/
//
//#include <cuda\cuda_rvd.h>
//#include "device_atomic_functions.h"
//
//namespace Gpu_Rvd{
//
// CudaRestrictedVoronoiDiagram::CudaRestrictedVoronoiDiagram(Mesh* m, Points* p, int iter, std::vector<int> sample_facet, int k){
// vertex_ = m->v_ptr();
// vertex_nb_ = m->get_vertex_nb();
// facets_ = m->f_ptr();
// facet_nb_ = m->get_facet_nb();
// points_ = p->v_ptr();
// points_nb_ = p->get_vertex_nb();
//
// k_ = k;
// points_nn_ = (index_t*)malloc(sizeof(index_t) * k_ * points_nb_);
// facets_nn_ = (index_t*)malloc(sizeof(index_t) * facet_nb_);
// dimension_ = m->dimension();
//
// dev_vertex_ = nil;
// dev_points_ = nil;
// dev_facets_ = nil;
// dev_points_nn_ = nil;
// dev_facets_nn_ = nil;
// dev_ret_ = nil;
// dev_seeds_info_ = nil;
// dev_seeds_poly_nb = nil;
// dev_retidx = nil;
//
// mesh_ = m;
// x_ = p;
//
// NN_ = NearestNeighborSearch::create(dimension_);
// //knn_ = new CudaKNearestNeighbor(*p, *m, k_);
// iter_nb_ = iter;
//
// is_store_ = true;
// store_filename_counter_ = 0;
// sample_facet_ = sample_facet;
// }
//
// //CudaRestrictedVoronoiDiagram::CudaRestrictedVoronoiDiagram(Mesh m, Points p, index_t k, const index_t* points_nn, const index_t* facets_nn) :
// // vertex_(m.v_ptr()),
// // vertex_nb_(m.get_vertex_nb()),
// // points_(p.v_ptr()),
// // points_nb_(p.get_vertex_nb()),
// // facets_(m.f_ptr()),
// // facet_nb_(m.get_facet_nb()),
// // k_(k),
// // points_nn_(points_nn),
// // facets_nn_(facets_nn),
// // dimension_(p.dimension()),
// // dev_vertex_(nil),
// // dev_points_(nil),
// // dev_facets_(nil),
// // dev_points_nn_(nil),
// // dev_facets_nn_(nil),
// // dev_ret_(nil),
// // host_ret_(nil),
// // dev_seeds_info_(nil),
// // dev_seeds_poly_nb(nil)
// //{
// //}
//
// CudaRestrictedVoronoiDiagram::~CudaRestrictedVoronoiDiagram()
// {
// if (points_nn_ != nil){
// free(points_nn_);
// points_nn_ = nil;
// }
// if (facets_nn_ != nil){
// free(facets_nn_);
// facets_nn_ = nil;
// }
// //delete knn_;
// }
//
// /*
// * \brief Atomic operation add.
// *
// */
// __device__
// double MyAtomicAdd(double* address, double val){
// unsigned long long int* address_as_ull = (unsigned long long int*)address;
//
// unsigned long long int old = *address_as_ull, assumed;
//
// do{
// assumed = old;
// old = atomicCAS(address_as_ull, assumed, __double_as_longlong(val + __longlong_as_double(assumed)));
// } while (assumed != old);
//
// return __longlong_as_double(old);
// }
//
// /*
// * \breif Manipulates the computed RVD data.
// */
// __device__
// void action(
// const CudaPolygon& polygon, const index_t& current_seed, double* retdata
// ){
// index_t _v1 = 0;
// index_t _v2, _v3;
//
// double3 pos1, pos2, pos3;
// //double d1, d2, d3;
// int triangle_nb = polygon.vertex_nb - 2;
// if (triangle_nb <= 0) return;
//
// double total_weight = 0.0;
// double3 centriodTimesWeight = { 0.0, 0.0, 0.0 };
//
// double current_weight = 0.0;
// double3 current_posTimesWeight = { 0.0, 0.0, 0.0 };
//
// //atomicAdd(&g_seeds_polygon_nb[current_seed], 1);
// for (index_t i = 1; i < polygon.vertex_nb - 1; ++i)
// {
// _v2 = i; _v3 = i + 1;
//
// pos1 = { polygon.vertex[_v1].x, polygon.vertex[_v1].y, polygon.vertex[_v1].z };
// //d1 = polygon.vertex[_v1].w;
//
// pos2 = { polygon.vertex[_v2].x, polygon.vertex[_v2].y, polygon.vertex[_v2].z };
// //d2 = polygon.vertex[_v2].w;
//
// pos3 = { polygon.vertex[_v3].x, polygon.vertex[_v3].y, polygon.vertex[_v3].z };
// //d3 = polygon.vertex[_v3].w;
//
// computeTriangleCentriod(pos1, pos2, pos3, centriodTimesWeight, total_weight);
//
// current_weight += total_weight;
// current_posTimesWeight.x += centriodTimesWeight.x;
// current_posTimesWeight.y += centriodTimesWeight.y;
// current_posTimesWeight.z += centriodTimesWeight.z;
//
// total_weight = 0.0;
// centriodTimesWeight = { 0.0, 0.0, 0.0 };
// //MyAtomicAdd(&retdata[0], 1.0);
// }
// MyAtomicAdd(&retdata[current_seed * 4 + 0], current_posTimesWeight.x);
// MyAtomicAdd(&retdata[current_seed * 4 + 1], current_posTimesWeight.y);
// MyAtomicAdd(&retdata[current_seed * 4 + 2], current_posTimesWeight.z);
// MyAtomicAdd(&retdata[current_seed * 4 + 3], current_weight);
//
// }
//
// /*
// * \brief Clips the Polygon by the middle plane defined by point i and j.
// */
// __device__
// void clip_by_plane(
// CudaPolygon& ping,
// CudaPolygon& pong,
// const double3& position_i,
// const double3& position_j,
// const index_t& j
// ){
//
// //reset the pong
// //pong.vertex_nb = 0;
//
// if (ping.vertex_nb == 0)
// return;
//
// // Compute d = n . (2m), where n is the
// // normal vector of the bisector [i, j]
// // and m the middle point of the bisector.
// double d = 0.0;
// double3 temp = sub(position_i, position_j);
// d = dot(add(position_i, position_j), temp);
//
// //The predecessor of the first vertex is the last vertex
// int prev_k = ping.vertex_nb - 1;
//
// //get the position data
//
//
// //then we compute prev_vertex_position "cross" n
// //prev_l = prev_vertex_position . n
// double prev_l = ping.vertex[prev_k].x * temp.x + ping.vertex[prev_k].y * temp.y + ping.vertex[prev_k].z * temp.z;
//
// int prev_status = sgn(2.0 * prev_l - d);
//
// //traverse the Vertex in this Polygon
// for (int k = 0; k < ping.vertex_nb; ++k){
//
//
//
// double l = ping.vertex[k].x * temp.x + ping.vertex[k].y * temp.y + ping.vertex[k].z * temp.z;
//
// int status = sgn(2.0 * l - d);
//
// //If status of edge extremities differ,
// //then there is an intersection.
// if (status != prev_status && (prev_status) != 0){
// // create the intersection and update the Polyon
// CudaVertex I;
//
// //compute the position and weight
// double denom = 2.0 * (prev_l - l);
// double lambda1, lambda2;
//
// // Shit happens!
// if (m_fabs(denom) < 1e-20)
// {
// lambda1 = 0.5;
// lambda2 = 0.5;
// }
// else
// {
// lambda1 = (d - 2.0 * l) / denom;
// // Note: lambda2 is also given
// // by (2.0*l2-d)/denom
// // (but 1.0 - lambda1 is a bit
// // faster to compute...)
// lambda2 = 1.0 - lambda1;
// }
//
// //Set the Position of Vertex
// I.x = lambda1 * ping.vertex[prev_k].x + lambda2 * ping.vertex[k].x;
// I.y = lambda1 * ping.vertex[prev_k].y + lambda2 * ping.vertex[k].y;
// I.z = lambda1 * ping.vertex[prev_k].z + lambda2 * ping.vertex[k].z;
//
// //Set the Weight of Vertex
// I.w = (lambda1 * ping.vertex[prev_k].w + lambda2 * ping.vertex[k].w);
//
// if (status > 0)
// {
// I.neigh_s = (j);
// }
// else {
// I.neigh_s = (ping.vertex[k].neigh_s);
// }
//
// //add I to pong
// pong.vertex[pong.vertex_nb] = I;
// pong.vertex_nb++;
// }
// if (status > 0)
// {
// //add vertex to pong
// pong.vertex[pong.vertex_nb] = ping.vertex[k];
// pong.vertex_nb++;
// }
//
// prev_status = status;
// prev_l = l;
// prev_k = k;
// }
// //reset the pong
// //pong.vertex_nb = 0;
//
// //if (ping.vertex_nb == 0)
// // return;
// //
// //// Compute d = n . (2m), where n is the
// //// normal vector of the bisector [i, j]
// //// and m the middle point of the bisector.
// //double d = 0.0;
// //double3 temp = sub(position_i, position_j);
// //d = dot(add(position_i, position_j), temp);
//
// ////The predecessor of the first vertex is the last vertex
// //int prev_k = ping.vertex_nb - 1;
//
// ////get the position data
// ////CudaVertex* prev_vk = &ping.vertex[prev_k];
//
// ////then we compute prev_vertex_position "cross" n
// ////prev_l = prev_vertex_position . n
// //
// //double prev_l = ping.vertex[prev_k].x * temp.x + ping.vertex[prev_k].y * temp.y + ping.vertex[prev_k].z * temp.z;
//
// //int prev_status = sgn(2.0 * prev_l - d);
// //
// ////traverse the Vertex in this Polygon
// //for (int k = 0; k < ping.vertex_nb; ++k){
//
// // //CudaVertex* vk = &ping.vertex[k];
// //
// // double l = ping.vertex[k].x * temp.x + ping.vertex[k].y * temp.y + ping.vertex[k].z + temp.z;
// //
// // int status = sgn(2.0 * l - d);
//
// // //If status of edge extremities differ,
// // //then there is an intersection.
// // if (status != prev_status && (prev_status) != 0){
// // // create the intersection and update the Polyon
// // CudaVertex I;
//
// // //compute the position and weight
// // double denom = 2.0 * (prev_l - l);
// // double lambda1, lambda2;
//
// // // Shit happens!
// // if (m_fabs(denom) < 1e-20)
// // {
// // lambda1 = 0.5;
// // lambda2 = 0.5;
// // }
// // else
// // {
// // lambda1 = (d - 2.0 * l) / denom;
// // // Note: lambda2 is also given
// // // by (2.0*l2-d)/denom
// // // (but 1.0 - lambda1 is a bit
// // // faster to compute...)
// // lambda2 = 1.0 - lambda1;
// // }
//
// // //Set the Position of Vertex
// // I.x = lambda1 * ping.vertex[prev_k].x + lambda2 * ping.vertex[k].x;
// // I.y = lambda1 * ping.vertex[prev_k].y + lambda2 * ping.vertex[k].y;
// // I.z = lambda1 * ping.vertex[prev_k].z + lambda2 * ping.vertex[k].z;
//
// // //Set the Weight of Vertex
// // I.w = (lambda1 * ping.vertex[prev_k].w + lambda2 * ping.vertex[k].w);
//
// // if (status > 0)
// // {
// // I.neigh_s = (j);
// // }
// // else {
// // I.neigh_s = (ping.vertex[k].neigh_s);
// // }
//
// // //add I to pong
// // pong.vertex[pong.vertex_nb] = I;
// // pong.vertex_nb++;
// // }
// // if (status > 0)
// // {
// // //add vertex to pong
// // pong.vertex[pong.vertex_nb] = ping.vertex[k];
// // pong.vertex_nb++;
// // }
//
// // //prev_vk = vk;
// // //prev_vertex_position = vertex_position;
// // prev_status = status;
// // prev_l = l;
// // prev_k = k;
// //}
//
// }
//
// /*
// * \brief Swaps the content of ping and pong.
// * stores the result in ping.
// */
// __device__
// void swap_polygon(CudaPolygon& ping, CudaPolygon& pong){
// //CudaPolygon t = ping;
// for (int i = 0; i < pong.vertex_nb; ++i){
// ping.vertex[i] = pong.vertex[i];
// }
// ping.vertex_nb = pong.vertex_nb;
//
// pong.vertex_nb = 0;
// }
//
//
// /*
// * \brief Intersects a polygon with a points.
// */
// __device__
// void intersection_clip_facet_SR(
// CudaPolygon& current_polygon,
// const index_t& i,
// const double* points,
// const index_t& points_nb,
// index_t* points_nn,
// const index_t& k
// ){
// CudaPolygon polygon_buffer;
// polygon_buffer.vertex_nb = 0;
// //load /memory[points] 3 times.
// double3 pi = {
// points[i * 3 + 0],
// points[i * 3 + 1],
// points[i * 3 + 2]
// };
//
// for (int t = 1; t < k; ++t){
//
// //load /memory[points_nn] k times.
// index_t j = points_nn[i * k + t];
//
// //load /memroy[points] k * 3 times.
// double3 pj = {
// points[j * 3 + 0],
// points[j * 3 + 1],
// points[j * 3 + 2]
// };
//
// double dij = distance2(pi, pj);
// double R2 = 0.0;
//
// for (index_t tt = 0; tt < current_polygon.vertex_nb; ++tt){
// double3 pk = { current_polygon.vertex[tt].x, current_polygon.vertex[tt].y, current_polygon.vertex[tt].z };
// double dik = distance2(pi, pk);
// R2 = max(R2, dik);
// }
// if (dij > 4.1 * R2){
// return;
// }
//
// clip_by_plane(current_polygon, polygon_buffer, pi, pj, j);
// //swap_polygon(current_polygon, polygon_buffer);
//
// for (int i = 0; i < polygon_buffer.vertex_nb; ++i){
// current_polygon.vertex[i] = polygon_buffer.vertex[i];
// }
// current_polygon.vertex_nb = polygon_buffer.vertex_nb;
//
// polygon_buffer.vertex_nb = 0;
//
//
// }
// }
//
// __device__
// void store_info(int facet, int seed, CudaPolygon p, double* address){
// address[0] = facet;
// address[1] = seed;
// address[2] = p.vertex_nb;
// address[3] = 0;
// for (index_t t = 0; t < p.vertex_nb; ++t){
// address[4 + t * 4 + 0] = p.vertex[t].x;
// address[4 + t * 4 + 1] = p.vertex[t].y;
// address[4 + t * 4 + 2] = p.vertex[t].z;
// address[4 + t * 4 + 3] = p.vertex[t].w;
// }
// }
// __global__
// void kernel(
// double* vertex, index_t vertex_nb,
// double* points, index_t points_nb,
// index_t* facets, index_t facets_nb,
// index_t* points_nn, index_t k_p,
// index_t* facets_nn, index_t dim,
// double* retdata, int* retindex
// ){
// index_t tid = blockIdx.x * blockDim.x + threadIdx.x;
//
// if (tid >= facets_nb) return;
//
// //load \memory[facet] 3 times.
// const int3 facet_index = {
// facets[tid * dim + 0],
// facets[tid * dim + 1],
// facets[tid * dim + 2]
// };
//
//
// CudaPolygon current_polygon;
// current_polygon.vertex_nb = 3;
//
// current_polygon.vertex[0].x = vertex[facet_index.x * dim + 0];
// current_polygon.vertex[0].y = vertex[facet_index.x * dim + 1];
// current_polygon.vertex[0].z = vertex[facet_index.x * dim + 2];
// current_polygon.vertex[0].w = 1.0;
//
// current_polygon.vertex[1].x = vertex[facet_index.y * dim + 0];
// current_polygon.vertex[1].y = vertex[facet_index.y * dim + 1];
// current_polygon.vertex[1].z = vertex[facet_index.y * dim + 2];
// current_polygon.vertex[1].w = 1.0;
//
// current_polygon.vertex[2].x = vertex[facet_index.z * dim + 0];
// current_polygon.vertex[2].y = vertex[facet_index.z * dim + 1];
// current_polygon.vertex[2].z = vertex[facet_index.z * dim + 2];
// current_polygon.vertex[2].w = 1.0;
//
// index_t current_seed = facets_nn[tid];
//
// intersection_clip_facet_SR(
// current_polygon,
// current_seed,
// points,
// points_nb,
// points_nn,
// k_p
// );
//
// //now we get the clipped polygon stored in "polygon", do something.
// action(
// current_polygon,
// current_seed,
// retdata
// );
//
// for (index_t v = 0; v < current_polygon.vertex_nb; ++v)
// {
// retindex[tid * 10 + v] = current_polygon.vertex[v].neigh_s;
// }
//
// return;
// //CudaPolygon current_store = current_polygon;
// //doesn't have the stack?
// //const short idx = threadIdx.x;
//
// //__shared__ index_t to_visit[128][CUDA_Stack_size];
// //index_t to_visit_pos = 0;
//
// //index_t has_visited[CUDA_Stack_size];
// //index_t has_visited_nb = 0;
// //bool has_visited_flag = false;
//
// ////load \memory[facets_nn] 1 time.
// //to_visit[idx][to_visit_pos++] = facets_nn[tid];
// //has_visited[has_visited_nb++] = to_visit[idx][0];
// //
// //
// //index_t counter = 0;
// //while (to_visit_pos){
// // index_t current_seed = to_visit[idx][to_visit_pos - 1];
// // to_visit_pos--;
// //
// // intersection_clip_facet_SR(
// // current_polygon,
// // current_seed,
// // points,
// // points_nb,
// // points_nn,
// // k_p
// // );
//
// // //now we get the clipped polygon stored in "polygon", do something.
// // action(
// // current_polygon,
// // current_seed,
// // retdata
// // );
// // //MyAtomicAdd(&retdata[current_seed], 1);
// // //MyAtomicAdd(&retdata[2000], 1);
// // //store_info(tid, current_seed, current_polygon, &retdata[tid * 400 + counter * 40]);
// // //Propagate to adjacent seeds
// // for (index_t v = 0; v < current_polygon.vertex_nb; ++v)
// // {
// // int ns = current_polygon.vertex[v].neigh_s;
// // if (ns != -1)
// // {
// // for (index_t ii = 0; ii < has_visited_nb; ++ii)
// // {
// // //if the neighbor seed has clipped the polygon
// // //the flag should be set "true"
// // if (has_visited[ii] == ns)
// // has_visited_flag = true;
// // }
// // //the neighbor seed is new!
// // if (!has_visited_flag/* && has_visited_nb < 90*/)
// // {
// // to_visit[idx][to_visit_pos++] = ns;
// // has_visited[has_visited_nb++] = ns;
// // }
// // has_visited_flag = false;
// // }
// // }
// // //current_polygon = current_store;
// // current_polygon.vertex_nb = 3;
//
// // current_polygon.vertex[0].x = v1.x; current_polygon.vertex[0].y = v1.y; current_polygon.vertex[0].z = v1.z; current_polygon.vertex[0].w = 1.0;
// // current_polygon.vertex[1].x = v2.x; current_polygon.vertex[1].y = v2.y; current_polygon.vertex[1].z = v2.z; current_polygon.vertex[1].w = 1.0;
// // current_polygon.vertex[2].x = v3.x; current_polygon.vertex[2].y = v3.y; current_polygon.vertex[2].z = v3.z; current_polygon.vertex[2].w = 1.0;
// // counter++;
// //}
// }
//
// __global__
// void go_rvd(
// double* vertex, index_t vertex_nb,
// double* points, index_t points_nb,
// index_t* facets, index_t facets_nb,
// index_t* points_nn, index_t k_p,
// int* next, index_t dim, index_t data_size,
// double* retdata, int* retindex
// ){
//
// index_t tid = blockIdx.x * blockDim.x + threadIdx.x;
// index_t fid = tid / data_size;
// index_t offset = tid % data_size;
// index_t max_size = next[(data_size + 1) * fid];
// if (offset >= max_size) return;
//
// index_t current_seed = next[(data_size + 1) * fid + 1 + offset];
//
// //load \memory[facet] 3 times.
// const int3 facet_index = {
// facets[fid * dim + 0],
// facets[fid * dim + 1],
// facets[fid * dim + 2]
// };
//
//
// CudaPolygon current_polygon;
// current_polygon.vertex_nb = 3;
//
// current_polygon.vertex[0].x = vertex[facet_index.x * dim + 0];
// current_polygon.vertex[0].y = vertex[facet_index.x * dim + 1];
// current_polygon.vertex[0].z = vertex[facet_index.x * dim + 2];
// current_polygon.vertex[0].w = 1.0;
//
// current_polygon.vertex[1].x = vertex[facet_index.y * dim + 0];
// current_polygon.vertex[1].y = vertex[facet_index.y * dim + 1];
// current_polygon.vertex[1].z = vertex[facet_index.y * dim + 2];
// current_polygon.vertex[1].w = 1.0;
//
// current_polygon.vertex[2].x = vertex[facet_index.z * dim + 0];
// current_polygon.vertex[2].y = vertex[facet_index.z * dim + 1];
// current_polygon.vertex[2].z = vertex[facet_index.z * dim + 2];
// current_polygon.vertex[2].w = 1.0;
//
//
// intersection_clip_facet_SR(
// current_polygon,
// current_seed,
// points,
// points_nb,
// points_nn,
// k_p
// );
//
// //now we get the clipped polygon stored in "polygon", do something.
// action(
// current_polygon,
// current_seed,
// retdata
// );
//
// for (index_t v = 0; v < current_polygon.vertex_nb; ++v)
// {
// retindex[tid * 10 + v] = current_polygon.vertex[v].neigh_s;
// }
// return;
// }
//
// void CudaRestrictedVoronoiDiagram::knn_search(){
// NN_->set_points(points_nb_, points_);
// update_neighbors();
//
// //result_print("points_nn.txt", points_nn_, k_ * points_nb_, k_);
// //result_print("facets_nn.txt", facets_nn_, facet_nb_, 1);
// }
//
// void CudaRestrictedVoronoiDiagram::update_neighbors(){
// parallel_for(
// parallel_for_member_callback(this, &CudaRestrictedVoronoiDiagram::store_neighbors_CB),
// 0, points_nb_, 1, true
// );
//
// parallel_for(
// parallel_for_member_callback(this, &CudaRestrictedVoronoiDiagram::store_f_neighrbors_CB),
// 0, facet_nb_, 1, true
// );
// }
//
// void CudaRestrictedVoronoiDiagram::store_neighbors_CB(index_t v){
// index_t nb = geo_min(k_, points_nb_);
//
// // Allocated on the stack(more thread-friendly and
// // no need to deallocate)
// index_t* neighbors = (index_t*)alloca(
// sizeof(index_t) * nb
// );
// double* dist = (double*)alloca(
// sizeof(double) * nb
// );
// NN_->get_nearest_neighbors(nb, v, neighbors, dist);
// for (index_t t = 0; t < k_; ++t){
// points_nn_[v * k_ + t] = neighbors[t];
// }
// }
//
// void CudaRestrictedVoronoiDiagram::store_f_neighrbors_CB(index_t v){
// index_t nb = 1;
//
// // Allocated on the stack(more thread-friendly and
// // no need to deallocate)
// index_t* neighbors = (index_t*)alloca(
// sizeof(index_t) * nb
// );
// double* dist = (double*)alloca(
// sizeof(double) * nb
// );
//
// NN_->get_nearest_neighbors(nb, vertex_ + dimension_* facets_[v * dimension_], neighbors, dist);
// facets_nn_[v] = neighbors[0];
// }
//
// void CudaRestrictedVoronoiDiagram::update_points(){
// //x_->clear();
//
// for (int i = 0; i < points_nb_; ++i){
// double d = host_ret_[i * 4 + 3];
// if (fabs(host_ret_[i * 4 + 3]) > 1e-30){
// host_ret_[i * 4 + 0] /= host_ret_[i * 4 + 3];
// host_ret_[i * 4 + 1] /= host_ret_[i * 4 + 3];
// host_ret_[i * 4 + 2] /= host_ret_[i * 4 + 3];
//
// x_->set_vertex(&host_ret_[i * 4], dimension_, i);
// }
// else
// {
// //std::cout << "point " << i << " hsa no RVD." << std::endl;
//
// }
// //x_->add_vertexd(&host_ret_[i * 4], dimension_);
// }
//
// std::vector<int> sample_facet(points_nb_);
// if (is_store_){
// std::string name = "C:\\Users\\JWhan\\Desktop\\DATA\\RVD_" + String::to_string(store_filename_counter_) + ".xyz";
// points_save_xyz(name, *x_, sample_facet);
// store_filename_counter_++;
// }
// }
//
// __host__
// void CudaRestrictedVoronoiDiagram::compute_Rvd(){
// CudaStopWatcher watcher("compute_rvd_global");
// watcher.start();
// std::vector<double> update_data(points_nb_ * 4);
// //allocate_and_copy(GLOBAL_MEMORY);
// host_ret_ = (double*)malloc(sizeof(double) * points_nb_ * (dimension_ + 1));
// host_retidx = (int*)malloc(sizeof(int) * facet_nb_ * 50);
// //Allocate
// //Input data.
// cudaMalloc((void**)&dev_vertex_, DOUBLE_SIZE * vertex_nb_ * dimension_);
// cudaMalloc((void**)&dev_points_, DOUBLE_SIZE * points_nb_ * dimension_);
// cudaMalloc((void**)&dev_facets_, sizeof(index_t) * facet_nb_ * dimension_);
// cudaMalloc((void**)&dev_points_nn_, sizeof(index_t) * points_nb_ * k_);
// cudaMalloc((void**)&dev_facets_nn_, sizeof(index_t) * facet_nb_ * 1);
// cudaMalloc((void**)&dev_ret_, sizeof(double) * points_nb_ * 4);
// cudaMalloc((void**)&dev_retidx, sizeof(int) * facet_nb_ * 50);
//
// //Output result.
// //cudaMalloc((void**)&dev_ret_, sizeof(double) * facet_nb_ * 10 * 40);
// //Copy
// cudaMemcpy(dev_vertex_, vertex_, DOUBLE_SIZE * vertex_nb_ * dimension_, cudaMemcpyHostToDevice);
// //cudaMemcpy(dev_points_, points_, DOUBLE_SIZE * points_nb_ * dimension_, cudaMemcpyHostToDevice);
// cudaMemcpy(dev_facets_, facets_, sizeof(index_t) * facet_nb_ * dimension_, cudaMemcpyHostToDevice);
// CheckCUDAError("Allocating device memory");
//
// const int data_size = 10;
//
// int* next = (int*)malloc(sizeof(int) * facet_nb_ * (data_size + 1));
// int* dev_next;
// cudaMalloc((void**)&dev_next, sizeof(int) * facet_nb_ * (data_size + 1));
// for (index_t t = 0; t < iter_nb_; ++t){
// //set a vector for handling the
// to_visited.resize(facet_nb_);
// has_visited.clear();
// has_visited.resize(facet_nb_);
// update_data.clear();
// update_data.resize(points_nb_ * 4);
// if (t > 20) k_ = 10;
// knn_search();
// CudaStopWatcher iter_watcher("iteration");
// iter_watcher.start();
// cudaMemset(dev_ret_, 0, sizeof(double) * points_nb_ * 4);
// cudaMemset(dev_retidx, -2, sizeof(int) * facet_nb_ * 50);
// cudaMemcpy(dev_points_, points_, DOUBLE_SIZE * points_nb_ * dimension_, cudaMemcpyHostToDevice);
// cudaMemcpy(dev_points_nn_, points_nn_, sizeof(index_t) * points_nb_ * k_, cudaMemcpyHostToDevice);
// cudaMemcpy(dev_facets_nn_, facets_nn_, sizeof(index_t) * facet_nb_ * 1, cudaMemcpyHostToDevice);
//
// //might be improved dim3 type.
// int threads = 128;
// int blocks = facet_nb_ / threads + ((facet_nb_ % threads) ? 1 : 0);
// //dim3 blocks(512, facet_nb_ / 512 + ((facet_nb_ % 512) ? 1 : 0));
// //dim3 threads(1, 1, 1);
//
// kernel << < blocks, threads >> > (
// dev_vertex_, vertex_nb_,
// dev_points_, points_nb_,
// dev_facets_, facet_nb_,
// dev_points_nn_, k_,
// dev_facets_nn_, dimension_,
// dev_ret_, dev_retidx
// );
// CheckCUDAError("kernel function");
// copy_back();
// //printf("--------------passsed kernel----------------");
// //result_print("facetnn.txt", facets_nn_, facet_nb_, 4);
// //result_print("retdata.txt", host_ret_, points_nb_ * 4, 4);
// //result_print("retidx.txt", host_retidx, facet_nb_ * 100, 10);
//
// for (index_t t = 0; t < points_nb_ * 4; ++t){
// update_data[t] += host_ret_[t];
// }
//
// for (index_t t = 0; t < facet_nb_; ++t){
// has_visited[t].insert(facets_nn_[t]);
// for (index_t s = 0; s < data_size; ++s){
// int cur_seed = host_retidx[t * data_size + s];
// if (cur_seed < -1) break;
// if (cur_seed >= 0 && cur_seed < points_nb_){
// if (has_visited[t].find(cur_seed) == has_visited[t].end()){
// has_visited[t].insert(cur_seed);
// to_visited[t].push(cur_seed);
// }
// }
// }
// }
//
// int count = 0;
// while (!check_task_finished(to_visited)){
// count++;
// //put the data into a special array.
// go_next(next, 5 + 1);
// cudaMemcpy(dev_next, next, sizeof(int) * facet_nb_ * (5 + 1), cudaMemcpyHostToDevice);
// cudaMemset(dev_retidx, -2, sizeof(int) * facet_nb_ * 50);
// //result_print("next.txt", next, facet_nb_ * (data_size+1), data_size + 1);
// //go
// threads = 256;
// blocks = (facet_nb_ * 5) / threads + (((facet_nb_ * 5) % threads) ? 1 : 0);
// go_rvd << < blocks, threads >> >(
// dev_vertex_, vertex_nb_,
// dev_points_, points_nb_,
// dev_facets_, facet_nb_,
// dev_points_nn_, k_,
// dev_next, dimension_, 5,
// dev_ret_, dev_retidx
// );
//
// cudaMemcpy(host_ret_, dev_ret_, sizeof(double) * points_nb_ * 4, cudaMemcpyDeviceToHost);
// cudaMemcpy(host_retidx, dev_retidx, sizeof(int) * facet_nb_ * data_size * 5, cudaMemcpyDeviceToHost);
// CheckCUDAError("copy back");
// //result_print("go_rvd.txt", host_retidx, facet_nb_ * 100, 10);
// for (index_t t = 0; t < points_nb_ * 4; ++t){
// update_data[t] += host_ret_[t];
// }
// insert_to_visited(host_retidx, data_size);
// }
//
//
// //result_print("retdata.txt", host_ret_, facet_nb_ * 400, 4);
// //result_print("C:\\Users\\JWhan\\Desktop\\DATA\\retdata.txt", host_ret_, points_nb_ * 4, 4);
// is_store_ = false;
// for (int i = 0; i < points_nb_; ++i){
// double d = update_data[i * 4 + 3];
// if (fabs(update_data[i * 4 + 3]) > 1e-30){
// update_data[i * 4 + 0] /= update_data[i * 4 + 3];
// update_data[i * 4 + 1] /= update_data[i * 4 + 3];
// update_data[i * 4 + 2] /= update_data[i * 4 + 3];
//
// x_->set_vertex(&update_data[i * 4], dimension_, i);
// }
// else
// {
// //std::cout << "point " << i << " hsa no RVD." << std::endl;
//
// }
// //x_->add_vertexd(&host_ret_[i * 4], dimension_);
// }
//
// if (is_store_){
// std::vector<int> sample_facet(points_nb_);
// std::string name = "C:\\Users\\JWhan\\Desktop\\DATA\\RVD_" + String::to_string(store_filename_counter_) + ".xyz";
// points_save_xyz(name, *x_, sample_facet);
// store_filename_counter_++;
// }
// //update_points();
// iter_watcher.stop();
// iter_watcher.synchronize();
// iter_watcher.print_elaspsed_time(std::cout);
//
// }
//
// watcher.stop();
// watcher.synchronize();
// watcher.print_elaspsed_time(std::cout);
// std::string name = "C:\\Users\\JWhan\\Desktop\\DATA\\RVD.xyz";
// points_save_xyz(name, *x_, sample_facet_);
// free_memory();
// }
//
// __host__
// void CudaRestrictedVoronoiDiagram::allocate_and_copy(DeviceMemoryMode mode){
// unsigned int free_memory, total_memory;
// cuMemGetInfo(&free_memory, &total_memory);
// std::cerr << "Avaiable GPU memory : "
// << free_memory
// << " Bytes"
// << " (Total memory : "
// << total_memory
// << " Bytes)"
// << std::endl
// << "Starting cudaMalloc..\n";
// host_ret_ = (double*)malloc(sizeof(double) * points_nb_ * (dimension_ + 1));
// //host_ret_ = (double*)malloc(sizeof(double) * facet_nb_ * 10 * 40);
// //cudaMalloc((void**)&dev_seeds_info_, DOUBLE_SIZE * points_nb_ * (dimension_ + 1));
// //cudaMemcpyToSymbol(g_seeds_information, &dev_seeds_info_, sizeof(double*), size_t(0), cudaMemcpyHostToDevice);
// //cudaMalloc((void**)&dev_seeds_poly_nb, INT_SIZE * points_nb_);
// //cudaMemcpyToSymbol(g_seeds_polygon_nb, &dev_seeds_poly_nb, sizeof(int*), size_t(0), cudaMemcpyHostToDevice);
//
// switch (mode)
// {
// case GLOBAL_MEMORY:
// {
// //Allocate
// //Input data.
// cudaMalloc((void**)&dev_vertex_, DOUBLE_SIZE * vertex_nb_ * dimension_);
// cudaMalloc((void**)&dev_points_, DOUBLE_SIZE * points_nb_ * dimension_);
// cudaMalloc((void**)&dev_facets_, sizeof(index_t) * facet_nb_ * dimension_);
// cudaMalloc((void**)&dev_points_nn_, sizeof(index_t) * points_nb_ * k_);
// cudaMalloc((void**)&dev_facets_nn_, sizeof(index_t) * facet_nb_ * 1);
//
// //Output result.
// //cudaMalloc((void**)&dev_ret_, sizeof(double) * facet_nb_ * 10 * 40);
// cudaMalloc((void**)&dev_ret_, sizeof(double) * points_nb_ * 4);
// CheckCUDAError("Allocating device memory");
//
// //Copy
// cudaMemcpy(dev_vertex_, vertex_, DOUBLE_SIZE * vertex_nb_ * dimension_, cudaMemcpyHostToDevice);
// //cudaMemcpy(dev_points_, points_, DOUBLE_SIZE * points_nb_ * dimension_, cudaMemcpyHostToDevice);
// cudaMemcpy(dev_facets_, facets_, sizeof(index_t) * facet_nb_ * dimension_, cudaMemcpyHostToDevice);
// //cudaMemcpy(dev_points_nn_, points_nn_, sizeof(index_t) * points_nb_ * k_, cudaMemcpyHostToDevice);
// //cudaMemcpy(dev_facets_nn_, facets_nn_, sizeof(index_t) * facet_nb_ * 1, cudaMemcpyHostToDevice);
// cuMemGetInfo(&free_memory, &total_memory);
// std::cerr << "Left GPU memory : "
// << free_memory
// << " Bytes"
// << " (Total memory : "
// << total_memory
// << " Bytes)"
// << std::endl;
// CheckCUDAError("Copying data from host to device");
// }
// break;
// case CONSTANT_MEMORY:
// {
// cudaMalloc((void**)&dev_facets_, sizeof(index_t) * facet_nb_ * dimension_);
// cudaMalloc((void**)&dev_facets_nn_, sizeof(index_t) * facet_nb_ * 1);
//
// //Output result.
// cudaMalloc((void**)&dev_ret_, sizeof(double) * points_nb_ * 4);
// CheckCUDAError("Allocating device memory");
//
// cudaMemcpyToSymbol(c_vertex, vertex_, DOUBLE_SIZE * vertex_nb_ * dimension_);
// cudaMemcpyToSymbol(c_points, points_, DOUBLE_SIZE * points_nb_ * dimension_);
// cudaMemcpyToSymbol(c_points_nn, points_nn_, INT_SIZE * points_nb_ * k_);
//
// cudaMemcpy(dev_facets_nn_, facets_nn_, sizeof(index_t) * facet_nb_ * 1, cudaMemcpyHostToDevice);
// cudaMemcpy(dev_facets_, facets_, sizeof(index_t) * facet_nb_ * dimension_, cudaMemcpyHostToDevice);
// CheckCUDAError("Copying data from host to device");
// }
// break;
// case TEXTURE_MEMORY:
// break;
// default:
// break;
// }
// }
//
// __host__
// void CudaRestrictedVoronoiDiagram::free_memory(){
// cudaFree(dev_vertex_);
// cudaFree(dev_points_);
// cudaFree(dev_facets_);
// cudaFree(dev_points_nn_);
// cudaFree(dev_facets_nn_);
// cudaFree(dev_ret_);
// cudaFree(dev_seeds_info_);
// cudaFree(dev_seeds_poly_nb);
// if (host_ret_ != nil){
// free(host_ret_);
// host_ret_ = nil;
// }
// }
//
// __host__
// void CudaRestrictedVoronoiDiagram::copy_back(){
// //cudaMemcpy(host_ret_, dev_ret_, sizeof(double) * facet_nb_ * 10 * 40, cudaMemcpyDeviceToHost);
// cudaMemcpy(host_ret_, dev_ret_, sizeof(double) * points_nb_ * 4, cudaMemcpyDeviceToHost);
// cudaMemcpy(host_retidx, dev_retidx, sizeof(int) * facet_nb_ * 10, cudaMemcpyDeviceToHost);
// CheckCUDAError("copy back");
// }
//
// __host__
// void CudaRestrictedVoronoiDiagram::print_return_data(std::string filename) const{
// for (int i = 0; i < points_nb_; ++i)
// {
// if (fabs(host_ret_[i * 4 + 3]) >= 1e-12){
// host_ret_[i * 4 + 0] /= host_ret_[i * 4 + 3];
// host_ret_[i * 4 + 1] /= host_ret_[i * 4 + 3];
// host_ret_[i * 4 + 2] /= host_ret_[i * 4 + 3];
// }
// }
// index_t line_num = 4;
// std::ofstream f;
// f.open(filename);
// for (index_t t = 0; t < facet_nb_ * 100; ++t){
// //for (index_t t = 0; t < points_nb_; ++t){
// f << std::setprecision(18);
// f << "point " << t << " ";
// f << host_ret_[t * 4 + 0] << " "
// << host_ret_[t * 4 + 1] << " "
// << host_ret_[t * 4 + 2] << " "
// << host_ret_[t * 4 + 3] << " " << std::endl;
// }
// f.close();
// }
//} |
14,920 | #include "includes.h"
// Averages (amplitude, frequency) channel pairs over a ring buffer of frames.
// matrix holds `max` frames of `length` floats each; frames are visited from
// firstFrame up to (but excluding) frameCount with wrap-around modulo max.
// Each thread owns one even channel index (amp) and its odd neighbor (freq).
__global__ void blurnaive(float* matrix, float* output, int firstFrame, int numFrames, int frameCount, int max, int length){
    int chan = (blockIdx.x * blockDim.x + threadIdx.x) * 2;  // even: amp, chan+1: freq
    if (chan >= length)
        return;
    float ampSum = 0.0f;
    float freqSum = 0.0f;
    for (int f = firstFrame; f != frameCount; f = (f + 1) % max) {
        ampSum  += matrix[f * length + chan];
        freqSum += matrix[f * length + chan + 1];
    }
    output[chan]     = ampSum  / numFrames;
    output[chan + 1] = freqSum / numFrames;
}
14,921 | /*
testeando
*/
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
const static int threadsPerBlock = 1024;
const static int blocksPerGrid = 10;
const static int M = 3;
/////////
__global__ void dot_pdt(float* d_vtr_a, float* d_vtr_b, const int M,
float* d_vtr_o);
int parseCLI(const int argc, char**argv, char *filename_0, char *filename_1);
void read_mtx(const char *in_filename, float *mtx, const unsigned int M,
const unsigned int N);
/////////
int main(int argc, char** argv) {
    // Host-side buffers: two M-element input vectors and one partial dot
    // product per block.
    char in_filename_1[20], in_filename_2[20];
    float *vtr_a, *vtr_b, *vtr_out;
    vtr_a = (float*) malloc(M * sizeof(float));
    vtr_b = (float*) malloc(M * sizeof(float));
    vtr_out = (float*) malloc(blocksPerGrid * sizeof(float));
    // parseCLI(argc, argv, in_filename_1, in_filename_2);
    // read_mtx(in_filename_1, vtr_a, M, 1);
    // read_mtx(in_filename_2, vtr_b, M, 1);
    vtr_a[0] = 1.;
    vtr_a[1] = 1.;
    vtr_a[2] = 1.;
    vtr_b[0] = 1.;
    vtr_b[1] = 1.;
    vtr_b[2] = 1.;
    //
    printf("Vtor a:\n");
    for (size_t i = 0; i < M; i++) {
        printf("%f\t", vtr_a[i]);
    }
    printf("\nVtor b:\n");
    for (size_t i = 0; i < M; i++) {
        printf("%f\t", vtr_b[i]);
    }
    printf("\n");
    //
    // GPU: each block writes one partial sum into d_vtr_out[blockIdx.x].
    dim3 dimBlock(threadsPerBlock, 1, 1);
    dim3 dimGrid(blocksPerGrid, 1, 1);
    float *d_vtr_a, *d_vtr_b, *d_vtr_out;
    cudaMalloc((void**) &d_vtr_a, M * sizeof(float));
    cudaMalloc((void**) &d_vtr_b, M * sizeof(float));
    cudaMalloc((void**) &d_vtr_out, blocksPerGrid * sizeof(float));
    cudaMemcpy(d_vtr_a, vtr_a, M * sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_vtr_b, vtr_b, M * sizeof(float), cudaMemcpyHostToDevice);
    dot_pdt<<<dimGrid, dimBlock>>>(d_vtr_a, d_vtr_b, M, d_vtr_out);
    // BUG FIX: the kernel produces one partial per block, so blocksPerGrid
    // floats must be copied back and summed — the original copied and summed
    // only M of them, which is correct only by accident (idle blocks write 0)
    // and reads past the buffer whenever M > blocksPerGrid.
    cudaMemcpy(vtr_out, d_vtr_out, blocksPerGrid * sizeof(float), cudaMemcpyDeviceToHost);
    float result = 0.;
    for (size_t i = 0; i < blocksPerGrid; ++i) {
        printf("vtr_out: %f\n", vtr_out[i]);
        result += vtr_out[i];
    }
    printf("dot pdt: %f\n", result);
    // Release host and device memory (the original leaked all of it).
    free(vtr_a);
    free(vtr_b);
    free(vtr_out);
    cudaFree(d_vtr_a);
    cudaFree(d_vtr_b);
    cudaFree(d_vtr_out);
    return 0;
}
///////// Kernels
// Block-wise dot product: every block accumulates a partial sum of
// d_vtr_a[i] * d_vtr_b[i] via a grid-stride loop, reduces it in shared
// memory, and stores one partial result per block in d_vtr_o[blockIdx.x].
// The host is expected to add the per-block partials together.
__global__ void dot_pdt(float* d_vtr_a, float* d_vtr_b, const int M,
		float* d_vtr_o) {
	__shared__ float partial[threadsPerBlock];
	float acc = 0;
	for (int i = threadIdx.x + blockIdx.x * blockDim.x; i < M;
			i += blockDim.x * gridDim.x) {
		acc += d_vtr_a[i] * d_vtr_b[i];
	}
	partial[threadIdx.x] = acc;
	__syncthreads();
	// Tree reduction over the block's partials; blockDim.x is a power of two.
	for (size_t half = blockDim.x / 2; half > 0; half /= 2) {
		if (threadIdx.x < half) {
			partial[threadIdx.x] += partial[threadIdx.x + half];
		}
		__syncthreads();
	}
	if (threadIdx.x == 0) {
		d_vtr_o[blockIdx.x] = partial[0];
	}
	return;
}
///////// Functions
/*
 * Parses -m <matrix file> and -v <vector file> from argv into filename_0
 * and filename_1 (each a caller-owned char[20] buffer).
 * Returns 0 on success, 1 on any error (oversized name, missing option
 * argument, unknown option, or a missing -m/-v flag).
 */
int parseCLI(const int argc, char**argv, char *filename_0, char *filename_1) {
	// BUG FIX: the flags were read uninitialized when an option was absent,
	// and `c` was declared char — getopt() returns int, and comparing a
	// (possibly unsigned) char against -1 is not portable.
	bool m_flag = false, v_flag = false;
	int c;
	while ((c = getopt(argc, argv, "m:v:")) != -1) {
		switch (c) {
		case 'm':
			// BUG FIX: the original tested sizeof(optarg) — the size of a
			// pointer — so the bounds check never fired and strcpy could
			// overflow the 20-byte buffer. Use strlen() instead.
			if (strlen(optarg) >= 20) {
				fprintf(stderr, "Filename too big.\n");
				return 1;
			}
			strcpy(filename_0, optarg);
			m_flag = true;
			break;
		case 'v':
			if (strlen(optarg) >= 20) {
				fprintf(stderr, "Filename too big.\n");
				return 1;
			}
			strcpy(filename_1, optarg);
			v_flag = true;
			break;
		case '?':
			if (optopt == 'i' || optopt == 'o')
				fprintf(stderr, "Options require an argument.\n");
			return 1;
		default:
			fprintf(stderr, "Mal\n");
			return 1;
		}
	}
	if (!(m_flag && v_flag)) {
		fprintf(stderr, "Mal\n");
		return 1;
	}
	return 0;
}
/*
 * Reads an M x N matrix of whitespace-separated floats from in_filename
 * into the caller-provided row-major buffer mtx. Exits on an unopenable
 * file; stops early if the file runs out of values.
 */
void read_mtx(const char *in_filename, float *mtx, const unsigned int M,
		const unsigned int N) {
	FILE *in_file = fopen(in_filename, "r");
	// BUG FIX: the original dereferenced in_file without checking fopen().
	if (in_file == NULL) {
		fprintf(stderr, "Cannot open file %s\n", in_filename);
		exit(1);
	}
	for (unsigned int i = 0; i < M; i++) {
		for (unsigned int j = 0; j < N; j++) {
			if (fscanf(in_file, "%f\t", &mtx[j + (i * N)]) != 1) {
				// Short file: leave the remaining entries untouched.
				fclose(in_file);
				return;
			}
		}
	}
	// BUG FIX: the original leaked the FILE handle.
	fclose(in_file);
	return;
}
|
14,922 | #include "includes.h"
// Element-wise (Hadamard) product of two rows x columns matrices:
// c[i][j] = a[i][j] * b[i][j]. One thread per output element, 2D launch.
__global__ void mat_hadamard(float *a, float *b, float *c, int rows, int columns)
{
    const int row = blockDim.y * blockIdx.y + threadIdx.y;
    const int col = blockDim.x * blockIdx.x + threadIdx.x;
    if (row >= rows || col >= columns)
        return;
    const int idx = row * columns + col;
    c[idx] = a[idx] * b[idx];
}
14,923 | #include <vector_types.h>
#include <iostream>
// Renders a level-set field into a pixel buffer object: pixels where the
// level set is non-positive (inside) become opaque blue, all others opaque
// black. One thread per pixel; the grid is assumed to exactly cover the PBO.
__global__
void writePBO(uchar4 * d_pbo, const float * d_levelset)
{
    const int col = threadIdx.x + blockDim.x * blockIdx.x;
    const int row = threadIdx.y + blockDim.y * blockIdx.y;
    const int idx = col + row * blockDim.x * gridDim.x;
    d_pbo[idx].x = 0;
    d_pbo[idx].y = 0;
    d_pbo[idx].z = (d_levelset[idx] <= 0) ? 255 : 0;
    d_pbo[idx].w = 255;
}
// Host wrapper: logs the launch configuration, then dispatches the
// writePBO kernel with the given grid/block dimensions.
void writePBO(dim3 blocks,
              dim3 threads,
              uchar4 * d_pbo,
              const float * d_levelset)
{
    std::cout << "Writing to PBO (blocks: " << blocks.x << "x" << blocks.y
              << " threads: " << threads.x << "x" << threads.y << ")" << std::endl;
    writePBO<<<blocks, threads>>>(d_pbo, d_levelset);
}
14,924 | // compile -> nvcc quiz3.cu -o quiz3
// execute -> quiz3.exe | quiz3.out
// Bruno Maglioni A01700879
#include <stdio.h>
#define N 9 //size of original matrix
#define K N/3 //size of compressed matrrix
#define ThreadsPerBlock N/K
#define NumBlocks N/K
// Shrinks the n x n input `mat` into the k x k output `comp`.
// NOTE(review): the accumulation looks suspect — every thread with
// x < K and y < K adds into comp[j + i*k] for ALL (i < y, j < x) using a
// plain +=, which both races between threads and accumulates overlapping
// regions; verify against the intended averaging formula before reuse.
__global__ void compress(float *mat, int n, float *comp, int k){
int x = threadIdx.x + blockIdx.x * blockDim.x; // columns
int y = threadIdx.y + blockIdx.y * blockDim.y; // rows
int offset = x + y * blockDim.x * gridDim.x; // where the thread is on the grid
if(x < K && y < K){
for(int i = 0; i < y; i++){
for(int j = 0; j < x; j++){
comp[j + (i * k)] += mat[offset + (j + (i * n))]/n;
}
}
// NOTE(review): dead store — `offset` is a local and the kernel ends here.
offset += blockDim.x * gridDim.x;
}
}
// Prints an n x n row-major matrix: one tab-separated row per line with
// one decimal place, followed by a trailing blank line.
void print_mat(float *mat, int n){
    for (int row = 0; row < n; ++row){
        for (int col = 0; col < n; ++col)
            printf("%.1f\t", mat[row * n + col]);
        printf("\n");
    }
    printf("\n");
}
// Fills an n x n row-major matrix with the sequence 0, 1, 2, ...
void fill_mat(float *mat, int n){
    int next = 0;
    for (int idx = 0; idx < n * n; ++idx)
        mat[idx] = next++;
}
int main(){
    float *h_compress, *h_matrix; // CPU variables
    float *d_compress, *d_matrix; // GPU variables
    // Allocate memory on CPU
    h_compress = (float *)malloc(sizeof(float) * K * K);
    h_matrix = (float *)malloc(sizeof(float) * N * N);
    // Allocate memory on GPU
    cudaMalloc((void**)&d_compress, sizeof(float) * K * K);
    cudaMalloc((void**)&d_matrix, sizeof(float) * N * N);
    // Fill matrix
    fill_mat(h_matrix, N);
    printf("\n input mat \n");
    print_mat(h_matrix, N);
    // BUG FIX: the kernel accumulates into d_compress with +=, so the buffer
    // must start zeroed; the original copied uninitialized h_compress to the
    // device, producing garbage results.
    cudaMemset(d_compress, 0, sizeof(float) * K * K);
    cudaMemcpy(d_matrix, h_matrix, sizeof(float)* N * N, cudaMemcpyHostToDevice);
    // Create grids
    dim3 blocks(NumBlocks, NumBlocks);
    dim3 threads(ThreadsPerBlock, ThreadsPerBlock);
    // Call function in GPU
    compress<<<blocks, threads>>>(d_matrix, N, d_compress, K);
    // Copy result matrix from GPU to CPU
    cudaMemcpy(h_compress, d_compress, sizeof(float) * K * K, cudaMemcpyDeviceToHost);
    // Print compressed matrix
    printf("\n compress mat \n");
    // BUG FIX: h_compress holds K*K floats; printing it as N*N read far
    // beyond the allocation.
    print_mat(h_compress, K);
    // Free CPU memory
    free(h_compress);
    free(h_matrix);
    // Free GPU memory
    cudaFree(d_compress);
    cudaFree(d_matrix);
    return 0;
}
|
14,925 | #include "includes.h"
#define MINVAL 1e-7
// Single-thread pivot search for Gaussian elimination on the row-major
// Size x Size matrix Mtr: scans row i from column i onward and stores the
// column index of the largest-magnitude entry in *strnum. If that maximum
// is below MINVAL, *strnum is set to -1 (matrix singular to working
// precision, determinant treated as zero).
__global__ void MaxElement(double* Mtr, int Size, int i, int*strnum)
{
double MaxValue=Mtr[i*Size+i];
*strnum=i;
for(int k=i; k<Size; k++)
{
if(fabs(Mtr[i*(Size)+k])>fabs(MaxValue))
{
*strnum=*strnum+1; //this is for the compiler checker (dead store, immediately overwritten below)
*strnum=k;
MaxValue=Mtr[i*(Size)+k];
}
}
if(fabs(MaxValue)<MINVAL) //if the maximal element is below the threshold, return -1 -> the determinant is 0 and the caller exits its loop
{
*strnum=-1;
}
}
14,926 | #include <stdio.h>
#include <cmath>
#include <malloc.h>
#include <time.h>
#include <stdlib.h>
/*
 * Allocates and returns an N-element successor-index list where
 * list[i] = i - 1 (element 0, marked -1, has no successor).
 * Exits the process if N == 0 or the allocation fails.
 * The caller owns the returned buffer and must free() it.
 */
__host__ int *gen_linked_list_1(int N)
{
    if (0 == N)
    {
        printf("N is 0, exit\n");
        exit(-1);
    }
    /* BUG FIX: the original set list = NULL and then immediately tested
       and freed it — dead code removed. */
    int *list = (int *)malloc(N * sizeof(int));
    if (NULL == list)
    {
        printf("Can not allocate memory for output array\n");
        exit(-1);
    }
    for (int i = 0; i < N; i++)
        list[i] = i - 1;
    return list;
}
// Swaps the successors of nodes i and j in the successor-index list
// (list[x] holds the index of x's successor, -1 meaning "none").
__host__ void swap(int *list, int i, int j)
{
if (i < 0 || j < 0 || i == j)
return;
int p = list[i]; // save index p of i's successor
int q = list[j]; // save index q of j's successor
if (p == -1 || q == -1)
return; // bail out if either node has no successor
int pnext = list[p]; // save the index of p's successor
int qnext = list[q]; // save the index of q's successor
// swap the successor elements of i and j
if (p == j)
{ // j is i's successor (adjacent case)
list[i] = q;
list[j] = list[q];
list[q] = j;
}
else if (i == q)
{ // i is j's successor (adjacent case)
list[j] = p;
list[i] = list[p];
list[p] = i;
}
else
{
list[i] = q; // i's successor becomes q
list[j] = p; // j's successor becomes p
list[p] = qnext; // p's successor becomes q's old successor
list[q] = pnext; // q's successor becomes p's old successor
}
}
// Builds the baseline list from gen_linked_list_1, then scrambles it by
// swapping the successors of every other node with a node roughly N/5
// positions away (modulo N).
__host__ int *gen_linked_list_2(int N)
{
    int *list = gen_linked_list_1(N);
    const int stride = N / 5;
    for (int i = 0; i < N; i += 2)
    {
        swap(list, i, (2 * i + stride) % N);
    }
    return list;
}
// Inverts the successor list `a` into `b`: for every node n with a
// successor, b[a[n]] = n; the node with no successor (a[n] == -1) is
// recorded in the sentinel slot b[N]. Work is split into `allnum`
// contiguous chunks, one per threadIdx.y.
__global__ void my_order(int N, int *a, int *b, int allnum)
{
    const int chunk = N / allnum;
    const int begin = chunk * threadIdx.y;
    const int end = begin + chunk;
    for (int n = begin; n < end; n++)
    {
        if (a[n] == -1)
            b[N] = n;
        else
            b[a[n]] = n;
    }
}
// List-ranking driver: builds a scrambled 10M-node successor list, inverts
// it on the GPU with my_order, then walks the inverted links on the host to
// assign ranks, timing the GPU + host phase with CUDA events.
int main(void)
{
int N = 10000000;
int *a, *b;
int *qq = NULL;
qq = gen_linked_list_2(N);
// result has N+1 slots: slot N is the "current node" sentinel written by my_order.
int *result = (int *)malloc(sizeof(int) * (N + 1));
cudaMalloc((void **)&a, sizeof(int) * N);
cudaMalloc((void **)&b, sizeof(int) * (N + 1));
int allnum = 1 * 1 * 1 * 100;
cudaEvent_t start1;
cudaEventCreate(&start1);
cudaEvent_t stop1;
cudaEventCreate(&stop1);
cudaEventRecord(start1, NULL);//start timing
// 100 threads along y, matching allnum chunks inside my_order.
dim3 grid(1, 1);
dim3 block(1, 100);
cudaMemcpy(a, qq, sizeof(int) * N, cudaMemcpyHostToDevice);
my_order<<<grid, block>>>(N, a, b, allnum);
cudaDeviceSynchronize();//synchronize
cudaMemcpy(result, b, sizeof(int) * (N + 1), cudaMemcpyDeviceToHost);
// Sequentially follow the inverted links starting at the head stored in
// result[N], overwriting each visited slot with its rank n.
for (int n = 0; n < N; n++)
{
int hold = result[result[N]];
result[result[N]] = n;
result[N] = hold;
}
cudaEventRecord(stop1, NULL);
cudaEventSynchronize(stop1);
float totaltime = 0.0f;
cudaEventElapsedTime(&totaltime, start1, stop1);
printf("List ranking cuda time = %f\n", totaltime);
cudaFree(a);
cudaFree(b);
free(result);
free(qq);
return 0;
}
14,927 | #include <bits/stdc++.h>
#include <cuda_runtime.h>
using namespace std;
// Fills A with N pseudo-random integers drawn uniformly from [1, 10].
void initialize(int *A, int N) {
    for (int idx = 0; idx < N; ++idx)
        A[idx] = rand() % 10 + 1;
}
// Accumulates min, max, sum and sum of squares of A[0..N-1] via atomics.
// Indexing uses threadIdx.x only, so this is valid for a single-block launch.
__global__ void get_stats(int *A, int *min, int *max, int *sum, int *square_sum, int N) {
    const int i = threadIdx.x;
    if (i >= N)
        return;
    const int v = A[i];
    atomicMin(min, v);
    atomicMax(max, v);
    atomicAdd(sum, v);
    atomicAdd(square_sum, v * v);
}
// Atomically accumulates the sum of A[0..N-1] (single-block launch).
__global__ void get_sum(int *A, int *sum, int N) {
    const int i = threadIdx.x;
    if (i >= N)
        return;
    atomicAdd(sum, A[i]);
}
// Atomically accumulates the sum of squares of A[0..N-1] (single-block launch).
__global__ void get_square_sum(int *A, int *square_sum, int N) {
    const int i = threadIdx.x;
    if (i >= N)
        return;
    atomicAdd(square_sum, A[i] * A[i]);
}
// Atomically tracks the minimum of A[0..N-1] (single-block launch).
__global__ void get_min(int *A, int *min, int N) {
    const int i = threadIdx.x;
    if (i >= N)
        return;
    atomicMin(min, A[i]);
}
// Atomically tracks the maximum of A[0..N-1] (single-block launch).
__global__ void get_max(int *A, int *max, int N) {
    const int i = threadIdx.x;
    if (i >= N)
        return;
    atomicMax(max, A[i]);
}
int main() {
    int N = 1e+3;
    int min = INT_MAX, max = INT_MIN, sum = 0, square_sum = 0;
    int *d_min, *d_max, *d_sum, *d_square_sum;
    cudaMalloc((void **)&d_min, sizeof(int));
    cudaMalloc((void **)&d_max, sizeof(int));
    cudaMalloc((void **)&d_sum, sizeof(int));
    cudaMalloc((void **)&d_square_sum, sizeof(int));
    int *d_A;
    int *A = (int *) malloc(sizeof(int)*N);
    initialize(A, N);
    cudaMalloc((void **)&d_A, sizeof(int)*N);
    cudaMemcpy(d_A, A, sizeof(int)*N, cudaMemcpyHostToDevice);
    // Seed the device accumulators with the identity values.
    cudaMemcpy(d_min, &min, sizeof(int), cudaMemcpyHostToDevice);
    cudaMemcpy(d_max, &max, sizeof(int), cudaMemcpyHostToDevice);
    cudaMemcpy(d_sum, &sum, sizeof(int), cudaMemcpyHostToDevice);
    cudaMemcpy(d_square_sum, &square_sum, sizeof(int), cudaMemcpyHostToDevice);
    get_stats<<<1, N>>>(d_A, d_min, d_max, d_sum, d_square_sum, N);
    cudaMemcpy(&min, d_min, sizeof(int), cudaMemcpyDeviceToHost);
    cudaMemcpy(&max, d_max, sizeof(int), cudaMemcpyDeviceToHost);
    cudaMemcpy(&sum, d_sum, sizeof(int), cudaMemcpyDeviceToHost);
    cudaMemcpy(&square_sum, d_square_sum, sizeof(int), cudaMemcpyDeviceToHost);
    float mean = sum/(N*1.0);
    // BUG FIX: variance = E[x^2] - mean^2 = (square_sum - 2*mean*sum + N*mean^2)/N.
    // The original ADDED the cross term (+2*mean*sum), overstating the SD.
    float SD = sqrt((square_sum - 2*mean*sum + N*mean*mean)/(N*1.0));
    cout << "Min: " << min << endl;
    cout << "Max: " << max << endl;
    cout << "Mean: " << mean << endl;
    cout << "SD: " << SD << endl;
    // Release host and device memory (the original leaked everything).
    free(A);
    cudaFree(d_A);
    cudaFree(d_min);
    cudaFree(d_max);
    cudaFree(d_sum);
    cudaFree(d_square_sum);
    return 0;
}
14,928 | //pass
//--blockDim=64 --gridDim=1 --no-inline
#include "cuda.h"
// Only thread 0 of each block performs the store to A[4]; every other
// thread returns 0 immediately, so the write is guarded against intra-block
// races. Returns 1 on the writing thread, 0 otherwise.
__device__ int bar(float* A) {
if(threadIdx.x != 0) {
return 0;
}
A[4] = 26.8f;
return 1;
}
// Verification-style test kernel (see the //pass --blockDim/--gridDim
// harness flags at the top of the file): the return value of bar() is
// intentionally unused; only bar()'s guarded write matters.
__global__ void foo(float* A) {
int y = bar(A);
}
|
14,929 | #include <cuda.h>
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <device_functions.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define BLOCKSIZE 512
// Sequential host-side scan for the largest element of Data[0..n-1].
// Used to bound the number of radix-sort bit passes.
__host__ unsigned int getMax(unsigned int* Data, int n)
{
    unsigned int best = Data[0];
    for (int idx = 1; idx < n; ++idx)
    {
        if (Data[idx] > best)
            best = Data[idx];
    }
    return best;
}
// Fix-up phase of the multi-level scan: adds the exclusive-scanned block
// totals (PreScannedAuxiliary) onto each 2*BLOCKSIZE-element tile of
// PreScan, turning per-tile exclusive scans into a global exclusive scan.
// Each block walks its tiles with a grid-stride loop; BlockOffset counts
// the passes so the matching auxiliary entry can be located.
__global__ void FixUpScan(unsigned int* PreScan, unsigned int* PreScannedAuxiliary, unsigned int Size)
{
int tid = threadIdx.x;
int BlockOffset=0;
for(int offset = 2*blockIdx.x*blockDim.x; offset<Size; offset+=2*blockDim.x*gridDim.x)
{
// Guard against reading past the auxiliary array on the last partial tile.
if((BlockOffset*gridDim.x+blockIdx.x)<(Size+2*BLOCKSIZE-1)/(2*BLOCKSIZE))
{
if ((offset+2*tid) < Size)
PreScan[offset+2*tid]+=PreScannedAuxiliary[BlockOffset*gridDim.x+blockIdx.x];
if ((offset+2*tid+1) < Size)
PreScan[offset+2*tid+1]+=PreScannedAuxiliary[BlockOffset*gridDim.x+blockIdx.x];
}
BlockOffset++;
}
}
// Computes the number of zero-bit elements for the current radix pass.
// PreScan is the exclusive scan of Bits, so PreScan[Size-1] excludes the
// last element; subtract one more if the last bit is set. The Sort kernel
// uses *Diff_N_Sn as the base offset for the one-bit partition.
__global__ void GetDiff_N_Sn(unsigned char* Bits, unsigned int* PreScan, unsigned int* Diff_N_Sn, int Size)
{
*Diff_N_Sn = Size - PreScan[Size-1];
if(Bits[Size-1]==1) (*Diff_N_Sn)--;
Bits[Size-1]=Bits[Size-1]; // NOTE(review): self-assignment is a no-op, presumably kept to pacify a checker
}
// Stable split/scatter step of the radix sort. For the current bit:
// zero-bit elements keep their relative order at the front
// (rank among zeros = index - PreScan[index]), one-bit elements go after
// all zeros (PreScan[index] + zero count *Diff_N_Sn). Grid-stride loop.
__global__ void Sort(unsigned int* InData, unsigned int* PreScan, unsigned int* OutData, unsigned char* Bits, unsigned int* Diff_N_Sn, int Size)
{
for(int index = blockIdx.x*blockDim.x+threadIdx.x; index<Size; index+=blockDim.x*gridDim.x)
{
if(index<Size)
{
if(Bits[index]==0)
{
OutData[index - PreScan[index]] = InData[index];
}
else
OutData[PreScan[index]+(*Diff_N_Sn)] = InData[index];
}
}
}
// Copies OutData back into InData with a grid-stride loop so the next
// radix pass reads from the freshly partitioned buffer.
__global__ void Exchange(unsigned int* InData, unsigned int* OutData, int Size)
{
    const int stride = blockDim.x * gridDim.x;
    for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < Size; i += stride)
    {
        InData[i] = OutData[i];
    }
}
// First scan phase for one radix bit: for every 2*blockDim.x tile
// (grid-stride over tiles) this extracts bit `bit` of each key into the
// dynamic shared buffer Tmp[] and into Bits[], runs a Blelloch up-sweep,
// stashes the tile total in Auxiliary[] (scanned later by
// KernelPrescanRecursive), zeroes the root, down-sweeps, and writes the
// tile-local exclusive scan into PreScan[].
// Launch requires 2*blockDim.x*sizeof(unsigned int) dynamic shared memory.
__global__ void KernelPrescan(unsigned int* Data, unsigned char* Bits, unsigned int* PreScan, unsigned int* Auxiliary, int Size, int bit)
{
extern __shared__ unsigned int Tmp[];
int tid = threadIdx.x;
int AuxiliaryIndex=0;
for(int OffsetTid = 2*blockIdx.x*blockDim.x; OffsetTid<Size; OffsetTid+=2*blockDim.x*gridDim.x)
{
int offset=1;
// Load two bit values per thread; pad the tail of the tile with zeros.
if(OffsetTid+tid<Size)
{
Tmp[tid] = (Data[OffsetTid+tid]>>bit)&1;
Bits[OffsetTid+tid]=(Data[OffsetTid+tid]>>bit)&1;
}
else
Tmp[tid] = 0;
if(OffsetTid+tid+blockDim.x<Size)
{
Tmp[tid+blockDim.x] = (Data[OffsetTid+tid+blockDim.x]>>bit)&1;
Bits[OffsetTid+tid+blockDim.x] = (Data[OffsetTid+tid+blockDim.x]>>bit)&1;
}
else
Tmp[tid+blockDim.x] = 0;
// Up-sweep (reduce) phase: build partial sums in place.
for (int d = blockDim.x; d > 0; d>>=1)
{
__syncthreads();
if(tid<d)
{
int ai = offset*(2*tid+1)-1;
int bi = offset*(2*tid+2)-1;
Tmp[bi]+=Tmp[ai];
}
offset*=2;
}
// Save the tile total for the recursive scan, then clear the root.
if(tid==0)
{ if((gridDim.x*AuxiliaryIndex+blockIdx.x)<(Size+2*BLOCKSIZE-1)/(2*BLOCKSIZE))
Auxiliary[gridDim.x*AuxiliaryIndex+blockIdx.x]=Tmp[2*blockDim.x-1];
Tmp[2*blockDim.x-1]=0;
}
// Down-sweep phase: propagate the exclusive prefix sums back down.
for(int d=1; d<2*blockDim.x; d*=2)
{
offset>>=1;
__syncthreads();
if(tid<d)
{
int ai = offset*(2*tid+1)-1;
int bi = offset*(2*tid+2)-1;
int t = Tmp[ai];
Tmp[ai]=Tmp[bi];
Tmp[bi]+=t;
}
}
__syncthreads();
if((OffsetTid+2*tid)<Size)
PreScan[OffsetTid+2*tid] = Tmp[2*tid];
if((OffsetTid+2*tid+1)<Size)
PreScan[OffsetTid+2*tid+1] = Tmp[2*tid+1];
__syncthreads();
AuxiliaryIndex++;
}
}
// Same Blelloch tile scan as KernelPrescan, but operating in place on an
// auxiliary level of the scan hierarchy: PreScan[] is both input and
// output. When LastLevel is nonzero the tile totals are discarded instead
// of being written to the (NULL) Auxiliary array.
// Launch requires 2*blockDim.x*sizeof(unsigned int) dynamic shared memory.
__global__ void KernelPrescanRecursive(unsigned int* PreScan, unsigned int* Auxiliary, int Size, int LastLevel)
{
extern __shared__ unsigned int Tmp[];
int tid = threadIdx.x;
int AuxiliaryIndex=0;
for(int OffsetTid = 2*blockIdx.x*blockDim.x; OffsetTid<Size; OffsetTid+=2*blockDim.x*gridDim.x)
{
int offset=1;
// Load two elements per thread; pad past the end with zeros.
if(OffsetTid+tid<Size)
{
Tmp[tid] = PreScan[OffsetTid+tid];
}
else
Tmp[tid] = 0;
if(OffsetTid+tid+blockDim.x<Size)
{
Tmp[tid+blockDim.x] = PreScan[OffsetTid+tid+blockDim.x];
}
else
Tmp[tid+blockDim.x] = 0;
// Up-sweep (reduce) phase.
for (int d = blockDim.x; d > 0; d>>=1)
{
__syncthreads();
if(tid<d)
{
int ai = offset*(2*tid+1)-1;
int bi = offset*(2*tid+2)-1;
Tmp[bi]+=Tmp[ai];
}
offset*=2;
}
// Store the tile total one level up (unless this is the last level).
if(tid==0)
{ if( ((gridDim.x*AuxiliaryIndex+blockIdx.x)<(Size+2*BLOCKSIZE-1)/(2*BLOCKSIZE)) && LastLevel==0)
Auxiliary[gridDim.x*AuxiliaryIndex+blockIdx.x]=Tmp[2*blockDim.x-1];
Tmp[2*blockDim.x-1]=0;
}
// Down-sweep phase.
for(int d=1; d<2*blockDim.x; d*=2)
{
offset>>=1;
__syncthreads();
if(tid<d)
{
int ai = offset*(2*tid+1)-1;
int bi = offset*(2*tid+2)-1;
int t = Tmp[ai];
Tmp[ai]=Tmp[bi];
Tmp[bi]+=t;
}
}
__syncthreads();
if((OffsetTid+2*tid)<Size)
PreScan[OffsetTid+2*tid] = Tmp[2*tid];
if((OffsetTid+2*tid+1)<Size)
PreScan[OffsetTid+2*tid+1] = Tmp[2*tid+1];
__syncthreads();
AuxiliaryIndex++;
}
}
// Host-side recursion over the auxiliary hierarchy: scans the Depth-level
// block-totals array in place, recursing while more levels remain, then
// fixes this level up with the scanned totals from the level above.
// When a level fits in a single tile (<= 2*BLOCKSIZE) it is the last one
// and no further auxiliary array is produced.
__host__ void PreScanRecursive(unsigned int** ListAux, unsigned int* ListAuxSize, int CountAuxiliary, int Depth)
{
if(CountAuxiliary==0) return;
int LastLevel=0;
if(ListAuxSize[Depth]<=2*BLOCKSIZE)
{
LastLevel=1;
KernelPrescanRecursive<<<512, BLOCKSIZE, 2*BLOCKSIZE*sizeof(unsigned int)>>>(ListAux[Depth], NULL, ListAuxSize[Depth], LastLevel);
}
else
{
KernelPrescanRecursive<<<512, BLOCKSIZE, 2*BLOCKSIZE*sizeof(unsigned int)>>>(ListAux[Depth], ListAux[Depth+1], ListAuxSize[Depth], LastLevel);
}
PreScanRecursive(ListAux, ListAuxSize, CountAuxiliary-1, Depth+1);
// Propagate the scanned child totals back onto this level.
if(LastLevel==0)
FixUpScan<<<512,BLOCKSIZE>>>(ListAux[Depth], ListAux[Depth+1], ListAuxSize[Depth]);
}
// LSD radix sort driver: reads a binary element count followed by that many
// unsigned ints from stdin, sorts them on the GPU one bit per pass (split
// via a multi-level Blelloch scan), and writes the sorted keys to stdout.
int main()
{
int Size;
// NOTE(review): fread return values are unchecked — a short/empty stdin
// leaves Size/Data uninitialized.
fread(&Size, sizeof(int), 1, stdin);
unsigned int* Data = (unsigned int*)malloc(Size*sizeof(unsigned int));
fread(Data, Size*sizeof(unsigned int), 1, stdin);
// The largest key bounds how many bit passes are needed.
unsigned int m = getMax(Data, Size);
unsigned int* PreScan;
unsigned char* Bits;
unsigned int* Dev_Data;
unsigned int* Diff_N_Sn;
unsigned int* OutData;
cudaMalloc((void**)&Dev_Data, Size*sizeof(unsigned int));
cudaMalloc((void**)&PreScan, Size*sizeof(unsigned int));
cudaMalloc((void**)&Bits, Size*sizeof(unsigned char));
cudaMalloc((void**)&Diff_N_Sn, sizeof(unsigned int));
cudaMalloc((void**)&OutData, Size*sizeof(unsigned int));
cudaMemcpy(Dev_Data, Data, Size*sizeof(unsigned int), cudaMemcpyHostToDevice);
// Count how many auxiliary levels the scan hierarchy needs: each level
// shrinks the problem by a factor of 2*BLOCKSIZE.
int CountAuxiliary = 0;
int PrevAuxSize = Size;
int NextAuxSize=0;
do
{
NextAuxSize = (PrevAuxSize+2*BLOCKSIZE-1)/(2*BLOCKSIZE);
PrevAuxSize = NextAuxSize;
CountAuxiliary++;
} while(NextAuxSize >= 2*BLOCKSIZE);
unsigned int* ListAuxSize = (unsigned int*)malloc(CountAuxiliary*sizeof(unsigned int));
unsigned int** ListAux;
ListAux = (unsigned int**)malloc(CountAuxiliary*sizeof(unsigned int*));
PrevAuxSize = Size;
NextAuxSize=0;
// Allocate one device buffer per level of the hierarchy.
for(int i=0; i<CountAuxiliary; i++)
{
NextAuxSize = (PrevAuxSize+2*BLOCKSIZE-1)/(2*BLOCKSIZE);
ListAuxSize[i] = NextAuxSize;
cudaMalloc((void**)&ListAux[i], NextAuxSize*sizeof(unsigned int));
//cudaMemcpy(ListAux[i], ListSupport[i], NextAuxSize*sizeof(unsigned int), cudaMemcpyHostToDevice);
PrevAuxSize = NextAuxSize;
}
// One stable split pass per bit, lowest bit first.
for (unsigned int bit = 0; (m>>bit) > 0; bit++)
{
KernelPrescan<<<512,BLOCKSIZE, 2*BLOCKSIZE*sizeof(unsigned int)>>>(Dev_Data, Bits, PreScan, ListAux[0], Size, bit); //invoke CountSort by every bit
PreScanRecursive(ListAux, ListAuxSize, CountAuxiliary, 0);
FixUpScan<<<512,BLOCKSIZE>>>(PreScan, ListAux[0], Size);
GetDiff_N_Sn<<<1,1>>>(Bits, PreScan, Diff_N_Sn, Size);
Sort<<<512,BLOCKSIZE>>>(Dev_Data, PreScan, OutData, Bits, Diff_N_Sn, Size);
Exchange<<<512,BLOCKSIZE>>>(Dev_Data, OutData, Size);
if ((m>>bit)==1)
{
break;
}
}
// NOTE(review): device/host buffers are never freed; acceptable for a
// one-shot filter process but worth fixing if this becomes a library.
cudaMemcpy(Data, Dev_Data, Size*sizeof(unsigned int), cudaMemcpyDeviceToHost);
fwrite(Data, Size*sizeof(unsigned int), 1, stdout);
return 0;
}
|
14,930 | #include "includes.h"
// Naive dense matrix multiply C = A * B for size x size row-major
// matrices: one thread computes one element of C.
__global__ void vecmul(float *A, float* B, float *C, int size)
{
    const int row = blockIdx.y * blockDim.y + threadIdx.y;
    const int col = blockIdx.x * blockDim.x + threadIdx.x;
    // Skip threads that fall outside the matrix.
    if (row >= size || col >= size)
        return;
    float acc = 0;
    for (int k = 0; k < size; ++k)
        acc += A[row * size + k] * B[k * size + col];
    C[row * size + col] = acc;
}
14,931 | #include<stdlib.h>
#include<stdio.h>
#include<time.h>
void init_array(float*a ,const int N);
void init_mat(float*a ,const int N,const int M);
void print_array(float*a ,const int N);
void print_mat(float*a ,const int N,const int M);
// Dense vector-matrix product: out = vec * mat, where vec has N entries and
// mat is N x M row-major. One thread computes one of the M output entries.
// Fix: the original had the bounds check `if (tid < M)` duplicated in two
// nested levels; one guard suffices.
__global__
void kernel(float* vec,float* mat,float* out,const int N,const int M)
{
    int tid = threadIdx.x + blockIdx.x*blockDim.x;
    // Guard: the grid is rounded up to whole blocks, so tid may exceed M.
    if (tid < M)
    {
        float sum = 0.0f;
        for (int i = 0; i < N; i++)
            sum += vec[i] * mat[(i * M) + tid];
        out[tid] = sum;
    }
}
// Demo driver: builds a random 1xN vector `a` and NxM matrix `b`, computes
// c = a * b on the GPU, and prints inputs and result.
// Fix: the host buffers come from malloc(), so they must be released with
// free() — the original called delete[] on them, which is undefined behavior.
int main()
{
    srand(time(NULL));
    float *a, *b, *c;       // host: vector a[N], matrix b[N*M], result c[M]
    float *d_a, *d_b, *d_c; // device copies
    int N = 3;
    int M = 4;
    //a = [1*3], b = [3*4], c = [1*4]
    a = (float*)malloc(sizeof(float)*N);   //input vector
    b = (float*)malloc(sizeof(float)*N*M); //input matrix
    c = (float*)malloc(sizeof(float)*M);   //output vector
    init_array(a,N);
    init_mat(b,N,M);
    init_array(c,M); // c is overwritten by the kernel; this only seeds the printout
    printf("Initial data:\n");
    print_array(a,N);
    printf("\n\n\n\n");
    print_mat(b,N,M);
    printf("\n\n\n\n");
    print_array(c,M);
    printf("\n\n\n\n");
    cudaMalloc(&d_a,sizeof(float)*N);
    cudaMalloc(&d_b,sizeof(float)*N*M);
    cudaMalloc(&d_c,sizeof(float)*M);
    cudaMemcpy(d_a,a,sizeof(float)*N,cudaMemcpyHostToDevice);
    cudaMemcpy(d_b,b,sizeof(float)*N*M,cudaMemcpyHostToDevice);
    // One thread per output column, grid rounded up to whole 256-thread blocks.
    kernel<<<M/256+1,256>>>(d_a,d_b,d_c,N,M);
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess)
        printf("Error: %s\n", cudaGetErrorString(err));
    cudaMemcpy(c,d_c,sizeof(float)*M,cudaMemcpyDeviceToHost);
    printf("Final data:\n");
    print_array(c,M);
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);
    // malloc'd memory is paired with free(), never delete[].
    free(a);
    free(b);
    free(c);
    return 0;
}
// Fills the N-element array with pseudo-random values in [1, N]
// (uses the global rand() stream seeded by the caller).
void init_array(float* a,const int N)
{
    int i = 0;
    while (i < N) {
        a[i] = (float)(rand() % N + 1);
        ++i;
    }
}
// Fills the N x M row-major matrix with pseudo-random values in [1, N]
// (same traversal order as a flat row-major walk, so the rand() sequence
// maps to elements identically to the original).
void init_mat(float*a ,const int N,const int M)
{
    for (int r = 0; r < N; ++r)
        for (int c = 0; c < M; ++c)
            a[r*M + c] = (float)(rand() % N + 1);
}
// Writes the N entries of `a` on one line, space-separated, then a newline.
void print_array(float* a,const int N)
{
    int i;
    for (i = 0; i < N; ++i)
        printf("%f ", a[i]);
    printf("\n");
}
// Prints the N x M row-major matrix, one row per line, followed by a
// trailing blank line.
void print_mat(float*a ,const int N,const int M)
{
    for (int r = 0; r < N; ++r)
    {
        for (int c = 0; c < M; ++c)
            printf("% f", a[r*M + c]);
        printf("\n");
    }
    printf("\n");
}
|
14,932 | #include <iostream>
#include <math.h>
#include <stdio.h>
// Element-wise vector addition: c[id] = a[id] + b[id] for id in [0, n).
// Fix: the original guard was `if (id > 0)`, which (a) never computed
// element 0 and (b) let every thread with id >= n write out of bounds —
// the caller launches 512 threads for n = 100.
__global__
void add(double *a,double*b,double* c,int n) {
    int id = blockIdx.x * blockDim.x + threadIdx.x;
    if (id < n)
        c[id] = a[id] + b[id];
}
// Driver: allocates three unified-memory vectors, fills a and b with their
// index values, runs the `add` kernel, and prints every element triple.
int main()
{
    const int n = 100;
    // Unified (managed) memory is addressable from both host and device.
    double *d_a = NULL;
    double *d_b = NULL;
    double *d_c = NULL;
    cudaMallocManaged(&d_a, n * sizeof(double));
    cudaMallocManaged(&d_b, n * sizeof(double));
    cudaMallocManaged(&d_c, n * sizeof(double));
    // Seed both inputs with their index value.
    for (int i = 0; i < n; ++i) {
        d_a[i] = i;
        d_b[i] = i;
    }
    const int blockSize = 512;
    // Number of thread blocks in grid (ceiling division).
    const int gridSize = (int)ceil((float)n/blockSize);
    add <<< gridSize,blockSize >>>(d_a,d_b,d_c,n);
    // Kernel launches are asynchronous; wait before the host reads results.
    cudaDeviceSynchronize();
    printf("%d %d\n",gridSize,blockSize );
    for (int i = 0; i < n; ++i)
        printf("%f + %f = %f\n",d_a[i],d_b[i],d_c[i]);
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_c);
}
|
14,933 | //Author: Naveen Milind Chalawadi
//Assumption Size of the 2nd Array is always less than the 1st Array
// Input Arrays - (CPU)Array1,Array2 (GPU)GA1,GA2
// Output Array - (CPU)Arrayout (GPU)GAout
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include <iostream>
#include <stdlib.h>
#include <cuda.h>
#include <time.h>
using namespace std;
//Macro for checking errors when using Cuda APIs and printing the error report on screen
// Prints an error report when a CUDA API call fails (does not exit).
// Fix: the macro was reversed — the original `#define Handle_Error(err)
// (HANDLE_ERROR(err))` forwarded to an undefined HANDLE_ERROR and, being
// defined before the function, mangled the function definition itself via
// macro expansion. Every call site in this file uses HANDLE_ERROR(...), so
// the macro must forward to the function; it is defined after the function
// so the function name is not macro-expanded.
static void Handle_Error(cudaError_t err) {
    if (err != cudaSuccess) {
        cout << "Error!!!!" << endl;
        cout << cudaGetErrorString(err) << endl;
    }
}
#define HANDLE_ERROR(err) (Handle_Error(err))
//Function Definition and Declaration
//Function Definition and Declaration
__global__ void Kconvolve(float*, float*, float*, int, int);
// "Same"-size 1-D linear convolution: GAout[i] = sum_j GA1[i - n/2 + j] * GA2[j],
// where m is the length of GA1/GAout and n the length of GA2 (assumed n <= m).
// One thread per output sample.
// Fix: the original wrote GAout[thread_id] without a bounds check; the caller
// rounds the grid up to whole blocks, so tail threads wrote out of bounds.
// The trailing __syncthreads() was also removed: there is no shared state to
// publish, and a barrier after a divergent early return would be unsafe.
__global__ void Kconvolve(float* GAout, float* GA1, float* GA2, int m, int n)
{
    //compute the global id for the thread
    int thread_id = blockIdx.x * blockDim.x + threadIdx.x;
    // Threads beyond the m output samples must not touch memory.
    if (thread_id >= m)
        return;
    float temp_sum = 0;
    int startpoint = thread_id - (n / 2); // as x(m)*h(n) = h(n)*x(m-n)
    //here m is the size of 1st Array and n is the size of the 2nd Array
    for (int j = 0; j < n; j++) {
        int k = startpoint + j;
        if (k >= 0 && k < m) {   // clip taps that fall outside GA1
            temp_sum += GA1[k] * GA2[j];
        }
    }
    GAout[thread_id] = temp_sum;
}
// Driver: convolves an m-element array of ones with an n-element array of
// ones on the GPU, prints the result and the kernel time.
// Fixes: cudaThreadSynchronize() is deprecated (removed in newer toolkits) —
// replaced with cudaDeviceSynchronize(); timing events are now destroyed.
int main(int argc,char **argv)
{
    if (argc != 3)
    {
        printf("Error!!! Need two arguments (number of elements)sizes of Array_1 and Array_2\n");
        exit(1);
    }
    int m = atoi(argv[1]);
    int n = atoi(argv[2]);
    int datasize1 = m * sizeof(float);
    int datasize2 = n * sizeof(float);
    float* Array1 = (float*)malloc(datasize1);
    float* Array2 = (float*)malloc(datasize2);
    //Initialize Input Arrays (value 1 for every element of both arrays)
    for (int i = 0; i < m; i++) {
        Array1[i] = 1;
    }
    for (int i = 0; i < n; i++) {
        Array2[i] = 1;
    }
    //Output of "same" convolution has m samples, like Array1
    float* Arrayout = (float*)malloc(datasize1);
    int width = m;
    //Initialize Output Array to 0
    for (int i = 0; i < m; i++) {
        Arrayout[i] = 0;
    }
    //GPU Implementation starts here
    cudaDeviceProp prop;
    HANDLE_ERROR(cudaGetDeviceProperties(&prop, 0)); // query device 0 for its block-size limit
    dim3 threads(prop.maxThreadsPerBlock);
    dim3 blocks(width / threads.x + 1); // round up so every output sample has a thread
    float* GA1, * GA2, * GAout; //pointers to device memory
    HANDLE_ERROR(cudaMalloc(&GA1, datasize1));
    HANDLE_ERROR(cudaMalloc(&GA2, datasize2));
    HANDLE_ERROR(cudaMalloc(&GAout, datasize1));
    HANDLE_ERROR(cudaMemcpy(GA1, Array1, datasize1, cudaMemcpyHostToDevice));
    HANDLE_ERROR(cudaMemcpy(GA2, Array2, datasize2, cudaMemcpyHostToDevice));
    //Event timers to time the kernel
    cudaEvent_t startTime, stopTime;
    float elapsedTime = 0;
    cudaEventCreate(&startTime);
    cudaEventCreate(&stopTime);
    //Perform the convolution
    cudaEventRecord(startTime);
    Kconvolve <<<blocks, threads >>> (GAout, GA1, GA2, m, n);
    // cudaThreadSynchronize() is deprecated; use the supported equivalent.
    cudaDeviceSynchronize();
    cudaEventRecord(stopTime);
    //Get the elapsed time
    cudaEventSynchronize(stopTime);
    cudaEventElapsedTime(&elapsedTime, startTime, stopTime);
    HANDLE_ERROR(cudaMemcpy(Arrayout, GAout, datasize1, cudaMemcpyDeviceToHost));
    for (int i = 0; i < m; ++i) {
        cout << Arrayout[i] << '\t';
    }
    //Release events, device memory, then host memory
    cudaEventDestroy(startTime);
    cudaEventDestroy(stopTime);
    cudaFree(GA1);
    cudaFree(GA2);
    cudaFree(GAout);
    free(Array1);
    free(Array2);
    free(Arrayout);
    printf("\nComputation Time: %f ms", elapsedTime);
    return 0;
}
|
14,934 | #include "includes.h"
// filename: vsquare.cu
// a simple CUDA kernel to element multiply vector with itself
extern "C" // ensure function name to be exactly "vsquare"
{
}
// c[i] = a * b[i]: scales the lengthC-element vector b by the scalar a.
__global__ void ax(const int lengthC, const double a, const double *b, double *c)
{
    int idx = blockDim.x * blockIdx.x + threadIdx.x;
    // Grid padding: threads past the end of the vector do nothing.
    if (idx >= lengthC)
        return;
    c[idx] = a * b[idx];
}
14,935 | #include "includes.h"
// 2-D finite-difference time step (8th-order stencil in x and y).
// g_u2 holds the current wavefield; g_u1 holds the previous one and receives
// the updated values. Each thread sweeps one x-column over rows
// [iStart, iEnd), caching the current x-line (plus halos) in shared memory
// and a 9-row y-neighborhood in registers.
// Depends on NPAD/NPAD2/BDIMX and the coefficient table `coef` defined
// elsewhere in this file (presumably __constant__ memory — TODO confirm).
// Assumes blockDim.x == BDIMX and that the grid spans nx — verify at call site.
__global__ void kernel_2dfd(float *g_u1, float *g_u2, const int nx, const int iStart, const int iEnd)
{
// global to line index (x position of this thread)
unsigned int ix = blockIdx.x * blockDim.x + threadIdx.x;
// smem idx for current point (shifted right by NPAD to leave room for the left halo)
unsigned int stx = threadIdx.x + NPAD;
// flattened global index of this thread's point on row iStart
unsigned int idx = ix + iStart * nx;
// shared memory for x dimension: one line plus NPAD2 halo cells
__shared__ float line[BDIMX + NPAD2];
// a coefficient related to physical properties
const float alpha = 0.12f;
// register queue for the y direction: yval[4] is the center row,
// yval[0..3] the rows above, yval[5..8] the rows below
float yval[9];
for (int i = 0; i < 8; i++) yval[i] = g_u2[idx + (i - 4) * nx];
// skip for the bottom most y value fetched each iteration
int iskip = NPAD * nx;
#pragma unroll 9
for (int iy = iStart; iy < iEnd; iy++)
{
// get yval[8] here: newest row entering the stencil window
yval[8] = g_u2[idx + iskip];
// read halo part: the first NPAD threads load both left and right halos.
// NOTE(review): the left-halo load reads g_u2[idx - NPAD] — this assumes
// idx >= NPAD for those threads; confirm launches never start that close
// to the beginning of the array.
if(threadIdx.x < NPAD)
{
line[threadIdx.x] = g_u2[idx - NPAD];
line[stx + BDIMX] = g_u2[idx + BDIMX];
}
line[stx] = yval[4];
__syncthreads();
// 8th-order fd operator, applied only to interior x points
if ( (ix >= NPAD) && (ix < nx - NPAD) )
{
// center point, counted once for the x pass and once for the y pass
float tmp = coef[0] * line[stx] * 2.0f;
#pragma unroll
for(int d = 1; d <= 4; d++)
{
tmp += coef[d] * ( line[stx - d] + line[stx + d]);
}
#pragma unroll
for(int d = 1; d <= 4; d++)
{
tmp += coef[d] * (yval[4 - d] + yval[4 + d]);
}
// time dimension: u(t+1) = 2*u(t) - u(t-1) + alpha * stencil sum
g_u1[idx] = yval[4] + yval[4] - g_u1[idx] + alpha * tmp;
}
// shift the y register queue up by one row
#pragma unroll 8
for (int i = 0; i < 8 ; i++)
{
yval[i] = yval[i + 1];
}
// advance the global index to the next row; the barrier keeps the shared
// line intact until every thread is done reading it
idx += nx;
__syncthreads();
}
} |
14,936 | //
// Adapted from Paulius Micikevicius (pauliusm@nvidia.com)
//
#include <stdio.h>
#include <stdlib.h>
#define NUM_GPUS 2
// Prints a CUDA error report — optionally prefixed with `string` — and exits
// on failure; with verbose=true it also reports successful calls.
// Fix: the original called printf(string), passing caller-supplied text as a
// format string (undefined behavior if the text ever contains '%').
void process_error( const cudaError_t &error, char *string=0, bool verbose=false )
{
    if( error != cudaSuccess || verbose )
    {
        if( string )
            printf( "%s", string );  // never use caller text as a format string
        printf( ": %s\n", cudaGetErrorString( error ) );
    }
    if( error != cudaSuccess )
        exit(-1);
}
// Peer-to-peer bandwidth benchmark between two GPUs: times a pull copy, a
// push copy, and a bidirectional exchange (with and without per-iteration
// sync). CLI: [MB per copy] [reps] [gpu_0 ordinal] [gpu_1 ordinal].
int main( int argc, char *argv[] )
{
size_t num_bytes = 16*1024*1024;
int nreps = 10;
int gpu_0 = 0;
int gpu_1 = 1;
// Optional overrides from the command line.
if( argc >= 2 )
num_bytes = (size_t)( atoi( argv[1] ) * 1024*1024 );
if( argc >= 3 )
nreps = atoi( argv[2] );
if( argc >= 4 )
gpu_0 = atoi( argv[3] );
if( argc >= 5 )
gpu_1 = atoi( argv[4] );
cudaError_t error = cudaSuccess;
cudaDeviceProp gpu_prop;
cudaGetDeviceProperties( &gpu_prop, gpu_0 );
printf("GPU 0: %s\n", gpu_prop.name );
cudaGetDeviceProperties( &gpu_prop, gpu_1 );
printf("GPU 1: %s\n", gpu_prop.name );
// Only slots [0] and [1] are used; slots [2] and [3] stay null.
void *d_a[4] = {0, 0, 0, 0};
void *d_b[4] = {0, 0, 0, 0};
// Buffers on GPU 0, plus peer access into GPU 1's memory.
cudaSetDevice( gpu_0 );
error = cudaMalloc( &d_a[0], num_bytes );
process_error( error, "allocate a on GPU 0" );
error = cudaMalloc( &d_b[0], num_bytes );
process_error( error, "allocate b on GPU 0" );
error = cudaDeviceEnablePeerAccess( gpu_1, 0 );
process_error( error, "enable GPU 0 to access GPU 1's memory" );
// Buffers on GPU 1, plus peer access into GPU 0's memory.
cudaSetDevice( gpu_1 );
error = cudaMalloc( &d_a[1], num_bytes );
process_error( error, "allocate a on GPU 1" );
error = cudaMalloc( &d_b[1], num_bytes );
process_error( error, "allocate b on GPU 1" );
error = cudaDeviceEnablePeerAccess( gpu_0, 0 );
process_error( error, "enable GPU 1 to access GPU 0's memory" );
cudaSetDevice( gpu_0 );
float elapsed_time_ms = 0.f;
float throughput_gbs = 0.f;
// Events recorded on GPU 0's default stream bracket each timed section.
cudaEvent_t start, stop;
cudaEventCreate( &start );
cudaEventCreate( &stop );
// One stream per GPU so the exchange sections can run copies concurrently.
cudaStream_t stream_on_gpu_0, stream_on_gpu_1;
cudaSetDevice( gpu_0 );
cudaStreamCreate( &stream_on_gpu_0 );
cudaSetDevice( gpu_1 );
cudaStreamCreate( &stream_on_gpu_1 );
cudaSetDevice( gpu_0 );
///////////////////////////
// pull copy (GPU 0 pulls from GPU 1)
//
cudaEventRecord( start, 0 );
for( int i=0; i<nreps; i++ ) {
cudaMemcpyPeerAsync( d_b[0], gpu_0, d_b[1], gpu_1, num_bytes, stream_on_gpu_0 );
}
// NOTE(review): this stream-sync result is overwritten (unchecked) by the
// cudaDeviceSynchronize below; also `stop` is recorded after the sync here,
// unlike the push-copy section — the two sections time slightly differently.
error = cudaStreamSynchronize(stream_on_gpu_0);
cudaEventRecord( stop, 0 );
error = cudaDeviceSynchronize();
process_error( error, "sync after pull copy" );
error = cudaEventElapsedTime( &elapsed_time_ms, start, stop );
process_error( error, "get event elapsed time" );
// Average per-rep time; bytes * 1e-6 / ms == GB/s.
elapsed_time_ms /= nreps;
throughput_gbs = num_bytes * 1e-6f / elapsed_time_ms;
printf( "%d ->%d: %7.2f %7.2f\n", gpu_1, gpu_0, elapsed_time_ms, throughput_gbs );
///////////////////////////
// push copy (GPU 0 pushes to GPU 1)
//
cudaEventRecord( start, 0 );
for( int i=0; i<nreps; i++ ) {
cudaMemcpyPeerAsync( d_a[1], gpu_1, d_a[0], gpu_0, num_bytes, stream_on_gpu_0 );
}
cudaEventRecord( stop, 0 );
error = cudaDeviceSynchronize();
process_error( error, "sync after push copy" );
error = cudaEventElapsedTime( &elapsed_time_ms, start, stop );
process_error( error, "get event elapsed time" );
elapsed_time_ms /= nreps;
throughput_gbs = num_bytes * 1e-6f / elapsed_time_ms;
printf( "%d ->%d: %7.2f %7.2f\n", gpu_0, gpu_1, elapsed_time_ms, throughput_gbs );
///////////////////////////
// exchange with sync after every iteration (serializes the two directions)
//
cudaEventRecord( start, 0 );
for( int i=0; i<nreps; i++ ) {
cudaMemcpyPeerAsync( d_a[1], gpu_1, d_a[0], gpu_0, num_bytes, stream_on_gpu_0 );
cudaMemcpyPeerAsync( d_b[0], gpu_0, d_b[1], gpu_1, num_bytes, stream_on_gpu_1 );
cudaDeviceSynchronize();
}
cudaEventRecord( stop, 0 );
error = cudaDeviceSynchronize();
process_error( error, "sync after exchange" );
error = cudaEventElapsedTime( &elapsed_time_ms, start, stop );
process_error( error, "get event elapsed time" );
elapsed_time_ms /= nreps;
// Factor 2: bytes move in both directions per iteration.
throughput_gbs = num_bytes * 2e-6f / elapsed_time_ms;
printf( "%d<->%d: %7.2f %7.2f\n", gpu_0, gpu_1, elapsed_time_ms, throughput_gbs );
///////////////////////////
// exchange without per-iteration sync (directions may overlap)
//
cudaEventRecord( start, 0 );
for( int i=0; i<nreps; i++ ) {
cudaMemcpyPeerAsync( d_a[1], gpu_1, d_a[0], gpu_0, num_bytes, stream_on_gpu_0 );
cudaMemcpyPeerAsync( d_b[0], gpu_0, d_b[1], gpu_1, num_bytes, stream_on_gpu_1 );
}
cudaEventRecord( stop, 0 );
error = cudaDeviceSynchronize();
process_error( error, "sync after exchange" );
error = cudaEventElapsedTime( &elapsed_time_ms, start, stop );
process_error( error, "get event elapsed time" );
elapsed_time_ms /= nreps;
throughput_gbs = num_bytes * 2e-6f / elapsed_time_ms;
printf( "%d<->%d: %7.2f %7.2f\n", gpu_0, gpu_1, elapsed_time_ms, throughput_gbs );
// Cleanup. NOTE(review): only d_a[0]/d_a[1] are freed explicitly; the d_b
// buffers, events, and streams are reclaimed by cudaDeviceReset.
cudaSetDevice( gpu_0 );
error = cudaFree( d_a[0] );
process_error( error, "free memory on GPU 0" );
error = cudaDeviceReset();
process_error( error, "reset GPU 0" );
cudaSetDevice( gpu_1 );
error = cudaFree( d_a[1] );
process_error( error, "free memory on GPU 1" );
error = cudaDeviceReset();
process_error( error, "reset GPU 1" );
printf("CUDA: %s\n", cudaGetErrorString( cudaGetLastError() ) );
return 0;
}
|
14,937 | #include <iostream>
#define DEFAULT_BLOCK_COUNT 128
#define DEFAULT_TPB_COUNT 128
using namespace std;
int blockCnt = DEFAULT_BLOCK_COUNT;
int tpbCnt = DEFAULT_TPB_COUNT;
int totalThreads;
int * id;
//Declaration of pointers to CPU memory (host)
int * blockx_h;
int * idx_h;
//Declaration of pointers to GPU memory (device)
int * blockx_d;
int * idx_d;
// Records, for every thread, which block and which lane within the block it
// ran as: blkx[g] = blockIdx.x and idx[g] = threadIdx.x for global id g.
__global__ void MyFirstKernel(int * blkx, int * idx) {
    const int globalId = blockDim.x * blockIdx.x + threadIdx.x;
    blkx[globalId] = blockIdx.x;
    idx[globalId] = threadIdx.x;
}
// Reads <blocks> <threads-per-block> from the command line into the globals
// blockCnt / tpbCnt. Returns 0 on success (or when no arguments are given,
// keeping the compiled-in defaults) and -1 on any invalid input.
int ParseArguments(int argc, char ** argv) {
    // No arguments: keep the defaults.
    if (argc == 1)
        return 0;
    if (argc != 3) {
        cout << "Error: Not enough arguments specified." << endl;
        return -1;
    }
    // Both values must parse as positive integers.
    for (int arg = 1; arg < 3; ++arg) {
        if (atoi(argv[arg]) <= 0) {
            cout << "Error: Invalid arguments" << endl;
            return -1;
        }
    }
    blockCnt = atoi(argv[1]);
    tpbCnt = atoi(argv[2]);
    // Hardware limit on threads per block.
    if (tpbCnt > 1024) {
        cout << "Error: Too many threads per block (<= 1024)" << endl;
        return -1;
    }
    return 0;
}
// Aborts the program with a readable message if `ce` is anything but success.
void CheckCudaError(cudaError_t ce) {
    if (ce != cudaSuccess) {
        cout << "Error: " << cudaGetErrorString(ce) << endl;
        exit(-1);
    }
}
int AllocateHostMemory(int totalThreads) {
try {
blockx_h = new int[totalThreads];
idx_h = new int[totalThreads];
}
catch(bad_alloc e) {
return -1;
}
return 0;
}
// Driver: launches MyFirstKernel over blockCnt x tpbCnt threads and prints
// each thread's block and lane id.
// Fixes: the kernel launch and synchronization results are now checked, and
// the device/host buffers are released (the original leaked all four until
// process exit).
int main(int argc, char ** argv) {
    if (ParseArguments(argc, argv))
        exit(-1);
    totalThreads = blockCnt * tpbCnt;
    int totalMem = totalThreads * sizeof(int);
    if (AllocateHostMemory(totalThreads)) {
        cout << "Error: Memory allocation on host failed." << endl;
        exit(-1);
    }
    //Allocate memory on GPU to store block identifiers
    CheckCudaError( cudaMalloc(&blockx_d, totalMem));
    //Allocate memory on GPU to store thread identifiers
    CheckCudaError( cudaMalloc(&idx_d, totalMem));
    //Clear allocated memory block on GPU for storing block identifiers to 0
    CheckCudaError( cudaMemset(blockx_d, 0, totalMem));
    //Clear allocated memory block on GPU for storing thread identifiers to 0
    CheckCudaError( cudaMemset(idx_d, 0, totalMem));
    //Invoke the kernel
    MyFirstKernel <<<blockCnt, tpbCnt>>>(blockx_d, idx_d);
    // Surface launch-configuration errors, then wait for completion.
    CheckCudaError( cudaGetLastError());
    CheckCudaError( cudaDeviceSynchronize());
    //Copying data generated by the kernel from GPU back to CPU
    CheckCudaError(
        cudaMemcpy(blockx_h, blockx_d, totalMem, cudaMemcpyDeviceToHost));
    CheckCudaError(
        cudaMemcpy(idx_h, idx_d, totalMem, cudaMemcpyDeviceToHost));
    for (int i=0;i<totalThreads;i++)
        cout << "[" << i << "]\t" <<
            blockx_h[i] << "\t" <<
            idx_h[i] << endl;
    // Release device and host buffers.
    cudaFree(blockx_d);
    cudaFree(idx_d);
    delete[] blockx_h;
    delete[] idx_h;
    return 0;
}
|
14,938 | float h_A[]= {
0.9117131082158338, 0.6972821983247177, 0.573183557716688, 0.8498644086080427, 0.5574788859305122, 0.8662445104449241, 0.5966016422343936, 0.8997603333773154, 0.9805514067333172, 0.8795844012742473, 0.8293624427859163, 0.8797045112509313, 0.6556566613620285, 0.6636148024254569, 0.917274614376616, 0.5918303956350676, 0.8612934057378008, 0.9172922865164175, 0.8727025478020698, 0.6160940532242052, 0.5262503320528058, 0.5967335860782895, 0.8712780546230563, 0.8486133009446162, 0.786083963048618, 0.6612578208636117, 0.5602419131334084, 0.5360874448843446, 0.9011502537737162, 0.5060206434322645, 0.833051911894594, 0.5248941642021274, 0.866539803407312, 0.8959704052846265, 0.7971691355311883, 0.5603232058574855, 0.7967047550637225, 0.6301759748442048, 0.7142937817474708, 0.6898978919652963, 0.8232528259621298, 0.5296580351775223, 0.9029508278813022, 0.5329187667918744, 0.8809100942514079, 0.8973858411398804, 0.9759528736839123, 0.8718281669444027, 0.5941985856177481, 0.743934171300142, 0.7380056161579052, 0.7814985699390284, 0.7987068402040045, 0.7128433960187721, 0.96754208447362, 0.898173791110052, 0.6390795130137337, 0.8265108735143016, 0.593942893189813, 0.6818950485475683, 0.8300251540535419, 0.8459682315758426, 0.6621390223210986, 0.8054707782134587, 0.9382081816137599, 0.9659738531650766, 0.9786059065460777, 0.8642343606498407, 0.9581848831591113, 0.714298617914468, 0.6271937600795816, 0.6705098842448045, 0.6715089652736719, 0.7146530729175631, 0.8949016917943142, 0.7715240276791784, 0.9189620424701823, 0.689235659469217, 0.7849149444452932, 0.6698463848022628, 0.5530005309067244, 0.5730829644414548, 0.5822147339140904, 0.8595768739840872, 0.5769907180608533, 0.9236837207438067, 0.8265874022711812, 0.7377956519524602, 0.9666982974141167, 0.9298089239615812, 0.6540247079224859, 0.7077614941517909, 0.6620214729752811, 0.9264475754950114, 0.7669069976872886, 0.9176234786918418, 0.5292287924540591, 0.5422340416848663, 0.5249757435143212, 0.9562152550958727, 
0.6718642980478808, 0.9656592651764213, 0.7133440840621774, 0.9322470779276317, 0.8612006383786901, 0.9937307465939225, 0.6251153862984817, 0.6782445105361792, 0.6483939164579477, 0.9801667755451794, 0.8698738437355604, 0.5993963978343939, 0.6973021989792896, 0.6805832147386935, 0.550135050948555, 0.7353130046626688, 0.8091624858959012, 0.9611873058517459, 0.7274394089809126, 0.9207936824680554, 0.5138323076390096, 0.958868648729198, 0.6640338453433046, 0.7280607637526306, 0.6417844405692459, 0.978885802860207, 0.7546617594684918, 0.9226796183960416, 0.7612834833779806, 0.8006406522828401, 0.6880657757709938, 0.7259515233483051, 0.6599531038851574, 0.8964826839503124, 0.7389593935795592, 0.5445392130614639, 0.8804064350373606, 0.830394162065305, 0.762619471515628, 0.6265876274983231, 0.6333759234367244, 0.6550839916360333, 0.9340731194615008, 0.8214670904080003, 0.7106545680922906, 0.7232013101671968, 0.8417174480542712, 0.5927768778119671, 0.5474165748819375, 0.562990627704244, 0.67679470810227, 0.5747981926348422, 0.9185493660601667, 0.828931411650867, 0.7785333652984677, 0.554373105435809, 0.632995916021558, 0.9531187483132073, 0.8547131696365022, 0.7206666994270046, 0.6696894823100641, 0.9812543456023349, 0.5155280967821616, 0.9113278571578352, 0.8839809572667519, 0.5513882834084113, 0.6527327726945356, 0.9198708499349684, 0.902004781635311, 0.822762591804062, 0.7138281798387026, 0.8429071259881067, 0.6868164689337034, 0.7940590341589577, 0.7107795862070396, 0.8219374727910723, 0.6526621371274142, 0.923086301363102, 0.6621569927638506, 0.6795632859952647, 0.8556627907346579, 0.9271206908514542, 0.9410902758837463, 0.8385133882397767, 0.8441399306391664, 0.5801549597201194, 0.6884696367894915, 0.6895289097188753, 0.6330246315922936, 0.5155783716188325, 0.7101643447954437, 0.9128288259545843, 0.8981889140186776, 0.8017454597065115, 0.9374271079293162, 0.8998703247625887, 0.5183291556442504, 0.7127596071550191, 0.833876583648213, 0.6292029817202682, 
0.6121740280032804, 0.6129578902400722, 0.9153455681732451, 0.9748696833061504, 0.8300506074714264, 0.8743099106834015, 0.7397987629448222, 0.7654790469851097, 0.5218745666725322, 0.7135345649396143, 0.7644331544572457, 0.5037082590022778, 0.9356275210623679, 0.5169294196469774, 0.9609332078700897, 0.8942366585494805, 0.7406754978990941, 0.8221201093888013, 0.6764465116674456, 0.7227276541454957, 0.7242169000659909, 0.5820600890055381, 0.5023193242146275, 0.7688501592434585, 0.6657626432637656, 0.9317250869291752, 0.5292413487843461, 0.8460811462225, 0.5594989193966602, 0.9062085195675036, 0.9561121507297696, 0.9222628608077079, 0.8254379870655093, 0.9221967517421774, 0.9800933709156228, 0.5502903748635595, 0.6390538083863884, 0.8118149895118931, 0.5564559134082528, 0.7817638575427192, 0.8803874202775333, 0.823264171059982, 0.7524780379570581, 0.9308035475927029, 0.7816271947901521, 0.7749159873380345, 0.8779683130504928, 0.9645292316446028, 0.8828186206457317, 0.98153828285567, 0.8364326123347328, 0.9950571012196594, 0.6295941178131599, 0.8116840331722182, 0.843732195975794, 0.642307345618989, 0.8565346882264444, 0.7375950788971043, 0.694515491392571, 0.7162228708455547, 0.9074782243426759, 0.7252838206552832, 0.6942694364345355, 0.8233773179792075, 0.6626548695607903, 0.5853660449200218, 0.512316689154678, 0.5403060668937586, 0.510015946421374, 0.6685875391315859, 0.7904985099025394, 0.9690255399687038, 0.6712531957241314, 0.7940251013455876, 0.8427738409579388, 0.5989040629743649, 0.8569059902161759, 0.6615373891636924, 0.7137436546581137, 0.908531781555708, 0.735828297992835, 0.6897883885426639, 0.5305533336397195, 0.874082276019022, 0.8987910217134525, 0.8077067416614858, 0.7781016628811893, 0.6396786024263197, 0.98196276311976, 0.9476904502565833, 0.7960324383653479, 0.5950297573151158, 0.5827226960414302, 0.5853237938705108, 0.9907236757307512, 0.585308220413504, 0.8676576977170298, 0.5682739888019792, 0.7152734954393296, 0.6864817184824287, 
0.6048775412848659, 0.7464650056198773, 0.5221095747831686, 0.63966322313075, 0.8051246020951983, 0.5277329046975044, 0.8125376819657953, 0.756519260846497, 0.534009141518574, 0.8070773604971969, 0.8765632072367205, 0.8548539611291797, 0.9511597144396761, 0.6300736047909535, 0.5838235789972114, 0.6715473974821157, 0.7428148144775919, 0.8840923966596498, 0.8105163398729891, 0.9110483537229834, 0.7022640534837994, 0.9267034664132223, 0.6249752494620486, 0.9123042386255251, 0.5460397378815715, 0.9029860291229584, 0.5748977394635766, 0.8840467582284888, 0.6779564278616137, 0.7360507075771279, 0.6046153418575169, 0.7236105778993156, 0.7315259146194493, 0.7097728853319798, 0.9670782720607459, 0.6686610424687867, 0.6167396428825951, 0.8088691638806058, 0.6297450238444224, 0.5582299876050497, 0.6171814241908722, 0.8159156070844791, 0.8058784632657876, 0.6251965202441649, 0.9746224082141144, 0.5406876423673956, 0.9753012689216043, 0.9007677040801918, 0.9247235271385843, 0.9812636616678881, 0.9526291395021416, 0.8470620310949853, 0.9616869626269353, 0.5174232936768476, 0.8278291127191149, 0.6468414348077417, 0.6083341686725887, 0.8928949942752209, 0.8967772453641634, 0.6701466439689928, 0.5810628940489533, 0.9445109782793415, 0.6052682787386252, 0.7762206645964084, 0.5423478232647787, 0.7154870666561228, 0.7216115484225598, 0.8120233987071221, 0.8596480175593333, 0.6134090487447594, 0.5450466553008613, 0.714861067100286, 0.72670253476331, 0.7318527954066065, 0.9571834319179618, 0.9985157426345346, 0.5827243119872746, 0.5180827526694727, 0.9803349134242125, 0.5022109837316496, 0.7595395837030784, 0.9648735760722631, 0.7730357474140417, 0.6966378145773535, 0.5190001977315688, 0.9392281497173127, 0.8435643469937422, 0.6292050624199453, 0.8786053607484201, 0.9612138955178122, 0.7410271018676757, 0.8339568350607665, 0.5021001486175367, 0.7178800564757098, 0.6648960759124307, 0.5115891838473894, 0.9306359677219227, 0.774245460172047, 0.9587172924498377, 0.8605124666857094, 
0.7798140685505631, 0.8254163017214731, 0.834921626432304, 0.6626969635123761, 0.9156088866311722, 0.5168359762386463, 0.8037678071420004, 0.8674767010398609, 0.8873620576056016, 0.6917103600949789, 0.9062926347596225, 0.8995652480213948, 0.8737968192270742, 0.8492630663291614, 0.6015798752561614, 0.876021009199262, 0.8221393797152579, 0.7568874600007924, 0.8638893471657567, 0.9524518337984933, 0.9148779623023662, 0.7033358437028021, 0.9073608556434268, 0.9306259884715731, 0.7040720296734413, 0.6913937323650274, 0.8523386690149286, 0.6212446802336586, 0.6648513619239358, 0.9034204070021667, 0.9327631984117585, 0.8308434904699162, 0.9664858547071012, 0.9855518747025367, 0.8652009772274167, 0.5466833689987955, 0.5219965841022336, 0.5533773220592499, 0.629381458432269, 0.5610403964818221, 0.9551538184770267, 0.907651652660647, 0.8624067507453792, 0.999431419605974, 0.8054706392577727, 0.787874789044333, 0.9874551848469849, 0.6474320066692947, 0.9925584396059487, 0.9315624431340734, 0.8116071050240157, 0.8604883474646488, 0.8884774336663228, 0.7267699925666282, 0.6455223784370191, 0.9751272342466366, 0.5882474771283404, 0.6696119165662218, 0.8135761219922437, 0.7073793269928468, 0.6008490502636281, 0.69629020451821, 0.5996677595719748, 0.7933369320738043, 0.641242463647699, 0.53707922189714, 0.5212498281173237, 0.8401841229464466, 0.8251610963617892, 0.7692433165428543, 0.5365341410925097, 0.8967064731419814, 0.9358741841334408, 0.8596140653214006, 0.6438458012158849, 0.9091440501875505, 0.6473899039701246, 0.849784070472924, 0.664905387052547, 0.9320683112334599, 0.898854703247981, 0.5093112458913354, 0.5512905553679219, 0.7879733209233424, 0.8855543871866323, 0.8469065112550038, 0.5154471112799012, 0.5938621558757438, 0.5097228005379761, 0.8287409900301822, 0.6063906927263429, 0.6935477029836361, 0.7649294471590423, 0.5263275426553782, 0.6493179250760646, 0.7365558300367215, 0.5074466225371626, 0.8513882724310724, 0.916869835516799, 0.7253576302889351, 
0.6095688386021763, 0.7223498319341146, 0.6923479612973947, 0.8279624389449176, 0.7678499314282521, 0.9577589302141362, 0.5623069824663464, 0.9978632073922011, 0.938033559985535, 0.923464425137525, 0.8449443744249645, 0.7073919820675125, 0.9363060901293097, 0.9633649938467896, 0.8982774167115529, 0.9132637695217223, 0.5718768557317924, 0.6619068323520139, 0.9039442849980583, 0.5137442494722063, 0.9188809839705624, 0.8746515178199914, 0.8870637531615662, 0.7210318912985663, 0.8237546950425085, 0.95395176045454, 0.64231381949444, 0.5739124065650579, 0.9768898343036911, 0.5882518600733933, 0.7529845208822437, 0.6160423581039759, 0.8987420704039888, 0.9711582863996151, 0.9309463404970182, 0.9719361161238069, 0.6626069236072087, 0.7847481384802584, 0.7476594949118036, 0.9550734511360404, 0.5146475731521519, 0.9631348657063926, 0.5495702688227442, 0.801438487082668, 0.5748356748937623, 0.8737939121648957, 0.7317300087650757, 0.5654282610626378, 0.8878303645796218, 0.7968424507782659, 0.9425443210190472, 0.5791018200294404, 0.7079414558002011, 0.8547648375987487, 0.953370630002209, 0.8000858213655997, 0.8604763795883795, 0.7215617224228591, 0.5527967556637741, 0.9122657746665475, 0.6576768673547164, 0.967147931142787, 0.8582702229683676, 0.6033249901273463, 0.6614058979473664, 0.5907470738948177, 0.7491017999601199, 0.6121682814547007, 0.6872501699978858, 0.9264801288081935, 0.9358687203295604, 0.7864432030616677, 0.7144838270192744, 0.5143470430428995, 0.8985638334996466, 0.5680830625999194, 0.5999150555366353, 0.9563882647548279, 0.7902843317975579, 0.6176070492925745, 0.5845089163802908, 0.5720075023266442, 0.6546324530448382, 0.7944000556147773, 0.8167911192122252, 0.544804536583634, 0.7987484559516803, 0.9989089893350716, 0.5442280025166442, 0.7018175043526793, 0.804254299585138, 0.9604885005904564, 0.6592836922119777, 0.7589109130103772, 0.7441967095562146, 0.8620546170868273, 0.7467784502497044, 0.7002379034302686, 0.7258077740591458, 0.914790179932766, 
0.5187204435437118, 0.8115350509391936, 0.8803724108225962, 0.8272524772669403, 0.8409583047658888, 0.7614104671292166, 0.962425844257623, 0.8980510200687264, 0.9845388106994197, 0.884288574863653, 0.6537480318564164, 0.5900451817179799, 0.710080479653375, 0.5433509343747236, 0.5256006566491676, 0.5352592673160377, 0.6737069322945137, 0.9533777684738901, 0.5746080826542812, 0.6460998936807902, 0.5994181844530381, 0.7038016660647726, 0.7429234641435829, 0.9102730930203893, 0.8636715994862045, 0.5392803271388295, 0.6913312627257066, 0.7817362541077717, 0.5076225724146253, 0.7042217430052824, 0.5997011627341063, 0.792191367017012, 0.5049576375672603, 0.6209774771011851, 0.7127642178586856, 0.5450606034673114, 0.8659646544058912, 0.9190337040153209, 0.9806909098246632, 0.8215948580772164, 0.9467399644287677, 0.5787200759547935, 0.5694948673930507, 0.8764840275899263, 0.581092630192673, 0.5425211165959258, 0.6119326786482591, 0.8966849309419074, 0.8442727823895906, 0.885189992264656, 0.952213670828073, 0.9485607105721994, 0.5658614172933442, 0.5331679625113908, 0.7059474675059964, 0.5406891551427387, 0.5875038669917663, 0.5170892092531487, 0.9748437248356838, 0.5117502620506776, 0.9852971542459678, 0.5443636111720671, 0.7831291896967948, 0.5206539979747165, 0.5877820480517961, 0.542966510625663, 0.61868913101505, 0.5771286591713112, 0.9368488110419985, 0.8046140709657819, 0.5188371409734627, 0.8199119831017609, 0.8256316464554299, 0.6719683135265335, 0.5988904321574026, 0.9475997734807899, 0.7114926407192835, 0.583434427989793, 0.9739894019134895, 0.8298272197864505, 0.9746479638656376, 0.6893116026631703, 0.9409593853195902, 0.6660979723284104, 0.6624334199169404, 0.5650103839918226, 0.7584611587084508, 0.5689797006582251, 0.5358390043824601, 0.8743857471459426, 0.7509909608177001, 0.9694202991964935, 0.6330998867347574, 0.9191266314412007, 0.6540958508877222, 0.6504267490643716, 0.7972072655746613, 0.8006332505608068, 0.641335559439077, 0.8389314797982297, 
0.5861905025181122, 0.551028885246111, 0.7090827147614065, 0.8683066281172862, 0.9827517524419579, 0.610858737072571, 0.7754291353109528, 0.5815716075412414, 0.8082411495731063, 0.6277542501298433, 0.9061925315527093, 0.5668286365680338, 0.6393619551212062, 0.78096479797814, 0.7211116282683656, 0.6735964739145801, 0.5139358495449797, 0.7735553809309639, 0.6780685331944839, 0.9342755859274177, 0.8515855064841451, 0.6907140769451654, 0.8578086802564309, 0.9208227046951751, 0.9103398956109177, 0.6863367115480358, 0.7690212028574386, 0.7916587610319594, 0.9693564528706748, 0.8970746088979265, 0.9669896931077632, 0.6558383326182122, 0.8923721011425567, 0.9289424817008485, 0.8755143097214637, 0.9534206993423722, 0.5022964201248786, 0.9755615477343329, 0.5846307486538014, 0.5631832800887424, 0.7883076896671704, 0.8688689147300286, 0.9339246081676531, 0.783786131432783, 0.8244244303150307, 0.6973872994768309, 0.7974627381939488, 0.575870964899664, 0.5223092863703546, 0.9030662422796978, 0.9301291527157821, 0.7187224939228027, 0.6405704236344996, 0.6405456977567874, 0.9271949978799108, 0.8851195214772527, 0.6010355454835019, 0.6349497395974739, 0.5466195115042203, 0.9403765062723888, 0.6163810048329253, 0.9290463029914224, 0.9426868977264422, 0.7512923875256705, 0.50448754980536, 0.7090501121432251, 0.8012865324381355, 0.7605968351116575, 0.9412754523165446, 0.7679641166960818, 0.7693939184794927, 0.7451722584892404, 0.5999706036576744, 0.711875904943255, 0.9390165602286218, 0.8248556771436512, 0.7871382748604647, 0.8938867795009204, 0.6894333563129186, 0.7881708368275988, 0.8953151505329671, 0.883367644781332, 0.8688416233315539, 0.5487408903579085, 0.9022945206108084, 0.9231400052624466, 0.5028513289973602, 0.7921897593025006, 0.5319412361898166, 0.8026123353452956, 0.6796190477628357, 0.5640042296481509, 0.8338232020589789, 0.8122330130342349, 0.5212360694232223, 0.9138656683493624, 0.5027869163029612, 0.6224979541311113, 0.647269283890567, 0.9524257567086454, 
0.6152636035514927, 0.9205116371477984, 0.5208446224048373, 0.9215123226028652, 0.7324158449918895, 0.8771750152094784, 0.9953932171814053, 0.6122498581194767, 0.729249656657585, 0.6881041518843765, 0.7321912935978387, 0.7338179590375606, 0.8551260956459866, 0.9324298336805781, 0.7583247952403445, 0.9902201148760698, 0.88304829964079, 0.5911492353736584, 0.9132429876076769, 0.7505216093239468, 0.811825165443746, 0.9458358310748488, 0.5693394823034283, 0.595589413748849, 0.5530351870824919, 0.8406351289353792, 0.6267839828759876, 0.6654510207631128, 0.6442242333086089, 0.5884584068204887, 0.750849524908479, 0.9061528600985422, 0.5408417324932144, 0.8371688143001723, 0.6157736811518695, 0.9257339834131071, 0.7121007591451662, 0.8153509763598767, 0.9620766249064883, 0.7229615867564847, 0.7249099699706539, 0.8602857626535985, 0.6873405535283683, 0.9009046165564092, 0.5698298162367403, 0.782115992584038, 0.7953042065153866, 0.8562807927292555, 0.6532300159630394, 0.5656706881543839, 0.609274588021371, 0.8125138534713936, 0.979245085688702, 0.6890032462164036, 0.7170804270943338, 0.6314898927946899, 0.6133562204338954, 0.773781591543064, 0.5491469819503898, 0.6467041708660354, 0.8838974274630966, 0.8394143926563995, 0.6760969201309031, 0.9232781988813525, 0.661529686100218, 0.6984734347509792, 0.9657292013550487, 0.5306333839600179, 0.5822782122020581, 0.5133178096005856, 0.5355474589533411, 0.5266233908352148, 0.8893331390258277, 0.9602029713559443, 0.8561734841908049, 0.711386970888119, 0.6777850780754249, 0.537085083370462, 0.6105246151311545, 0.9867219167213437, 0.6729946111591953, 0.6059246822228972, 0.5437456870512378, 0.5495715968220348, 0.5016420701065518, 0.8022284343945243, 0.7979181944264055, 0.659505276064126, 0.6626569172553693, 0.9863804308657316, 0.7779821301286327, 0.9864796046590449, 0.7008641544542418, 0.6908459609430866, 0.504971815726356, 0.5099840148892112, 0.6808705522953198, 0.901841747596636, 0.8232227958941642, 0.6808633284160852, 
0.6390856243569658, 0.8514711002402465, 0.5267524361143874, 0.9555299488743927, 0.740995639997011, 0.5204174409532445, 0.7476545629197089, 0.8521489978270423, 0.9465387815580446, 0.5073552982312611, 0.9448418760102903, 0.7060993334190222, 0.5794210949215651, 0.7582454480473738, 0.5388903557715009, 0.6237260771062823, 0.589519314797296, 0.8756570897122045, 0.6250458184039687, 0.5045285066313119, 0.527960241286838, 0.7672961918127001, 0.6104364227872081, 0.8797205567119342, 0.860252264718315, 0.8887096375174344, 0.8015197732314467, 0.9593093290128916, 0.7410713806947918, 0.714597746174625, 0.8208802438120929, 0.571397269251121, 0.7240725310297984, 0.7619530068460558, 0.5836831896896175, 0.626939689417799, 0.6422543879638527, 0.5465673598944215, 0.8039808371416357, 0.9739212750639451, 0.9146001510925422, 0.8352689130193443, 0.6251954485753526, 0.9926864023150718, 0.7262968843544783, 0.6282345894917325, 0.699520422345476, 0.5607667533979731, 0.6675478910853443, 0.5253763277758798, 0.8646215396599894, 0.8631486158465653, 0.8119125729448722, 0.671915743664488, 0.9102772649466347, 0.8190755806770151, 0.7164319197735212, 0.8621245929423784, 0.5443132640501225, 0.5179522181912326, 0.5829651081811208, 0.6410365837816666, 0.8108739535894498, 0.9530209429666272, 0.6457465667114436, 0.5304679263842642, 0.5755804544385497, 0.6738114029233206, 0.7489038378269361, 0.6650430450039271, 0.5144431446098694, 0.8498416369690992, 0.8016346538893002, 0.7959062145037386, 0.8307734612862236, 0.8402281507665659, 0.9182478451017806, 0.6455562863322863, 0.7753117520011501, 0.5745099446325892, 0.5091781238986663, 0.6464667695815675, 0.9000696212181507, 0.6385439302247444, 0.9230376052033782, 0.7758352231873891, 0.8394651453198673, 0.6847738100696454, 0.7857605472872171, 0.6685979847112816, 0.5921775876016048, 0.5910746689779208, 0.5105340902462483, 0.7044168474946368, 0.5261387879853883, 0.73569206096128, 0.9745735461761919, 0.5450210047003645, 0.6464129769971638, 0.6225962317269376, 
0.9325927524878488, 0.9890862921125003, 0.6612810343345531, 0.895902674288396, 0.5265525700452458, 0.5472099326629418, 0.5641129636026336, 0.7121964661275646, 0.573965732526946, 0.5845590646808501, 0.6166808640217529, 0.6459713021707889, 0.8393430121938891, 0.9392217053183525, 0.5422558784225842, 0.8806113886439779, 0.885665371151304, 0.9214661767906664, 0.5454819656697638, 0.5171595011087247, 0.753954952144398, 0.670578290931276, 0.5741027908882357, 0.9848449280072169, 0.6721666016151855, 0.6857719496127015, 0.9808159697488563, 0.7117609246164429, 0.9671932463609041, 0.9725669668846583, 0.5718260305061993, 0.7667048227478063, 0.9289297862624075, 0.9091231057368743, 0.7278529215327345, 0.7066509595152948, 0.8987762199196169, 0.6884538718115896, 0.9757682389712086, 0.5977207525357975, 0.5048327519814881, 0.8834295213476833, 0.8110868789908945, 0.8823016095067251, 0.707717025269311, 0.5588380079281798, 0.669522815044261, 0.8144538518450594, 0.9337121979571628, 0.9344228852614229, 0.9150450963611058, 0.9595532339141697, 0.9231374071637612, 0.561207143915402, 0.6178774429001985, 0.660828132598446, 0.8459914544043323, 0.9947388592116209, 0.6950801744325548, 0.6229758066651836, 0.7892683398829513, 0.6464974559913331, 0.807811511377969, 0.8223965467575037, 0.6472524820249292, 0.9391902165790472, 0.9353427250007872, 0.746252304030957, 0.7130359303371803, 0.7608612428730006, 0.9636830768571691, 0.7124637893303427, 0.6666759641682896, 0.7373090190076759, 0.5755911933583789, 0.9211709869590383, 0.5851659124107701, 0.8096202846839655, 0.8294078671300169, 0.6307228271314993, 0.9065492671749038, 0.6895517537249735, 0.9850990816917238, 0.7995312743140213, 0.9797461428665396, 0.5063947428311628, 0.783731302335839, 0.7517968366296, 0.7475802072700053, 0.977269003069474, 0.7296228960558395, 0.800976103063388, 0.6555789006495458, 0.8092927982062716, 0.9731255398925711, 0.6125695654559729, 0.9824234876827392, 0.7168064531457272, 0.9203501495437374, 0.9791916945911774, 
0.7538056560890827, 0.8997444172245064, 0.5712906985866428, 0.8638628166903505, 0.6398704514315308, 0.9987045960352804, 0.7828240046166008, 0.5225976591109298, 0.5420033085005831, 0.5374290863152686, 0.9084622531618031, 0.564882255287576, 0.9348259670204366, 0.5045102223196187, 0.8190078008125277, 0.8012068924859495, 0.5886651958310962, 0.5764694991129988, 0.6386495826506787, 0.9993839418810445, 0.617182395431271, 0.9761421309285405, 0.8290001290643709, 0.708566938044402, 0.9346227073628164, 0.5860604629113519, 0.9074306960550301, 0.5222837030889032, 0.6946410956559514, 0.9695711469698887, 0.918375097906196, 0.7096304879493704, 0.8875586325822846, 0.6102651508955066, 0.6315377972124749, 0.5076291276187188, 0.9367788908721633, 0.6164092595151263, 0.5448806357224181, 0.7435833816696584, 0.9528782392944132, 0.8678509954205536, 0.5696718020595053, 0.5936990801474282, 0.5516969885185459, 0.6631208554965254, 0.6832565422135957, 0.5503503691027007, 0.8438801952679086, 0.6911602240996557, 0.6386327832307443, 0.8197696543232122, 0.5267207094935946, 0.9744953409544871, 0.8841029418298413, 0.6470744947344051, 0.5411870109835143, 0.8713132485807169, 0.5471940376572234, 0.7088543459769787, 0.9071682732298, 0.7664336931198192, 0.8983957244586591, 0.5664592224852956, 0.5986823584477672, 0.659797754373106, 0.8869827746785428, 0.8566193140428633, 0.8210921129535569, 0.9255329063722828, 0.8952009126305305, 0.5087226567795948, 0.7191050478959513, 0.5416665516341178, 0.7890598476924502, 0.8691518772434743, 0.6923470859547349, 0.8701130581509884, 0.8320750075703807, 0.6130617173503163, 0.6101067023089373, 0.9623287264417649, 0.8163398967930756, 0.8407882166432077, 0.9771534499529154, 0.9194874282249645, 0.5700535709003143, 0.9429713135538764, 0.7543341288072662, 0.5579944618602485, 0.6058119384318272, 0.977057665804174, 0.6665500927132649, 0.5702278934941737, 0.8171701265249988, 0.8365104765229758, 0.9037494638369381, 0.504655657284824, 0.9872878503200976, 0.7736765524921235, 
0.9400823359733657, 0.7202781780926378, 0.5092059122839743, 0.884450767146502, 0.8145325645860612, 0.5179912945961817, 0.7923216821038694, 0.5678747097351973, 0.791291249844466, 0.9020806750112491, 0.8893714978199205, 0.6626288309833046, 0.6482473660367399, 0.7501600545951006, 0.7080837316872808, 0.8637415058005137, 0.722071270424561, 0.7437716391677922, 0.9269158831117792, 0.9726480884411985, 0.7029735618182222, 0.7770092393854132, 0.7461924328937439, 0.9716352600328626, 0.6643781276992731, 0.9670355376515893, 0.8811870628889, 0.8597134106650717, 0.6819668061838804, 0.8563006160719184, 0.8966411968038253, 0.7633757778829611, 0.8996876418667816, 0.7526406233917152, 0.5856884723229338, 0.7559779321692823, 0.66077127865883, 0.8979119885989681, 0.6867267909213322, 0.898172640240581, 0.6103885072188172, 0.9396908027017439, 0.9224632481933689, 0.6333976456895287, 0.8881055920621487, 0.6208761458494703, 0.7344569483981447, 0.7395039830990746, 0.9338551862821944, 0.9463920047094783, 0.9557472474242716, 0.9861912617661792, 0.8573189384292397, 0.8406109501726535, 0.7596861894289764, 0.908995552737613, 0.5352538562509271, 0.6698103633811743, 0.7415733311096894, 0.6101771679434342, 0.8156145366762706, 0.5883975165476629, 0.5173125484828229, 0.5223435260815752, 0.6805467781474712, 0.5573706719050966, 0.6065623911160101, 0.9335477385206233, 0.7071168320243947, 0.9748772947818947, 0.6131564881481704, 0.6180193804600356, 0.5131464280281601, 0.8163370439289858, 0.7923864730541059, 0.6028068781832088, 0.7608735916155466, 0.7808621286338064, 0.531718389683985, 0.5790811959453797, 0.9216449895263664, 0.9703689486892536, 0.5522997542538018, 0.6642270096339393, 0.5510760605280485, 0.7750851714612657, 0.5238467624895871, 0.7950078327383809, 0.7540656717764986, 0.8972835616137573, 0.8132780987890016, 0.8709578373726268, 0.7196998549305644, 0.9585582395544837, 0.6135743838752572, 0.8805465696350745, 0.5044110570524678, 0.649582857433251, 0.7777368631809709, 0.7312463007861936, 
0.9662018437877029, 0.6367550543729785, 0.8568932812330868, 0.8372566650315065, 0.7202215221240182, 0.5320894259077292, 0.7370028484231717, 0.5059147271904811, 0.836676495997888, 0.6695313599351673, 0.56006247735315, 0.8896733329085638, 0.9600339845063524, 0.6222262119079971, 0.7415468374859935, 0.8787994396076912, 0.635668725266078, 0.5513672390470574, 0.6089345853663665, 0.781692928343432, 0.5182888572321818, 0.7279158762751852, 0.8274868164966664, 0.8609444392069421, 0.7098086445246835, 0.7251119825433954, 0.5302130564729579, 0.6631883063712571, 0.8270151859742806, 0.7246392340499532, 0.7718642766116843, 0.7870185082708937, 0.6089745201295358, 0.5134386169513337, 0.8849226524280287, 0.6129813732752143, 0.8804084441645489, 0.8517343375716535, 0.9659071787628138, 0.7894506609299958, 0.6855010260166152, 0.9009859689276729, 0.7133138463206661, 0.5262498322308191, 0.8052040484594662, 0.8545785035407605, 0.535134560175131, 0.8090601593097116, 0.5161105867931624, 0.7473017368659056, 0.9051394255088665, 0.5732967536060753, 0.5788234435368572, 0.9616359101638313, 0.6452829325642745, 0.7292657199844368, 0.6420866492495818, 0.6136050093830377, 0.8115690243893943, 0.6392557755790984, 0.9035793716517875, 0.6625997321174583, 0.5515749413255748, 0.8286776739088606, 0.9624160612133428, 0.6135370203045771, 0.5907088663747263, 0.9771488843432112, 0.5365841670799356, 0.9862898184164895, 0.5001523837536803, 0.8915075095302022, 0.7101185044851273, 0.9945245688848071, 0.7103887295236996, 0.6160533699354114, 0.6615349694096979, 0.6013098058504104, 0.8957619163231525, 0.5957371521608228, 0.7741917703231853, 0.6387244846972833, 0.6972684098324298, 0.8306908644067434, 0.5871148420315415, 0.6914320636861324, 0.5949451749286909, 0.8721113128233111, 0.5794771717821194, 0.6716629092148627, 0.6391889167116421, 0.5362667617022977, 0.9925405908470911, 0.8959475693360051, 0.5300083015479398, 0.5188697212292197, 0.5421598802378165, 0.909596965217463, 0.9502097892246033, 0.7095964647931556, 
0.6061091194328834, 0.8870847528113746, 0.7041439996788095, 0.7150916427218068, 0.9527478807482691, 0.7278014605812626, 0.8166307725265426, 0.9165223408179517, 0.8368006523263264, 0.9206142323652327, 0.5252862196400293, 0.7898771450713757, 0.7137596273546911, 0.9205603383206085, 0.729428054217288, 0.9700352247260707, 0.6444810799822491, 0.6294871276671106, 0.6852710279241763, 0.7261448945029016, 0.9007634427204586, 0.9148609302377981, 0.8581777359373033, 0.9089302011061606, 0.5785971868341782, 0.598220194097824, 0.9020977475600315, 0.5779273710137176, 0.7305604469979661, 0.9262109930259101, 0.791278905556807, 0.7488079821473537, 0.5079589730801, 0.9578730072246671, 0.7676633595216091, 0.7046243123900083, 0.8910904295361374, 0.5119255712625754, 0.702835945182813, 0.8697232275108244, 0.9586810520137816, 0.7338376938332909, 0.6085011279006329, 0.5409749894697571, 0.5272414347901153, 0.7650469551623447, 0.86024040530651, 0.8973866419771148, 0.6819417599165103, 0.726763033009765, 0.9506669965141219, 0.5430009245536762, 0.6339491603491982, 0.9483139538561258, 0.9262831510787115, 0.5636886144669739, 0.6379027442973526, 0.9661171259260979, 0.6099199432989311, 0.7188031363187551, 0.7602801823530216, 0.9346668063159707, 0.7030369451765999, 0.7007014358989776, 0.9670571308508848, 0.9067976207232691, 0.8312094529581271, 0.5243296523925695, 0.7347252233436137, 0.7819597924041217, 0.5646170030980702, 0.943586395171679, 0.7588785901616084, 0.5284677992718521, 0.533498235440427, 0.6342778298036237, 0.579348113345488, 0.7286608244253511, 0.6441574176765404, 0.7564656618869439, 0.9377313211777699, 0.8798462527569801, 0.6491245716344687, 0.6316606948550547, 0.8450335692191383, 0.9260999422837751, 0.6651976422405597, 0.6393251096301937, 0.9948205478035514, 0.7659368501001242, 0.7994645137491192, 0.8482806341575679, 0.6702584521572513, 0.9659782207084067, 0.8867252541456836, 0.9951318758371311, 0.6421701177980974, 0.7777122689574908, 0.9313380468253724, 0.8064990786974247, 
0.7926330786036089, 0.5214132758264504, 0.7750016872478607, 0.7313928942240396, 0.687438022202044, 0.7662045264432871, 0.6537649878035318, 0.7484182067938071, 0.8486067169229152, 0.8748209831721477, 0.6509600775158797, 0.8785662866254098, 0.5798393085892928, 0.5459922608337368, 0.891477490981133, 0.738650351133221, 0.7959174582516715, 0.6008807616543912, 0.632251432513277, 0.9113388646840591, 0.94449802831805, 0.7576112186180899, 0.6418748058133987, 0.5752486983509357, 0.6384207636935231, 0.7428773305325229, 0.5144001420562692, 0.6317580183404844, 0.5597192266220095, 0.8279706399891034, 0.7092143904097006, 0.7112370939703584, 0.5351912687563376, 0.6600799513571297, 0.5567395338808951, 0.6762333550408361, 0.9487427194565461, 0.5181718764105196, 0.5004615026196741, 0.6621657191982938, 0.9886519482505398, 0.510214374285541, 0.6116869959229029, 0.6843940623326376, 0.9474159647640492, 0.92191991977402, 0.6690962649185901, 0.9869318166205043, 0.7344554560514889, 0.6292099867744273, 0.9220450952326186, 0.5304009798468182, 0.7347343411146154, 0.7388160337352077, 0.6639631071545671, 0.8268598283528499, 0.9306289669915202, 0.6911077366819642, 0.7233727257665821, 0.7482845846509465, 0.8753467954510606, 0.747071550073469, 0.5121427982867306, 0.7629882397747543, 0.500774195571341, 0.6411738519417289, 0.7308181760644208, 0.5518738368342644, 0.6029822016288273, 0.5155186884820857, 0.8607465879951304, 0.5939172960918024, 0.7426391279143071, 0.5002926142199678, 0.8149167061273477, 0.5413373192839899, 0.618294406024732, 0.7901636911656702, 0.9844835459591892, 0.5833265984008167, 0.6274119928264013, 0.6894512501832328, 0.5406077138571491, 0.7296735010213226, 0.5102050258162069, 0.612590232284377, 0.9334291350795267, 0.5793483223640161, 0.6317044727590784, 0.5272695866119546, 0.7633960342296535, 0.6611594582306234, 0.9937325950091718, 0.54757294147134, 0.5692861807956797, 0.781007041736511, 0.5120392691537581, 0.9951066962962447, 0.879817559421938, 0.7397044311076151, 
0.8647025552769476, 0.9221158242860472, 0.5151700102023191, 0.9004742770707113, 0.9130854656623513, 0.7282047365316493, 0.831699457420614, 0.7592569286778023, 0.9734692965792409, 0.9947414374176408, 0.9259831719873479, 0.8210732673138561, 0.9855050300243107, 0.8887936373404424, 0.9158921645088616, 0.6653730980407556, 0.7887994157952539, 0.9247218495511168, 0.8897476934939146, 0.5831941984513704, 0.6370040703129876, 0.5516897576823319, 0.9296838392413078, 0.5446093475018328, 0.66024583517971, 0.7654720752530412, 0.9848438823338066, 0.9217919246077846, 0.5292415542953757, 0.9409781487610314, 0.5381901312747714, 0.7075924898611681, 0.5973276213429513, 0.5686794143655707, 0.6420134138645543, 0.9466662097811136, 0.6559122862033633, 0.7391825060255499, 0.6764373232082896, 0.8592082427212728, 0.8793326915043416, 0.5694885544623878, 0.7482556375456133, 0.8122388427538181, 0.7359821989685928, 0.7359274465564076, 0.8645218205610967, 0.9741765019013982, 0.5790900902267039, 0.5301213445564302, 0.6212430907479574, 0.7628246597953192, 0.5960766143460934, 0.7789079794617051, 0.7329228733965981, 0.7522225950228654, 0.6777331560136248, 0.761863851052677, 0.8044355059638414, 0.8096714892053749, 0.9044212141872665, 0.8523160673903625, 0.7039929435794603, 0.9599365904964454, 0.9964204369046028, 0.765549250220344, 0.9307138148199746, 0.6558700703634018, 0.6465688738149196, 0.8346231503395118, 0.8186596263689812, 0.8386010300082709, 0.8859029039913691, 0.5054908374919, 0.6378849540063674, 0.915994788788518, 0.744399333233233, 0.8653916432034204, 0.9202082283500614, 0.5349566098362091, 0.9828025035998549, 0.8693419301938772, 0.7760930618881561, 0.6429368638550143, 0.7325496183775506, 0.5576718753197265, 0.8008124750024921, 0.9613863068903692, 0.954311516171762, 0.8583876384567122, 0.9742459277037434, 0.5514995425878877, 0.8508788431601979, 0.6351320097117088, 0.9503580227616334, 0.5441589637924653, 0.7537775544074874, 0.665527622205253, 0.937652324391151, 0.7368751957401164, 
0.6318566865872619, 0.5363366463176373, 0.9371927566286551, 0.8711393660944285, 0.5177055452797373, 0.6503786876408455, 0.9104242953639083, 0.7324542179986163, 0.9991393950184667, 0.6500821483206173, 0.5258422252756934, 0.8431115429756038, 0.7115498776423919, 0.788678884371494, 0.8835247623802949, 0.5635290795440299, 0.7063333517433823, 0.6613611921227491, 0.6338311456349824, 0.5978433792484775, 0.94475323627176, 0.509572426842785, 0.5597018077851474, 0.9174502094932052, 0.959879108537489, 0.6727019315123772, 0.581936278525643, 0.9207976445747237, 0.7663620174565444, 0.63085914199001, 0.7366015330394524, 0.5408914922273664, 0.8374863943461346, 0.8709777325671206, 0.8776762937834375, 0.6127995181184822, 0.5121035285420246, 0.9143402535035146, 0.6327418762525308, 0.6464389382802675, 0.9053343480184951, 0.6104493099774435, 0.6974485468311562, 0.9229062866270593, 0.8168596501401689, 0.9204630786171579, 0.8382952495743218, 0.8298859222730329, 0.8244040667762107, 0.9314545562066838, 0.9889694466575363, 0.9845947598334523, 0.9619953824775193, 0.8786190420538356, 0.8395599093058796, 0.8887099021992149, 0.8756655235153179, 0.5172501789476818, 0.9430298401982858, 0.5207201323027216, 0.5679536344644462, 0.7094902700722137, 0.9730763527028263, 0.5397400432823369, 0.9257109772009477, 0.6086913836815193, 0.841324207929149, 0.8540616758205251, 0.5653894041678302, 0.8665259131211566, 0.715404737803999, 0.5694242370423457, 0.9327450841616405, 0.6548740664084342, 0.7162123615136287, 0.5709466851357288, 0.8974721425359268, 0.6631441213974449, 0.9202489541545201, 0.5761053811596049, 0.8324416924085469, 0.7093188253200912, 0.9601616520805082, 0.735512550001481, 0.9034214339270576, 0.5697904721672935, 0.6979378558857507, 0.9159466311668876, 0.70156397947326, 0.6051864458580928, 0.862890566080653, 0.9461995083114559, 0.8921890393162368, 0.5883286518302808, 0.6327926408149176, 0.7679902836578274, 0.9007276758316194, 0.7265968706700476, 0.7966523456928198, 0.9753234791128871, 
0.822968642979, 0.8376155966006205, 0.9355818381283544, 0.789446746280663, 0.9140932459731126, 0.7879274961433962, 0.5432559396199093, 0.7037421692461822, 0.7084987790830862, 0.8989385156640396, 0.9703228201792009, 0.9274994952146693, 0.838141154534076, 0.731322745147974, 0.9727065969132751, 0.8283249594116397, 0.5981098098077642, 0.9385509698881123, 0.6126492164299796, 0.8066789612541012, 0.7501186008977022, 0.768058591987169, 0.9159231631175637, 0.8808114112325913, 0.6074175296531084, 0.6770873777299049, 0.5725242980750178, 0.9083335068277032, 0.6970670158064782, 0.5696435970703428, 0.9385749077785669, 0.5751063197391822, 0.8444044095031914, 0.5924861477864993, 0.8357376705352567, 0.5859065301492004, 0.5503779989403735, 0.5501041410551293, 0.5758465707117981, 0.8621714488596143, 0.9753081934389045, 0.6528073527602789, 0.7352102805237959, 0.7019457035556098, 0.5819665108941119, 0.5085358174010289, 0.8384042180712403, 0.5972225159170552, 0.6843841865110174, 0.5752604512763924, 0.8579536353600529, 0.838313817237242, 0.9016854772117202, 0.7827385027735636, 0.9249160460050264, 0.8421991092335148, 0.8176948811704787, 0.7291884535329989, 0.9558929553284456, 0.7398442880980205, 0.7381762106869187, 0.902508752418284, 0.6440430249974107, 0.5053240840032868, 0.9060467753958983, 0.5529778881105345, 0.5221891506648402, 0.5658319557970659, 0.6904952173887265, 0.983557539880956, 0.703059938695403, 0.7864631981965812, 0.9390723528176639, 0.7578574345455065, 0.5169575227106752, 0.973042528323045, 0.9114476164298725, 0.5885173049696427, 0.8039880675242197, 0.7011327046175186, 0.8061696719367543, 0.8930894647879384, 0.8197711428363138, 0.7031907505012032, 0.644732671345693, 0.6555100009379116, 0.5278778455579404, 0.6230458240830157, 0.7357833484730982, 0.997636019478054, 0.8708266687668595, 0.9667800326642133, 0.7827859324092287, 0.8798862072960628, 0.588580621656273, 0.600719771275384, 0.9694679979888845, 0.6537423067657575, 0.5660410249155972, 0.7115300250989993, 
0.8954300538688338, 0.6074031296710052, 0.683495843491728, 0.6938293590412727, 0.6017353116264819, 0.8494550223564201, 0.8597987684016346, 0.7769340348480172, 0.7961466982431145, 0.8223567010350301, 0.5324521870515474, 0.9951321003941033, 0.9489497235857864, 0.5573505414176946, 0.5893589968478077, 0.9312027875602257, 0.8477525021007967, 0.9825385518981657, 0.5393337450336277, 0.5661954607362805, 0.9556326840558158, 0.6184903054555217, 0.9771658152654747, 0.8330471997710719, 0.5379006913442548, 0.6390226287218141, 0.9296315960503123, 0.9617212863703721, 0.7518453054835457, 0.9612464110376522, 0.5072812558026272, 0.8701738652007194, 0.9557585338156201, 0.7510932685127618, 0.6900154061912485, 0.8075122892919554, 0.7357122668245122, 0.6099466391699286, 0.570556557263889, 0.9763636401795874, 0.5251207697186017, 0.8370614488876615, 0.6399358095338066, 0.625450976556966, 0.6016839187653553, 0.6341953878352571, 0.6065000424309623, 0.6795832027534836, 0.5534942793409121, 0.9136222551276181, 0.7274951179945964, 0.5288913039635822, 0.8773153772081279, 0.8769575609262878, 0.7166098698117984, 0.9720439409844187, 0.8486069757529666, 0.9304989446507087, 0.8295928173648006, 0.8507929335021998, 0.8522734584350214, 0.6866866204466875, 0.5782461902216267, 0.8669718169430886, 0.9972963541841617, 0.5666885356619548, 0.9875017591603088, 0.9376723114867791, 0.9165550202588068, 0.6140441532242706, 0.6352239780465083, 0.6560362617023687, 0.8923964136105684, 0.5102919168853361, 0.5882872639507079, 0.9675195826526258, 0.6287200351719249, 0.9850259046041785, 0.7051809646749834, 0.6579219740704974, 0.8102669639981048, 0.5959497972981405, 0.7930245742376373, 0.50438669324804, 0.6353120567105703, 0.6101606024136184, 0.8103860126586372, 0.9393682178293181, 0.950725610869503, 0.7169910705983422, 0.6828185423590598, 0.9679285434885132, 0.8413310305850624, 0.5808128907606049, 0.5749513937055823, 0.5007733251036397, 0.8171774331474404, 0.6596738671155303, 0.5637082302081705, 0.8688124788148357, 
0.7874316608781375, 0.5722996283635833, 0.5391031540166148, 0.7287093657201854, 0.9459086932265035, 0.6659181816178712, 0.7015306125803231, 0.7066192395012494, 0.9337894535128524, 0.8473583079828251, 0.5506062835495595, 0.863258566046084, 0.8681786244328247, 0.5540685816443136, 0.6627782709552864, 0.5959867674170097, 0.5952098788019575, 0.9064085029223798, 0.7584477880159739, 0.5808263631390345, 0.7556238193300462, 0.9339978064049992, 0.7152033781474237, 0.6683286168445954, 0.8001902122538398, 0.5314509649695764, 0.7959885507434876, 0.9070707267185254, 0.7431973194842467, 0.6227868299487862, 0.7284710411523367, 0.5438631438289598, 0.6833243796789372, 0.7055166685851013, 0.7067376546736147, 0.5158456187826543, 0.971895672075245, 0.6296151207589493, 0.739643732517623, 0.8585854730686151, 0.892245063856952, 0.9732089122482046, 0.7705795588931799, 0.5710183190220923, 0.9672733224051095, 0.6279898714037779, 0.9598008918638709, 0.7538484136706858, 0.6126457908411187, 0.8035553514213225, 0.8252997753926599, 0.6959203106520104, 0.5109017938950111, 0.7389911982614144, 0.8610606063084685, 0.750355407219808, 0.8444440358126509, 0.6882805000709005, 0.807297669125876, 0.5970873881009486, 0.6680015404929311, 0.8269317974246868, 0.7151732986908705, 0.5856219550693136, 0.5783234901804879, 0.5609232511743458, 0.6130987534259558, 0.9854138715203682, 0.8088206661739374, 0.5127878665838836, 0.6382216031742931, 0.5844639526454609, 0.629982492018003, 0.5101170357918725, 0.5423581921772032, 0.5004075858787294, 0.8851466264673858, 0.6227910824876339, 0.519075253067367, 0.831579151699277, 0.6802507123188151, 0.905199917248785, 0.8463194395456666, 0.8602407967013429, 0.5043833297148631, 0.7433635902051932, 0.6053033558065001, 0.5458579901266514, 0.6692171864281393, 0.7472802012867841, 0.849314521985462, 0.505444475675942, 0.9872692046855027, 0.7034103294143697, 0.9965072393394823, 0.5349841015300736, 0.8490724265702561, 0.8591003286780976, 0.5418584712326237, 0.7111852088235161, 
0.7082846138252628, 0.7620853804242078, 0.9091351985439622, 0.659062482680961, 0.7842567543444745, 0.6637565403797662, 0.7926915311489223, 0.9799831346617174, 0.5530592574228368, 0.9682710590018472, 0.9016788312251606, 0.6598357338806196, 0.6978582367433873, 0.9284971958939174, 0.5896891413479881, 0.9197799754353637, 0.7765125660979828, 0.8110321214635705, 0.9189204786053679, 0.6014177688074414, 0.5424807912460272, 0.9366539266993141, 0.9418887768404383, 0.7860838815885267, 0.7787193072037981, 0.738847711825818, 0.9729691231709369, 0.5945263853471472, 0.7150954457342764, 0.7674944527407699, 0.7716376708429957, 0.9790472854227235, 0.5581686401417851, 0.6360687403900576, 0.5200328513121228, 0.6949581353575891, 0.9727087893818225, 0.785328190037565, 0.6796555151149868, 0.9749994344518176, 0.5219893442679672, 0.8229606548341881, 0.7185191536514707, 0.6393620421029134, 0.9825336188909322, 0.6657941500638132, 0.6332235999394313, 0.9348231519385022, 0.603634744020145, 0.6449698143727179, 0.6119039418167005, 0.6799077511136906, 0.9117500615645879, 0.5906838324793512, 0.9165072334631731, 0.6711089734232132, 0.7083434748846258, 0.5889104522326649, 0.7483795936192064, 0.8796165173698418, 0.5359887366519032, 0.8492184358347137, 0.9627960332082794, 0.9716879598512351, 0.5677930561514426, 0.6611890894639989, 0.8105336716126801, 0.5099360170686286, 0.8644114053098445, 0.9306067027719704, 0.6874696894833594, 0.7643067953718352, 0.7160866928838794, 0.6057315509365363, 0.9284103957949927, 0.5969777194701027, 0.8369490710202645, 0.9648619283193631, 0.5386792997143937, 0.8933102651932128, 0.8339862718778019, 0.5182961197263987, 0.7789259208777894, 0.7703217211767002, 0.5172955220752304, 0.5953729107623253, 0.903390620782988, 0.804265413702697, 0.8442819738802814, 0.8420210697513422, 0.6548166053727421, 0.8667658082513278, 0.8317623327178869, 0.6128396549713677, 0.5743397482780304, 0.6405034747368623, 0.6605926301233531, 0.7815557697926654, 0.6891701323585505, 0.558058655891171, 
0.6341543903957125, 0.9752660892323604, 0.6366041299591616, 0.9451154372212408, 0.8852500910085068, 0.6103900828974773, 0.7022656303823, 0.9345277934667034, 0.8668093557676477, 0.7248855602754924, 0.7520109851820902, 0.8378318472264072, 0.9292336998523596, 0.5888689110428589, 0.9619912959012108, 0.7960865201222216, 0.643268303214386, 0.5271280317353682, 0.9667320343237715, 0.6229235862313993, 0.5647823378556648, 0.7109054085802013, 0.9055425249481068, 0.9428223953484527, 0.6097599575859807, 0.7274502901665783, 0.5127179981029679, 0.6461786931032723, 0.8487634242972729, 0.8067610134668227, 0.5079567958824718, 0.9844422462270312, 0.8715621168411208, 0.977160659946969, 0.8054198330735252, 0.7834027984246673, 0.6460471722407152, 0.7599760845845338, 0.6387565538596767, 0.6785498429767396, 0.9310272471579495, 0.7816259038277069, 0.5542538923590762, 0.8771684205247392, 0.8406260034830018, 0.5092623369496949, 0.8643545086760425, 0.5267734193306205, 0.7997693348348501, 0.5120328645962331, 0.6967883049823493, 0.9684347445273569, 0.6047815067613145, 0.8028671641177207, 0.9401048009796287, 0.5929390594259971, 0.6949565823063337, 0.7148533328783462, 0.6837705190044447, 0.632110905881472, 0.8723282437142454, 0.706054649689494, 0.5864458952833709, 0.5911261853446697, 0.736539838397079, 0.5441075656317115, 0.7122097375958563, 0.7800575284771025, 0.5709382711118394, 0.5084204693405772, 0.5151852336545809, 0.5606618986558474, 0.9972462821139383, 0.997742876683436, 0.5617807674976304, 0.5127801818532142, 0.841308925957545, 0.6506951959009267, 0.6102540555558307, 0.9384699032768846, 0.5787817011079528, 0.7554596068243331, 0.6831013791789411, 0.8171301519124785, 0.7381148447931098, 0.5259498622021546, 0.6619404286877032, 0.9299931420637733, 0.8122470233062495, 0.9944791900706008, 0.6788672996661433, 0.6934496660463991, 0.6266301150232674, 0.7093644670530725, 0.7547220862187429, 0.8140688909012761, 0.562429688258862, 0.5150035853254269, 0.8165240750589118, 0.8405959832745797, 
0.8832915181218157, 0.725120554228045, 0.9523255397866591, 0.6452628745292943, 0.7270121455053007, 0.9813996723278327, 0.791708997660429, 0.8436986927982657, 0.7445256909136397, 0.8790455553670576, 0.7267964387084234, 0.8940277762225182, 0.5835828665159228, 0.7968385881571781, 0.717149643685282, 0.7998110125370508, 0.5546194146498218, 0.9924283407066791, 0.7603664069363592, 0.9323163044193814, 0.614270298334985, 0.9917301655466584, 0.6248137797955202, 0.908876285056176, 0.5712560553630589, 0.6725580729563817, 0.6289087435340539, 0.5743692384932169, 0.9071353400892568, 0.5244339632093492, 0.8642465124461547, 0.7077647565594768, 0.6482303391823889, 0.7215737311457638, 0.8134031142629217, 0.6396546009352713, 0.829903455718862, 0.8854968217543557, 0.7348179375982213, 0.9410469011815634, 0.6460814713129573, 0.7316044533028909, 0.5396896866183263, 0.9971669510492561, 0.641325493373184, 0.6233678684634254, 0.8282811306788714, 0.9013918374411951, 0.5398709445694783, 0.7883518253153325, 0.6325269003306753, 0.9660642002521534, 0.6086120718206425, 0.7619493363361889, 0.6852903371870289, 0.7758624334320758, 0.7129366756512594, 0.9310588665294629, 0.9573347237109482, 0.5888693140155152, 0.6826046990828732, 0.7353806508826612, 0.9548446959174468, 0.6796452752608475, 0.751544852048629, 0.7990845650899008, 0.9713897649369628, 0.7071624296205794, 0.6694001768503222, 0.9351960213982643, 0.6636697869049569, 0.9989006543581652, 0.5984483102822267, 0.7574052458393453, 0.6524923461417808, 0.6521217998936585, 0.8557134816947245, 0.969760289567289, 0.9938699900331733, 0.6378675965891907, 0.5949120186295166, 0.7656175683830172, 0.5738745997025114, 0.7496573696267952, 0.632406161727706, 0.5455062478852111, 0.6888847930976744, 0.6650898007289252, 0.5344179933098772, 0.5103853972164105, 0.7267807182321168, 0.708734237494279, 0.5801517137328485, 0.6308029413719012, 0.8590072140213697, 0.7290469341672267, 0.8109521402880577, 0.9449783713324269, 0.9837224091316691, 0.5670202991473428, 
0.7908755494658324, 0.7893554878337105, 0.858297180210155, 0.5480564527377123, 0.6697361690686205, 0.8731377731229387, 0.6750875248361605, 0.8752000616288711, 0.9733035889041985, 0.8297420715366552, 0.6690946240133984, 0.8682561020702427, 0.839530040270758, 0.6259819368685533, 0.5361692229781668, 0.9921874587618463, 0.9076724219864094, 0.8428952554598647, 0.9425811198610712, 0.8295749086544169, 0.7924962254463744, 0.9454080651019103, 0.6368099692190942, 0.8438230225173589, 0.6949793547734195, 0.8517562068816338, 0.6842425833279373, 0.9926568239568037, 0.780880310998765, 0.9833329702474903, 0.7840775786961327, 0.9713017812613896, 0.644381600572979, 0.8928756042712946, 0.6043173279597824, 0.7187458199198345, 0.9376735117152578, 0.7225799571329565, 0.8900230261834301, 0.7908080679866781, 0.6377376166659615, 0.5430062320991002, 0.7513843580345363, 0.748921286583556, 0.6780402181408091, 0.6471409978932596, 0.9391677943139398, 0.9661566111473001, 0.8786843307690351, 0.7462278557413843, 0.9422920304343492, 0.9965834776719678, 0.8417141835623339, 0.9527622911868742, 0.9191801636598358, 0.8848448735686392, 0.803936878534016, 0.7438915533049002, 0.7566089224739301, 0.8434064389050414, 0.8228164711664512, 0.8037581530153839, 0.899305433853733, 0.6231845061113068, 0.8151811317123676, 0.6884370484477949, 0.6190942466280001, 0.560011065623105, 0.53553049753817, 0.8262469067423979, 0.9579083878719611, 0.9149453298792888, 0.5165305290901661, 0.9380515718139641, 0.8367641399268705, 0.502310474159356, 0.5550742057932364, 0.6350754282789283, 0.9316657239395136, 0.886668354982382, 0.5569653360683909, 0.9547890282827962, 0.6829650689884299, 0.7044800919074115, 0.7480726268417677, 0.6011096147564139, 0.5090726963177336, 0.6366020277754112, 0.639499742008534, 0.6514577925974896, 0.9499882559819846, 0.8994554245015214, 0.5068449260923565, 0.8240692481484443, 0.6033938013594956, 0.8412428060337966, 0.9234766177937124, 0.9407847177157584, 0.9621435346620935, 0.9935519869895502, 
0.8495933218296547, 0.6411490569023017, 0.6039229171025517, 0.8459285701049444, 0.6041657263560849, 0.6969390990397075, 0.8523693327183971, 0.9318060988006447, 0.6316563838767069, 0.6880814020840533, 0.7944469453603433, 0.5543564859660306, 0.5320098537407081, 0.6106578002295473, 0.6749521115860406, 0.8935187490316798, 0.6507367218946197, 0.8517768095460534, 0.5264830475829403, 0.5383638959831047, 0.9148787379300303, 0.5605567837112215, 0.8979858340647444, 0.5449253771477082, 0.892382610817541, 0.8513449336746958, 0.6074240604908787, 0.5154670445266338, 0.7335432618786627, 0.593740108376386, 0.6899429630635726, 0.9198873802192493, 0.7409021979147139, 0.6158137217814559, 0.5520540570250273, 0.608814492346851, 0.8496019383702245, 0.5782906922405713, 0.8402190233560886, 0.987465189945946, 0.6320879606559741, 0.6757894613077764, 0.5053820996924938, 0.5020766763685768, 0.7700671477447265, 0.7003288594416552, 0.9218412161040234, 0.5424542859033145, 0.7862794034735279, 0.8963419937227335, 0.6867273780032622, 0.8945327309930158, 0.692272571281817, 0.7435420662207092, 0.6477043377215537, 0.9839120060731588, 0.5145553534124041, 0.6666401130317234, 0.7941055801993664, 0.9800787322655942, 0.5875388507618245, 0.9293860675885307, 0.648045599017232, 0.6266196883835327, 0.6210016209784552, 0.6730416492427258, 0.5791290023447846, 0.8698244613125297, 0.8010874795260804, 0.8951608441045675, 0.8662505614038594, 0.6360941711177233, 0.8278690257924923, 0.8219998671762556, 0.6674251377034941, 0.840080473451714, 0.7532110350138126, 0.7052994324864827, 0.9203568617174869, 0.6318873640025131, 0.9048720649506885, 0.8427837000822971, 0.8190602851290368, 0.9868009918413474, 0.7358172079926963, 0.833131602457557, 0.9687977809529209, 0.7799041511089145, 0.5406431559820759, 0.6109843286368559, 0.7313812292445485, 0.662206551153186, 0.8437995200634014, 0.9683340764356898, 0.7052446150290452, 0.5820871544219547, 0.8704903280140899, 0.6628053295626686, 0.9320148964969379, 0.9613955866428632, 
0.8098316222336082, 0.6209201179501284, 0.7638477776911428, 0.904219327779097, 0.7175661621686278, 0.8949699412521154, 0.7143972675536279, 0.8802645107055089, 0.5325383192375319, 0.6015379475845717, 0.7481470769623499, 0.6492032396732073, 0.8913648399567062, 0.8544514564481929, 0.5613127618435554, 0.9943764089252993, 0.5213676692240508, 0.9300892768108289, 0.6197737053096912, 0.7236591356565584, 0.8606587908275618, 0.7560541047151808, 0.6652471962420801, 0.7886372278144268, 0.7113942307448392, 0.7438606519457287, 0.7474726368363189, 0.7378530448198987, 0.8966463532967006, 0.6130106604578178, 0.9993770221595466, 0.9503057638934143, 0.7081901575903902, 0.7674535613269795, 0.8624396197341881, 0.8833064602115592, 0.7844778147599839, 0.6676399124988889, 0.7388972338850812, 0.992785873269145, 0.5743165142623547, 0.9345369776607719, 0.7272415659848727, 0.6780453939977846, 0.9798669540718579, 0.9163054751682488, 0.6589728233870477, 0.791079640221319, 0.5873982024222639, 0.8048224369021607, 0.6607196045883257, 0.622307725654861, 0.5449504429442196, 0.9627089558772852, 0.555638417494535, 0.9208619536865197, 0.9371691337706582, 0.6682538336814989, 0.6605084828451069, 0.5430179678668243, 0.6693477568776697, 0.7828389671379653, 0.9476957746376629, 0.9629420058505971, 0.7663533292390408, 0.786551104694574, 0.8594354600044041, 0.9721594311644087, 0.651024227265711, 0.8870615134513165, 0.9659606327382472, 0.6081597742138667, 0.9840012999433039, 0.6247338448955462, 0.9554494655792357, 0.5851949328478132, 0.6223973889078392, 0.9387377582474659, 0.9961224813131448, 0.9883813834114126, 0.8602972108172182, 0.5494173017068122, 0.9479589553325098, 0.8918157664290598, 0.5048324544168501, 0.609410161828329, 0.7006827255644968, 0.7955620352216419, 0.7742230121313938, 0.7528446126235633, 0.9525598385292182, 0.831341396006102, 0.9792098541064095, 0.9131069126352391, 0.837746184814313, 0.5988015012638948, 0.9514722159812092, 0.971947531335583, 0.5375463949343504, 0.8111762068635315, 
0.5694669307104901, 0.8606226250249773, 0.9498732187239136, 0.5575164202676353, 0.9579734317702999, 0.5401179260797071, 0.7898571216920957, 0.6802955740604313, 0.9919702168427689, 0.9495064198041144, 0.8678690153438423, 0.7390655750471129, 0.8093390360733734, 0.5038311048524131, 0.6971483155529838, 0.8606048750481643, 0.9121456285455163, 0.6305595637056334, 0.5443445433122436, 0.8406924142644047, 0.515548489369525, 0.9314932607802956, 0.9932114163697914, 0.881545989650043, 0.878711870198619, 0.5480910699780017, 0.88752660431689, 0.759482685652058, 0.7565013178024751, 0.96894945944822, 0.854507875069178, 0.9900656013852194, 0.5408354298978302, 0.819349246697407, 0.526586131460775, 0.5646785871669624, 0.6397024441056931, 0.6849158294969159, 0.952502903282055, 0.6532653293010459, 0.7100321227930273, 0.806549681611486, 0.6684332341429814, 0.6484873092151845, 0.7048905283965191, 0.5816739681542278, 0.6962994561621297, 0.612457428942277, 0.5963027286376036, 0.8095079766575487, 0.7325620139884911, 0.9313024803347258, 0.5208863463977987, 0.8892679469867699, 0.7098383502867168, 0.7471209949455822, 0.6874333904324206, 0.8206214062995574, 0.8896545435859842, 0.6058365784379136, 0.7291474197405295, 0.9941526695272432, 0.6141587390298016, 0.6075800062575631, 0.9597896150937059, 0.8648036137313588, 0.5007932863002094, 0.8978161590848615, 0.5009017516127754, 0.7799662267700782, 0.9844484227041179, 0.5221708330104731, 0.6834686745005676, 0.8717392525452232, 0.6893932696941687, 0.9781845417814297, 0.5396453798137364, 0.5924144573861907, 0.5724082518528384, 0.88195664003502, 0.7524397827847389, 0.9219618750297074, 0.7679578430070448, 0.510197611095761, 0.862989520626851, 0.5157661713124144, 0.6987271114825062, 0.88490208617356, 0.5179985074711282, 0.5153553806361951, 0.6303045127730516, 0.6593488630045234, 0.9985073847431254, 0.8470698086925129, 0.5953259701853258, 0.9443725616795922, 0.5114528010537297, 0.8965196350650799, 0.6943204803502464, 0.5349897098852755, 0.9920764864383718, 
0.5781942141960008, 0.7716266118421512, 0.8086710236078691, 0.7548996177453922, 0.5555265338180289, 0.6679706854718896, 0.9808486809049395, 0.9688578383979582, 0.6602990237274425, 0.6028346172189445, 0.579118545306484, 0.5194077373570819, 0.763278004786395, 0.8208479274238301, 0.861652281311456, 0.6310496353866875, 0.5892996294433512, 0.7399607445498804, 0.5418646959507538, 0.6066101691875865, 0.75039107557035, 0.5952085657593591, 0.6325980533664836, 0.7283322954940883, 0.7171822562243053, 0.8684375895587906, 0.7461184509654807, 0.5401189515866246, 0.7254419644335428, 0.8154875108778341, 0.8939336879516628, 0.7151678251469655, 0.5358686265339778, 0.5023165912526957, 0.790381370483696, 0.6845079453255514, 0.7355666789458357, 0.8684124598145857, 0.674623713686552, 0.731910414784974, 0.9554862747782967, 0.5797038291635124, 0.6445841353705555, 0.5321194762095535, 0.8376204685577009, 0.93172771318994, 0.8776921785831638, 0.5912719324117577, 0.9231804008692182, 0.5262646240486535, 0.6516652076032928, 0.609815234299899, 0.8044411311496866, 0.5897234954807922, 0.8871949459000599, 0.7196315874439043, 0.5673485243207782, 0.5750168762771448, 0.9456078097211396, 0.5171792210859705, 0.5716193903626375, 0.8961486831211304, 0.7760632182048263, 0.7631535932162359, 0.6343877410821557, 0.9161734651708116, 0.5810671662028773, 0.9443865094774526, 0.5084224255730371, 0.6522140602494321, 0.9913182201506894, 0.6478605072847139, 0.5754064933538499, 0.6836411987179637, 0.5791995019613638, 0.6501144112983805, 0.9823760942874591, 0.8143813895499259, 0.8282192928391217, 0.5267663697113856, 0.6866963171694468, 0.676626473256139, 0.627366901586235, 0.6775607783570656, 0.828050177442536, 0.8023612795314107, 0.702688217950159, 0.8100609153542145, 0.9366558343148117, 0.8161012095612364, 0.7818672051120796, 0.9408051810269649, 0.5037981352177684, 0.8557068524211067, 0.90966519619055, 0.5673544675408875, 0.8252437038075076, 0.7980009939248142, 0.8943481458024605, 0.7812523941579741, 
0.9747482355487037, 0.9723053810109338, 0.5412835151983708, 0.9918220793544884, 0.6833582488959182, 0.6335952919539416, 0.6983646134729773, 0.842549230973156, 0.5511091677008179, 0.6493106774650428, 0.5823767986524176, 0.7040714752429106, 0.8069422720842305, 0.9175786867361432, 0.9159012310493926, 0.9235793962881622, 0.8010769692085089, 0.6939130255227217, 0.8585666210829535, 0.5583436627573259, 0.8434183980162457, 0.9666829385398344, 0.9269304277807353, 0.7961354427830265, 0.8567610060067322, 0.6667690036703641, 0.821822826461783, 0.8137455456462152, 0.8453555067310878, 0.9046033492714812, 0.8421415998840405, 0.5934412330152703, 0.7862611233124348, 0.6182842101585307, 0.988165142369096, 0.5029910415123977, 0.7131632276049984, 0.6947725411346914, 0.6278673284080482, 0.8937281727041599, 0.6901509789882536, 0.8234528717453412, 0.576341718701682, 0.796252077698477, 0.8808902811528543, 0.8487829516815013, 0.8162534940377812, 0.5913801225961042, 0.7981985871069528, 0.6292488569305106, 0.7099435338335689, 0.8311816267677343, 0.9021844036607475, 0.5526340866050102, 0.7016123176124778, 0.5846734155181261, 0.5584196397510086, 0.6211165772316318, 0.5859467598504619, 0.7728615678785171, 0.700319113945828, 0.9133989661645936, 0.5255989501712803, 0.9916352599884495, 0.9579884526967166, 0.5537697864744603, 0.7221066647382148, 0.8260710050840286, 0.7263735769367738, 0.8956177176079159, 0.9752731278172626, 0.8890143268535842, 0.9468579406759121, 0.9410992590349838, 0.8936194279459304, 0.8951724144607092, 0.8195454054609856, 0.795961856152093, 0.9794437598499296, 0.8293155526746885, 0.8762922482660078, 0.6987755521604063, 0.6988167446188515, 0.7594931781763568, 0.8321079693308266, 0.7013693063155163, 0.5631853957373092, 0.6925279998369638, 0.7581871975171601, 0.6328619544689171, 0.9563632842104044, 0.7385345999446501, 0.9938231049003678, 0.8516196184988958, 0.6079370306432768, 0.683002139012167, 0.6460628272158968, 0.7043294463040661, 0.6466416937743495, 0.6678961159741478, 
0.6089978700147194, 0.9553809491620912, 0.745724746133406, 0.5730322241415717, 0.5797738056688634, 0.9178722807967146, 0.6505053344897462, 0.6935436130538222, 0.9071653666319788, 0.8773308053742508, 0.6554408252223218, 0.5435164619233019, 0.8712757074854265, 0.8853161597685453, 0.6180336862242524, 0.603429922567752, 0.9576591752700732, 0.9933524597955209, 0.7554651783811361, 0.5369134929238921, 0.919898665029211, 0.5261417551734118, 0.5598625766602741, 0.854137612753092, 0.6636030316269403, 0.5583143197663766, 0.6506302280223863, 0.8405457046037338, 0.9814676504402364, 0.9903539983341205, 0.9430570035401662, 0.9267002065249883, 0.9220378524715702, 0.6919548858133665, 0.5052736702976941, 0.8185956520310258, 0.933725188841449, 0.7436410247932655, 0.8596419240995665, 0.670564918942306, 0.9639442056816756, 0.9839797616565263, 0.7553335963339181, 0.9234179538945573, 0.9800256989232923, 0.6070993565768759, 0.759328051116216, 0.8819522894543013, 0.5099746933275887, 0.7572870474220398, 0.6408639809081054, 0.6526990625726338, 0.6229003984552794, 0.8570272017709546, 0.8503593915797019, 0.9542557907520128, 0.798097787626127, 0.9644538450748075, 0.8144928138115843, 0.5838776543946389, 0.9024754425295907, 0.8346385017183213, 0.9333446201951227, 0.5440351357103675, 0.9459732011677278, 0.9340346507468648, 0.6627841243943515, 0.9579594158421592, 0.8174169751253182, 0.9869940496726886, 0.7178578968075504, 0.7608978368691816, 0.5037556043834833, 0.7356261833280027, 0.9617621785370296, 0.6966355885987513, 0.8804672882534264, 0.9434852098893134, 0.8225412564615466, 0.765568773604058, 0.6231971489540166, 0.6279922413377423, 0.9654742376675671, 0.6738708366887789, 0.6449003299892682, 0.9585886093514391, 0.7314832322584071, 0.7640481769136422, 0.8697486399475141, 0.695885769146828, 0.791004856926177, 0.7021111187118314, 0.6956704896649935, 0.8741447284707022, 0.5435672462499186, 0.6521204775274776, 0.5941111679481023, 0.9861580446720166, 0.6501272091625334, 0.5498263579101148, 
0.6386313392407674, 0.7009015958872046, 0.8232443301155844, 0.9056662285311223, 0.5424050020812133, 0.7596106545374715, 0.7727732407794083, 0.7165648253116241, 0.5414468146082541, 0.5108896136935019, 0.8681770913582989, 0.6245307286088728, 0.8700636667688741, 0.5430879319431523, 0.9346794579416247, 0.6998939311054937, 0.5556461241684005, 0.6924344346024167, 0.9609451847343634, 0.5329906890937882, 0.8479727601719386, 0.8409960690967424, 0.5769783697152658, 0.9643513576654144, 0.6926227036834054, 0.9279512682833833, 0.5704535483794404, 0.8707878970649867, 0.6818128118132311, 0.6169259565313272, 0.8125980509385973, 0.9456236244332612, 0.770149456966107, 0.8458602463331407, 0.586994539261739, 0.816734618287459, 0.8046818167422294, 0.8573087250740169, 0.6242297049750407, 0.5386390167516169, 0.917577827432017, 0.8155996035696065, 0.8033564151502768, 0.7409838832069056, 0.6818674109115224, 0.7068304506106082, 0.9603864975724595, 0.6473247295641179, 0.7070883235034555, 0.8816961420570577, 0.7484361354750841, 0.6551165970540869, 0.7475657552005708, 0.5632654003192309, 0.5372781327965916, 0.8479160897178193, 0.7508742220708284, 0.5497389082212618, 0.8353166723658101, 0.5246452638368585, 0.7745635069107619, 0.7125178158747223, 0.7147984303186647, 0.7911703822268292, 0.5469793443027597, 0.9410467959802605, 0.9820342027023358, 0.9192402966344853, 0.8888204095575752, 0.572674887580745, 0.8028912556810879, 0.6154231857497445, 0.8409222104624643, 0.8201361531462716, 0.9017867735517668, 0.5356632802669195, 0.6015840290880379, 0.6908125415983806, 0.880188530955407, 0.7144644677969323, 0.6407860703056542, 0.9211901185336624, 0.8231477253124824, 0.6976652500308493, 0.9698744609183798, 0.5586198846292508, 0.6502553923830958, 0.7253158861671615, 0.791184740370499, 0.5911669751576538, 0.8748989443494756, 0.6707624806997408, 0.8540631303727767, 0.899967572256928, 0.8110049861695596, 0.5062975445363562, 0.9525686037766345, 0.6503453050572943, 0.6803980566540933, 0.7604774825145999, 
0.7880787889291405, 0.5193138101881478, 0.9645583013444113, 0.815436900871781, 0.5586728971511297, 0.5909974486850673, 0.5542388962998113, 0.7693554502912199, 0.7044159796283584, 0.9092319747142396, 0.7255852319427994, 0.8404820273853535, 0.875997118401129, 0.8103913673053473, 0.5189561658828915, 0.744601485381325, 0.607638183492523, 0.8364153967753535, 0.7207721325269358, 0.7195386184828219, 0.7825136491045328, 0.9021864051678208, 0.9550730319053643, 0.5847148517242846, 0.6510565382776452, 0.6517422567040592, 0.5673910937280278, 0.8845774706552932, 0.9020783614465442, 0.8972309805663676, 0.5800177252921306, 0.7650453236163709, 0.974186105338336, 0.6891347367001122, 0.7600053855002276, 0.5480663505693831, 0.7400736396289057, 0.5409478529693333, 0.5873851026322126, 0.8717154507550646, 0.8313240309813599, 0.9817184677904831, 0.5152554552512072, 0.5915076889338665, 0.8586935419887655, 0.9641082316381157, 0.9479659318301142, 0.6963558964591461, 0.719483293548751, 0.7082478963973794, 0.7011546911599709, 0.6955195685986069, 0.8473165763123525, 0.8260650655559945, 0.8720930416297511, 0.7360109540007201, 0.7158196855350742, 0.7955358141524249, 0.8713314477897376, 0.5877806915616078, 0.6724727829500187, 0.5017772982374546, 0.9492304266983993, 0.7404646396427323, 0.6748284769770869, 0.6064427640003636, 0.68013082222384, 0.9992864324782706, 0.964093130097393, 0.7298693002233205, 0.990655158399635, 0.8996436618077155, 0.9946855992264705, 0.8723504815043004, 0.5566069399170325, 0.8393059185539979, 0.740981719932359, 0.8309554901657208, 0.7685838419957499, 0.8974715291427238, 0.9439114386033796, 0.8143120805008618, 0.5101066382349283, 0.778904686166616, 0.9836859575413969, 0.6366423374093713, 0.894933427545859, 0.9441622159828607, 0.8696484583638007, 0.9893122963855445, 0.9390748123219705, 0.9748515714022398, 0.7104828798312103, 0.7620426349725582, 0.5080283461475537, 0.6838825378162907, 0.917639941345956, 0.9107080954206898, 0.6985751528024109, 0.5433600953658994, 
0.968730107893442, 0.7027218011449485, 0.6465729925296544, 0.7627587723823732, 0.898413342745142, 0.6481656961883078, 0.7466302152555395, 0.9525410070067496, 0.8340772829640897, 0.9645791083350667, 0.612098547149245, 0.7489537238046433, 0.5265518460218497, 0.681909796461546, 0.7074819994122572, 0.8187797217510551, 0.7966160999448246, 0.870594694120415, 0.7477628180698208, 0.5484446235956282, 0.9634695538693432, 0.8954968714367875, 0.5848556927224182, 0.7918709057253308, 0.8255395271235584, 0.9759440985088058, 0.6285193196805812, 0.8260536513895395, 0.5849762267117697, 0.5771567083646878, 0.8163249022783938, 0.7746202407062948, 0.9612536397615778, 0.6205038590414486, 0.595902451629295, 0.863099404723228, 0.6977721976261316, 0.6134761314266399, 0.72381832071323, 0.8474606180967932, 0.5086292331399773, 0.6881095771111012, 0.8822706977726207, 0.756406061936527, 0.9652055345231659, 0.6293781044171647, 0.7101734856917807, 0.5415974433078301, 0.72901364046584, 0.7634390182649748, 0.9717501610729384, 0.5197446400701419, 0.6703404697744391, 0.6441900247656709, 0.8398129480292053, 0.8447100440049862, 0.882087278614653, 0.896798918670689, 0.7647078780307088, 0.9550119440872581, 0.6402471370066262, 0.7934197015581699, 0.5431935788591628, 0.7685079056248083, 0.591321909325546, 0.7648987092434566, 0.801798326096596, 0.6322363453826039, 0.592659974443635, 0.9541321219445533, 0.6869452739346018, 0.7571295939960765, 0.9025084788684995, 0.8134936877479295, 0.9816889496005504, 0.5583874807868394, 0.697416085648775, 0.522489345594874, 0.7053957768565791, 0.5388691359957978, 0.8715409387755709, 0.7520613116213315, 0.6126544936167913, 0.7583156345796567, 0.7132396558570935, 0.7943005086395782, 0.8938855662837621, 0.8193220978353866, 0.5225299088386124, 0.554816836189473, 0.7560680188884505, 0.5661457956607341, 0.7060481410793176, 0.6234471955686369, 0.5272579142629308, 0.625741706619436, 0.8634688642199697, 0.9767904945507235, 0.7654269829395192, 0.6466733751892824, 0.792461497212714, 
0.6663465607731662, 0.8901781248697802, 0.829539673128265, 0.616087563433992, 0.7162268409232406, 0.5472577919676725, 0.5233946578834463, 0.9186311441478248, 0.68438209216267, 0.922140436606842, 0.8379598365059332, 0.6519391102987299, 0.5668281048247399, 0.6695352472906015, 0.9686598560201856, 0.6210888725184409, 0.6326931701500408, 0.9845985381983478, 0.7792598220615655, 0.608465463840522, 0.616645105386005, 0.6880489067297926, 0.856270272473063, 0.9923733216333999, 0.8322183117882926, 0.7501567424935356, 0.9011946883177713, 0.7119346527975097, 0.8105637730686466, 0.5692565900495623, 0.7628366152781778, 0.6099629554684183, 0.6127752914398916, 0.5604131334465859, 0.5843197385922778, 0.994738205241202, 0.5342463466744516, 0.6963931244484476, 0.6322705057979096, 0.6626100679426595, 0.5924323661523988, 0.9820859828520054, 0.6645057298236261, 0.7983451981739049, 0.5035184190787529, 0.778896694814371, 0.9061204697869764, 0.7879033169436802, 0.9880737312767689, 0.8622306238178011, 0.6855837948669746, 0.8653294793501107, 0.6122428065610882, 0.9523222155989122, 0.5010225130486041, 0.9236574324608857, 0.7414106095236683, 0.5481462220250042, 0.9285547474229794, 0.8912778351655586, 0.6403355900899312, 0.9285198674258051, 0.9104353478540999, 0.9470334917069279, 0.9681773649045398, 0.8395394770090132, 0.5892570160026702, 0.7192659858291293, 0.5757761579539817, 0.9626534031415264, 0.6523228482444351, 0.536896254637867, 0.6664843211130109, 0.871908897327754, 0.741090462331032, 0.548385166956473, 0.5193703927539534, 0.5165507482546499, 0.8152223102089847, 0.6951087997250692, 0.7820550555036041, 0.9941002644191652, 0.7592513439746669, 0.9554309270518538, 0.5684116931953236, 0.7739180478098602, 0.6982295090705364, 0.9022344252366026, 0.9890930499082906, 0.7563302293767309, 0.5997445029876703, 0.955145635257316, 0.8790657666498745, 0.5702313737870544, 0.9998031603210108, 0.9412845866760333, 0.9794429041986135, 0.5767059830496943, 0.5969477398299549, 0.9589046318766824, 
0.5838078744802391, 0.5354285143921134, 0.9995832941062599, 0.629452970173374, 0.7400359627091517, 0.6386193407074532, 0.9856270783688907, 0.8782101831545241, 0.5252134403693108, 0.7725720114034078, 0.9719675752840886, 0.9259325230794881, 0.6082684531152236, 0.6402818351348172, 0.9532329510664446, 0.5492638034620076, 0.5637641516330625, 0.7704772399795482, 0.5192251443255336, 0.9497899431677954, 0.8516876588098401, 0.5601067526417582, 0.9133521456587985, 0.5150812093481665, 0.7352877969957059, 0.8922701332273459, 0.7113516586537673, 0.7517693533658018, 0.8925245070070382, 0.5586248960153951, 0.7471966764443806, 0.5974335495342795, 0.9075311241959529, 0.7110737934546734, 0.6027677623125691, 0.5132245359115248, 0.9886190566362258, 0.6702347660030071, 0.9387738955976629, 0.8090447187048624, 0.8064459366969908, 0.522952990583911, 0.8664410627369161, 0.7752535414083306, 0.9494859897462989, 0.5600981055608781, 0.9972456587926868, 0.7375038269593455, 0.6167527554969559, 0.7273360364364807, 0.5251762752038522, 0.6257618409124395, 0.5057971495326732, 0.5284715553340167, 0.9071243249855406, 0.7044101840671888, 0.7927684504907193, 0.9652350380573614, 0.7012759800916329, 0.6945586280552881, 0.9864308503938372, 0.8616411858558388, 0.9833419738035267, 0.7105591003072176, 0.7285500706231921, 0.8816094008390345, 0.7067554898426607, 0.5026269136378481, 0.8055670309618794, 0.9735218304093607, 0.9428151753172923, 0.5813155302012669, 0.863091066591137, 0.7115458460607575, 0.7489057891759507, 0.644978744669463, 0.8463731418075614, 0.7638325371473034, 0.6850612688171445, 0.8366836929863257, 0.5545393980927575, 0.9168967676283404, 0.5426413434003635, 0.899699955182449, 0.7937585633266729, 0.5099182992724702, 0.6240819924591354, 0.8468219450248226, 0.6357943202158984, 0.9934746370246321, 0.539338763140371, 0.7184519310684823, 0.5976722308725135, 0.7112677645532444, 0.9804051939754734, 0.9688049889041305, 0.8894783099323462, 0.6549587408905713, 0.7722406252462852, 0.7910914636674327, 
0.6030932302450549, 0.6212206165479557, 0.8397085199572503, 0.5489621001152997, 0.9787849058650704, 0.5122944379148385, 0.505265692343194, 0.8139273433554968, 0.5425944710341601, 0.7471166540455585, 0.9740282920820296, 0.743625403396611, 0.7842735661548034, 0.8116867489751562, 0.6110791152622399, 0.7142230020179027, 0.858256964660318, 0.5777879829198601, 0.5320875072076443, 0.5302762205337357, 0.7747936154987032, 0.8557211102334568, 0.6774962667952908, 0.5204917970302128, 0.939906344099128, 0.850326445995935, 0.8511951889797893, 0.5652232351302513, 0.7792906163978147, 0.8113569255770116, 0.7624463684699081, 0.8859984221681194, 0.8608372038428933, 0.9576754597531918, 0.762950715435309, 0.5710171122091257, 0.5922074003599271, 0.6764516174881111, 0.829536951498663, 0.6716894236004569, 0.8974488596001191, 0.5822720542082619, 0.9790384792784463, 0.9971210068653412, 0.9984610038585631, 0.9019510583821204, 0.7827244746155719, 0.7790034650186853, 0.9245326754499792, 0.5805888005266331, 0.8853891390002959, 0.9588126442316808, 0.9156382793904987, 0.9806397862076858, 0.515357854000065, 0.9245304976562381, 0.5448023544788831, 0.5850891262839202, 0.7196587096671063, 0.6029475763635337, 0.5599391651476484, 0.63399673426902, 0.7364706090314184, 0.7036199888719776, 0.951975226546584, 0.553042816574262, 0.7225919511652783, 0.716158937188073, 0.5102363173194162, 0.9494201038331391, 0.699178730579848, 0.8913502311680109, 0.723312252229897, 0.7211823945948801, 0.8901155670752412, 0.682069711005332, 0.6051094707854866, 0.5687147210515953, 0.6654915462396221, 0.8924006661183346, 0.8994085072978166, 0.5896517670777892, 0.9219162212752162, 0.8532463045678109, 0.9748071243476535, 0.9491076108142937, 0.7921904179319097, 0.8780543998390087, 0.7478395353244582, 0.6979487771653703, 0.792413740484579, 0.6013000954636358, 0.6184154818820733, 0.818707735985533, 0.743073797004534, 0.5951883116299519, 0.8385734504188374, 0.9383961056084338, 0.8209140949825993, 0.8050544933808079, 
0.6584655021742365, 0.6914350583815538, 0.9504656403419862, 0.7403520309219781, 0.9018581000988926, 0.8993759005142503, 0.939556042743181, 0.6862843827813732, 0.9927813733114672, 0.8909801890820712, 0.6520359495646021, 0.9373047691355938, 0.9884764622668916, 0.7047272031582585, 0.7204206475578135, 0.5312916502134899, 0.8387446326137482, 0.6126024009090913, 0.7769855889954858, 0.8831618769858667, 0.5280937310910916, 0.8379901355701809, 0.822411694832107, 0.66139988404165, 0.7759341905778183, 0.9707687977651427, 0.8553239670309531, 0.9634179315178215, 0.9647722584936078, 0.641090095152818, 0.5084153089956815, 0.5946513050755171, 0.9805553322417825, 0.6473423646108725, 0.9350921132206733, 0.6586622783451459, 0.8413986411767922, 0.8343350576672617, 0.8584414097897528, 0.680945044902204, 0.7812626015423014, 0.9009677632848627, 0.5129213117852768, 0.9666585243220182, 0.705346269615525, 0.6673729035938691, 0.7846230143575028, 0.8546803887583103, 0.8574787481928524, 0.8909189235611288, 0.7848144453656076, 0.5911123807100989, 0.9656474662814599, 0.7238063301765714, 0.5565378301967199, 0.5257126044707596, 0.9906196606808121, 0.6357173974192407, 0.9652952848085917, 0.6034299310587758, 0.7562936518447532, 0.7607157546645251, 0.9857373307957273, 0.520253539434172, 0.5765051167274908, 0.7158437591880247, 0.6712234908552268, 0.9024837634584352, 0.7174892740434322, 0.8417584378161089, 0.8409614633948937, 0.6493120078067685, 0.5311712683240511, 0.5445334764232197, 0.6625914772288151, 0.6716389705497928, 0.9635166517064555, 0.9485480270335678, 0.6885886836319005, 0.9858449541405492, 0.7688451622521686, 0.8367370976143024, 0.664270965214511, 0.5835542236455942, 0.6676557060722312, 0.798709888893578, 0.8721769951999401, 0.9953102410518428, 0.8901394623960255, 0.8746259032158274, 0.9873907820811572, 0.7968104737440256, 0.8235468527467995, 0.8504931252957095, 0.8562963458970105, 0.6822341068175737, 0.8754109635014196, 0.9113768003642588, 0.55166840294661, 0.8503762900339389, 
0.5104576543419579, 0.506264218256195, 0.5175016208585514, 0.8803489533417008, 0.990655529234831, 0.5600040096505388, 0.5922583190681975, 0.9624531688200144, 0.9782080556588859, 0.9017776605119383, 0.7882513043750083, 0.7720206190838439, 0.6414807793388111, 0.8839478581774536, 0.8661883830174728, 0.506623801904283, 0.7028738339956273, 0.8937001018233275, 0.931221500875552, 0.5627865188755813, 0.5021230631862486, 0.5879063940027631, 0.7864446804757094, 0.8142180165951531, 0.7671706192514325, 0.7973547710101159, 0.5888654619040199, 0.9814306150122771, 0.7353285885528947, 0.5163807178330444, 0.578711459536907, 0.8117408683966036, 0.9624551125535726, 0.8653746111177584, 0.6681882386481304, 0.7654340451557007, 0.6576725129645236, 0.5778960405940904, 0.7845638395211272, 0.691076822375724, 0.5589206425054831, 0.9652039349299266, 0.9927836956034121, 0.786668662654188, 0.9545263585755388, 0.554283127517209, 0.6940389732956629, 0.545269093638101, 0.686063228625122, 0.7294012593742569, 0.8278908083395505, 0.5714751256660129, 0.9625815028096689, 0.9195520826910961, 0.5993448982253655, 0.697467894598603, 0.6194938615299965, 0.925381367023626, 0.5456475406932075, 0.6046070970585069, 0.7009162225418237, 0.5166835815248456, 0.9522660115276905, 0.6686436078525037, 0.903298369272088, 0.5166656683605628, 0.7976582770812743, 0.5583990012875955, 0.6177972378216884, 0.9708957044113982, 0.7524868252259919, 0.7426866048090499, 0.5352123146767778, 0.7521313840565689, 0.5695124381039588, 0.774729086324663, 0.9633323444318491, 0.8801415617946957, 0.8671518733827257, 0.890182529776643, 0.6771717103380889, 0.70218852166574, 0.7997494090590765, 0.9363327037718142, 0.8628123079657342, 0.9397235031068842, 0.912255597316346, 0.8456333231866366, 0.7783708461603875, 0.6191996175715995, 0.8341153863869264, 0.6216691693844554, 0.884126971535844, 0.5716804815033878, 0.8315088716929744, 0.5105267558114024, 0.680468615587293, 0.7052360302652458, 0.5421184556910408, 0.8371140213998692, 0.7333596981949234, 
0.6313248495279332, 0.501179938330161, 0.7468060924063185, 0.5312645199901009, 0.7439593457006489, 0.5969898324787626, 0.6122688654104831, 0.7243532643662981, 0.8285730638061852, 0.9133569302322009, 0.7266278020556072, 0.9867705272132206, 0.9459078590509675, 0.6971331707361748, 0.7479230545120781, 0.8836757015388814, 0.5291048388764769, 0.7147081510933753, 0.5525355593556973, 0.8554157409903492, 0.5454131159968039, 0.9259852923306355, 0.7023662699156888, 0.5796844993946614, 0.7447636322499567, 0.8683467611039242, 0.6508767203647066, 0.5739087270553087, 0.5392800417983465, 0.981766672037119, 0.8901677011965022, 0.9767997367217733, 0.6445859659175465, 0.9406682928138689, 0.8800551417929248, 0.715733782376434, 0.7182863862025716, 0.6453177160891106, 0.6305147854401639, 0.5995078020031641, 0.9495959984888731, 0.6841758295950677, 0.5618057219134365, 0.6808741576172258, 0.9205307269417253, 0.9445343695094783, 0.5499626455890437, 0.5168528175774639, 0.6474606932131626, 0.7297024901940807, 0.8326522334370703, 0.6301072802855181, 0.5337535918331908, 0.5037432909404873, 0.7422615844092095, 0.8677448525172855, 0.6607937699018607, 0.6707720837408419, 0.9133087129412757, 0.9481512674230637, 0.7883697103775542, 0.699868805150745, 0.8349051633475366, 0.9573639365757711, 0.6837632772938267, 0.834408879797368, 0.9742075679770009, 0.6339105481974168, 0.9840596660761565, 0.640140322226068, 0.7536376572102963, 0.8531324036538401, 0.5161400242042762, 0.9196758574706793, 0.9277200945814283, 0.5042098801060222, 0.7385155339785513, 0.8521944085106832, 0.9577739109043049, 0.520331161948661, 0.669665168058411, 0.9634195217380641, 0.915781902604718, 0.6971608235598816, 0.5381292570511711, 0.7377926764049509, 0.5133510155568459, 0.8406640121086945, 0.8080121517672334, 0.6354417879046987, 0.5771176544750589, 0.6712706843319466, 0.6980749788622598, 0.930542969836098, 0.8224085437859479, 0.699899658036587, 0.7549715340941533, 0.5734033946798475, 0.7898597094080175, 0.5906172641375943, 
0.5935671149911894, 0.5141442587146328, 0.7750689143813909, 0.9194160864152605, 0.588070530921088, 0.6265841705602648, 0.654361915119229, 0.7808553901306454, 0.6110135806648278, 0.7790658846040535, 0.8381609342030092, 0.9387515812775822, 0.7288295823742017, 0.9612638120584189, 0.7461812038513387, 0.6882143927335949, 0.9932041888217829, 0.8385962921996017, 0.6342102624871786, 0.9663927644774528, 0.6998136439167797, 0.8093741363186233, 0.8123322214448574, 0.5249809360523112, 0.5295606012310711, 0.5995835102324933, 0.5779990724742392, 0.8825202598060973, 0.5701514638728806, 0.7166234666852314, 0.8083257983184315, 0.8358429764797574, 0.8446737189025695, 0.8328045449301007, 0.9529848535890881, 0.854809525180185, 0.7479368372633978, 0.8336223696406806, 0.6631709563449988, 0.7169983599789009, 0.6602073192812337, 0.6938114819859408, 0.6114957296246992, 0.5468241595533694, 0.5433150919924087, 0.869975675074439, 0.8272690888543532, 0.855371731379269, 0.6980882173055198, 0.5070568470203067, 0.6403589466996717, 0.9469229194747906, 0.6321959315229352, 0.8279995830328775, 0.6947957482160103, 0.9147542698834422, 0.6409944513790848, 0.6683605387772088, 0.9971718056291867, 0.9047894515923078, 0.6526009757756348, 0.6112667601755295, 0.5425373298065507, 0.7630992317041304, 0.7744429832775739, 0.5433764598927286, 0.8446058605540068, 0.9331403652847508, 0.7870018719166916, 0.7874177313163074, 0.8365255924052055, 0.5783270915163362, 0.9787548954676313, 0.5664043625770507, 0.6692566292496156, 0.941691108079598, 0.7825171682239744, 0.6412146386173099, 0.8950284467106853, 0.8838588359053308, 0.8091222864330776, 0.834514979864603, 0.8103827754668578, 0.7885736464545877, 0.6010486377397583, 0.6819552259919104, 0.8757338796897978, 0.8912963355321887, 0.9600502597910092, 0.5910531283264719, 0.9384045810500572, 0.9068542293309947, 0.7119197909104851, 0.5596498226412181, 0.6406802790901938, 0.7997519817735581, 0.8221928390636104, 0.7599830203682135, 0.5313013319668803, 0.939167348841109, 
0.5915166329666075, 0.9827430231986035, 0.760261955454224, 0.5527954032142268, 0.6537955355311768, 0.8062407044249704, 0.5355682142747877, 0.667037664628685, 0.8571403054216733, 0.660105315786468, 0.7058059888683427, 0.5950200445384497, 0.5000127181926609, 0.7704865757118043, 0.5651569493929836, 0.6972786340488775, 0.9365329064366125, 0.9284461057177863, 0.7931511960375927, 0.883086860179014, 0.5927934738971534, 0.944738479342426, 0.8443118215163846, 0.6493247468258538, 0.8481880020576604, 0.9350114546548808, 0.6687797057514795, 0.6768248787717333, 0.6207399503917617, 0.6697516453070858, 0.9234844192873419, 0.8675437657312297, 0.8468735052361782, 0.5430312752894246, 0.9278574931958061, 0.8947543475660658, 0.5380775712367232, 0.8814347489617305, 0.7939130872753891, 0.9063161071200243, 0.6970299227153378, 0.7571743476092392, 0.7559127376627712, 0.9157234384629381, 0.5857778508690421, 0.595636806057362, 0.7088764791046851, 0.9824649837297263, 0.7536234923675493, 0.7967549376536438, 0.6099111580282728, 0.668331092911937, 0.9849110422434049, 0.8825390763380059, 0.7598922929446852, 0.7450745304706052, 0.9152617832567382, 0.6871604209826933, 0.5302019847357922, 0.5909115393240767, 0.7901492345047276, 0.9438290937895224, 0.9660170353173714, 0.8473476153381516, 0.992644681081339, 0.7747685900411923, 0.6180833683250702, 0.6106666739929512, 0.7379741143062035, 0.7042900753924519, 0.8868650716117592, 0.9583443480946081, 0.7410074405266911, 0.8765655367923151, 0.8425460859222274, 0.6257347411500325, 0.6543096794911465, 0.7626264180710112, 0.6475338710616513, 0.9990848964711194, 0.6367130552718226, 0.7464630506847969, 0.8550725997305055, 0.60278793356047, 0.9859612962231843, 0.6417582501056532, 0.5007823644759832, 0.7205639418601741, 0.9524395214508086, 0.8365649784348096, 0.7857839199623827, 0.8131927588605938, 0.8746149667177296, 0.7066566406904069, 0.8143718223908418, 0.6506056609429842, 0.563362822187777, 0.6746716764833616, 0.870141727520362, 0.6780759891154715, 
0.693032774073431, 0.6111479753625455, 0.7741530714092661, 0.596509355197999, 0.812980800005247, 0.8439908592677094, 0.7943824635851036, 0.5803529162364196, 0.7954234588634759, 0.6474934812766421, 0.5333876042880876, 0.9123187521490643, 0.7806706993495595, 0.5311915366193257, 0.9783402831398911, 0.8965189143586841, 0.5873132658674394, 0.5839306437451951, 0.7662203794937206, 0.8624807812832679, 0.7977307989873077, 0.8486196573715947, 0.6885472018322658, 0.9575980630877561, 0.8703559107720533, 0.8530468759220378, 0.9487617290810206, 0.7117681098156007, 0.6681479127311669, 0.5900970666564386, 0.9344296841257365, 0.7679324243890728, 0.5811601955119461, 0.7113903452636339, 0.8087101886972623, 0.8310329948142268, 0.9839527610178738, 0.8309902766755399, 0.7730544831627382, 0.9198949594510173, 0.6557982619193508, 0.92609499693507, 0.517594291619943, 0.6033762941195837, 0.937853257468716, 0.6934858467116614, 0.8486374910833526, 0.5286274859159182, 0.6292141453424701, 0.6313596872878307, 0.6671588949072585, 0.8466407850323665, 0.7688077701834151, 0.8126123819801432, 0.6334094703304578, 0.8108394496822559, 0.8847345867352336, 0.7569821368785852, 0.8081793225556557, 0.671305171597883, 0.630412538319158, 0.7871993068865035, 0.5989301371477196, 0.5910680832868397, 0.9339045957773352, 0.9486707731280566, 0.6058785922072827, 0.8513522618283712, 0.9665594824351005, 0.7537417859784259, 0.6927877301593813, 0.5351278497172711, 0.614806689529918, 0.5944480797527435, 0.9862018845660092, 0.7328415989890886, 0.9164764088577326, 0.867724376061858, 0.7597565581636525, 0.544817237002118, 0.9895691655448347, 0.6517900416824625, 0.8960966438974273, 0.6267102509670834, 0.8586468057659875, 0.8575110331151751, 0.8242327598831634, 0.7650341991318776, 0.7806222628816615, 0.7568289025861308, 0.5474451165456662, 0.5222026467431962, 0.6847001046090796, 0.9964331669251716, 0.915622653448246, 0.7591536032263827, 0.7867030049179329, 0.9096152784043161, 0.6708069587533825, 0.666781049584459, 
0.8008840574508402, 0.5473537262443545, 0.8368251296551478, 0.8885533765087343, 0.9048938569005953, 0.5258158875812554, 0.5990654729403526, 0.8651756390281116, 0.7582009290170291, 0.5197815362547561, 0.5223536461021132, 0.8491681994466872, 0.5983295099157762, 0.6773220195731808, 0.7678441239418247, 0.667295171447021, 0.6886238149905245, 0.5442909712362987, 0.9444954344627161, 0.7465331125754533, 0.6350904910768848, 0.8076275614919417, 0.665117109989207, 0.7261266861709592, 0.9423885112982373, 0.6264648016052167, 0.8728894135266494, 0.7980197798268549, 0.8733641124438933, 0.5107769650371048, 0.5502206795015305, 0.9204506309419668, 0.7676524440004096, 0.9152306047212937, 0.7149057007775086, 0.8877774122750033, 0.7596419017340252, 0.7214995806889098, 0.7185595254698895, 0.6317594916001359, 0.5577510772624973, 0.6226345797308641, 0.9300920299873361, 0.7719685976225066, 0.5203126256309165, 0.5935592936293579, 0.9094751787894682, 0.9902267091254475, 0.7474550691265547, 0.8701259443923773, 0.5133621816335747, 0.704814205064614, 0.898318051597534, 0.6833897036309167, 0.7819898050652672, 0.9955198923586104, 0.663857963997253, 0.8686383101690542, 0.858889126987359, 0.9657975924838937, 0.9903386663025249, 0.8073387260933289, 0.912871156401517, 0.5969050329185233, 0.9557973268965607, 0.9987137853815984, 0.7395961201785859, 0.6578005744878063, 0.7319633377313517, 0.7159400429068413, 0.5722930961331856, 0.670144487566783, 0.6740781716486797, 0.7768681732379125, 0.5131764239050394, 0.5386401712535045, 0.8003439614297134, 0.9633802004849448, 0.600334707088103, 0.806005256068336, 0.9652919345752724, 0.9921659646318126, 0.814702087729426, 0.9089112115180018, 0.9864288275107749, 0.6907846339248036, 0.5388704044800519, 0.7901839060063416, 0.5693169163480241, 0.5137095817951627, 0.6369937959834132, 0.8156191582029977, 0.5686159944162678, 0.7379553012979787, 0.6506472409498387, 0.9460507670728361, 0.6163802882824707, 0.5468089800437118, 0.6519982121729752, 0.5071031159506149, 
0.5046130470022675, 0.5881094766313639, 0.6347949144423775, 0.7313046114305461, 0.9840486465702072, 0.7062808138272396, 0.6688750291836731, 0.7581826445459496, 0.5956402731208847, 0.7275059031596689, 0.8159406301210026, 0.7122988434901573, 0.7214283119559628, 0.815691172813708, 0.8740565658200063, 0.8225064292952602, 0.9544496962226925, 0.5200203371325921, 0.6865790703469973, 0.5379895399370654, 0.9683712507419923, 0.6006359529063685, 0.5822659171184136, 0.5429540123387115, 0.7555319700339993, 0.5768109079860395, 0.5807139227339132, 0.9904859469713503, 0.5681337980032445, 0.8139339190269466, 0.6536490563432489, 0.5418784057256496, 0.9568894573514672, 0.6138280023733476, 0.8761459392834177, 0.8339616632374018, 0.536215776611102, 0.766720767526669, 0.7668151253814405, 0.6353071168762638, 0.693920725508945, 0.729410370089872, 0.9752906906787384, 0.6736116081622354, 0.7811868046250552, 0.5432604444530507, 0.8920265311034561, 0.5263327839562721, 0.775309957397974, 0.7977247820885147, 0.7492259847546265, 0.8330046768903482, 0.8798751594118562, 0.596226698634498, 0.64046195667782, 0.9088117811538072, 0.5760825296118783, 0.7633622564567899, 0.7068578278808846, 0.6783705824548203, 0.623419115951662, 0.5244070592823811, 0.5279786334896166, 0.543432459748735, 0.5030740664567548, 0.5537583460727167, 0.5439905247583969, 0.854151095139025, 0.7970854372478828, 0.9629880674481754, 0.9362194756244022, 0.6669840741595889, 0.5892531474309779, 0.5881567216376304, 0.8685887439884682, 0.7404661591071058, 0.8743586120283204, 0.8744382314714194, 0.6203461304039769, 0.5818936957943808, 0.7012564512656414, 0.5834995343475651, 0.5583182184446124, 0.7366847963555457, 0.8620216056806589, 0.739025579619533, 0.7710948878160921, 0.6170799186920166, 0.546249604538884, 0.9830094342850613, 0.7377704656418442, 0.8794132651643204, 0.8883631033891848, 0.5107491274515965, 0.7152601347506186, 0.9305205561021154, 0.6491860765783717, 0.6339741718759252, 0.8521316298794092, 0.6750913150613151, 
0.5627555228863137, 0.6388150338968384, 0.6962536666366053, 0.6549500051198884, 0.71029489705357, 0.6293921936358742, 0.7617917101165639, 0.8870719242985199, 0.8489193349568973, 0.9163498175985643, 0.5507039888957594, 0.7749456778600224, 0.6061769917253339, 0.6981372506579713, 0.5467999262250246, 0.7985474363838757, 0.5359689915676378, 0.6867756214146421, 0.9941281600673433, 0.7872379234991296, 0.6248224892906351, 0.8763866033031003, 0.746103499571022, 0.811202648679338, 0.858121234406042, 0.5538929399047545, 0.969059019226671, 0.7569483491003893, 0.9322502195899303, 0.7752063014650508, 0.5245559504324726, 0.5088767661086304, 0.809222299687373, 0.6905233990660709, 0.6283172656589467, 0.794439020610304, 0.5798034448984283, 0.594164572212637, 0.9519801144947904, 0.9360877642702091, 0.8366033282965959, 0.9223840867823183, 0.8276138530417261, 0.5114597387551062, 0.9179860844903414, 0.6651036699095053, 0.7860620215161079, 0.8813061288261725, 0.8102046913459163, 0.6711835516317666, 0.8856831423963879, 0.9682259705823619, 0.8129962738899075, 0.9536366495935662, 0.7121713069552041, 0.6734254628074194, 0.9689634871075204, 0.5076766504838108, 0.6325405851642912, 0.8520693640137867, 0.7065993237254755, 0.587469082717885, 0.9777061859970804, 0.8258173520058102, 0.880918676419199, 0.7762114377084595, 0.5651095594036127, 0.6580705891218939, 0.5676358155665319, 0.6113207551784978, 0.5713301917494391, 0.9206466210248865, 0.5783015316955631, 0.8410966927379107, 0.9628886429264866, 0.9979250127226578, 0.8431042485173854, 0.7421153135371485, 0.6093767372465572, 0.6468583740777356, 0.9944797193389827, 0.7836981261912561, 0.5968227109405293, 0.7337428450688154, 0.5997932079981628, 0.6925506595045015, 0.8185610823514207, 0.7258136701890157, 0.7259906599911733, 0.9309277270230372, 0.6573636453805872, 0.9629282463114388, 0.6382645948488009, 0.7890377491150338, 0.7214074064678726, 0.6382550793952466, 0.8165924900249255, 0.765868604079208, 0.9043455228303194, 0.5268449844466699, 
0.9347095806278514, 0.7615792740985037, 0.6004588535559303, 0.596859023929114, 0.8050756744237921, 0.723028227845524, 0.9468662244549262, 0.6878641837698787, 0.888624967813467, 0.9707205127657805, 0.7186568013048222, 0.8090963215838347, 0.5630602642458142, 0.5173813034292967, 0.7230923681880257, 0.8365682512989729, 0.7999810547475197, 0.9683346567807181, 0.9385929381915097, 0.6850263568935648, 0.6103804362496088, 0.8299507177715078, 0.901691447552259, 0.5026809011764753, 0.9786911381381642, 0.9347512897315986, 0.7610864056158752, 0.92547513511128, 0.587572849375876, 0.5108159830489608, 0.9395576464704996, 0.5851286840393503, 0.854761022464855, 0.7244855923723448, 0.5783206801414148, 0.5853433423842298, 0.9367459641080121, 0.944735774320046, 0.8044795410067467, 0.5427902981940005, 0.5920886293039703, 0.5469700836873498, 0.5728507756388059, 0.5711093434733739, 0.9198815893445256, 0.5075284094945713, 0.9731157778295854, 0.6589093944290678, 0.8820350943649657, 0.7788385473286252, 0.9528471682569732, 0.548823320554129, 0.53526156849505, 0.8142988418100664, 0.8493844444167451, 0.643628885694455, 0.8652298948168873, 0.8297636441111939, 0.7136051446297618, 0.9233228115638892, 0.7424222471752271, 0.6957440270964489, 0.7641547775096899, 0.5696771674163225, 0.6251122615868865, 0.725766511127415, 0.88942760249506, 0.8353594528673943, 0.8836474530701335, 0.9235779112120457, 0.5369495985078321, 0.757737078836434, 0.9941360280262741, 0.9552375039041247, 0.9750734136274455, 0.8437224801954359, 0.5698426714619826, 0.8539002498851285, 0.5795236516117389, 0.8733329125807701, 0.5010827769032444, 0.8928784135811791, 0.9522397986313265, 0.8929176668232855, 0.6042839908416474, 0.5692681271937925, 0.8523210688283389, 0.5253325705484939, 0.7047643341293199, 0.6048811293026787, 0.7268022783493786, 0.7080270738476983, 0.7594191401506134, 0.9670565617222688, 0.6523010793333793, 0.8742760544483673, 0.8892134771055397, 0.7881283608175753, 0.6973512596734379, 0.5807038406168658, 
0.5099651960488025, 0.699095211087982, 0.8731696434752295, 0.7488141218665374, 0.877087836053051, 0.7380240860376263, 0.5305538998326721, 0.6814225528235769, 0.7066319365314528, 0.5868431367730484, 0.6029187814697756, 0.8093437096279169, 0.5853688720104624, 0.6883543305668778, 0.6464605349234351, 0.6386066025402712, 0.6333860184969362, 0.576999923923639, 0.886577354342338, 0.8242532904953912, 0.5602178622458203, 0.7401998054802288, 0.5052238548787202, 0.5873398228075732, 0.5485715124812807, 0.643275628499469, 0.5591575475860102, 0.6350263015712176, 0.7226057590577362, 0.9038320126469472, 0.5888115314155928, 0.5609844346838755, 0.8640387770937245, 0.555022372573812, 0.7371096013635249, 0.7714553895451148, 0.934784106479694, 0.6836740572658617, 0.6379735599996512, 0.592640782457004, 0.5382285422371622, 0.5724856084787244, 0.771221629127949, 0.9340882417415108, 0.8814075495728104, 0.6482354971655391, 0.6543348530847628, 0.9030047146911029, 0.7313968478796005, 0.9590717835754002, 0.5713841348572328, 0.5497645312415076, 0.939970095964354, 0.5049686938928135, 0.9643582357274109, 0.8919765456051627, 0.6526786634149975, 0.9404696272201805, 0.7396921565212706, 0.808069963191788, 0.9662488247676251, 0.7185171998879036, 0.9830077539751214, 0.8493275383800913, 0.735131123797884, 0.5454894119885876, 0.6683400649255071, 0.5702105390476437, 0.7553938957892905, 0.9509657077571207, 0.6473576981619613, 0.8188983000268732, 0.8874666916537626, 0.5425382142934343, 0.6467739754241979, 0.8662216113109407, 0.5715830134777427, 0.8190762903364863, 0.5065495271968781, 0.6124839209085986, 0.748592198337992, 0.7679023558963578, 0.8651362958840162, 0.5685906037634276, 0.7080673874938703, 0.6025939462839631, 0.8453696900170087, 0.8516607362116884, 0.9558262319053936, 0.8104298787893127, 0.79306674903873, 0.5176757345275718, 0.8763060099365234, 0.7034610025902233, 0.8906391605516172, 0.9383948646437847, 0.7618363845274438, 0.6664158096161146, 0.8372259301264684, 0.7096807011419328, 
0.6075495973562355, 0.9514809614505264, 0.831156004872813, 0.9506537790499748, 0.5695198523028797, 0.9564628822299657, 0.5591730199798266, 0.6990370111748258, 0.5551318834907327, 0.7705248780218796, 0.8223930233044574, 0.7692470236822774, 0.7609913630900753, 0.593943306745194, 0.5936556857853799, 0.5273237413520351, 0.6535498078827808, 0.501321081961409, 0.7550806947370388, 0.5496737693968514, 0.7722393164032899, 0.9795131537850356, 0.6333821449034638, 0.7017163251027345, 0.5904245152727219, 0.8955162301936084, 0.7269538838731429, 0.9927051743153388, 0.6177232383372925, 0.819938654489248, 0.9342268361931285, 0.7556846077952875, 0.9358132418856455, 0.5693883650194208, 0.6824985368384213, 0.9841259666857908, 0.6640756826506127, 0.663225519261464, 0.702547177401778, 0.6172782687809995, 0.5936628135235593, 0.5590909776828086, 0.5055226345200977, 0.525630095033083, 0.9746210136347315, 0.9531962113687182, 0.8975098196294726, 0.6551750077542373, 0.7876338955358538, 0.8961564420708598, 0.5100269976308556, 0.8270763468952863, 0.9803713976788895, 0.8007163474044621, 0.7526983702809475, 0.8895554133605075, 0.6899032469696578, 0.5306745650243335, 0.7304096178416003, 0.5683510736939832, 0.648273295046436, 0.9172815842086379, 0.782743280567361, 0.8254723043463557, 0.7611254073431105, 0.9935294062092884, 0.5978158979744876, 0.9492287852585319, 0.7557006524272916, 0.5884992087664964, 0.633126951135851, 0.9995642785417844, 0.8832425182794126, 0.9624716484207128, 0.7822287014620415, 0.7833618089971048, 0.6901968996203216, 0.92636539037722, 0.9815378236692072, 0.7285993739647826, 0.8073643520583558, 0.9012632950328807, 0.7220988161114877, 0.5364121561893447, 0.5980174428887967, 0.6959496366381605, 0.8605961328974447, 0.5130613456681152, 0.7755965341243913, 0.9343222397426202, 0.632552574255967, 0.9689673120817142, 0.7752175085891263, 0.5450626667098706, 0.5604672466875744, 0.5714656065842487, 0.7626049433766142, 0.5419034577232089, 0.6196150357916785, 0.658550737679199, 
0.6511802965382317, 0.7779455815834087, 0.6861688014072709, 0.7506903862890593, 0.9626577050255594, 0.8579840919648869, 0.9639430840445635, 0.5257492233789842, 0.5305107656455443, 0.5560261067510062, 0.8992860752955102, 0.6812944065874345, 0.6322830959209538, 0.5241296516065495, 0.944100189069399, 0.6968201378447103, 0.9918850327704729, 0.6489406669072793, 0.5471834528497717, 0.6970106112010099, 0.5798678522719756, 0.5393502189206425, 0.7640800762619182, 0.9659497432904465, 0.954740057114928, 0.6005514983836118, 0.6319185282318591, 0.5446352842743976, 0.7704836840241238, 0.6981856626550578, 0.9756046096026996, 0.8847625239766376, 0.9872729046722528, 0.7638474228453922, 0.9018522517715221, 0.5399128497355097, 0.9862248295130581, 0.9011939271950244, 0.5599088180721796, 0.9642285290736021, 0.6430090345413464, 0.8754631657245949, 0.9974975191030994, 0.8183744270889282, 0.6907261946418629, 0.9239798058988317, 0.516866865458568, 0.9275658198406354, 0.553394951781213, 0.8303574522140545, 0.8060584455675082, 0.8313814306847161, 0.7805092721736073, 0.7504765448166877, 0.5630604491274174, 0.8668131075701768, 0.768150644825577, 0.6094943640048954, 0.7519745758960414, 0.781732237259328, 0.5592686949139021, 0.7094609543824273, 0.88335306426334, 0.7552028389186353, 0.5472789830337237, 0.6840221912085909, 0.6273214629420174, 0.6728666684734574, 0.6828061952371502, 0.8412169396280014, 0.5969662059776132, 0.6497644069777956, 0.673538993973149, 0.5443277996484954, 0.9714157429168007, 0.7163783102595156, 0.6770401491946767, 0.8757475243172517, 0.9503207869925425, 0.918821598474663, 0.8035557078362661, 0.9817717603520717, 0.6819712352748137, 0.5565534871918421, 0.5876266515269808, 0.7162938548760271, 0.7621280483244088, 0.8662775575762784, 0.9987244328871107, 0.9837996084694485, 0.6695879769293791, 0.7789501690393, 0.9370798393152133, 0.8201898012176159, 0.6533747605353936, 0.6212555016773651, 0.5683630810928604, 0.898075932209158, 0.5934768195966793, 0.7564102491856224, 
0.9530425964860525, 0.9470806978624937, 0.7506622710943025, 0.7648247350176515, 0.6871668613024133, 0.7353467744111328, 0.8620199087475447, 0.72741414473471, 0.6594693118168176, 0.9997387798898374, 0.9095944955109883, 0.8922769079935909, 0.8297207816368706, 0.9757483417572897, 0.5690458111470715, 0.5309069462802776, 0.5456029449480924, 0.7876921020660055, 0.9223156045082217, 0.8439768985754019, 0.800572775842403, 0.9333569703941321, 0.5818625172019024, 0.8341645601980006, 0.9192385451266707, 0.5228170388803519, 0.7508990295742228, 0.6944427462920615, 0.6123185081058549, 0.5148170039835807, 0.8059722053763958, 0.7165390311391582, 0.9064270574529577, 0.8016774706114586, 0.670085286900404, 0.9353030556472273, 0.5927435242836367, 0.6257877632897684, 0.8803013426202091, 0.9774312050544605, 0.8623275156593865, 0.6757064893918825, 0.5727706375554509, 0.9348945580505417, 0.8852434773408431, 0.6006828146297696, 0.5473749103562362, 0.7971818971302178, 0.6870643146946429, 0.9513016744023053, 0.5974092784988815, 0.865901739811777, 0.7480167228317108, 0.699646583011779, 0.8190386708605238, 0.8833104987450462, 0.6366345706774497, 0.5247828501184696, 0.6829711577326565, 0.5976557978057633, 0.5504791465488938, 0.874469324773542, 0.6886582481983381, 0.7611405589274516, 0.9304160974564263, 0.787633408855215, 0.6024515678530027, 0.9937958692445094, 0.5031676349320515, 0.5908541177271048, 0.9729621202708669, 0.7894211208547157, 0.7480937037730644, 0.5839322933761941, 0.6258753357311317, 0.6242287932934217, 0.6961996645135367, 0.5902817327728602, 0.5631685175784654, 0.9489879264730063, 0.7145292380985129, 0.7715826786360646, 0.5971010387886541, 0.928886120579828, 0.922105506289315, 0.9766908440410984, 0.7178556378905434, 0.5628834108574562, 0.8884886664653551, 0.8801925815426119, 0.5501477847238067, 0.9406592461145151, 0.57409284587939, 0.8030567295605424, 0.5735532642022023, 0.7131632558403465, 0.6021019680673134, 0.8023466923087237, 0.9911517312914858, 0.9283555628477698, 
0.9642873298537067, 0.5685366720625091, 0.8995738049864357, 0.9508536587399902, 0.7721733216533586, 0.6914493638273214, 0.6201843272462808, 0.8686713046977015, 0.8112874035755897, 0.6281433977833668, 0.5605735639857068, 0.7286325551175119, 0.933026408227567, 0.612700719991336, 0.9894012487664079, 0.9296655350513817, 0.8789845541012371, 0.6660585265080744, 0.9130395427235796, 0.9440390617578096, 0.595671901599319, 0.5801307491853879, 0.9801843716336662, 0.7665304285176111, 0.5443299647278783, 0.8160807585427419, 0.8963720379838112, 0.8000037208560357, 0.5493387160541028, 0.6423128129638429, 0.6782672766143182, 0.5366965263950833, 0.9904539029428634, 0.8074386660795596, 0.9749987005315113, 0.9004461253417266, 0.5442801886371628, 0.9861100100466311, 0.6756961758387185, 0.5153001795048755, 0.7105820041836035, 0.7151087428810528, 0.8426844028578999, 0.9334696908389316, 0.5387173940659978, 0.9780356510200655, 0.5092078748250634, 0.9041894408167037, 0.9084507033100153, 0.9858352985035522, 0.9977251604888921, 0.6255166936925756, 0.9319055335143986, 0.9478515183088212, 0.5112238200770491, 0.5243465933523405, 0.9611788723586476, 0.8283579827635386, 0.9726180811357575, 0.769322756281425, 0.5511759547006604, 0.5973778817756554, 0.766049919424078, 0.8066555871188895, 0.6123607447150197, 0.9041876394863497, 0.5453547224416004, 0.8799021693102951, 0.64073794685083, 0.6235313277860667, 0.8647203253320053, 0.5538870257753217, 0.7951733989524614, 0.7909103166763635, 0.7295631280155119, 0.5658552501514414, 0.9969737588020086, 0.9648560288790774, 0.971577504130111, 0.6043944040860201, 0.6093731497805022, 0.9524485285255859, 0.5272397944806816, 0.7459899539201624, 0.7413539953487533, 0.7764371956689934, 0.7723599462035233, 0.969271808257017, 0.8179015454611895, 0.8062198631203001, 0.859192189233609, 0.7983883339257585, 0.8180450886792647, 0.5897645281417023, 0.9583294268962703, 0.6366144321059479, 0.5328775801858479, 0.7481341800163481, 0.8279860703844647, 0.9335955975243848, 
0.8361617606158512, 0.8644465067640223, 0.7560815799127463, 0.5391598467510541, 0.6415972838313251, 0.5066063737655278, 0.8689867141708889, 0.8264348542402253, 0.9680711977042178, 0.7535303210927917, 0.8745731906787078, 0.6041732645806178, 0.8897209561908159, 0.546115344944078, 0.5662358624110522, 0.8274953409568278, 0.6969972384445595, 0.7646928749543843, 0.7741170706175562, 0.8927336189110302, 0.8349502746063118, 0.927993983519436, 0.8400038993892867, 0.7150662628964659, 0.8337203900287746, 0.809162415179727, 0.7821107478091514, 0.6472812535935779, 0.9992936142838325, 0.6988279918944089, 0.765774962343685, 0.8870801106439199, 0.644811649791124, 0.9317444372977486, 0.5287655326593361, 0.8103926857413416, 0.6737593581951399, 0.9354539852017341, 0.7524220444159123, 0.9294067680861566, 0.7118023177070739, 0.6125408747001304, 0.8061374670936118, 0.6025771659503504, 0.5988008839504377, 0.6772160315977416, 0.5721842093240558, 0.9257582558522934, 0.8625876557984682, 0.8369430584587806, 0.7871715326768565, 0.5758325450224827, 0.9403720946962999, 0.8624773095391387, 0.7784212275176914, 0.6314419446801292, 0.7864919049380775, 0.9860365578096937, 0.7088158324786624, 0.822637028741438, 0.6106576135339501, 0.6526516729224165, 0.630363750117541, 0.9457715162842351, 0.923825726244818, 0.6203508034625268, 0.5777090370308859, 0.9361211369974689, 0.8799247620562903, 0.5690777888029468, 0.9594926154101553, 0.7553492359507502, 0.9172716873744107, 0.6469259630892066, 0.6419609459167421, 0.9356267025520083, 0.5893220726747321, 0.7794392450633837, 0.6466284511074818, 0.8101684041837733, 0.9272100585063107, 0.8070201776945543, 0.9072703921400087, 0.7717124662644665, 0.6474798893269526, 0.5656525978391302, 0.7695098989891975, 0.5308961681093154, 0.7978579546956408, 0.9893621888895585, 0.7854852922218851, 0.7913104375855369, 0.5601868465870661, 0.9205969835165542, 0.8649544786080443, 0.83925695396736, 0.8831593927515858, 0.8706676011665795, 0.8468914678750754, 0.9751213095055555, 
0.6708867340677697, 0.5196757999952641, 0.9688822104389352, 0.8082070301603339, 0.887508522935192, 0.8627305062379904, 0.9112309752533647, 0.6627953460753193, 0.9104988198224979, 0.8545916381476852, 0.6712100236336769, 0.9386754441236531, 0.638288526324945, 0.8899318720327782, 0.5447974922141623, 0.7593956956871424, 0.6193879070632426, 0.6276753629526366, 0.7452503407105513, 0.566214035174412, 0.8246001900730047, 0.8275804844644135, 0.7175001337921665, 0.5757475357908031, 0.725354225023966, 0.7508714493277839, 0.8490888602439735, 0.5221810110236531, 0.6432087027047209, 0.5703424258751832, 0.5918439553957207, 0.5450633559651887, 0.7636295345572123, 0.8935455482822133, 0.5951562723265575, 0.8460069393757905, 0.5792940802743152, 0.9597930681890543, 0.8140748314028896, 0.6457368186916801, 0.5067618468504529, 0.8786725412981935, 0.8999072789089997, 0.9628867702430594, 0.7005712188754336, 0.9064162276411194, 0.5737426578521723, 0.9651402375551379, 0.5923859583231059, 0.6036605470654337, 0.8109372708212519, 0.9817447671144041, 0.8951035758355101, 0.6044109533558193, 0.7389349255937645, 0.6074630885016052, 0.7927549963240855, 0.6560024146608627, 0.606545906748442, 0.9894548369407334, 0.6647423076066405, 0.970438497725037, 0.6447132452019526, 0.7254278352305437, 0.5127809696326886, 0.7590417754236509, 0.8452070335146279, 0.7059712704227914, 0.7396227997602918, 0.5842101653931897, 0.5986208701174884, 0.7184923784743245, 0.7299323697191284, 0.6489232729335903, 0.5022669210771146, 0.6322509627525327, 0.763053063976199, 0.9268442287683314, 0.9276453967678917, 0.6280685705244176, 0.7253397556880219, 0.7733494013540292, 0.8836125802738826, 0.6930509412339598, 0.8241751004715885, 0.5735239219131214, 0.8704959587960808, 0.6824755270766485, 0.6034089221126737, 0.7980067119686445, 0.81325544545997, 0.7058306437927562, 0.5506534213991547, 0.6971774696597886, 0.7168587427677391, 0.5621021954922584, 0.5590101159733214, 0.5720405600082666, 0.65068658523896, 0.8724329072349906, 
0.6419975731455467, 0.7815188772618925, 0.7364756292657628, 0.7269697432946434, 0.764828667380228, 0.7026608113176966, 0.7467171978427285, 0.7716736753861309, 0.6611229821333526, 0.8594085513677666, 0.7810126943620727, 0.6058488186716612, 0.6187430325103898, 0.6372971087630233, 0.7962287805947063, 0.8819303568602181, 0.7349868411867739, 0.6225530961193368, 0.8267217436312126, 0.7166265528759843, 0.9839814012664176, 0.9844973998795409, 0.6454413039376963, 0.8424804041766842, 0.8758524823878595, 0.8664635342064181, 0.966523823173336, 0.903637924898262, 0.6024873887810961, 0.6472721158941692, 0.5473677975855294, 0.6515781846976938, 0.7913614488158919, 0.5078223977006053, 0.951443396816132, 0.8926649804022003, 0.5942274444495961, 0.9734472646101278, 0.9173589189877508, 0.9733892737751464, 0.6932726680437682, 0.7961515201262784, 0.8482503432012801, 0.5416943396530425, 0.6059121765633351, 0.9157956089649966, 0.9556006186383681, 0.8657232989217368, 0.8531936709891366, 0.7752035712198964, 0.7643374557716713, 0.9232079180140841, 0.7188592583245593, 0.6071271098511439, 0.693570737835572, 0.5694973324378205, 0.7287595775739686, 0.9533633395304775, 0.9386031212921386, 0.8754358162340823, 0.9723077968583876, 0.6074031962775197, 0.6091315509084652, 0.6708435230901378, 0.8910684099960196, 0.9171970450946719, 0.8850249301759934, 0.6957334883363528, 0.7684063440799163, 0.8499034122905205, 0.9994248037367608, 0.7587630957244205, 0.615639466635499, 0.5910933579476372, 0.868038905804835, 0.9911348021331192, 0.9683312312660068, 0.7775543538030218, 0.8698903637356296, 0.9836661582739838, 0.8262618785117107, 0.8208170819195332, 0.8192488889306426, 0.9663016987742086, 0.9608759512587788, 0.7001613684759183, 0.5518900638293815, 0.9189158416917398, 0.7309909777466168, 0.617585604910807, 0.64108539867837, 0.9826962519726201, 0.8581069911878478, 0.5236069397118206, 0.8511306831511287, 0.8354259560498847, 0.9974849673678108, 0.8626679568588198, 0.6384249999425213, 0.8933993774294342, 
0.605783480101186, 0.5767005680551742, 0.6933973915784716, 0.6267386418193486, 0.7532954438472861, 0.8366831754089226, 0.6925181379650953, 0.8806042361831985, 0.9927715256038303, 0.6468397175378227, 0.8363973823610666, 0.8228807661545354, 0.8207635227195325, 0.6345684542638482, 0.5460244737561737, 0.862896483932174, 0.7594506534530985, 0.8182111643210719, 0.9326587426786261, 0.8437607135085048, 0.8105828675250444, 0.9651722423408827, 0.7188309785165733, 0.9184745691697921, 0.6837155087680773, 0.864356557495406, 0.8111636777703696, 0.6604474626760426, 0.8433207421597612, 0.8582322474075599, 0.9322435984070796, 0.5592054881623384, 0.7465202185902682, 0.9648156326182875, 0.6622641393259387, 0.7743743302544923, 0.8324276639933258, 0.5997323851018697, 0.6936082129336076, 0.6949485045387525, 0.8494343315342369, 0.8560263484427157, 0.8934204463563975, 0.719285574423793, 0.8157969850351168, 0.9402366590308817, 0.5892047053193208, 0.8492050304598676, 0.9190346663778923, 0.6304232118782132, 0.9299306416395479, 0.8895239884590795, 0.6210813899272769, 0.6239828305492119, 0.8815964167441854, 0.5421508055052457, 0.6234835078352234, 0.5318737113537412, 0.5532307532545061, 0.7467806156370795, 0.7012167962833722, 0.7413258214448004, 0.6932670986737859, 0.9652966501354581, 0.9317480295767412, 0.527148741224237, 0.9789433505816936, 0.5966851895085903, 0.7539269216900355, 0.6477067257986939, 0.9472810274736629, 0.9859434201171593, 0.8685604185594906, 0.5516852771590778, 0.736075385854067, 0.6203753353014708, 0.984070627105726, 0.9224713603227914, 0.8481022597855841, 0.730888046559131, 0.8127518828589673, 0.7232364880000784, 0.7532584600771846, 0.7179610647075051, 0.7901949677247415, 0.540628991200071, 0.7137722346008291, 0.7218910591958734, 0.6819530757884142, 0.8498661350868397, 0.5253229489785055, 0.8619610312216612, 0.8275675237657103, 0.6793781885657606, 0.5385754899195578, 0.7956346749115927, 0.8861079791320856, 0.7273914814356961, 0.7462112172270107, 0.6998935098990737, 
0.9550717071141894, 0.7325989168217644, 0.8095947864463819, 0.7467534519899934, 0.5056370412144444, 0.9805134089440726, 0.7424928976516934, 0.8215389396992916, 0.7071116321268107, 0.8373864471663752, 0.5282736207930574, 0.6427315360566351, 0.8583428922609158, 0.7197423267487044, 0.8943949591423941, 0.8406508066968519, 0.797448320184507, 0.7123447773927369, 0.5272879469536178, 0.8071061070252796, 0.6613529514777503, 0.7464871456839346, 0.8650240110836119, 0.9630083436491759, 0.7321337490739381, 0.6689011340981394, 0.6012298990311133, 0.6761702417836236, 0.9085863875448552, 0.7114673435571468, 0.9286913879874974, 0.527130366470415, 0.6197132495254953, 0.5342020081092502, 0.6842655211856379, 0.6358157533248419, 0.9667905926849183, 0.7372302134002104, 0.7988735782655114, 0.9942424083548351, 0.7126357603022706, 0.5194328994076363, 0.7058617506477431, 0.6975594303454569, 0.6742148113713158, 0.5636466261424874, 0.866924039350547, 0.8128241922744169, 0.6242259302674458, 0.7216928039121008, 0.8625526215181499, 0.5446336408466662, 0.9720805968172449, 0.7799353634051467, 0.9020759688299356, 0.6085786952006665, 0.7272228954025439, 0.9516427212111332, 0.9202029054742108, 0.7865550595659505, 0.6592032796519258, 0.8140804193322463, 0.5817581828374656, 0.9945108190080619, 0.5945187715998665, 0.5854298668768506, 0.7849657307193798, 0.6678548554887997, 0.7511010535816605, 0.6412952245673447, 0.9074463219384168, 0.8593796275024604, 0.941575300389083, 0.8862845602805124, 0.6195749463604809, 0.7962173720632153, 0.5996288962359027, 0.7424032849857551, 0.6664851531929831, 0.8959714776432297, 0.7938249975452126, 0.7523675604878046, 0.6695543399555861, 0.5495882956372122, 0.5074509836783101, 0.6355267553318541, 0.7254805294319262, 0.615994579556958, 0.8842496847500916, 0.8335096017771166, 0.8356136520892048, 0.753744822004992, 0.6687124225214591, 0.5013493463649774, 0.6964961348640462, 0.9284157458715212, 0.6574112340670069, 0.7394353823108109, 0.5076509817741056, 0.505049463185155, 
0.9974586410157392, 0.608221926715348, 0.6085440101667177, 0.5502612942824134, 0.7350279730979432, 0.5054225229205341, 0.7845245072058287, 0.9399912914267939, 0.774985246196791, 0.628685800350931, 0.7836842507974778, 0.5040066232581919, 0.6868320113657542, 0.7428315687094418, 0.9591594251423772, 0.64028083865476, 0.581483021877352, 0.8458369980655769, 0.7801308345323663, 0.539591735618743, 0.6387729415226062, 0.8214162905124474, 0.6294385668007609, 0.5710118744524275, 0.9788100564312313, 0.8822053785589933, 0.6013680971114553, 0.8872482285611019, 0.7154401787988868, 0.957082666492155, 0.9238480685778019, 0.920499421339372, 0.8077174153731128, 0.62758600221516, 0.6070135706950428, 0.7690903493869063, 0.8964064639922824, 0.777356337297191, 0.8585222440695485, 0.9539225951403205, 0.6064968253795233, 0.8774041728795045, 0.5275339114748874, 0.5471018557122967, 0.6989315449124527, 0.6808127971219511, 0.9925792129631301, 0.8701908283397499, 0.8625230471392498, 0.7539456428881932, 0.951174496820616, 0.5163765929335989, 0.5212714552857647, 0.9189619901075334, 0.7546977498288028, 0.8442503180369829, 0.9977724247669473, 0.5466379518328359, 0.6754370687040228, 0.519689669240756, 0.7407312916789695, 0.6886682921913734, 0.5585553793388556, 0.5001646854927186, 0.8245076617655127, 0.7582070554256293, 0.8055233935960134, 0.6476485643874574, 0.5397088580373507, 0.6650368083029508, 0.751326653285374, 0.8728103026150922, 0.9334712785031092, 0.8516728895901313, 0.7150563033233293, 0.64249182600287, 0.797134886018501, 0.7687954835998456, 0.5030099695235011, 0.6483065311253663, 0.8042959437608342, 0.811330099538009, 0.9907821030562312, 0.9251775861740497, 0.6665724905453976, 0.8505561564686237, 0.8077796359439562, 0.5417073477243081, 0.8492871233040286, 0.9803575521934502, 0.840961315007005, 0.6726842825617717, 0.8139841204931875, 0.6153603601815192, 0.6360204745560991, 0.7871629133891715, 0.6266377462531667, 0.5564990014798619, 0.9584342253026024, 0.84635307762267, 0.6627638673629821, 
0.647496268977732, 0.7558606729721418, 0.6036079901726443, 0.7549357823247207, 0.7018668603936367, 0.7035952761747284, 0.5830384073970101, 0.8661151493965972, 0.5115210548017171, 0.9002449189278384, 0.80165331871786, 0.8366054774012033, 0.5152704761422321, 0.5513625974009901, 0.6503869992716769, 0.9362992004363219, 0.5362699365759088, 0.5509840695196888, 0.6054639405828044, 0.6068258463760381, 0.5899449884028138, 0.6863981402349759, 0.966104403465786, 0.7862928355853145, 0.724271685076771, 0.6797581310687606, 0.6657869460439452, 0.7875666347283876, 0.5224257669091793, 0.7715246130877873, 0.7495691554622255, 0.860980814613453, 0.7508232374621643, 0.5508662192697289, 0.9730480306235394, 0.7571295111460224, 0.8817231851399774, 0.6639833203789659, 0.6454477869535898, 0.8705027095160459, 0.9416287187398209, 0.8244523560703203, 0.5723220437824728, 0.5581010279168555, 0.5314122639344255, 0.8881725080712382, 0.9699796942746709, 0.5352110322638514, 0.8611758181033355, 0.5188827958307191, 0.6741961155633764, 0.9106729967355841, 0.9895395937253049, 0.8823547707681418, 0.9617486209620031, 0.9770196068101571, 0.5122914461283276, 0.8127063652037921, 0.8924411233016846, 0.6338177190596195, 0.6323571387106324, 0.8009320934022757, 0.9698883687908351, 0.8153607225108277, 0.7918960785878455, 0.9557413033570452, 0.9954371459565591, 0.6587014932483872, 0.9047113385705211, 0.8806559913052848, 0.6533317002154566, 0.9542431289755395, 0.967483372977517, 0.6829659385158403, 0.53338847146473, 0.5474766224876981, 0.785525621935918, 0.8743535269236435, 0.6396475202388634, 0.5810599730233394, 0.5517820338047237, 0.567747614855417, 0.7419112627027873, 0.7759341184740727, 0.9899811323691114, 0.869022750542275, 0.9786090918373964, 0.6659766617084661, 0.6055957058520327, 0.5287207428813627, 0.6279301088965541, 0.5051051323182799, 0.5067869337063622, 0.9520892637020106, 0.9983246199520428, 0.6663720715107004, 0.8667304232136006, 0.5646181645973436, 0.7822801629295308, 0.6306407731184911, 
0.5422390488381967, 0.7423610211775562, 0.7669692750622168, 0.6284215766776917, 0.5246790945804687, 0.7569465431686893, 0.8239438446312104, 0.8066088020023157, 0.9559454716923568, 0.9522892281958288, 0.8141237064950588, 0.7741795281217967, 0.9646188078148421, 0.8866249702469755, 0.9373333365514304, 0.8144156521331988, 0.5463363293456942, 0.8391382926004074, 0.7912909199941116, 0.6661283890391354, 0.8547679263691306, 0.8660490127309544, 0.9869218469468191, 0.8700658149211682, 0.6292786748579431, 0.6027470319449397, 0.7724539690962524, 0.9341279089150127, 0.8908967529214852, 0.8959756385400404, 0.9499915162410328, 0.8829808914050519, 0.588527188936645, 0.6220719595038948, 0.7520996157065233, 0.6732883960685321, 0.8138596903828739, 0.8400604207752698, 0.5910070623034218, 0.9810535708416663, 0.5317218838671773, 0.6380600983904474, 0.510422124228725, 0.9124714638422703, 0.9826745905913727, 0.8162873165681135, 0.5705213544243117, 0.9229669654353108, 0.6782793446956181, 0.5348241071984849, 0.6481731364020362, 0.8962311341660407, 0.9251476969132646, 0.8693645654379853, 0.6947493350702592, 0.6429346802919816, 0.8405250466793532, 0.5426840974741132, 0.6737793710757707, 0.8661738897068203, 0.6472203535492085, 0.5653312935343306, 0.7913043452514655, 0.9873635602933897, 0.6344374083838337, 0.8436586516434272, 0.5930489754586775, 0.6251511006164502, 0.8943895121165337, 0.5898576557769177, 0.6270049473271088, 0.5016010675944148, 0.987024927223421, 0.5462361682732024, 0.7820801556008385, 0.9025907713734327, 0.7957839895257873, 0.7171885111983831, 0.9801259732522883, 0.8363819335418878, 0.7041989551441128, 0.8453422013526817, 0.506137573502518, 0.7016582340068112, 0.6389676000208759, 0.7464468831698865, 0.7943464592271, 0.7540874843242008, 0.9663842386622867, 0.5044029485070005, 0.7550461526818808, 0.6669952270564248, 0.6294832704740695, 0.7301511348931227, 0.6852883012601347, 0.7572699509445467, 0.7845110020285995, 0.7046665517543833, 0.623357211629395, 0.9221188670816767, 
0.8469707236947878, 0.5406416080805108, 0.7071904518028774, 0.7578915003408437, 0.5570448083383401, 0.7625604509072689, 0.8930559677225354, 0.8912777967126418, 0.64330156894169, 0.5522331875101077, 0.5291207180788813, 0.7475599596319111, 0.5279019989424953, 0.6709963647385724, 0.9467125318160795, 0.6859647834676862, 0.6591653070551489, 0.7059039187272782, 0.750253013825608, 0.584059926717909, 0.6781431069956636, 0.6617685520679031, 0.8495978166027816, 0.7446990678829344, 0.683412953298346, 0.9194524083816196, 0.5951683400215075, 0.8567453538780716, 0.71942286957326, 0.7074442115305337, 0.5645290347358267, 0.6463821250878576, 0.978765301122825, 0.6887683197776799, 0.6118484045078656, 0.7268199273091658, 0.8008365474947201, 0.6442526397219189, 0.6858896261387012, 0.5723382522298827, 0.7920213478027313, 0.6654273036160414, 0.5817386220915031, 0.8727666904094182, 0.7835175528306348, 0.5455031228928606, 0.9814027362742459, 0.5727563389200692, 0.689762627492942, 0.791714642525873, 0.6163485800836753, 0.8207310443572817, 0.8133949879362177, 0.5790250279467155, 0.8042293684435147, 0.8661813354366318, 0.9005521261402658, 0.6610150174600089, 0.7014153279191648, 0.7882123376622696, 0.6812162819773513, 0.9574991749617754, 0.8547294358863358, 0.590419787368414, 0.5398120274938989, 0.5046546938254727, 0.5758468194250621, 0.9767988933708491, 0.8499268880608828, 0.9548401530285281, 0.8022817016170913, 0.7371141158022472, 0.9362793233775166, 0.5447460685337024, 0.7093465353240389, 0.9923554642857717, 0.9705475138288397, 0.8906479886315544, 0.5955923073632401, 0.6512953175684628, 0.7281578948560894, 0.7014151313703025, 0.5393590452686484, 0.537838946345712, 0.5039152192411301, 0.8544862231294592, 0.6530427126336545, 0.9950926027190417, 0.9230018248516366, 0.5032713795121586, 0.9389579831832147, 0.5108200134571232, 0.5331820214995369, 0.634444365913078, 0.8796023629950587, 0.5540371204191573, 0.8849459691936822, 0.8272156815681302, 0.9534040014489806, 0.8550879243395424, 
0.6212247783030326, 0.5900124030218686, 0.9414752603135741, 0.7835532908693117, 0.9058906278660093, 0.6764349850807293, 0.6752319541111288, 0.7278142282061899, 0.6822799991805462, 0.9842158315649887, 0.6451455223795333, 0.8165457890399725, 0.8479902913099067, 0.7262708287148001, 0.9183101142381913, 0.8593146430242924, 0.9986441443222565, 0.6482521992242056, 0.5937495240653794, 0.9432032582279726, 0.9991227223244209, 0.8253708783810989, 0.9750372730487554, 0.9994818479579337, 0.5144883938697711, 0.9844987566360291, 0.522443698228952, 0.6877436441318387, 0.610195484604662, 0.9626393837851239, 0.6901441618296951, 0.537500423665631, 0.9050592645770619, 0.7711508239035141, 0.6363619331652022, 0.7104251062958189, 0.7875394603173844, 0.9200418727070854, 0.9868673954380366, 0.5467174272023982, 0.5263977136179505, 0.9301157874322603, 0.6088797535102128, 0.7898981727643433, 0.9795836956264268, 0.7704315279465123, 0.711076673167428, 0.6756095281447514, 0.8735693527462387, 0.8320963297540304, 0.6925513221262253, 0.5745395426443783, 0.948419020047903, 0.6523333273108587, 0.5236132106155145, 0.5231664223487679, 0.9906846652425891, 0.8073194450820561, 0.6880077644956579, 0.606160860015338, 0.7568637011582766, 0.7903400957115343, 0.6492732823240357, 0.6358051871991257, 0.7110718018796274, 0.704198480878232, 0.671112138312153, 0.9262353217392337, 0.7643239659261309, 0.6098524027370238, 0.6237405955567091, 0.9724131448230725, 0.771320749903684, 0.9027026763447825, 0.5157515175285698, 0.5044751696790775, 0.5080204996619516, 0.6316711369162525, 0.8482847170423797, 0.5768850715970963, 0.6927238806187495, 0.9463711994652871, 0.9628368032031593, 0.7613563685323115, 0.9207611020549092, 0.727461245149277, 0.8654089718648319, 0.5240012299600254, 0.528232664685313, 0.5971396909025997, 0.9843867307350378, 0.5035702500340398, 0.8350013091841058, 0.9555597223005814, 0.6847165162959185, 0.9612882552738432, 0.9662764693704639, 0.5618622304575872, 0.5437702373170543, 0.9042684116086005, 
0.8994915482254788, 0.588192441711917, 0.7414407128697327, 0.8990495861792343, 0.9943783345882771, 0.6693100033208512, 0.9994306005931309, 0.8367778136979753, 0.8075341780018201, 0.87914381107437, 0.5391930194481513, 0.6921287633448756, 0.6821691052702441, 0.6478679060787151, 0.9697889762921106, 0.8121277199700512, 0.8208557648581127, 0.7688825457395927, 0.662218186939532, 0.7689031143591891, 0.6529719537760637, 0.5537111057627339, 0.7086412381957566, 0.5737179878895107, 0.5377528549595207, 0.9062197534994383, 0.5546321695219765, 0.8583301206929392, 0.771979175924891, 0.963190428961276, 0.5443752597274821, 0.8493553648434213, 0.7695227879433879, 0.8021848704675553, 0.8706199419166726, 0.5450576543168795, 0.5578596893932948, 0.6895014039351153, 0.6265898564416867, 0.6430066849859131, 0.8592206518269505, 0.5763069504073473, 0.6696851527025589, 0.9970588708398582, 0.6321689881338716, 0.6056867750946369, 0.6451922188604371, 0.5526265730803903, 0.5296750624413173, 0.7611111407209188, 0.7202147828565442, 0.5792410340849332, 0.9497219032729168, 0.502902877414116, 0.8866677015935163, 0.9231596227679413, 0.9852603809095961, 0.82308761245762, 0.6371239891920707, 0.6017704910701397, 0.827407677530754, 0.5582822632364661, 0.8459088122230044, 0.9372800374030774, 0.590580561900143, 0.6999863895751688, 0.5934554558891096, 0.9708659427224768, 0.7717176923415803, 0.8713444470437954, 0.797904760310113, 0.6708811129587311, 0.7349158197388348, 0.9675671793527554, 0.8853991086225568, 0.9908704100135797, 0.897532780912736, 0.5985791619605969, 0.9441159185834863, 0.815489791402076, 0.795776053398582, 0.9268106097488966, 0.8818912902098247, 0.5665704322934888, 0.7937331688155187, 0.6749848933926995, 0.5081105273440787, 0.5661905949031548, 0.889738692890724, 0.8476089852730666, 0.7681557862390453, 0.5878238711935817, 0.5265941255759441, 0.5885320174767544, 0.6380294996410438, 0.7530180932746419, 0.7818542280651599, 0.9474876929488656, 0.6210302519672064, 0.8720175232623644, 
0.6518003278984306, 0.6440609082083164, 0.5057199469450412, 0.9126974413422204, 0.5904492177774997, 0.9980943736943837, 0.8931951121552346, 0.630855202006282, 0.9739217023920542, 0.907953786119857, 0.8074024441732766, 0.6722336003958567, 0.7658162855791206, 0.9130898946532303, 0.6343208087274046, 0.9020599136155785, 0.9491400925968245, 0.9476663147910864, 0.7775670091757569, 0.7834763422297821, 0.6361737913315363, 0.7801193138125524, 0.935457432001034, 0.7435846563638826, 0.8511990817411328, 0.921914407316724, 0.5825643228034596, 0.6304291759220022, 0.9478757991589415, 0.7529194780498816, 0.5358916251263166, 0.5313131622782332, 0.8130577753990751, 0.7050077384556331, 0.5304989308591047, 0.5263903066550707, 0.8404523624914022, 0.960834594803891, 0.9808031486890788, 0.7664792481918353, 0.7085329575742314, 0.5972279893857362, 0.5850770908121116, 0.6675956377465075, 0.7667063912293711, 0.951847010451752, 0.5348802346695274, 0.9666425987850307, 0.6193773863150092, 0.5247338933684833, 0.6814014994906257, 0.9588513651786922, 0.5280415453357645, 0.5900331853726817, 0.8999581976152473, 0.8548746086015919, 0.5683052763374872, 0.5753213256607375, 0.8153848573205993, 0.9258176407889587, 0.7032029797111488, 0.9141246141515655, 0.8786449904484597, 0.71220928038982, 0.9626046113989366, 0.5884927679477097, 0.7041096186091234, 0.729421824800783, 0.516693560946442, 0.6151655602886694, 0.7828780620466007, 0.8121391289328194, 0.9215532925595558, 0.7635166112639108, 0.9631519895135339, 0.7495963033292148, 0.8645240084161566, 0.965778410923265, 0.7877480930922875, 0.9738568920244689, 0.5532159736892408, 0.9010590626326116, 0.9809248169742025, 0.9907865234407219, 0.621331994489667, 0.7277880107594965, 0.7748254454467006, 0.8532794660200171, 0.5475629157383297, 0.6608650231220455, 0.9070996824809132, 0.5272361925015754, 0.9528038687350556, 0.9800855037190217, 0.7620462770752794, 0.5035210540237935, 0.7262476757718992, 0.8630230697199942, 0.9807581236143462, 0.9208463247181572, 
0.973080807984918, 0.602643956324007, 0.800692027499462, 0.8955366885728462, 0.7466603241404706, 0.691560970225601, 0.783150720642182, 0.6903208376647194, 0.8309749691074096, 0.993309365982387, 0.8144912450784367, 0.9135447742150085, 0.9407740914297857, 0.7074604353545152, 0.697081179656499, 0.9649093568046953, 0.7016223119759175, 0.8960571490054365, 0.5481401159712449, 0.8167771800895456, 0.9866212931031462, 0.9789621601443093, 0.6238153551011751, 0.5805384288332347, 0.8352098226872349, 0.7511435686985755, 0.6075407565904285, 0.9970353481185896, 0.7625417034394228, 0.6896381574561721, 0.9936647967624479, 0.9698328866896199, 0.9579449271820555, 0.693176074549464, 0.5400438502822318, 0.7797175252539563, 0.6521040837229886, 0.7373313103928962, 0.774732377939773, 0.5700415912126688, 0.7379487315557738, 0.9261222811296093, 0.812839531082739, 0.8158067456232269, 0.6189611524042704, 0.8918868356475027, 0.7880199735780778, 0.7467041003023948, 0.5522794106119177, 0.8782760382192549, 0.7164184998995669, 0.8975103435205203, 0.8641587183132425, 0.7405421454589063, 0.6743099860170436, 0.8275697455584052, 0.7656313588165564, 0.8067949662425203, 0.6930012304124682, 0.7687870064049797, 0.5749624303879397, 0.6017232235631653, 0.7140040962893651, 0.8893900636177143, 0.7833465535429397, 0.7139905418363948, 0.809210507257953, 0.7428256589834867, 0.7131696716664147, 0.7572942424130329, 0.6707742875580609, 0.7508782531967153, 0.5971456049033353, 0.6205474054670728, 0.7782485533023116, 0.6081535540978, 0.5232898809564903, 0.8729576881245928, 0.840174208894777, 0.7032681618686192, 0.7215491805428262, 0.8054550283104849, 0.6243799131467653, 0.7806667753687628, 0.9559695572947281, 0.8179660902961803, 0.8637683336667368, 0.8908419905157499, 0.9149756796631563, 0.5592323653036884, 0.8792907979891693, 0.5467607148287494, 0.6892168653389859, 0.7807541062529757, 0.850868214082108, 0.7346786685112618, 0.5145650058870381, 0.9723453266680887, 0.9420563870228102, 0.8485630469122754, 
0.9079829432977093, 0.6300987047851148, 0.884904762235216, 0.5754226458949381, 0.57036998662811, 0.710767029916455, 0.5890012491023402, 0.7441989942952081, 0.8882314695800978, 0.7161385256847599, 0.6155200814632955, 0.7965838907404095, 0.7298169294013594, 0.8210765825200552, 0.9227448370682529, 0.543316228191334, 0.5086054537055513, 0.6436865128545388, 0.9313952149407179, 0.8165538747624577, 0.9046333620079032, 0.9475515575767919, 0.5792364218624378, 0.7406047471746384, 0.744354689376508, 0.668220435529526, 0.5489704803568004, 0.9177666869717374, 0.7386230674847225, 0.9549119038390215, 0.6663007459411747, 0.501511801720759, 0.5798291937238038, 0.8608795042913362, 0.9311931058865901, 0.9663580289198465, 0.6666432803673723, 0.9733449386049107, 0.5776961465744133, 0.9625512678771435, 0.8118203058996251, 0.8786775767397074, 0.5619908435230588, 0.9810255376517084, 0.9647457750461601, 0.516867428393186, 0.7385329007267993, 0.9253523860994977, 0.8159911384585913, 0.5034669452890302, 0.9798401263397128, 0.5554990294354976, 0.8923565967200522, 0.936016041633787, 0.5617432279371279, 0.9115007994469173, 0.9616120252103568, 0.5512614719933815, 0.7745756337850078, 0.5763931258335667, 0.9319487206028941, 0.5215573683202133, 0.9409225369273543, 0.7535761038267095, 0.549207699535055, 0.5449808775640238, 0.5174410237202394, 0.5359497155968703, 0.8488764553619538, 0.8385226592950682, 0.9827112436989683, 0.6251820559912584, 0.5811010479983634, 0.5731586680325087, 0.9548355726480812, 0.7487518854671545, 0.7822573850550457, 0.8402221562373673, 0.7111939169749915, 0.6170137959232103, 0.5776628918788507, 0.8553482641117176, 0.9052261788366268, 0.8233934472315709, 0.9402156502488612, 0.8825086593073908, 0.7208125128137459, 0.726704212687597, 0.9597905946665753, 0.5088441907381211, 0.6568821675606182, 0.735290431589837, 0.7455273057598923, 0.9283352894794226, 0.9731069282709043, 0.5945919534206381, 0.9545748679765542, 0.8876561701419011, 0.8279208760858486, 0.546106475189355, 
0.7340471990108124, 0.5091437340474346, 0.6544922034439471, 0.9321119942707652, 0.8175276218388964, 0.6057109859496039, 0.5495172859860433, 0.7165390961702398, 0.8633524271487929, 0.8943950142380009, 0.9666132253102043, 0.6566493166334739, 0.6023344459120341, 0.5618943649927509, 0.5688488331547201, 0.7522104526775619, 0.7677593338785325, 0.6299834090090831, 0.7940285869927799, 0.6822173850265412, 0.9159337987335532, 0.5949111911215261, 0.7741225741022411, 0.6692708953656141, 0.7600670700627712, 0.8982546547226739, 0.7148503005329411, 0.9629938955188944, 0.5434943554049741, 0.927636793736359, 0.5555095892420285, 0.9394404903964699, 0.7066524266839527, 0.7141812100068912, 0.5145164870229106, 0.7970265780131245, 0.9761940469713764, 0.8089334717346975, 0.8876117744930605, 0.9594496608894341, 0.9626651281278993, 0.8799664199715649, 0.7263434794396915, 0.6389132194343428, 0.9147343376938425, 0.5772670148037337, 0.8116652636916576, 0.8492596279369569, 0.5847620544473917, 0.535505526428508, 0.6569179323182224, 0.5654580262325739, 0.7554853168395022, 0.8160809006916021, 0.6262394156476006, 0.985183401739836, 0.6481177230005469, 0.7022062274509213, 0.8948697556916736, 0.9085391248394926, 0.8021890211107321, 0.5122871752221073, 0.5958414631832574, 0.7643722909219043, 0.9166881287919484, 0.8613035650744176, 0.8242330313229017, 0.7011863363769808, 0.5729926461305722, 0.627330555101542, 0.5873460393330576, 0.5640381747862884, 0.7306663406613055, 0.9773582454911178, 0.655667173909833, 0.8983890518855125, 0.5794611745141827, 0.7201639454830593, 0.7222179841382386, 0.9606061496427862, 0.5271431718202606, 0.69181713715974, 0.8798206669400055, 0.6198936103797201, 0.8655089627787671, 0.698826669071261, 0.59514389449373, 0.9850811885260504, 0.8568163612461759, 0.5392396837828866, 0.5058683352348164, 0.7546475734549476, 0.9541223330146427, 0.7903611448867129, 0.6263456484991599, 0.9231776694188625, 0.9897892704354601, 0.927818922689645, 0.5423485668378314, 0.7047726401840282, 
0.6976140888653959, 0.8129145824702841, 0.7775948270056894, 0.9599251885132676, 0.5897051182717943, 0.72259610471521, 0.8208185764231162, 0.7829879670249849, 0.5794791231344536, 0.6336939820699174, 0.841672673143572, 0.7597993995559185, 0.5383469576860076, 0.7333575281548512, 0.6803742245254932, 0.5360705457248814, 0.5337415686290475, 0.8916263395486934, 0.5936009356733328, 0.7430659924476758, 0.937640447034054, 0.6271336373347673, 0.5096342135120778, 0.6956335645542973, 0.7096904461367426, 0.6646328858319623, 0.7598965484320279, 0.5597282222257824, 0.9396122935043956, 0.6691518823899298, 0.5300168273840131, 0.8044385031539234, 0.9441986171679582, 0.5347452753848763, 0.8780461964059572, 0.947201810497851, 0.7408452061202506, 0.8275865512618619, 0.9085451154688798, 0.6647112615174757, 0.9211507294082044, 0.8849905250123828, 0.956877454226151, 0.6814851836919837, 0.6143345874121494, 0.8478898242580301, 0.9032443706198472, 0.8704397870338586, 0.5838327234386085, 0.5093424607625776, 0.750210386221454, 0.6102001943178907, 0.6787408592484323, 0.6539212853921985, 0.6552082311664991, 0.8441167669405294, 0.5515882697084866, 0.8847674741807332, 0.631502507680206, 0.831542692197049, 0.7582017714076408, 0.7062437818723368, 0.8273174464873452, 0.587509614887266, 0.9378992047021357, 0.8655204056002334, 0.6714407441108761, 0.8016948745007113, 0.500498948503451, 0.9578454013269695, 0.5727639046866754, 0.5074585941491072, 0.5047562390443983, 0.6394479734989078, 0.7050525434507275, 0.8260806268203649, 0.9003423928859982, 0.8177338763505966, 0.8313633879148359, 0.5235734184814482, 0.5757098088395213, 0.5680103037906832, 0.6622293264053203, 0.8186935492499072, 0.6420021030055776, 0.72588672166501, 0.5968752149251801, 0.8012143434310814, 0.5306563078654774, 0.7040229872412612, 0.5796787203354592, 0.547577708372718, 0.514535939734698, 0.97880555490639, 0.624298765564067, 0.6078522931105188, 0.5514806721996608, 0.961161399588859, 0.5171142609826986, 0.9671588435594267, 0.709747998286118, 
0.5003384976527174, 0.8936260755378128, 0.9386092958147547, 0.6325017778360582, 0.5272937070212371, 0.8023640771106344, 0.8457181011029538, 0.5849181994974715, 0.9895759256022094, 0.912751561030071, 0.9097329929956273, 0.5979523641726006, 0.7976078511320994, 0.6807244069121313, 0.9769282635340015, 0.8236123029954576, 0.7095663616509169, 0.9568282589119455, 0.5121874335791188, 0.5848382154096595, 0.5363190913725588, 0.7263317732540282, 0.8192121463418599, 0.9320345086268264, 0.820941478958841, 0.9377133749107056, 0.9291095075545825, 0.6219943632408482, 0.7475936347145958, 0.6035060192441475, 0.51613835495847, 0.6385714253766595, 0.794772661716022, 0.944959205894051, 0.6371016541857302, 0.7440090924057259, 0.7990161042362784, 0.9116032503614602, 0.9838413248299333, 0.7750995054268339, 0.8409674775981206, 0.7674891045980811, 0.6105021547038763, 0.815966262352742, 0.81420105566067, 0.8896918807135217, 0.5232372188184375, 0.7224060620289086, 0.6311442951987545, 0.9166395038006065, 0.554327091587342, 0.6207726941513645, 0.9132344442466798, 0.9664722704083832, 0.6968271119570654, 0.970774620534606, 0.6007051783370898, 0.5269033204193031, 0.7142062693485165, 0.8381255579417386, 0.7499655016617677, 0.919972067231363, 0.6945185737768618, 0.9433802233098678, 0.5911774518884401, 0.5879303662995576, 0.9386682024682087, 0.9858960900215794, 0.5819236340974195, 0.6029226568022873, 0.6259292864115986, 0.6958886475032189, 0.9937376285548076, 0.8320003246569377, 0.8702464066552742, 0.9450851657272148, 0.6957097378393557, 0.6530638439860259, 0.6501849325995566, 0.5950420634586825, 0.9214464457789617, 0.7256108866011248, 0.9218122594463583, 0.7721612524309132, 0.7674741839596815, 0.5819236377126293, 0.6143647578410715, 0.51262649215124, 0.5937341428238281, 0.956228682104425, 0.7414831777740765, 0.8819998789895225, 0.7905894928357589, 0.6300035653322343, 0.6650867138598482, 0.6567746291628338, 0.5497305649876898, 0.8020202655697968, 0.7912543982342661, 0.7332937746764661, 
0.9447514391850014, 0.845639136550834, 0.8699067243538015, 0.9181619656984477, 0.6005863689508792, 0.6487903874747905, 0.7539029273712368, 0.7497786666274742, 0.724563176564998, 0.5023082424050707, 0.5884415544925233, 0.680759290387083, 0.5663625065059598, 0.5123781290486731, 0.5954149627282883, 0.6108011673877101, 0.625787966156357, 0.6183997406254287, 0.7138788654213449, 0.7185466527228659, 0.676441221984499, 0.6520389638721793, 0.5065802328308648, 0.775894753045652, 0.5374628434241483, 0.986729501201747, 0.8858823440454417, 0.8644546265658095, 0.5944324040747955, 0.9042400833410023, 0.6239635101390193, 0.8707267010857396, 0.5829301501830846, 0.5199067455464088, 0.71156981424374, 0.6462548852846816, 0.5745937961768736, 0.9616339451270471, 0.5151223732390933, 0.7469846000148666, 0.6568451961218407, 0.9891077960527648, 0.5868735664766032, 0.5276865587072519, 0.788532444924437, 0.8568274545362671, 0.7382268032499952, 0.5455428243351605, 0.663947961618639, 0.6995078291229855, 0.7602486356990275, 0.5522437700294085, 0.6564187834399263, 0.7098482921076579, 0.7970794560543202, 0.8794620541876333, 0.8558278576295268, 0.6953459653855725, 0.7924269956359657, 0.7427184227386803, 0.611128613561954, 0.6182016244816565, 0.7771939684685976, 0.7486834523076167, 0.8906902820703783, 0.9731136652752677, 0.8125775018452577, 0.9820511230622848, 0.868713141716424, 0.6995722042855335, 0.5068941022794847, 0.5559584569580376, 0.7489136474129336, 0.9047252910503262, 0.7461895032638683, 0.6855594038671138, 0.8657387531726675, 0.8448167158403557, 0.6387128402858971, 0.9249573644447906, 0.6676387269337412, 0.8332763052829479, 0.724456814608504, 0.676952875014325, 0.9394291767952458, 0.617030241896537, 0.6699281572191766, 0.8800297403982202, 0.8513196296473062, 0.8434512974299726, 0.8106102330780651, 0.6239297803389688, 0.5103707709485812, 0.7404545647183582, 0.8925657415372745, 0.6903951721291898, 0.6618160374559625, 0.8685889368030917, 0.824143660395962, 0.6900713758129617, 
0.8618022025388352, 0.7750263475354926, 0.9333359422614473, 0.7076518842113546, 0.5905297117513141, 0.8623326459481608, 0.7740220458956032, 0.6353494331599708, 0.8782835303747962, 0.8738937878086444, 0.829769765383211, 0.9686518414096643, 0.9961416488482957, 0.7077218096744202, 0.9568803869035982, 0.9543664385660772, 0.7376049019097555, 0.6115508097181528, 0.799801536282982, 0.5445627704543562, 0.5811972814704692, 0.9988007977949125, 0.9890307892726667, 0.7315789949713221, 0.5823131997800091, 0.9521110156988839, 0.9177812420135374, 0.9713679244856624, 0.8451066627742647, 0.6897898872351151, 0.9524697300131968, 0.5884566053004838, 0.8922603741057531, 0.992087369458752, 0.9954205212853859, 0.7413923653723893, 0.7443419545465024, 0.5234894288227889, 0.9029024439461615, 0.9118065971924152, 0.8640841451728554, 0.8893893778701103, 0.7903734453127909, 0.8235783554798508, 0.5599762746658203, 0.6954132699728093, 0.5551054962485193, 0.5468696848886887, 0.7013235990456677, 0.9319981303555127, 0.9374451842966122, 0.5162608741334316, 0.7593373763590037, 0.9980774268047573, 0.6468327969389807, 0.9752185064346188, 0.857317989737009, 0.9326514133512578, 0.7946057205917072, 0.6519536121206885, 0.6206587485931965, 0.6983623030935081, 0.5216833929101097, 0.6100074081241458, 0.5556384774341406, 0.897829968157962, 0.9629735642666668, 0.54299655523251, 0.6737171394007462, 0.5529980000970356, 0.7699781518235288, 0.6390709314642065, 0.7466243913291098, 0.7370868322920983, 0.6577714838434039, 0.8667955475814573, 0.8570927450588994, 0.915443069705038, 0.6761497278706741, 0.9028639722672178, 0.7870245636794932, 0.9286246553754067, 0.7151866046811506, 0.7580169009608121, 0.6316691106743932, 0.8536631232775428, 0.578597066662427, 0.7053450737047939, 0.6248803283100124, 0.6402438901870201, 0.5382362243533723, 0.6576081598486704, 0.6397805892757882, 0.5739304456483973, 0.9274104066698363, 0.5136163368971776, 0.7791369422709844, 0.6759696802652387, 0.9655234565714774, 0.508801023167938, 
0.6278248082470821, 0.5183750901727101, 0.6669880475856915, 0.9566274135072395, 0.7863004043445014, 0.916631910770439, 0.95490309570699, 0.5193346073401304, 0.8658106752555692, 0.583594882035577, 0.6134488810350021, 0.9749299914027337, 0.9950964526079356, 0.6494263052602922, 0.9245302293777766, 0.9680142443911066, 0.8401328610630099, 0.7462412687472043, 0.5852454919132735, 0.9515875107282289, 0.7786423596120517, 0.7616281510429062, 0.566174305491594, 0.9543339542856715, 0.5273749665567898, 0.6195356820733211, 0.5994298504336371, 0.5735985022210579, 0.7066806787651188, 0.5339849363381436, 0.7439740867141023, 0.8163193321796762, 0.6196235736423314, 0.9660851623694238, 0.5704041827775252, 0.576049710003587, 0.7192707495755286, 0.6659549442543731, 0.6853339087844519, 0.7296601665496404, 0.8110748505493178, 0.629411521840766, 0.7613305529138508, 0.5181797910951775, 0.677495957398109, 0.9725076131018721, 0.708163197501172, 0.5898068760275351, 0.7194877589767117, 0.9752781468677, 0.5587680784176909, 0.6743451926808512, 0.7062029363005743, 0.5092257468171308, 0.8100761851190434, 0.5209093336425272, 0.9635812754176503, 0.6080854494996701, 0.5104739408222416, 0.8244010097772878, 0.5184157841042363, 0.7645356724838412, 0.9455409533863923, 0.9416185854759107, 0.9529139615341653, 0.7121095898789763, 0.9924328347113163, 0.7500861990877278, 0.8737156710348117, 0.9075888804293593, 0.9897185858163133, 0.9359078019123415, 0.9180518535285974, 0.819159722941896, 0.6901042144408316, 0.9539228790816021, 0.5260031171535817, 0.8384345272856997, 0.8534840165057954, 0.6666314970393343, 0.8665983874942456, 0.5050303143119163, 0.9245475216578249, 0.6304590057833481, 0.5520713844339039, 0.8664706194735008, 0.8907457469413166, 0.8985407673651786, 0.8153897769153148, 0.5805830933043794, 0.7946065312968003, 0.7894095591414108, 0.5642107543628881, 0.7169223927215134, 0.7816648684684097, 0.8370834201788926, 0.83427396861363, 0.595665806793358, 0.5879045396010092, 0.60225269403792, 
0.6639952333584591, 0.9690943665634364, 0.8589753035012386, 0.9810124868913355, 0.824004762984585, 0.9548898670024601, 0.9430384979492147, 0.991053192902693, 0.5814749338074727, 0.5690821314788856, 0.646089434973248, 0.7449489881627496, 0.7800743929675247, 0.7502217346742057, 0.5610263946233678, 0.6277309241501896, 0.8623814511942484, 0.5283183560677007, 0.7993474526213944, 0.883360572925914, 0.7514186808094832, 0.7285197688564702, 0.8985730015703615, 0.8780966629637306, 0.5105513462144173, 0.5743845687160591, 0.6473557379305319, 0.5596342134453482, 0.5635261348027991, 0.7388192282736423, 0.6517223201208557, 0.8908123061154398, 0.9609017916608003, 0.7134180241968856, 0.7722716816300095, 0.628915190016496, 0.7992539249901016, 0.9524723906969494, 0.6874613488981086, 0.6201398285843631, 0.7738401783654318, 0.6216962112087092, 0.518347734160981, 0.9820150863386088, 0.997935734550006, 0.6302250195446221, 0.6891292765965279, 0.7445525936155979, 0.971016942944117, 0.7170096574450445, 0.9181274841829085, 0.650198949647707, 0.9410415519609641, 0.973367646683202, 0.5588098269287807, 0.7350939431608099, 0.8862411231679831, 0.5467340687530848, 0.9448254964231639, 0.6636530701520094, 0.7891706621385175, 0.6174018387352945, 0.8446743138383446, 0.9586801998347894, 0.7765386623620008, 0.7183259211777344, 0.5804345832679023, 0.5470095772327443, 0.899680384945557, 0.9091170986615027, 0.5184006817650293, 0.6257631455904229, 0.6565077927611609, 0.799345867457347, 0.7397471659038668, 0.8656987171673609, 0.9902975310913673, 0.6504544628771519, 0.8386205093325515, 0.5845299753148365, 0.7466978144380825, 0.6226702642801555, 0.5361870592875855, 0.6180035748596389, 0.6530020588487803, 0.9043213846362522, 0.7737838623853228, 0.7606026949878543, 0.7894878255654493, 0.5419214399090408, 0.869686185955669, 0.8440654618381636, 0.5092667441930632, 0.9992020795549932, 0.5632120553500557, 0.5823077453685636, 0.6475458177302845, 0.6244972283002301, 0.8443518334040885, 0.6052367484471675, 
0.9642422402159312, 0.5849823902964231, 0.7638727892105397, 0.9972146662800506, 0.6908506576943332, 0.6339339904859067, 0.8760707597978983, 0.9143227854671068, 0.8839646015220203, 0.9751222545493602, 0.7130300542744511, 0.9472868026242351, 0.9792277467614607, 0.6902262552797989, 0.6842961936766934, 0.7702291197585027, 0.8041854878745002, 0.8854941581325739, 0.9869939102714557, 0.5825313991328933, 0.9156944330654705, 0.7041593705465274, 0.6517568716596716, 0.8431802317413712, 0.5771890491167326, 0.8999483766949412, 0.7306833530105366, 0.5105306485581766, 0.8941974746659, 0.7001845125417325, 0.9230480978082162, 0.7296500630157935, 0.6585127604396668, 0.6056567711344879, 0.7298999588303678, 0.7717246320849841, 0.8342539924357717, 0.9387262695166154, 0.9385848134053758, 0.967743588775694, 0.827918276825976, 0.5793968821863473, 0.992269631438065, 0.9372890046808348, 0.6042498151832054, 0.8572151804735397, 0.8287857358392101, 0.8020810248765524, 0.5775685721038615, 0.9072346965090103, 0.9673790386720709, 0.5646160884367447, 0.7203338996588889, 0.7966734590495259, 0.6405119934419821, 0.8494586031118672, 0.9967902275760788, 0.7329506518020156, 0.8067745835621453, 0.662089300940164, 0.8096395485173582, 0.5134721220811164, 0.631177325185702, 0.7055575958148321, 0.5235668360048862, 0.5930493519024747, 0.5742897074293141, 0.9933232219239445, 0.7490016095699557, 0.9228955667932219, 0.7019851990183952, 0.9088767836279619, 0.5175628744495488, 0.8161612952430528, 0.5807502477554018, 0.6296754160479395, 0.9013836481827184, 0.5121239821747697, 0.5512897483516895, 0.6506175540808223, 0.964916731062867, 0.8090448849230295, 0.7536931871475352, 0.9924999698903926, 0.8670715996691496, 0.9540822808877119, 0.7307842132711111, 0.8927413052280826, 0.7473936047875409, 0.7707846106018578, 0.776889834355496, 0.7063052413417985, 0.9750405487799287, 0.9634610960940286, 0.981018285591378, 0.6081858284854998, 0.5697324286976835, 0.9671324843121714, 0.9651214313113363, 0.7538811234941332, 
0.5062967997092089, 0.7739971114983419, 0.9845224328261482, 0.756722968929995, 0.5193750966689646, 0.5282581092333758, 0.7047094773212488, 0.9813825433284973, 0.571645206565319, 0.9296626831678227, 0.7434012423010969, 0.6559294891539424, 0.8188660396473283, 0.5446087856981467, 0.909559548354246, 0.5272920833887746, 0.7071931471523577, 0.5536862673539684, 0.8564666274296306, 0.6329393490922303, 0.8742048562954048, 0.5902216221712031, 0.5656511109545408, 0.7788253192830252, 0.6194290033936616, 0.9407302127986888, 0.9709233552379848, 0.5271409184155882, 0.8333902808389675, 0.5478825150687853, 0.9527645321940665, 0.8575596075192027, 0.7461483922547937, 0.9301817332789855, 0.6054960006827144, 0.6181110448430851, 0.9115442473543911, 0.7479732534279512, 0.6075176615633155, 0.653317287640738, 0.5347820685420424, 0.7399641608547755, 0.9411236882598509, 0.8062444846628714, 0.955657916416232, 0.6250383469335281, 0.7437924956977395, 0.6367630478529768, 0.7442259409077976, 0.9522675541177187, 0.6267479491140331, 0.6605325751695186, 0.7135647795251977, 0.6040888609898711, 0.7753938484687041, 0.7901315009432075, 0.5231910399789512, 0.5494139008908815, 0.8524831299609441, 0.5584945858501782, 0.5678221587349633, 0.8961725250043927, 0.8167358946180179, 0.5141028835914199, 0.5257767908544535, 0.6109489312165779, 0.5386824219978534, 0.788781576043345, 0.9083898556190059, 0.5816091580686003, 0.684570421674064, 0.7548838414634305, 0.9390758322173589, 0.9742952674974352, 0.944472770654831, 0.5836583927849082, 0.5886609513714438, 0.9036992804398856, 0.7990555352941213, 0.594504829339402, 0.8151598625411867, 0.55034881222174, 0.7492890004155064, 0.7454553223467248, 0.7103703812856168, 0.9043640966612263, 0.6669645580521766, 0.740557864356781, 0.7114909932755679, 0.6892914247999391, 0.6797374644514984, 0.804739855926995, 0.9089380590942107, 0.6865886118042455, 0.9200926842412764, 0.6320996054330472, 0.7830442996465905, 0.5369618564243737, 0.8418937175849487, 0.9801840129367836, 
0.54699112303973, 0.9476738951769137, 0.8621129389677087, 0.5720044712533787, 0.55726637492837, 0.9855064992896845, 0.8039593764114352, 0.8263516499147485, 0.9768104293024091, 0.6841678663040249, 0.9021184994828246, 0.7211204277135697, 0.7668081535805646, 0.5352551547916418, 0.8673430832819562, 0.7139625790908085, 0.6543944185740465, 0.7474133793272264, 0.9352045711943157, 0.6898600350324553, 0.699026993816739, 0.8448840852562054, 0.8208716009427247, 0.8325120392210702, 0.5303627364860108, 0.6130734803313831, 0.6052856525409556, 0.5758404034760503, 0.6881738078562052, 0.8638735040297867, 0.6223293630602783, 0.8004569225726799, 0.7323457237543523, 0.7333868469804345, 0.7069503486037301, 0.706515672847049, 0.8545356351414239, 0.8571007219567894, 0.6061155135811439, 0.7720091625167034, 0.8825133182309278, 0.7141733498439138, 0.5155210287132066, 0.8513707481682196, 0.706846777882213, 0.5144397114366884, 0.5387180201335567, 0.9437728225157572, 0.8183729215677006, 0.7937717332181202, 0.9198354618668612, 0.6174417092049806, 0.7768723528503718, 0.9371433272083187, 0.6598753115418798, 0.6460235128164442, 0.954700198293184, 0.5720088701687174, 0.6158239035308396, 0.5498523803577058, 0.7606094258761089, 0.9823633654786544, 0.9682736849552296, 0.6301224910009215, 0.7497655182058425, 0.6588817758262142, 0.7872249293371525, 0.7876803397924068, 0.7379218157029063, 0.8679095267708163, 0.5214892296450815, 0.7794813763014543, 0.8131410251806674, 0.6655554955924685, 0.9629355177303521, 0.6245346606054978, 0.8568426819684396, 0.8151527456602158, 0.7996638265904075, 0.5678450538107396, 0.7889981661573171, 0.8566507722101928, 0.8573344321055989, 0.8643580000022488, 0.8485453972271111, 0.6612273255873804, 0.6835483620524443, 0.5301005541909193, 0.5642083816829928, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0};
int h_B[]= {
0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250, 252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296, 298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342, 344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388, 390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434, 436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480, 482, 484, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526, 528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572, 574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618, 620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664, 666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 698, 700, 702, 704, 706, 708, 710, 712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756, 758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802, 804, 806, 808, 810, 812, 814, 816, 818, 820, 
822, 824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 844, 846, 848, 850, 852, 854, 856, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894, 896, 898, 900, 902, 904, 906, 908, 910, 912, 914, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940, 942, 944, 946, 948, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986, 988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1022, 1024, 1026, 1028, 1030, 1032, 1034, 1036, 1038, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1060, 1062, 1064, 1066, 1068, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1090, 1092, 1094, 1096, 1098, 1100, 1102, 1104, 1106, 1108, 1110, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, 1140, 1142, 1144, 1146, 1148, 1150, 1152, 1154, 1156, 1158, 1160, 1162, 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178, 1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216, 1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330, 1332, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368, 1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406, 1408, 1410, 1412, 1414, 1416, 1418, 1420, 1422, 1424, 1426, 1428, 1430, 1432, 1434, 1436, 1438, 1440, 1442, 1444, 1446, 1448, 1450, 1452, 1454, 1456, 1458, 1460, 1462, 1464, 1466, 1468, 1470, 1472, 1474, 1476, 1478, 1480, 1482, 1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 
1518, 1520, 1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1556, 1558, 1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1580, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596, 1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634, 1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710, 1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1738, 1740, 1742, 1744, 1746, 1748, 1750, 1752, 1754, 1756, 1758, 1760, 1762, 1764, 1766, 1768, 1770, 1772, 1774, 1776, 1778, 1780, 1782, 1784, 1786, 1788, 1790, 1792, 1794, 1796, 1798, 1800, 1802, 1804, 1806, 1808, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824, 1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1854, 1856, 1858, 1860, 1862, 1864, 1866, 1868, 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900, 1902, 1904, 1906, 1908, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938, 1940, 1942, 1944, 1946, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976, 1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014, 2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052, 2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090, 2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2108, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128, 2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166, 2168, 2170, 2172, 2174, 2176, 2178, 2180, 2182, 
2184, 2186, 2188, 2190, 2192, 2194, 2196, 2198, 2200, 2202, 2204, 2206, 2208, 2210, 2212, 2214, 2216, 2218, 2220, 2222, 2224, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242, 2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280, 2282, 2284, 2286, 2288, 2290, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318, 2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356, 2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394, 2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432, 2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470, 2472, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508, 2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546, 2548, 2550, 2552, 2554, 2556, 2558, 2560, 2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576, 2578, 2580, 2582, 2584, 2586, 2588, 2590, 2592, 2594, 2596, 2598, 2600, 2602, 2604, 2606, 2608, 2610, 2612, 2614, 2616, 2618, 2620, 2622, 2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2658, 2660, 2662, 2664, 2666, 2668, 2670, 2672, 2674, 2676, 2678, 2680, 2682, 2684, 2686, 2688, 2690, 2692, 2694, 2696, 2698, 2700, 2702, 2704, 2706, 2708, 2710, 2712, 2714, 2716, 2718, 2720, 2722, 2724, 2726, 2728, 2730, 2732, 2734, 2736, 2738, 2740, 2742, 2744, 2746, 2748, 2750, 2752, 2754, 2756, 2758, 2760, 2762, 2764, 2766, 2768, 2770, 2772, 2774, 2776, 2778, 2780, 2782, 2784, 2786, 2788, 2790, 2792, 2794, 2796, 2798, 2800, 2802, 2804, 2806, 2808, 2810, 2812, 2814, 2816, 2818, 2820, 2822, 2824, 2826, 2828, 2830, 2832, 2834, 2836, 2838, 2840, 2842, 2844, 2846, 2848, 
2850, 2852, 2854, 2856, 2858, 2860, 2862, 2864, 2866, 2868, 2870, 2872, 2874, 2876, 2878, 2880, 2882, 2884, 2886, 2888, 2890, 2892, 2894, 2896, 2898, 2900, 2902, 2904, 2906, 2908, 2910, 2912, 2914, 2916, 2918, 2920, 2922, 2924, 2926, 2928, 2930, 2932, 2934, 2936, 2938, 2940, 2942, 2944, 2946, 2948, 2950, 2952, 2954, 2956, 2958, 2960, 2962, 2964, 2966, 2968, 2970, 2972, 2974, 2976, 2978, 2980, 2982, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002, 3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040, 3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3064, 3066, 3068, 3070, 3072, 3074, 3076, 3078, 3080, 3082, 3084, 3086, 3088, 3090, 3092, 3094, 3096, 3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116, 3118, 3120, 3122, 3124, 3126, 3128, 3130, 3132, 3134, 3136, 3138, 3140, 3142, 3144, 3146, 3148, 3150, 3152, 3154, 3156, 3158, 3160, 3162, 3164, 3166, 3168, 3170, 3172, 3174, 3176, 3178, 3180, 3182, 3184, 3186, 3188, 3190, 3192, 3194, 3196, 3198, 3200, 3202, 3204, 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, 3230, 3232, 3234, 3236, 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268, 3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, 3294, 3296, 3298, 3300, 3302, 3304, 3306, 3308, 3310, 3312, 3314, 3316, 3318, 3320, 3322, 3324, 3326, 3328, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344, 3346, 3348, 3350, 3352, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382, 3384, 3386, 3388, 3390, 3392, 3394, 3396, 3398, 3400, 3402, 3404, 3406, 3408, 3410, 3412, 3414, 3416, 3418, 3420, 3422, 3424, 3426, 3428, 3430, 3432, 3434, 3436, 3438, 3440, 3442, 3444, 3446, 3448, 3450, 3452, 3454, 3456, 3458, 3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3496, 3498, 3500, 3502, 3504, 3506, 3508, 3510, 3512, 3514, 
3516, 3518, 3520, 3522, 3524, 3526, 3528, 3530, 3532, 3534, 3536, 3538, 3540, 3542, 3544, 3546, 3548, 3550, 3552, 3554, 3556, 3558, 3560, 3562, 3564, 3566, 3568, 3570, 3572, 3574, 3576, 3578, 3581, 3583, 3585, 3587, 3590, 3592, 3594, 3596, 3598, 3600, 3602, 3604, 3606, 3608, 3610, 3612, 3614, 3616, 3618, 3620, 3622, 3624, 3626, 3628, 3630, 3632, 3634, 3636, 3638, 3640, 3642, 3644, 3646, 3648, 3650, 3652, 3654, 3656, 3658, 3660, 3662, 3664, 3666, 3668, 3670, 3672, 3674, 3676, 3678, 3680, 3682, 3684, 3686, 3688, 3690, 3692, 3694, 3696, 3698, 3700, 3702, 3704, 3706, 3708, 3710, 3712, 3714, 3716, 3718, 3720, 3722, 3724, 3726, 3728, 3730, 3732, 3734, 3736, 3738, 3740, 3742, 3744, 3746, 3748, 3750, 3752, 3754, 3756, 3758, 3760, 3762, 3764, 3766, 3768, 3770, 3772, 3774, 3776, 3778, 3780, 3782, 3784, 3786, 3788, 3790, 3792, 3794, 3796, 3798, 3800, 3802, 3804, 3806, 3808, 3810, 3812, 3814, 3816, 3818, 3820, 3822, 3824, 3826, 3828, 3830, 3832, 3834, 3836, 3838, 3840, 3842, 3844, 3846, 3848, 3850, 3852, 3854, 3856, 3858, 3860, 3862, 3864, 3866, 3868, 3870, 3872, 3874, 3876, 3878, 3880, 3882, 3884, 3886, 3888, 3890, 3892, 3894, 3896, 3898, 3900, 3902, 3904, 3906, 3908, 3910, 3912, 3914, 3916, 3918, 3920, 3922, 3924, 3926, 3928, 3930, 3932, 3934, 3936, 3938, 3940, 3942, 3944, 3946, 3948, 3950, 3952, 3954, 3956, 3958, 3960, 3962, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3980, 3982, 3984, 3986, 3988, 3990, 3992, 3994, 3996, 3998, 4000, 4002, 4004, 4006, 4008, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028, 4030, 4032, 4034, 4036, 4038, 4040, 4042, 4044, 4046, 4048, 4050, 4052, 4054, 4056, 4058, 4060, 4062, 4064, 4066, 4068, 4070, 4072, 4074, 4076, 4078, 4080, 4082, 4084, 4086, 4088, 4090, 4092, 4094, 4096, 4098, 4100, 4102, 4104, 4106, 4108, 4110, 4112, 4114, 4116, 4118, 4120, 4122, 4124, 4126, 4128, 4130, 4132, 4134, 4136, 4138, 4140, 4142, 4144, 4147, 4149, 4151, 4153, 4155, 4157, 4159, 4161, 4163, 4165, 4167, 4169, 4171, 4173, 4175, 4177, 4179, 4181, 4183, 
4185, 4187, 4189, 4191, 4193, 4195, 4197, 4199, 4201, 4203, 4205, 4207, 4209, 4211, 4213, 4215, 4217, 4219, 4221, 4223, 4225, 4227, 4229, 4231, 4233, 4236, 4238, 4240, 4242, 4244, 4246, 4248, 4250, 4252, 4254, 4256, 4258, 4260, 4262, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294, 4297, 4299, 4301, 4303, 4305, 4307, 4309, 4311, 4313, 4315, 4317, 4319, 4321, 4323, 4325, 4327, 4329, 4331, 4336, 4338, 4340, 4342, 4344, 4346, 4349, 4351, 4354, 4356, 4359, 4361, 4364, 4366, 4368, 4370, 4373, 4375, 4378, 4380, 4385, 4387, 4389, 4391, 4393, 4395, 4382, 4377, 4382, 4377, 4382, 4377, 4335, 4404, 4335, 4404, 4335, 4404, 4407, 4405, 4407, 4405, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4335, 4404, 4407, 4405, 4335, 4404, 4407, 4405, 4382, 4377, 4382, 4377, 4382, 4382, 4377, 4382, 4377, 4335, 4404, 4382, 4377, 4382, 4377, 4382, 4377, 4377, 4407, 4405, 4407, 4405, 4363, 4363, 4335, 4404, 4407, 4405, 4335, 4404, 4407, 4405, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4335, 4404, 4407, 4405, 4407, 4405, 4335, 4404, 4335, 4404, 4407, 4405, 4407, 4405, 4382, 4377, 4407, 4405, 4407, 4405, 4382, 4377, 4382, 4377, 4407, 4405, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4335, 4404, 4335, 4404, 4407, 4405, 4407, 4405, 4407, 4405, 4407, 4405, 4407, 4405, 4400, 4402, 4400, 4402, 4407, 4405, 4407, 4405, 4407, 4405, 4407, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4407, 4405, 4407, 4405, 4407, 4405, 4335, 4404, 4335, 4404, 4407, 4405, 4400, 4402, 4407, 4405, 4402, 4400, 4402, 4400, 4407, 4405, 4402, 4400, 4402, 4400, 4407, 4405, 4348, 4348, 4377, 4382, 4377, 4382, 4377, 4382, 4382, 4377, 4382, 4377, 4382, 4377, 4382, 4377, 4402, 4400, 4335, 4404, 4407, 4405, 4407, 4405, 4407, 4405, 4377, 4382, 4377, 4382, 4407, 4405, 4407, 4405, 4377, 4382, 4377, 4382, 4405, 4407, 4407, 4405, 4407, 4405, 4400, 
4407, 4405, 4400, 4405, 4402, 4407, 4405, 4407, 4405, 4235, 4407, 4405, 4235, 4377, 4382, 4397, 4397, 4377, 4382, 4407, 4405, 4335, 4402, 4335, 4407, 4405, 4384, 4384, 4402, 4400, 4402, 4400, 4407, 4405, 4409, 4402, 4400, 4404, 4402, 4400, 4404, 4407, 4405, 4409, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 7600, 7602, 7604, 7606, 7608, 7610, 7612, 7614, 7616, 7618, 7620, 7622, 7624, 7626, 7628, 7630, 7632, 7634, 7636, 7638, 7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676, 7678, 7680, 7682, 7684, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, 7704, 7706, 7708, 7710, 7712, 7714, 7716, 7718, 7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, 7736, 7738, 7740, 7742, 7744, 7746, 7748, 7750, 7752, 7754, 7756, 7758, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790, 7792, 7794, 7796, 7798, 7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, 7816, 7818, 7820, 7822, 7824, 7826, 7828, 7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866, 7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904, 7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, 7936, 7938, 7940, 7942, 7944, 7946, 7948, 7950, 7952, 7954, 7956, 7958, 7960, 7962, 7964, 7966, 7968, 7970, 7972, 7974, 7976, 7978, 7980, 7982, 7984, 7986, 7988, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 8006, 8008, 8010, 8012, 8014, 8016, 8018, 8020, 8022, 8024, 8026, 8028, 8030, 8032, 8034, 8036, 8038, 8040, 8042, 8044, 8046, 8048, 8050, 8052, 8054, 8056, 8058, 8060, 8062, 8064, 8066, 8068, 8070, 8072, 8074, 8076, 8078, 8080, 8082, 8084, 8086, 8088, 8090, 8092, 8094, 8096, 8098, 8100, 8102, 8104, 8106, 8108, 8110, 8112, 8114, 8116, 8118, 8120, 8122, 8124, 8126, 8128, 8130, 8132, 8134, 8136, 8138, 8140, 8142, 8144, 8146, 8148, 8150, 8152, 8154, 8156, 8158, 8160, 8162, 
8164, 8166, 8168, 8170, 8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 8194, 8196, 8198, 8200, 8202, 8204, 8206, 8208, 8210, 8212, 8214, 8216, 8218, 8220, 8222, 8224, 8226, 8228, 8230, 8232, 8234, 8236, 8238, 8240, 8242, 8244, 8246, 8248, 8250, 8252, 8254, 8256, 8258, 8260, 8262, 8264, 8266, 8268, 8270, 8272, 8274, 8276, 8278, 8280, 8282, 8284, 8286, 8288, 8290, 8292, 8294, 8296, 8298, 8300, 8302, 8304, 8306, 8308, 8310, 8312, 8314, 8316, 8318, 8320, 8322, 8324, 8326, 8328, 8330, 8332, 8334, 8336, 8338, 8340, 8342, 8344, 8346, 8348, 8350, 8352, 8354, 8356, 8358, 8360, 8362, 8364, 8366, 8368, 8370, 8372, 8374, 8376, 8378, 8380, 8382, 8384, 8386, 8388, 8390, 8392, 8394, 8396, 8398, 8400, 8402, 8404, 8406, 8408, 8410, 8412, 8414, 8416, 8418, 8420, 8422, 8424, 8426, 8428, 8430, 8432, 8434, 8436, 8438, 8440, 8442, 8444, 8446, 8448, 8450, 8452, 8454, 8456, 8458, 8460, 8462, 8464, 8466, 8468, 8470, 8472, 8474, 8476, 8478, 8480, 8482, 8484, 8486, 8488, 8490, 8492, 8494, 8496, 8498, 8500, 8502, 8504, 8506, 8508, 8510, 8512, 8514, 8516, 8518, 8520, 8522, 8524, 8526, 8528, 8530, 8532, 8534, 8536, 8538, 8540, 8542, 8544, 8546, 8548, 8550, 8552, 8554, 8556, 8558, 8560, 8562, 8564, 8566, 8568, 8570, 8572, 8574, 8576, 8578, 8580, 8582, 8584, 8586, 8588, 8590, 8592, 8594, 8596, 8598, 8600, 8602, 8604, 8606, 8608, 8610, 8612, 8614, 8616, 8618, 8620, 8622, 8624, 8626, 8628, 8630, 8632, 8634, 8636, 8638, 8640, 8642, 8644, 8646, 8648, 8650, 8652, 8654, 8656, 8658, 8660, 8662, 8664, 8666, 8668, 8670, 8672, 8674, 8676, 8678, 8680, 8682, 8684, 8686, 8688, 8690, 8692, 8694, 8696, 8698, 8700, 8702, 8704, 8706, 8708, 8710, 8712, 8714, 8716, 8718, 8720, 8722, 8724, 8726, 8728, 8730, 8732, 8734, 8736, 8738, 8740, 8742, 8744, 8746, 8748, 8750, 8752, 8754, 8756, 8758, 8760, 8762, 8764, 8766, 8768, 8770, 8772, 8774, 8776, 8778, 8780, 8782, 8784, 8786, 8788, 8790, 8792, 8794, 8796, 8798, 8800, 8802, 8804, 8806, 8808, 8810, 8812, 8814, 8816, 8818, 8820, 8822, 8824, 8826, 8828, 
8830, 8832, 8834, 8836, 8838, 8840, 8842, 8844, 8846, 8848, 8850, 8852, 8854, 8856, 8858, 8860, 8862, 8864, 8866, 8868, 8870, 8872, 8874, 8876, 8878, 8880, 8882, 8884, 8886, 8888, 8890, 8892, 8894, 8896, 8898, 8900, 8902, 8904, 8906, 8908, 8910, 8912, 8914, 8916, 8918, 8920, 8922, 8924, 8926, 8928, 8930, 8932, 8934, 8936, 8938, 8940, 8942, 8944, 8946, 8948, 8950, 8952, 8954, 8956, 8958, 8960, 8962, 8964, 8966, 8968, 8970, 8972, 8974, 8976, 8978, 8980, 8982, 8984, 8986, 8988, 8990, 8992, 8994, 8996, 8998, 9000, 9002, 9004, 9006, 9008, 9010, 9012, 9014, 9016, 9018, 9020, 9022, 9024, 9026, 9028, 9030, 9032, 9034, 9036, 9038, 9040, 9042, 9044, 9046, 9048, 9050, 9052, 9054, 9056, 9058, 9060, 9062, 9064, 9066, 9068, 9070, 9072, 9074, 9076, 9078, 9080, 9082, 9084, 9086, 9088, 9090, 9092, 9094, 9096, 9098, 9100, 9102, 9104, 9106, 9108, 9110, 9112, 9114, 9116, 9118, 9120, 9122, 9124, 9126, 9128, 9130, 9132, 9134, 9136, 9138, 9140, 9142, 9144, 9146, 9148, 9150, 9152, 9154, 9156, 9158, 9160, 9162, 9164, 9166, 9168, 9170, 9172, 9174, 9176, 9178, 9180, 9182, 9184, 9186, 9188, 9190, 9192, 9194, 9196, 9198, 9200, 9202, 9204, 9206, 9208, 9210, 9212, 9214, 9216, 9218, 9220, 9222, 9224, 9226, 9228, 9230, 9232, 9234, 9236, 9238, 9240, 9242, 9244, 9246, 9248, 9250, 9252, 9254, 9256, 9258, 9260, 9262, 9264, 9266, 9268, 9270, 9272, 9274, 9276, 9278, 9280, 9282, 9284, 9286, 9288, 9290, 9292, 9294, 9296, 9298, 9300, 9302, 9304, 9306, 9308, 9310, 9312, 9314, 9316, 9318, 9320, 9322, 9324, 9326, 9328, 9330, 9332, 9334, 9336, 9338, 9340, 9342, 9344, 9346, 9348, 9350, 9352, 9354, 9356, 9358, 9360, 9362, 9364, 9366, 9368, 9370, 9372, 9374, 9376, 9378, 9380, 9382, 9384, 9386, 9388, 9390, 9392, 9394, 9396, 9398, 9400, 9402, 9404, 9406, 9408, 9410, 9412, 9414, 9416, 9418, 9420, 9422, 9424, 9426, 9428, 9430, 9432, 9434, 9436, 9438, 9440, 9442, 9444, 9446, 9448, 9450, 9452, 9454, 9456, 9458, 9460, 9462, 9464, 9466, 9468, 9470, 9472, 9474, 9476, 9478, 9480, 9482, 9484, 9486, 9488, 9490, 9492, 9494, 
9496, 9498, 9500, 9502, 9504, 9506, 9508, 9510, 9512, 9514, 9516, 9518, 9520, 9522, 9524, 9526, 9528, 9530, 9532, 9534, 9536, 9538, 9540, 9542, 9544, 9546, 9548, 9550, 9552, 9554, 9556, 9558, 9560, 9562, 9564, 9566, 9568, 9570, 9572, 9574, 9576, 9578, 9580, 9582, 9584, 9586, 9588, 9590, 9592, 9594, 9596, 9598, 9600, 9602, 9604, 9606, 9608, 9610, 9612, 9614, 9616, 9618, 9620, 9622, 9624, 9626, 9628, 9630, 9632, 9634, 9636, 9638, 9640, 9642, 9644, 9646, 9648, 9650, 9652, 9654, 9656, 9658, 9660, 9662, 9664, 9666, 9668, 9670, 9672, 9674, 9676, 9678, 9680, 9682, 9684, 9686, 9688, 9690, 9692, 9694, 9696, 9698, 9700, 9702, 9704, 9706, 9708, 9710, 9712, 9714, 9716, 9718, 9720, 9722, 9724, 9726, 9728, 9730, 9732, 9734, 9736, 9738, 9740, 9742, 9744, 9746, 9748, 9750, 9752, 9754, 9756, 9758, 9760, 9762, 9764, 9766, 9768, 9770, 9772, 9774, 9776, 9778, 9780, 9782, 9784, 9786, 9788, 9790, 9791, 9792, 9793, 9794, 9795, 9796, 9797, 9798, 9799, 9800, 9801, 9802, 9803, 9804, 9805, 9806, 9807, 9808, 9809, 9810, 9811, 9812, 9813, 9814, 9815, 9816, 9817, 9818, 9819, 9820, 9821, 9822, 9823, 9824, 9825, 9826, 9827, 9828, 9829, 9830, 9831, 9832, 9833, 9834, 9835, 9836, 9837, 9838, 9839, 9840, 9841, 9842, 9843, 9844, 9845, 9846, 9847, 9848, 9849, 9850, 9851, 9852, 9853, 9854, 9855, 9856, 9857, 9858, 9859, 9860, 9861, 9862, 9863, 9864, 9865, 9866, 9867, 9868, 9869, 9870, 9871, 9872, 9873, 9874, 9875, 9876, 9877, 9878, 9879, 9880, 9881, 9882, 9883, 9884, 9885, 9886, 9887, 9888, 9889, 9890, 9891, 9892, 9893, 9894, 9895, 9896, 9897, 9898, 9899, 9900, 9901, 9902, 9903, 9904, 9905, 9906, 9907, 9908, 9909, 9910, 9911, 9912, 9913, 9914, 9915, 9916, 9917, 9918, 9919, 9920, 9921, 9922, 9923, 9924, 9925, 9926, 9927, 9928, 9929, 9930, 9931, 9932, 9933, 9934, 9935, 9936, 9937, 9938, 9939, 9940, 9941, 9942, 9943, 9944, 9945, 9946, 9947, 9948, 9949, 9950, 9951, 9952, 9953, 9954, 9955, 9956, 9957, 9958, 9959, 9960, 9961, 9962, 9963, 9964, 9965, 9966, 9967, 9968, 9969, 9970, 9971, 9972, 9973, 9974, 9975, 
9976, 9977, 9978, 9979, 9980, 9981, 9982, 9983, 9984, 9985, 9986, 9987, 9988, 9989, 9990, 9991, 9992, 9993, 9994, 9995, 9996, 9997, 9998, 9999, 10000, 10001, 10002, 10003, 10004, 10005, 10006, 10007, 10008, 10009, 10010, 10011, 10012, 10013, 10014, 10015, 10016, 10017, 10018, 10019, 10020, 10021, 10022, 10023, 10024, 10025, 10026, 10027, 10028, 10029, 10030, 10031, 10032, 10033, 10034, 10035, 10036, 10037, 10038, 10039, 10040, 10041, 10042, 10043, 10044, 10045, 10046, 10047, 10048, 10049, 10050, 10051, 10052, 10053, 10054, 10055, 10056, 10057, 10058, 10059, 10060, 10061, 10062, 10063, 10064, 10065, 10066, 10067, 13, 14, 15, 4353, 4358, 10080, 4377, 4382, 10081, 4400, 4335, 4353, 4358, 4348, 4353, 4358, 4363, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4353, 4358, 10097, 11175, 4353, 4358, 10101, 11177, 11179, 4353, 4358, 10105, 4382, 4377, 4382, 4377, 4358, 4353, 10113, 4358, 4353, 10443, 4358, 4353, 10437, 4377, 4384, 4358, 4353, 10116, 4382, 4382, 4382, 4384, 4400, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 10616, 4384, 4407, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4377, 4382, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4384, 4358, 4353, 10354, 4382, 4377, 4397, 4382, 4377, 4384, 4400, 4405, 4400, 4405, 4358, 4353, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 10131, 4384, 4335, 4407, 4405, 4335, 4404, 4407, 4405, 4353, 4358, 10138, 4377, 4382, 4384, 4397, 4400, 11181, 4358, 4353, 4348, 4353, 4358, 4363, 10145, 10147, 10148, 4384, 4407, 4353, 4358, 4348, 4353, 4358, 4363, 10150, 10152, 10495, 4384, 4407, 11183, 4407, 4405, 4358, 4353, 10154, 4377, 4382, 10429, 4400, 4400, 4358, 4353, 10499, 4377, 4382, 10157, 4377, 4382, 10160, 4400, 4400, 4353, 4358, 4348, 4353, 4358, 4363, 10162, 10164, 10424, 4384, 11185, 4407, 4405, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4382, 4377, 10495, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4377, 4382, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4384, 4358, 4353, 10171, 4377, 4382, 10174, 4400, 
4404, 4358, 4353, 4348, 4358, 4353, 4363, 4377, 4382, 4397, 4382, 4377, 4384, 4407, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4377, 4382, 4382, 4377, 4384, 4400, 4405, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4377, 4382, 4382, 4377, 4384, 4353, 4358, 10197, 4382, 4377, 4397, 4353, 4358, 10204, 4377, 4382, 4384, 4400, 4358, 4353, 4348, 4358, 4353, 4363, 10212, 10214, 10216, 4397, 11187, 4335, 4404, 11189, 4353, 4358, 10579, 4384, 4353, 4358, 4353, 4358, 4353, 4358, 10223, 4353, 4358, 10574, 4353, 4358, 4353, 4358, 4353, 4358, 10229, 4353, 4358, 10230, 11191, 11193, 11195, 4384, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4353, 4358, 4348, 4353, 4358, 4363, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4382, 4382, 4382, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4377, 4353, 4358, 4348, 4353, 4358, 4363, 4353, 4358, 10270, 4353, 4358, 10274, 4353, 4358, 4363, 4348, 11197, 11199, 4353, 4358, 10282, 4353, 4358, 10286, 11201, 11203, 11205, 4384, 4400, 4353, 4358, 4348, 4358, 4353, 4363, 4382, 4377, 4382, 4377, 10616, 4384, 4353, 4358, 10297, 4382, 4377, 4382, 4377, 4377, 4382, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 4353, 4358, 10306, 4377, 4384, 4353, 4358, 10323, 4353, 4358, 10339, 4382, 4382, 4382, 4384, 4353, 4358, 10319, 4358, 4353, 10310, 4377, 4382, 4397, 4377, 4382, 4384, 4400, 4353, 4358, 10570, 4353, 4358, 10574, 4353, 4358, 10579, 4384, 4353, 4358, 10584, 11207, 11209, 11211, 4384, 4353, 4358, 10319, 4353, 4358, 10323, 4353, 4358, 10327, 4377, 4377, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4353, 4358, 10339, 4382, 4382, 4382, 4384, 4353, 4358, 10345, 4377, 4382, 10349, 4400, 4335, 4404, 4358, 4353, 10354, 4382, 4377, 4397, 4382, 4377, 4384, 4400, 4405, 4358, 4353, 4348, 4353, 4358, 4363, 4377, 4382, 4377, 4382, 10424, 4384, 4407, 4405, 4353, 4358, 10370, 4377, 4382, 4377, 4382, 4377, 4382, 4384, 4400, 4405, 4353, 4358, 10380, 4377, 4382, 4397, 4382, 4377, 4384, 4400, 4405, 4409, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 10616, 4384, 11213, 
11215, 11217, 11219, 4353, 4358, 10394, 11221, 11223, 4353, 4358, 4348, 4353, 4358, 4363, 4353, 4358, 10403, 4353, 4358, 10407, 4382, 4377, 4382, 4377, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4400, 4358, 4353, 4348, 4358, 4353, 4363, 10421, 4397, 10424, 4384, 4407, 4358, 4353, 10427, 4382, 4377, 10429, 4400, 4400, 4358, 4353, 10433, 4358, 4353, 10437, 4377, 4384, 4353, 4358, 10443, 4353, 4358, 10447, 4382, 4382, 4382, 4384, 4358, 4353, 4358, 4353, 4358, 4353, 4363, 4400, 4358, 4353, 4348, 4358, 4353, 4363, 4382, 4377, 4382, 4377, 4358, 4353, 4348, 4358, 4353, 4363, 4382, 4377, 4382, 4377, 4400, 4353, 4358, 10481, 4382, 4377, 4397, 4382, 4377, 4384, 4400, 4405, 4358, 4353, 4348, 4358, 4353, 4363, 4382, 4377, 4382, 4377, 10495, 4384, 4407, 4358, 4353, 10499, 4377, 4382, 10503, 4400, 4358, 4353, 4358, 4353, 4358, 4353, 4363, 4358, 4353, 4358, 4353, 4358, 4353, 4363, 4358, 4353, 10520, 4358, 4353, 10524, 4358, 4353, 10528, 4358, 4353, 4358, 4353, 4358, 4353, 4363, 4358, 4353, 10538, 4358, 4353, 10542, 11226, 11228, 4358, 4353, 4358, 4353, 4358, 4353, 4363, 4358, 4353, 10552, 4358, 4353, 4358, 4353, 4358, 4353, 4358, 4353, 4358, 4353, 10559, 4382, 4377, 4397, 4382, 4377, 4384, 4400, 11230, 4353, 4358, 10570, 4353, 4358, 10574, 4384, 4353, 4358, 10579, 4384, 4353, 4358, 10584, 11232, 11234, 11236, 4384, 4400, 4353, 4358, 10588, 4377, 4382, 4397, 4377, 4382, 4384, 4400, 4405, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4397, 4382, 4377, 4384, 4400, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 10616, 4384, 4353, 4358, 10619, 4382, 4377, 4382, 4377, 4382, 4377, 4384, 4358, 4353, 10623, 4382, 4384, 4358, 4353, 4363, 4348, 4358, 4353, 10633, 4358, 4353, 4348, 4358, 4353, 4363, 4377, 4358, 4353, 10644, 4382, 4382, 4358, 4353, 4348, 4358, 4353, 4363, 4358, 4353, 4348, 4358, 4353, 4363, 4358, 4353, 4358, 4353, 4358, 4353, 4363, 4358, 4353, 10668, 4382, 4377, 4382, 4377, 4382, 4377, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4377, 4382, 4353, 4358, 4348, 
4353, 4358, 4363, 4382, 4377, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4382, 4377, 4382, 4377, 4384, 4358, 4353, 10711, 4382, 4377, 4397, 4382, 4377, 4384, 4400, 4335, 4404, 4358, 4353, 4348, 4358, 4353, 4363, 10728, 4358, 4353, 4348, 4358, 4353, 4363, 10736, 4358, 4353, 4348, 4358, 4353, 4363, 10744, 10746, 11239, 4335, 4404, 11241, 4358, 4353, 4358, 4353, 4358, 4353, 4348, 4382, 4382, 4358, 4353, 4358, 4353, 4358, 4353, 4348, 4382, 4382, 4358, 4353, 4348, 4358, 4353, 4363, 4377, 4377, 4377, 4397, 11245, 11247, 11249, 11251, 4353, 4358, 4348, 4353, 4358, 4363, 11253, 4358, 4353, 4348, 4353, 4358, 4363, 11255, 4384, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4353, 4358, 4348, 4353, 4358, 4363, 11257, 11259, 11261, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 4358, 4353, 4348, 4358, 4353, 4363, 11169, 4382, 4377, 10777, 4397, 11263, 11265, 11267, 4353, 4358, 4348, 4353, 4358, 4363, 10853, 4397, 10780, 4384, 4402, 11269, 4402, 11271, 11273, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 4382, 4377, 4384, 11275, 4358, 4353, 4348, 4358, 4353, 4363, 10820, 10901, 10902, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 10940, 10941, 4353, 4358, 4348, 4353, 4358, 4363, 11277, 10949, 4353, 4358, 4348, 4353, 4358, 4363, 10796, 4377, 4382, 11279, 4358, 4353, 4348, 4358, 4353, 4363, 10806, 10808, 4358, 4353, 4348, 4358, 4353, 4363, 10816, 4377, 4382, 11281, 4358, 4353, 4348, 4353, 4358, 4363, 10820, 10901, 10959, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 11283, 4397, 4353, 4358, 4348, 4353, 4358, 4363, 11285, 4397, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4384, 11287, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4358, 4353, 4348, 4353, 4358, 4363, 11289, 11291, 4353, 4358, 4348, 4353, 4358, 4363, 11293, 4353, 4358, 4348, 4353, 4358, 4363, 11295, 11297, 11299, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 4400, 4353, 4358, 4348, 4358, 4353, 4363, 10853, 4397, 10856, 4384, 4400, 11301, 4400, 11303, 11305, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 4384, 
4402, 11307, 4358, 4353, 4348, 4358, 4353, 4363, 4382, 4377, 4382, 4377, 4382, 4377, 4384, 4400, 11309, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4384, 4397, 4400, 4402, 11311, 4358, 4353, 4348, 4358, 4353, 4363, 4377, 4382, 4382, 4377, 4382, 4377, 4384, 11313, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4397, 4377, 4382, 4384, 11315, 11317, 4358, 4353, 4348, 4358, 4353, 4363, 10879, 10881, 4358, 4353, 4348, 4358, 4353, 4363, 10889, 4382, 4377, 11319, 4358, 4353, 4348, 4353, 4358, 4363, 10895, 4382, 4377, 10897, 4397, 11321, 11323, 4353, 4358, 4348, 4353, 4358, 4363, 10900, 10901, 10902, 4384, 4400, 4400, 4400, 4400, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4353, 4358, 4348, 4353, 4358, 4363, 4353, 4358, 4348, 4353, 4358, 4363, 11326, 4353, 4358, 4348, 4353, 4358, 4363, 11328, 11330, 11332, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 11334, 4384, 4400, 11336, 4353, 4358, 4348, 4353, 4358, 4363, 10925, 4353, 4358, 4348, 4353, 4358, 4363, 4377, 4382, 4353, 4358, 4348, 4358, 4353, 4363, 10940, 10941, 4353, 4358, 4348, 4353, 4358, 4363, 10948, 10949, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 4384, 4402, 11338, 11340, 4358, 4353, 4348, 4353, 4358, 4363, 10955, 10957, 10959, 4384, 4402, 11342, 4402, 11344, 11346, 4358, 4353, 4348, 4358, 4353, 4363, 4382, 4377, 4397, 4384, 11348, 11350, 4358, 4353, 4348, 4353, 4358, 4363, 4382, 4377, 4382, 4377, 11169, 4377, 4382, 11352, 11354, 11356, 11358, 11360, 11362, 4353, 4358, 4353, 4358, 4353, 4358, 4363, 4353, 4358, 4348, 4353, 4358, 4363, 11367, 4353, 4358, 4348, 4353, 4358, 4363, 11369, 4384, 4353, 4358, 4348, 4353, 4358, 4363, 4353, 4358, 4348, 4353, 4358, 4363, 11372, 11374, 4353, 4358, 4348, 4353, 4358, 4363, 11376, 11378, 4353, 4358, 4348, 4353, 4358, 4363, 11169, 4382, 4377, 11173, 4397, 11380, 11382, 11384, 11386, 4358, 4353, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 4384, 4402, 11388, 4353, 4358, 4348, 4358, 4353, 4363, 11031, 11390, 11032, 4397, 4353, 4358, 4348, 4358, 4353, 4363, 11040, 11392, 
11041, 4397, 11394, 11396, 4353, 4358, 4348, 4358, 4353, 4363, 11049, 11050, 4353, 4358, 4348, 4358, 4353, 4363, 11057, 11398, 4353, 4358, 4348, 4358, 4353, 4363, 11064, 11065, 11066, 11400, 11404, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4384, 4397, 4400, 11406, 4358, 4353, 4348, 4358, 4353, 4363, 4382, 4377, 4382, 4377, 4382, 4377, 4384, 11409, 4358, 4353, 4348, 4353, 4358, 4363, 11090, 4358, 4353, 4348, 4353, 4358, 4363, 11097, 4358, 4353, 4348, 4353, 4358, 4363, 11104, 11105, 4358, 4353, 4348, 4353, 4358, 4363, 4382, 4377, 11115, 4353, 4358, 4348, 4353, 4358, 4363, 4382, 4377, 4397, 4382, 4377, 4384, 11414, 11416, 4353, 4358, 4348, 4358, 4353, 4363, 4382, 4377, 4382, 4377, 4382, 4377, 4384, 4400, 11419, 4353, 4358, 4348, 4358, 4353, 4363, 11144, 11145, 4353, 4358, 4348, 4358, 4353, 4363, 11152, 11422, 4353, 4358, 4348, 4358, 4353, 4363, 11159, 11160, 11161, 11426, 11428, 11433, 4353, 4358, 4348, 4358, 4353, 4363, 11169, 4382, 4377, 11173, 4397, 11437, 11439, 11441, 11444, 11447, 11450, 11425, 11424, 11425, 11424, 11449, 11446, 11449, 11432, 11449, 11432, 11449, 11432, 11449, 11421, 11443, 11418, 11452, 11449, 11446, 11432, 11430, 11425, 11424, 11436, 11435, 11425, 11424, 11435, 11424, 11425, 11424, 11425, 11424, 11436, 11435, 11425, 11424, 11425, 11424, 11425, 11424, 11432, 11425, 11424, 11430, 11421, 11443, 11425, 11424, 11436, 11435, 11425, 11424, 11425, 11436, 11418, 11425, 11424, 11425, 11424, 11418, 11430, 11425, 11424, 11425, 11435, 11435, 11418, 11418, 11452, 11449, 11446, 11432, 11430, 11446, 11449, 11446, 11449, 11446, 11449, 11446, 11449, 11446, 11449, 11446, 11432, 11430, 11443, 11432, 11430, 11443, 11432, 11430, 11449, 11446, 11446, 11430, 11421, 11432, 11430, 11452, 11432, 11430, 11452, 11432, 11430, 11449, 11446, 11449, 11446, 11421, 11449, 11446, 11452, 11449, 11446, 11452, 11449, 11446, 11449, 11446, 11424, 11424, 11446, 11421, 11449, 11446, 11449, 11446, 11432, 11430, 11443, 11432, 11430, 11443, 11432, 11430, 11430, 11449, 11446, 11421, 
11446, 11432, 11430, 11432, 11430, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 11456, 11457, 11458, 11459, 11460, 11461, 11462, 11463, 11464, 11465, 11466, 11467, 11468, 11469, 11470, 11471, 11472, 11473, 11474, 11475, 11476, 11477, 11478, 11479, 11481, 11482, 11483, 11486, 11487, 11488, 11489, 11490, 11491, 11492, 11493, 11494, 11495, 11496, 11497, 11498, 11499, 11500, 11501, 11502, 11503, 11504, 11505, 11506, 11507, 11508, 11509, 11510, 11511, 11512, 11513, 11514, 11515, 11516, 11517, 11518, 11519, 11520, 11521, 11522, 11523, 11524, 11525, 11526, 11527, 11528, 11529, 11530, 11531, 11532, 11533, 11534, 11535, 11536, 11537, 11538, 11539, 11540, 11541, 11542, 11543, 11544, 11545, 11546, 11547, 11548, 11549, 11550, 11551, 11552, 11553, 11554, 11555, 11556, 11557, 11558, 11559, 11560, 11561, 11562, 11563, 11564, 11565, 11566, 11567, 11568, 11569, 11570, 11571, 11572, 11573, 11574, 11575, 11576, 11577, 11578, 11579, 11580, 11581, 11583, 11584, 11585, 11586, 11587, 11588, 11589, 11590, 11591, 11592, 11593, 11594, 11595, 11596, 11597, 11598, 11599, 11600, 11601, 11602, 11603, 11604, 11606, 11607, 11608, 11609, 11610, 11611, 11612, 11613, 11614, 11615, 11616, 11617, 11618, 11619, 11620, 11621, 11622, 11623, 11624, 11625, 11626, 11627, 11628, 11629, 11630, 11631, 11632, 11633, 11634, 11635, 11636, 11638, 11639, 11640, 11641, 11642, 11643, 11644, 11645, 11646, 11647, 11648, 11649, 11650, 11651, 11652, 11653, 11654, 11655, 11656, 11657, 11658, 11659, 11660, 11661, 11662, 11663, 11664, 11665, 11666, 11667, 11668, 11669, 11670, 11671, 11672, 11673, 11674, 11675, 11676, 11677, 11678, 11679, 11680, 11681, 11682, 11683, 11684, 11685, 11686, 11687, 11688, 11689, 11690, 11691, 11692, 11693, 11694, 11695, 11696, 11697, 11698, 11699, 11700, 11701, 11702, 11703, 11704, 11705, 11706, 11707, 11708, 11709, 11710, 11711, 11712, 11713, 11714, 11715, 11716, 11717, 11718, 11719, 11720, 11721, 11722, 11723, 11724, 11725, 11726, 11727, 11728, 11729, 11730, 11731, 11732, 11733, 11734, 
11735, 11736, 11737, 11738, 11739, 11740, 11741, 11742, 11744, 11745, 11747, 11748, 11749, 11750, 11751, 11752, 11753, 11754, 11755, 11756, 11757, 11758, 11759, 11760, 11761, 11762, 11763, 11764, 11765, 11766, 11767, 11768, 11769, 11770, 11774, 11775, 11776, 11777, 11778, 11779, 11780, 11781, 11782, 11783, 11784, 11785, 11786, 11787, 11788, 11789, 11790, 11791, 11792, 11793, 11794, 11795, 11796, 11797, 11798, 11799, 11800, 11801, 11802, 11803, 11804, 11805, 11806, 11807, 11808, 11809, 11810, 11811, 11812, 11813, 11814, 11815, 11816, 11817, 11818, 11819, 11820, 11821, 11824, 11825, 11826, 11827, 11828, 11829, 11833, 11834, 11835, 11836, 11837, 11838, 11839, 11840, 11841, 11842, 11843, 11844, 11845, 11846, 11847, 11848, 11849, 11850, 11851, 11852, 11853, 11854, 11855, 11856, 11857, 11858, 11859, 11860, 11861, 11862, 11863, 11864, 11865, 11866, 11867, 11868, 11869, 11870, 11871, 11872, 11873, 11874, 11875, 11876, 11877, 11878, 11879, 11880, 11881, 11882, 11883, 11884, 11885, 11886, 11887, 11888, 11889, 11890, 11891, 11892, 11893, 11894, 11895, 11896, 11897, 11898, 11899, 11900, 11901, 11902, 11903, 11907, 11908, 11909, 11910, 11911, 11912, 11913, 11914, 11915, 11916, 11917, 11918, 11919, 11920, 11921, 11922, 11923, 11924, 11925, 11926, 11927, 11928, 11929, 11930, 11931, 11932, 11933, 11934, 11935, 11936, 11937, 11938, 11939, 11940, 11941, 11942, 11943, 11944, 11945, 11946, 11947, 11948, 11949, 11950, 11951, 11952, 11953, 11954, 11955, 11956, 11957, 11958, 11959, 11960, 11961, 11962, 11963, 11964, 11965, 11966, 11967, 11968, 11969, 11970, 11971, 11972, 11973, 11974, 11975, 11976, 11977, 11978, 11979, 11980, 11981, 11982, 11983, 11984, 11985, 11986, 11987, 11988, 11989, 11990, 11991, 11992, 11993, 11994, 11995, 11996, 11997, 11998, 11999, 12000, 12001, 12006, 12007, 12008, 12011, 12012, 12013, 12014, 12015, 12016, 12017, 12018, 12019, 12020, 12021, 12022, 12023, 12024, 12025, 12026, 12027, 12028, 12029, 12030, 12031, 12032, 12033, 12034, 12035, 12036, 12037, 12038, 
12039, 12040, 12041, 12042, 12043, 12044, 12045, 12046, 12047, 12048, 12049, 12050, 12051, 12052, 12053, 12054, 12055, 12056, 12057, 12058, 12059, 12060, 12061, 12062, 12063, 12064, 12065, 12066, 12067, 12068, 12069, 12070, 12071, 12072, 12073, 12074, 12075, 12076, 12077, 12078, 12079, 12080, 12081, 12082, 12083, 12084, 12085, 12086, 12087, 12088, 12089, 12090, 12091, 12092, 12093, 12094, 12095, 12096, 12097, 12098, 12099, 12100, 12101, 12102, 12103, 12104, 12105, 12106, 12107, 12108, 12109, 12110, 12111, 12112, 12113, 12114, 12115, 12116, 12117, 12118, 12119, 12120, 12121, 12122, 12123, 12124, 12125, 12126, 12127, 12128, 12129, 12130, 12131, 12132, 12133, 12134, 12135, 12136, 12137, 12138, 12139, 12140, 12141, 12142, 12143, 12144, 12145, 12146, 12147, 12148, 12149, 12150, 12151, 12152, 12153, 12154, 12155, 12156, 12157, 12158, 12159, 12160, 12161, 12162, 12163, 12164, 12165, 12166, 12167, 12170, 12171, 12172, 12173, 12174, 12175, 12176, 12177, 12178, 12179, 12180, 12181, 12182, 12183, 12184, 12185, 12186, 12187, 12188, 12189, 12190, 12191, 12192, 12193, 12194, 12195, 12196, 12197, 12199, 12200, 12201, 12202, 12203, 12204, 12205, 12206, 12207, 12208, 12209, 12210, 12211, 12212, 12216, 12217, 12218, 12219, 12220, 12221, 12222, 12223, 12224, 12225, 12226, 12227, 12228, 12229, 12230, 12231, 12232, 12233, 12234, 12235, 12236, 12237, 12238, 12239, 12240, 12241, 12242, 12243, 12244, 12245, 12246, 12247, 12248, 12249, 12250, 12251, 12252, 12253, 12254, 12255, 12256, 12257, 12258, 12259, 12260, 12261, 12262, 12263, 12264, 12265, 12266, 12267, 12268, 12269, 12270, 12271, 12272, 12273, 12274, 12275, 12276, 12277, 12278, 12279, 12280, 12281, 12282, 12283, 12284, 12285, 12286, 12287, 12288, 12289, 12290, 12291, 12292, 12293, 12294, 12295, 12296, 12297, 12298, 12299, 12300, 12301, 12302, 12303, 12304, 12305, 12306, 12307, 12308, 12309, 12310, 12311, 12312, 12313, 12314, 12315, 12316, 12317, 12318, 12319, 12320, 12321, 12322, 12323, 12324, 12325, 12326, 12327, 12328, 12329, 
12330, 12331, 12332, 12333, 12334, 12335, 12336, 12337, 12338, 12339, 12340, 12341, 12342, 12343, 12344, 12345, 12346, 12347, 12348, 12349, 12350, 12351, 12352, 12353, 12354, 12355, 12356, 12357, 12358, 12359, 12360, 12361, 12362, 12363, 12364, 12365, 12366, 12367, 12368, 12369, 12370, 12371, 12372, 12373, 12374, 12375, 12376, 12377, 12378, 12379, 12380, 12381, 12383, 12384, 12386, 12387, 12388, 12389, 12390, 12391, 12392, 12393, 12394, 12395, 12396, 12397, 12398, 12399, 12400, 12401, 12402, 12403, 12404, 12405, 12406, 12407, 12408, 12409, 12410, 12411, 12412, 12413, 12418, 12419, 12420, 12421, 12422, 12423, 12425, 12426, 12427, 12428, 12429, 12430, 12432, 12433, 12434, 12435, 12436, 12437, 12438, 12439, 12440, 12441, 12442, 12443, 12444, 12445, 12449, 12450, 12451, 12452, 12453, 12454, 12455, 12456, 12457, 12458, 12459, 12460, 12461, 12462, 12463, 12464, 12465, 12466, 12470, 12471, 12472, 12473, 12474, 12475, 12476, 12477, 12478, 12479, 12480, 12482, 12485, 12486, 12487, 12488, 12489, 12490, 12491, 12492, 12493, 12494, 12495, 12496, 12498, 12499, 12500, 12501, 12502, 12503, 12504, 12505, 12506, 12507, 12508, 12509, 12510, 12511, 12512, 12513, 12514, 12515, 12516, 12517, 12518, 12519, 12520, 12521, 12523, 12524, 12525, 12526, 12527, 12528, 12529, 12530, 12531, 12532, 12534, 12535, 12536, 12537, 12538, 12539, 12540, 12541, 12542, 12543, 12544, 12545, 12546, 12547, 12548, 12549, 12550, 12552, 12553, 12554, 12555, 12556, 12557, 12558, 12559, 12560, 12561, 12562, 12563, 12564, 12565, 12566, 12567, 12569, 12570, 12571, 12572, 12573, 12574, 12575, 12577, 12578, 12579, 12580, 12581, 12582, 12583, 12584, 12585, 12586, 12588, 12589, 12590, 12591, 12592, 12593, 12594, 12595, 12596, 12597, 12598, 12599, 12600, 12603, 12604, 12605, 12606, 12607, 12608, 12610, 12611, 12612, 12613, 12614, 12615, 12619, 12620, 12621, 12622, 12623, 12624, 12625, 12626, 12627, 12628, 12629, 12630, 12631, 12632, 12633, 12634, 12635, 12636, 12637, 12639, 12642, 12643, 12644, 12645, 12646, 12647, 
12648, 12649, 12650, 12651, 12652, 12654, 12655, 12656, 12657, 12658, 12659, 12660, 12661, 12662, 12663, 12664, 12665, 12666, 12667, 12669, 12670, 12671, 12672, 12673, 12674, 12675, 12676, 12677, 12678, 12679, 12680, 12682, 12683, 12684, 12685, 12686, 12687, 12688, 12689, 12690, 12691, 12692, 12693, 12694, 12696, 12697, 12698, 12699, 12700, 12701, 12702, 12703, 12704, 12705, 12706, 12707, 12710, 12711, 12712, 12713, 12714, 12715, 12716, 12717, 12718, 12719, 12720, 12721, 12722, 12723, 12724, 12725, 12726, 12728, 12729, 12730, 12731, 12732, 12733, 12734, 12735, 12736, 12737, 12738, 12741, 12742, 12743, 12744, 12745, 12746, 12747, 12748, 12749, 12750, 12751, 12752, 12753, 12754, 12755, 12756, 12757, 12758, 12759, 12760, 12761, 12762, 12763, 12764, 12765, 12766, 12767, 12768, 12769, 12770, 12771, 12772, 12773, 12775, 12776, 12777, 12778, 12779, 12780, 12784, 12785, 12786, 12787, 12788, 12789, 12790, 12792, 12793, 12795, 12796, 12797, 12798, 12799, 12800, 12801, 12802, 12803, 12804, 12805, 12806, 12807, 12808, 12809, 12810, 12811, 12812, 12813, 12814, 12815, 12816, 12817, 12818, 12819, 12820, 12821, 12822, 12823, 12824, 12825, 12826, 12827, 12828, 12829, 12830, 12831, 12832, 12833, 12834, 12835, 12836, 12839, 12840, 12841, 12842, 12843, 12844, 12845, 12846, 12847, 12848, 12849, 12851, 12854, 12855, 12856, 12857, 12858, 12859, 12860, 12861, 12862, 12863, 12866, 12867, 12868, 12869, 12870, 12871, 12872, 12873, 12874, 12875, 12876, 12877, 12878, 12885, 12886, 12887, 12888, 12889, 12890, 12891, 12892, 12893, 12894, 12895, 12896, 12897, 12899, 12900, 12901, 12902, 12903, 12904, 12906, 12907, 12908, 12909, 12910, 12911, 12912, 12913, 12914, 12915, 12916, 12917, 12918, 12921, 12922, 12923, 12924, 12925, 12926, 12929, 12930, 12931, 12932, 12933, 12934, 12935, 12936, 12937, 12938, 12939, 12944, 12945, 12946, 12947, 12948, 12949, 12950, 12951, 12952, 12953, 12954, 12956, 12957, 12958, 12959, 12960, 12961, 12962, 12964, 12965, 12966, 12967, 12968, 12969, 12970, 12971, 12972, 
12974, 12975, 12978, 12979, 12980, 12981, 12982, 12983, 12984, 12985, 12986, 12987, 12988, 12989, 12990, 12991, 12992, 12994, 12995, 12996, 12997, 12998, 12999, 13000, 13001, 13002, 13005, 13006, 13007, 13008, 13009, 13010, 13011, 13012, 13013, 13014, 13015, 13017, 13018, 13019, 13020, 13021, 13022, 13023, 13024, 13025, 13026, 13027, 13028, 13029, 13031, 13032, 13033, 13034, 13035, 13036, 13037, 13038, 13039, 13040, 13041, 13042, 13043, 13044, 13045, 13046, 13047, 13048, 13049, 13050, 13051, 13052, 13053, 13054, 13055, 13056, 13057, 13058, 13059, 13060, 13061, 13062, 13063, 13064, 13065, 13066, 13067, 13068, 13069, 13070, 13071, 13072, 13073, 13076, 13077, 13078, 13079, 13080, 13081, 13082, 13083, 13084, 13085, 13086, 13087, 13088, 13089, 13091, 13092, 13093, 13094, 13095, 13096, 13097, 13098, 13099, 13100, 13101, 13102, 13103, 13104, 13105, 13107, 13108, 13109, 13110, 13111, 13112, 13113, 13114, 13115, 13119, 13120, 13121, 13122, 13123, 13124, 13125, 13126, 13127, 13128, 13129, 11425, 11425, 11424, 13136, 13137, 13138, 13139, 13140, 13141, 13142, 13143, 13144, 13145, 13146, 11605, 13147, 13148, 13149, 13150, 13151, 13152, 11637, 13153, 13154, 13155, 13156, 11443, 11452, 13157, 13158, 13159, 13160, 11425, 11424, 13161, 13162, 13163, 13164, 11436, 11425, 11425, 11424, 13165, 13166, 13167, 13168, 13169, 13170, 13171, 13172, 11425, 11424, 13173, 13174, 12002, 11418, 12004, 11418, 11425, 11424, 13175, 13176, 13177, 13178, 13179, 13180, 13181, 13182, 13183, 13184, 13185, 13186, 13187, 13188, 13189, 11436, 11435, 13190, 13191, 13192, 13193, 13194, 13195, 11425, 11424, 13196, 13197, 13198, 13199, 13200, 13201, 13202, 13203, 13204, 13205, 13206, 13207, 13208, 13209, 11443, 11452, 12414, 11443, 12416, 11452, 11425, 11425, 11424, 13210, 12467, 11452, 11443, 11418, 11418, 13211, 13212, 13213, 13214, 11424, 13215, 13216, 13217, 13218, 13219, 13220, 11452, 13221, 13222, 13223, 13224, 13225, 13226, 13227, 13228, 11452, 13229, 13230, 13231, 11421, 11436, 11435, 11425, 11425, 
11424, 11418, 11421, 11418, 11418, 13232, 11421, 11449, 11449, 13233, 13234, 13235, 13236, 13237, 13238, 13239, 13240, 13241, 11443, 13242, 13243, 11443, 13244, 13245, 11452, 13246, 11425, 11425, 11424, 11418, 13247, 13248, 13249, 13250, 13251, 13252, 13253, 13254, 13255, 13256, 11418, 11421, 11421, 11449, 11421, 11432, 11430, 11443, 11432, 11430, 11452, 13257, 11425, 13258, 11436, 11425, 11435, 11424, 13259, 13260, 12941, 11452, 11443, 11418, 11436, 11436, 13261, 13262, 11443, 13263, 13264, 11452, 11436, 11436, 13265, 13266, 13267, 13268, 13269, 13270, 13271, 13272, 11452, 11418, 13273, 11421, 13274, 13275, 13276, 13277, 11418, 11421, 11421, 11436, 11436, 13278, 13279, 11443, 13280, 13281, 11452, 11449, 11446, 11443, 11449, 11446, 11452, 13, 14, 15, 13296, 13299, 13304, 13307, 13310, 13312, 13314, 13317, 13320, 13323, 13326, 13328, 13330, 13333, 13336, 13341, 13349, 13352, 13355, 13361, 13364, 13367, 13369, 13371, 13374, 13377, 13380, 13383, 13386, 13393, 13396, 13399, 13405, 13407, 13409, 13411, 13414, 13416, 13419, 13422, 13430, 13433, 13441, 13443, 13446, 13451, 13454, 13457, 13462, 13465, 13472, 13474, 13477, 13480, 13482, 13486, 13489, 13492, 13494, 13496, 13499, 13502, 13505, 13508, 13513, 13516, 13519, 13522, 13526, 13529, 13532, 13534, 13536, 13541, 13544, 13547, 13549, 13551, 13554, 13557, 13560, 13563, 13567, 13570, 13577, 13579, 13583, 13585, 13587, 13590, 13593, 13595, 13597, 13600, 13604, 13606, 13608, 13611, 13614, 13617, 13620, 13627, 13630, 13635, 13638, 13641, 13644, 13647, 13649, 13651, 13654, 13659, 13662, 13665, 13667, 13671, 13674, 13676, 13678, 13681, 13684, 13687, 13692, 13695, 13702, 13705, 13708, 13711, 13715, 13718, 13721, 13725, 13729, 13732, 13735, 13740, 13742, 13744, 13747, 13754, 13757, 13761, 13763, 13766, 13769, 13774, 13777, 13780, 13782, 13786, 13788, 13791, 13793, 13795, 13800, 13803, 13806, 13812, 13815, 13818, 13823, 13826, 13829, 13832, 13835, 13838, 13840, 13842, 13844, 13846, 13850, 13853, 13861, 13864, 13869, 13872, 13877, 
13880, 13887, 13889, 13891, 13895, 13898, 13901, 13903, 13905, 13908, 13911, 13913, 13916, 13919, 13922, 13927, 13930, 13933, 13935, 13940, 13943, 13947, 13949, 13951, 13954, 13956, 13958, 13961, 13964, 13967, 13970, 13972, 13974, 13977, 13980, 13983, 13985, 13987, 13990, 13993, 13995, 13997, 13999, 14001, 14004, 14007, 14011, 14014, 14018, 14022, 14027, 14030, 14033, 14038, 14041, 14044, 14047, 14051, 14054, 14057, 14062, 14065, 14067, 14069, 14072, 14077, 14079, 14081, 14084, 14087, 14091, 14096, 14099, 14102, 14105, 14108, 14110, 14112, 14115, 14118, 14120, 14122, 14125, 14128, 14131, 14133, 14135, 14138, 14141, 14144, 14147, 14150, 14152, 14154, 14157, 14160, 14163, 14167, 14169, 14172, 14176, 14179, 14183, 14186, 14191, 14193, 14195, 14197, 14202, 14204, 14206, 14211, 14214, 14221, 14224, 14227, 14230, 14234, 14236, 14238, 14241, 14244, 14248, 14251, 14254, 14257, 14261, 14265, 14268, 14277, 14280, 14283, 14286, 14289, 14292, 14299, 14302, 14307, 14310, 14314, 14317, 14321, 14323, 14326, 14331, 14334, 14338, 14340, 14343, 14350, 14353, 14357, 14360, 14364, 14367, 14370, 14373, 14375, 14377, 14380, 14383, 14386, 14389, 14392, 14395, 14399, 14402, 14406, 14409, 14418, 14421, 14424, 14426, 14429, 14432, 14435, 14437, 14439, 14443, 14446, 14449, 14451, 14453, 14455, 14458, 14461, 14463, 14465, 14468, 14471, 14474, 14477, 14480, 14483, 14488, 14491, 14495, 14497, 14500, 14504, 14508, 14511, 14522, 14524, 14526, 14529, 14532, 14535, 14538, 14541, 14544, 14548, 14551, 14556, 14559, 14563, 14566, 14569, 14571, 14574, 14579, 14582, 14587, 14590, 14593, 14595, 14598, 14601, 14610, 14613, 14616, 14618, 14620, 14623, 14626, 14628, 14631, 14633, 14635, 14637, 14640, 14643, 14646, 14649, 14653, 14656, 14659, 14662, 14665, 14668, 14671, 14674, 14678, 14682, 14685, 14688, 14690, 14693, 14696, 14702, 14705, 14711, 14714, 14719, 14722, 14726, 14729, 14735, 14738, 14741, 14743, 14746, 14749, 14752, 14754, 14756, 14759, 14762, 14766, 14769, 14773, 14776, 14781, 14784, 14787, 
14790, 14793, 14796, 14799, 14802, 14805, 14808, 14810, 14812, 14816, 14819, 14824, 14827, 14831, 14834, 14840, 14843, 14847, 13303, 14851, 14852, 14853, 14854, 14856, 13340, 13347, 11425, 11424, 11446, 13359, 11421, 11449, 11443, 11449, 11452, 13403, 13404, 11582, 13428, 11425, 11424, 11421, 13439, 11425, 11424, 14861, 11421, 14863, 14865, 14866, 11449, 11449, 11432, 14868, 11432, 14870, 13471, 11425, 11424, 14872, 13485, 13512, 11418, 11430, 11418, 11430, 13576, 11436, 11435, 14873, 14875, 14877, 14878, 13582, 14881, 13603, 14883, 14884, 14885, 11425, 11424, 11436, 11435, 11436, 11435, 14887, 14889, 14890, 13657, 14891, 14892, 11449, 13670, 13691, 14893, 13701, 11425, 11424, 11449, 14895, 14897, 13724, 13728, 14901, 14902, 14903, 11436, 11435, 13753, 11425, 11424, 11449, 11418, 13785, 11430, 11452, 11432, 13811, 13822, 14905, 14906, 14907, 14908, 14909, 14910, 14911, 11446, 13859, 13857, 11421, 11449, 11449, 13876, 14914, 13886, 11425, 11424, 11446, 11430, 11432, 11418, 13938, 11421, 11430, 14917, 14919, 14921, 14926, 14927, 12198, 14930, 14017, 14021, 14025, 14934, 14935, 11446, 11432, 11421, 11430, 14061, 14076, 14938, 11436, 11425, 11424, 14944, 11424, 11425, 11436, 11435, 14946, 14948, 14950, 14951, 11424, 11435, 11425, 11436, 14220, 11436, 11435, 14952, 14953, 14954, 14955, 14956, 14233, 14247, 14957, 14958, 14264, 11435, 14960, 14961, 14962, 14274, 14272, 12483, 12481, 14963, 14964, 14298, 11425, 11424, 14965, 14967, 11425, 11424, 11425, 14969, 11435, 14970, 14972, 14974, 14976, 11425, 11424, 11435, 14977, 14980, 14983, 14985, 14349, 11425, 11424, 14986, 14356, 14363, 14989, 14990, 14991, 14992, 14398, 14993, 14994, 11446, 14415, 14413, 12640, 12638, 14995, 11432, 14996, 11430, 14997, 14998, 15000, 15001, 15002, 11425, 11424, 11435, 15004, 15007, 15010, 15012, 14507, 11435, 15013, 15015, 15016, 15018, 14517, 11425, 11424, 11432, 11430, 11432, 11430, 15020, 14547, 15021, 15022, 14554, 11446, 15023, 11435, 11425, 11424, 11425, 11424, 15024, 15027, 15030, 
15032, 11430, 15034, 15035, 14607, 11425, 11424, 12852, 12850, 15036, 15037, 15038, 11435, 15039, 15040, 15041, 15042, 15043, 15044, 15046, 14652, 15048, 15049, 15050, 15051, 14681, 11435, 15054, 15055, 15056, 11432, 15057, 14701, 15058, 11435, 14710, 15059, 11435, 15060, 15062, 15063, 15065, 11425, 11424, 15066, 11435, 15067, 11435, 11425, 11424, 15068, 15071, 15074, 15076, 11432, 15077, 15079, 11424, 11425, 11425, 11424, 11436, 15080, 15084, 15085, 11432, 15086, 11425, 11424, 15087, 11435, 15088, 11435, 11425, 11424, 15089, 15091, 15092, 15094, 14850, 11435, 15095, 15096, 15097, 15098, 15099, 15100, 15082, 15053, 15006, 14979, 15082, 15053, 15019, 15003, 15006, 14936, 15082, 15053, 15082, 15070, 15053, 15082, 15053, 15082, 15053, 15082, 15053, 15082, 15053, 15019, 15003, 15026, 15070, 15026, 14979, 14943, 15053, 15053, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 13298, 13301, 15584, 13309, 13306, 13316, 11365, 11364, 13319, 13322, 15586, 13325, 11436, 11435, 13332, 13335, 13338, 15590, 13343, 15591, 15592, 15593, 15594, 13354, 13351, 15595, 13357, 15596, 13366, 13363, 11425, 11424, 13376, 13373, 13379, 13382, 13388, 13385, 15597, 15598, 15599, 15600, 13398, 13395, 15601, 13401, 15602, 11443, 15137, 11452, 13413, 15141, 15603, 13424, 13421, 15604, 15605, 15606, 15607, 13435, 13432, 15608, 15609, 15610, 15612, 11418, 13445, 13448, 15616, 15617, 13453, 13459, 13456, 15618, 15620, 13467, 13464, 15622, 15623, 15624, 11418, 13479, 13476, 15626, 11425, 11424, 13491, 13488, 11425, 11424, 13501, 13498, 13504, 13507, 13510, 15627, 13518, 13515, 13524, 13521, 15628, 13531, 13528, 13538, 11425, 11424, 15629, 15630, 13546, 13543, 13553, 11425, 11424, 13556, 13559, 13562, 13565, 15631, 13572, 13569, 15632, 15633, 15634, 15635, 15188, 13581, 15639, 13589, 11243, 11364, 13592, 13599, 11243, 11364, 13602, 15641, 13610, 11365, 11364, 13616, 13613, 13622, 13619, 15645, 15646, 15647, 15648, 13632, 13629, 15649, 15650, 13640, 13637, 13643, 13646, 15212, 15652, 13653, 13656, 
15654, 15657, 13664, 13661, 15658, 11425, 11424, 13673, 13680, 11425, 11424, 13686, 13683, 13689, 15659, 13694, 13697, 15661, 15662, 15663, 13704, 13707, 13713, 13710, 15664, 13717, 13720, 13723, 15667, 13727, 15668, 13731, 13734, 13737, 15672, 15673, 13746, 11365, 11364, 13749, 15674, 15675, 15676, 13756, 13759, 15245, 13765, 13771, 13768, 15677, 15678, 13779, 13776, 15679, 11425, 11424, 11418, 13790, 13797, 11425, 11424, 15680, 15681, 13802, 13808, 13805, 15682, 15683, 13817, 13814, 15684, 13820, 13825, 15689, 13831, 13828, 13834, 13837, 11436, 11435, 13848, 11365, 11364, 15692, 13855, 13852, 15693, 15694, 15695, 13863, 13866, 15696, 15697, 13871, 13874, 15698, 13879, 13882, 15700, 15701, 15702, 13893, 11365, 11364, 15703, 13900, 13897, 11425, 11424, 13910, 13907, 11436, 11435, 15704, 13918, 13924, 13921, 15705, 15706, 13932, 13929, 15707, 11425, 11424, 15708, 13942, 13945, 15709, 13953, 11365, 11364, 13960, 11365, 11364, 13963, 13966, 13969, 13976, 11365, 11364, 13979, 13982, 15713, 13989, 11365, 11364, 13992, 11244, 11243, 11365, 11364, 14003, 14009, 14006, 15715, 14013, 14016, 15717, 14020, 15718, 14024, 15719, 15722, 14029, 14035, 14032, 15723, 15724, 14043, 14040, 14049, 14046, 15725, 14056, 14053, 15726, 14059, 14064, 14071, 11425, 11424, 14074, 15727, 15347, 14083, 14089, 14086, 15729, 14093, 15730, 15731, 14101, 14098, 14107, 14104, 14114, 11365, 11364, 14117, 14124, 11425, 11424, 14130, 14127, 11425, 11424, 14140, 14137, 14143, 14149, 14146, 14156, 11425, 11424, 14159, 14165, 14162, 15378, 14174, 14171, 15733, 14181, 14178, 15734, 14188, 14185, 15735, 15736, 15737, 15385, 14199, 11244, 11243, 15741, 15742, 14208, 11244, 11243, 15743, 15744, 14216, 14213, 15745, 15746, 15747, 14226, 14223, 14232, 14229, 15753, 14240, 11365, 11364, 14246, 14243, 15754, 14253, 14250, 14259, 14256, 15757, 11436, 15758, 15760, 14270, 14267, 15762, 15763, 15764, 15765, 14282, 14279, 14288, 14285, 14294, 14291, 15768, 15769, 15770, 14304, 14301, 15773, 15774, 14312, 14309, 
15775, 14319, 14316, 11436, 15777, 14328, 14325, 15782, 15783, 14336, 14333, 11436, 15784, 14345, 14342, 15789, 15790, 15791, 14355, 14352, 15793, 14362, 14359, 15794, 14369, 14366, 14372, 14379, 11365, 11364, 14385, 14382, 15796, 14391, 14388, 14397, 14394, 15799, 14404, 14401, 15802, 14411, 14408, 15803, 15804, 15805, 15806, 14423, 14420, 15453, 15808, 14434, 14431, 14441, 11425, 11424, 15810, 14448, 14445, 15462, 11449, 14460, 14457, 14467, 11425, 11424, 14473, 14470, 14479, 14476, 14485, 14482, 15816, 15817, 14493, 14490, 11436, 15818, 14502, 14499, 15823, 11436, 15824, 14513, 14510, 15829, 15830, 15831, 15832, 15833, 15834, 15835, 14528, 11365, 11364, 14534, 14531, 14540, 14537, 14546, 14543, 15837, 14553, 14550, 15840, 15841, 14561, 14558, 15843, 14568, 14565, 11436, 14576, 14573, 15844, 15845, 14584, 14581, 15846, 15847, 14592, 14589, 15506, 15852, 15853, 14603, 14600, 15855, 15856, 15857, 15858, 15859, 14615, 14612, 15512, 14625, 14622, 11436, 15863, 11425, 11424, 15864, 15867, 14639, 11365, 11364, 14645, 14642, 14651, 14648, 15871, 14658, 14655, 14664, 14661, 15872, 14670, 14667, 15874, 14676, 14673, 15876, 11436, 15877, 15879, 14687, 14684, 15537, 15881, 14698, 14695, 15883, 15885, 14707, 14704, 15886, 15888, 14716, 14713, 15893, 15894, 14724, 14721, 15896, 14731, 14728, 15898, 15899, 15900, 14740, 14737, 15551, 15905, 14751, 14748, 14758, 11425, 11424, 14764, 14761, 15908, 14771, 14768, 15909, 14778, 14775, 15910, 15911, 14786, 14783, 15912, 11435, 14795, 14792, 14801, 14798, 15914, 14807, 14804, 14814, 11425, 11424, 15916, 14821, 14818, 15918, 15919, 14829, 14826, 15921, 14836, 14833, 15923, 15924, 15925, 14845, 14842, 15930, 11436, 15931, 15932, 15935, 15938, 15939, 15940, 15941, 15942, 15943, 15082, 15053, 15944, 15945, 15946, 15947, 15948, 15949, 15950, 15951, 15952, 15953, 15954, 15688, 15686, 15955, 15956, 15957, 15958, 15959, 15960, 15961, 15962, 15963, 15964, 15965, 15966, 15967, 15751, 15749, 15968, 15767, 15019, 15082, 15781, 14982, 14979, 
15788, 14982, 14979, 15019, 15795, 15813, 15003, 15053, 15822, 15009, 15006, 15828, 15826, 15073, 15070, 15029, 15026, 15862, 15969, 15892, 15890, 15904, 15073, 15070, 15907, 15082, 15929, 15927, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15984, 15985, 15987, 15988, 15989, 15990, 15991, 15992, 15993, 15995, 15996, 15997, 15998, 15999, 16000, 16002, 16003, 16007, 16008, 16010, 16012, 16013, 16014, 16015, 16016, 16017, 16018, 16019, 16020, 16021, 16026, 16027, 16029, 16030, 16031, 16032, 16033, 16034, 16035, 16037, 16038, 16039, 16043, 16044, 16045, 16049, 16050, 16051, 16054, 16055, 16056, 16059, 16060, 16061, 16064, 16065, 16066, 16068, 16069, 16070, 16071, 16072, 16073, 16074, 16075, 16076, 16077, 16078, 16080, 16081, 16082, 16083, 16085, 16086, 16087, 16088, 16089, 16092, 16093, 16094, 16095, 16096, 16097, 16098, 16099, 16100, 16102, 16103, 16104, 16108, 16109, 16110, 16111, 16112, 16113, 16114, 16115, 16116, 16117, 16118, 16119, 16120, 16121, 16122, 16123, 16124, 16125, 16126, 16127, 16129, 16131, 16132, 16133, 16135, 16136, 16137, 16138, 16139, 16141, 16142, 16143, 16145, 16146, 16148, 16149, 16150, 16151, 16152, 16153, 16154, 16155, 16156, 16158, 16159, 16160, 16163, 16164, 16165, 16166, 16168, 16169, 16170, 16171, 16172, 16173, 16174, 16175, 16176, 16177, 16179, 16180, 16181, 16182, 16183, 16186, 16187, 16188, 16189, 16190, 16191, 16194, 16195, 16197, 16198, 16199, 16200, 16201, 16202, 16203, 16206, 16207, 16208, 16211, 16212, 16214, 16215, 16217, 16218, 16219, 16220, 16221, 16222, 16223, 16224, 16225, 16227, 16228, 16229, 16232, 16233, 16236, 16237, 16239, 16240, 16241, 16244, 16245, 16246, 16248, 16249, 16250, 16251, 16252, 16253, 16254, 16255, 16257, 16258, 16259, 16262, 16263, 16265, 16266, 16268, 16269, 16271, 16272, 16273, 16274, 16275, 16276, 16277, 16278, 16279, 16280, 16281, 16282, 16283, 16284, 16286, 16287, 16288, 16289, 16290, 16291, 16292, 16293, 16294, 16295, 16296, 16298, 16299, 16301, 16302, 16303, 16304, 16306, 16307, 16308, 16311, 16312, 16313, 
16314, 16316, 16317, 16319, 16320, 16321, 16322, 16323, 16324, 16326, 16327, 16328, 16329, 16331, 16332, 16334, 16335, 16336, 16337, 16338, 16339, 16340, 16341, 16342, 16343, 16344, 16345, 16346, 16347, 16348, 16349, 16350, 16351, 16352, 16353, 16354, 16355, 16356, 16357, 16358, 16359, 16360, 16361, 16362, 16364, 16365, 16367, 16368, 16369, 16372, 16373, 16374, 16375, 16376, 16378, 16379, 16380, 16381, 16383, 16384, 16385, 16388, 16389, 16390, 16391, 16393, 16394, 16395, 16396, 16397, 16398, 16399, 16400, 16401, 16402, 16404, 16407, 16408, 16409, 16411, 16413, 16414, 16415, 16416, 16417, 16418, 16419, 16422, 16423, 16424, 16426, 16427, 16428, 16429, 16430, 16431, 16433, 16434, 16435, 16437, 16438, 16439, 16441, 16442, 16443, 16446, 16447, 16449, 16450, 16452, 16453, 16454, 16455, 16456, 16457, 16458, 16459, 16461, 16462, 16463, 16464, 16465, 16466, 16467, 16469, 16470, 16471, 16473, 16475, 16476, 16477, 16479, 16480, 16481, 16482, 16483, 16485, 16486, 16487, 16488, 16489, 16490, 16491, 16492, 16493, 16494, 16495, 16496, 16497, 16498, 16499, 16500, 16502, 16503, 16504, 16506, 16507, 16509, 16511, 16512, 16513, 16516, 16518, 16520, 16521, 16522, 16523, 16524, 16525, 16526, 16527, 16528, 16529, 16530, 16531, 16534, 16535, 16537, 16538, 16539, 16540, 16541, 16542, 16544, 16545, 16546, 16548, 16549, 16550, 16553, 16554, 16555, 16558, 16560, 16561, 16562, 16563, 16564, 16565, 16567, 16568, 16571, 16572, 16573, 16574, 16575, 16576, 16577, 16579, 16580, 16581, 16582, 16584, 16585, 16587, 16588, 16590, 16593, 16594, 16595, 16597, 16598, 16599, 16601, 16602, 16603, 16605, 16606, 16607, 16609, 16610, 15895, 16612, 16613, 15897, 16615, 16617, 16618, 16619, 16621, 16622, 16623, 16624, 16625, 16626, 16627, 16629, 16630, 16632, 16633, 16634, 16636, 16637, 16639, 16640, 16641, 16642, 16643, 16645, 16646, 16647, 16648, 16649, 16651, 16652, 16653, 16655, 16656, 15920, 16658, 16659, 15922, 16661, 16663, 16664, 16666, 15019, 15073, 16011, 16670, 16672, 16025, 16023, 15029, 16042, 
16674, 16676, 16677, 16048, 14929, 15003, 15621, 15619, 16678, 15019, 16084, 16091, 15082, 15637, 15053, 15082, 16193, 16205, 16210, 16689, 16690, 14943, 16231, 16691, 15029, 15073, 15003, 15070, 16261, 16267, 16693, 15710, 14929, 14936, 16310, 14979, 16695, 16697, 15739, 16704, 16705, 16406, 16707, 16708, 16709, 16710, 16711, 16712, 16713, 16714, 16715, 16716, 16717, 15003, 15809, 15811, 16718, 16719, 16720, 16721, 16722, 16723, 16724, 16725, 15842, 16726, 16727, 16728, 16729, 16552, 16730, 15869, 15866, 16592, 15882, 16732, 16733, 16734, 16735, 16736, 15906, 16737, 16738, 16644, 15917, 16739, 16740, 15937, 15934, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16754, 16756, 16762, 16768, 16769, 16009, 16772, 16774, 16776, 16780, 16782, 16028, 16785, 16791, 16793, 16794, 16796, 16801, 16803, 16805, 16807, 16067, 16811, 16813, 16815, 16820, 16822, 16824, 16826, 16829, 16831, 16838, 16840, 16843, 16844, 16848, 16852, 16853, 16856, 16858, 16860, 16862, 16865, 16872, 16873, 16147, 16878, 16881, 16886, 16889, 16894, 16896, 16901, 16905, 16910, 16912, 16196, 16918, 16922, 16924, 16213, 16928, 16932, 16934, 16937, 16946, 16947, 16950, 16952, 16954, 16956, 16959, 16961, 16264, 16967, 16970, 16976, 16981, 16985, 16987, 16990, 16995, 16997, 16999, 17001, 17003, 17005, 16318, 17009, 17015, 17019, 17021, 17023, 17027, 17030, 17032, 17034, 17037, 17039, 17043, 17046, 17048, 17050, 17054, 17058, 17062, 17064, 17065, 17067, 17069, 17072, 17074, 17075, 17077, 16403, 17080, 17084, 17086, 17088, 17090, 17091, 17094, 17097, 17099, 17100, 17103, 17105, 17106, 17108, 17109, 17111, 17113, 17116, 17119, 17121, 17123, 17125, 17126, 17128, 17132, 17135, 17137, 17140, 17144, 17146, 17149, 17151, 17153, 17156, 17158, 17159, 16508, 17162, 17164, 17167, 17170, 17172, 17174, 17176, 17177, 17179, 17181, 17184, 17187, 17190, 17193, 17195, 17197, 17200, 17202, 17203, 17205, 17208, 17210, 17212, 17214, 17216, 17218, 16589, 17221, 17224, 17226, 17227, 17229, 17230, 17233, 17236, 17238, 17240, 
17243, 17245, 17248, 17250, 17252, 17255, 16638, 17258, 17260, 17262, 17264, 17267, 17270, 17273, 17275, 17277, 16665, 16753, 17280, 15994, 15585, 16001, 15589, 15588, 17281, 17282, 17285, 17286, 16788, 16790, 17287, 17288, 16797, 17292, 16799, 17293, 17294, 17295, 17296, 16806, 16819, 17298, 17299, 17300, 16837, 16835, 17301, 15638, 17302, 15640, 15651, 16140, 14942, 15045, 17303, 14940, 15660, 16157, 17304, 15666, 15665, 14940, 16900, 15671, 16907, 14982, 17305, 16916, 17306, 17307, 17308, 16216, 15691, 17310, 17311, 16941, 17313, 17314, 14940, 16238, 15699, 17315, 17316, 17317, 17318, 16966, 17320, 16285, 14923, 15711, 14925, 14928, 15712, 17321, 16300, 15716, 17322, 17323, 17324, 17325, 15728, 17018, 16325, 14940, 15732, 15740, 17327, 17328, 17330, 15766, 17332, 17334, 17337, 17342, 15807, 17343, 17344, 15812, 17346, 17348, 17351, 15019, 15082, 17353, 17354, 17356, 17358, 15860, 17360, 17361, 17362, 17363, 17364, 17366, 17369, 17372, 17373, 17374, 17376, 17377, 9, 10, 11, 12, 13, 14, 15, 17393, 17413, 17420, 17422, 17426, 17427, 17429, 17437, 17438, 17444, 17448, 17449, 17455, 17458, 17465, 17466, 17467, 17468, 17469, 17470, 17480, 17484, 17485, 17490, 17495, 17496, 17501, 17506, 17524, 17533, 17536, 17543, 17546, 17561, 17563, 17570, 17582, 17591, 17597, 17598, 17394, 15047, 17600, 17601, 17395, 17602, 17603, 17604, 17397, 17283, 16778, 17399, 17401, 17607, 17403, 17609, 16786, 17610, 17406, 17289, 17408, 17613, 17291, 17615, 17616, 17409, 17618, 17411, 17620, 16817, 17415, 17621, 17418, 17625, 17626, 17424, 17628, 17428, 17425, 17630, 17432, 14940, 16864, 15644, 17631, 17435, 17632, 17633, 17634, 17636, 17637, 17440, 17638, 15045, 17441, 17443, 17442, 17640, 17641, 17445, 17642, 17643, 17644, 17645, 17646, 17446, 17648, 17450, 17452, 17652, 17454, 14940, 17653, 16939, 17312, 17656, 17657, 17659, 17660, 17661, 17457, 17462, 17460, 17463, 17319, 17666, 17668, 17669, 17670, 17671, 17672, 17673, 17472, 17474, 17473, 17675, 17676, 17475, 17477, 17479, 14941, 
17681, 16330, 14942, 17682, 17683, 17684, 17036, 17487, 17491, 17685, 17052, 16366, 16363, 17686, 17498, 16392, 15752, 17503, 15047, 17082, 17690, 17509, 17511, 17515, 17096, 17093, 17692, 17518, 17102, 17693, 17520, 17115, 16451, 16448, 17528, 15798, 16460, 15047, 17130, 17695, 17134, 17142, 17698, 17538, 17541, 17155, 17700, 17545, 17702, 17703, 16532, 15836, 17550, 15047, 17189, 17186, 17183, 16536, 17705, 17192, 17558, 17708, 17199, 17709, 16578, 15870, 16586, 16583, 15047, 17223, 17575, 17573, 17579, 17235, 17232, 17714, 17242, 17587, 17254, 16631, 16628, 17589, 17595, 17272, 17269, 17719, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15045, 17768, 17769, 17772, 17774, 17776, 17778, 17779, 17780, 17782, 17784, 17786, 17788, 17290, 17793, 17795, 17729, 17797, 17798, 17800, 17730, 17731, 17801, 17803, 17804, 14940, 15045, 17805, 17806, 15045, 17808, 17809, 17810, 17811, 17813, 17814, 17735, 17736, 17819, 17821, 17817, 17822, 17823, 17824, 17825, 15045, 17827, 17828, 17833, 17738, 17739, 17835, 17836, 15045, 17838, 17839, 17841, 15045, 17848, 17846, 17849, 17850, 17851, 17742, 14942, 14924, 15047, 14940, 15045, 17857, 17860, 17861, 17862, 17863, 17865, 17866, 17867, 17748, 17868, 15045, 17870, 17871, 17873, 17750, 17875, 17876, 17751, 17877, 17879, 17880, 17881, 17882, 17883, 17061, 17057, 17884, 17885, 17886, 17887, 15045, 17755, 17888, 17890, 17891, 17892, 17893, 17894, 17896, 17897, 17899, 17900, 17901, 17902, 17903, 17904, 17905, 15045, 17906, 17907, 17909, 17757, 17910, 17758, 17912, 17913, 17914, 17759, 17916, 17917, 17919, 15045, 17920, 17921, 17922, 17923, 17924, 17925, 17926, 17928, 17929, 17931, 17761, 17933, 17934, 15045, 17935, 17936, 17937, 17763, 17938, 17939, 17940, 17941, 17942, 17943, 17945, 17764, 17946, 17947, 17948, 17949, 17950, 17765, 17951, 17952, 17953, 17766, 17667, 17832, 17844, 17792, 17599, 17611, 17622, 14, 15, 17968, 17970, 17971, 17974, 17783, 17981, 17984, 17985, 17988, 17989, 17993, 17994, 17995, 17997, 17999, 17812, 18004, 18005, 
18006, 18010, 18013, 18017, 18018, 18021, 18022, 18025, 18028, 18031, 18032, 18033, 18034, 18035, 18036, 18039, 18045, 18047, 17869, 18049, 18051, 18052, 18054, 18056, 18061, 18062, 18067, 18063, 18065, 18068, 18072, 18075, 18078, 18084, 18081, 18088, 18090, 18092, 18094, 18098, 18099, 18102, 18104, 18109, 18112, 18110, 18114, 18116, 18118, 18120, 18124, 18125, 18127, 18130, 18131, 18134, 17781, 18135, 18136, 17674, 17639, 17627, 18137, 17664, 17623, 18138, 17678, 18139, 17679, 17878, 17651, 17647, 17777, 17650, 17787, 17992, 18140, 17796, 18141, 17842, 17680, 17794, 17707, 17691, 17715, 17716, 17699, 17911, 17930, 17712, 17696, 17331, 18096, 17908, 17359, 17340, 17889, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18144, 18146, 18154, 18157, 18159, 18008, 18163, 18164, 18167, 18169, 17854, 17855, 18174, 18175, 18177, 18046, 18181, 18185, 18060, 18189, 18192, 18194, 18083, 18097, 18203, 18206, 18211, 18213, 18216, 18218, 16703, 18221, 17649, 18148, 17297, 18222, 16702, 18223, 16699, 17663, 18225, 16685, 17852, 18226, 17624, 18228, 18230, 18231, 16684, 18232, 18233, 18234, 18235, 18236, 18237, 16680, 18239, 16701, 18149, 18241, 18242, 17284, 18243, 17834, 16681, 18219, 17697, 17713, 18244, 17689, 17932, 17898, 17717, 18245, 18246, 18247, 17915, 17701, 18248, 18249, 17370, 18250, 18251, 18252, 17345, 18253, 18254, 18255, 18256, 17711, 18257, 17954, 18258, 13, 14, 15, 18272, 18274, 18275, 18276, 18277, 18279, 18280, 18281, 18282, 18284, 18285, 18287, 18290, 18291, 18196, 18295, 18207, 18302, 18304, 17677, 18305, 18306, 18308, 18310, 18311, 18313, 18314, 18316, 18320, 17605, 18327, 18059, 16687, 18329, 18330, 18333, 18335, 18336, 18317, 18321, 18324, 18238, 18338, 18339, 17944, 18341, 18342, 17927, 18343, 18344, 18348, 17341, 17718, 18349, 18352, 18356, 17895, 18361, 17371, 18363, 18346, 18350, 18354, 15, 18368, 18369, 18370, 18373, 18374, 18375, 18376, 18379, 18382, 18383, 18384, 18387, 16686, 18397, 18399, 18400, 17635, 18385, 18389, 18390, 18224, 18312, 18394, 18395, 
18319, 18326, 18401, 18332, 18334, 18412, 18415, 18419, 18420, 18424, 17688, 16706, 18426, 18410, 18413, 18417, 18422, 18423, 18425, 18427, 12, 13, 14, 15, 18438, 16706, 16682, 16683, 18444, 16688, 16700, 18448, 17654, 17662, 18443, 18445, 18446, 18453, 18455, 18408, 18409, 18459, 16731, 17694, 17704, 18466, 18467, 18461, 18462, 18463, 18359, 18471, 18472, 13, 14, 15, 18481, 18482, 18483, 18485, 18486, 16731, 18488, 18489, 18490, 18407, 18498, 18499, 18500, 18501, 18502, 18469, 18470, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18517, 18512, 18386, 18514, 18515, 18516, 18402, 18331, 18521, 18522, 18421, 18524, 18506, 18474, 18527, 15, 18544, 18545, 18449, 18451, 18452, 18549, 18550, 18553, 18554, 18473, 18557, 11, 12, 13, 14, 15, 18492, 18561, 18520, 18564, 18494, 18566, 18507, 18568, 18569, 9, 10, 11, 12, 13, 14, 15, 18576, 18577, 18579, 18581, 18558, 18583, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18552, 18593, 18596, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18608, 18610, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18609, 7591, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18641, 7552, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18657, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18656, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
int h_C[]= {
1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113, 115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 287, 289, 291, 293, 295, 297, 299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343, 345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 373, 375, 377, 379, 381, 383, 385, 387, 389, 391, 393, 395, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435, 437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 475, 477, 479, 481, 483, 485, 487, 489, 491, 493, 495, 497, 499, 501, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527, 529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 559, 561, 563, 565, 567, 569, 571, 573, 575, 577, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619, 621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 641, 643, 645, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665, 667, 669, 671, 673, 675, 677, 679, 681, 683, 685, 687, 689, 691, 693, 695, 697, 699, 701, 703, 705, 707, 709, 711, 713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757, 759, 761, 763, 765, 767, 769, 771, 773, 775, 777, 779, 781, 783, 785, 787, 789, 791, 793, 795, 797, 799, 801, 803, 805, 807, 809, 811, 813, 815, 817, 819, 821, 
823, 825, 827, 829, 831, 833, 835, 837, 839, 841, 843, 845, 847, 849, 851, 853, 855, 857, 859, 861, 863, 865, 867, 869, 871, 873, 875, 877, 879, 881, 883, 885, 887, 889, 891, 893, 895, 897, 899, 901, 903, 905, 907, 909, 911, 913, 915, 917, 919, 921, 923, 925, 927, 929, 931, 933, 935, 937, 939, 941, 943, 945, 947, 949, 951, 953, 955, 957, 959, 961, 963, 965, 967, 969, 971, 973, 975, 977, 979, 981, 983, 985, 987, 989, 991, 993, 995, 997, 999, 1001, 1003, 1005, 1007, 1009, 1011, 1013, 1015, 1017, 1019, 1021, 1023, 1025, 1027, 1029, 1031, 1033, 1035, 1037, 1039, 1041, 1043, 1045, 1047, 1049, 1051, 1053, 1055, 1057, 1059, 1061, 1063, 1065, 1067, 1069, 1071, 1073, 1075, 1077, 1079, 1081, 1083, 1085, 1087, 1089, 1091, 1093, 1095, 1097, 1099, 1101, 1103, 1105, 1107, 1109, 1111, 1113, 1115, 1117, 1119, 1121, 1123, 1125, 1127, 1129, 1131, 1133, 1135, 1137, 1139, 1141, 1143, 1145, 1147, 1149, 1151, 1153, 1155, 1157, 1159, 1161, 1163, 1165, 1167, 1169, 1171, 1173, 1175, 1177, 1179, 1181, 1183, 1185, 1187, 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, 1207, 1209, 1211, 1213, 1215, 1217, 1219, 1221, 1223, 1225, 1227, 1229, 1231, 1233, 1235, 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, 1287, 1289, 1291, 1293, 1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1325, 1327, 1329, 1331, 1333, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1355, 1357, 1359, 1361, 1363, 1365, 1367, 1369, 1371, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1387, 1389, 1391, 1393, 1395, 1397, 1399, 1401, 1403, 1405, 1407, 1409, 1411, 1413, 1415, 1417, 1419, 1421, 1423, 1425, 1427, 1429, 1431, 1433, 1435, 1437, 1439, 1441, 1443, 1445, 1447, 1449, 1451, 1453, 1455, 1457, 1459, 1461, 1463, 1465, 1467, 1469, 1471, 1473, 1475, 1477, 1479, 1481, 1483, 1485, 1487, 1489, 1491, 1493, 1495, 1497, 1499, 1501, 1503, 1505, 1507, 1509, 1511, 1513, 1515, 1517, 
1519, 1521, 1523, 1525, 1527, 1529, 1531, 1533, 1535, 1537, 1539, 1541, 1543, 1545, 1547, 1549, 1551, 1553, 1555, 1557, 1559, 1561, 1563, 1565, 1567, 1569, 1571, 1573, 1575, 1577, 1579, 1581, 1583, 1585, 1587, 1589, 1591, 1593, 1595, 1597, 1599, 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, 1633, 1635, 1637, 1639, 1641, 1643, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1661, 1663, 1665, 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1693, 1695, 1697, 1699, 1701, 1703, 1705, 1707, 1709, 1711, 1713, 1715, 1717, 1719, 1721, 1723, 1725, 1727, 1729, 1731, 1733, 1735, 1737, 1739, 1741, 1743, 1745, 1747, 1749, 1751, 1753, 1755, 1757, 1759, 1761, 1763, 1765, 1767, 1769, 1771, 1773, 1775, 1777, 1779, 1781, 1783, 1785, 1787, 1789, 1791, 1793, 1795, 1797, 1799, 1801, 1803, 1805, 1807, 1809, 1811, 1813, 1815, 1817, 1819, 1821, 1823, 1825, 1827, 1829, 1831, 1833, 1835, 1837, 1839, 1841, 1843, 1845, 1847, 1849, 1851, 1853, 1855, 1857, 1859, 1861, 1863, 1865, 1867, 1869, 1871, 1873, 1875, 1877, 1879, 1881, 1883, 1885, 1887, 1889, 1891, 1893, 1895, 1897, 1899, 1901, 1903, 1905, 1907, 1909, 1911, 1913, 1915, 1917, 1919, 1921, 1923, 1925, 1927, 1929, 1931, 1933, 1935, 1937, 1939, 1941, 1943, 1945, 1947, 1949, 1951, 1953, 1955, 1957, 1959, 1961, 1963, 1965, 1967, 1969, 1971, 1973, 1975, 1977, 1979, 1981, 1983, 1985, 1987, 1989, 1991, 1993, 1995, 1997, 1999, 2001, 2003, 2005, 2007, 2009, 2011, 2013, 2015, 2017, 2019, 2021, 2023, 2025, 2027, 2029, 2031, 2033, 2035, 2037, 2039, 2041, 2043, 2045, 2047, 2049, 2051, 2053, 2055, 2057, 2059, 2061, 2063, 2065, 2067, 2069, 2071, 2073, 2075, 2077, 2079, 2081, 2083, 2085, 2087, 2089, 2091, 2093, 2095, 2097, 2099, 2101, 2103, 2105, 2107, 2109, 2111, 2113, 2115, 2117, 2119, 2121, 2123, 2125, 2127, 2129, 2131, 2133, 2135, 2137, 2139, 2141, 2143, 2145, 2147, 2149, 2151, 2153, 2155, 2157, 2159, 2161, 2163, 2165, 2167, 2169, 2171, 2173, 2175, 2177, 2179, 2181, 2183, 
2185, 2187, 2189, 2191, 2193, 2195, 2197, 2199, 2201, 2203, 2205, 2207, 2209, 2211, 2213, 2215, 2217, 2219, 2221, 2223, 2225, 2227, 2229, 2231, 2233, 2235, 2237, 2239, 2241, 2243, 2245, 2247, 2249, 2251, 2253, 2255, 2257, 2259, 2261, 2263, 2265, 2267, 2269, 2271, 2273, 2275, 2277, 2279, 2281, 2283, 2285, 2287, 2289, 2291, 2293, 2295, 2297, 2299, 2301, 2303, 2305, 2307, 2309, 2311, 2313, 2315, 2317, 2319, 2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 2343, 2345, 2347, 2349, 2351, 2353, 2355, 2357, 2359, 2361, 2363, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395, 2397, 2399, 2401, 2403, 2405, 2407, 2409, 2411, 2413, 2415, 2417, 2419, 2421, 2423, 2425, 2427, 2429, 2431, 2433, 2435, 2437, 2439, 2441, 2443, 2445, 2447, 2449, 2451, 2453, 2455, 2457, 2459, 2461, 2463, 2465, 2467, 2469, 2471, 2473, 2475, 2477, 2479, 2481, 2483, 2485, 2487, 2489, 2491, 2493, 2495, 2497, 2499, 2501, 2503, 2505, 2507, 2509, 2511, 2513, 2515, 2517, 2519, 2521, 2523, 2525, 2527, 2529, 2531, 2533, 2535, 2537, 2539, 2541, 2543, 2545, 2547, 2549, 2551, 2553, 2555, 2557, 2559, 2561, 2563, 2565, 2567, 2569, 2571, 2573, 2575, 2577, 2579, 2581, 2583, 2585, 2587, 2589, 2591, 2593, 2595, 2597, 2599, 2601, 2603, 2605, 2607, 2609, 2611, 2613, 2615, 2617, 2619, 2621, 2623, 2625, 2627, 2629, 2631, 2633, 2635, 2637, 2639, 2641, 2643, 2645, 2647, 2649, 2651, 2653, 2655, 2657, 2659, 2661, 2663, 2665, 2667, 2669, 2671, 2673, 2675, 2677, 2679, 2681, 2683, 2685, 2687, 2689, 2691, 2693, 2695, 2697, 2699, 2701, 2703, 2705, 2707, 2709, 2711, 2713, 2715, 2717, 2719, 2721, 2723, 2725, 2727, 2729, 2731, 2733, 2735, 2737, 2739, 2741, 2743, 2745, 2747, 2749, 2751, 2753, 2755, 2757, 2759, 2761, 2763, 2765, 2767, 2769, 2771, 2773, 2775, 2777, 2779, 2781, 2783, 2785, 2787, 2789, 2791, 2793, 2795, 2797, 2799, 2801, 2803, 2805, 2807, 2809, 2811, 2813, 2815, 2817, 2819, 2821, 2823, 2825, 2827, 2829, 2831, 2833, 2835, 2837, 2839, 2841, 2843, 2845, 2847, 2849, 
2851, 2853, 2855, 2857, 2859, 2861, 2863, 2865, 2867, 2869, 2871, 2873, 2875, 2877, 2879, 2881, 2883, 2885, 2887, 2889, 2891, 2893, 2895, 2897, 2899, 2901, 2903, 2905, 2907, 2909, 2911, 2913, 2915, 2917, 2919, 2921, 2923, 2925, 2927, 2929, 2931, 2933, 2935, 2937, 2939, 2941, 2943, 2945, 2947, 2949, 2951, 2953, 2955, 2957, 2959, 2961, 2963, 2965, 2967, 2969, 2971, 2973, 2975, 2977, 2979, 2981, 2983, 2985, 2987, 2989, 2991, 2993, 2995, 2997, 2999, 3001, 3003, 3005, 3007, 3009, 3011, 3013, 3015, 3017, 3019, 3021, 3023, 3025, 3027, 3029, 3031, 3033, 3035, 3037, 3039, 3041, 3043, 3045, 3047, 3049, 3051, 3053, 3055, 3057, 3059, 3061, 3063, 3065, 3067, 3069, 3071, 3073, 3075, 3077, 3079, 3081, 3083, 3085, 3087, 3089, 3091, 3093, 3095, 3097, 3099, 3101, 3103, 3105, 3107, 3109, 3111, 3113, 3115, 3117, 3119, 3121, 3123, 3125, 3127, 3129, 3131, 3133, 3135, 3137, 3139, 3141, 3143, 3145, 3147, 3149, 3151, 3153, 3155, 3157, 3159, 3161, 3163, 3165, 3167, 3169, 3171, 3173, 3175, 3177, 3179, 3181, 3183, 3185, 3187, 3189, 3191, 3193, 3195, 3197, 3199, 3201, 3203, 3205, 3207, 3209, 3211, 3213, 3215, 3217, 3219, 3221, 3223, 3225, 3227, 3229, 3231, 3233, 3235, 3237, 3239, 3241, 3243, 3245, 3247, 3249, 3251, 3253, 3255, 3257, 3259, 3261, 3263, 3265, 3267, 3269, 3271, 3273, 3275, 3277, 3279, 3281, 3283, 3285, 3287, 3289, 3291, 3293, 3295, 3297, 3299, 3301, 3303, 3305, 3307, 3309, 3311, 3313, 3315, 3317, 3319, 3321, 3323, 3325, 3327, 3329, 3331, 3333, 3335, 3337, 3339, 3341, 3343, 3345, 3347, 3349, 3351, 3353, 3355, 3357, 3359, 3361, 3363, 3365, 3367, 3369, 3371, 3373, 3375, 3377, 3379, 3381, 3383, 3385, 3387, 3389, 3391, 3393, 3395, 3397, 3399, 3401, 3403, 3405, 3407, 3409, 3411, 3413, 3415, 3417, 3419, 3421, 3423, 3425, 3427, 3429, 3431, 3433, 3435, 3437, 3439, 3441, 3443, 3445, 3447, 3449, 3451, 3453, 3455, 3457, 3459, 3461, 3463, 3465, 3467, 3469, 3471, 3473, 3475, 3477, 3479, 3481, 3483, 3485, 3487, 3489, 3491, 3493, 3495, 3497, 3499, 3501, 3503, 3505, 3507, 3509, 3511, 3513, 3515, 
3517, 3519, 3521, 3523, 3525, 3527, 3529, 3531, 3533, 3535, 3537, 3539, 3541, 3543, 3545, 3547, 3549, 3551, 3553, 3555, 3557, 3559, 3561, 3563, 3565, 3567, 3569, 3571, 3573, 3575, 3577, 3579, 3582, 3584, 3586, 3588, 3591, 3593, 3595, 3597, 3599, 3601, 3603, 3605, 3607, 3609, 3611, 3613, 3615, 3617, 3619, 3621, 3623, 3625, 3627, 3629, 3631, 3633, 3635, 3637, 3639, 3641, 3643, 3645, 3647, 3649, 3651, 3653, 3655, 3657, 3659, 3661, 3663, 3665, 3667, 3669, 3671, 3673, 3675, 3677, 3679, 3681, 3683, 3685, 3687, 3689, 3691, 3693, 3695, 3697, 3699, 3701, 3703, 3705, 3707, 3709, 3711, 3713, 3715, 3717, 3719, 3721, 3723, 3725, 3727, 3729, 3731, 3733, 3735, 3737, 3739, 3741, 3743, 3745, 3747, 3749, 3751, 3753, 3755, 3757, 3759, 3761, 3763, 3765, 3767, 3769, 3771, 3773, 3775, 3777, 3779, 3781, 3783, 3785, 3787, 3789, 3791, 3793, 3795, 3797, 3799, 3801, 3803, 3805, 3807, 3809, 3811, 3813, 3815, 3817, 3819, 3821, 3823, 3825, 3827, 3829, 3831, 3833, 3835, 3837, 3839, 3841, 3843, 3845, 3847, 3849, 3851, 3853, 3855, 3857, 3859, 3861, 3863, 3865, 3867, 3869, 3871, 3873, 3875, 3877, 3879, 3881, 3883, 3885, 3887, 3889, 3891, 3893, 3895, 3897, 3899, 3901, 3903, 3905, 3907, 3909, 3911, 3913, 3915, 3917, 3919, 3921, 3923, 3925, 3927, 3929, 3931, 3933, 3935, 3937, 3939, 3941, 3943, 3945, 3947, 3949, 3951, 3953, 3955, 3957, 3959, 3961, 3963, 3965, 3967, 3969, 3971, 3973, 3975, 3977, 3979, 3981, 3983, 3985, 3987, 3989, 3991, 3993, 3995, 3997, 3999, 4001, 4003, 4005, 4007, 4009, 4011, 4013, 4015, 4017, 4019, 4021, 4023, 4025, 4027, 4029, 4031, 4033, 4035, 4037, 4039, 4041, 4043, 4045, 4047, 4049, 4051, 4053, 4055, 4057, 4059, 4061, 4063, 4065, 4067, 4069, 4071, 4073, 4075, 4077, 4079, 4081, 4083, 4085, 4087, 4089, 4091, 4093, 4095, 4097, 4099, 4101, 4103, 4105, 4107, 4109, 4111, 4113, 4115, 4117, 4119, 4121, 4123, 4125, 4127, 4129, 4131, 4133, 4135, 4137, 4139, 4141, 4143, 4145, 4148, 4150, 4152, 4154, 4156, 4158, 4160, 4162, 4164, 4166, 4168, 4170, 4172, 4174, 4176, 4178, 4180, 4182, 4184, 
4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4202, 4204, 4206, 4208, 4210, 4212, 4214, 4216, 4218, 4220, 4222, 4224, 4226, 4228, 4230, 4232, 4234, 4237, 4239, 4241, 4243, 4245, 4247, 4249, 4251, 4253, 4255, 4257, 4259, 4261, 4263, 4265, 4267, 4269, 4271, 4273, 4275, 4277, 4279, 4281, 4283, 4285, 4287, 4289, 4291, 4293, 4295, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332, 4337, 4339, 4341, 4343, 4345, 4347, 4350, 4352, 4355, 4357, 4360, 4362, 4365, 4367, 4369, 4371, 4374, 4376, 4379, 4381, 4386, 4388, 4390, 4392, 4394, 4396, 4296, 4296, 4333, 4333, 4333, 4333, 4399, 4399, 4403, 4403, 4403, 4403, 4146, 4146, 4146, 4146, 4333, 4333, 4333, 4333, 4333, 4333, 4296, 4296, 4296, 4296, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4399, 4399, 4406, 4406, 4399, 4399, 4146, 4146, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4399, 4399, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4406, 4406, 4406, 4406, 3580, 3589, 4403, 4403, 4146, 4146, 4403, 4403, 4146, 4146, 4296, 4296, 4296, 4296, 4333, 4333, 4333, 4333, 4333, 4333, 4403, 4403, 4146, 4146, 4146, 4146, 4399, 4399, 4403, 4403, 4146, 4146, 4406, 4406, 4333, 4333, 4146, 4146, 4146, 4146, 4296, 4296, 4333, 4333, 4406, 4406, 4296, 4296, 4296, 4296, 4296, 4296, 4333, 4333, 4333, 4333, 4333, 4333, 4399, 4399, 4403, 4403, 4146, 4146, 4406, 4406, 4406, 4406, 4406, 4406, 4406, 4406, 4334, 4334, 4334, 4334, 4146, 4146, 4146, 4146, 4146, 4146, 4146, 4296, 4296, 4333, 4333, 4333, 4333, 4333, 4333, 4296, 4296, 4406, 4406, 4406, 4406, 4406, 4406, 4399, 4399, 4403, 4403, 4146, 4146, 4401, 4401, 4406, 4406, 4401, 4401, 4401, 4401, 4406, 4406, 4401, 4401, 4401, 4401, 4406, 4406, 3580, 3589, 4296, 4296, 4296, 4296, 4296, 4296, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4333, 4401, 4401, 4399, 4399, 4146, 4146, 4146, 4146, 4406, 4406, 4296, 4296, 4333, 4333, 4406, 4406, 4406, 4406, 4296, 4296, 4333, 4333, 4406, 4406, 4406, 4406, 4406, 4406, 4401, 
4406, 4406, 4334, 4146, 4401, 4406, 4406, 4406, 4406, 4408, 4406, 4406, 4398, 4296, 4296, 4372, 4383, 4333, 4333, 4406, 4406, 4399, 4334, 4403, 4406, 4406, 4372, 4383, 4401, 4401, 4401, 4401, 4406, 4406, 4398, 4401, 4401, 4399, 4401, 4401, 4403, 4406, 4406, 4408, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 7601, 7603, 7605, 7607, 7609, 7611, 7613, 7615, 7617, 7619, 7621, 7623, 7625, 7627, 7629, 7631, 7633, 7635, 7637, 7639, 7641, 7643, 7645, 7647, 7649, 7651, 7653, 7655, 7657, 7659, 7661, 7663, 7665, 7667, 7669, 7671, 7673, 7675, 7677, 7679, 7681, 7683, 7685, 7687, 7689, 7691, 7693, 7695, 7697, 7699, 7701, 7703, 7705, 7707, 7709, 7711, 7713, 7715, 7717, 7719, 7721, 7723, 7725, 7727, 7729, 7731, 7733, 7735, 7737, 7739, 7741, 7743, 7745, 7747, 7749, 7751, 7753, 7755, 7757, 7759, 7761, 7763, 7765, 7767, 7769, 7771, 7773, 7775, 7777, 7779, 7781, 7783, 7785, 7787, 7789, 7791, 7793, 7795, 7797, 7799, 7801, 7803, 7805, 7807, 7809, 7811, 7813, 7815, 7817, 7819, 7821, 7823, 7825, 7827, 7829, 7831, 7833, 7835, 7837, 7839, 7841, 7843, 7845, 7847, 7849, 7851, 7853, 7855, 7857, 7859, 7861, 7863, 7865, 7867, 7869, 7871, 7873, 7875, 7877, 7879, 7881, 7883, 7885, 7887, 7889, 7891, 7893, 7895, 7897, 7899, 7901, 7903, 7905, 7907, 7909, 7911, 7913, 7915, 7917, 7919, 7921, 7923, 7925, 7927, 7929, 7931, 7933, 7935, 7937, 7939, 7941, 7943, 7945, 7947, 7949, 7951, 7953, 7955, 7957, 7959, 7961, 7963, 7965, 7967, 7969, 7971, 7973, 7975, 7977, 7979, 7981, 7983, 7985, 7987, 7989, 7991, 7993, 7995, 7997, 7999, 8001, 8003, 8005, 8007, 8009, 8011, 8013, 8015, 8017, 8019, 8021, 8023, 8025, 8027, 8029, 8031, 8033, 8035, 8037, 8039, 8041, 8043, 8045, 8047, 8049, 8051, 8053, 8055, 8057, 8059, 8061, 8063, 8065, 8067, 8069, 8071, 8073, 8075, 8077, 8079, 8081, 8083, 8085, 8087, 8089, 8091, 8093, 8095, 8097, 8099, 8101, 8103, 8105, 8107, 8109, 8111, 8113, 8115, 8117, 8119, 8121, 8123, 8125, 8127, 8129, 8131, 8133, 8135, 8137, 8139, 8141, 8143, 8145, 8147, 8149, 8151, 8153, 8155, 8157, 8159, 8161, 8163, 
8165, 8167, 8169, 8171, 8173, 8175, 8177, 8179, 8181, 8183, 8185, 8187, 8189, 8191, 8193, 8195, 8197, 8199, 8201, 8203, 8205, 8207, 8209, 8211, 8213, 8215, 8217, 8219, 8221, 8223, 8225, 8227, 8229, 8231, 8233, 8235, 8237, 8239, 8241, 8243, 8245, 8247, 8249, 8251, 8253, 8255, 8257, 8259, 8261, 8263, 8265, 8267, 8269, 8271, 8273, 8275, 8277, 8279, 8281, 8283, 8285, 8287, 8289, 8291, 8293, 8295, 8297, 8299, 8301, 8303, 8305, 8307, 8309, 8311, 8313, 8315, 8317, 8319, 8321, 8323, 8325, 8327, 8329, 8331, 8333, 8335, 8337, 8339, 8341, 8343, 8345, 8347, 8349, 8351, 8353, 8355, 8357, 8359, 8361, 8363, 8365, 8367, 8369, 8371, 8373, 8375, 8377, 8379, 8381, 8383, 8385, 8387, 8389, 8391, 8393, 8395, 8397, 8399, 8401, 8403, 8405, 8407, 8409, 8411, 8413, 8415, 8417, 8419, 8421, 8423, 8425, 8427, 8429, 8431, 8433, 8435, 8437, 8439, 8441, 8443, 8445, 8447, 8449, 8451, 8453, 8455, 8457, 8459, 8461, 8463, 8465, 8467, 8469, 8471, 8473, 8475, 8477, 8479, 8481, 8483, 8485, 8487, 8489, 8491, 8493, 8495, 8497, 8499, 8501, 8503, 8505, 8507, 8509, 8511, 8513, 8515, 8517, 8519, 8521, 8523, 8525, 8527, 8529, 8531, 8533, 8535, 8537, 8539, 8541, 8543, 8545, 8547, 8549, 8551, 8553, 8555, 8557, 8559, 8561, 8563, 8565, 8567, 8569, 8571, 8573, 8575, 8577, 8579, 8581, 8583, 8585, 8587, 8589, 8591, 8593, 8595, 8597, 8599, 8601, 8603, 8605, 8607, 8609, 8611, 8613, 8615, 8617, 8619, 8621, 8623, 8625, 8627, 8629, 8631, 8633, 8635, 8637, 8639, 8641, 8643, 8645, 8647, 8649, 8651, 8653, 8655, 8657, 8659, 8661, 8663, 8665, 8667, 8669, 8671, 8673, 8675, 8677, 8679, 8681, 8683, 8685, 8687, 8689, 8691, 8693, 8695, 8697, 8699, 8701, 8703, 8705, 8707, 8709, 8711, 8713, 8715, 8717, 8719, 8721, 8723, 8725, 8727, 8729, 8731, 8733, 8735, 8737, 8739, 8741, 8743, 8745, 8747, 8749, 8751, 8753, 8755, 8757, 8759, 8761, 8763, 8765, 8767, 8769, 8771, 8773, 8775, 8777, 8779, 8781, 8783, 8785, 8787, 8789, 8791, 8793, 8795, 8797, 8799, 8801, 8803, 8805, 8807, 8809, 8811, 8813, 8815, 8817, 8819, 8821, 8823, 8825, 8827, 8829, 
8831, 8833, 8835, 8837, 8839, 8841, 8843, 8845, 8847, 8849, 8851, 8853, 8855, 8857, 8859, 8861, 8863, 8865, 8867, 8869, 8871, 8873, 8875, 8877, 8879, 8881, 8883, 8885, 8887, 8889, 8891, 8893, 8895, 8897, 8899, 8901, 8903, 8905, 8907, 8909, 8911, 8913, 8915, 8917, 8919, 8921, 8923, 8925, 8927, 8929, 8931, 8933, 8935, 8937, 8939, 8941, 8943, 8945, 8947, 8949, 8951, 8953, 8955, 8957, 8959, 8961, 8963, 8965, 8967, 8969, 8971, 8973, 8975, 8977, 8979, 8981, 8983, 8985, 8987, 8989, 8991, 8993, 8995, 8997, 8999, 9001, 9003, 9005, 9007, 9009, 9011, 9013, 9015, 9017, 9019, 9021, 9023, 9025, 9027, 9029, 9031, 9033, 9035, 9037, 9039, 9041, 9043, 9045, 9047, 9049, 9051, 9053, 9055, 9057, 9059, 9061, 9063, 9065, 9067, 9069, 9071, 9073, 9075, 9077, 9079, 9081, 9083, 9085, 9087, 9089, 9091, 9093, 9095, 9097, 9099, 9101, 9103, 9105, 9107, 9109, 9111, 9113, 9115, 9117, 9119, 9121, 9123, 9125, 9127, 9129, 9131, 9133, 9135, 9137, 9139, 9141, 9143, 9145, 9147, 9149, 9151, 9153, 9155, 9157, 9159, 9161, 9163, 9165, 9167, 9169, 9171, 9173, 9175, 9177, 9179, 9181, 9183, 9185, 9187, 9189, 9191, 9193, 9195, 9197, 9199, 9201, 9203, 9205, 9207, 9209, 9211, 9213, 9215, 9217, 9219, 9221, 9223, 9225, 9227, 9229, 9231, 9233, 9235, 9237, 9239, 9241, 9243, 9245, 9247, 9249, 9251, 9253, 9255, 9257, 9259, 9261, 9263, 9265, 9267, 9269, 9271, 9273, 9275, 9277, 9279, 9281, 9283, 9285, 9287, 9289, 9291, 9293, 9295, 9297, 9299, 9301, 9303, 9305, 9307, 9309, 9311, 9313, 9315, 9317, 9319, 9321, 9323, 9325, 9327, 9329, 9331, 9333, 9335, 9337, 9339, 9341, 9343, 9345, 9347, 9349, 9351, 9353, 9355, 9357, 9359, 9361, 9363, 9365, 9367, 9369, 9371, 9373, 9375, 9377, 9379, 9381, 9383, 9385, 9387, 9389, 9391, 9393, 9395, 9397, 9399, 9401, 9403, 9405, 9407, 9409, 9411, 9413, 9415, 9417, 9419, 9421, 9423, 9425, 9427, 9429, 9431, 9433, 9435, 9437, 9439, 9441, 9443, 9445, 9447, 9449, 9451, 9453, 9455, 9457, 9459, 9461, 9463, 9465, 9467, 9469, 9471, 9473, 9475, 9477, 9479, 9481, 9483, 9485, 9487, 9489, 9491, 9493, 9495, 
9497, 9499, 9501, 9503, 9505, 9507, 9509, 9511, 9513, 9515, 9517, 9519, 9521, 9523, 9525, 9527, 9529, 9531, 9533, 9535, 9537, 9539, 9541, 9543, 9545, 9547, 9549, 9551, 9553, 9555, 9557, 9559, 9561, 9563, 9565, 9567, 9569, 9571, 9573, 9575, 9577, 9579, 9581, 9583, 9585, 9587, 9589, 9591, 9593, 9595, 9597, 9599, 9601, 9603, 9605, 9607, 9609, 9611, 9613, 9615, 9617, 9619, 9621, 9623, 9625, 9627, 9629, 9631, 9633, 9635, 9637, 9639, 9641, 9643, 9645, 9647, 9649, 9651, 9653, 9655, 9657, 9659, 9661, 9663, 9665, 9667, 9669, 9671, 9673, 9675, 9677, 9679, 9681, 9683, 9685, 9687, 9689, 9691, 9693, 9695, 9697, 9699, 9701, 9703, 9705, 9707, 9709, 9711, 9713, 9715, 9717, 9719, 9721, 9723, 9725, 9727, 9729, 9731, 9733, 9735, 9737, 9739, 9741, 9743, 9745, 9747, 9749, 9751, 9753, 9755, 9757, 9759, 9761, 9763, 9765, 9767, 9769, 9771, 9773, 9775, 9777, 9779, 9781, 9783, 9785, 9787, 9789, 4434, 4435, 4439, 4440, 4441, 4442, 4539, 4540, 4563, 4564, 4596, 4597, 4703, 4704, 4707, 4708, 4733, 4734, 4735, 4736, 4737, 4738, 4787, 4788, 4789, 4790, 4797, 4798, 4799, 4800, 4801, 4802, 4874, 4875, 4876, 4877, 4878, 4879, 4975, 4976, 4977, 4978, 4979, 4980, 4981, 4982, 4986, 4987, 4988, 4989, 5128, 5148, 5149, 5150, 5151, 5180, 5181, 5196, 5197, 5198, 5199, 5200, 5201, 5279, 5369, 5370, 5373, 5374, 5386, 5389, 5405, 5406, 5407, 5408, 5409, 5410, 5411, 5412, 5419, 5420, 5427, 5428, 5443, 5444, 5445, 5446, 5447, 5448, 5467, 5468, 5469, 5470, 5471, 5472, 5484, 5485, 5487, 5488, 5489, 5490, 5503, 5504, 5529, 5530, 5541, 5542, 5560, 5561, 5578, 5579, 5587, 5588, 5599, 5600, 5614, 5615, 5616, 5617, 5624, 5625, 5632, 5633, 5634, 5635, 5636, 5637, 5657, 5658, 5660, 5661, 5662, 5663, 5675, 5676, 5691, 5692, 5705, 5706, 5720, 5721, 5734, 5735, 5736, 5737, 5755, 5756, 5768, 5769, 5770, 5771, 5786, 5806, 5807, 5814, 5815, 5816, 5817, 5818, 5819, 5827, 5828, 5831, 5832, 5875, 5876, 5877, 5878, 5890, 5891, 5893, 5894, 5895, 5896, 5907, 5908, 5909, 5910, 5924, 5925, 5926, 5927, 5928, 5929, 5930, 5931, 5932, 
5933, 5934, 5935, 5938, 5941, 5945, 5952, 5953, 5960, 5961, 5969, 5976, 5977, 5978, 5979, 5986, 5987, 5988, 5989, 6001, 6002, 6003, 6004, 6005, 6006, 6007, 6008, 6020, 6021, 6029, 6030, 6040, 6041, 6044, 6045, 6046, 6047, 6063, 6064, 6074, 6075, 6076, 6077, 6078, 6079, 6091, 6092, 6106, 6107, 6108, 6140, 6141, 6154, 6155, 6156, 6157, 6158, 6159, 6174, 6175, 6176, 6192, 6193, 6201, 6203, 6205, 6206, 6207, 6208, 6209, 6210, 6211, 6212, 6213, 6221, 6224, 6227, 6228, 6229, 6230, 6231, 6232, 6233, 6234, 6235, 6236, 6237, 6238, 6239, 6240, 6241, 6242, 13, 14, 15, 10665, 10344, 10710, 10347, 10500, 10173, 10175, 10720, 10083, 10082, 10084, 10086, 10085, 10087, 10089, 10088, 10758, 10090, 10092, 10091, 10093, 10095, 10094, 10096, 11176, 10099, 10098, 10100, 11178, 11180, 10103, 10102, 10104, 10107, 10106, 10109, 10108, 10111, 10110, 10112, 10114, 10441, 10442, 10435, 10434, 10436, 10438, 10439, 10115, 10445, 10446, 10448, 10449, 10450, 10451, 10117, 10696, 10609, 10610, 10699, 10611, 10700, 10613, 10118, 10614, 10423, 10617, 10497, 10677, 10676, 10678, 10680, 10679, 10681, 10683, 10682, 10685, 10684, 10687, 10686, 10688, 10690, 10689, 10691, 10169, 10693, 10694, 10119, 10587, 10710, 10482, 10120, 10121, 10674, 10357, 10122, 10288, 10123, 10124, 10125, 10209, 10126, 10210, 10699, 10489, 10361, 10128, 10127, 10129, 10130, 10132, 10133, 10135, 10134, 10748, 10747, 10137, 10136, 10708, 10344, 10667, 10140, 10139, 10142, 10141, 10143, 11182, 10487, 10419, 10610, 10488, 10698, 10700, 10144, 10146, 10423, 10425, 10497, 10289, 10487, 10697, 10699, 10698, 10700, 10149, 10151, 10153, 10390, 10497, 11184, 10368, 10367, 10709, 10708, 10426, 10428, 10346, 10502, 10504, 10566, 10709, 10665, 10710, 10156, 10155, 4372, 10159, 10158, 4383, 10504, 10566, 10289, 10695, 10610, 10699, 10611, 10700, 10161, 10163, 10165, 10425, 11186, 10368, 10367, 10289, 10695, 10697, 10699, 10611, 10700, 10293, 10166, 10295, 10294, 10494, 10167, 10677, 10676, 10678, 10680, 10679, 10681, 10683, 10168, 10685, 
10684, 10687, 10686, 10688, 10690, 10689, 10691, 10169, 10693, 10170, 10709, 10708, 10498, 10172, 10346, 10173, 10175, 10719, 10487, 10696, 10697, 10611, 10291, 10700, 10177, 10176, 10178, 10706, 10705, 10707, 10179, 10181, 10180, 10182, 10184, 10183, 10185, 10371, 10186, 10374, 10373, 10674, 10187, 10188, 10189, 10190, 10696, 10695, 10191, 10699, 10698, 10700, 10702, 10701, 10192, 10704, 10706, 10193, 10707, 10195, 10194, 10196, 10199, 10198, 10200, 10202, 10201, 10203, 10206, 10205, 10207, 10208, 10209, 10419, 10210, 10489, 10488, 10361, 10211, 10213, 10215, 10217, 11188, 10748, 10747, 11190, 10577, 10576, 10578, 10580, 10219, 10218, 10221, 10220, 10758, 10222, 3589, 10572, 10571, 10573, 10225, 10224, 10227, 10226, 10758, 10228, 3589, 10582, 10581, 10583, 11192, 11194, 11196, 10585, 10232, 10231, 10758, 10233, 10235, 10234, 10236, 10238, 10237, 10239, 10241, 10240, 10242, 10244, 10243, 10245, 10247, 10246, 10248, 10249, 10250, 10251, 10252, 10254, 10253, 10255, 10257, 10256, 10258, 10259, 10260, 10262, 10261, 10263, 10265, 10264, 10266, 10268, 10267, 10269, 10272, 10271, 10273, 10276, 10275, 10278, 10277, 11198, 11200, 10280, 10279, 10281, 10284, 10283, 10285, 11202, 11204, 11206, 10287, 10288, 10289, 10609, 10290, 10611, 10291, 10700, 10293, 10292, 10295, 10294, 10494, 10496, 10369, 10296, 10710, 10299, 10298, 10672, 10300, 10302, 10301, 10384, 10304, 10303, 10305, 10334, 10333, 10335, 10325, 10324, 10326, 10307, 10308, 10321, 10320, 10322, 10337, 10336, 10338, 10340, 10341, 10342, 10343, 10317, 10316, 10318, 10309, 10480, 10710, 10311, 10381, 10312, 10673, 10592, 10358, 10313, 10568, 10567, 10314, 10572, 10571, 10573, 10577, 10576, 10578, 10580, 10582, 10581, 10583, 11208, 11210, 11212, 10315, 10317, 10316, 10318, 10321, 10320, 10322, 10325, 10324, 10326, 10328, 10329, 10331, 10330, 10758, 10332, 10334, 10333, 10335, 10337, 10336, 10338, 10340, 10341, 10342, 10343, 10708, 10344, 10710, 10347, 10346, 10348, 10350, 10352, 10351, 10353, 10480, 10426, 10482, 10355, 
10356, 10592, 10357, 10358, 10359, 10360, 10487, 10696, 10697, 10699, 10611, 10361, 10363, 10362, 10365, 10364, 10366, 10496, 10368, 10367, 10369, 10666, 10667, 10372, 10371, 10374, 10373, 10606, 10375, 10376, 10377, 10378, 10587, 10379, 10426, 10382, 10381, 10383, 10674, 10606, 10384, 10385, 10386, 10387, 10696, 10609, 10697, 10699, 10611, 10700, 10613, 10388, 10614, 10389, 10390, 11214, 11216, 11218, 11220, 10392, 10391, 10393, 11222, 11224, 10396, 10395, 10397, 10758, 10398, 10399, 10401, 10400, 10402, 10405, 10404, 10406, 10409, 10408, 10411, 10410, 10413, 10412, 10758, 10414, 10416, 10415, 10417, 10418, 10609, 10419, 10697, 10611, 10488, 10700, 10420, 10422, 10423, 10425, 10497, 10709, 10708, 10426, 10500, 10428, 10502, 10504, 10566, 10431, 10430, 10432, 10435, 10434, 10436, 10438, 10439, 10441, 10440, 10442, 10445, 10444, 10446, 10448, 10449, 10450, 10451, 10453, 10452, 10454, 10758, 10456, 10455, 10457, 10458, 10460, 10459, 10461, 10463, 10462, 10464, 10466, 10465, 10468, 10467, 10470, 10469, 10471, 10473, 10472, 10474, 10476, 10475, 10478, 10477, 10479, 10480, 10618, 10498, 10482, 10590, 10483, 10674, 10593, 10594, 10484, 10485, 10487, 10486, 10697, 10489, 10488, 10700, 10491, 10490, 10493, 10492, 10494, 10496, 10497, 10709, 10708, 10498, 10501, 10500, 10502, 10504, 10506, 10505, 10507, 10758, 10509, 10508, 10510, 10512, 10511, 10513, 10758, 10515, 10514, 10516, 10518, 10517, 10519, 10522, 10521, 10523, 10526, 10525, 10527, 10530, 10529, 10531, 10758, 10533, 10532, 10534, 10536, 10535, 10537, 10540, 10539, 10541, 11227, 11229, 10544, 10543, 10545, 10758, 10547, 10546, 10548, 10550, 10549, 10551, 10758, 10553, 10555, 10554, 10557, 10556, 10558, 10758, 10709, 10708, 10667, 10561, 10560, 10562, 10564, 10563, 10565, 10566, 11231, 10568, 10567, 10569, 10572, 10571, 10573, 10575, 10577, 10576, 10578, 10580, 10582, 10581, 10583, 11233, 11235, 11237, 10585, 10586, 10587, 10618, 10710, 10590, 10589, 10591, 10593, 10592, 10594, 10595, 10596, 10598, 10597, 10599, 
10601, 10600, 10602, 10604, 10603, 10605, 10674, 10606, 10607, 10608, 10696, 10609, 10610, 10699, 10611, 10700, 10613, 10612, 10614, 10615, 10617, 10665, 10618, 10667, 10670, 10669, 10672, 10671, 10674, 10673, 10675, 10621, 10620, 10622, 10624, 10625, 10627, 10626, 10629, 10628, 10631, 10630, 10632, 10635, 10634, 10636, 10638, 10637, 10639, 10640, 10642, 10641, 10643, 10645, 10646, 10648, 10647, 10649, 10651, 10650, 10652, 10654, 10653, 10655, 10657, 10656, 10658, 10660, 10659, 10661, 10758, 10663, 10662, 10664, 10666, 10665, 10667, 10670, 10669, 10672, 10671, 10674, 10673, 10675, 10677, 10676, 10678, 10680, 10679, 10681, 10683, 10682, 10685, 10684, 10687, 10686, 10688, 10690, 10689, 10691, 10693, 10692, 10694, 10696, 10695, 10697, 10699, 10698, 10700, 10702, 10701, 10704, 10703, 10706, 10705, 10707, 10709, 10708, 10710, 10713, 10712, 10714, 10716, 10715, 10717, 10718, 10720, 10719, 10722, 10721, 10723, 10725, 10724, 10726, 10727, 10730, 10729, 10731, 10733, 10732, 10734, 10735, 10738, 10737, 10739, 10741, 10740, 10742, 10743, 10745, 11240, 10748, 10747, 11242, 10749, 10758, 10758, 10758, 10751, 10750, 10752, 10753, 10754, 10756, 10755, 10758, 10757, 10760, 10759, 10761, 10762, 10763, 10765, 10764, 10766, 10768, 10767, 10769, 10770, 10771, 10772, 10773, 11246, 11248, 11250, 11252, 10981, 10980, 10982, 10984, 10983, 10985, 11254, 10986, 10847, 10988, 10990, 10989, 10991, 11256, 10916, 10974, 10774, 10976, 10975, 10978, 10977, 10979, 10910, 10909, 10911, 10913, 10912, 10914, 11258, 11260, 11262, 10915, 10994, 10908, 10995, 10997, 10996, 10998, 11011, 10775, 10893, 11166, 10894, 11014, 11015, 11017, 10776, 11018, 11174, 11264, 11266, 11268, 10785, 10778, 10893, 11119, 11166, 11076, 10852, 10779, 10855, 10960, 10961, 11270, 11024, 11272, 11274, 11117, 10781, 10782, 11119, 11118, 11076, 11122, 10783, 10784, 11125, 11124, 11126, 11276, 11116, 10785, 11128, 10953, 10851, 11076, 10786, 10821, 10855, 10822, 10935, 10934, 10787, 10788, 10938, 10939, 4296, 4296, 10943, 10942, 
10944, 10946, 10945, 10947, 11278, 4333, 10790, 10789, 10791, 10793, 10792, 10794, 10795, 10798, 10797, 11280, 10800, 10799, 10801, 10803, 10802, 10804, 10805, 10807, 10810, 10809, 10811, 10813, 10812, 10814, 10815, 10818, 10817, 11282, 11116, 10952, 11128, 11119, 10953, 11120, 10819, 10821, 10855, 10822, 10824, 10823, 10825, 10827, 10826, 10828, 11284, 10829, 10831, 10830, 10832, 10834, 10833, 10835, 11286, 10836, 10838, 10837, 10839, 10841, 10840, 10842, 10844, 10843, 10845, 11288, 10974, 10846, 10976, 10975, 10978, 10977, 10979, 10986, 10847, 10988, 10990, 10989, 10991, 11290, 11292, 10981, 10980, 10848, 10984, 10983, 10985, 11294, 10910, 10909, 10911, 10913, 10912, 10914, 11296, 11298, 11300, 10915, 10994, 10993, 10995, 10997, 10996, 10998, 10849, 10952, 11067, 10850, 11166, 10851, 11076, 10852, 10854, 10855, 10960, 10860, 11302, 11137, 11304, 11306, 11163, 11127, 11020, 11119, 11166, 11120, 11069, 11021, 11023, 10857, 11024, 11308, 11072, 11117, 11073, 11075, 11074, 11076, 11078, 10858, 11080, 11079, 11082, 10859, 11083, 10860, 11310, 11163, 11127, 11128, 11119, 11118, 11076, 10862, 10861, 11071, 11070, 10863, 11024, 11312, 11072, 11117, 10864, 11075, 11074, 11076, 11077, 10865, 11080, 11079, 11082, 11081, 11083, 11314, 11163, 11127, 11128, 11119, 11075, 11076, 10867, 10866, 10868, 10870, 10869, 10871, 11316, 11318, 10873, 10872, 10874, 10876, 10875, 10877, 10878, 10880, 10883, 10882, 10884, 10886, 10885, 10887, 10888, 10891, 10890, 11320, 10892, 10965, 10893, 10894, 11013, 11076, 11015, 10971, 11016, 10896, 10898, 11322, 11324, 11117, 11116, 11128, 11074, 11013, 11129, 10899, 10956, 10958, 10960, 10903, 10904, 10905, 10906, 10974, 10907, 10976, 10975, 10978, 10977, 10979, 10994, 10908, 10995, 10997, 10996, 10998, 10981, 10980, 10982, 10984, 10983, 10985, 11327, 10910, 10909, 10911, 10913, 10912, 10914, 11329, 11331, 11333, 10915, 10987, 10986, 10988, 10990, 10989, 10991, 11335, 10916, 10917, 11337, 10919, 10918, 10920, 10922, 10921, 10923, 10924, 10927, 
10926, 10928, 10930, 10929, 10931, 10933, 10932, 10935, 10934, 10936, 10938, 10937, 10939, 4296, 4296, 10943, 10942, 10944, 10946, 10945, 10947, 4333, 4333, 11117, 11116, 11164, 11119, 11166, 11076, 11069, 10950, 10951, 10963, 10961, 11339, 11341, 11127, 10952, 10962, 11119, 10953, 11076, 10954, 10956, 10958, 10960, 10961, 11343, 11024, 11345, 11347, 11127, 11019, 10962, 11075, 11165, 11129, 11069, 11021, 10964, 10963, 11349, 11351, 10966, 10965, 11164, 11119, 11166, 11167, 10968, 10967, 10970, 10969, 11168, 10972, 10971, 11353, 11355, 11357, 11359, 11361, 11363, 10974, 10973, 10976, 10975, 10978, 10977, 10979, 10981, 10980, 10982, 10984, 10983, 10985, 11368, 10987, 10986, 10988, 10990, 10989, 10991, 11370, 10992, 10994, 10993, 10995, 10997, 10996, 10998, 11000, 10999, 11001, 11003, 11002, 11004, 11373, 11375, 11006, 11005, 11007, 11009, 11008, 11010, 11377, 11379, 11163, 11011, 11012, 11074, 11013, 11014, 11015, 11017, 11016, 11018, 11174, 11381, 11383, 11385, 11387, 11127, 11019, 11020, 11119, 11166, 11120, 11069, 11021, 11023, 11022, 11024, 11389, 11026, 11025, 11027, 11029, 11028, 11030, 4296, 11391, 4296, 11033, 11035, 11034, 11036, 11038, 11037, 11039, 4333, 11393, 4333, 11042, 11395, 11397, 11044, 11043, 11045, 11047, 11046, 11048, 4296, 4296, 11052, 11051, 11053, 11055, 11054, 11056, 4296, 11399, 11059, 11058, 11060, 11062, 11061, 11063, 4333, 4333, 4333, 11401, 11405, 11163, 11067, 11128, 11119, 11166, 11129, 11069, 11068, 11071, 11070, 11137, 11407, 11072, 11117, 11073, 11075, 11074, 11076, 11078, 11077, 11080, 11079, 11082, 11081, 11083, 11410, 11085, 11084, 11086, 11088, 11087, 11089, 4296, 11092, 11091, 11093, 11095, 11094, 11096, 4296, 11099, 11098, 11100, 11102, 11101, 11103, 4333, 4333, 11107, 11106, 11108, 11110, 11109, 11111, 11113, 11112, 11114, 11117, 11116, 11164, 11119, 11118, 11120, 11122, 11121, 11123, 11125, 11124, 11126, 11415, 11417, 11163, 11127, 11128, 11166, 11165, 11129, 11131, 11130, 11133, 11132, 11135, 11134, 11136, 11137, 11420, 
11139, 11138, 11140, 11142, 11141, 11143, 4296, 4296, 11147, 11146, 11148, 11150, 11149, 11151, 4296, 11423, 11154, 11153, 11155, 11157, 11156, 11158, 4333, 4333, 4333, 11427, 11429, 11434, 11163, 11162, 11164, 11166, 11165, 11167, 11168, 11171, 11170, 11172, 11174, 11438, 11440, 11442, 11445, 11448, 11451, 11366, 11366, 11238, 11238, 11413, 11413, 11431, 11413, 11413, 11413, 11413, 11413, 11413, 11403, 11403, 11403, 11403, 11431, 11431, 11431, 11431, 11371, 11371, 11366, 11366, 11238, 11238, 11371, 11371, 11238, 11238, 11366, 11366, 11366, 11366, 11371, 11371, 11238, 11238, 11371, 11371, 11431, 11238, 11238, 11431, 11403, 11403, 11225, 11225, 11371, 11371, 11238, 11238, 11371, 11366, 11403, 11366, 11366, 11371, 11371, 11412, 11413, 11238, 11238, 11366, 11238, 11366, 11402, 11325, 11325, 11431, 11431, 11431, 11431, 11408, 11408, 11408, 11408, 11408, 11411, 11411, 11411, 11411, 11411, 11411, 11411, 11411, 11412, 11411, 11411, 11325, 11411, 11411, 11411, 11411, 11431, 11411, 11403, 11431, 11431, 11412, 11431, 11431, 11325, 11431, 11431, 11431, 11431, 11431, 11431, 11325, 11411, 11411, 11402, 11411, 11411, 11403, 11411, 11411, 11411, 11411, 11366, 11371, 11411, 11402, 11431, 11431, 11431, 11431, 11411, 11411, 11402, 11411, 11411, 11403, 11411, 11411, 11408, 11411, 11411, 11412, 11413, 11431, 11431, 11431, 11431, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4410, 4411, 4412, 4413, 4414, 4415, 4416, 4417, 4418, 4419, 4420, 4421, 4422, 4423, 4424, 4425, 4426, 4427, 4428, 4429, 4430, 4431, 4432, 4433, 4436, 4437, 4438, 4443, 4444, 4445, 4446, 4447, 4448, 4449, 4450, 4451, 4452, 4453, 4454, 4455, 4456, 4457, 4458, 4459, 4460, 4461, 4462, 4463, 4464, 4465, 4466, 4467, 4468, 4469, 4470, 4471, 4472, 4473, 4474, 4475, 4476, 4477, 4478, 4479, 4480, 4481, 4482, 4483, 4484, 4485, 4486, 4487, 4488, 4489, 4490, 4491, 4492, 4493, 4494, 4495, 4496, 4497, 4498, 4499, 4500, 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, 
4519, 4520, 4521, 4522, 4523, 4524, 4525, 4526, 4527, 4528, 4529, 4530, 4531, 4532, 4533, 4534, 4535, 4536, 4537, 4538, 4541, 4542, 4543, 4544, 4545, 4546, 4547, 4548, 4549, 4550, 4551, 4552, 4553, 4554, 4555, 4556, 4557, 4558, 4559, 4560, 4561, 4562, 4565, 4566, 4567, 4568, 4569, 4570, 4571, 4572, 4573, 4574, 4575, 4576, 4577, 4578, 4579, 4580, 4581, 4582, 4583, 4584, 4585, 4586, 4587, 4588, 4589, 4590, 4591, 4592, 4593, 4594, 4595, 4598, 4599, 4600, 4601, 4602, 4603, 4604, 4605, 4606, 4607, 4608, 4609, 4610, 4611, 4612, 4613, 4614, 4615, 4616, 4617, 4618, 4619, 4620, 4621, 4622, 4623, 4624, 4625, 4626, 4627, 4628, 4629, 4630, 4631, 4632, 4633, 4634, 4635, 4636, 4637, 4638, 4639, 4640, 4641, 4642, 4643, 4644, 4645, 4646, 4647, 4648, 4649, 4650, 4651, 4652, 4653, 4654, 4655, 4656, 4657, 4658, 4659, 4660, 4661, 4662, 4663, 4664, 4665, 4666, 4667, 4668, 4669, 4670, 4671, 4672, 4673, 4674, 4675, 4676, 4677, 4678, 4679, 4680, 4681, 4682, 4683, 4684, 4685, 4686, 4687, 4688, 4689, 4690, 4691, 4692, 4693, 4694, 4695, 4696, 4697, 4698, 4699, 4700, 4701, 4702, 4705, 4706, 4709, 4710, 4711, 4712, 4713, 4714, 4715, 4716, 4717, 4718, 4719, 4720, 4721, 4722, 4723, 4724, 4725, 4726, 4727, 4728, 4729, 4730, 4731, 4732, 4739, 4740, 4741, 4742, 4743, 4744, 4745, 4746, 4747, 4748, 4749, 4750, 4751, 4752, 4753, 4754, 4755, 4756, 4757, 4758, 4759, 4760, 4761, 4762, 4763, 4764, 4765, 4766, 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, 4775, 4776, 4777, 4778, 4779, 4780, 4781, 4782, 4783, 4784, 4785, 4786, 4791, 4792, 4793, 4794, 4795, 4796, 4803, 4804, 4805, 4806, 4807, 4808, 4809, 4810, 4811, 4812, 4813, 4814, 4815, 4816, 4817, 4818, 4819, 4820, 4821, 4822, 4823, 4824, 4825, 4826, 4827, 4828, 4829, 4830, 4831, 4832, 4833, 4834, 4835, 4836, 4837, 4838, 4839, 4840, 4841, 4842, 4843, 4844, 4845, 4846, 4847, 4848, 4849, 4850, 4851, 4852, 4853, 4854, 4855, 4856, 4857, 4858, 4859, 4860, 4861, 4862, 4863, 4864, 4865, 4866, 4867, 4868, 4869, 4870, 4871, 4872, 4873, 4880, 4881, 4882, 4883, 
4884, 4885, 4886, 4887, 4888, 4889, 4890, 4891, 4892, 4893, 4894, 4895, 4896, 4897, 4898, 4899, 4900, 4901, 4902, 4903, 4904, 4905, 4906, 4907, 4908, 4909, 4910, 4911, 4912, 4913, 4914, 4915, 4916, 4917, 4918, 4919, 4920, 4921, 4922, 4923, 4924, 4925, 4926, 4927, 4928, 4929, 4930, 4931, 4932, 4933, 4934, 4935, 4936, 4937, 4938, 4939, 4940, 4941, 4942, 4943, 4944, 4945, 4946, 4947, 4948, 4949, 4950, 4951, 4952, 4953, 4954, 4955, 4956, 4957, 4958, 4959, 4960, 4961, 4962, 4963, 4964, 4965, 4966, 4967, 4968, 4969, 4970, 4971, 4972, 4973, 4974, 4983, 4984, 4985, 4990, 4991, 4992, 4993, 4994, 4995, 4996, 4997, 4998, 4999, 5000, 5001, 5002, 5003, 5004, 5005, 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5020, 5021, 5022, 5023, 5024, 5025, 5026, 5027, 5028, 5029, 5030, 5031, 5032, 5033, 5034, 5035, 5036, 5037, 5038, 5039, 5040, 5041, 5042, 5043, 5044, 5045, 5046, 5047, 5048, 5049, 5050, 5051, 5052, 5053, 5054, 5055, 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, 5074, 5075, 5076, 5077, 5078, 5079, 5080, 5081, 5082, 5083, 5084, 5085, 5086, 5087, 5088, 5089, 5090, 5091, 5092, 5093, 5094, 5095, 5096, 5097, 5098, 5099, 5100, 5101, 5102, 5103, 5104, 5105, 5106, 5107, 5108, 5109, 5110, 5111, 5112, 5113, 5114, 5115, 5116, 5117, 5118, 5119, 5120, 5121, 5122, 5123, 5124, 5125, 5126, 5127, 5129, 5130, 5131, 5132, 5133, 5134, 5135, 5136, 5137, 5138, 5139, 5140, 5141, 5142, 5143, 5144, 5145, 5146, 5147, 5152, 5153, 5154, 5155, 5156, 5157, 5158, 5159, 5160, 5161, 5162, 5163, 5164, 5165, 5166, 5167, 5168, 5169, 5170, 5171, 5172, 5173, 5174, 5175, 5176, 5177, 5178, 5179, 5182, 5183, 5184, 5185, 5186, 5187, 5188, 5189, 5190, 5191, 5192, 5193, 5194, 5195, 5202, 5203, 5204, 5205, 5206, 5207, 5208, 5209, 5210, 5211, 5212, 5213, 5214, 5215, 5216, 5217, 5218, 5219, 5220, 5221, 5222, 5223, 5224, 5225, 5226, 5227, 5228, 5229, 5230, 5231, 5232, 5233, 5234, 5235, 5236, 5237, 5238, 5239, 5240, 5241, 
5242, 5243, 5244, 5245, 5246, 5247, 5248, 5249, 5250, 5251, 5252, 5253, 5254, 5255, 5256, 5257, 5258, 5259, 5260, 5261, 5262, 5263, 5264, 5265, 5266, 5267, 5268, 5269, 5270, 5271, 5272, 5273, 5274, 5275, 5276, 5277, 5278, 5280, 5281, 5282, 5283, 5284, 5285, 5286, 5287, 5288, 5289, 5290, 5291, 5292, 5293, 5294, 5295, 5296, 5297, 5298, 5299, 5300, 5301, 5302, 5303, 5304, 5305, 5306, 5307, 5308, 5309, 5310, 5311, 5312, 5313, 5314, 5315, 5316, 5317, 5318, 5319, 5320, 5321, 5322, 5323, 5324, 5325, 5326, 5327, 5328, 5329, 5330, 5331, 5332, 5333, 5334, 5335, 5336, 5337, 5338, 5339, 5340, 5341, 5342, 5343, 5344, 5345, 5346, 5347, 5348, 5349, 5350, 5351, 5352, 5353, 5354, 5355, 5356, 5357, 5358, 5359, 5360, 5361, 5362, 5363, 5364, 5365, 5366, 5367, 5368, 5371, 5372, 5375, 5376, 5377, 5378, 5379, 5380, 5381, 5382, 5383, 5384, 5385, 5387, 5388, 5390, 5391, 5392, 5393, 5394, 5395, 5396, 5397, 5398, 5399, 5400, 5401, 5402, 5403, 5404, 5413, 5414, 5415, 5416, 5417, 5418, 5421, 5422, 5423, 5424, 5425, 5426, 5429, 5430, 5431, 5432, 5433, 5434, 5435, 5436, 5437, 5438, 5439, 5440, 5441, 5442, 5449, 5450, 5451, 5452, 5453, 5454, 5455, 5456, 5457, 5458, 5459, 5460, 5461, 5462, 5463, 5464, 5465, 5466, 5473, 5474, 5475, 5476, 5477, 5478, 5479, 5480, 5481, 5482, 5483, 5486, 5491, 5492, 5493, 5494, 5495, 5496, 5497, 5498, 5499, 5500, 5501, 5502, 5505, 5506, 5507, 5508, 5509, 5510, 5511, 5512, 5513, 5514, 5515, 5516, 5517, 5518, 5519, 5520, 5521, 5522, 5523, 5524, 5525, 5526, 5527, 5528, 5531, 5532, 5533, 5534, 5535, 5536, 5537, 5538, 5539, 5540, 5543, 5544, 5545, 5546, 5547, 5548, 5549, 5550, 5551, 5552, 5553, 5554, 5555, 5556, 5557, 5558, 5559, 5562, 5563, 5564, 5565, 5566, 5567, 5568, 5569, 5570, 5571, 5572, 5573, 5574, 5575, 5576, 5577, 5580, 5581, 5582, 5583, 5584, 5585, 5586, 5589, 5590, 5591, 5592, 5593, 5594, 5595, 5596, 5597, 5598, 5601, 5602, 5603, 5604, 5605, 5606, 5607, 5608, 5609, 5610, 5611, 5612, 5613, 5618, 5619, 5620, 5621, 5622, 5623, 5626, 5627, 5628, 5629, 5630, 5631, 
5638, 5639, 5640, 5641, 5642, 5643, 5644, 5645, 5646, 5647, 5648, 5649, 5650, 5651, 5652, 5653, 5654, 5655, 5656, 5659, 5664, 5665, 5666, 5667, 5668, 5669, 5670, 5671, 5672, 5673, 5674, 5677, 5678, 5679, 5680, 5681, 5682, 5683, 5684, 5685, 5686, 5687, 5688, 5689, 5690, 5693, 5694, 5695, 5696, 5697, 5698, 5699, 5700, 5701, 5702, 5703, 5704, 5707, 5708, 5709, 5710, 5711, 5712, 5713, 5714, 5715, 5716, 5717, 5718, 5719, 5722, 5723, 5724, 5725, 5726, 5727, 5728, 5729, 5730, 5731, 5732, 5733, 5738, 5739, 5740, 5741, 5742, 5743, 5744, 5745, 5746, 5747, 5748, 5749, 5750, 5751, 5752, 5753, 5754, 5757, 5758, 5759, 5760, 5761, 5762, 5763, 5764, 5765, 5766, 5767, 5772, 5773, 5774, 5775, 5776, 5777, 5778, 5779, 5780, 5781, 5782, 5783, 5784, 5785, 5787, 5788, 5789, 5790, 5791, 5792, 5793, 5794, 5795, 5796, 5797, 5798, 5799, 5800, 5801, 5802, 5803, 5804, 5805, 5808, 5809, 5810, 5811, 5812, 5813, 5820, 5821, 5822, 5823, 5824, 5825, 5826, 5829, 5830, 5833, 5834, 5835, 5836, 5837, 5838, 5839, 5840, 5841, 5842, 5843, 5844, 5845, 5846, 5847, 5848, 5849, 5850, 5851, 5852, 5853, 5854, 5855, 5856, 5857, 5858, 5859, 5860, 5861, 5862, 5863, 5864, 5865, 5866, 5867, 5868, 5869, 5870, 5871, 5872, 5873, 5874, 5879, 5880, 5881, 5882, 5883, 5884, 5885, 5886, 5887, 5888, 5889, 5892, 5897, 5898, 5899, 5900, 5901, 5902, 5903, 5904, 5905, 5906, 5911, 5912, 5913, 5914, 5915, 5916, 5917, 5918, 5919, 5920, 5921, 5922, 5923, 5936, 5937, 5939, 5940, 5942, 5943, 5944, 5946, 5947, 5948, 5949, 5950, 5951, 5954, 5955, 5956, 5957, 5958, 5959, 5962, 5963, 5964, 5965, 5966, 5967, 5968, 5970, 5971, 5972, 5973, 5974, 5975, 5980, 5981, 5982, 5983, 5984, 5985, 5990, 5991, 5992, 5993, 5994, 5995, 5996, 5997, 5998, 5999, 6000, 6009, 6010, 6011, 6012, 6013, 6014, 6015, 6016, 6017, 6018, 6019, 6022, 6023, 6024, 6025, 6026, 6027, 6028, 6031, 6032, 6033, 6034, 6035, 6036, 6037, 6038, 6039, 6042, 6043, 6048, 6049, 6050, 6051, 6052, 6053, 6054, 6055, 6056, 6057, 6058, 6059, 6060, 6061, 6062, 6065, 6066, 6067, 6068, 6069, 
6070, 6071, 6072, 6073, 6080, 6081, 6082, 6083, 6084, 6085, 6086, 6087, 6088, 6089, 6090, 6093, 6094, 6095, 6096, 6097, 6098, 6099, 6100, 6101, 6102, 6103, 6104, 6105, 6109, 6110, 6111, 6112, 6113, 6114, 6115, 6116, 6117, 6118, 6119, 6120, 6121, 6122, 6123, 6124, 6125, 6126, 6127, 6128, 6129, 6130, 6131, 6132, 6133, 6134, 6135, 6136, 6137, 6138, 6139, 6142, 6143, 6144, 6145, 6146, 6147, 6148, 6149, 6150, 6151, 6152, 6153, 6160, 6161, 6162, 6163, 6164, 6165, 6166, 6167, 6168, 6169, 6170, 6171, 6172, 6173, 6177, 6178, 6179, 6180, 6181, 6182, 6183, 6184, 6185, 6186, 6187, 6188, 6189, 6190, 6191, 6194, 6195, 6196, 6197, 6198, 6199, 6200, 6202, 6204, 6214, 6215, 6216, 6217, 6218, 6219, 6220, 6222, 6223, 6225, 6226, 11480, 11485, 11484, 6260, 6261, 6263, 6264, 6296, 6297, 6310, 6316, 6317, 6319, 6320, 11413, 6323, 6324, 6333, 6334, 6336, 6337, 11431, 6387, 6388, 6389, 6390, 11743, 11746, 6396, 6397, 6402, 6403, 11772, 11771, 6416, 6417, 6431, 6432, 11823, 11822, 11831, 11830, 6457, 6458, 6469, 6470, 6472, 6473, 6476, 6477, 11905, 11904, 6484, 6485, 11413, 12003, 11413, 12005, 12010, 12009, 6535, 6536, 6549, 6558, 6559, 6588, 6592, 6593, 6601, 6602, 6604, 6605, 6607, 6611, 6613, 12169, 12168, 6621, 6630, 6632, 6633, 6638, 6639, 12214, 12213, 6645, 6660, 6668, 6669, 6671, 6680, 6683, 6703, 6708, 6709, 6720, 6721, 6722, 6723, 12382, 12385, 11413, 12415, 11413, 12417, 12424, 12447, 12446, 6762, 11408, 12469, 12468, 12484, 12497, 6788, 6789, 6790, 6791, 12522, 6804, 6805, 6806, 6807, 6808, 6809, 12533, 6819, 6820, 6821, 6822, 6823, 6824, 6825, 6826, 12551, 6833, 6834, 6844, 12587, 12602, 12601, 12609, 12617, 12616, 12641, 12653, 12668, 12681, 6893, 12695, 12708, 12709, 6901, 6910, 6911, 6912, 6913, 6914, 6915, 6916, 6917, 12727, 6924, 6925, 12739, 6927, 6928, 12740, 6939, 12774, 12782, 12781, 12794, 6972, 6973, 6974, 6975, 6976, 6977, 6978, 6979, 6980, 6981, 12838, 12837, 12853, 12864, 12865, 12880, 12879, 12881, 12883, 12882, 12884, 7016, 12898, 7025, 12920, 12919, 12928, 
12927, 7034, 7035, 12940, 12943, 12942, 12955, 12963, 12973, 7059, 7060, 12976, 7062, 7063, 12977, 12993, 13003, 7079, 7080, 7081, 7082, 7083, 7084, 7085, 7086, 13004, 13016, 7098, 13030, 7114, 7115, 7116, 7121, 13075, 13074, 13090, 13106, 13116, 7145, 7146, 13117, 7148, 7149, 13118, 13131, 13130, 13132, 13134, 13133, 13135, 13, 14, 15, 13297, 13300, 13305, 13308, 13311, 13313, 13315, 13318, 13321, 13324, 13327, 13329, 13331, 13334, 13337, 13342, 13350, 13353, 13356, 13362, 13365, 13368, 13370, 13372, 13375, 13378, 13381, 13384, 13387, 13394, 13397, 13400, 13406, 13408, 13410, 13412, 13415, 13417, 13420, 13423, 13431, 13434, 13442, 13444, 13447, 13452, 13455, 13458, 13463, 13466, 13473, 13475, 13478, 13481, 13483, 13487, 13490, 13493, 13495, 13497, 13500, 13503, 13506, 13509, 13514, 13517, 13520, 13523, 13527, 13530, 13533, 13535, 13537, 13542, 13545, 13548, 13550, 13552, 13555, 13558, 13561, 13564, 13568, 13571, 13578, 13580, 13584, 13586, 13588, 13591, 13594, 13596, 13598, 13601, 13605, 13607, 13609, 13612, 13615, 13618, 13621, 13628, 13631, 13636, 13639, 13642, 13645, 13648, 13650, 13652, 13655, 13660, 13663, 13666, 13668, 13672, 13675, 13677, 13679, 13682, 13685, 13688, 13693, 13696, 13703, 13706, 13709, 13712, 13716, 13719, 13722, 13726, 13730, 13733, 13736, 13741, 13743, 13745, 13748, 13755, 13758, 13762, 13764, 13767, 13770, 13775, 13778, 13781, 13783, 13787, 13789, 13792, 13794, 13796, 13801, 13804, 13807, 13813, 13816, 13819, 13824, 13827, 13830, 13833, 13836, 13839, 13841, 13843, 13845, 13847, 13851, 13854, 13862, 13865, 13870, 13873, 13878, 13881, 13888, 13890, 13892, 13896, 13899, 13902, 13904, 13906, 13909, 13912, 13914, 13917, 13920, 13923, 13928, 13931, 13934, 13936, 13941, 13944, 13948, 13950, 13952, 13955, 13957, 13959, 13962, 13965, 13968, 13971, 13973, 13975, 13978, 13981, 13984, 13986, 13988, 13991, 13994, 13996, 13998, 14000, 14002, 14005, 14008, 14012, 14015, 14019, 14023, 14028, 14031, 14034, 14039, 14042, 14045, 14048, 14052, 14055, 14058, 
14063, 14066, 14068, 14070, 14073, 14078, 14080, 14082, 14085, 14088, 14092, 14097, 14100, 14103, 14106, 14109, 14111, 14113, 14116, 14119, 14121, 14123, 14126, 14129, 14132, 14134, 14136, 14139, 14142, 14145, 14148, 14151, 14153, 14155, 14158, 14161, 14164, 14168, 14170, 14173, 14177, 14180, 14184, 14187, 14192, 14194, 14196, 14198, 14203, 14205, 14207, 14212, 14215, 14222, 14225, 14228, 14231, 14235, 14237, 14239, 14242, 14245, 14249, 14252, 14255, 14258, 14262, 14266, 14269, 14278, 14281, 14284, 14287, 14290, 14293, 14300, 14303, 14308, 14311, 14315, 14318, 14322, 14324, 14327, 14332, 14335, 14339, 14341, 14344, 14351, 14354, 14358, 14361, 14365, 14368, 14371, 14374, 14376, 14378, 14381, 14384, 14387, 14390, 14393, 14396, 14400, 14403, 14407, 14410, 14419, 14422, 14425, 14427, 14430, 14433, 14436, 14438, 14440, 14444, 14447, 14450, 14452, 14454, 14456, 14459, 14462, 14464, 14466, 14469, 14472, 14475, 14478, 14481, 14484, 14489, 14492, 14496, 14498, 14501, 14505, 14509, 14512, 14523, 14525, 14527, 14530, 14533, 14536, 14539, 14542, 14545, 14549, 14552, 14557, 14560, 14564, 14567, 14570, 14572, 14575, 14580, 14583, 14588, 14591, 14594, 14596, 14599, 14602, 14611, 14614, 14617, 14619, 14621, 14624, 14627, 14629, 14632, 14634, 14636, 14638, 14641, 14644, 14647, 14650, 14654, 14657, 14660, 14663, 14666, 14669, 14672, 14675, 14679, 14683, 14686, 14689, 14691, 14694, 14697, 14703, 14706, 14712, 14715, 14720, 14723, 14727, 14730, 14736, 14739, 14742, 14744, 14747, 14750, 14753, 14755, 14757, 14760, 14763, 14767, 14770, 14774, 14777, 14782, 14785, 14788, 14791, 14794, 14797, 14800, 14803, 14806, 14809, 14811, 14813, 14817, 14820, 14825, 14828, 14832, 14835, 14841, 14844, 14848, 13302, 6252, 6254, 6255, 14855, 14857, 13339, 13346, 13345, 13344, 13348, 13358, 13360, 13389, 13390, 13391, 13392, 13402, 11413, 13418, 13427, 13426, 13425, 13429, 13438, 13437, 13436, 14862, 13440, 14864, 6321, 14867, 13449, 13450, 13460, 14869, 13461, 14871, 13470, 13469, 13468, 6343, 13484, 
13511, 13525, 13539, 13540, 13566, 13575, 13574, 13573, 14874, 14876, 6391, 6393, 11371, 14882, 11773, 6409, 6410, 14886, 13626, 13625, 13624, 13623, 13634, 13633, 14888, 6435, 6436, 11832, 6440, 6441, 13658, 13669, 13690, 14894, 13700, 13699, 13698, 13714, 14896, 14898, 11371, 11906, 6480, 6481, 14904, 13739, 13738, 13752, 13751, 13750, 13772, 13773, 13784, 13798, 13799, 13809, 13810, 13821, 6525, 6526, 6527, 6528, 6530, 6531, 14912, 13849, 13858, 13856, 13860, 13867, 13868, 13875, 14915, 13885, 13884, 13883, 13894, 13915, 13925, 13926, 13937, 13939, 13946, 14918, 14920, 14922, 6615, 6616, 14010, 14931, 11366, 11371, 12215, 6642, 6643, 14026, 14036, 14037, 14050, 14060, 14075, 14939, 14090, 14095, 14094, 14945, 14175, 14182, 14190, 14189, 14947, 14949, 6724, 6726, 14201, 14200, 14210, 14209, 14219, 14218, 14217, 6742, 6743, 6744, 6745, 6748, 12431, 12448, 6758, 6759, 14263, 14260, 6768, 6769, 6770, 14273, 14271, 14276, 14275, 6777, 6782, 14297, 14296, 14295, 14966, 14968, 14306, 14305, 14313, 6799, 14320, 14971, 14973, 14975, 6810, 14330, 14329, 14337, 14978, 14981, 14984, 6827, 14348, 14347, 14346, 14987, 12568, 12576, 6845, 6851, 6852, 6855, 12618, 6859, 6860, 14405, 14414, 14412, 14417, 14416, 6870, 14428, 6875, 14442, 6882, 6887, 6894, 6899, 6900, 14487, 14486, 14494, 15005, 15008, 15011, 6918, 14506, 14503, 15014, 6926, 15017, 6929, 14516, 14515, 14514, 14519, 14518, 14521, 14520, 6947, 12783, 6951, 6952, 12791, 14555, 6957, 14562, 14578, 14577, 14586, 14585, 15025, 15028, 15031, 15033, 14597, 6986, 6987, 14606, 14605, 14604, 14609, 14608, 6995, 6999, 7000, 14630, 7007, 7008, 7009, 7010, 7011, 7012, 7019, 12905, 7028, 7029, 7032, 7033, 14680, 14677, 7041, 7042, 7043, 14692, 7048, 14700, 7052, 14699, 14709, 7057, 14708, 15061, 7061, 15064, 7064, 14718, 14717, 7071, 14725, 7075, 14734, 14733, 14732, 15069, 15072, 15075, 7087, 14745, 7092, 7099, 14765, 14772, 14780, 14779, 14789, 15081, 7122, 7123, 14815, 7130, 14823, 14822, 7137, 14830, 7141, 14839, 14838, 
14837, 15090, 7147, 15093, 7150, 14849, 14846, 7156, 7157, 7158, 7159, 7160, 7161, 15083, 15083, 14959, 14959, 14860, 14860, 14988, 14988, 15052, 14988, 15052, 14988, 14988, 14959, 15078, 14959, 14999, 14913, 14913, 14916, 14916, 14937, 14937, 14937, 14937, 15052, 15052, 14959, 15052, 14988, 14959, 15052, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 15104, 15105, 6245, 15107, 15106, 15110, 15109, 15108, 15111, 15112, 15587, 15113, 15115, 15114, 15116, 15117, 15118, 6266, 15119, 6268, 6269, 6270, 6271, 15121, 15120, 6274, 15122, 6276, 15124, 15123, 15126, 15125, 15128, 15127, 15129, 15130, 15132, 15131, 6287, 6288, 6289, 6290, 15134, 15133, 6293, 15135, 6295, 15136, 11413, 15138, 15139, 15140, 6303, 15143, 15142, 6306, 6307, 6308, 6309, 15145, 15144, 6313, 6314, 6315, 6318, 15146, 15147, 15148, 6327, 6328, 15149, 15151, 15150, 6332, 6335, 15153, 15152, 6340, 6341, 6342, 15154, 15156, 15155, 6347, 15158, 15157, 15160, 15159, 15162, 15161, 15164, 15163, 15165, 15166, 15167, 6359, 15169, 15168, 15171, 15170, 6364, 15173, 15172, 15176, 15175, 15174, 6370, 6371, 15178, 15177, 15181, 15180, 15179, 15182, 15183, 15184, 15185, 6381, 15187, 15186, 6384, 6385, 6386, 15636, 11431, 15189, 6395, 15192, 15191, 15190, 15193, 15196, 15195, 15194, 15197, 6408, 15200, 15199, 15198, 15202, 15201, 15204, 15203, 6420, 6421, 6422, 6423, 15206, 15205, 6426, 6427, 15208, 15207, 15209, 15210, 15211, 15653, 15213, 15214, 6439, 6442, 15216, 15215, 6445, 15218, 15217, 15219, 15222, 15221, 15220, 15224, 15223, 15225, 6455, 15226, 15227, 6460, 6461, 6462, 15228, 15229, 15231, 15230, 6467, 15232, 15233, 15234, 6475, 15235, 6479, 15236, 15237, 15238, 6487, 6488, 15241, 15240, 15239, 15242, 6493, 6494, 6495, 15243, 15244, 13760, 15246, 15248, 15247, 6502, 6503, 15250, 15249, 6506, 15252, 15251, 15253, 15254, 15257, 15256, 15255, 6514, 6515, 15258, 15260, 15259, 6519, 6520, 15262, 15261, 6523, 15263, 15264, 15690, 15266, 15265, 15267, 15268, 15270, 15269, 15273, 15272, 15271, 6543, 15275, 15274, 
6546, 6547, 6548, 15276, 15277, 6552, 6553, 15278, 15279, 6556, 15280, 15281, 6561, 6562, 6563, 15284, 15283, 15282, 6567, 15286, 15285, 15288, 15287, 15290, 15289, 15292, 15291, 6576, 15293, 15295, 15294, 6580, 6581, 15297, 15296, 6584, 15299, 15298, 6587, 15300, 15301, 6591, 15304, 15303, 15302, 15307, 15306, 15305, 15308, 15309, 15310, 15313, 15312, 15311, 15314, 15315, 15714, 15318, 15317, 15316, 15319, 15323, 15322, 15321, 15320, 15324, 15326, 15325, 6629, 15327, 15328, 6635, 15329, 6637, 15330, 6641, 6644, 15331, 15333, 15332, 6649, 6650, 15335, 15334, 15337, 15336, 6655, 15339, 15338, 6658, 15340, 15341, 15344, 15343, 15342, 15345, 6666, 15346, 15348, 15350, 15349, 6674, 15351, 6676, 6677, 15353, 15352, 15355, 15354, 15358, 15357, 15356, 15359, 15362, 15361, 15360, 15364, 15363, 15366, 15365, 15368, 15367, 15369, 15371, 15370, 15374, 15373, 15372, 15375, 15377, 15376, 14166, 15380, 15379, 6712, 15382, 15381, 6715, 15384, 15383, 6718, 6719, 15738, 11431, 15388, 15387, 15386, 6730, 6731, 15391, 15390, 15389, 6735, 6736, 15393, 15392, 6739, 6740, 6741, 15395, 15394, 15397, 15396, 6751, 15400, 15399, 15398, 15402, 15401, 6757, 15404, 15403, 15406, 15405, 6765, 15407, 6767, 15761, 15409, 15408, 6773, 6774, 6775, 6776, 15411, 15410, 15413, 15412, 15415, 15414, 6785, 6786, 6787, 15417, 15416, 6794, 6795, 15419, 15418, 6798, 15421, 15420, 15422, 6803, 15424, 15423, 6813, 6814, 15426, 15425, 15427, 6818, 15429, 15428, 6830, 6831, 6832, 15431, 15430, 6837, 15433, 15432, 6840, 15435, 15434, 15436, 15439, 15438, 15437, 15441, 15440, 15797, 15443, 15442, 15445, 15444, 6858, 15447, 15446, 6863, 15449, 15448, 6866, 6867, 6868, 6869, 15451, 15450, 15452, 6874, 15455, 15454, 15458, 15457, 15456, 6881, 15460, 15459, 15461, 15463, 15465, 15464, 15468, 15467, 15466, 15470, 15469, 15472, 15471, 15474, 15473, 6904, 6905, 15476, 15475, 15477, 6909, 15479, 15478, 6921, 15480, 6923, 15482, 15481, 6932, 6933, 6934, 6935, 6936, 6937, 6938, 15485, 15484, 15483, 15487, 15486, 15489, 
15488, 15491, 15490, 6950, 15493, 15492, 6955, 6956, 15495, 15494, 6960, 15497, 15496, 15498, 15500, 15499, 6966, 6967, 15502, 15501, 6970, 6971, 15504, 15503, 15505, 6985, 15854, 15508, 15507, 6990, 6991, 6992, 6993, 6994, 15510, 15509, 15511, 15514, 15513, 15517, 7004, 15516, 15515, 15865, 15868, 15520, 15519, 15518, 15522, 15521, 15524, 15523, 7022, 15526, 15525, 15528, 15527, 15873, 15530, 15529, 15875, 15532, 15531, 7038, 15533, 7040, 15880, 15535, 15534, 15536, 7047, 15539, 15538, 7051, 7053, 15541, 15540, 7056, 7058, 15543, 15542, 7067, 7068, 15545, 15544, 7072, 15547, 15546, 7076, 7077, 7078, 15549, 15548, 15550, 7091, 15553, 15552, 15556, 15555, 15554, 15558, 15557, 7102, 15560, 15559, 7105, 15562, 15561, 7108, 7109, 15564, 15563, 7112, 15565, 15567, 15566, 15569, 15568, 15915, 15571, 15570, 15574, 15573, 15572, 7129, 15576, 15575, 7133, 7134, 15578, 15577, 7138, 15580, 15579, 7142, 7143, 7144, 15582, 15581, 7153, 15583, 7155, 15933, 15936, 7175, 7176, 7180, 7181, 7191, 7192, 15615, 15613, 7208, 7209, 7212, 7220, 7232, 7238, 7246, 7248, 7254, 7261, 7267, 15687, 15685, 7288, 7289, 7306, 7307, 7334, 7335, 7336, 7337, 7339, 7348, 7350, 7353, 7355, 15750, 15748, 7373, 14988, 15772, 15771, 15780, 15779, 15778, 15787, 15786, 15785, 15792, 14988, 14999, 15815, 15814, 15821, 15820, 15819, 15827, 15825, 15851, 15850, 15849, 15848, 15861, 7459, 15891, 15889, 15903, 15902, 15901, 15078, 15913, 15928, 15926, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 6243, 6244, 6246, 6247, 6248, 6249, 6250, 6251, 6253, 6256, 6257, 6258, 6259, 6262, 6265, 6267, 16004, 6272, 6273, 6275, 6277, 6278, 6279, 6280, 6281, 6282, 6283, 6284, 6285, 6286, 6291, 6292, 6294, 14858, 6298, 6299, 6300, 6301, 6302, 6304, 6305, 16040, 6311, 6312, 16046, 6322, 6325, 6326, 6329, 6330, 6331, 6338, 6339, 16062, 6344, 6345, 6346, 6348, 6349, 6350, 6351, 6352, 6353, 6354, 6355, 6356, 6357, 6358, 6360, 6361, 6362, 6363, 6365, 6366, 6367, 6368, 6369, 6372, 6373, 6374, 6375, 6376, 6377, 6378, 6379, 6380, 6382, 
6383, 16105, 6392, 6394, 14879, 6398, 6399, 6400, 6401, 6404, 6405, 6406, 6407, 15642, 6411, 6412, 6413, 6414, 6415, 6418, 6419, 16128, 16130, 6424, 6425, 16134, 6428, 6429, 6430, 6433, 6434, 6437, 6438, 15655, 6443, 6444, 6446, 6447, 6448, 6449, 6450, 6451, 6452, 6453, 6454, 6456, 6459, 16161, 6463, 6464, 6465, 6466, 6468, 6471, 6474, 14899, 6478, 15669, 6482, 6483, 6486, 16178, 6489, 6490, 6491, 6492, 16184, 6496, 6497, 6498, 6499, 6500, 6501, 6504, 6505, 6507, 6508, 6509, 6510, 6511, 6512, 6513, 6516, 6517, 6518, 6521, 6522, 6524, 6529, 6532, 6533, 6534, 6537, 6538, 6539, 6540, 6541, 6542, 6544, 6545, 16230, 6550, 6551, 6554, 6555, 6557, 6560, 16242, 6564, 6565, 6566, 6568, 6569, 6570, 6571, 6572, 6573, 6574, 6575, 6577, 6578, 6579, 6582, 6583, 6585, 6586, 6589, 6590, 6594, 6595, 6596, 6597, 6598, 6599, 6600, 6603, 6606, 6608, 6609, 6610, 6612, 6614, 6617, 6618, 6619, 6620, 6622, 6623, 6624, 6625, 6626, 6627, 6628, 6631, 6634, 6636, 14932, 6640, 15720, 6646, 6647, 6648, 6651, 6652, 6653, 6654, 6656, 6657, 6659, 6661, 6662, 6663, 6664, 6665, 6667, 6670, 6672, 6673, 6675, 16333, 6678, 6679, 6681, 6682, 6684, 6685, 6686, 6687, 6688, 6689, 6690, 6691, 6692, 6693, 6694, 6695, 6696, 6697, 6698, 6699, 6700, 6701, 6702, 6704, 6705, 6706, 6707, 6710, 6711, 6713, 6714, 6716, 6717, 16370, 6725, 6727, 6728, 6729, 16377, 6732, 6733, 6734, 16382, 6737, 6738, 16386, 6746, 6747, 6749, 6750, 6752, 6753, 6754, 6755, 6756, 15755, 6760, 6761, 6763, 6764, 6766, 6771, 6772, 16410, 16412, 6778, 6779, 6780, 6781, 6783, 6784, 16420, 6792, 6793, 16425, 6796, 6797, 15776, 6800, 6801, 6802, 6811, 6812, 16436, 6815, 6816, 6817, 6828, 6829, 16444, 6835, 6836, 6838, 6839, 6841, 6842, 6843, 6846, 6847, 6848, 6849, 6850, 6853, 6854, 6856, 6857, 15800, 6861, 6862, 6864, 6865, 16472, 16474, 6871, 6872, 6873, 6876, 6877, 6878, 6879, 6880, 6883, 6884, 6885, 6886, 6888, 6889, 6890, 6891, 6892, 6895, 6896, 6897, 6898, 6902, 6903, 16501, 6906, 6907, 6908, 6919, 6920, 6922, 6930, 6931, 16514, 16517, 
16519, 6940, 6941, 6942, 6943, 6944, 6945, 6946, 6948, 6949, 15838, 6953, 6954, 6958, 6959, 6961, 6962, 6963, 6964, 6965, 16543, 6968, 6969, 16547, 6982, 6983, 6984, 6988, 6989, 16556, 16559, 6996, 6997, 6998, 7001, 7002, 7003, 7005, 7006, 7013, 7014, 7015, 7017, 7018, 7020, 7021, 7023, 7024, 7026, 7027, 7030, 7031, 7036, 7037, 7039, 7044, 7045, 7046, 7049, 7050, 15884, 7054, 7055, 15887, 7065, 7066, 16608, 7069, 7070, 16611, 7073, 7074, 16614, 16616, 7088, 7089, 7090, 7093, 7094, 7095, 7096, 7097, 7100, 7101, 7103, 7104, 7106, 7107, 16635, 7110, 7111, 7113, 7117, 7118, 7119, 7120, 7124, 7125, 7126, 7127, 7128, 7131, 7132, 16654, 7135, 7136, 16657, 7139, 7140, 16660, 16662, 7151, 7152, 7154, 15986, 16006, 15083, 16671, 16673, 16024, 16022, 16036, 14860, 16675, 7195, 7197, 15611, 16053, 16052, 16058, 16057, 16679, 16079, 14988, 16090, 16101, 16107, 16144, 16167, 16192, 16204, 16209, 7279, 7280, 16226, 14913, 16692, 16235, 16234, 16247, 16256, 16260, 14916, 16694, 16270, 16297, 16305, 16309, 16315, 16696, 16698, 16371, 7366, 7367, 15759, 7379, 7381, 7382, 7386, 7387, 7388, 7391, 7392, 7393, 7395, 7399, 16468, 16478, 16484, 7415, 7417, 7418, 7421, 7422, 7423, 7425, 7426, 16533, 7440, 7441, 7442, 7443, 16551, 7449, 16570, 16569, 15878, 16596, 7466, 7467, 7471, 7472, 7473, 16620, 7477, 7482, 15083, 16650, 7490, 7491, 16669, 16668, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16755, 16757, 16763, 16005, 16770, 16771, 16773, 16775, 16777, 16781, 16783, 16784, 14859, 16792, 16041, 16795, 16047, 16802, 16804, 16063, 16808, 16809, 16812, 16814, 16816, 16821, 16823, 16825, 16827, 16830, 16832, 16839, 16106, 14880, 16845, 16849, 15643, 16854, 16857, 16859, 16861, 16863, 16866, 15656, 16874, 16875, 16879, 16882, 16162, 16890, 14900, 15670, 16902, 16185, 16911, 16913, 16914, 16919, 16923, 16925, 16926, 16929, 16933, 16935, 16938, 16243, 16948, 16951, 16953, 16955, 16957, 16960, 16962, 16963, 16968, 16971, 16977, 16982, 16986, 16988, 16991, 14933, 15721, 17000, 17002, 17004, 
17006, 17007, 17010, 17016, 17020, 17022, 17024, 17028, 17031, 17033, 17035, 17038, 17040, 17044, 17047, 17049, 17051, 17055, 17059, 17063, 16387, 17066, 17068, 17070, 17073, 15756, 17076, 17078, 17079, 17081, 17085, 17087, 17089, 16421, 17092, 17095, 17098, 16432, 17101, 17104, 16440, 17107, 16445, 17110, 17112, 17114, 17117, 17120, 17122, 17124, 15801, 17127, 17129, 17133, 17136, 17138, 17141, 17145, 17147, 17150, 17152, 17154, 17157, 16505, 17160, 17161, 17163, 16515, 17168, 17171, 17173, 17175, 15839, 17178, 17180, 17182, 17185, 17188, 17191, 17194, 16557, 17198, 17201, 16566, 17204, 17206, 17209, 17211, 17213, 17215, 17217, 17219, 17220, 17222, 17225, 16600, 17228, 16604, 17231, 17234, 17237, 17239, 17241, 17244, 17246, 17249, 17251, 17253, 17256, 17257, 17259, 17261, 17263, 17265, 17268, 17271, 17274, 17276, 17278, 17279, 16752, 7163, 16760, 16759, 16766, 16765, 16764, 7173, 7177, 7183, 7184, 16787, 16789, 7189, 7193, 15614, 7198, 16798, 7200, 7201, 7203, 7204, 15625, 16818, 7214, 7216, 7218, 16836, 16834, 7223, 16841, 7226, 16847, 16867, 16869, 16870, 16868, 7244, 16887, 16884, 16883, 7256, 16892, 16891, 16897, 16899, 16898, 16906, 16908, 7271, 14916, 7275, 7277, 17309, 16927, 16930, 7286, 7290, 16940, 7292, 7293, 16942, 16943, 16944, 7299, 7302, 7304, 7308, 16965, 7310, 16980, 16975, 16973, 16979, 16984, 16974, 7323, 16993, 16992, 7328, 7330, 7332, 17326, 17013, 17017, 17012, 17014, 17045, 17053, 7362, 17329, 7375, 17083, 17333, 17335, 17338, 7405, 17131, 7409, 7411, 17143, 17347, 17349, 17352, 17166, 17165, 7435, 17355, 17357, 7445, 17196, 7451, 7452, 7461, 7463, 17365, 17367, 7475, 7484, 7486, 17375, 7493, 7494, 9, 10, 11, 12, 13, 14, 15, 16758, 16810, 16828, 16833, 16846, 16850, 16855, 16876, 16880, 16903, 16915, 16920, 16936, 16949, 16964, 16969, 16972, 16978, 16983, 17471, 17011, 17025, 17029, 17041, 17056, 17060, 17071, 16405, 17118, 17139, 17148, 16510, 17169, 17562, 17207, 16591, 17247, 17266, 16667, 7162, 16761, 17392, 7167, 7168, 16767, 7170, 
7171, 7172, 17396, 17606, 17400, 17398, 16779, 17608, 17402, 7186, 17404, 7188, 17405, 17612, 17407, 7196, 17614, 7199, 17617, 16800, 17619, 17410, 7206, 17416, 17414, 7213, 17417, 7221, 7222, 17423, 7225, 16851, 16842, 7231, 17431, 17434, 17433, 17430, 7239, 16871, 7241, 7242, 7243, 7249, 7250, 16885, 7252, 17439, 16888, 16895, 16893, 7259, 7260, 16904, 7264, 7265, 7266, 7268, 7269, 16909, 7273, 16921, 17451, 7282, 16931, 17453, 7285, 17456, 17655, 7291, 17658, 7295, 7296, 7297, 16945, 17461, 17459, 16958, 17665, 7309, 7311, 7313, 7316, 7317, 7318, 7321, 16989, 16996, 16994, 7326, 7327, 16998, 17476, 17478, 17482, 7342, 17481, 17483, 7345, 7346, 7347, 17488, 17486, 17042, 7357, 17494, 17493, 17492, 7361, 17497, 17500, 17499, 17502, 17504, 17507, 7377, 17508, 17510, 17514, 17513, 17512, 17336, 17517, 17516, 17339, 17519, 17523, 17522, 17521, 17527, 17526, 17525, 17529, 17530, 7407, 17531, 17534, 7413, 17537, 17540, 17539, 17350, 17544, 7428, 7429, 17551, 17548, 17549, 17547, 17555, 17554, 17553, 17552, 17706, 17556, 17557, 7447, 17559, 17710, 17565, 17564, 17568, 17567, 17566, 17571, 17574, 17572, 17578, 17577, 17576, 17368, 17580, 17586, 17585, 17584, 17583, 17588, 17594, 17593, 17592, 17720, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17728, 7165, 7166, 7169, 17775, 7174, 7178, 7179, 7182, 7185, 7187, 7190, 7194, 17789, 7202, 7205, 17412, 7210, 7211, 7215, 17419, 17421, 17802, 7224, 17629, 17732, 17733, 7229, 7230, 17734, 7234, 7235, 7236, 7237, 7240, 17815, 17436, 16877, 7251, 7253, 17818, 7255, 7257, 7258, 17826, 17737, 7263, 17829, 7270, 17447, 16917, 7276, 7278, 17740, 7283, 7284, 7287, 17741, 7298, 17847, 7300, 7301, 7303, 17464, 17746, 17745, 17743, 17744, 17747, 17858, 7322, 7324, 7325, 17864, 7329, 7331, 7333, 17008, 7340, 17749, 7343, 7344, 17874, 17026, 7351, 7352, 17489, 7356, 7358, 7359, 7360, 17687, 7363, 17753, 17752, 7368, 7369, 7370, 7371, 17754, 17505, 7376, 7378, 7380, 7383, 7384, 7385, 7389, 7390, 7394, 7396, 7397, 7398, 7400, 7401, 7402, 
17756, 7404, 7406, 7408, 17532, 7412, 17535, 7416, 7419, 7420, 17542, 7427, 17918, 7430, 17760, 7432, 7433, 7434, 7436, 7437, 7438, 7439, 7444, 7446, 7448, 17560, 7453, 7454, 17762, 7456, 7457, 7458, 17569, 7462, 7464, 7465, 7468, 7469, 7470, 7474, 17581, 7478, 7479, 7480, 7481, 7483, 17590, 7487, 7488, 7489, 17596, 17853, 17831, 17843, 17791, 17767, 17785, 17799, 14, 15, 7164, 17770, 17773, 17975, 17978, 17790, 7207, 17986, 7217, 7219, 7227, 7228, 17996, 7233, 18000, 18002, 7245, 7247, 17820, 18011, 7262, 7272, 7274, 7281, 18023, 7294, 18029, 7305, 7312, 7314, 7315, 7319, 7320, 18040, 7338, 7341, 18048, 17872, 7349, 18053, 7354, 18057, 7364, 7365, 7372, 18064, 18066, 7374, 18073, 18076, 18079, 7403, 18082, 7410, 7414, 18093, 7424, 7431, 18100, 18103, 18105, 7450, 7455, 18111, 18115, 7460, 18119, 18121, 7476, 18126, 18128, 7485, 18132, 7492, 17976, 7497, 7498, 18038, 18009, 17990, 7511, 18030, 17987, 7520, 18042, 7522, 18043, 18055, 18020, 18016, 17973, 18019, 17979, 17991, 7539, 17983, 7542, 18024, 18044, 17982, 18106, 18071, 18123, 18129, 18091, 18089, 18107, 18117, 18087, 18070, 18095, 18086, 18108, 18077, 18069, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17969, 17972, 18155, 17998, 18003, 18162, 18012, 18014, 17837, 17845, 18172, 18173, 17856, 18176, 18041, 18179, 18050, 18058, 18186, 18190, 18074, 18080, 18195, 18201, 18204, 18113, 18122, 18214, 18133, 7496, 18184, 7500, 18166, 17977, 18150, 7506, 18183, 7508, 18178, 18170, 7515, 18161, 18171, 7518, 18152, 7521, 7524, 7525, 18160, 7527, 7528, 7530, 7531, 7532, 7533, 18151, 7540, 18182, 17980, 7545, 7547, 18147, 7549, 18165, 18153, 18220, 18197, 18210, 7556, 18191, 18205, 18193, 18215, 7562, 7563, 7564, 18199, 18200, 7571, 7572, 18212, 7574, 7575, 7576, 18198, 7578, 7580, 7581, 7584, 18209, 7586, 18217, 7590, 13, 14, 15, 18145, 18156, 18158, 17816, 18007, 18015, 18168, 18027, 18283, 18037, 17859, 18180, 18187, 18188, 18294, 18202, 18297, 7499, 7501, 18286, 7504, 7505, 7507, 7512, 7514, 7516, 7517, 7519, 7526, 
18273, 7534, 18289, 18278, 7541, 7543, 7548, 7550, 7551, 18229, 18322, 18325, 18328, 7553, 7554, 18298, 7557, 7558, 18296, 7560, 7561, 7566, 18293, 18300, 7569, 7573, 7577, 18292, 7585, 18299, 7589, 18347, 18351, 18355, 15, 17771, 17807, 18001, 17830, 17840, 18026, 18377, 18288, 18085, 18101, 18208, 7503, 18372, 7529, 7535, 7536, 18371, 18303, 18307, 18309, 18391, 18393, 18315, 18227, 18396, 18398, 18240, 18403, 18404, 7555, 7559, 7567, 7568, 7582, 18380, 18381, 7588, 18411, 18414, 18345, 18353, 18357, 18362, 18364, 12, 13, 14, 15, 18378, 18432, 18433, 18434, 7510, 18435, 18439, 7538, 18436, 18437, 18388, 18323, 18447, 18454, 18406, 18457, 18458, 18460, 18442, 18440, 18441, 7583, 7587, 18340, 18416, 18464, 18465, 18428, 18430, 13, 14, 15, 7495, 7502, 7509, 7513, 7523, 18480, 7544, 7546, 18450, 18491, 7565, 7570, 7579, 18360, 18468, 18503, 18504, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 7537, 18301, 18513, 18484, 18392, 18318, 18518, 18519, 18495, 18418, 18523, 18358, 18525, 18526, 18528, 15, 18487, 18337, 18546, 18547, 18548, 18456, 18551, 18505, 18429, 18555, 18475, 11, 12, 13, 14, 15, 18560, 18562, 18563, 18493, 18565, 18497, 18567, 18508, 18556, 9, 10, 11, 12, 13, 14, 15, 18496, 18578, 18580, 18405, 18582, 18584, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18592, 18594, 18597, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18595, 18570, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18624, 18625, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 7592, 18640, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 7593, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18672, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
bool h_Op[]= {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
// Launch-geometry and problem-size constants for the device routine `ac`
// below (which evaluates what appears to be an arithmetic circuit: each
// slot is either a product or a sum of two earlier slots, selected by Op).
#define THREADS_PER_BLOCK 16
// NOTE(review): not referenced in the visible portion of this file;
// presumably used by the host-side kernel launch — confirm against caller.
#define BLOCKS_PER_GRID 1
// SIZE_OF_IN == 475 * THREADS_PER_BLOCK (7600). 475 matches the number of
// per-thread input slots `ac` stages from A[] into shared memory
// (R[i + 0*t] .. R[i + 474*t]) — TODO confirm against host-side usage.
#define SIZE_OF_IN 7600
// NOTE(review): presumably the total slot count of the circuit
// (inputs + gate outputs); not referenced in the visible code — verify.
#define SIZE_OF_AC 11104
__device__ void
ac(float *A, const int *B, const int *C, const bool *Op, int n_iter) {
int i= blockDim.x * blockIdx.x + threadIdx.x;
__shared__ float R[1169*THREADS_PER_BLOCK];
const int t= THREADS_PER_BLOCK;
__shared__ float final;
final=0;
R[i + 0*t] = A[i + 0*t];
R[i + 1*t] = A[i + 1*t];
R[i + 2*t] = A[i + 2*t];
R[i + 3*t] = A[i + 3*t];
R[i + 4*t] = A[i + 4*t];
R[i + 5*t] = A[i + 5*t];
R[i + 6*t] = A[i + 6*t];
R[i + 7*t] = A[i + 7*t];
R[i + 8*t] = A[i + 8*t];
R[i + 9*t] = A[i + 9*t];
R[i + 10*t] = A[i + 10*t];
R[i + 11*t] = A[i + 11*t];
R[i + 12*t] = A[i + 12*t];
R[i + 13*t] = A[i + 13*t];
R[i + 14*t] = A[i + 14*t];
R[i + 15*t] = A[i + 15*t];
R[i + 16*t] = A[i + 16*t];
R[i + 17*t] = A[i + 17*t];
R[i + 18*t] = A[i + 18*t];
R[i + 19*t] = A[i + 19*t];
R[i + 20*t] = A[i + 20*t];
R[i + 21*t] = A[i + 21*t];
R[i + 22*t] = A[i + 22*t];
R[i + 23*t] = A[i + 23*t];
R[i + 24*t] = A[i + 24*t];
R[i + 25*t] = A[i + 25*t];
R[i + 26*t] = A[i + 26*t];
R[i + 27*t] = A[i + 27*t];
R[i + 28*t] = A[i + 28*t];
R[i + 29*t] = A[i + 29*t];
R[i + 30*t] = A[i + 30*t];
R[i + 31*t] = A[i + 31*t];
R[i + 32*t] = A[i + 32*t];
R[i + 33*t] = A[i + 33*t];
R[i + 34*t] = A[i + 34*t];
R[i + 35*t] = A[i + 35*t];
R[i + 36*t] = A[i + 36*t];
R[i + 37*t] = A[i + 37*t];
R[i + 38*t] = A[i + 38*t];
R[i + 39*t] = A[i + 39*t];
R[i + 40*t] = A[i + 40*t];
R[i + 41*t] = A[i + 41*t];
R[i + 42*t] = A[i + 42*t];
R[i + 43*t] = A[i + 43*t];
R[i + 44*t] = A[i + 44*t];
R[i + 45*t] = A[i + 45*t];
R[i + 46*t] = A[i + 46*t];
R[i + 47*t] = A[i + 47*t];
R[i + 48*t] = A[i + 48*t];
R[i + 49*t] = A[i + 49*t];
R[i + 50*t] = A[i + 50*t];
R[i + 51*t] = A[i + 51*t];
R[i + 52*t] = A[i + 52*t];
R[i + 53*t] = A[i + 53*t];
R[i + 54*t] = A[i + 54*t];
R[i + 55*t] = A[i + 55*t];
R[i + 56*t] = A[i + 56*t];
R[i + 57*t] = A[i + 57*t];
R[i + 58*t] = A[i + 58*t];
R[i + 59*t] = A[i + 59*t];
R[i + 60*t] = A[i + 60*t];
R[i + 61*t] = A[i + 61*t];
R[i + 62*t] = A[i + 62*t];
R[i + 63*t] = A[i + 63*t];
R[i + 64*t] = A[i + 64*t];
R[i + 65*t] = A[i + 65*t];
R[i + 66*t] = A[i + 66*t];
R[i + 67*t] = A[i + 67*t];
R[i + 68*t] = A[i + 68*t];
R[i + 69*t] = A[i + 69*t];
R[i + 70*t] = A[i + 70*t];
R[i + 71*t] = A[i + 71*t];
R[i + 72*t] = A[i + 72*t];
R[i + 73*t] = A[i + 73*t];
R[i + 74*t] = A[i + 74*t];
R[i + 75*t] = A[i + 75*t];
R[i + 76*t] = A[i + 76*t];
R[i + 77*t] = A[i + 77*t];
R[i + 78*t] = A[i + 78*t];
R[i + 79*t] = A[i + 79*t];
R[i + 80*t] = A[i + 80*t];
R[i + 81*t] = A[i + 81*t];
R[i + 82*t] = A[i + 82*t];
R[i + 83*t] = A[i + 83*t];
R[i + 84*t] = A[i + 84*t];
R[i + 85*t] = A[i + 85*t];
R[i + 86*t] = A[i + 86*t];
R[i + 87*t] = A[i + 87*t];
R[i + 88*t] = A[i + 88*t];
R[i + 89*t] = A[i + 89*t];
R[i + 90*t] = A[i + 90*t];
R[i + 91*t] = A[i + 91*t];
R[i + 92*t] = A[i + 92*t];
R[i + 93*t] = A[i + 93*t];
R[i + 94*t] = A[i + 94*t];
R[i + 95*t] = A[i + 95*t];
R[i + 96*t] = A[i + 96*t];
R[i + 97*t] = A[i + 97*t];
R[i + 98*t] = A[i + 98*t];
R[i + 99*t] = A[i + 99*t];
R[i + 100*t] = A[i + 100*t];
R[i + 101*t] = A[i + 101*t];
R[i + 102*t] = A[i + 102*t];
R[i + 103*t] = A[i + 103*t];
R[i + 104*t] = A[i + 104*t];
R[i + 105*t] = A[i + 105*t];
R[i + 106*t] = A[i + 106*t];
R[i + 107*t] = A[i + 107*t];
R[i + 108*t] = A[i + 108*t];
R[i + 109*t] = A[i + 109*t];
R[i + 110*t] = A[i + 110*t];
R[i + 111*t] = A[i + 111*t];
R[i + 112*t] = A[i + 112*t];
R[i + 113*t] = A[i + 113*t];
R[i + 114*t] = A[i + 114*t];
R[i + 115*t] = A[i + 115*t];
R[i + 116*t] = A[i + 116*t];
R[i + 117*t] = A[i + 117*t];
R[i + 118*t] = A[i + 118*t];
R[i + 119*t] = A[i + 119*t];
R[i + 120*t] = A[i + 120*t];
R[i + 121*t] = A[i + 121*t];
R[i + 122*t] = A[i + 122*t];
R[i + 123*t] = A[i + 123*t];
R[i + 124*t] = A[i + 124*t];
R[i + 125*t] = A[i + 125*t];
R[i + 126*t] = A[i + 126*t];
R[i + 127*t] = A[i + 127*t];
R[i + 128*t] = A[i + 128*t];
R[i + 129*t] = A[i + 129*t];
R[i + 130*t] = A[i + 130*t];
R[i + 131*t] = A[i + 131*t];
R[i + 132*t] = A[i + 132*t];
R[i + 133*t] = A[i + 133*t];
R[i + 134*t] = A[i + 134*t];
R[i + 135*t] = A[i + 135*t];
R[i + 136*t] = A[i + 136*t];
R[i + 137*t] = A[i + 137*t];
R[i + 138*t] = A[i + 138*t];
R[i + 139*t] = A[i + 139*t];
R[i + 140*t] = A[i + 140*t];
R[i + 141*t] = A[i + 141*t];
R[i + 142*t] = A[i + 142*t];
R[i + 143*t] = A[i + 143*t];
R[i + 144*t] = A[i + 144*t];
R[i + 145*t] = A[i + 145*t];
R[i + 146*t] = A[i + 146*t];
R[i + 147*t] = A[i + 147*t];
R[i + 148*t] = A[i + 148*t];
R[i + 149*t] = A[i + 149*t];
R[i + 150*t] = A[i + 150*t];
R[i + 151*t] = A[i + 151*t];
R[i + 152*t] = A[i + 152*t];
R[i + 153*t] = A[i + 153*t];
R[i + 154*t] = A[i + 154*t];
R[i + 155*t] = A[i + 155*t];
R[i + 156*t] = A[i + 156*t];
R[i + 157*t] = A[i + 157*t];
R[i + 158*t] = A[i + 158*t];
R[i + 159*t] = A[i + 159*t];
R[i + 160*t] = A[i + 160*t];
R[i + 161*t] = A[i + 161*t];
R[i + 162*t] = A[i + 162*t];
R[i + 163*t] = A[i + 163*t];
R[i + 164*t] = A[i + 164*t];
R[i + 165*t] = A[i + 165*t];
R[i + 166*t] = A[i + 166*t];
R[i + 167*t] = A[i + 167*t];
R[i + 168*t] = A[i + 168*t];
R[i + 169*t] = A[i + 169*t];
R[i + 170*t] = A[i + 170*t];
R[i + 171*t] = A[i + 171*t];
R[i + 172*t] = A[i + 172*t];
R[i + 173*t] = A[i + 173*t];
R[i + 174*t] = A[i + 174*t];
R[i + 175*t] = A[i + 175*t];
R[i + 176*t] = A[i + 176*t];
R[i + 177*t] = A[i + 177*t];
R[i + 178*t] = A[i + 178*t];
R[i + 179*t] = A[i + 179*t];
R[i + 180*t] = A[i + 180*t];
R[i + 181*t] = A[i + 181*t];
R[i + 182*t] = A[i + 182*t];
R[i + 183*t] = A[i + 183*t];
R[i + 184*t] = A[i + 184*t];
R[i + 185*t] = A[i + 185*t];
R[i + 186*t] = A[i + 186*t];
R[i + 187*t] = A[i + 187*t];
R[i + 188*t] = A[i + 188*t];
R[i + 189*t] = A[i + 189*t];
R[i + 190*t] = A[i + 190*t];
R[i + 191*t] = A[i + 191*t];
R[i + 192*t] = A[i + 192*t];
R[i + 193*t] = A[i + 193*t];
R[i + 194*t] = A[i + 194*t];
R[i + 195*t] = A[i + 195*t];
R[i + 196*t] = A[i + 196*t];
R[i + 197*t] = A[i + 197*t];
R[i + 198*t] = A[i + 198*t];
R[i + 199*t] = A[i + 199*t];
R[i + 200*t] = A[i + 200*t];
R[i + 201*t] = A[i + 201*t];
R[i + 202*t] = A[i + 202*t];
R[i + 203*t] = A[i + 203*t];
R[i + 204*t] = A[i + 204*t];
R[i + 205*t] = A[i + 205*t];
R[i + 206*t] = A[i + 206*t];
R[i + 207*t] = A[i + 207*t];
R[i + 208*t] = A[i + 208*t];
R[i + 209*t] = A[i + 209*t];
R[i + 210*t] = A[i + 210*t];
R[i + 211*t] = A[i + 211*t];
R[i + 212*t] = A[i + 212*t];
R[i + 213*t] = A[i + 213*t];
R[i + 214*t] = A[i + 214*t];
R[i + 215*t] = A[i + 215*t];
R[i + 216*t] = A[i + 216*t];
R[i + 217*t] = A[i + 217*t];
R[i + 218*t] = A[i + 218*t];
R[i + 219*t] = A[i + 219*t];
R[i + 220*t] = A[i + 220*t];
R[i + 221*t] = A[i + 221*t];
R[i + 222*t] = A[i + 222*t];
R[i + 223*t] = A[i + 223*t];
R[i + 224*t] = A[i + 224*t];
R[i + 225*t] = A[i + 225*t];
R[i + 226*t] = A[i + 226*t];
R[i + 227*t] = A[i + 227*t];
R[i + 228*t] = A[i + 228*t];
R[i + 229*t] = A[i + 229*t];
R[i + 230*t] = A[i + 230*t];
R[i + 231*t] = A[i + 231*t];
R[i + 232*t] = A[i + 232*t];
R[i + 233*t] = A[i + 233*t];
R[i + 234*t] = A[i + 234*t];
R[i + 235*t] = A[i + 235*t];
R[i + 236*t] = A[i + 236*t];
R[i + 237*t] = A[i + 237*t];
R[i + 238*t] = A[i + 238*t];
R[i + 239*t] = A[i + 239*t];
R[i + 240*t] = A[i + 240*t];
R[i + 241*t] = A[i + 241*t];
R[i + 242*t] = A[i + 242*t];
R[i + 243*t] = A[i + 243*t];
R[i + 244*t] = A[i + 244*t];
R[i + 245*t] = A[i + 245*t];
R[i + 246*t] = A[i + 246*t];
R[i + 247*t] = A[i + 247*t];
R[i + 248*t] = A[i + 248*t];
R[i + 249*t] = A[i + 249*t];
R[i + 250*t] = A[i + 250*t];
R[i + 251*t] = A[i + 251*t];
R[i + 252*t] = A[i + 252*t];
R[i + 253*t] = A[i + 253*t];
R[i + 254*t] = A[i + 254*t];
R[i + 255*t] = A[i + 255*t];
R[i + 256*t] = A[i + 256*t];
R[i + 257*t] = A[i + 257*t];
R[i + 258*t] = A[i + 258*t];
R[i + 259*t] = A[i + 259*t];
R[i + 260*t] = A[i + 260*t];
R[i + 261*t] = A[i + 261*t];
R[i + 262*t] = A[i + 262*t];
R[i + 263*t] = A[i + 263*t];
R[i + 264*t] = A[i + 264*t];
R[i + 265*t] = A[i + 265*t];
R[i + 266*t] = A[i + 266*t];
R[i + 267*t] = A[i + 267*t];
R[i + 268*t] = A[i + 268*t];
R[i + 269*t] = A[i + 269*t];
R[i + 270*t] = A[i + 270*t];
R[i + 271*t] = A[i + 271*t];
R[i + 272*t] = A[i + 272*t];
R[i + 273*t] = A[i + 273*t];
R[i + 274*t] = A[i + 274*t];
R[i + 275*t] = A[i + 275*t];
R[i + 276*t] = A[i + 276*t];
R[i + 277*t] = A[i + 277*t];
R[i + 278*t] = A[i + 278*t];
R[i + 279*t] = A[i + 279*t];
R[i + 280*t] = A[i + 280*t];
R[i + 281*t] = A[i + 281*t];
R[i + 282*t] = A[i + 282*t];
R[i + 283*t] = A[i + 283*t];
R[i + 284*t] = A[i + 284*t];
R[i + 285*t] = A[i + 285*t];
R[i + 286*t] = A[i + 286*t];
R[i + 287*t] = A[i + 287*t];
R[i + 288*t] = A[i + 288*t];
R[i + 289*t] = A[i + 289*t];
R[i + 290*t] = A[i + 290*t];
R[i + 291*t] = A[i + 291*t];
R[i + 292*t] = A[i + 292*t];
R[i + 293*t] = A[i + 293*t];
R[i + 294*t] = A[i + 294*t];
R[i + 295*t] = A[i + 295*t];
R[i + 296*t] = A[i + 296*t];
R[i + 297*t] = A[i + 297*t];
R[i + 298*t] = A[i + 298*t];
R[i + 299*t] = A[i + 299*t];
R[i + 300*t] = A[i + 300*t];
R[i + 301*t] = A[i + 301*t];
R[i + 302*t] = A[i + 302*t];
R[i + 303*t] = A[i + 303*t];
R[i + 304*t] = A[i + 304*t];
R[i + 305*t] = A[i + 305*t];
R[i + 306*t] = A[i + 306*t];
R[i + 307*t] = A[i + 307*t];
R[i + 308*t] = A[i + 308*t];
R[i + 309*t] = A[i + 309*t];
R[i + 310*t] = A[i + 310*t];
R[i + 311*t] = A[i + 311*t];
R[i + 312*t] = A[i + 312*t];
R[i + 313*t] = A[i + 313*t];
R[i + 314*t] = A[i + 314*t];
R[i + 315*t] = A[i + 315*t];
R[i + 316*t] = A[i + 316*t];
R[i + 317*t] = A[i + 317*t];
R[i + 318*t] = A[i + 318*t];
R[i + 319*t] = A[i + 319*t];
R[i + 320*t] = A[i + 320*t];
R[i + 321*t] = A[i + 321*t];
R[i + 322*t] = A[i + 322*t];
R[i + 323*t] = A[i + 323*t];
R[i + 324*t] = A[i + 324*t];
R[i + 325*t] = A[i + 325*t];
R[i + 326*t] = A[i + 326*t];
R[i + 327*t] = A[i + 327*t];
R[i + 328*t] = A[i + 328*t];
R[i + 329*t] = A[i + 329*t];
R[i + 330*t] = A[i + 330*t];
R[i + 331*t] = A[i + 331*t];
R[i + 332*t] = A[i + 332*t];
R[i + 333*t] = A[i + 333*t];
R[i + 334*t] = A[i + 334*t];
R[i + 335*t] = A[i + 335*t];
R[i + 336*t] = A[i + 336*t];
R[i + 337*t] = A[i + 337*t];
R[i + 338*t] = A[i + 338*t];
R[i + 339*t] = A[i + 339*t];
R[i + 340*t] = A[i + 340*t];
R[i + 341*t] = A[i + 341*t];
R[i + 342*t] = A[i + 342*t];
R[i + 343*t] = A[i + 343*t];
R[i + 344*t] = A[i + 344*t];
R[i + 345*t] = A[i + 345*t];
R[i + 346*t] = A[i + 346*t];
R[i + 347*t] = A[i + 347*t];
R[i + 348*t] = A[i + 348*t];
R[i + 349*t] = A[i + 349*t];
R[i + 350*t] = A[i + 350*t];
R[i + 351*t] = A[i + 351*t];
R[i + 352*t] = A[i + 352*t];
R[i + 353*t] = A[i + 353*t];
R[i + 354*t] = A[i + 354*t];
R[i + 355*t] = A[i + 355*t];
R[i + 356*t] = A[i + 356*t];
R[i + 357*t] = A[i + 357*t];
R[i + 358*t] = A[i + 358*t];
R[i + 359*t] = A[i + 359*t];
R[i + 360*t] = A[i + 360*t];
R[i + 361*t] = A[i + 361*t];
R[i + 362*t] = A[i + 362*t];
R[i + 363*t] = A[i + 363*t];
R[i + 364*t] = A[i + 364*t];
R[i + 365*t] = A[i + 365*t];
R[i + 366*t] = A[i + 366*t];
R[i + 367*t] = A[i + 367*t];
R[i + 368*t] = A[i + 368*t];
R[i + 369*t] = A[i + 369*t];
R[i + 370*t] = A[i + 370*t];
R[i + 371*t] = A[i + 371*t];
R[i + 372*t] = A[i + 372*t];
R[i + 373*t] = A[i + 373*t];
R[i + 374*t] = A[i + 374*t];
R[i + 375*t] = A[i + 375*t];
R[i + 376*t] = A[i + 376*t];
R[i + 377*t] = A[i + 377*t];
R[i + 378*t] = A[i + 378*t];
R[i + 379*t] = A[i + 379*t];
R[i + 380*t] = A[i + 380*t];
R[i + 381*t] = A[i + 381*t];
R[i + 382*t] = A[i + 382*t];
R[i + 383*t] = A[i + 383*t];
R[i + 384*t] = A[i + 384*t];
R[i + 385*t] = A[i + 385*t];
R[i + 386*t] = A[i + 386*t];
R[i + 387*t] = A[i + 387*t];
R[i + 388*t] = A[i + 388*t];
R[i + 389*t] = A[i + 389*t];
R[i + 390*t] = A[i + 390*t];
R[i + 391*t] = A[i + 391*t];
R[i + 392*t] = A[i + 392*t];
R[i + 393*t] = A[i + 393*t];
R[i + 394*t] = A[i + 394*t];
R[i + 395*t] = A[i + 395*t];
R[i + 396*t] = A[i + 396*t];
R[i + 397*t] = A[i + 397*t];
R[i + 398*t] = A[i + 398*t];
R[i + 399*t] = A[i + 399*t];
R[i + 400*t] = A[i + 400*t];
R[i + 401*t] = A[i + 401*t];
R[i + 402*t] = A[i + 402*t];
R[i + 403*t] = A[i + 403*t];
R[i + 404*t] = A[i + 404*t];
R[i + 405*t] = A[i + 405*t];
R[i + 406*t] = A[i + 406*t];
R[i + 407*t] = A[i + 407*t];
R[i + 408*t] = A[i + 408*t];
R[i + 409*t] = A[i + 409*t];
R[i + 410*t] = A[i + 410*t];
R[i + 411*t] = A[i + 411*t];
R[i + 412*t] = A[i + 412*t];
R[i + 413*t] = A[i + 413*t];
R[i + 414*t] = A[i + 414*t];
R[i + 415*t] = A[i + 415*t];
R[i + 416*t] = A[i + 416*t];
R[i + 417*t] = A[i + 417*t];
R[i + 418*t] = A[i + 418*t];
R[i + 419*t] = A[i + 419*t];
R[i + 420*t] = A[i + 420*t];
R[i + 421*t] = A[i + 421*t];
R[i + 422*t] = A[i + 422*t];
R[i + 423*t] = A[i + 423*t];
R[i + 424*t] = A[i + 424*t];
R[i + 425*t] = A[i + 425*t];
R[i + 426*t] = A[i + 426*t];
R[i + 427*t] = A[i + 427*t];
R[i + 428*t] = A[i + 428*t];
R[i + 429*t] = A[i + 429*t];
R[i + 430*t] = A[i + 430*t];
R[i + 431*t] = A[i + 431*t];
R[i + 432*t] = A[i + 432*t];
R[i + 433*t] = A[i + 433*t];
R[i + 434*t] = A[i + 434*t];
R[i + 435*t] = A[i + 435*t];
R[i + 436*t] = A[i + 436*t];
R[i + 437*t] = A[i + 437*t];
R[i + 438*t] = A[i + 438*t];
R[i + 439*t] = A[i + 439*t];
R[i + 440*t] = A[i + 440*t];
R[i + 441*t] = A[i + 441*t];
R[i + 442*t] = A[i + 442*t];
R[i + 443*t] = A[i + 443*t];
R[i + 444*t] = A[i + 444*t];
R[i + 445*t] = A[i + 445*t];
R[i + 446*t] = A[i + 446*t];
R[i + 447*t] = A[i + 447*t];
R[i + 448*t] = A[i + 448*t];
R[i + 449*t] = A[i + 449*t];
R[i + 450*t] = A[i + 450*t];
R[i + 451*t] = A[i + 451*t];
R[i + 452*t] = A[i + 452*t];
R[i + 453*t] = A[i + 453*t];
R[i + 454*t] = A[i + 454*t];
R[i + 455*t] = A[i + 455*t];
R[i + 456*t] = A[i + 456*t];
R[i + 457*t] = A[i + 457*t];
R[i + 458*t] = A[i + 458*t];
R[i + 459*t] = A[i + 459*t];
R[i + 460*t] = A[i + 460*t];
R[i + 461*t] = A[i + 461*t];
R[i + 462*t] = A[i + 462*t];
R[i + 463*t] = A[i + 463*t];
R[i + 464*t] = A[i + 464*t];
R[i + 465*t] = A[i + 465*t];
R[i + 466*t] = A[i + 466*t];
R[i + 467*t] = A[i + 467*t];
R[i + 468*t] = A[i + 468*t];
R[i + 469*t] = A[i + 469*t];
R[i + 470*t] = A[i + 470*t];
R[i + 471*t] = A[i + 471*t];
R[i + 472*t] = A[i + 472*t];
R[i + 473*t] = A[i + 473*t];
R[i + 474*t] = A[i + 474*t];
__syncthreads();
for (int iter=0; iter< n_iter; iter++) {
R[i + 475*t] = Op[i + 0*t] ? R[B[i + 0*t]] * R[C[i + 0*t]] : R[B[i + 0*t]] + R[C[i + 0*t]];
R[i + 476*t] = Op[i + 1*t] ? R[B[i + 1*t]] * R[C[i + 1*t]] : R[B[i + 1*t]] + R[C[i + 1*t]];
R[i + 477*t] = Op[i + 2*t] ? R[B[i + 2*t]] * R[C[i + 2*t]] : R[B[i + 2*t]] + R[C[i + 2*t]];
R[i + 478*t] = Op[i + 3*t] ? R[B[i + 3*t]] * R[C[i + 3*t]] : R[B[i + 3*t]] + R[C[i + 3*t]];
R[i + 479*t] = Op[i + 4*t] ? R[B[i + 4*t]] * R[C[i + 4*t]] : R[B[i + 4*t]] + R[C[i + 4*t]];
R[i + 480*t] = Op[i + 5*t] ? R[B[i + 5*t]] * R[C[i + 5*t]] : R[B[i + 5*t]] + R[C[i + 5*t]];
R[i + 481*t] = Op[i + 6*t] ? R[B[i + 6*t]] * R[C[i + 6*t]] : R[B[i + 6*t]] + R[C[i + 6*t]];
R[i + 482*t] = Op[i + 7*t] ? R[B[i + 7*t]] * R[C[i + 7*t]] : R[B[i + 7*t]] + R[C[i + 7*t]];
R[i + 483*t] = Op[i + 8*t] ? R[B[i + 8*t]] * R[C[i + 8*t]] : R[B[i + 8*t]] + R[C[i + 8*t]];
R[i + 484*t] = Op[i + 9*t] ? R[B[i + 9*t]] * R[C[i + 9*t]] : R[B[i + 9*t]] + R[C[i + 9*t]];
R[i + 485*t] = Op[i + 10*t] ? R[B[i + 10*t]] * R[C[i + 10*t]] : R[B[i + 10*t]] + R[C[i + 10*t]];
R[i + 486*t] = Op[i + 11*t] ? R[B[i + 11*t]] * R[C[i + 11*t]] : R[B[i + 11*t]] + R[C[i + 11*t]];
R[i + 487*t] = Op[i + 12*t] ? R[B[i + 12*t]] * R[C[i + 12*t]] : R[B[i + 12*t]] + R[C[i + 12*t]];
R[i + 488*t] = Op[i + 13*t] ? R[B[i + 13*t]] * R[C[i + 13*t]] : R[B[i + 13*t]] + R[C[i + 13*t]];
R[i + 489*t] = Op[i + 14*t] ? R[B[i + 14*t]] * R[C[i + 14*t]] : R[B[i + 14*t]] + R[C[i + 14*t]];
R[i + 490*t] = Op[i + 15*t] ? R[B[i + 15*t]] * R[C[i + 15*t]] : R[B[i + 15*t]] + R[C[i + 15*t]];
R[i + 491*t] = Op[i + 16*t] ? R[B[i + 16*t]] * R[C[i + 16*t]] : R[B[i + 16*t]] + R[C[i + 16*t]];
R[i + 492*t] = Op[i + 17*t] ? R[B[i + 17*t]] * R[C[i + 17*t]] : R[B[i + 17*t]] + R[C[i + 17*t]];
R[i + 493*t] = Op[i + 18*t] ? R[B[i + 18*t]] * R[C[i + 18*t]] : R[B[i + 18*t]] + R[C[i + 18*t]];
R[i + 494*t] = Op[i + 19*t] ? R[B[i + 19*t]] * R[C[i + 19*t]] : R[B[i + 19*t]] + R[C[i + 19*t]];
R[i + 495*t] = Op[i + 20*t] ? R[B[i + 20*t]] * R[C[i + 20*t]] : R[B[i + 20*t]] + R[C[i + 20*t]];
R[i + 496*t] = Op[i + 21*t] ? R[B[i + 21*t]] * R[C[i + 21*t]] : R[B[i + 21*t]] + R[C[i + 21*t]];
R[i + 497*t] = Op[i + 22*t] ? R[B[i + 22*t]] * R[C[i + 22*t]] : R[B[i + 22*t]] + R[C[i + 22*t]];
R[i + 498*t] = Op[i + 23*t] ? R[B[i + 23*t]] * R[C[i + 23*t]] : R[B[i + 23*t]] + R[C[i + 23*t]];
R[i + 499*t] = Op[i + 24*t] ? R[B[i + 24*t]] * R[C[i + 24*t]] : R[B[i + 24*t]] + R[C[i + 24*t]];
R[i + 500*t] = Op[i + 25*t] ? R[B[i + 25*t]] * R[C[i + 25*t]] : R[B[i + 25*t]] + R[C[i + 25*t]];
R[i + 501*t] = Op[i + 26*t] ? R[B[i + 26*t]] * R[C[i + 26*t]] : R[B[i + 26*t]] + R[C[i + 26*t]];
R[i + 502*t] = Op[i + 27*t] ? R[B[i + 27*t]] * R[C[i + 27*t]] : R[B[i + 27*t]] + R[C[i + 27*t]];
R[i + 503*t] = Op[i + 28*t] ? R[B[i + 28*t]] * R[C[i + 28*t]] : R[B[i + 28*t]] + R[C[i + 28*t]];
R[i + 504*t] = Op[i + 29*t] ? R[B[i + 29*t]] * R[C[i + 29*t]] : R[B[i + 29*t]] + R[C[i + 29*t]];
R[i + 505*t] = Op[i + 30*t] ? R[B[i + 30*t]] * R[C[i + 30*t]] : R[B[i + 30*t]] + R[C[i + 30*t]];
R[i + 506*t] = Op[i + 31*t] ? R[B[i + 31*t]] * R[C[i + 31*t]] : R[B[i + 31*t]] + R[C[i + 31*t]];
R[i + 507*t] = Op[i + 32*t] ? R[B[i + 32*t]] * R[C[i + 32*t]] : R[B[i + 32*t]] + R[C[i + 32*t]];
R[i + 508*t] = Op[i + 33*t] ? R[B[i + 33*t]] * R[C[i + 33*t]] : R[B[i + 33*t]] + R[C[i + 33*t]];
R[i + 509*t] = Op[i + 34*t] ? R[B[i + 34*t]] * R[C[i + 34*t]] : R[B[i + 34*t]] + R[C[i + 34*t]];
R[i + 510*t] = Op[i + 35*t] ? R[B[i + 35*t]] * R[C[i + 35*t]] : R[B[i + 35*t]] + R[C[i + 35*t]];
R[i + 511*t] = Op[i + 36*t] ? R[B[i + 36*t]] * R[C[i + 36*t]] : R[B[i + 36*t]] + R[C[i + 36*t]];
R[i + 512*t] = Op[i + 37*t] ? R[B[i + 37*t]] * R[C[i + 37*t]] : R[B[i + 37*t]] + R[C[i + 37*t]];
R[i + 513*t] = Op[i + 38*t] ? R[B[i + 38*t]] * R[C[i + 38*t]] : R[B[i + 38*t]] + R[C[i + 38*t]];
R[i + 514*t] = Op[i + 39*t] ? R[B[i + 39*t]] * R[C[i + 39*t]] : R[B[i + 39*t]] + R[C[i + 39*t]];
R[i + 515*t] = Op[i + 40*t] ? R[B[i + 40*t]] * R[C[i + 40*t]] : R[B[i + 40*t]] + R[C[i + 40*t]];
R[i + 516*t] = Op[i + 41*t] ? R[B[i + 41*t]] * R[C[i + 41*t]] : R[B[i + 41*t]] + R[C[i + 41*t]];
R[i + 517*t] = Op[i + 42*t] ? R[B[i + 42*t]] * R[C[i + 42*t]] : R[B[i + 42*t]] + R[C[i + 42*t]];
R[i + 518*t] = Op[i + 43*t] ? R[B[i + 43*t]] * R[C[i + 43*t]] : R[B[i + 43*t]] + R[C[i + 43*t]];
R[i + 519*t] = Op[i + 44*t] ? R[B[i + 44*t]] * R[C[i + 44*t]] : R[B[i + 44*t]] + R[C[i + 44*t]];
R[i + 520*t] = Op[i + 45*t] ? R[B[i + 45*t]] * R[C[i + 45*t]] : R[B[i + 45*t]] + R[C[i + 45*t]];
R[i + 521*t] = Op[i + 46*t] ? R[B[i + 46*t]] * R[C[i + 46*t]] : R[B[i + 46*t]] + R[C[i + 46*t]];
R[i + 522*t] = Op[i + 47*t] ? R[B[i + 47*t]] * R[C[i + 47*t]] : R[B[i + 47*t]] + R[C[i + 47*t]];
R[i + 523*t] = Op[i + 48*t] ? R[B[i + 48*t]] * R[C[i + 48*t]] : R[B[i + 48*t]] + R[C[i + 48*t]];
R[i + 524*t] = Op[i + 49*t] ? R[B[i + 49*t]] * R[C[i + 49*t]] : R[B[i + 49*t]] + R[C[i + 49*t]];
R[i + 525*t] = Op[i + 50*t] ? R[B[i + 50*t]] * R[C[i + 50*t]] : R[B[i + 50*t]] + R[C[i + 50*t]];
R[i + 526*t] = Op[i + 51*t] ? R[B[i + 51*t]] * R[C[i + 51*t]] : R[B[i + 51*t]] + R[C[i + 51*t]];
R[i + 527*t] = Op[i + 52*t] ? R[B[i + 52*t]] * R[C[i + 52*t]] : R[B[i + 52*t]] + R[C[i + 52*t]];
R[i + 528*t] = Op[i + 53*t] ? R[B[i + 53*t]] * R[C[i + 53*t]] : R[B[i + 53*t]] + R[C[i + 53*t]];
R[i + 529*t] = Op[i + 54*t] ? R[B[i + 54*t]] * R[C[i + 54*t]] : R[B[i + 54*t]] + R[C[i + 54*t]];
R[i + 530*t] = Op[i + 55*t] ? R[B[i + 55*t]] * R[C[i + 55*t]] : R[B[i + 55*t]] + R[C[i + 55*t]];
R[i + 531*t] = Op[i + 56*t] ? R[B[i + 56*t]] * R[C[i + 56*t]] : R[B[i + 56*t]] + R[C[i + 56*t]];
R[i + 532*t] = Op[i + 57*t] ? R[B[i + 57*t]] * R[C[i + 57*t]] : R[B[i + 57*t]] + R[C[i + 57*t]];
R[i + 533*t] = Op[i + 58*t] ? R[B[i + 58*t]] * R[C[i + 58*t]] : R[B[i + 58*t]] + R[C[i + 58*t]];
R[i + 534*t] = Op[i + 59*t] ? R[B[i + 59*t]] * R[C[i + 59*t]] : R[B[i + 59*t]] + R[C[i + 59*t]];
R[i + 535*t] = Op[i + 60*t] ? R[B[i + 60*t]] * R[C[i + 60*t]] : R[B[i + 60*t]] + R[C[i + 60*t]];
R[i + 536*t] = Op[i + 61*t] ? R[B[i + 61*t]] * R[C[i + 61*t]] : R[B[i + 61*t]] + R[C[i + 61*t]];
R[i + 537*t] = Op[i + 62*t] ? R[B[i + 62*t]] * R[C[i + 62*t]] : R[B[i + 62*t]] + R[C[i + 62*t]];
R[i + 538*t] = Op[i + 63*t] ? R[B[i + 63*t]] * R[C[i + 63*t]] : R[B[i + 63*t]] + R[C[i + 63*t]];
R[i + 539*t] = Op[i + 64*t] ? R[B[i + 64*t]] * R[C[i + 64*t]] : R[B[i + 64*t]] + R[C[i + 64*t]];
R[i + 540*t] = Op[i + 65*t] ? R[B[i + 65*t]] * R[C[i + 65*t]] : R[B[i + 65*t]] + R[C[i + 65*t]];
R[i + 541*t] = Op[i + 66*t] ? R[B[i + 66*t]] * R[C[i + 66*t]] : R[B[i + 66*t]] + R[C[i + 66*t]];
R[i + 542*t] = Op[i + 67*t] ? R[B[i + 67*t]] * R[C[i + 67*t]] : R[B[i + 67*t]] + R[C[i + 67*t]];
R[i + 543*t] = Op[i + 68*t] ? R[B[i + 68*t]] * R[C[i + 68*t]] : R[B[i + 68*t]] + R[C[i + 68*t]];
R[i + 544*t] = Op[i + 69*t] ? R[B[i + 69*t]] * R[C[i + 69*t]] : R[B[i + 69*t]] + R[C[i + 69*t]];
R[i + 545*t] = Op[i + 70*t] ? R[B[i + 70*t]] * R[C[i + 70*t]] : R[B[i + 70*t]] + R[C[i + 70*t]];
R[i + 546*t] = Op[i + 71*t] ? R[B[i + 71*t]] * R[C[i + 71*t]] : R[B[i + 71*t]] + R[C[i + 71*t]];
R[i + 547*t] = Op[i + 72*t] ? R[B[i + 72*t]] * R[C[i + 72*t]] : R[B[i + 72*t]] + R[C[i + 72*t]];
R[i + 548*t] = Op[i + 73*t] ? R[B[i + 73*t]] * R[C[i + 73*t]] : R[B[i + 73*t]] + R[C[i + 73*t]];
R[i + 549*t] = Op[i + 74*t] ? R[B[i + 74*t]] * R[C[i + 74*t]] : R[B[i + 74*t]] + R[C[i + 74*t]];
R[i + 550*t] = Op[i + 75*t] ? R[B[i + 75*t]] * R[C[i + 75*t]] : R[B[i + 75*t]] + R[C[i + 75*t]];
R[i + 551*t] = Op[i + 76*t] ? R[B[i + 76*t]] * R[C[i + 76*t]] : R[B[i + 76*t]] + R[C[i + 76*t]];
R[i + 552*t] = Op[i + 77*t] ? R[B[i + 77*t]] * R[C[i + 77*t]] : R[B[i + 77*t]] + R[C[i + 77*t]];
R[i + 553*t] = Op[i + 78*t] ? R[B[i + 78*t]] * R[C[i + 78*t]] : R[B[i + 78*t]] + R[C[i + 78*t]];
R[i + 554*t] = Op[i + 79*t] ? R[B[i + 79*t]] * R[C[i + 79*t]] : R[B[i + 79*t]] + R[C[i + 79*t]];
R[i + 555*t] = Op[i + 80*t] ? R[B[i + 80*t]] * R[C[i + 80*t]] : R[B[i + 80*t]] + R[C[i + 80*t]];
R[i + 556*t] = Op[i + 81*t] ? R[B[i + 81*t]] * R[C[i + 81*t]] : R[B[i + 81*t]] + R[C[i + 81*t]];
R[i + 557*t] = Op[i + 82*t] ? R[B[i + 82*t]] * R[C[i + 82*t]] : R[B[i + 82*t]] + R[C[i + 82*t]];
R[i + 558*t] = Op[i + 83*t] ? R[B[i + 83*t]] * R[C[i + 83*t]] : R[B[i + 83*t]] + R[C[i + 83*t]];
R[i + 559*t] = Op[i + 84*t] ? R[B[i + 84*t]] * R[C[i + 84*t]] : R[B[i + 84*t]] + R[C[i + 84*t]];
R[i + 560*t] = Op[i + 85*t] ? R[B[i + 85*t]] * R[C[i + 85*t]] : R[B[i + 85*t]] + R[C[i + 85*t]];
R[i + 561*t] = Op[i + 86*t] ? R[B[i + 86*t]] * R[C[i + 86*t]] : R[B[i + 86*t]] + R[C[i + 86*t]];
R[i + 562*t] = Op[i + 87*t] ? R[B[i + 87*t]] * R[C[i + 87*t]] : R[B[i + 87*t]] + R[C[i + 87*t]];
R[i + 563*t] = Op[i + 88*t] ? R[B[i + 88*t]] * R[C[i + 88*t]] : R[B[i + 88*t]] + R[C[i + 88*t]];
R[i + 564*t] = Op[i + 89*t] ? R[B[i + 89*t]] * R[C[i + 89*t]] : R[B[i + 89*t]] + R[C[i + 89*t]];
R[i + 565*t] = Op[i + 90*t] ? R[B[i + 90*t]] * R[C[i + 90*t]] : R[B[i + 90*t]] + R[C[i + 90*t]];
R[i + 566*t] = Op[i + 91*t] ? R[B[i + 91*t]] * R[C[i + 91*t]] : R[B[i + 91*t]] + R[C[i + 91*t]];
R[i + 567*t] = Op[i + 92*t] ? R[B[i + 92*t]] * R[C[i + 92*t]] : R[B[i + 92*t]] + R[C[i + 92*t]];
R[i + 568*t] = Op[i + 93*t] ? R[B[i + 93*t]] * R[C[i + 93*t]] : R[B[i + 93*t]] + R[C[i + 93*t]];
R[i + 569*t] = Op[i + 94*t] ? R[B[i + 94*t]] * R[C[i + 94*t]] : R[B[i + 94*t]] + R[C[i + 94*t]];
R[i + 570*t] = Op[i + 95*t] ? R[B[i + 95*t]] * R[C[i + 95*t]] : R[B[i + 95*t]] + R[C[i + 95*t]];
R[i + 571*t] = Op[i + 96*t] ? R[B[i + 96*t]] * R[C[i + 96*t]] : R[B[i + 96*t]] + R[C[i + 96*t]];
R[i + 572*t] = Op[i + 97*t] ? R[B[i + 97*t]] * R[C[i + 97*t]] : R[B[i + 97*t]] + R[C[i + 97*t]];
R[i + 573*t] = Op[i + 98*t] ? R[B[i + 98*t]] * R[C[i + 98*t]] : R[B[i + 98*t]] + R[C[i + 98*t]];
R[i + 574*t] = Op[i + 99*t] ? R[B[i + 99*t]] * R[C[i + 99*t]] : R[B[i + 99*t]] + R[C[i + 99*t]];
R[i + 575*t] = Op[i + 100*t] ? R[B[i + 100*t]] * R[C[i + 100*t]] : R[B[i + 100*t]] + R[C[i + 100*t]];
R[i + 576*t] = Op[i + 101*t] ? R[B[i + 101*t]] * R[C[i + 101*t]] : R[B[i + 101*t]] + R[C[i + 101*t]];
R[i + 577*t] = Op[i + 102*t] ? R[B[i + 102*t]] * R[C[i + 102*t]] : R[B[i + 102*t]] + R[C[i + 102*t]];
R[i + 578*t] = Op[i + 103*t] ? R[B[i + 103*t]] * R[C[i + 103*t]] : R[B[i + 103*t]] + R[C[i + 103*t]];
R[i + 579*t] = Op[i + 104*t] ? R[B[i + 104*t]] * R[C[i + 104*t]] : R[B[i + 104*t]] + R[C[i + 104*t]];
R[i + 580*t] = Op[i + 105*t] ? R[B[i + 105*t]] * R[C[i + 105*t]] : R[B[i + 105*t]] + R[C[i + 105*t]];
R[i + 581*t] = Op[i + 106*t] ? R[B[i + 106*t]] * R[C[i + 106*t]] : R[B[i + 106*t]] + R[C[i + 106*t]];
R[i + 582*t] = Op[i + 107*t] ? R[B[i + 107*t]] * R[C[i + 107*t]] : R[B[i + 107*t]] + R[C[i + 107*t]];
R[i + 583*t] = Op[i + 108*t] ? R[B[i + 108*t]] * R[C[i + 108*t]] : R[B[i + 108*t]] + R[C[i + 108*t]];
R[i + 584*t] = Op[i + 109*t] ? R[B[i + 109*t]] * R[C[i + 109*t]] : R[B[i + 109*t]] + R[C[i + 109*t]];
R[i + 585*t] = Op[i + 110*t] ? R[B[i + 110*t]] * R[C[i + 110*t]] : R[B[i + 110*t]] + R[C[i + 110*t]];
R[i + 586*t] = Op[i + 111*t] ? R[B[i + 111*t]] * R[C[i + 111*t]] : R[B[i + 111*t]] + R[C[i + 111*t]];
R[i + 587*t] = Op[i + 112*t] ? R[B[i + 112*t]] * R[C[i + 112*t]] : R[B[i + 112*t]] + R[C[i + 112*t]];
R[i + 588*t] = Op[i + 113*t] ? R[B[i + 113*t]] * R[C[i + 113*t]] : R[B[i + 113*t]] + R[C[i + 113*t]];
R[i + 589*t] = Op[i + 114*t] ? R[B[i + 114*t]] * R[C[i + 114*t]] : R[B[i + 114*t]] + R[C[i + 114*t]];
R[i + 590*t] = Op[i + 115*t] ? R[B[i + 115*t]] * R[C[i + 115*t]] : R[B[i + 115*t]] + R[C[i + 115*t]];
R[i + 591*t] = Op[i + 116*t] ? R[B[i + 116*t]] * R[C[i + 116*t]] : R[B[i + 116*t]] + R[C[i + 116*t]];
R[i + 592*t] = Op[i + 117*t] ? R[B[i + 117*t]] * R[C[i + 117*t]] : R[B[i + 117*t]] + R[C[i + 117*t]];
R[i + 593*t] = Op[i + 118*t] ? R[B[i + 118*t]] * R[C[i + 118*t]] : R[B[i + 118*t]] + R[C[i + 118*t]];
R[i + 594*t] = Op[i + 119*t] ? R[B[i + 119*t]] * R[C[i + 119*t]] : R[B[i + 119*t]] + R[C[i + 119*t]];
R[i + 595*t] = Op[i + 120*t] ? R[B[i + 120*t]] * R[C[i + 120*t]] : R[B[i + 120*t]] + R[C[i + 120*t]];
R[i + 596*t] = Op[i + 121*t] ? R[B[i + 121*t]] * R[C[i + 121*t]] : R[B[i + 121*t]] + R[C[i + 121*t]];
R[i + 597*t] = Op[i + 122*t] ? R[B[i + 122*t]] * R[C[i + 122*t]] : R[B[i + 122*t]] + R[C[i + 122*t]];
R[i + 598*t] = Op[i + 123*t] ? R[B[i + 123*t]] * R[C[i + 123*t]] : R[B[i + 123*t]] + R[C[i + 123*t]];
R[i + 599*t] = Op[i + 124*t] ? R[B[i + 124*t]] * R[C[i + 124*t]] : R[B[i + 124*t]] + R[C[i + 124*t]];
R[i + 600*t] = Op[i + 125*t] ? R[B[i + 125*t]] * R[C[i + 125*t]] : R[B[i + 125*t]] + R[C[i + 125*t]];
R[i + 601*t] = Op[i + 126*t] ? R[B[i + 126*t]] * R[C[i + 126*t]] : R[B[i + 126*t]] + R[C[i + 126*t]];
R[i + 602*t] = Op[i + 127*t] ? R[B[i + 127*t]] * R[C[i + 127*t]] : R[B[i + 127*t]] + R[C[i + 127*t]];
R[i + 603*t] = Op[i + 128*t] ? R[B[i + 128*t]] * R[C[i + 128*t]] : R[B[i + 128*t]] + R[C[i + 128*t]];
R[i + 604*t] = Op[i + 129*t] ? R[B[i + 129*t]] * R[C[i + 129*t]] : R[B[i + 129*t]] + R[C[i + 129*t]];
R[i + 605*t] = Op[i + 130*t] ? R[B[i + 130*t]] * R[C[i + 130*t]] : R[B[i + 130*t]] + R[C[i + 130*t]];
R[i + 606*t] = Op[i + 131*t] ? R[B[i + 131*t]] * R[C[i + 131*t]] : R[B[i + 131*t]] + R[C[i + 131*t]];
R[i + 607*t] = Op[i + 132*t] ? R[B[i + 132*t]] * R[C[i + 132*t]] : R[B[i + 132*t]] + R[C[i + 132*t]];
R[i + 608*t] = Op[i + 133*t] ? R[B[i + 133*t]] * R[C[i + 133*t]] : R[B[i + 133*t]] + R[C[i + 133*t]];
R[i + 609*t] = Op[i + 134*t] ? R[B[i + 134*t]] * R[C[i + 134*t]] : R[B[i + 134*t]] + R[C[i + 134*t]];
R[i + 610*t] = Op[i + 135*t] ? R[B[i + 135*t]] * R[C[i + 135*t]] : R[B[i + 135*t]] + R[C[i + 135*t]];
R[i + 611*t] = Op[i + 136*t] ? R[B[i + 136*t]] * R[C[i + 136*t]] : R[B[i + 136*t]] + R[C[i + 136*t]];
R[i + 612*t] = Op[i + 137*t] ? R[B[i + 137*t]] * R[C[i + 137*t]] : R[B[i + 137*t]] + R[C[i + 137*t]];
R[i + 613*t] = Op[i + 138*t] ? R[B[i + 138*t]] * R[C[i + 138*t]] : R[B[i + 138*t]] + R[C[i + 138*t]];
R[i + 614*t] = Op[i + 139*t] ? R[B[i + 139*t]] * R[C[i + 139*t]] : R[B[i + 139*t]] + R[C[i + 139*t]];
R[i + 615*t] = Op[i + 140*t] ? R[B[i + 140*t]] * R[C[i + 140*t]] : R[B[i + 140*t]] + R[C[i + 140*t]];
R[i + 616*t] = Op[i + 141*t] ? R[B[i + 141*t]] * R[C[i + 141*t]] : R[B[i + 141*t]] + R[C[i + 141*t]];
R[i + 617*t] = Op[i + 142*t] ? R[B[i + 142*t]] * R[C[i + 142*t]] : R[B[i + 142*t]] + R[C[i + 142*t]];
R[i + 618*t] = Op[i + 143*t] ? R[B[i + 143*t]] * R[C[i + 143*t]] : R[B[i + 143*t]] + R[C[i + 143*t]];
R[i + 619*t] = Op[i + 144*t] ? R[B[i + 144*t]] * R[C[i + 144*t]] : R[B[i + 144*t]] + R[C[i + 144*t]];
R[i + 620*t] = Op[i + 145*t] ? R[B[i + 145*t]] * R[C[i + 145*t]] : R[B[i + 145*t]] + R[C[i + 145*t]];
R[i + 621*t] = Op[i + 146*t] ? R[B[i + 146*t]] * R[C[i + 146*t]] : R[B[i + 146*t]] + R[C[i + 146*t]];
R[i + 622*t] = Op[i + 147*t] ? R[B[i + 147*t]] * R[C[i + 147*t]] : R[B[i + 147*t]] + R[C[i + 147*t]];
R[i + 623*t] = Op[i + 148*t] ? R[B[i + 148*t]] * R[C[i + 148*t]] : R[B[i + 148*t]] + R[C[i + 148*t]];
R[i + 624*t] = Op[i + 149*t] ? R[B[i + 149*t]] * R[C[i + 149*t]] : R[B[i + 149*t]] + R[C[i + 149*t]];
R[i + 625*t] = Op[i + 150*t] ? R[B[i + 150*t]] * R[C[i + 150*t]] : R[B[i + 150*t]] + R[C[i + 150*t]];
R[i + 626*t] = Op[i + 151*t] ? R[B[i + 151*t]] * R[C[i + 151*t]] : R[B[i + 151*t]] + R[C[i + 151*t]];
R[i + 627*t] = Op[i + 152*t] ? R[B[i + 152*t]] * R[C[i + 152*t]] : R[B[i + 152*t]] + R[C[i + 152*t]];
R[i + 628*t] = Op[i + 153*t] ? R[B[i + 153*t]] * R[C[i + 153*t]] : R[B[i + 153*t]] + R[C[i + 153*t]];
R[i + 629*t] = Op[i + 154*t] ? R[B[i + 154*t]] * R[C[i + 154*t]] : R[B[i + 154*t]] + R[C[i + 154*t]];
__syncthreads();
R[i + 630*t] = Op[i + 155*t] ? R[B[i + 155*t]] * R[C[i + 155*t]] : R[B[i + 155*t]] + R[C[i + 155*t]];
R[i + 631*t] = Op[i + 156*t] ? R[B[i + 156*t]] * R[C[i + 156*t]] : R[B[i + 156*t]] + R[C[i + 156*t]];
R[i + 632*t] = Op[i + 157*t] ? R[B[i + 157*t]] * R[C[i + 157*t]] : R[B[i + 157*t]] + R[C[i + 157*t]];
R[i + 633*t] = Op[i + 158*t] ? R[B[i + 158*t]] * R[C[i + 158*t]] : R[B[i + 158*t]] + R[C[i + 158*t]];
R[i + 634*t] = Op[i + 159*t] ? R[B[i + 159*t]] * R[C[i + 159*t]] : R[B[i + 159*t]] + R[C[i + 159*t]];
R[i + 635*t] = Op[i + 160*t] ? R[B[i + 160*t]] * R[C[i + 160*t]] : R[B[i + 160*t]] + R[C[i + 160*t]];
R[i + 636*t] = Op[i + 161*t] ? R[B[i + 161*t]] * R[C[i + 161*t]] : R[B[i + 161*t]] + R[C[i + 161*t]];
R[i + 637*t] = Op[i + 162*t] ? R[B[i + 162*t]] * R[C[i + 162*t]] : R[B[i + 162*t]] + R[C[i + 162*t]];
R[i + 638*t] = Op[i + 163*t] ? R[B[i + 163*t]] * R[C[i + 163*t]] : R[B[i + 163*t]] + R[C[i + 163*t]];
R[i + 639*t] = Op[i + 164*t] ? R[B[i + 164*t]] * R[C[i + 164*t]] : R[B[i + 164*t]] + R[C[i + 164*t]];
R[i + 640*t] = Op[i + 165*t] ? R[B[i + 165*t]] * R[C[i + 165*t]] : R[B[i + 165*t]] + R[C[i + 165*t]];
R[i + 641*t] = Op[i + 166*t] ? R[B[i + 166*t]] * R[C[i + 166*t]] : R[B[i + 166*t]] + R[C[i + 166*t]];
R[i + 642*t] = Op[i + 167*t] ? R[B[i + 167*t]] * R[C[i + 167*t]] : R[B[i + 167*t]] + R[C[i + 167*t]];
R[i + 643*t] = Op[i + 168*t] ? R[B[i + 168*t]] * R[C[i + 168*t]] : R[B[i + 168*t]] + R[C[i + 168*t]];
R[i + 644*t] = Op[i + 169*t] ? R[B[i + 169*t]] * R[C[i + 169*t]] : R[B[i + 169*t]] + R[C[i + 169*t]];
R[i + 645*t] = Op[i + 170*t] ? R[B[i + 170*t]] * R[C[i + 170*t]] : R[B[i + 170*t]] + R[C[i + 170*t]];
R[i + 646*t] = Op[i + 171*t] ? R[B[i + 171*t]] * R[C[i + 171*t]] : R[B[i + 171*t]] + R[C[i + 171*t]];
R[i + 647*t] = Op[i + 172*t] ? R[B[i + 172*t]] * R[C[i + 172*t]] : R[B[i + 172*t]] + R[C[i + 172*t]];
R[i + 648*t] = Op[i + 173*t] ? R[B[i + 173*t]] * R[C[i + 173*t]] : R[B[i + 173*t]] + R[C[i + 173*t]];
R[i + 649*t] = Op[i + 174*t] ? R[B[i + 174*t]] * R[C[i + 174*t]] : R[B[i + 174*t]] + R[C[i + 174*t]];
R[i + 650*t] = Op[i + 175*t] ? R[B[i + 175*t]] * R[C[i + 175*t]] : R[B[i + 175*t]] + R[C[i + 175*t]];
R[i + 651*t] = Op[i + 176*t] ? R[B[i + 176*t]] * R[C[i + 176*t]] : R[B[i + 176*t]] + R[C[i + 176*t]];
R[i + 652*t] = Op[i + 177*t] ? R[B[i + 177*t]] * R[C[i + 177*t]] : R[B[i + 177*t]] + R[C[i + 177*t]];
R[i + 653*t] = Op[i + 178*t] ? R[B[i + 178*t]] * R[C[i + 178*t]] : R[B[i + 178*t]] + R[C[i + 178*t]];
R[i + 654*t] = Op[i + 179*t] ? R[B[i + 179*t]] * R[C[i + 179*t]] : R[B[i + 179*t]] + R[C[i + 179*t]];
R[i + 655*t] = Op[i + 180*t] ? R[B[i + 180*t]] * R[C[i + 180*t]] : R[B[i + 180*t]] + R[C[i + 180*t]];
R[i + 656*t] = Op[i + 181*t] ? R[B[i + 181*t]] * R[C[i + 181*t]] : R[B[i + 181*t]] + R[C[i + 181*t]];
R[i + 657*t] = Op[i + 182*t] ? R[B[i + 182*t]] * R[C[i + 182*t]] : R[B[i + 182*t]] + R[C[i + 182*t]];
R[i + 658*t] = Op[i + 183*t] ? R[B[i + 183*t]] * R[C[i + 183*t]] : R[B[i + 183*t]] + R[C[i + 183*t]];
R[i + 659*t] = Op[i + 184*t] ? R[B[i + 184*t]] * R[C[i + 184*t]] : R[B[i + 184*t]] + R[C[i + 184*t]];
R[i + 660*t] = Op[i + 185*t] ? R[B[i + 185*t]] * R[C[i + 185*t]] : R[B[i + 185*t]] + R[C[i + 185*t]];
R[i + 661*t] = Op[i + 186*t] ? R[B[i + 186*t]] * R[C[i + 186*t]] : R[B[i + 186*t]] + R[C[i + 186*t]];
R[i + 662*t] = Op[i + 187*t] ? R[B[i + 187*t]] * R[C[i + 187*t]] : R[B[i + 187*t]] + R[C[i + 187*t]];
R[i + 663*t] = Op[i + 188*t] ? R[B[i + 188*t]] * R[C[i + 188*t]] : R[B[i + 188*t]] + R[C[i + 188*t]];
R[i + 664*t] = Op[i + 189*t] ? R[B[i + 189*t]] * R[C[i + 189*t]] : R[B[i + 189*t]] + R[C[i + 189*t]];
R[i + 665*t] = Op[i + 190*t] ? R[B[i + 190*t]] * R[C[i + 190*t]] : R[B[i + 190*t]] + R[C[i + 190*t]];
R[i + 666*t] = Op[i + 191*t] ? R[B[i + 191*t]] * R[C[i + 191*t]] : R[B[i + 191*t]] + R[C[i + 191*t]];
R[i + 667*t] = Op[i + 192*t] ? R[B[i + 192*t]] * R[C[i + 192*t]] : R[B[i + 192*t]] + R[C[i + 192*t]];
R[i + 668*t] = Op[i + 193*t] ? R[B[i + 193*t]] * R[C[i + 193*t]] : R[B[i + 193*t]] + R[C[i + 193*t]];
R[i + 669*t] = Op[i + 194*t] ? R[B[i + 194*t]] * R[C[i + 194*t]] : R[B[i + 194*t]] + R[C[i + 194*t]];
R[i + 670*t] = Op[i + 195*t] ? R[B[i + 195*t]] * R[C[i + 195*t]] : R[B[i + 195*t]] + R[C[i + 195*t]];
R[i + 671*t] = Op[i + 196*t] ? R[B[i + 196*t]] * R[C[i + 196*t]] : R[B[i + 196*t]] + R[C[i + 196*t]];
R[i + 672*t] = Op[i + 197*t] ? R[B[i + 197*t]] * R[C[i + 197*t]] : R[B[i + 197*t]] + R[C[i + 197*t]];
R[i + 673*t] = Op[i + 198*t] ? R[B[i + 198*t]] * R[C[i + 198*t]] : R[B[i + 198*t]] + R[C[i + 198*t]];
R[i + 674*t] = Op[i + 199*t] ? R[B[i + 199*t]] * R[C[i + 199*t]] : R[B[i + 199*t]] + R[C[i + 199*t]];
R[i + 675*t] = Op[i + 200*t] ? R[B[i + 200*t]] * R[C[i + 200*t]] : R[B[i + 200*t]] + R[C[i + 200*t]];
R[i + 676*t] = Op[i + 201*t] ? R[B[i + 201*t]] * R[C[i + 201*t]] : R[B[i + 201*t]] + R[C[i + 201*t]];
R[i + 677*t] = Op[i + 202*t] ? R[B[i + 202*t]] * R[C[i + 202*t]] : R[B[i + 202*t]] + R[C[i + 202*t]];
R[i + 678*t] = Op[i + 203*t] ? R[B[i + 203*t]] * R[C[i + 203*t]] : R[B[i + 203*t]] + R[C[i + 203*t]];
R[i + 679*t] = Op[i + 204*t] ? R[B[i + 204*t]] * R[C[i + 204*t]] : R[B[i + 204*t]] + R[C[i + 204*t]];
R[i + 680*t] = Op[i + 205*t] ? R[B[i + 205*t]] * R[C[i + 205*t]] : R[B[i + 205*t]] + R[C[i + 205*t]];
R[i + 681*t] = Op[i + 206*t] ? R[B[i + 206*t]] * R[C[i + 206*t]] : R[B[i + 206*t]] + R[C[i + 206*t]];
R[i + 682*t] = Op[i + 207*t] ? R[B[i + 207*t]] * R[C[i + 207*t]] : R[B[i + 207*t]] + R[C[i + 207*t]];
R[i + 683*t] = Op[i + 208*t] ? R[B[i + 208*t]] * R[C[i + 208*t]] : R[B[i + 208*t]] + R[C[i + 208*t]];
R[i + 684*t] = Op[i + 209*t] ? R[B[i + 209*t]] * R[C[i + 209*t]] : R[B[i + 209*t]] + R[C[i + 209*t]];
R[i + 685*t] = Op[i + 210*t] ? R[B[i + 210*t]] * R[C[i + 210*t]] : R[B[i + 210*t]] + R[C[i + 210*t]];
R[i + 686*t] = Op[i + 211*t] ? R[B[i + 211*t]] * R[C[i + 211*t]] : R[B[i + 211*t]] + R[C[i + 211*t]];
R[i + 687*t] = Op[i + 212*t] ? R[B[i + 212*t]] * R[C[i + 212*t]] : R[B[i + 212*t]] + R[C[i + 212*t]];
R[i + 688*t] = Op[i + 213*t] ? R[B[i + 213*t]] * R[C[i + 213*t]] : R[B[i + 213*t]] + R[C[i + 213*t]];
R[i + 689*t] = Op[i + 214*t] ? R[B[i + 214*t]] * R[C[i + 214*t]] : R[B[i + 214*t]] + R[C[i + 214*t]];
R[i + 690*t] = Op[i + 215*t] ? R[B[i + 215*t]] * R[C[i + 215*t]] : R[B[i + 215*t]] + R[C[i + 215*t]];
R[i + 691*t] = Op[i + 216*t] ? R[B[i + 216*t]] * R[C[i + 216*t]] : R[B[i + 216*t]] + R[C[i + 216*t]];
R[i + 692*t] = Op[i + 217*t] ? R[B[i + 217*t]] * R[C[i + 217*t]] : R[B[i + 217*t]] + R[C[i + 217*t]];
R[i + 693*t] = Op[i + 218*t] ? R[B[i + 218*t]] * R[C[i + 218*t]] : R[B[i + 218*t]] + R[C[i + 218*t]];
R[i + 694*t] = Op[i + 219*t] ? R[B[i + 219*t]] * R[C[i + 219*t]] : R[B[i + 219*t]] + R[C[i + 219*t]];
R[i + 695*t] = Op[i + 220*t] ? R[B[i + 220*t]] * R[C[i + 220*t]] : R[B[i + 220*t]] + R[C[i + 220*t]];
R[i + 696*t] = Op[i + 221*t] ? R[B[i + 221*t]] * R[C[i + 221*t]] : R[B[i + 221*t]] + R[C[i + 221*t]];
R[i + 697*t] = Op[i + 222*t] ? R[B[i + 222*t]] * R[C[i + 222*t]] : R[B[i + 222*t]] + R[C[i + 222*t]];
R[i + 698*t] = Op[i + 223*t] ? R[B[i + 223*t]] * R[C[i + 223*t]] : R[B[i + 223*t]] + R[C[i + 223*t]];
R[i + 699*t] = Op[i + 224*t] ? R[B[i + 224*t]] * R[C[i + 224*t]] : R[B[i + 224*t]] + R[C[i + 224*t]];
R[i + 700*t] = Op[i + 225*t] ? R[B[i + 225*t]] * R[C[i + 225*t]] : R[B[i + 225*t]] + R[C[i + 225*t]];
R[i + 701*t] = Op[i + 226*t] ? R[B[i + 226*t]] * R[C[i + 226*t]] : R[B[i + 226*t]] + R[C[i + 226*t]];
R[i + 702*t] = Op[i + 227*t] ? R[B[i + 227*t]] * R[C[i + 227*t]] : R[B[i + 227*t]] + R[C[i + 227*t]];
R[i + 703*t] = Op[i + 228*t] ? R[B[i + 228*t]] * R[C[i + 228*t]] : R[B[i + 228*t]] + R[C[i + 228*t]];
R[i + 704*t] = Op[i + 229*t] ? R[B[i + 229*t]] * R[C[i + 229*t]] : R[B[i + 229*t]] + R[C[i + 229*t]];
R[i + 705*t] = Op[i + 230*t] ? R[B[i + 230*t]] * R[C[i + 230*t]] : R[B[i + 230*t]] + R[C[i + 230*t]];
R[i + 706*t] = Op[i + 231*t] ? R[B[i + 231*t]] * R[C[i + 231*t]] : R[B[i + 231*t]] + R[C[i + 231*t]];
R[i + 707*t] = Op[i + 232*t] ? R[B[i + 232*t]] * R[C[i + 232*t]] : R[B[i + 232*t]] + R[C[i + 232*t]];
R[i + 708*t] = Op[i + 233*t] ? R[B[i + 233*t]] * R[C[i + 233*t]] : R[B[i + 233*t]] + R[C[i + 233*t]];
R[i + 709*t] = Op[i + 234*t] ? R[B[i + 234*t]] * R[C[i + 234*t]] : R[B[i + 234*t]] + R[C[i + 234*t]];
R[i + 710*t] = Op[i + 235*t] ? R[B[i + 235*t]] * R[C[i + 235*t]] : R[B[i + 235*t]] + R[C[i + 235*t]];
R[i + 711*t] = Op[i + 236*t] ? R[B[i + 236*t]] * R[C[i + 236*t]] : R[B[i + 236*t]] + R[C[i + 236*t]];
R[i + 712*t] = Op[i + 237*t] ? R[B[i + 237*t]] * R[C[i + 237*t]] : R[B[i + 237*t]] + R[C[i + 237*t]];
R[i + 713*t] = Op[i + 238*t] ? R[B[i + 238*t]] * R[C[i + 238*t]] : R[B[i + 238*t]] + R[C[i + 238*t]];
R[i + 714*t] = Op[i + 239*t] ? R[B[i + 239*t]] * R[C[i + 239*t]] : R[B[i + 239*t]] + R[C[i + 239*t]];
R[i + 715*t] = Op[i + 240*t] ? R[B[i + 240*t]] * R[C[i + 240*t]] : R[B[i + 240*t]] + R[C[i + 240*t]];
__syncthreads();
R[i + 716*t] = Op[i + 241*t] ? R[B[i + 241*t]] * R[C[i + 241*t]] : R[B[i + 241*t]] + R[C[i + 241*t]];
R[i + 717*t] = Op[i + 242*t] ? R[B[i + 242*t]] * R[C[i + 242*t]] : R[B[i + 242*t]] + R[C[i + 242*t]];
R[i + 718*t] = Op[i + 243*t] ? R[B[i + 243*t]] * R[C[i + 243*t]] : R[B[i + 243*t]] + R[C[i + 243*t]];
R[i + 719*t] = Op[i + 244*t] ? R[B[i + 244*t]] * R[C[i + 244*t]] : R[B[i + 244*t]] + R[C[i + 244*t]];
R[i + 720*t] = Op[i + 245*t] ? R[B[i + 245*t]] * R[C[i + 245*t]] : R[B[i + 245*t]] + R[C[i + 245*t]];
R[i + 721*t] = Op[i + 246*t] ? R[B[i + 246*t]] * R[C[i + 246*t]] : R[B[i + 246*t]] + R[C[i + 246*t]];
R[i + 722*t] = Op[i + 247*t] ? R[B[i + 247*t]] * R[C[i + 247*t]] : R[B[i + 247*t]] + R[C[i + 247*t]];
R[i + 723*t] = Op[i + 248*t] ? R[B[i + 248*t]] * R[C[i + 248*t]] : R[B[i + 248*t]] + R[C[i + 248*t]];
R[i + 724*t] = Op[i + 249*t] ? R[B[i + 249*t]] * R[C[i + 249*t]] : R[B[i + 249*t]] + R[C[i + 249*t]];
R[i + 725*t] = Op[i + 250*t] ? R[B[i + 250*t]] * R[C[i + 250*t]] : R[B[i + 250*t]] + R[C[i + 250*t]];
R[i + 726*t] = Op[i + 251*t] ? R[B[i + 251*t]] * R[C[i + 251*t]] : R[B[i + 251*t]] + R[C[i + 251*t]];
R[i + 727*t] = Op[i + 252*t] ? R[B[i + 252*t]] * R[C[i + 252*t]] : R[B[i + 252*t]] + R[C[i + 252*t]];
R[i + 728*t] = Op[i + 253*t] ? R[B[i + 253*t]] * R[C[i + 253*t]] : R[B[i + 253*t]] + R[C[i + 253*t]];
R[i + 729*t] = Op[i + 254*t] ? R[B[i + 254*t]] * R[C[i + 254*t]] : R[B[i + 254*t]] + R[C[i + 254*t]];
R[i + 730*t] = Op[i + 255*t] ? R[B[i + 255*t]] * R[C[i + 255*t]] : R[B[i + 255*t]] + R[C[i + 255*t]];
R[i + 731*t] = Op[i + 256*t] ? R[B[i + 256*t]] * R[C[i + 256*t]] : R[B[i + 256*t]] + R[C[i + 256*t]];
R[i + 732*t] = Op[i + 257*t] ? R[B[i + 257*t]] * R[C[i + 257*t]] : R[B[i + 257*t]] + R[C[i + 257*t]];
R[i + 733*t] = Op[i + 258*t] ? R[B[i + 258*t]] * R[C[i + 258*t]] : R[B[i + 258*t]] + R[C[i + 258*t]];
R[i + 734*t] = Op[i + 259*t] ? R[B[i + 259*t]] * R[C[i + 259*t]] : R[B[i + 259*t]] + R[C[i + 259*t]];
R[i + 735*t] = Op[i + 260*t] ? R[B[i + 260*t]] * R[C[i + 260*t]] : R[B[i + 260*t]] + R[C[i + 260*t]];
R[i + 736*t] = Op[i + 261*t] ? R[B[i + 261*t]] * R[C[i + 261*t]] : R[B[i + 261*t]] + R[C[i + 261*t]];
R[i + 737*t] = Op[i + 262*t] ? R[B[i + 262*t]] * R[C[i + 262*t]] : R[B[i + 262*t]] + R[C[i + 262*t]];
R[i + 738*t] = Op[i + 263*t] ? R[B[i + 263*t]] * R[C[i + 263*t]] : R[B[i + 263*t]] + R[C[i + 263*t]];
R[i + 739*t] = Op[i + 264*t] ? R[B[i + 264*t]] * R[C[i + 264*t]] : R[B[i + 264*t]] + R[C[i + 264*t]];
R[i + 740*t] = Op[i + 265*t] ? R[B[i + 265*t]] * R[C[i + 265*t]] : R[B[i + 265*t]] + R[C[i + 265*t]];
R[i + 741*t] = Op[i + 266*t] ? R[B[i + 266*t]] * R[C[i + 266*t]] : R[B[i + 266*t]] + R[C[i + 266*t]];
R[i + 742*t] = Op[i + 267*t] ? R[B[i + 267*t]] * R[C[i + 267*t]] : R[B[i + 267*t]] + R[C[i + 267*t]];
R[i + 743*t] = Op[i + 268*t] ? R[B[i + 268*t]] * R[C[i + 268*t]] : R[B[i + 268*t]] + R[C[i + 268*t]];
R[i + 744*t] = Op[i + 269*t] ? R[B[i + 269*t]] * R[C[i + 269*t]] : R[B[i + 269*t]] + R[C[i + 269*t]];
R[i + 745*t] = Op[i + 270*t] ? R[B[i + 270*t]] * R[C[i + 270*t]] : R[B[i + 270*t]] + R[C[i + 270*t]];
R[i + 746*t] = Op[i + 271*t] ? R[B[i + 271*t]] * R[C[i + 271*t]] : R[B[i + 271*t]] + R[C[i + 271*t]];
R[i + 747*t] = Op[i + 272*t] ? R[B[i + 272*t]] * R[C[i + 272*t]] : R[B[i + 272*t]] + R[C[i + 272*t]];
R[i + 748*t] = Op[i + 273*t] ? R[B[i + 273*t]] * R[C[i + 273*t]] : R[B[i + 273*t]] + R[C[i + 273*t]];
R[i + 749*t] = Op[i + 274*t] ? R[B[i + 274*t]] * R[C[i + 274*t]] : R[B[i + 274*t]] + R[C[i + 274*t]];
R[i + 750*t] = Op[i + 275*t] ? R[B[i + 275*t]] * R[C[i + 275*t]] : R[B[i + 275*t]] + R[C[i + 275*t]];
R[i + 751*t] = Op[i + 276*t] ? R[B[i + 276*t]] * R[C[i + 276*t]] : R[B[i + 276*t]] + R[C[i + 276*t]];
R[i + 752*t] = Op[i + 277*t] ? R[B[i + 277*t]] * R[C[i + 277*t]] : R[B[i + 277*t]] + R[C[i + 277*t]];
R[i + 753*t] = Op[i + 278*t] ? R[B[i + 278*t]] * R[C[i + 278*t]] : R[B[i + 278*t]] + R[C[i + 278*t]];
R[i + 754*t] = Op[i + 279*t] ? R[B[i + 279*t]] * R[C[i + 279*t]] : R[B[i + 279*t]] + R[C[i + 279*t]];
R[i + 755*t] = Op[i + 280*t] ? R[B[i + 280*t]] * R[C[i + 280*t]] : R[B[i + 280*t]] + R[C[i + 280*t]];
R[i + 756*t] = Op[i + 281*t] ? R[B[i + 281*t]] * R[C[i + 281*t]] : R[B[i + 281*t]] + R[C[i + 281*t]];
R[i + 757*t] = Op[i + 282*t] ? R[B[i + 282*t]] * R[C[i + 282*t]] : R[B[i + 282*t]] + R[C[i + 282*t]];
R[i + 758*t] = Op[i + 283*t] ? R[B[i + 283*t]] * R[C[i + 283*t]] : R[B[i + 283*t]] + R[C[i + 283*t]];
R[i + 759*t] = Op[i + 284*t] ? R[B[i + 284*t]] * R[C[i + 284*t]] : R[B[i + 284*t]] + R[C[i + 284*t]];
R[i + 760*t] = Op[i + 285*t] ? R[B[i + 285*t]] * R[C[i + 285*t]] : R[B[i + 285*t]] + R[C[i + 285*t]];
R[i + 761*t] = Op[i + 286*t] ? R[B[i + 286*t]] * R[C[i + 286*t]] : R[B[i + 286*t]] + R[C[i + 286*t]];
R[i + 762*t] = Op[i + 287*t] ? R[B[i + 287*t]] * R[C[i + 287*t]] : R[B[i + 287*t]] + R[C[i + 287*t]];
R[i + 763*t] = Op[i + 288*t] ? R[B[i + 288*t]] * R[C[i + 288*t]] : R[B[i + 288*t]] + R[C[i + 288*t]];
R[i + 764*t] = Op[i + 289*t] ? R[B[i + 289*t]] * R[C[i + 289*t]] : R[B[i + 289*t]] + R[C[i + 289*t]];
R[i + 765*t] = Op[i + 290*t] ? R[B[i + 290*t]] * R[C[i + 290*t]] : R[B[i + 290*t]] + R[C[i + 290*t]];
R[i + 766*t] = Op[i + 291*t] ? R[B[i + 291*t]] * R[C[i + 291*t]] : R[B[i + 291*t]] + R[C[i + 291*t]];
R[i + 767*t] = Op[i + 292*t] ? R[B[i + 292*t]] * R[C[i + 292*t]] : R[B[i + 292*t]] + R[C[i + 292*t]];
R[i + 768*t] = Op[i + 293*t] ? R[B[i + 293*t]] * R[C[i + 293*t]] : R[B[i + 293*t]] + R[C[i + 293*t]];
R[i + 769*t] = Op[i + 294*t] ? R[B[i + 294*t]] * R[C[i + 294*t]] : R[B[i + 294*t]] + R[C[i + 294*t]];
R[i + 770*t] = Op[i + 295*t] ? R[B[i + 295*t]] * R[C[i + 295*t]] : R[B[i + 295*t]] + R[C[i + 295*t]];
R[i + 771*t] = Op[i + 296*t] ? R[B[i + 296*t]] * R[C[i + 296*t]] : R[B[i + 296*t]] + R[C[i + 296*t]];
R[i + 772*t] = Op[i + 297*t] ? R[B[i + 297*t]] * R[C[i + 297*t]] : R[B[i + 297*t]] + R[C[i + 297*t]];
R[i + 773*t] = Op[i + 298*t] ? R[B[i + 298*t]] * R[C[i + 298*t]] : R[B[i + 298*t]] + R[C[i + 298*t]];
R[i + 774*t] = Op[i + 299*t] ? R[B[i + 299*t]] * R[C[i + 299*t]] : R[B[i + 299*t]] + R[C[i + 299*t]];
R[i + 775*t] = Op[i + 300*t] ? R[B[i + 300*t]] * R[C[i + 300*t]] : R[B[i + 300*t]] + R[C[i + 300*t]];
R[i + 776*t] = Op[i + 301*t] ? R[B[i + 301*t]] * R[C[i + 301*t]] : R[B[i + 301*t]] + R[C[i + 301*t]];
R[i + 777*t] = Op[i + 302*t] ? R[B[i + 302*t]] * R[C[i + 302*t]] : R[B[i + 302*t]] + R[C[i + 302*t]];
R[i + 778*t] = Op[i + 303*t] ? R[B[i + 303*t]] * R[C[i + 303*t]] : R[B[i + 303*t]] + R[C[i + 303*t]];
R[i + 779*t] = Op[i + 304*t] ? R[B[i + 304*t]] * R[C[i + 304*t]] : R[B[i + 304*t]] + R[C[i + 304*t]];
R[i + 780*t] = Op[i + 305*t] ? R[B[i + 305*t]] * R[C[i + 305*t]] : R[B[i + 305*t]] + R[C[i + 305*t]];
R[i + 781*t] = Op[i + 306*t] ? R[B[i + 306*t]] * R[C[i + 306*t]] : R[B[i + 306*t]] + R[C[i + 306*t]];
R[i + 782*t] = Op[i + 307*t] ? R[B[i + 307*t]] * R[C[i + 307*t]] : R[B[i + 307*t]] + R[C[i + 307*t]];
R[i + 783*t] = Op[i + 308*t] ? R[B[i + 308*t]] * R[C[i + 308*t]] : R[B[i + 308*t]] + R[C[i + 308*t]];
R[i + 784*t] = Op[i + 309*t] ? R[B[i + 309*t]] * R[C[i + 309*t]] : R[B[i + 309*t]] + R[C[i + 309*t]];
R[i + 785*t] = Op[i + 310*t] ? R[B[i + 310*t]] * R[C[i + 310*t]] : R[B[i + 310*t]] + R[C[i + 310*t]];
R[i + 786*t] = Op[i + 311*t] ? R[B[i + 311*t]] * R[C[i + 311*t]] : R[B[i + 311*t]] + R[C[i + 311*t]];
R[i + 787*t] = Op[i + 312*t] ? R[B[i + 312*t]] * R[C[i + 312*t]] : R[B[i + 312*t]] + R[C[i + 312*t]];
R[i + 788*t] = Op[i + 313*t] ? R[B[i + 313*t]] * R[C[i + 313*t]] : R[B[i + 313*t]] + R[C[i + 313*t]];
R[i + 789*t] = Op[i + 314*t] ? R[B[i + 314*t]] * R[C[i + 314*t]] : R[B[i + 314*t]] + R[C[i + 314*t]];
R[i + 790*t] = Op[i + 315*t] ? R[B[i + 315*t]] * R[C[i + 315*t]] : R[B[i + 315*t]] + R[C[i + 315*t]];
R[i + 791*t] = Op[i + 316*t] ? R[B[i + 316*t]] * R[C[i + 316*t]] : R[B[i + 316*t]] + R[C[i + 316*t]];
R[i + 792*t] = Op[i + 317*t] ? R[B[i + 317*t]] * R[C[i + 317*t]] : R[B[i + 317*t]] + R[C[i + 317*t]];
R[i + 793*t] = Op[i + 318*t] ? R[B[i + 318*t]] * R[C[i + 318*t]] : R[B[i + 318*t]] + R[C[i + 318*t]];
R[i + 794*t] = Op[i + 319*t] ? R[B[i + 319*t]] * R[C[i + 319*t]] : R[B[i + 319*t]] + R[C[i + 319*t]];
R[i + 795*t] = Op[i + 320*t] ? R[B[i + 320*t]] * R[C[i + 320*t]] : R[B[i + 320*t]] + R[C[i + 320*t]];
R[i + 796*t] = Op[i + 321*t] ? R[B[i + 321*t]] * R[C[i + 321*t]] : R[B[i + 321*t]] + R[C[i + 321*t]];
R[i + 797*t] = Op[i + 322*t] ? R[B[i + 322*t]] * R[C[i + 322*t]] : R[B[i + 322*t]] + R[C[i + 322*t]];
R[i + 798*t] = Op[i + 323*t] ? R[B[i + 323*t]] * R[C[i + 323*t]] : R[B[i + 323*t]] + R[C[i + 323*t]];
R[i + 799*t] = Op[i + 324*t] ? R[B[i + 324*t]] * R[C[i + 324*t]] : R[B[i + 324*t]] + R[C[i + 324*t]];
R[i + 800*t] = Op[i + 325*t] ? R[B[i + 325*t]] * R[C[i + 325*t]] : R[B[i + 325*t]] + R[C[i + 325*t]];
R[i + 801*t] = Op[i + 326*t] ? R[B[i + 326*t]] * R[C[i + 326*t]] : R[B[i + 326*t]] + R[C[i + 326*t]];
R[i + 802*t] = Op[i + 327*t] ? R[B[i + 327*t]] * R[C[i + 327*t]] : R[B[i + 327*t]] + R[C[i + 327*t]];
R[i + 803*t] = Op[i + 328*t] ? R[B[i + 328*t]] * R[C[i + 328*t]] : R[B[i + 328*t]] + R[C[i + 328*t]];
R[i + 804*t] = Op[i + 329*t] ? R[B[i + 329*t]] * R[C[i + 329*t]] : R[B[i + 329*t]] + R[C[i + 329*t]];
R[i + 805*t] = Op[i + 330*t] ? R[B[i + 330*t]] * R[C[i + 330*t]] : R[B[i + 330*t]] + R[C[i + 330*t]];
R[i + 806*t] = Op[i + 331*t] ? R[B[i + 331*t]] * R[C[i + 331*t]] : R[B[i + 331*t]] + R[C[i + 331*t]];
R[i + 807*t] = Op[i + 332*t] ? R[B[i + 332*t]] * R[C[i + 332*t]] : R[B[i + 332*t]] + R[C[i + 332*t]];
R[i + 808*t] = Op[i + 333*t] ? R[B[i + 333*t]] * R[C[i + 333*t]] : R[B[i + 333*t]] + R[C[i + 333*t]];
R[i + 809*t] = Op[i + 334*t] ? R[B[i + 334*t]] * R[C[i + 334*t]] : R[B[i + 334*t]] + R[C[i + 334*t]];
R[i + 810*t] = Op[i + 335*t] ? R[B[i + 335*t]] * R[C[i + 335*t]] : R[B[i + 335*t]] + R[C[i + 335*t]];
R[i + 811*t] = Op[i + 336*t] ? R[B[i + 336*t]] * R[C[i + 336*t]] : R[B[i + 336*t]] + R[C[i + 336*t]];
R[i + 812*t] = Op[i + 337*t] ? R[B[i + 337*t]] * R[C[i + 337*t]] : R[B[i + 337*t]] + R[C[i + 337*t]];
R[i + 813*t] = Op[i + 338*t] ? R[B[i + 338*t]] * R[C[i + 338*t]] : R[B[i + 338*t]] + R[C[i + 338*t]];
R[i + 814*t] = Op[i + 339*t] ? R[B[i + 339*t]] * R[C[i + 339*t]] : R[B[i + 339*t]] + R[C[i + 339*t]];
R[i + 815*t] = Op[i + 340*t] ? R[B[i + 340*t]] * R[C[i + 340*t]] : R[B[i + 340*t]] + R[C[i + 340*t]];
R[i + 816*t] = Op[i + 341*t] ? R[B[i + 341*t]] * R[C[i + 341*t]] : R[B[i + 341*t]] + R[C[i + 341*t]];
R[i + 817*t] = Op[i + 342*t] ? R[B[i + 342*t]] * R[C[i + 342*t]] : R[B[i + 342*t]] + R[C[i + 342*t]];
R[i + 818*t] = Op[i + 343*t] ? R[B[i + 343*t]] * R[C[i + 343*t]] : R[B[i + 343*t]] + R[C[i + 343*t]];
R[i + 819*t] = Op[i + 344*t] ? R[B[i + 344*t]] * R[C[i + 344*t]] : R[B[i + 344*t]] + R[C[i + 344*t]];
R[i + 820*t] = Op[i + 345*t] ? R[B[i + 345*t]] * R[C[i + 345*t]] : R[B[i + 345*t]] + R[C[i + 345*t]];
R[i + 821*t] = Op[i + 346*t] ? R[B[i + 346*t]] * R[C[i + 346*t]] : R[B[i + 346*t]] + R[C[i + 346*t]];
R[i + 822*t] = Op[i + 347*t] ? R[B[i + 347*t]] * R[C[i + 347*t]] : R[B[i + 347*t]] + R[C[i + 347*t]];
R[i + 823*t] = Op[i + 348*t] ? R[B[i + 348*t]] * R[C[i + 348*t]] : R[B[i + 348*t]] + R[C[i + 348*t]];
R[i + 824*t] = Op[i + 349*t] ? R[B[i + 349*t]] * R[C[i + 349*t]] : R[B[i + 349*t]] + R[C[i + 349*t]];
R[i + 825*t] = Op[i + 350*t] ? R[B[i + 350*t]] * R[C[i + 350*t]] : R[B[i + 350*t]] + R[C[i + 350*t]];
R[i + 826*t] = Op[i + 351*t] ? R[B[i + 351*t]] * R[C[i + 351*t]] : R[B[i + 351*t]] + R[C[i + 351*t]];
R[i + 827*t] = Op[i + 352*t] ? R[B[i + 352*t]] * R[C[i + 352*t]] : R[B[i + 352*t]] + R[C[i + 352*t]];
R[i + 828*t] = Op[i + 353*t] ? R[B[i + 353*t]] * R[C[i + 353*t]] : R[B[i + 353*t]] + R[C[i + 353*t]];
R[i + 829*t] = Op[i + 354*t] ? R[B[i + 354*t]] * R[C[i + 354*t]] : R[B[i + 354*t]] + R[C[i + 354*t]];
R[i + 830*t] = Op[i + 355*t] ? R[B[i + 355*t]] * R[C[i + 355*t]] : R[B[i + 355*t]] + R[C[i + 355*t]];
__syncthreads();
R[i + 831*t] = Op[i + 356*t] ? R[B[i + 356*t]] * R[C[i + 356*t]] : R[B[i + 356*t]] + R[C[i + 356*t]];
R[i + 832*t] = Op[i + 357*t] ? R[B[i + 357*t]] * R[C[i + 357*t]] : R[B[i + 357*t]] + R[C[i + 357*t]];
R[i + 833*t] = Op[i + 358*t] ? R[B[i + 358*t]] * R[C[i + 358*t]] : R[B[i + 358*t]] + R[C[i + 358*t]];
R[i + 834*t] = Op[i + 359*t] ? R[B[i + 359*t]] * R[C[i + 359*t]] : R[B[i + 359*t]] + R[C[i + 359*t]];
R[i + 835*t] = Op[i + 360*t] ? R[B[i + 360*t]] * R[C[i + 360*t]] : R[B[i + 360*t]] + R[C[i + 360*t]];
R[i + 836*t] = Op[i + 361*t] ? R[B[i + 361*t]] * R[C[i + 361*t]] : R[B[i + 361*t]] + R[C[i + 361*t]];
R[i + 837*t] = Op[i + 362*t] ? R[B[i + 362*t]] * R[C[i + 362*t]] : R[B[i + 362*t]] + R[C[i + 362*t]];
R[i + 838*t] = Op[i + 363*t] ? R[B[i + 363*t]] * R[C[i + 363*t]] : R[B[i + 363*t]] + R[C[i + 363*t]];
R[i + 839*t] = Op[i + 364*t] ? R[B[i + 364*t]] * R[C[i + 364*t]] : R[B[i + 364*t]] + R[C[i + 364*t]];
R[i + 840*t] = Op[i + 365*t] ? R[B[i + 365*t]] * R[C[i + 365*t]] : R[B[i + 365*t]] + R[C[i + 365*t]];
R[i + 841*t] = Op[i + 366*t] ? R[B[i + 366*t]] * R[C[i + 366*t]] : R[B[i + 366*t]] + R[C[i + 366*t]];
R[i + 842*t] = Op[i + 367*t] ? R[B[i + 367*t]] * R[C[i + 367*t]] : R[B[i + 367*t]] + R[C[i + 367*t]];
R[i + 843*t] = Op[i + 368*t] ? R[B[i + 368*t]] * R[C[i + 368*t]] : R[B[i + 368*t]] + R[C[i + 368*t]];
R[i + 844*t] = Op[i + 369*t] ? R[B[i + 369*t]] * R[C[i + 369*t]] : R[B[i + 369*t]] + R[C[i + 369*t]];
R[i + 845*t] = Op[i + 370*t] ? R[B[i + 370*t]] * R[C[i + 370*t]] : R[B[i + 370*t]] + R[C[i + 370*t]];
R[i + 846*t] = Op[i + 371*t] ? R[B[i + 371*t]] * R[C[i + 371*t]] : R[B[i + 371*t]] + R[C[i + 371*t]];
R[i + 847*t] = Op[i + 372*t] ? R[B[i + 372*t]] * R[C[i + 372*t]] : R[B[i + 372*t]] + R[C[i + 372*t]];
R[i + 848*t] = Op[i + 373*t] ? R[B[i + 373*t]] * R[C[i + 373*t]] : R[B[i + 373*t]] + R[C[i + 373*t]];
R[i + 849*t] = Op[i + 374*t] ? R[B[i + 374*t]] * R[C[i + 374*t]] : R[B[i + 374*t]] + R[C[i + 374*t]];
R[i + 850*t] = Op[i + 375*t] ? R[B[i + 375*t]] * R[C[i + 375*t]] : R[B[i + 375*t]] + R[C[i + 375*t]];
R[i + 851*t] = Op[i + 376*t] ? R[B[i + 376*t]] * R[C[i + 376*t]] : R[B[i + 376*t]] + R[C[i + 376*t]];
R[i + 852*t] = Op[i + 377*t] ? R[B[i + 377*t]] * R[C[i + 377*t]] : R[B[i + 377*t]] + R[C[i + 377*t]];
R[i + 853*t] = Op[i + 378*t] ? R[B[i + 378*t]] * R[C[i + 378*t]] : R[B[i + 378*t]] + R[C[i + 378*t]];
R[i + 854*t] = Op[i + 379*t] ? R[B[i + 379*t]] * R[C[i + 379*t]] : R[B[i + 379*t]] + R[C[i + 379*t]];
R[i + 855*t] = Op[i + 380*t] ? R[B[i + 380*t]] * R[C[i + 380*t]] : R[B[i + 380*t]] + R[C[i + 380*t]];
R[i + 856*t] = Op[i + 381*t] ? R[B[i + 381*t]] * R[C[i + 381*t]] : R[B[i + 381*t]] + R[C[i + 381*t]];
R[i + 857*t] = Op[i + 382*t] ? R[B[i + 382*t]] * R[C[i + 382*t]] : R[B[i + 382*t]] + R[C[i + 382*t]];
R[i + 858*t] = Op[i + 383*t] ? R[B[i + 383*t]] * R[C[i + 383*t]] : R[B[i + 383*t]] + R[C[i + 383*t]];
R[i + 859*t] = Op[i + 384*t] ? R[B[i + 384*t]] * R[C[i + 384*t]] : R[B[i + 384*t]] + R[C[i + 384*t]];
R[i + 860*t] = Op[i + 385*t] ? R[B[i + 385*t]] * R[C[i + 385*t]] : R[B[i + 385*t]] + R[C[i + 385*t]];
R[i + 861*t] = Op[i + 386*t] ? R[B[i + 386*t]] * R[C[i + 386*t]] : R[B[i + 386*t]] + R[C[i + 386*t]];
R[i + 862*t] = Op[i + 387*t] ? R[B[i + 387*t]] * R[C[i + 387*t]] : R[B[i + 387*t]] + R[C[i + 387*t]];
R[i + 863*t] = Op[i + 388*t] ? R[B[i + 388*t]] * R[C[i + 388*t]] : R[B[i + 388*t]] + R[C[i + 388*t]];
R[i + 864*t] = Op[i + 389*t] ? R[B[i + 389*t]] * R[C[i + 389*t]] : R[B[i + 389*t]] + R[C[i + 389*t]];
R[i + 865*t] = Op[i + 390*t] ? R[B[i + 390*t]] * R[C[i + 390*t]] : R[B[i + 390*t]] + R[C[i + 390*t]];
R[i + 866*t] = Op[i + 391*t] ? R[B[i + 391*t]] * R[C[i + 391*t]] : R[B[i + 391*t]] + R[C[i + 391*t]];
R[i + 867*t] = Op[i + 392*t] ? R[B[i + 392*t]] * R[C[i + 392*t]] : R[B[i + 392*t]] + R[C[i + 392*t]];
R[i + 868*t] = Op[i + 393*t] ? R[B[i + 393*t]] * R[C[i + 393*t]] : R[B[i + 393*t]] + R[C[i + 393*t]];
R[i + 869*t] = Op[i + 394*t] ? R[B[i + 394*t]] * R[C[i + 394*t]] : R[B[i + 394*t]] + R[C[i + 394*t]];
R[i + 870*t] = Op[i + 395*t] ? R[B[i + 395*t]] * R[C[i + 395*t]] : R[B[i + 395*t]] + R[C[i + 395*t]];
R[i + 871*t] = Op[i + 396*t] ? R[B[i + 396*t]] * R[C[i + 396*t]] : R[B[i + 396*t]] + R[C[i + 396*t]];
R[i + 872*t] = Op[i + 397*t] ? R[B[i + 397*t]] * R[C[i + 397*t]] : R[B[i + 397*t]] + R[C[i + 397*t]];
R[i + 873*t] = Op[i + 398*t] ? R[B[i + 398*t]] * R[C[i + 398*t]] : R[B[i + 398*t]] + R[C[i + 398*t]];
R[i + 874*t] = Op[i + 399*t] ? R[B[i + 399*t]] * R[C[i + 399*t]] : R[B[i + 399*t]] + R[C[i + 399*t]];
R[i + 875*t] = Op[i + 400*t] ? R[B[i + 400*t]] * R[C[i + 400*t]] : R[B[i + 400*t]] + R[C[i + 400*t]];
R[i + 876*t] = Op[i + 401*t] ? R[B[i + 401*t]] * R[C[i + 401*t]] : R[B[i + 401*t]] + R[C[i + 401*t]];
R[i + 877*t] = Op[i + 402*t] ? R[B[i + 402*t]] * R[C[i + 402*t]] : R[B[i + 402*t]] + R[C[i + 402*t]];
R[i + 878*t] = Op[i + 403*t] ? R[B[i + 403*t]] * R[C[i + 403*t]] : R[B[i + 403*t]] + R[C[i + 403*t]];
R[i + 879*t] = Op[i + 404*t] ? R[B[i + 404*t]] * R[C[i + 404*t]] : R[B[i + 404*t]] + R[C[i + 404*t]];
R[i + 880*t] = Op[i + 405*t] ? R[B[i + 405*t]] * R[C[i + 405*t]] : R[B[i + 405*t]] + R[C[i + 405*t]];
R[i + 881*t] = Op[i + 406*t] ? R[B[i + 406*t]] * R[C[i + 406*t]] : R[B[i + 406*t]] + R[C[i + 406*t]];
R[i + 882*t] = Op[i + 407*t] ? R[B[i + 407*t]] * R[C[i + 407*t]] : R[B[i + 407*t]] + R[C[i + 407*t]];
R[i + 883*t] = Op[i + 408*t] ? R[B[i + 408*t]] * R[C[i + 408*t]] : R[B[i + 408*t]] + R[C[i + 408*t]];
R[i + 884*t] = Op[i + 409*t] ? R[B[i + 409*t]] * R[C[i + 409*t]] : R[B[i + 409*t]] + R[C[i + 409*t]];
R[i + 885*t] = Op[i + 410*t] ? R[B[i + 410*t]] * R[C[i + 410*t]] : R[B[i + 410*t]] + R[C[i + 410*t]];
R[i + 886*t] = Op[i + 411*t] ? R[B[i + 411*t]] * R[C[i + 411*t]] : R[B[i + 411*t]] + R[C[i + 411*t]];
R[i + 887*t] = Op[i + 412*t] ? R[B[i + 412*t]] * R[C[i + 412*t]] : R[B[i + 412*t]] + R[C[i + 412*t]];
R[i + 888*t] = Op[i + 413*t] ? R[B[i + 413*t]] * R[C[i + 413*t]] : R[B[i + 413*t]] + R[C[i + 413*t]];
R[i + 889*t] = Op[i + 414*t] ? R[B[i + 414*t]] * R[C[i + 414*t]] : R[B[i + 414*t]] + R[C[i + 414*t]];
R[i + 890*t] = Op[i + 415*t] ? R[B[i + 415*t]] * R[C[i + 415*t]] : R[B[i + 415*t]] + R[C[i + 415*t]];
R[i + 891*t] = Op[i + 416*t] ? R[B[i + 416*t]] * R[C[i + 416*t]] : R[B[i + 416*t]] + R[C[i + 416*t]];
R[i + 892*t] = Op[i + 417*t] ? R[B[i + 417*t]] * R[C[i + 417*t]] : R[B[i + 417*t]] + R[C[i + 417*t]];
R[i + 893*t] = Op[i + 418*t] ? R[B[i + 418*t]] * R[C[i + 418*t]] : R[B[i + 418*t]] + R[C[i + 418*t]];
R[i + 894*t] = Op[i + 419*t] ? R[B[i + 419*t]] * R[C[i + 419*t]] : R[B[i + 419*t]] + R[C[i + 419*t]];
R[i + 895*t] = Op[i + 420*t] ? R[B[i + 420*t]] * R[C[i + 420*t]] : R[B[i + 420*t]] + R[C[i + 420*t]];
R[i + 896*t] = Op[i + 421*t] ? R[B[i + 421*t]] * R[C[i + 421*t]] : R[B[i + 421*t]] + R[C[i + 421*t]];
R[i + 897*t] = Op[i + 422*t] ? R[B[i + 422*t]] * R[C[i + 422*t]] : R[B[i + 422*t]] + R[C[i + 422*t]];
R[i + 898*t] = Op[i + 423*t] ? R[B[i + 423*t]] * R[C[i + 423*t]] : R[B[i + 423*t]] + R[C[i + 423*t]];
R[i + 899*t] = Op[i + 424*t] ? R[B[i + 424*t]] * R[C[i + 424*t]] : R[B[i + 424*t]] + R[C[i + 424*t]];
R[i + 900*t] = Op[i + 425*t] ? R[B[i + 425*t]] * R[C[i + 425*t]] : R[B[i + 425*t]] + R[C[i + 425*t]];
R[i + 901*t] = Op[i + 426*t] ? R[B[i + 426*t]] * R[C[i + 426*t]] : R[B[i + 426*t]] + R[C[i + 426*t]];
R[i + 902*t] = Op[i + 427*t] ? R[B[i + 427*t]] * R[C[i + 427*t]] : R[B[i + 427*t]] + R[C[i + 427*t]];
R[i + 903*t] = Op[i + 428*t] ? R[B[i + 428*t]] * R[C[i + 428*t]] : R[B[i + 428*t]] + R[C[i + 428*t]];
R[i + 904*t] = Op[i + 429*t] ? R[B[i + 429*t]] * R[C[i + 429*t]] : R[B[i + 429*t]] + R[C[i + 429*t]];
R[i + 905*t] = Op[i + 430*t] ? R[B[i + 430*t]] * R[C[i + 430*t]] : R[B[i + 430*t]] + R[C[i + 430*t]];
R[i + 906*t] = Op[i + 431*t] ? R[B[i + 431*t]] * R[C[i + 431*t]] : R[B[i + 431*t]] + R[C[i + 431*t]];
R[i + 907*t] = Op[i + 432*t] ? R[B[i + 432*t]] * R[C[i + 432*t]] : R[B[i + 432*t]] + R[C[i + 432*t]];
R[i + 908*t] = Op[i + 433*t] ? R[B[i + 433*t]] * R[C[i + 433*t]] : R[B[i + 433*t]] + R[C[i + 433*t]];
R[i + 909*t] = Op[i + 434*t] ? R[B[i + 434*t]] * R[C[i + 434*t]] : R[B[i + 434*t]] + R[C[i + 434*t]];
R[i + 910*t] = Op[i + 435*t] ? R[B[i + 435*t]] * R[C[i + 435*t]] : R[B[i + 435*t]] + R[C[i + 435*t]];
R[i + 911*t] = Op[i + 436*t] ? R[B[i + 436*t]] * R[C[i + 436*t]] : R[B[i + 436*t]] + R[C[i + 436*t]];
R[i + 912*t] = Op[i + 437*t] ? R[B[i + 437*t]] * R[C[i + 437*t]] : R[B[i + 437*t]] + R[C[i + 437*t]];
R[i + 913*t] = Op[i + 438*t] ? R[B[i + 438*t]] * R[C[i + 438*t]] : R[B[i + 438*t]] + R[C[i + 438*t]];
R[i + 914*t] = Op[i + 439*t] ? R[B[i + 439*t]] * R[C[i + 439*t]] : R[B[i + 439*t]] + R[C[i + 439*t]];
R[i + 915*t] = Op[i + 440*t] ? R[B[i + 440*t]] * R[C[i + 440*t]] : R[B[i + 440*t]] + R[C[i + 440*t]];
R[i + 916*t] = Op[i + 441*t] ? R[B[i + 441*t]] * R[C[i + 441*t]] : R[B[i + 441*t]] + R[C[i + 441*t]];
R[i + 917*t] = Op[i + 442*t] ? R[B[i + 442*t]] * R[C[i + 442*t]] : R[B[i + 442*t]] + R[C[i + 442*t]];
R[i + 918*t] = Op[i + 443*t] ? R[B[i + 443*t]] * R[C[i + 443*t]] : R[B[i + 443*t]] + R[C[i + 443*t]];
R[i + 919*t] = Op[i + 444*t] ? R[B[i + 444*t]] * R[C[i + 444*t]] : R[B[i + 444*t]] + R[C[i + 444*t]];
R[i + 920*t] = Op[i + 445*t] ? R[B[i + 445*t]] * R[C[i + 445*t]] : R[B[i + 445*t]] + R[C[i + 445*t]];
R[i + 921*t] = Op[i + 446*t] ? R[B[i + 446*t]] * R[C[i + 446*t]] : R[B[i + 446*t]] + R[C[i + 446*t]];
R[i + 922*t] = Op[i + 447*t] ? R[B[i + 447*t]] * R[C[i + 447*t]] : R[B[i + 447*t]] + R[C[i + 447*t]];
R[i + 923*t] = Op[i + 448*t] ? R[B[i + 448*t]] * R[C[i + 448*t]] : R[B[i + 448*t]] + R[C[i + 448*t]];
R[i + 924*t] = Op[i + 449*t] ? R[B[i + 449*t]] * R[C[i + 449*t]] : R[B[i + 449*t]] + R[C[i + 449*t]];
R[i + 925*t] = Op[i + 450*t] ? R[B[i + 450*t]] * R[C[i + 450*t]] : R[B[i + 450*t]] + R[C[i + 450*t]];
R[i + 926*t] = Op[i + 451*t] ? R[B[i + 451*t]] * R[C[i + 451*t]] : R[B[i + 451*t]] + R[C[i + 451*t]];
R[i + 927*t] = Op[i + 452*t] ? R[B[i + 452*t]] * R[C[i + 452*t]] : R[B[i + 452*t]] + R[C[i + 452*t]];
R[i + 928*t] = Op[i + 453*t] ? R[B[i + 453*t]] * R[C[i + 453*t]] : R[B[i + 453*t]] + R[C[i + 453*t]];
R[i + 929*t] = Op[i + 454*t] ? R[B[i + 454*t]] * R[C[i + 454*t]] : R[B[i + 454*t]] + R[C[i + 454*t]];
R[i + 930*t] = Op[i + 455*t] ? R[B[i + 455*t]] * R[C[i + 455*t]] : R[B[i + 455*t]] + R[C[i + 455*t]];
R[i + 931*t] = Op[i + 456*t] ? R[B[i + 456*t]] * R[C[i + 456*t]] : R[B[i + 456*t]] + R[C[i + 456*t]];
R[i + 932*t] = Op[i + 457*t] ? R[B[i + 457*t]] * R[C[i + 457*t]] : R[B[i + 457*t]] + R[C[i + 457*t]];
R[i + 933*t] = Op[i + 458*t] ? R[B[i + 458*t]] * R[C[i + 458*t]] : R[B[i + 458*t]] + R[C[i + 458*t]];
R[i + 934*t] = Op[i + 459*t] ? R[B[i + 459*t]] * R[C[i + 459*t]] : R[B[i + 459*t]] + R[C[i + 459*t]];
R[i + 935*t] = Op[i + 460*t] ? R[B[i + 460*t]] * R[C[i + 460*t]] : R[B[i + 460*t]] + R[C[i + 460*t]];
R[i + 936*t] = Op[i + 461*t] ? R[B[i + 461*t]] * R[C[i + 461*t]] : R[B[i + 461*t]] + R[C[i + 461*t]];
R[i + 937*t] = Op[i + 462*t] ? R[B[i + 462*t]] * R[C[i + 462*t]] : R[B[i + 462*t]] + R[C[i + 462*t]];
R[i + 938*t] = Op[i + 463*t] ? R[B[i + 463*t]] * R[C[i + 463*t]] : R[B[i + 463*t]] + R[C[i + 463*t]];
R[i + 939*t] = Op[i + 464*t] ? R[B[i + 464*t]] * R[C[i + 464*t]] : R[B[i + 464*t]] + R[C[i + 464*t]];
R[i + 940*t] = Op[i + 465*t] ? R[B[i + 465*t]] * R[C[i + 465*t]] : R[B[i + 465*t]] + R[C[i + 465*t]];
R[i + 941*t] = Op[i + 466*t] ? R[B[i + 466*t]] * R[C[i + 466*t]] : R[B[i + 466*t]] + R[C[i + 466*t]];
R[i + 942*t] = Op[i + 467*t] ? R[B[i + 467*t]] * R[C[i + 467*t]] : R[B[i + 467*t]] + R[C[i + 467*t]];
R[i + 943*t] = Op[i + 468*t] ? R[B[i + 468*t]] * R[C[i + 468*t]] : R[B[i + 468*t]] + R[C[i + 468*t]];
__syncthreads();
R[i + 944*t] = Op[i + 469*t] ? R[B[i + 469*t]] * R[C[i + 469*t]] : R[B[i + 469*t]] + R[C[i + 469*t]];
R[i + 945*t] = Op[i + 470*t] ? R[B[i + 470*t]] * R[C[i + 470*t]] : R[B[i + 470*t]] + R[C[i + 470*t]];
R[i + 946*t] = Op[i + 471*t] ? R[B[i + 471*t]] * R[C[i + 471*t]] : R[B[i + 471*t]] + R[C[i + 471*t]];
R[i + 947*t] = Op[i + 472*t] ? R[B[i + 472*t]] * R[C[i + 472*t]] : R[B[i + 472*t]] + R[C[i + 472*t]];
R[i + 948*t] = Op[i + 473*t] ? R[B[i + 473*t]] * R[C[i + 473*t]] : R[B[i + 473*t]] + R[C[i + 473*t]];
R[i + 949*t] = Op[i + 474*t] ? R[B[i + 474*t]] * R[C[i + 474*t]] : R[B[i + 474*t]] + R[C[i + 474*t]];
R[i + 950*t] = Op[i + 475*t] ? R[B[i + 475*t]] * R[C[i + 475*t]] : R[B[i + 475*t]] + R[C[i + 475*t]];
R[i + 951*t] = Op[i + 476*t] ? R[B[i + 476*t]] * R[C[i + 476*t]] : R[B[i + 476*t]] + R[C[i + 476*t]];
R[i + 952*t] = Op[i + 477*t] ? R[B[i + 477*t]] * R[C[i + 477*t]] : R[B[i + 477*t]] + R[C[i + 477*t]];
R[i + 953*t] = Op[i + 478*t] ? R[B[i + 478*t]] * R[C[i + 478*t]] : R[B[i + 478*t]] + R[C[i + 478*t]];
R[i + 954*t] = Op[i + 479*t] ? R[B[i + 479*t]] * R[C[i + 479*t]] : R[B[i + 479*t]] + R[C[i + 479*t]];
R[i + 955*t] = Op[i + 480*t] ? R[B[i + 480*t]] * R[C[i + 480*t]] : R[B[i + 480*t]] + R[C[i + 480*t]];
R[i + 956*t] = Op[i + 481*t] ? R[B[i + 481*t]] * R[C[i + 481*t]] : R[B[i + 481*t]] + R[C[i + 481*t]];
R[i + 957*t] = Op[i + 482*t] ? R[B[i + 482*t]] * R[C[i + 482*t]] : R[B[i + 482*t]] + R[C[i + 482*t]];
R[i + 958*t] = Op[i + 483*t] ? R[B[i + 483*t]] * R[C[i + 483*t]] : R[B[i + 483*t]] + R[C[i + 483*t]];
R[i + 959*t] = Op[i + 484*t] ? R[B[i + 484*t]] * R[C[i + 484*t]] : R[B[i + 484*t]] + R[C[i + 484*t]];
R[i + 960*t] = Op[i + 485*t] ? R[B[i + 485*t]] * R[C[i + 485*t]] : R[B[i + 485*t]] + R[C[i + 485*t]];
R[i + 961*t] = Op[i + 486*t] ? R[B[i + 486*t]] * R[C[i + 486*t]] : R[B[i + 486*t]] + R[C[i + 486*t]];
R[i + 962*t] = Op[i + 487*t] ? R[B[i + 487*t]] * R[C[i + 487*t]] : R[B[i + 487*t]] + R[C[i + 487*t]];
R[i + 963*t] = Op[i + 488*t] ? R[B[i + 488*t]] * R[C[i + 488*t]] : R[B[i + 488*t]] + R[C[i + 488*t]];
R[i + 964*t] = Op[i + 489*t] ? R[B[i + 489*t]] * R[C[i + 489*t]] : R[B[i + 489*t]] + R[C[i + 489*t]];
R[i + 965*t] = Op[i + 490*t] ? R[B[i + 490*t]] * R[C[i + 490*t]] : R[B[i + 490*t]] + R[C[i + 490*t]];
R[i + 966*t] = Op[i + 491*t] ? R[B[i + 491*t]] * R[C[i + 491*t]] : R[B[i + 491*t]] + R[C[i + 491*t]];
R[i + 967*t] = Op[i + 492*t] ? R[B[i + 492*t]] * R[C[i + 492*t]] : R[B[i + 492*t]] + R[C[i + 492*t]];
R[i + 968*t] = Op[i + 493*t] ? R[B[i + 493*t]] * R[C[i + 493*t]] : R[B[i + 493*t]] + R[C[i + 493*t]];
R[i + 969*t] = Op[i + 494*t] ? R[B[i + 494*t]] * R[C[i + 494*t]] : R[B[i + 494*t]] + R[C[i + 494*t]];
R[i + 970*t] = Op[i + 495*t] ? R[B[i + 495*t]] * R[C[i + 495*t]] : R[B[i + 495*t]] + R[C[i + 495*t]];
R[i + 971*t] = Op[i + 496*t] ? R[B[i + 496*t]] * R[C[i + 496*t]] : R[B[i + 496*t]] + R[C[i + 496*t]];
R[i + 972*t] = Op[i + 497*t] ? R[B[i + 497*t]] * R[C[i + 497*t]] : R[B[i + 497*t]] + R[C[i + 497*t]];
R[i + 973*t] = Op[i + 498*t] ? R[B[i + 498*t]] * R[C[i + 498*t]] : R[B[i + 498*t]] + R[C[i + 498*t]];
R[i + 974*t] = Op[i + 499*t] ? R[B[i + 499*t]] * R[C[i + 499*t]] : R[B[i + 499*t]] + R[C[i + 499*t]];
R[i + 975*t] = Op[i + 500*t] ? R[B[i + 500*t]] * R[C[i + 500*t]] : R[B[i + 500*t]] + R[C[i + 500*t]];
R[i + 976*t] = Op[i + 501*t] ? R[B[i + 501*t]] * R[C[i + 501*t]] : R[B[i + 501*t]] + R[C[i + 501*t]];
R[i + 977*t] = Op[i + 502*t] ? R[B[i + 502*t]] * R[C[i + 502*t]] : R[B[i + 502*t]] + R[C[i + 502*t]];
R[i + 978*t] = Op[i + 503*t] ? R[B[i + 503*t]] * R[C[i + 503*t]] : R[B[i + 503*t]] + R[C[i + 503*t]];
R[i + 979*t] = Op[i + 504*t] ? R[B[i + 504*t]] * R[C[i + 504*t]] : R[B[i + 504*t]] + R[C[i + 504*t]];
R[i + 980*t] = Op[i + 505*t] ? R[B[i + 505*t]] * R[C[i + 505*t]] : R[B[i + 505*t]] + R[C[i + 505*t]];
R[i + 981*t] = Op[i + 506*t] ? R[B[i + 506*t]] * R[C[i + 506*t]] : R[B[i + 506*t]] + R[C[i + 506*t]];
R[i + 982*t] = Op[i + 507*t] ? R[B[i + 507*t]] * R[C[i + 507*t]] : R[B[i + 507*t]] + R[C[i + 507*t]];
R[i + 983*t] = Op[i + 508*t] ? R[B[i + 508*t]] * R[C[i + 508*t]] : R[B[i + 508*t]] + R[C[i + 508*t]];
R[i + 984*t] = Op[i + 509*t] ? R[B[i + 509*t]] * R[C[i + 509*t]] : R[B[i + 509*t]] + R[C[i + 509*t]];
R[i + 985*t] = Op[i + 510*t] ? R[B[i + 510*t]] * R[C[i + 510*t]] : R[B[i + 510*t]] + R[C[i + 510*t]];
R[i + 986*t] = Op[i + 511*t] ? R[B[i + 511*t]] * R[C[i + 511*t]] : R[B[i + 511*t]] + R[C[i + 511*t]];
R[i + 987*t] = Op[i + 512*t] ? R[B[i + 512*t]] * R[C[i + 512*t]] : R[B[i + 512*t]] + R[C[i + 512*t]];
R[i + 988*t] = Op[i + 513*t] ? R[B[i + 513*t]] * R[C[i + 513*t]] : R[B[i + 513*t]] + R[C[i + 513*t]];
R[i + 989*t] = Op[i + 514*t] ? R[B[i + 514*t]] * R[C[i + 514*t]] : R[B[i + 514*t]] + R[C[i + 514*t]];
R[i + 990*t] = Op[i + 515*t] ? R[B[i + 515*t]] * R[C[i + 515*t]] : R[B[i + 515*t]] + R[C[i + 515*t]];
R[i + 991*t] = Op[i + 516*t] ? R[B[i + 516*t]] * R[C[i + 516*t]] : R[B[i + 516*t]] + R[C[i + 516*t]];
R[i + 992*t] = Op[i + 517*t] ? R[B[i + 517*t]] * R[C[i + 517*t]] : R[B[i + 517*t]] + R[C[i + 517*t]];
R[i + 993*t] = Op[i + 518*t] ? R[B[i + 518*t]] * R[C[i + 518*t]] : R[B[i + 518*t]] + R[C[i + 518*t]];
R[i + 994*t] = Op[i + 519*t] ? R[B[i + 519*t]] * R[C[i + 519*t]] : R[B[i + 519*t]] + R[C[i + 519*t]];
R[i + 995*t] = Op[i + 520*t] ? R[B[i + 520*t]] * R[C[i + 520*t]] : R[B[i + 520*t]] + R[C[i + 520*t]];
R[i + 996*t] = Op[i + 521*t] ? R[B[i + 521*t]] * R[C[i + 521*t]] : R[B[i + 521*t]] + R[C[i + 521*t]];
R[i + 997*t] = Op[i + 522*t] ? R[B[i + 522*t]] * R[C[i + 522*t]] : R[B[i + 522*t]] + R[C[i + 522*t]];
R[i + 998*t] = Op[i + 523*t] ? R[B[i + 523*t]] * R[C[i + 523*t]] : R[B[i + 523*t]] + R[C[i + 523*t]];
__syncthreads();
R[i + 999*t] = Op[i + 524*t] ? R[B[i + 524*t]] * R[C[i + 524*t]] : R[B[i + 524*t]] + R[C[i + 524*t]];
R[i + 1000*t] = Op[i + 525*t] ? R[B[i + 525*t]] * R[C[i + 525*t]] : R[B[i + 525*t]] + R[C[i + 525*t]];
R[i + 1001*t] = Op[i + 526*t] ? R[B[i + 526*t]] * R[C[i + 526*t]] : R[B[i + 526*t]] + R[C[i + 526*t]];
R[i + 1002*t] = Op[i + 527*t] ? R[B[i + 527*t]] * R[C[i + 527*t]] : R[B[i + 527*t]] + R[C[i + 527*t]];
R[i + 1003*t] = Op[i + 528*t] ? R[B[i + 528*t]] * R[C[i + 528*t]] : R[B[i + 528*t]] + R[C[i + 528*t]];
R[i + 1004*t] = Op[i + 529*t] ? R[B[i + 529*t]] * R[C[i + 529*t]] : R[B[i + 529*t]] + R[C[i + 529*t]];
R[i + 1005*t] = Op[i + 530*t] ? R[B[i + 530*t]] * R[C[i + 530*t]] : R[B[i + 530*t]] + R[C[i + 530*t]];
R[i + 1006*t] = Op[i + 531*t] ? R[B[i + 531*t]] * R[C[i + 531*t]] : R[B[i + 531*t]] + R[C[i + 531*t]];
R[i + 1007*t] = Op[i + 532*t] ? R[B[i + 532*t]] * R[C[i + 532*t]] : R[B[i + 532*t]] + R[C[i + 532*t]];
R[i + 1008*t] = Op[i + 533*t] ? R[B[i + 533*t]] * R[C[i + 533*t]] : R[B[i + 533*t]] + R[C[i + 533*t]];
R[i + 1009*t] = Op[i + 534*t] ? R[B[i + 534*t]] * R[C[i + 534*t]] : R[B[i + 534*t]] + R[C[i + 534*t]];
R[i + 1010*t] = Op[i + 535*t] ? R[B[i + 535*t]] * R[C[i + 535*t]] : R[B[i + 535*t]] + R[C[i + 535*t]];
R[i + 1011*t] = Op[i + 536*t] ? R[B[i + 536*t]] * R[C[i + 536*t]] : R[B[i + 536*t]] + R[C[i + 536*t]];
R[i + 1012*t] = Op[i + 537*t] ? R[B[i + 537*t]] * R[C[i + 537*t]] : R[B[i + 537*t]] + R[C[i + 537*t]];
R[i + 1013*t] = Op[i + 538*t] ? R[B[i + 538*t]] * R[C[i + 538*t]] : R[B[i + 538*t]] + R[C[i + 538*t]];
R[i + 1014*t] = Op[i + 539*t] ? R[B[i + 539*t]] * R[C[i + 539*t]] : R[B[i + 539*t]] + R[C[i + 539*t]];
R[i + 1015*t] = Op[i + 540*t] ? R[B[i + 540*t]] * R[C[i + 540*t]] : R[B[i + 540*t]] + R[C[i + 540*t]];
R[i + 1016*t] = Op[i + 541*t] ? R[B[i + 541*t]] * R[C[i + 541*t]] : R[B[i + 541*t]] + R[C[i + 541*t]];
R[i + 1017*t] = Op[i + 542*t] ? R[B[i + 542*t]] * R[C[i + 542*t]] : R[B[i + 542*t]] + R[C[i + 542*t]];
R[i + 1018*t] = Op[i + 543*t] ? R[B[i + 543*t]] * R[C[i + 543*t]] : R[B[i + 543*t]] + R[C[i + 543*t]];
R[i + 1019*t] = Op[i + 544*t] ? R[B[i + 544*t]] * R[C[i + 544*t]] : R[B[i + 544*t]] + R[C[i + 544*t]];
R[i + 1020*t] = Op[i + 545*t] ? R[B[i + 545*t]] * R[C[i + 545*t]] : R[B[i + 545*t]] + R[C[i + 545*t]];
R[i + 1021*t] = Op[i + 546*t] ? R[B[i + 546*t]] * R[C[i + 546*t]] : R[B[i + 546*t]] + R[C[i + 546*t]];
R[i + 1022*t] = Op[i + 547*t] ? R[B[i + 547*t]] * R[C[i + 547*t]] : R[B[i + 547*t]] + R[C[i + 547*t]];
R[i + 1023*t] = Op[i + 548*t] ? R[B[i + 548*t]] * R[C[i + 548*t]] : R[B[i + 548*t]] + R[C[i + 548*t]];
R[i + 1024*t] = Op[i + 549*t] ? R[B[i + 549*t]] * R[C[i + 549*t]] : R[B[i + 549*t]] + R[C[i + 549*t]];
R[i + 1025*t] = Op[i + 550*t] ? R[B[i + 550*t]] * R[C[i + 550*t]] : R[B[i + 550*t]] + R[C[i + 550*t]];
R[i + 1026*t] = Op[i + 551*t] ? R[B[i + 551*t]] * R[C[i + 551*t]] : R[B[i + 551*t]] + R[C[i + 551*t]];
R[i + 1027*t] = Op[i + 552*t] ? R[B[i + 552*t]] * R[C[i + 552*t]] : R[B[i + 552*t]] + R[C[i + 552*t]];
R[i + 1028*t] = Op[i + 553*t] ? R[B[i + 553*t]] * R[C[i + 553*t]] : R[B[i + 553*t]] + R[C[i + 553*t]];
R[i + 1029*t] = Op[i + 554*t] ? R[B[i + 554*t]] * R[C[i + 554*t]] : R[B[i + 554*t]] + R[C[i + 554*t]];
R[i + 1030*t] = Op[i + 555*t] ? R[B[i + 555*t]] * R[C[i + 555*t]] : R[B[i + 555*t]] + R[C[i + 555*t]];
R[i + 1031*t] = Op[i + 556*t] ? R[B[i + 556*t]] * R[C[i + 556*t]] : R[B[i + 556*t]] + R[C[i + 556*t]];
R[i + 1032*t] = Op[i + 557*t] ? R[B[i + 557*t]] * R[C[i + 557*t]] : R[B[i + 557*t]] + R[C[i + 557*t]];
R[i + 1033*t] = Op[i + 558*t] ? R[B[i + 558*t]] * R[C[i + 558*t]] : R[B[i + 558*t]] + R[C[i + 558*t]];
R[i + 1034*t] = Op[i + 559*t] ? R[B[i + 559*t]] * R[C[i + 559*t]] : R[B[i + 559*t]] + R[C[i + 559*t]];
R[i + 1035*t] = Op[i + 560*t] ? R[B[i + 560*t]] * R[C[i + 560*t]] : R[B[i + 560*t]] + R[C[i + 560*t]];
R[i + 1036*t] = Op[i + 561*t] ? R[B[i + 561*t]] * R[C[i + 561*t]] : R[B[i + 561*t]] + R[C[i + 561*t]];
R[i + 1037*t] = Op[i + 562*t] ? R[B[i + 562*t]] * R[C[i + 562*t]] : R[B[i + 562*t]] + R[C[i + 562*t]];
R[i + 1038*t] = Op[i + 563*t] ? R[B[i + 563*t]] * R[C[i + 563*t]] : R[B[i + 563*t]] + R[C[i + 563*t]];
R[i + 1039*t] = Op[i + 564*t] ? R[B[i + 564*t]] * R[C[i + 564*t]] : R[B[i + 564*t]] + R[C[i + 564*t]];
R[i + 1040*t] = Op[i + 565*t] ? R[B[i + 565*t]] * R[C[i + 565*t]] : R[B[i + 565*t]] + R[C[i + 565*t]];
R[i + 1041*t] = Op[i + 566*t] ? R[B[i + 566*t]] * R[C[i + 566*t]] : R[B[i + 566*t]] + R[C[i + 566*t]];
R[i + 1042*t] = Op[i + 567*t] ? R[B[i + 567*t]] * R[C[i + 567*t]] : R[B[i + 567*t]] + R[C[i + 567*t]];
R[i + 1043*t] = Op[i + 568*t] ? R[B[i + 568*t]] * R[C[i + 568*t]] : R[B[i + 568*t]] + R[C[i + 568*t]];
R[i + 1044*t] = Op[i + 569*t] ? R[B[i + 569*t]] * R[C[i + 569*t]] : R[B[i + 569*t]] + R[C[i + 569*t]];
R[i + 1045*t] = Op[i + 570*t] ? R[B[i + 570*t]] * R[C[i + 570*t]] : R[B[i + 570*t]] + R[C[i + 570*t]];
R[i + 1046*t] = Op[i + 571*t] ? R[B[i + 571*t]] * R[C[i + 571*t]] : R[B[i + 571*t]] + R[C[i + 571*t]];
__syncthreads();
R[i + 1047*t] = Op[i + 572*t] ? R[B[i + 572*t]] * R[C[i + 572*t]] : R[B[i + 572*t]] + R[C[i + 572*t]];
R[i + 1048*t] = Op[i + 573*t] ? R[B[i + 573*t]] * R[C[i + 573*t]] : R[B[i + 573*t]] + R[C[i + 573*t]];
R[i + 1049*t] = Op[i + 574*t] ? R[B[i + 574*t]] * R[C[i + 574*t]] : R[B[i + 574*t]] + R[C[i + 574*t]];
R[i + 1050*t] = Op[i + 575*t] ? R[B[i + 575*t]] * R[C[i + 575*t]] : R[B[i + 575*t]] + R[C[i + 575*t]];
R[i + 1051*t] = Op[i + 576*t] ? R[B[i + 576*t]] * R[C[i + 576*t]] : R[B[i + 576*t]] + R[C[i + 576*t]];
R[i + 1052*t] = Op[i + 577*t] ? R[B[i + 577*t]] * R[C[i + 577*t]] : R[B[i + 577*t]] + R[C[i + 577*t]];
R[i + 1053*t] = Op[i + 578*t] ? R[B[i + 578*t]] * R[C[i + 578*t]] : R[B[i + 578*t]] + R[C[i + 578*t]];
R[i + 1054*t] = Op[i + 579*t] ? R[B[i + 579*t]] * R[C[i + 579*t]] : R[B[i + 579*t]] + R[C[i + 579*t]];
R[i + 1055*t] = Op[i + 580*t] ? R[B[i + 580*t]] * R[C[i + 580*t]] : R[B[i + 580*t]] + R[C[i + 580*t]];
R[i + 1056*t] = Op[i + 581*t] ? R[B[i + 581*t]] * R[C[i + 581*t]] : R[B[i + 581*t]] + R[C[i + 581*t]];
R[i + 1057*t] = Op[i + 582*t] ? R[B[i + 582*t]] * R[C[i + 582*t]] : R[B[i + 582*t]] + R[C[i + 582*t]];
R[i + 1058*t] = Op[i + 583*t] ? R[B[i + 583*t]] * R[C[i + 583*t]] : R[B[i + 583*t]] + R[C[i + 583*t]];
R[i + 1059*t] = Op[i + 584*t] ? R[B[i + 584*t]] * R[C[i + 584*t]] : R[B[i + 584*t]] + R[C[i + 584*t]];
R[i + 1060*t] = Op[i + 585*t] ? R[B[i + 585*t]] * R[C[i + 585*t]] : R[B[i + 585*t]] + R[C[i + 585*t]];
R[i + 1061*t] = Op[i + 586*t] ? R[B[i + 586*t]] * R[C[i + 586*t]] : R[B[i + 586*t]] + R[C[i + 586*t]];
R[i + 1062*t] = Op[i + 587*t] ? R[B[i + 587*t]] * R[C[i + 587*t]] : R[B[i + 587*t]] + R[C[i + 587*t]];
R[i + 1063*t] = Op[i + 588*t] ? R[B[i + 588*t]] * R[C[i + 588*t]] : R[B[i + 588*t]] + R[C[i + 588*t]];
R[i + 1064*t] = Op[i + 589*t] ? R[B[i + 589*t]] * R[C[i + 589*t]] : R[B[i + 589*t]] + R[C[i + 589*t]];
R[i + 1065*t] = Op[i + 590*t] ? R[B[i + 590*t]] * R[C[i + 590*t]] : R[B[i + 590*t]] + R[C[i + 590*t]];
R[i + 1066*t] = Op[i + 591*t] ? R[B[i + 591*t]] * R[C[i + 591*t]] : R[B[i + 591*t]] + R[C[i + 591*t]];
R[i + 1067*t] = Op[i + 592*t] ? R[B[i + 592*t]] * R[C[i + 592*t]] : R[B[i + 592*t]] + R[C[i + 592*t]];
R[i + 1068*t] = Op[i + 593*t] ? R[B[i + 593*t]] * R[C[i + 593*t]] : R[B[i + 593*t]] + R[C[i + 593*t]];
R[i + 1069*t] = Op[i + 594*t] ? R[B[i + 594*t]] * R[C[i + 594*t]] : R[B[i + 594*t]] + R[C[i + 594*t]];
R[i + 1070*t] = Op[i + 595*t] ? R[B[i + 595*t]] * R[C[i + 595*t]] : R[B[i + 595*t]] + R[C[i + 595*t]];
R[i + 1071*t] = Op[i + 596*t] ? R[B[i + 596*t]] * R[C[i + 596*t]] : R[B[i + 596*t]] + R[C[i + 596*t]];
R[i + 1072*t] = Op[i + 597*t] ? R[B[i + 597*t]] * R[C[i + 597*t]] : R[B[i + 597*t]] + R[C[i + 597*t]];
R[i + 1073*t] = Op[i + 598*t] ? R[B[i + 598*t]] * R[C[i + 598*t]] : R[B[i + 598*t]] + R[C[i + 598*t]];
R[i + 1074*t] = Op[i + 599*t] ? R[B[i + 599*t]] * R[C[i + 599*t]] : R[B[i + 599*t]] + R[C[i + 599*t]];
R[i + 1075*t] = Op[i + 600*t] ? R[B[i + 600*t]] * R[C[i + 600*t]] : R[B[i + 600*t]] + R[C[i + 600*t]];
R[i + 1076*t] = Op[i + 601*t] ? R[B[i + 601*t]] * R[C[i + 601*t]] : R[B[i + 601*t]] + R[C[i + 601*t]];
R[i + 1077*t] = Op[i + 602*t] ? R[B[i + 602*t]] * R[C[i + 602*t]] : R[B[i + 602*t]] + R[C[i + 602*t]];
R[i + 1078*t] = Op[i + 603*t] ? R[B[i + 603*t]] * R[C[i + 603*t]] : R[B[i + 603*t]] + R[C[i + 603*t]];
R[i + 1079*t] = Op[i + 604*t] ? R[B[i + 604*t]] * R[C[i + 604*t]] : R[B[i + 604*t]] + R[C[i + 604*t]];
R[i + 1080*t] = Op[i + 605*t] ? R[B[i + 605*t]] * R[C[i + 605*t]] : R[B[i + 605*t]] + R[C[i + 605*t]];
R[i + 1081*t] = Op[i + 606*t] ? R[B[i + 606*t]] * R[C[i + 606*t]] : R[B[i + 606*t]] + R[C[i + 606*t]];
R[i + 1082*t] = Op[i + 607*t] ? R[B[i + 607*t]] * R[C[i + 607*t]] : R[B[i + 607*t]] + R[C[i + 607*t]];
R[i + 1083*t] = Op[i + 608*t] ? R[B[i + 608*t]] * R[C[i + 608*t]] : R[B[i + 608*t]] + R[C[i + 608*t]];
R[i + 1084*t] = Op[i + 609*t] ? R[B[i + 609*t]] * R[C[i + 609*t]] : R[B[i + 609*t]] + R[C[i + 609*t]];
R[i + 1085*t] = Op[i + 610*t] ? R[B[i + 610*t]] * R[C[i + 610*t]] : R[B[i + 610*t]] + R[C[i + 610*t]];
R[i + 1086*t] = Op[i + 611*t] ? R[B[i + 611*t]] * R[C[i + 611*t]] : R[B[i + 611*t]] + R[C[i + 611*t]];
__syncthreads();
R[i + 1087*t] = Op[i + 612*t] ? R[B[i + 612*t]] * R[C[i + 612*t]] : R[B[i + 612*t]] + R[C[i + 612*t]];
R[i + 1088*t] = Op[i + 613*t] ? R[B[i + 613*t]] * R[C[i + 613*t]] : R[B[i + 613*t]] + R[C[i + 613*t]];
R[i + 1089*t] = Op[i + 614*t] ? R[B[i + 614*t]] * R[C[i + 614*t]] : R[B[i + 614*t]] + R[C[i + 614*t]];
R[i + 1090*t] = Op[i + 615*t] ? R[B[i + 615*t]] * R[C[i + 615*t]] : R[B[i + 615*t]] + R[C[i + 615*t]];
R[i + 1091*t] = Op[i + 616*t] ? R[B[i + 616*t]] * R[C[i + 616*t]] : R[B[i + 616*t]] + R[C[i + 616*t]];
R[i + 1092*t] = Op[i + 617*t] ? R[B[i + 617*t]] * R[C[i + 617*t]] : R[B[i + 617*t]] + R[C[i + 617*t]];
R[i + 1093*t] = Op[i + 618*t] ? R[B[i + 618*t]] * R[C[i + 618*t]] : R[B[i + 618*t]] + R[C[i + 618*t]];
R[i + 1094*t] = Op[i + 619*t] ? R[B[i + 619*t]] * R[C[i + 619*t]] : R[B[i + 619*t]] + R[C[i + 619*t]];
R[i + 1095*t] = Op[i + 620*t] ? R[B[i + 620*t]] * R[C[i + 620*t]] : R[B[i + 620*t]] + R[C[i + 620*t]];
R[i + 1096*t] = Op[i + 621*t] ? R[B[i + 621*t]] * R[C[i + 621*t]] : R[B[i + 621*t]] + R[C[i + 621*t]];
R[i + 1097*t] = Op[i + 622*t] ? R[B[i + 622*t]] * R[C[i + 622*t]] : R[B[i + 622*t]] + R[C[i + 622*t]];
R[i + 1098*t] = Op[i + 623*t] ? R[B[i + 623*t]] * R[C[i + 623*t]] : R[B[i + 623*t]] + R[C[i + 623*t]];
R[i + 1099*t] = Op[i + 624*t] ? R[B[i + 624*t]] * R[C[i + 624*t]] : R[B[i + 624*t]] + R[C[i + 624*t]];
R[i + 1100*t] = Op[i + 625*t] ? R[B[i + 625*t]] * R[C[i + 625*t]] : R[B[i + 625*t]] + R[C[i + 625*t]];
R[i + 1101*t] = Op[i + 626*t] ? R[B[i + 626*t]] * R[C[i + 626*t]] : R[B[i + 626*t]] + R[C[i + 626*t]];
R[i + 1102*t] = Op[i + 627*t] ? R[B[i + 627*t]] * R[C[i + 627*t]] : R[B[i + 627*t]] + R[C[i + 627*t]];
R[i + 1103*t] = Op[i + 628*t] ? R[B[i + 628*t]] * R[C[i + 628*t]] : R[B[i + 628*t]] + R[C[i + 628*t]];
R[i + 1104*t] = Op[i + 629*t] ? R[B[i + 629*t]] * R[C[i + 629*t]] : R[B[i + 629*t]] + R[C[i + 629*t]];
R[i + 1105*t] = Op[i + 630*t] ? R[B[i + 630*t]] * R[C[i + 630*t]] : R[B[i + 630*t]] + R[C[i + 630*t]];
R[i + 1106*t] = Op[i + 631*t] ? R[B[i + 631*t]] * R[C[i + 631*t]] : R[B[i + 631*t]] + R[C[i + 631*t]];
R[i + 1107*t] = Op[i + 632*t] ? R[B[i + 632*t]] * R[C[i + 632*t]] : R[B[i + 632*t]] + R[C[i + 632*t]];
__syncthreads();
R[i + 1108*t] = Op[i + 633*t] ? R[B[i + 633*t]] * R[C[i + 633*t]] : R[B[i + 633*t]] + R[C[i + 633*t]];
R[i + 1109*t] = Op[i + 634*t] ? R[B[i + 634*t]] * R[C[i + 634*t]] : R[B[i + 634*t]] + R[C[i + 634*t]];
R[i + 1110*t] = Op[i + 635*t] ? R[B[i + 635*t]] * R[C[i + 635*t]] : R[B[i + 635*t]] + R[C[i + 635*t]];
R[i + 1111*t] = Op[i + 636*t] ? R[B[i + 636*t]] * R[C[i + 636*t]] : R[B[i + 636*t]] + R[C[i + 636*t]];
R[i + 1112*t] = Op[i + 637*t] ? R[B[i + 637*t]] * R[C[i + 637*t]] : R[B[i + 637*t]] + R[C[i + 637*t]];
R[i + 1113*t] = Op[i + 638*t] ? R[B[i + 638*t]] * R[C[i + 638*t]] : R[B[i + 638*t]] + R[C[i + 638*t]];
R[i + 1114*t] = Op[i + 639*t] ? R[B[i + 639*t]] * R[C[i + 639*t]] : R[B[i + 639*t]] + R[C[i + 639*t]];
R[i + 1115*t] = Op[i + 640*t] ? R[B[i + 640*t]] * R[C[i + 640*t]] : R[B[i + 640*t]] + R[C[i + 640*t]];
R[i + 1116*t] = Op[i + 641*t] ? R[B[i + 641*t]] * R[C[i + 641*t]] : R[B[i + 641*t]] + R[C[i + 641*t]];
R[i + 1117*t] = Op[i + 642*t] ? R[B[i + 642*t]] * R[C[i + 642*t]] : R[B[i + 642*t]] + R[C[i + 642*t]];
R[i + 1118*t] = Op[i + 643*t] ? R[B[i + 643*t]] * R[C[i + 643*t]] : R[B[i + 643*t]] + R[C[i + 643*t]];
R[i + 1119*t] = Op[i + 644*t] ? R[B[i + 644*t]] * R[C[i + 644*t]] : R[B[i + 644*t]] + R[C[i + 644*t]];
R[i + 1120*t] = Op[i + 645*t] ? R[B[i + 645*t]] * R[C[i + 645*t]] : R[B[i + 645*t]] + R[C[i + 645*t]];
R[i + 1121*t] = Op[i + 646*t] ? R[B[i + 646*t]] * R[C[i + 646*t]] : R[B[i + 646*t]] + R[C[i + 646*t]];
R[i + 1122*t] = Op[i + 647*t] ? R[B[i + 647*t]] * R[C[i + 647*t]] : R[B[i + 647*t]] + R[C[i + 647*t]];
__syncthreads();
R[i + 1123*t] = Op[i + 648*t] ? R[B[i + 648*t]] * R[C[i + 648*t]] : R[B[i + 648*t]] + R[C[i + 648*t]];
R[i + 1124*t] = Op[i + 649*t] ? R[B[i + 649*t]] * R[C[i + 649*t]] : R[B[i + 649*t]] + R[C[i + 649*t]];
R[i + 1125*t] = Op[i + 650*t] ? R[B[i + 650*t]] * R[C[i + 650*t]] : R[B[i + 650*t]] + R[C[i + 650*t]];
R[i + 1126*t] = Op[i + 651*t] ? R[B[i + 651*t]] * R[C[i + 651*t]] : R[B[i + 651*t]] + R[C[i + 651*t]];
R[i + 1127*t] = Op[i + 652*t] ? R[B[i + 652*t]] * R[C[i + 652*t]] : R[B[i + 652*t]] + R[C[i + 652*t]];
R[i + 1128*t] = Op[i + 653*t] ? R[B[i + 653*t]] * R[C[i + 653*t]] : R[B[i + 653*t]] + R[C[i + 653*t]];
R[i + 1129*t] = Op[i + 654*t] ? R[B[i + 654*t]] * R[C[i + 654*t]] : R[B[i + 654*t]] + R[C[i + 654*t]];
R[i + 1130*t] = Op[i + 655*t] ? R[B[i + 655*t]] * R[C[i + 655*t]] : R[B[i + 655*t]] + R[C[i + 655*t]];
R[i + 1131*t] = Op[i + 656*t] ? R[B[i + 656*t]] * R[C[i + 656*t]] : R[B[i + 656*t]] + R[C[i + 656*t]];
R[i + 1132*t] = Op[i + 657*t] ? R[B[i + 657*t]] * R[C[i + 657*t]] : R[B[i + 657*t]] + R[C[i + 657*t]];
R[i + 1133*t] = Op[i + 658*t] ? R[B[i + 658*t]] * R[C[i + 658*t]] : R[B[i + 658*t]] + R[C[i + 658*t]];
__syncthreads();
R[i + 1134*t] = Op[i + 659*t] ? R[B[i + 659*t]] * R[C[i + 659*t]] : R[B[i + 659*t]] + R[C[i + 659*t]];
R[i + 1135*t] = Op[i + 660*t] ? R[B[i + 660*t]] * R[C[i + 660*t]] : R[B[i + 660*t]] + R[C[i + 660*t]];
R[i + 1136*t] = Op[i + 661*t] ? R[B[i + 661*t]] * R[C[i + 661*t]] : R[B[i + 661*t]] + R[C[i + 661*t]];
R[i + 1137*t] = Op[i + 662*t] ? R[B[i + 662*t]] * R[C[i + 662*t]] : R[B[i + 662*t]] + R[C[i + 662*t]];
R[i + 1138*t] = Op[i + 663*t] ? R[B[i + 663*t]] * R[C[i + 663*t]] : R[B[i + 663*t]] + R[C[i + 663*t]];
R[i + 1139*t] = Op[i + 664*t] ? R[B[i + 664*t]] * R[C[i + 664*t]] : R[B[i + 664*t]] + R[C[i + 664*t]];
R[i + 1140*t] = Op[i + 665*t] ? R[B[i + 665*t]] * R[C[i + 665*t]] : R[B[i + 665*t]] + R[C[i + 665*t]];
R[i + 1141*t] = Op[i + 666*t] ? R[B[i + 666*t]] * R[C[i + 666*t]] : R[B[i + 666*t]] + R[C[i + 666*t]];
__syncthreads();
R[i + 1142*t] = Op[i + 667*t] ? R[B[i + 667*t]] * R[C[i + 667*t]] : R[B[i + 667*t]] + R[C[i + 667*t]];
R[i + 1143*t] = Op[i + 668*t] ? R[B[i + 668*t]] * R[C[i + 668*t]] : R[B[i + 668*t]] + R[C[i + 668*t]];
R[i + 1144*t] = Op[i + 669*t] ? R[B[i + 669*t]] * R[C[i + 669*t]] : R[B[i + 669*t]] + R[C[i + 669*t]];
R[i + 1145*t] = Op[i + 670*t] ? R[B[i + 670*t]] * R[C[i + 670*t]] : R[B[i + 670*t]] + R[C[i + 670*t]];
R[i + 1146*t] = Op[i + 671*t] ? R[B[i + 671*t]] * R[C[i + 671*t]] : R[B[i + 671*t]] + R[C[i + 671*t]];
R[i + 1147*t] = Op[i + 672*t] ? R[B[i + 672*t]] * R[C[i + 672*t]] : R[B[i + 672*t]] + R[C[i + 672*t]];
__syncthreads();
R[i + 1148*t] = Op[i + 673*t] ? R[B[i + 673*t]] * R[C[i + 673*t]] : R[B[i + 673*t]] + R[C[i + 673*t]];
R[i + 1149*t] = Op[i + 674*t] ? R[B[i + 674*t]] * R[C[i + 674*t]] : R[B[i + 674*t]] + R[C[i + 674*t]];
R[i + 1150*t] = Op[i + 675*t] ? R[B[i + 675*t]] * R[C[i + 675*t]] : R[B[i + 675*t]] + R[C[i + 675*t]];
R[i + 1151*t] = Op[i + 676*t] ? R[B[i + 676*t]] * R[C[i + 676*t]] : R[B[i + 676*t]] + R[C[i + 676*t]];
__syncthreads();
R[i + 1152*t] = Op[i + 677*t] ? R[B[i + 677*t]] * R[C[i + 677*t]] : R[B[i + 677*t]] + R[C[i + 677*t]];
R[i + 1153*t] = Op[i + 678*t] ? R[B[i + 678*t]] * R[C[i + 678*t]] : R[B[i + 678*t]] + R[C[i + 678*t]];
R[i + 1154*t] = Op[i + 679*t] ? R[B[i + 679*t]] * R[C[i + 679*t]] : R[B[i + 679*t]] + R[C[i + 679*t]];
__syncthreads();
R[i + 1155*t] = Op[i + 680*t] ? R[B[i + 680*t]] * R[C[i + 680*t]] : R[B[i + 680*t]] + R[C[i + 680*t]];
R[i + 1156*t] = Op[i + 681*t] ? R[B[i + 681*t]] * R[C[i + 681*t]] : R[B[i + 681*t]] + R[C[i + 681*t]];
__syncthreads();
R[i + 1157*t] = Op[i + 682*t] ? R[B[i + 682*t]] * R[C[i + 682*t]] : R[B[i + 682*t]] + R[C[i + 682*t]];
R[i + 1158*t] = Op[i + 683*t] ? R[B[i + 683*t]] * R[C[i + 683*t]] : R[B[i + 683*t]] + R[C[i + 683*t]];
__syncthreads();
R[i + 1159*t] = Op[i + 684*t] ? R[B[i + 684*t]] * R[C[i + 684*t]] : R[B[i + 684*t]] + R[C[i + 684*t]];
__syncthreads();
R[i + 1160*t] = Op[i + 685*t] ? R[B[i + 685*t]] * R[C[i + 685*t]] : R[B[i + 685*t]] + R[C[i + 685*t]];
__syncthreads();
R[i + 1161*t] = Op[i + 686*t] ? R[B[i + 686*t]] * R[C[i + 686*t]] : R[B[i + 686*t]] + R[C[i + 686*t]];
__syncthreads();
R[i + 1162*t] = Op[i + 687*t] ? R[B[i + 687*t]] * R[C[i + 687*t]] : R[B[i + 687*t]] + R[C[i + 687*t]];
__syncthreads();
R[i + 1163*t] = Op[i + 688*t] ? R[B[i + 688*t]] * R[C[i + 688*t]] : R[B[i + 688*t]] + R[C[i + 688*t]];
__syncthreads();
R[i + 1164*t] = Op[i + 689*t] ? R[B[i + 689*t]] * R[C[i + 689*t]] : R[B[i + 689*t]] + R[C[i + 689*t]];
__syncthreads();
R[i + 1165*t] = Op[i + 690*t] ? R[B[i + 690*t]] * R[C[i + 690*t]] : R[B[i + 690*t]] + R[C[i + 690*t]];
__syncthreads();
R[i + 1166*t] = Op[i + 691*t] ? R[B[i + 691*t]] * R[C[i + 691*t]] : R[B[i + 691*t]] + R[C[i + 691*t]];
__syncthreads();
R[i + 1167*t] = Op[i + 692*t] ? R[B[i + 692*t]] * R[C[i + 692*t]] : R[B[i + 692*t]] + R[C[i + 692*t]];
__syncthreads();
R[i + 1168*t] = Op[i + 693*t] ? R[B[i + 693*t]] * R[C[i + 693*t]] : R[B[i + 693*t]] + R[C[i + 693*t]];
if (i==0) { final += R[1168*t]; }
__syncthreads();
}
if (i==0) { A[0]= final;}
}
|
14,939 | #include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <unistd.h>
#define X 153
#define Y 42
#define DELAY 90
#define ROW 10
#define COL 64
/*
 * Game-of-Life generation step: one thread per cell.
 * A is the current board, B receives the next generation (both X*Y chars,
 * row j at offset X*j). Cells on the outermost border are forced dead
 * ('-') so the interior neighbour reads never leave the grid.
 * Expected launch: one thread per (i, j) with 0 <= i < X, 0 <= j < Y.
 */
__global__ void ga (char *A, char *B) {
    int col = blockIdx.x * blockDim.x + threadIdx.x;
    int row = blockIdx.y * blockDim.y + threadIdx.y;

    // Border cells have no full 8-neighbourhood: mark dead and stop.
    if ((col + 1) >= X || (col - 1) < 0 || (row + 1) >= Y || (row - 1) < 0) {
        B[col + X * row] = '-';
        return;
    }

    // Count live ('*') cells among the 8 surrounding positions.
    int alive = 0;
    for (int dy = -1; dy <= 1; dy++) {
        for (int dx = -1; dx <= 1; dx++) {
            if (dx == 0 && dy == 0)
                continue;
            if (A[(col + dx) + X * (row + dy)] == '*')
                alive++;
        }
    }

    // Conway's rules: survive on 2 or 3 neighbours, birth on exactly 3.
    if (A[col + X * row] == '*') {
        B[col + X * row] = (alive == 2 || alive == 3) ? '*' : '-';
    } else {
        B[col + X * row] = (alive == 3) ? '*' : '-';
    }
}
/*
 * Interactive Game-of-Life driver: lets the user pick one of three seed
 * patterns, then runs the simulation forever, printing the board interior
 * each generation. Boards A (current) and B (next) live in CUDA managed
 * memory so both host printing and the ga kernel can touch them.
 * NOTE(review): cudaMallocManaged return codes are unchecked, and the
 * while(1) loop never exits, so the final return and the managed buffers
 * are never released — acceptable for a demo, but worth confirming.
 */
int main(void){
char *A, *B, *C;
int i, j, x;
// Managed allocations shared between host (printing) and device (kernel).
cudaMallocManaged (&A, X * Y * sizeof(char));
cudaMallocManaged (&B, X * Y * sizeof(char));
cudaDeviceSynchronize ();
// One 1-thread block per cell: X*Y blocks of a single thread each.
dim3 blocksPerGrid (X, Y, 1);
dim3 threadsPerBlock (1, 1, 1);
// Clear the whole board to dead ('-').
for(j = 0; j < Y; j++){
for(i = 0; i < X; i++){
*(A + i + X*j) = '-';
}
}
printf("select a configuration\n1. exploder\n2. 10 cell row\n3. gosper glider gun\n");
x = getchar();
if(x == '1'){
for(j=20;j<25;j++){ //exploder
*(A + 40 + X*j) = '*';
*(A + 44 + X*j) = '*';
}
*(A + 42 + X*20) = '*';
*(A + 42 + X*24) = '*';
}else if (x == '2'){
for(j=15,i=35;i<45;i++){ //10 cell row
*(A + i + X*j) = '*';
}
}else if (x == '3'){
// Hard-coded Gosper glider gun cells, laid out relative to (COL, ROW).
*(A + (COL) + X*(ROW)) = '*'; //gosper glider gun
*(A + (COL) + X*(ROW+1)) = '*';
*(A + (COL-2) + X*(ROW+1)) = '*';
*(A + (COL) + X*(ROW+5)) = '*';
*(A + (COL) + X*(ROW+6)) = '*';
*(A + (COL-3) + X*(ROW+2)) = '*';
*(A + (COL-4) + X*(ROW+2)) = '*';
*(A + (COL-4) + X*(ROW+3)) = '*';
*(A + (COL-3) + X*(ROW+3)) = '*';
*(A + (COL-3) + X*(ROW+4)) = '*';
*(A + (COL-4) + X*(ROW+4)) = '*';
*(A + (COL-2) + X*(ROW+5)) = '*';
*(A + (COL+10) + X*(ROW+2)) = '*';
*(A + (COL+10) + X*(ROW+3)) = '*';
*(A + (COL+11) + X*(ROW+2)) = '*';
*(A + (COL+11) + X*(ROW+3)) = '*';
*(A + (COL-7) + X*(ROW+5)) = '*';
*(A + (COL-8) + X*(ROW+4)) = '*';
*(A + (COL-8) + X*(ROW+5)) = '*';
*(A + (COL-8) + X*(ROW+6)) = '*';
*(A + (COL-9) + X*(ROW+3)) = '*';
*(A + (COL-9) + X*(ROW+7)) = '*';
*(A + (COL-10) + X*(ROW+5)) = '*';
*(A + (COL-11) + X*(ROW+2)) = '*';
*(A + (COL-11) + X*(ROW+8)) = '*';
*(A + (COL-12) + X*(ROW+2)) = '*';
*(A + (COL-12) + X*(ROW+8)) = '*';
*(A + (COL-13) + X*(ROW+3)) = '*';
*(A + (COL-13) + X*(ROW+7)) = '*';
*(A + (COL-14) + X*(ROW+4)) = '*';
*(A + (COL-14) + X*(ROW+5)) = '*';
*(A + (COL-14) + X*(ROW+6)) = '*';
*(A + (COL-23) + X*(ROW+4)) = '*';
*(A + (COL-23) + X*(ROW+5)) = '*';
*(A + (COL-24) + X*(ROW+4)) = '*';
*(A + (COL-24) + X*(ROW+5)) = '*';
}else{
printf("invalid selection\n");
return 0;
}
// Main loop: draw the board interior (3-cell margin hidden), step one
// generation on the GPU, swap current/next buffers, then pause DELAY ms.
// Runs until the process is killed — there is no exit path.
while(1){
system("clear");
printf("\n");
for(j = 3 ; j < (Y-3) ; j++){
for(i = 3 ; i < (X-3); i++){
printf("%c", *(A + i + X*j));
}
printf("\n");
}
ga <<< blocksPerGrid, threadsPerBlock>>> (A, B);
cudaDeviceSynchronize ();
// Pointer swap: B becomes the current board, old A is reused as next.
C = A;
A = B;
B = C;
usleep(DELAY*1000);
}
return 0;
}
|
14,940 | #include "includes.h"
// Pairwise product kernel: for each thread (i, j) with i != 0 and j != 0,
// writes first[i] * second[j] into newB at the row-major position
// tid = j * gridDim.x * blockDim.x + i. Thread (0, 0) instead writes a
// sign marker into newB[0]: '+' when first[0] == second[0], '-' otherwise.
// NOTE(review): size_first, size_second and size_newB are accepted but
// never read — no bounds checking is performed, so the launch geometry
// must match the buffer sizes. The reason row 0 / column 0 are skipped
// (threads with exactly one zero index write nothing at all) is not
// visible from this file — confirm against the caller.
__global__ void kernel_mul(char* newB, char* first, char* second, int size_first, int size_second, int * size_newB) {
int i = threadIdx.x;
int j = threadIdx.y;
// Flattened output index across the full grid width.
int tid = j * gridDim.x * blockDim.x + i ;
if(j!=0 && i!=0){
newB[tid] = first[i] * second[j];
}
if(j==0 && i==0){
// Sign marker slot: compares the two leading coefficients.
if(first[j] != second[i])
newB[0]='-';
else
newB[0]='+';
}
}
14,941 | //#include <omp.h>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <vector>
#include <sys/mman.h>
#include <sys/stat.h>
#include <iostream>
#include <fcntl.h>
#include <cmath>
using namespace std;
__device__ __managed__ float *x, *y, *z, gpuTotal;
// Accumulates the sum of 1/r over all unique particle pairs (i < j) into
// the managed global gpuTotal, reading coordinates from the managed
// arrays x, y, z. One thread per particle i; each thread folds its pairs
// into a local partial sum, then publishes it with a single atomicAdd
// (rsqrt is CUDA's reciprocal square root, hence 1/distance per pair).
// NOTE(review): thread i==0 zeroes gpuTotal *inside* the kernel. Blocks
// are not ordered, so another block's atomicAdd could land before the
// reset and be wiped out — gpuTotal should really be zeroed on the host
// before launch. Confirm whether the launch size makes this benign.
__global__ void calcGravity(const size_t n){
unsigned int i = threadIdx.x + blockDim.x * blockIdx.x;
if(i==0) gpuTotal=0;
if(i<n){
float result = 0;
float dx, dy, dz;
// Only pairs with j > i, so each pair is counted exactly once.
for(int j=i+1;j<n;j++){
dx = x[i]-x[j];
dy = y[i]-y[j];
dz = z[i]-z[j];
result+=rsqrt(dx*dx+dy*dy+dz*dz);
}
// One atomic per thread keeps contention on gpuTotal low.
atomicAdd(&gpuTotal, result);
}
}
/*
 * Gravity-sum driver: memory-maps a text file of whitespace-separated
 * x/y/z coordinates (one point per line), uploads the three columns to
 * CUDA managed memory, runs calcGravity (sum of 1/r over unique pairs),
 * and prints the negated total.
 * Fixes over the original: the missing-file / failed-mmap paths no longer
 * segfault (argc, stat, open and mmap are all checked), the scan index is
 * size_t so files larger than 2 GB are handled, and the managed buffers
 * are released before exit.
 * NOTE: a final line without a trailing '\n' is ignored by the line scan,
 * as in the original.
 */
int main(int argc, char* argv[]){
    if(argc < 2){
        cerr<<"usage: "<<argv[0]<<" <coordinate file>"<<endl;
        return 1;
    }
    char* &filename = argv[1];
    vector<const char*> lineAddrs;
    struct stat st;
    if(stat(filename, &st) != 0){
        perror("stat");
        return 1;
    }
    size_t filesize = st.st_size;
    int fd = open(filename,O_RDONLY,0);
    if(fd < 0){
        perror("open");
        return 1;
    }
    void* file = mmap(NULL, filesize, PROT_READ, MAP_PRIVATE | MAP_POPULATE, fd, 0);
    if(file == MAP_FAILED){
        perror("mmap");
        return 1;
    }
    const char* input = (const char*) file;
    int lines=0;
    // Record where every line starts; 'lines' counts newline-terminated lines.
    lineAddrs.push_back(input);
    cout<<"Reading file"<<endl;
    for(size_t i=0;i<filesize;i++){
        if(input[i]=='\n'){
            lines++;
            lineAddrs.push_back(input+i+1);
        }
    }
    cudaMallocManaged(&x, (size_t) lines*sizeof(float));
    cudaMallocManaged(&y, (size_t) lines*sizeof(float));
    cudaMallocManaged(&z, (size_t) lines*sizeof(float));
    for(int i=0;i<lines;i++){
        // Each line: "<x> <y> <z>"; strpbrk hops to the next numeric field.
        // assumes every line is well formed — a malformed line makes
        // strpbrk return NULL and crash; TODO confirm input is trusted.
        const char *a,*b,*c;
        a=lineAddrs[i];
        b=strpbrk(strpbrk(a," \t"),"-0123456789");
        c=strpbrk(strpbrk(b," \t"),"-0123456789");
        x[i]=atof(a);
        y[i]=atof(b);
        z[i]=atof(c);
    }
    munmap(file, filesize);
    const size_t block_size = 256;
    size_t grid_size = (lines + block_size -1)/ block_size;   // ceil-div
    cout<<"Sending to GPU"<<endl;
    // launch the kernel: one thread per point (see calcGravity)
    calcGravity<<<grid_size, block_size>>>(lines);
    cudaDeviceSynchronize();
    gpuTotal*=-1;
    cout<<gpuTotal<<endl;
    // Release the managed coordinate buffers.
    cudaFree(x);
    cudaFree(y);
    cudaFree(z);
    return 0;
}
|
// Warps a volume by the displacement fields (mx2, my2, mz2) using hardware
// trilinear sampling from the 3-D texture tex_img, writing into img1.
// Expected launch: 16x16x4 thread blocks tiling an nx x ny x nz volume.
// The iy/ix pairing below (iy with my2/mx2's x-slot, ix with the y-slot)
// mirrors the DVF index convention noted in this file's earlier revisions
// — presumably the DVFs are stored transposed relative to the image;
// confirm against the caller before changing.
// (A hand-rolled trilinear-interpolation fallback that previously lived
// here as commented-out code was removed; tex3D performs the same job.)
__global__ void kernel_deformation(float *img1, cudaTextureObject_t tex_img, float *mx2, float *my2, float *mz2, int nx, int ny, int nz){
    int ix = 16 * blockIdx.x + threadIdx.x;
    int iy = 16 * blockIdx.y + threadIdx.y;
    int iz = 4 * blockIdx.z + threadIdx.z;

    // Guard the grid tail.
    if (ix >= nx || iy >= ny || iz >= nz)
        return;

    // Flattened index: iy varies fastest, then ix, then iz.
    int id = iy + ix * ny + iz * nx * ny;

    // Displaced sample position (1-based), shifted by -0.5f at the
    // tex3D call to hit texel centres.
    float sx = iy + 1.0f + my2[id];
    float sy = ix + 1.0f + mx2[id];
    float sz = iz + 1.0f + mz2[id];
    img1[id] = tex3D<float>(tex_img, sx - 0.5f, sy - 0.5f, sz - 0.5f);
}
// int x = blockSize.x * blockIdx.x + threadIdx.x;
// int y = blockSize.y * blockIdx.y + threadIdx.y;
// int z = blockSize.z * blockIdx.z + threadIdx.z;
// if (x >= nx || y >= ny || z >= nz)
// return;
// int xi = mx2[x][y][z];
// int yi = my2[x][y][z];
// int zi = mz2[x][y][z];
// singleViewImg1[x][y][z] = tex3D<float>(tex_img, xi-0.5f, yi-0.5f, zi-0.5f);
// }
|
14,943 | #include "includes.h"
#define bufSize 700000
struct timeval startwtime,endwtime;
float *h_a; // Table at host
float *d_a; // Table at device
int tsize=0; // number of rows or columns
size_t size = 0 ; // size of table( tsize* tsize * sizeof(float*))
float* test;
void print(float *);
void make_table();
void serial();
void check();
void copytables();
/*
 * Floyd–Warshall relaxation for pivot k on an N x N distance matrix
 * stored row-major in A: A[i][j] = min(A[i][j], A[i][k] + A[k][j]).
 * One thread per (i, j) cell; expected 2-D launch covering at least N x N.
 * Fix: added the bounds guard — grids rarely divide N evenly, and the
 * original wrote out of bounds for threads past the matrix edge.
 */
__global__ void Kernel1(float *A,int N,int k){
    int i = blockDim.x * blockIdx.x + threadIdx.x;
    int j = blockDim.y * blockIdx.y + threadIdx.y;
    // Threads outside the matrix must not touch memory.
    if (i >= N || j >= N)
        return;
    if ( A[i*N+j] > A[i*N+k] + A[k*N+j] ){
        A[i*N+j] = A[i*N+k] + A[k*N+j];
    }
}
14,944 | #include "includes.h"
/*
 * Computes, for every (sweeper, mine) pair, the Euclidean distance between
 * them: one block row (blockIdx.y) per sweeper, one thread per mine.
 * Side effects: mirrors the sweeper position into the neural-net input
 * array, records each distance in distance_v, and when a mine lies within
 * 'size' of the sweeper, respawns it at a clock-derived pseudo-random
 * position and credits the sweeper's score.
 * Fix: the score increment is now atomic — several mine-threads in the
 * same block can satisfy distance < size in the same step, and the
 * original sweeper_score_v[sweeperIdx]++ raced and lost increments.
 * NOTE(review): mine_pos_v is interleaved x/y (mineIdx = threadIdx.x*2),
 * but sweeper_pos_v is read at sweeperIdx + threadIdx.x without a *2
 * stride — confirm the sweeper layout against the caller.
 */
__global__ void calculate_distances(float * sweeper_pos_v, float * mine_pos_v, int num_sweepers, int num_mines, float * distance_v, float * inputs, int * sweeper_score_v, int width, int height, int size)
{
#define sweeperIdx blockIdx.y
#define mineIdx threadIdx.x*2
    int distanceIdx = (blockIdx.y * num_mines) + threadIdx.x;
    float vec_x;
    float vec_y;
    float distance;

    // Stage this sweeper's (x, y) in shared memory; the first two threads
    // also copy it into the network-input slots for this sweeper.
    __shared__ float sweeper_pos[2];
    if (threadIdx.x < 2)
    {
        sweeper_pos[threadIdx.x] = sweeper_pos_v[sweeperIdx + threadIdx.x];
        inputs[((sweeperIdx * 4) + threadIdx.x) + 2] = sweeper_pos[threadIdx.x];
    }
    __syncthreads();

    // Euclidean distance from this sweeper to this thread's mine.
    vec_x = mine_pos_v[mineIdx] - sweeper_pos[0];
    vec_y = mine_pos_v[mineIdx + 1] - sweeper_pos[1];
    distance = sqrt((vec_x * vec_x) + (vec_y * vec_y));
    distance_v[distanceIdx] = distance;

    if (distance < size)
    {
        // Mine collected: respawn it at a clock-derived pseudo-random spot.
        mine_pos_v[mineIdx] = ((threadIdx.x + 1 ) * clock()) % width;
        mine_pos_v[mineIdx + 1] = ((threadIdx.x + 1) * clock()) % height;
        // was: sweeper_score_v[sweeperIdx]++;  (racy read-modify-write)
        atomicAdd(&sweeper_score_v[sweeperIdx], 1);
    }
#undef sweeperIdx
#undef mineIdx
}
14,945 | #include <thrust/device_vector.h>
#include <thrust/execution_policy.h>
#include <thrust/host_vector.h>
#include <thrust/sort.h>
#include <cuda_runtime.h>
#include <cassert>
#include <exception>
#include <iostream>
#include <iterator>
#include <numeric>
#include <random>
#include <string>
#define CE(err) \
{ \
if (err != cudaSuccess) \
{ \
std::cout << "CUDA error in " << __FUNCTION__ << " (" << __FILE__ \
<< ":" << __LINE__ << ") - " << cudaGetErrorString(err); \
std::terminate(); \
} \
}
constexpr size_t N = 3'000'000;
// Sort round-trip: fill a host vector with small random ints, copy it to
// the device, sort there, and copy the sorted data back to the host.
void case1()
{
    std::mt19937 rng(0);
    std::uniform_int_distribution<int> dist(0, 20);

    thrust::host_vector<int> host(N);
    thrust::generate(host.begin(), host.end(), [&]() { return dist(rng); });

    // H2D copy via assignment.
    thrust::device_vector<int> dev = host;

    // The actual kernel under test.
    thrust::sort(thrust::device, dev.begin(), dev.end());

    // D2H copy back.
    host = dev;
}
// Reduction: random ints copied to the device and summed with
// thrust::reduce; the resulting sum is discarded (timing-only case).
void case2()
{
    std::mt19937 rng(0);
    std::uniform_int_distribution<int> dist(0, 20);

    thrust::host_vector<int> host(N);
    thrust::generate(host.begin(), host.end(), [&]() { return dist(rng); });

    thrust::device_vector<int> dev = host;

    const int total = thrust::reduce(dev.begin(), dev.end());
    (void)total;  // result unused here
}
template <typename T>
struct k1
{
    // Binary plus functor callable from host and device code (equivalent to
    // thrust::plus<T>). FIX: operator() is now const so the functor can be
    // invoked through const references/copies inside thrust algorithms.
    __host__ __device__ T operator()(T a, T b) const
    {
        return a + b;
    }
};
void case3()
{
    // Element-wise sum of two pseudo-random device vectors via
    // thrust::transform with the k1 plus-functor.
    std::mt19937 engine(0);
    std::uniform_int_distribution<int> dist(0, 20);
    thrust::host_vector<int> hv1(N);
    thrust::host_vector<int> hv2(N);
    // Fill hv1 first, then hv2, continuing the same engine stream.
    for (auto& value : hv1)
        value = dist(engine);
    for (auto& value : hv2)
        value = dist(engine);
    // host -> device transfers
    thrust::device_vector<int> dv1 = hv1;
    thrust::device_vector<int> dv2 = hv2;
    ////////////////////////////////////////////////////////////////////////////
    thrust::device_vector<int> dvr(N);
    thrust::transform(
        dv1.begin(), dv1.end(), dv2.begin(), dvr.begin(), k1<int>());
}
void test()
{
    // Exercise each thrust benchmark case once.
    case1(); // device sort
    case2(); // device reduce
    case3(); // device transform
}
int main()
{
    // Run all cases; report any escaping std::exception as a failure.
    try
    {
        test();
        return 0;
    }
    catch (std::exception const& ex)
    {
        std::cout << "exception: " << ex.what() << "\n";
        return 1;
    }
}
|
14,946 | #include <thrust/device_vector.h>
#include <thrust/reduce.h>
#include <iostream>
int main(void)
{
    // Sum the sequence {10, 20, 30, 40} on the device and print the total.
    thrust::device_vector<int> data(4);
    int value = 10;
    for (int i = 0; i < 4; ++i, value += 10)
        data[i] = value;
    int sum = thrust::reduce(data.begin(), data.end());
    std::cout << "sum is " << sum << std::endl;
    return 0;
}
|
14,947 | #include <cuda_runtime.h>
#include <iostream>
#define BLOCK_SIZE 256
using namespace std;
__global__ void vectorInc(int* input, int* output, int numElements) {
    // One thread per element: output[i] = input[i] + 1.
    // The guard handles the partial last block.
    int gid = blockDim.x * blockIdx.x + threadIdx.x;
    if (gid >= numElements)
        return;
    output[gid] = input[gid] + 1;
}
// entry point
int main() {
    // Increments a 10-element array on the GPU and prints the result.
    const int numElements = 10;
    int host_input[numElements] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
    int* device_input;
    int* device_output;
    int* host_output;
    // Allocate host result buffer and device buffers, then upload the input.
    host_output = (int*) malloc(sizeof(int) * numElements);
    cudaMalloc((void**) &device_input, sizeof(int) * numElements);
    cudaMalloc((void**) &device_output, sizeof(int) * numElements);
    cudaMemcpy(device_input, host_input, sizeof(int) * numElements, cudaMemcpyHostToDevice);
    // Launch with enough blocks to cover all elements.
    dim3 blockDim(BLOCK_SIZE);
    dim3 gridDim(ceil(1.0 * numElements / BLOCK_SIZE));
    vectorInc<<<gridDim, blockDim>>>(device_input, device_output, numElements);
    // wait for device to finish
    cudaDeviceSynchronize();
    // copy answer back to host
    cudaMemcpy(host_output, device_output, sizeof(int) * numElements, cudaMemcpyDeviceToHost);
    // print the result
    for (int i = 0; i < numElements; i++) {
        cout << host_output[i] << " ";
    }
    cout << endl;
    // BUG FIX: the original additionally called free() on device_input and
    // device_output after cudaFree() -- undefined behavior (they are device
    // pointers and already released) -- and leaked host_output.
    cudaFree(device_output);
    cudaFree(device_input);
    free(host_output);
    return 0;
}
14,948 | #include "definitions.cuh"
#include "stdio.h"
#define RADIUS 1
//Performs CFD calculation on global memory. This code does not use any advance optimization technique on GPU
// But still acheives many fold performance gain
// Jacobi-style 5-point stencil step on an Ni x Nj grid stored row-major with
// stride Nj: each interior cell of `output` becomes the average of its four
// neighbours in `input` minus the 4*h*h source term. Boundary cells (first and
// last row/column) are never written. All accesses go through global memory.
// Launch: 2D grid covering at least Ni x Nj threads; out-of-range threads are
// filtered by the interior check below.
__global__ void calculateCFD_V1( float* input, float* output, unsigned int Ni, unsigned int Nj,
float h)
{
unsigned int i = blockDim.x * blockIdx.x + threadIdx.x; // Y - ID (row)
unsigned int j = blockDim.y * blockIdx.y + threadIdx.y; // X - ID (column)
unsigned int iPrev = i-1; // Previous Y element (wraps for i==0; guarded by the interior check)
unsigned int iNext = i+1; // Next Y element
unsigned int jPrev = j-1; //Previous X element (wraps for j==0; guarded by the interior check)
unsigned int jNext = j+1; // Next X element
unsigned int index = i * Nj + j;
// Interior cells only: skips the domain boundary and any overshoot threads.
if( i > 0 && j > 0 && i < (Ni-1) && j <(Nj-1))
output[index] = 0.25f * (input[iPrev * Nj + j] + input[iNext* Nj + j] + input[i * Nj+ jPrev]
+ input[i* Nj + jNext] - 4*h*h);
}
//This version of Kernel uses optimization by copying the data into shared memory and hence results in better performance
// Shared-memory tiled variant of calculateCFD_V1: each block stages its tile
// of `input` plus a halo of RADIUS cells into sData, synchronizes, then applies
// the same 5-point stencil reading only shared memory.
__global__ void calculateCFD_V2( float* input, float* output, unsigned int Ni, unsigned int Nj,
float h){
//printf("\nthread_per_block:x:%d,y:%d\n",blockDim.x,blockDim.y);
//32,16
//Current Global ID
unsigned int i = blockDim.x * blockIdx.x + threadIdx.x; // Y - ID
unsigned int j = blockDim.y * blockIdx.y + threadIdx.y; // X - ID
// Local tile coordinates, shifted by RADIUS to leave room for the halo.
unsigned int xlindex = threadIdx.x+RADIUS;
unsigned int ylindex = threadIdx.y+RADIUS;
// Fill the size of shared memory
__shared__ float sData [2*RADIUS+THREADS_PER_BLOCK_X][2*RADIUS+THREADS_PER_BLOCK_Y];
unsigned int index = (i)* Nj + (j) ;
// Every thread stages its own cell; there is no bounds guard here, so the
// launch grid must not overshoot the Ni x Nj domain (NOTE(review): confirm
// at the call site).
sData[xlindex][ylindex] = input[index];
if (threadIdx.x < RADIUS) {
// NOTE(review): the x-direction halo uses stride Ni (index +/- k*Ni) while
// `index` itself is built with stride Nj; these agree only when Ni == Nj.
// The commented-out transposed variant below uses consistent indexing --
// confirm which layout is intended.
if(blockIdx.x > 0)
sData[xlindex - RADIUS][ylindex] = input[index-Ni*RADIUS];
if(blockIdx.x < (gridDim.x-1))
sData[xlindex + THREADS_PER_BLOCK_X][ylindex] = input[index + THREADS_PER_BLOCK_X*Ni];
}
if (threadIdx.y < RADIUS)
{
if(blockIdx.y > 0)
sData[xlindex][ylindex - RADIUS] = input[index - RADIUS];
if(blockIdx.y < (gridDim.y - 1))
sData[xlindex][ylindex + THREADS_PER_BLOCK_Y] = input[index + THREADS_PER_BLOCK_Y];
}
// Barrier: all interior and halo loads must land before any thread reads sData.
__syncthreads();
//Add synchronization. Guess Why?
if( i > 0 && j > 0 && i < (Ni-1) && j <(Nj-1))
output[index] = 0.25f * (sData[xlindex-1][ylindex] + sData[xlindex+1][ylindex] + sData[xlindex][ylindex-1]
+ sData[xlindex][ylindex+1] - 4*h*h);
}
/*
__global__ void calculateCFD_V2( float* input, float* output, unsigned int Ni, unsigned int Nj,
float h){
//printf("\nthread_per_block:x:%d,y:%d\n",blockDim.x,blockDim.y);
//32,16
//Current Global ID
unsigned int j = blockDim.x * blockIdx.x + threadIdx.x; // Y - ID
unsigned int i = blockDim.y * blockIdx.y + threadIdx.y; // X - ID
unsigned int xlindex = threadIdx.x+RADIUS;
unsigned int ylindex = threadIdx.y+RADIUS;
// Fill the size of shared memory
__shared__ float sData [2*RADIUS+THREADS_PER_BLOCK_Y][2*RADIUS+THREADS_PER_BLOCK_X];
unsigned int index = (i)* Nj + (j) ;
sData[ylindex][xlindex] = input[index];
if (threadIdx.x < RADIUS) {
if(blockIdx.x > 0)
sData[ylindex][xlindex - RADIUS] = input[index-RADIUS];
if(blockIdx.x < (gridDim.x-1))
sData[ylindex][xlindex + THREADS_PER_BLOCK_X] = input[index + THREADS_PER_BLOCK_X];
}
if (threadIdx.y < RADIUS)
{
if(blockIdx.y > 0)
sData[ylindex - RADIUS][xlindex] = input[index - RADIUS*Ni];
if(blockIdx.y < (gridDim.y - 1))
sData[ylindex + THREADS_PER_BLOCK_Y][xlindex] = input[index + THREADS_PER_BLOCK_Y*Ni];
}
__syncthreads();
//Add synchronization. Guess Why?
if( i > 0 && j > 0 && i < (Ni-1) && j <(Nj-1))
output[index] = 0.25f * (sData[ylindex-1][xlindex] + sData[ylindex+1][xlindex] + sData[ylindex][xlindex-1]
+ sData[ylindex][xlindex+1] - 4*h*h);
}
*/ |
14,949 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <assert.h>
#include <cuda.h>
#define N 10000000
#define MAX_ERR 1e-6
__global__
void vector_add_kernel(float *out, float *a, float *b, int n){
    // Element-wise sum: out[i] = a[i] + b[i], one thread per element.
    // The guard handles the partial last block.
    int idx = threadIdx.x + blockDim.x * blockIdx.x;
    if (idx < n) {
        out[idx] = a[idx] + b[idx];
    }
}
__host__
void vector_add(float *out, float *a, float *b, int n){
    // Host wrapper: uploads a and b, launches the element-wise add kernel on
    // n elements, and downloads the result into out. The blocking
    // cudaMemcpy at the end also synchronizes with the kernel.
    int size = n * sizeof(float);
    float *d_A, *d_B, *d_out;
    cudaMalloc((void **) &d_A, size);
    cudaMemcpy(d_A, a, size, cudaMemcpyHostToDevice);
    cudaMalloc((void **) &d_B, size);
    cudaMemcpy(d_B, b, size, cudaMemcpyHostToDevice);
    cudaMalloc((void **) &d_out, size);
    int block_size = 256;
    // BUG FIX: the grid size and the kernel's length argument used the global
    // macro N instead of the parameter n, so any call with n != N would read
    // and write out of bounds (or launch far too many blocks).
    int total_block = (n + block_size - 1) / block_size;
    vector_add_kernel<<<total_block, block_size>>>(d_out, d_A, d_B, n);
    cudaMemcpy(out, d_out, size, cudaMemcpyDeviceToHost);
    cudaFree(d_A);
    cudaFree(d_B);
    cudaFree(d_out);
}
int main(){
    // Adds two N-element vectors on the GPU and verifies every element.
    float *a, *b, *out;
    // Allocate host buffers
    a = (float*)malloc(sizeof(float) * N);
    b = (float*)malloc(sizeof(float) * N);
    out = (float*)malloc(sizeof(float) * N);
    // Initialize input arrays
    for(int i = 0; i < N; i++){
        a[i] = 1.0f;
        b[i] = 2.0f;
    }
    // Run the GPU vector addition
    vector_add(out, a, b, N);
    // Verification: every element must match a[i] + b[i] within tolerance
    for(int i = 0; i < N; i++){
        assert(fabs(out[i] - a[i] - b[i]) < MAX_ERR);
    }
    printf("out[0] = %f\n", out[0]);
    printf("PASSED\n");
    // BUG FIX: the original leaked a, b and out and fell off main without
    // an explicit return.
    free(a);
    free(b);
    free(out);
    return 0;
}
|
14,950 | #include "includes.h"
// Splits each oversized aggregate (one aggregate per block) into two pieces.
// splitting[blockIdx.x] selects the aggregate to split; its node ids are
// loaded into shared memory, two mutually distant root nodes are found by two
// BFS passes, every node is then assigned to the nearer root's aggregate, and
// an uneven split is rebalanced by migrating border nodes. The final labels
// are written to aggregation[nodeId].
// Assumptions visible in the code: aggregate size <= 256 (shared arrays),
// <= 40 in-aggregate neighbours per node (neighbors[] capacity), node ids in
// aggMapAdjacency sorted ascending (the binary search relies on it), and a 1D
// block with blockDim.x >= aggSize.
// NOTE(review): the shared flags (incomplete, aCount, bCount, moved, rootA/B)
// are written by multiple threads between barriers; this follows the
// original's convergence-loop idiom and is left untouched -- verify with
// racecheck if results look nondeterministic.
__global__ void MakeSplits_Large(int baseAggregateIdx, int* splitting, int* aggregation, int* aggMapAdjIndices, int* aggMapAdjacency, int* adjIndices, int* adjacency) {
int currentAgg = splitting[blockIdx.x];
int aggBegin = aggMapAdjIndices[currentAgg];
int aggSize = aggMapAdjIndices[currentAgg + 1] - aggBegin;
int newAgg = baseAggregateIdx + blockIdx.x;
__shared__ int nodeIds[256];
__shared__ int scratchA[256];
__shared__ int scratchB[256];
__shared__ int rootA;
__shared__ int rootB;
__shared__ int aCount, bCount;
__shared__ bool incomplete;
incomplete = true;
aCount = 1;
bCount = 1;
// Load in the node Id's from the aggregate map to the shared array:
if (threadIdx.x < aggSize)
nodeIds[threadIdx.x] = aggMapAdjacency[aggBegin + threadIdx.x];
__syncthreads();
// Each thread loads it's neighbors list into registers, translating into
// aggregate offsets. Neighbours outside the aggregate are dropped.
int neighbors[40];
int nextNeighbor = 0;
int nodeId = -1;
if (threadIdx.x < aggSize)
{
nodeId = nodeIds[threadIdx.x];
int start = adjIndices[nodeId];
int end = adjIndices[nodeId + 1];
for (int i = start; i < end; i++)
{
int neighborId = adjacency[i];
// Binary search for neighborId in the sorted nodeIds array.
int a = 0, b = aggSize - 1, midpoint;
while (a < b)
{
midpoint = a + ((b - a) / 2);
if (nodeIds[midpoint] < neighborId)
a = midpoint + 1;
else
b = midpoint;
}
if (nodeIds[a] == neighborId)
{
neighbors[nextNeighbor++] = a;
}
}
}
__syncthreads();
// Find the farthest node from the lowest indexed node (first root point)
// Start by marking the first node and threads without a node as seen
// Mark initial distances in scratch vector
if (threadIdx.x < aggSize)
scratchA[threadIdx.x] = threadIdx.x == 0 ? 0 : -1;
int myDist = threadIdx.x == 0 ? 0 : -1;
bool swapped = false;
incomplete = true;
__syncthreads();
// BFS frontier expansion: the last thread to adopt a distance records itself
// as rootA (the farthest node from node 0).
while (incomplete)
{
// Set the incomplete flag to false
incomplete = false;
__syncthreads();
// Check if a neighbor has a positive distance
if (threadIdx.x < aggSize && myDist == -1)
{
for (int i = 0; i < nextNeighbor; i++)
{
int neighborDist = scratchA[neighbors[i]];
if (neighborDist > -1)
myDist = neighborDist + 1;
}
}
__syncthreads();
if (threadIdx.x < aggSize && myDist > 0 && !swapped)
{
swapped = true;
scratchA[threadIdx.x] = myDist;
rootA = threadIdx.x;
incomplete = true;
}
__syncthreads();
}
// Find the farthest node from the first root point (second root point)
// Start by marking the first node and threads without a node as seen
// Mark initial distances in scratch vector
if (threadIdx.x < aggSize)
scratchA[threadIdx.x] = threadIdx.x == rootA ? 0 : -1;
myDist = threadIdx.x == rootA ? 0 : -1;
swapped = false;
incomplete = true;
__syncthreads();
// Second BFS pass, identical structure; the last adopter becomes rootB.
while (incomplete)
{
// Set the incomplete flag to false
incomplete = false;
__syncthreads();
// Check if a neighbor has a positive distance
if (threadIdx.x < aggSize && myDist == -1)
{
for (int i = 0; i < nextNeighbor; i++)
{
int neighborDist = scratchA[neighbors[i]];
if (neighborDist > -1)
{
myDist = neighborDist + 1;
}
}
}
__syncthreads();
if (threadIdx.x < aggSize && myDist > 0 && !swapped)
{
swapped = true;
scratchA[threadIdx.x] = myDist;
rootB = threadIdx.x;
incomplete = true;
}
__syncthreads();
}
// Setting an assigned aggregate label (In ScratchA) for every node with the node at
// rootA being assigned the current aggregate ID and the node at rootB
// being assigned the newAgg ID and set initial distances from a root node
// (In ScratchB) for each node, -1 for unknown and 0 for the roots
int myAggregate = -1;
if (threadIdx.x == rootA)
myAggregate = currentAgg;
if (threadIdx.x == rootB)
myAggregate = newAgg;
if (threadIdx.x < aggSize)
{
scratchA[threadIdx.x] = myAggregate;
scratchB[threadIdx.x] = myAggregate > -1 ? 0 : -1;
}
incomplete = true;
__syncthreads();
// Assign nodes to each aggregate until no unassigned nodes remain.
while (incomplete)
{
// Set the incomplete flag to false
incomplete = false;
__syncthreads();
if (threadIdx.x < aggSize && myAggregate == -1)
{
for (int i = 0; i < nextNeighbor; i++)
{
int neighborAgg = scratchA[neighbors[i]];
if (neighborAgg > -1)
{
myDist = scratchB[neighbors[i]] + 1;
myAggregate = neighborAgg;
}
}
if (myAggregate == -1)
incomplete = true;
// Tally how many nodes landed in each side as they get labelled.
if (myAggregate == newAgg)
atomicAdd((unsigned int*)&bCount, (unsigned)1);
if (myAggregate == currentAgg)
atomicAdd((unsigned int*)&aCount, (unsigned)1);
}
__syncthreads();
if (threadIdx.x < aggSize)
{
scratchA[threadIdx.x] = myAggregate;
scratchB[threadIdx.x] = myDist;
}
__syncthreads();
}
// If the split was uneven try to repair it
int sizeDifference = aCount > bCount ? aCount - bCount : bCount - aCount;
bool moveToA = aCount < bCount;
__shared__ int moved;
moved = 0;
int toMove = sizeDifference / 2;
incomplete = true;
__syncthreads();
// Migrate up to toMove border nodes from the larger side to the smaller one.
while (incomplete && moved < toMove)
{
incomplete = false;
__syncthreads();
bool swapping = false;
int newDist = INT_MAX;
if (threadIdx.x < aggSize)
{
bool canSwap = moveToA ? myAggregate == newAgg : myAggregate == currentAgg;
bool borderNode = false;
// Check if this node has no same aggregate neighbors of higher distance
// and on a border
for (int i = 0; i < nextNeighbor; i++)
{
int neighborAgg = scratchA[neighbors[i]];
int neighborDist = scratchB[neighbors[i]];
if (neighborAgg == myAggregate && neighborDist > myDist)
canSwap = false;
if (neighborAgg != myAggregate)
{
if (neighborDist + 1 < newDist)
newDist = neighborDist + 1;
borderNode = true;
}
}
// If a node could swap see if it will
if (borderNode && canSwap && atomicAdd((unsigned int*)&moved, 1) < toMove)
{
swapping = true;
}
}
__syncthreads();
if (swapping)
{
// NOTE(review): myAggregate is intentionally not updated here, so a node
// migrates at most once per kernel -- confirm that is the intent.
int a = moveToA ? 1 : -1;
atomicAdd((unsigned int*)&bCount, -a);
atomicAdd((unsigned int*)&aCount, a);
scratchA[threadIdx.x] = moveToA ? currentAgg : newAgg;
scratchB[threadIdx.x] = newDist;
incomplete = true;
}
__syncthreads();
}
// Write out the values to the aggregation array
if (threadIdx.x < aggSize)
{
aggregation[nodeId] = scratchA[threadIdx.x];
}
}
14,951 | #include "includes.h"
__global__ void integrateBinsT(int width, int height, int nbins, int binPitch, int* devIntegrals) {
    // In-place running (prefix) sum along x for every (row, bin) pair:
    // one thread per bin (threadIdx.x) and per row (blockDim.y rows per block).
    // Layout: devIntegrals[(y * width + x) * binPitch + bin].
    // NOTE(review): nbins is unused here; the guard uses binPitch -- confirm
    // binPitch >= nbins at the call site.
    const int row = blockDim.y * blockIdx.x + threadIdx.y;
    const int bin = threadIdx.x;
    if (row >= height || bin >= binPitch)
        return;
    int* cursor = devIntegrals + binPitch * row * width + bin;
    int runningSum = 0;
    for (int x = 0; x < width; ++x) {
        runningSum += *cursor;
        *cursor = runningSum;
        cursor += binPitch; // advance to the same bin in the next x column
    }
}
14,952 | // nvcc iso2PCF.cu -o par.out && ./par.out data_5K.dat rand0_5K.dat 5000 30 180
#include <iostream>
#include <fstream>
#include <string.h>
#include <time.h>
#include <math.h>
using namespace std;
//Point with weight value. Structure
struct PointW3D{
float x; // x coordinate
float y; // y coordinate
float z; // z coordinate
float w; // point weight
};
struct Node{
int len; // Number of points in the node
PointW3D *elements; // Points in the node (CUDA managed-memory array)
};
void open_files(string name_file, int pts, PointW3D *datos){
/* Opens the data files. Receives the file location, number of points to read
and the array of points where the data is stored. Exits the program if the
file cannot be opened. */
ifstream file;
string mypathto_files = "../../../fake_DATA/DATOS/";
//This creates the full path to where I have my data files
name_file.insert(0,mypathto_files);
// NOTE(review): the file is read below with formatted `>>` extraction, i.e.
// as whitespace-separated text; ios::binary only disables newline translation
// here -- confirm the data files really are text.
file.open(name_file.c_str(), ios::in | ios::binary); //Tells the program this is a binary file using ios::binary
if (file.fail()){
cout << "Failed to load the file in " << name_file << endl;
exit(1);
}
// Reads pts lines of "x y z w" into datos. Individual extraction failures
// (short or malformed file) are not detected here.
for ( int c = 0; c < pts; c++) //Reads line by line and stores each c line in the c PointW3D element of the array
{
file >> datos[c].x >> datos[c].y >> datos[c].z >> datos[c].w;
}
file.close();
}
//====================================================================
void save_histogram(string name, int bns, double *histo){
    /* Writes a one dimensional histogram to disk, one bin value per line.
       Receives the output file name, the number of bins and the histogram
       array. Exits the program if the file cannot be created. */
    ofstream out;
    out.open(name.c_str(), ios::out | ios::binary);
    if (out.fail()){
        cout << "Failed to save the the histogram in " << name << endl;
        exit(1);
    }
    for (int bin = 0; bin < bns; bin++){
        out << histo[bin] << endl;
    }
    out.close();
}
//====================================================================
void save_histogram(string name, int bns, float *histo){
    /* Writes a one dimensional histogram to disk, one bin value per line
       (float overload). Receives the output file name, the number of bins and
       the histogram array. Exits the program if the file cannot be created. */
    ofstream out;
    out.open(name.c_str(), ios::out | ios::binary);
    if (out.fail()){
        cout << "Failed to save the the histogram in " << name << endl;
        exit(1);
    }
    for (int bin = 0; bin < bns; bin++){
        out << histo[bin] << endl;
    }
    out.close();
}
//===================================================================
void add(PointW3D *&array, int &lon, float _x, float _y, float _z, float _w){
/*
This function manages adding points to an specific Node. It receives the previous array, longitude and point to add
and updates the previous array and length with the same array with the new point at the end and adds +1 to the length +1
It manages the memory allocation and free of the previous and new elements.
Performance note: every call allocates a new managed buffer and copies all
existing points, so building a node of n points costs O(n^2) copies and one
cudaMallocManaged per point. The allocation result is not checked.
*/
lon++;
PointW3D *array_aux;
cudaMallocManaged(&array_aux, lon*sizeof(PointW3D));
// Copy the existing points into the new, one-element-larger buffer.
for (int i=0; i<lon-1; i++){
array_aux[i].x = array[i].x;
array_aux[i].y = array[i].y;
array_aux[i].z = array[i].z;
array_aux[i].w = array[i].w;
}
// Release the old buffer and append the new point at the end.
cudaFree(array);
array = array_aux;
array[lon-1].x = _x;
array[lon-1].y = _y;
array[lon-1].z = _z;
array[lon-1].w = _w;
}
void make_nodos(Node ***nod, PointW3D *dat, unsigned int partitions, float size_node, unsigned int np){
/*
This function classifies the data in the nodes
Args
nod: Node 3D array where the data will be classified
dat: array of PointW3D data to be classified and stored in the nodes
partitions: number nodes in each direction
size_node: dimensions of a single node
np: number of points in the dat array
NOTE(review): the cell indices below assume every coordinate lies in
[0, partitions*size_node); a point exactly on the upper box edge would index
one past the grid -- confirm the input data is clamped.
*/
int row, col, mom;
// First allocate memory as an empty node:
for (row=0; row<partitions; row++){
for (col=0; col<partitions; col++){
for (mom=0; mom<partitions; mom++){
nod[row][col][mom].len = 0;
cudaMallocManaged(&nod[row][col][mom].elements, sizeof(PointW3D));
}
}
}
// Classificate the ith elment of the data into a node and add that point to the node with the add function:
for (int i=0; i<np; i++){
row = (int)(dat[i].x/size_node);
col = (int)(dat[i].y/size_node);
mom = (int)(dat[i].z/size_node);
add(nod[row][col][mom].elements, nod[row][col][mom].len, dat[i].x, dat[i].y, dat[i].z, dat[i].w);
}
}
//====================================================================
//============ Kernels Section =======================================
//====================================================================
__device__ void count_distances11(float *XX, PointW3D *elements, int len, float ds, float dd_max, int sum){
    /*
    This device function counts the distances between points within one node.
    Args:
        XX: histogram the distances are counted into
        elements: array of PointW3D points inside the node
        len: length of the elements array
        ds: number of bins divided by the maximum distance; maps a distance to its bin
        dd_max: squared maximum distance of interest
        sum: amount added to a bin for every pair found in it
    */
    int bin;
    float d;
    float x1,y1,z1,x2,y2,z2;
    for (int i=0; i<len-1; ++i){
        x1 = elements[i].x;
        y1 = elements[i].y;
        z1 = elements[i].z;
        for (int j=i+1; j<len; ++j){
            x2 = elements[j].x;
            y2 = elements[j].y;
            z2 = elements[j].z;
            d = (x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1);
            // BUG FIX: was `d <= dd_max + 1`, which allowed sqrt(d) > d_max and
            // therefore bin >= bn -- an out-of-bounds atomicAdd on XX. The
            // strict `<` keeps bin inside the histogram. Also sqrtf instead of
            // the double-precision sqrt.
            if (d < dd_max){
                bin = (int)(sqrtf(d)*ds);
                atomicAdd(&XX[bin],sum);
            }
        }
    }
    // NOTE(review): the original computed v = 2*w1*w2 from the point weights but
    // never used it; presumably the weighted product was meant to be accumulated
    // instead of `sum` -- confirm against the intended estimator.
}
__device__ void count_distances12(float *XX, PointW3D *elements1, int len1, PointW3D *elements2, int len2, float ds, float dd_max, int sum){
    /*
    This device function counts the distances between points in two different nodes.
    Args:
        XX: histogram the distances are counted into
        elements1: array of PointW3D points inside the first node
        len1: length of the first elements array
        elements2: array of PointW3D points inside the second node
        len2: length of the second elements array
        ds: number of bins divided by the maximum distance; maps a distance to its bin
        dd_max: squared maximum distance of interest
        sum: amount added to a bin for every pair found in it
    */
    int bin;
    float d;
    float x1,y1,z1,x2,y2,z2;
    for (int i=0; i<len1; ++i){
        x1 = elements1[i].x;
        y1 = elements1[i].y;
        z1 = elements1[i].z;
        for (int j=0; j<len2; ++j){
            x2 = elements2[j].x;
            y2 = elements2[j].y;
            z2 = elements2[j].z;
            d = (x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1);
            // BUG FIX: was `d <= dd_max + 1`, letting sqrt(d) exceed d_max so
            // bin could reach/exceed the histogram length (out-of-bounds
            // atomicAdd). Also sqrtf instead of the double-precision sqrt.
            if (d < dd_max){
                bin = (int)(sqrtf(d)*ds);
                atomicAdd(&XX[bin],sum);
            }
        }
    }
    // NOTE(review): the original computed v = 2*w1*w2 from the point weights but
    // never used it; presumably the weighted product was meant to be accumulated
    // instead of `sum` -- confirm against the intended estimator.
}
__device__ void BPC_loop(float *XX, Node ***nodeD, int row, int col, int mom, int partitions, int did_max, float dd_max, float ds, int sum, float size_box, bool x_border, bool y_border, bool z_border, bool x_upperborder, bool y_upperborder, bool z_upperborder, bool x_lowerborder, bool y_lowerborder, bool z_lowerborder){
/*
Counts pair distances between the points of node (row,col,mom) and the points
of candidate partner nodes, applying a periodic-boundary image shift of
size_box along each axis whose *_border flag is set.
Args:
XX: histogram where the distances are counted
nodeD: full 3D array of nodes
row, col, mom: grid position of the current node
partitions: number of nodes per axis
did_max: maximum node-index separation considered (did_max2 = did_max^2)
dd_max: squared maximum distance of interest
ds: number of bins divided by the maximum distance (distance -> bin)
sum: amount added to a bin per pair found
size_box: side length of the whole box (the periodic image shift)
x/y/z_border + upper/lower flags: which boundary images to apply
*/
int bin, d_node, u, v, w, did_max2=did_max*did_max;
float d, s;
float x1,y1,z1,w1,dx12,dy12,dz12,w2;
// NOTE(review): the partner-node ranges below are hard-coded to a fixed
// 10-cell inset; the commented expressions are the intended border-dependent
// bounds -- this looks like work-in-progress/debug code. Confirm before use.
int x_from = 10; //((row-did_max)*(row>did_max))*(!x_border) + (partitions-(did_max-row))*(x_lowerborder&&!x_upperborder);
int x_to = partitions-10; //(partitions-1)*((row+did_max>partitions-1 && !x_upperborder)||x_lowerborder) + (row+did_max)*((row+did_max<partitions)&&!x_border) + (!x_lowerborder&&x_upperborder)*(x_from+(did_max-(partitions-1-row)));
int y_from = 10; //((col-did_max)*(col>did_max))*(!y_border) + (partitions-(did_max-col))*(y_lowerborder&&!y_upperborder);
int y_to = partitions-10; // (partitions-1)*((col+did_max>partitions-1 && !y_upperborder)||y_lowerborder) + (col+did_max)*((col+did_max<partitions)&&!y_border) + (!y_lowerborder&&y_upperborder)*(y_from+(did_max-(partitions-1-col)));
int z_from = 10; // ((mom-did_max)*(mom>did_max))*(!z_border) + (partitions-(did_max-mom))*(z_lowerborder&&!z_upperborder);
int z_to = partitions-10; //(partitions-1)*((mom+did_max>partitions-1 && !z_upperborder)||z_lowerborder) + (mom+did_max)*((mom+did_max<partitions)&&!z_border) + (!z_lowerborder&&z_upperborder)*(z_from+(did_max-(partitions-1-mom)));
//If the z direction is not the nearest border the z index it is 0 if mom<did_max or mom-did-max otherwise.
//If both z borders or ONLY the upper z border are the nearest borders the z index starts from 0
//If ONLY the lower z border is the nearest the z index starts from partitions-(did_max-mom)
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//If both borders are the nearest the highest limit is partitions-1
//If the lower border is the nearest the highes limit is partitions-1
//If the upper border is not the nerarest and mom+did_max>partitions the highest limit is partitions-1
//If this is not the border side and mom+did_max< paritions then the highest limit is mom+did_max
//If only the upper border is the nearest border the higher limit is the lower limit + (did_max-(partitions-1-mom))
for (u=x_from; u<=x_to; u++){
for (v=y_from; v<=y_to; v++){
for (w=z_from; w<=z_to; w++){
// Only partner nodes within did_max node-lengths are considered.
d_node=(w-mom)*(w-mom) + (v-col)*(v-col) + (u-row)*(u-row);
if (d_node<=did_max2){
for (int i=0; i<nodeD[row][col][mom].len; ++i){
x1 = nodeD[row][col][mom].elements[i].x;
y1 = nodeD[row][col][mom].elements[i].y;
z1 = nodeD[row][col][mom].elements[i].z;
w1 = nodeD[row][col][mom].elements[i].w;
for (int j=0; j<nodeD[u][v][w].len; ++j){
// Component distances with the periodic image shift applied per axis.
dx12 = fabsf(x1-nodeD[u][v][w].elements[j].x) - size_box*x_border;
dy12 = fabsf(y1-nodeD[u][v][w].elements[j].y) - size_box*y_border;
dz12 = fabsf(z1-nodeD[u][v][w].elements[j].z) - size_box*z_border;
w2 = nodeD[u][v][w].elements[j].w;
d = dx12*dx12 + dy12*dy12 + dz12*dz12;
if (d<=dd_max+1){
// NOTE(review): the +1 slack lets bin reach/exceed the histogram
// length, and s = 2*w1*w2 is computed but never used (sum is added
// instead) -- same issues as in count_distances11/12.
bin = (int)(sqrt(d)*ds);
s = 2*w1*w2;
atomicAdd(&XX[bin],sum);
}
}
}
}
}
}
}
}
__global__ void BPC_XX(float *XX_A, float *XX_B, Node ***nodeD, float ds, float d_max, float size_node, float size_box){
/*
Periodic-boundary-condition pair counting: one thread per node; threads whose
node is near any box border would count distances against the node's periodic
images via BPC_loop.
Args:
XX_A, XX_B: subhistograms the distances would be counted into
nodeD: full 3D array of nodes
ds: number of bins divided by the maximum distance (distance -> bin)
d_max: maximum distance of interest
size_node: side length of a single node
size_box: side length of the whole box
NOTE(review): the entire BPC_loop dispatch below is commented out; as written
this kernel only increments XX_A[0] once per border node, i.e. XX_A[0] is
(ab)used as a border-node counter for debugging. Confirm before relying on
either histogram.
*/
int idx = blockIdx.x * blockDim.x + threadIdx.x;
int partitions = (int)(ceilf(size_box/size_node));
if (idx<(partitions*partitions*partitions)){
//Get the node positon in this thread
int mom = (int) (idx/(partitions*partitions));
int col = (int) ((idx%(partitions*partitions))/partitions);
int row = idx%partitions;
//printf("%i, %i, %i \n", mom, col,row);
//This may see redundant but with this these often checked values are upgraded to device memory
float dd_max = d_max*d_max;
int did_max = (int)(ceilf((d_max+size_node*sqrt(3.0))/size_node));
if (idx==0){
printf("Partitions: %i, did_max: %i\n", partitions, did_max);
}
if (nodeD[row][col][mom].len > 0 && (row<did_max-1 || partitions-row<did_max || col<did_max-1 || partitions-col<did_max || mom<did_max-1 || partitions-mom<did_max)){
//Only if the current node has elements and it is near to any border does the thread will be active
atomicAdd(&XX_A[0],1); //Count how many nodes are considered as near a border
/*
bool x_border=false, y_border=false, z_border=false, x_upperborder=false, y_upperborder=false, z_upperborder=false, x_lowerborder=false, y_lowerborder=false, z_lowerborder=false;
x_border=(row<did_max-1 || partitions-row<did_max);
if (x_border){
x_upperborder=partitions-row<did_max;
x_lowerborder=row<did_max-1;
BPC_loop(XX_A, nodeD, row, col, mom, partitions, did_max, dd_max, ds, 2, size_box, x_border, false, false, x_upperborder, false, false, x_lowerborder, false, false);
}
y_border=(col<did_max-1 || partitions-col<did_max);
if (y_border){
y_upperborder=partitions-col<did_max;
y_lowerborder=col<did_max-1;
//Only Y boundaries
BPC_loop(XX_B, nodeD, row, col, mom, partitions, did_max, dd_max, ds, 2, size_box, false, y_border, false, false, y_upperborder, false, false, y_lowerborder, false);
if (x_border){
//Boundaries in the XY walls
BPC_loop(XX_A, nodeD, row, col, mom, partitions, did_max, dd_max, ds, 2, size_box, x_border, y_border, false, x_upperborder, y_upperborder, false, x_lowerborder, y_lowerborder, false);
}
}
z_border=(mom<did_max-1 || partitions-mom<did_max);
if (z_border){
z_upperborder=partitions-mom<did_max;
z_lowerborder=mom<did_max-1;
//Only Z boundaries
BPC_loop(XX_B, nodeD, row, col, mom, partitions, did_max, dd_max, ds, 2, size_box, false, false, z_border, false, false, z_upperborder, false, false, z_lowerborder);
if (x_border){
//For the ZY corner
BPC_loop(XX_A, nodeD, row, col, mom, partitions, did_max, dd_max, ds, 2, size_box, x_border, false, z_border, x_upperborder, false, z_upperborder, x_lowerborder, false, z_lowerborder);
if (y_border){
//For the XYZ corner
BPC_loop(XX_B, nodeD, row, col, mom, partitions, did_max, dd_max, ds, 2, size_box, x_border, y_border, z_border, x_upperborder, y_upperborder, z_upperborder, x_lowerborder, y_lowerborder, z_lowerborder);
}
}
if (y_border){
//For the YZ
BPC_loop(XX_A, nodeD, row, col, mom, partitions, did_max, dd_max, ds, 2, size_box, false, y_border, z_border, false, y_upperborder, z_upperborder, false, y_lowerborder, z_lowerborder);
}
}
*/
}
}
}
// Autocorrelation (data x data) pair counting: one thread per node. Each
// thread counts pairs inside its own node, then against partner nodes "ahead"
// of it in the grid traversal (so every unordered node pair is visited exactly
// once). Even thread ids accumulate into XX_A and odd ids into XX_B, halving
// atomic contention; the two subhistograms are summed on the host.
__global__ void make_histoXX(float *XX_A, float *XX_B, Node ***nodeD, float ds, float d_max, float size_node, float size_box){
int idx = blockIdx.x * blockDim.x + threadIdx.x;
int partitions = (int)(ceilf(size_box/size_node));
if (idx<(partitions*partitions*partitions)){
//Get the node positon in this thread
int mom = (int) (idx/(partitions*partitions));
int col = (int) ((idx%(partitions*partitions))/partitions);
int row = idx%partitions;
//printf("%i, %i, %i \n", mom, col,row)
if (nodeD[row][col][mom].len > 0){
//This may see redundant but with this these often checked values are upgraded to device memory
float dd_max = d_max*d_max;
int did_max = (int)(ceilf((d_max+size_node*sqrt(3.0))/size_node));
int did_max2 = did_max*did_max;
// Counts distances betweeen the same node
if (idx%2==0){ //If the main index is even stores the countings in the XX_A subhistogram
count_distances11(XX_A, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, ds, dd_max, 2);
} else { //If the main index is odd stores the countings in the XX_B subhistogram
count_distances11(XX_B, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, ds, dd_max, 2);
}
int u,v,w; // Position index of the second node
int dx_nod12, dy_nod12, dz_nod12, dd_nod12; //Internodal distance
//Second node movil in Z direction
// NOTE(review): the bound below compares w-row against did_max although w
// moves along the z (mom) axis; presumably it should be w-mom -- confirm.
for(w = mom+1; w<partitions && w-row<=did_max; w++){
if (idx%2==0){ //If the main index is even stores the countings in the XX_A subhistogram
count_distances12(XX_A, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeD[row][col][w].elements, nodeD[row][col][w].len, ds, dd_max, 2);
} else { //If the main index is odd stores the countings in the XX_B subhistogram
count_distances12(XX_B, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeD[row][col][w].elements, nodeD[row][col][w].len, ds, dd_max, 2);
}
}
//Second node movil in YZ
for(v=col+1; v<partitions && v-col<=did_max; v++){
dy_nod12 = v-col;
for(w=(mom-did_max)*(mom>did_max); w<partitions && w-mom<=did_max; w++){
dz_nod12 = w-mom;
dd_nod12 = dz_nod12*dz_nod12 + dy_nod12*dy_nod12;
if (dd_nod12<=did_max2){
if (idx%2==0){
count_distances12(XX_A, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeD[row][v][w].elements, nodeD[row][v][w].len, ds, dd_max, 2);
} else {
count_distances12(XX_B, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeD[row][v][w].elements, nodeD[row][v][w].len, ds, dd_max, 2);
}
}
//}
}
}
//Second node movil in XYZ
// NOTE(review): these loops use strict `< did_max` while the Z/YZ loops use
// `<=` -- the outermost shell of nodes may be skipped here; confirm intent.
for(u = row+1; u < partitions && u-row< did_max; u++){
dx_nod12 = u-row;
for(v = (col-did_max)*(col>did_max); v < partitions && v-col< did_max; v++){
dy_nod12 = v-col;
for(w = (mom-did_max)*(mom>did_max); w < partitions && w-mom< did_max; w++){
dz_nod12 = w-mom;
dd_nod12 = dz_nod12*dz_nod12 + dy_nod12*dy_nod12 + dx_nod12*dx_nod12;
if (dd_nod12<=did_max2){
if (idx%2==0){
count_distances12(XX_A, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeD[u][v][w].elements, nodeD[u][v][w].len, ds, dd_max, 2);
} else {
count_distances12(XX_B, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeD[u][v][w].elements, nodeD[u][v][w].len, ds, dd_max, 2);
}
}
}
}
}
}
}
}
// Cross (data x random) pair counting: one thread per data node; every random
// node within did_max node-lengths is paired against it. Even thread ids
// accumulate into XY_A and odd ids into XY_B to halve atomic contention.
__global__ void make_histoXY(float *XY_A, float *XY_B, Node ***nodeD, Node ***nodeR, float ds, float d_max, float size_node, float size_box){
int idx = blockIdx.x * blockDim.x + threadIdx.x;
int partitions = (int)(ceilf(size_box/size_node));
if (idx<(partitions*partitions*partitions)){
//Get the node positon in this thread
int mom = (int) (idx/(partitions*partitions));
int col = (int) ((idx%(partitions*partitions))/partitions);
int row = idx%partitions;
if (nodeD[row][col][mom].len > 0){
//This may see redundant but with this these often checked values are upgraded to device memory
float dd_max = d_max*d_max;
int did_max = (int)(ceilf((d_max+size_node*sqrt(3.0))/size_node));
int did_max2 = did_max*did_max;
int u,v,w; //Position of the second node
// NOTE(review): dx/dy/dz_nod12 are unsigned, so u<row etc. wrap around;
// squaring and summing still yields the correct small value modulo 2^32
// (|-k|^2 == k^2 in modular arithmetic), so the filter below behaves
// correctly -- but signed ints would be much clearer.
unsigned int dx_nod12, dy_nod12, dz_nod12, dd_nod12;
//Second node movil in XYZ
// The bounded loop starts are disabled (commented); scanning from 0 is
// correct but does O(partitions) extra iterations per axis.
//for(u = (row-did_max)*(row>did_max); u < partitions && u-row< did_max; u++){
for(u = 0; u < partitions && u-row< did_max; u++){
dx_nod12 = u-row;
//for(v = (col-did_max)*(col>did_max); v < partitions && v-col< did_max; v++){
for(v = 0; v < partitions && v-col< did_max; v++){
dy_nod12 = v-col;
//for(w = (mom-did_max)*(mom>did_max); w < partitions && w-mom< did_max; w++){
for(w = 0; w < partitions && w-mom< did_max; w++){
dz_nod12 = w-mom;
dd_nod12 = dz_nod12*dz_nod12 + dy_nod12*dy_nod12 + dx_nod12*dx_nod12;
if (dd_nod12<=did_max2){
if (idx%2==0){
count_distances12(XY_A, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeR[u][v][w].elements, nodeR[u][v][w].len, ds, dd_max, 1);
} else {
count_distances12(XY_B, nodeD[row][col][mom].elements, nodeD[row][col][mom].len, nodeR[u][v][w].elements, nodeR[u][v][w].len, ds, dd_max, 1);
}
}
}
}
}
}
}
}
__global__ void make_analyticRR(float *RR, float d_max, int bn, float size_box, int n_pts){
    /*
    Analytic RR histogram for a uniform periodic box: bin a receives the
    expected pair count alph * ((a+1)^3 - a^3), i.e. the volume of the
    spherical shell [a*dr, (a+1)*dr) times the pair density n_pts^2 / V.
    One thread per bin.
    */
    // BUG FIX: use the global thread index. The original used threadIdx.x
    // alone, so when gridDim.x > 1 every block recomputed and atomically
    // added the same contribution, scaling RR by the number of blocks.
    int a = blockIdx.x * blockDim.x + threadIdx.x;
    if (a < bn){
        float dr = (d_max/bn);
        float V = size_box*size_box*size_box;
        // BUG FIX: cast before multiplying -- n_pts*n_pts overflows int for
        // n_pts > 46340.
        float beta1 = (float)n_pts*n_pts/V;
        // 2*acosf(0) == pi, so alph = (4/3) * pi * beta1 * dr^3.
        float alph = 4*(2*acosf(0.0))*(beta1)*dr*dr*dr/3;
        float r_lo = (float) a;     // shell bounds in units of dr
        float r_hi = r_lo + 1.0f;
        float sum = alph*((r_hi*r_hi*r_hi)-(r_lo*r_lo*r_lo));
        // Atomic kept so repeated launches accumulate instead of clobbering.
        atomicAdd(&RR[a],sum);
    }
}
int main(int argc, char **argv){
    /*
    Host driver for the isotropic 2-point correlation histograms.
    argv[1]: data catalog file, argv[2]: random catalog file,
    argv[3]: number of points, argv[4]: number of bins, argv[5]: max distance.
    Writes DDiso.dat, RRiso.dat, DRiso.dat.
    */
    unsigned int np = stoi(argv[3]), bn = stoi(argv[4]);
    float dmax = stof(argv[5]);
    float ds = ((float)(bn))/dmax, size_box = 250.0, alpha = 2.176;
    float size_node = alpha*(size_box/pow((float)(np),1/3.));
    unsigned int partitions = (int)(ceil(size_box/size_node));
    float *DD_A, *DR_A, *DD_B, *DR_B, *RR;
    double *DD, *DR;
    PointW3D *dataD;
    PointW3D *dataR;
    cudaMallocManaged(&dataD, np*sizeof(PointW3D));
    cudaMallocManaged(&dataR, np*sizeof(PointW3D));
    // Names of the files where the results are saved
    string nameDD = "DDiso.dat", nameRR = "RRiso.dat", nameDR = "DRiso.dat";
    // Main histograms accumulate in double precision on the host; the
    // per-parity subhistograms used by the kernels are single precision.
    DD = new double[bn];
    DR = new double[bn];
    // BUG FIX: RR is written by the make_analyticRR kernel, so it must be
    // device-accessible. The original allocated it with new[] (host heap) and
    // passed that pointer to the kernel -> illegal address on the GPU.
    cudaMallocManaged(&RR, bn*sizeof(float));
    cudaMallocManaged(&DD_A, bn*sizeof(float));
    cudaMallocManaged(&DR_A, bn*sizeof(float));
    cudaMallocManaged(&DD_B, bn*sizeof(float));
    cudaMallocManaged(&DR_B, bn*sizeof(float));
    //Initialize the histograms in 0
    for (int i = 0; i < bn; i++){
        DD[i] = 0;
        RR[i] = 0;
        DR[i] = 0;
        DD_A[i] = 0;
        DR_A[i] = 0;
        DD_B[i] = 0;
        DR_B[i] = 0;
    }
    // Open and read the files to store the data in the arrays
    open_files(argv[1], np, dataD);
    open_files(argv[2], np, dataR);
    // Node grids in managed memory so both host and device can walk them.
    Node ***nodeD;
    Node ***nodeR;
    cudaMallocManaged(&nodeR, partitions*sizeof(Node**));
    cudaMallocManaged(&nodeD, partitions*sizeof(Node**));
    for (int i=0; i<partitions; i++){
        cudaMallocManaged(&nodeR[i], partitions*sizeof(Node*));
        cudaMallocManaged(&nodeD[i], partitions*sizeof(Node*));
        for (int j=0; j<partitions; j++){
            cudaMallocManaged(&nodeR[i][j], partitions*sizeof(Node));
            cudaMallocManaged(&nodeD[i][j], partitions*sizeof(Node));
        }
    }
    //Classificate the data into the nodes
    make_nodos(nodeD, dataD, partitions, size_node, np);
    make_nodos(nodeR, dataR, partitions, size_node, np);
    //Get the dimensions of the GPU grid
    int blocks = (int)(ceil((float)((partitions*partitions*partitions)/512.0)));
    dim3 grid(blocks,1,1);
    dim3 block(512,1,1);
    // NOTE(review): clock() measures CPU time, not wall time.
    clock_t begin = clock();
    //Launch the kernels
    //make_histoXX<<<grid,block>>>(DD_A, DD_B, nodeD, ds, dmax, size_node, size_box);
    BPC_XX<<<grid,block>>>(DD_A, DD_B, nodeD, ds, dmax, size_node, size_box);
    //make_histoXY<<<grid,block>>>(DR_A, DR_B, nodeD, nodeR, ds, dmax, size_node, size_box);
    blocks = (int)(ceil((float)(bn)/512.0));
    dim3 grid_a(blocks,1,1);
    dim3 block_a(512,1,1);
    make_analyticRR<<<grid_a,block_a>>>(RR, dmax, bn, size_box, np);
    //Waits for the GPU to finish
    cudaDeviceSynchronize();
    cout << "Size of a node " << size_node << endl;
    cout << "Nodes considered as boundary: " << DD_A[0] <<endl;
    //Check here for errors
    cudaError_t error = cudaGetLastError();
    cout << "The error code is " << error << endl;
    if(error != 0)
    {
        // print the CUDA error message and exit
        printf("CUDA error: %s\n", cudaGetErrorString(error));
        exit(-1);
    }
    clock_t end = clock();
    double time_spent = (double)(end - begin) / CLOCKS_PER_SEC;
    printf("\nSpent time = %.4f seg.\n", time_spent );
    //Collect the subhistograms data into the double precision main histograms
    //This has to be done in CPU since the kernels accumulate in single precision
    for (int i = 0; i < bn; i++){
        DD[i] = (double)(DD_A[i]+ DD_B[i]);
        DR[i] = (double)(DR_A[i]+ DR_B[i]);
    }
    cout << "Termine de hacer todos los histogramas" << endl;
    // Save the histograms
    save_histogram(nameDD, bn, DD);
    cout << "Guarde histograma DD..." << endl;
    save_histogram(nameRR, bn, RR);
    cout << "Guarde histograma RR..." << endl;
    save_histogram(nameDR, bn, DR);
    cout << "Guarde histograma DR..." << endl;
    // Free the memory.
    // BUG FIX: cudaFree takes the device/managed pointer itself. The original
    // passed the ADDRESS of the pointer variable (cudaFree(&p)), which frees
    // nothing and returns an invalid-value error for every buffer.
    cudaFree(dataD);
    cudaFree(dataR);
    delete[] DD;
    delete[] DR;
    cudaFree(RR); // matches the managed allocation above
    cudaFree(DD_A);
    cudaFree(DR_A);
    cudaFree(DD_B);
    cudaFree(DR_B);
    for (int i=0; i<partitions; i++){
        for (int j=0; j<partitions; j++){
            cudaFree(nodeR[i][j]);
            cudaFree(nodeD[i][j]);
        }
        cudaFree(nodeR[i]);
        cudaFree(nodeD[i]);
    }
    cudaFree(nodeR);
    cudaFree(nodeD);
    cout << "Programa Terminado..." << endl;
    return 0;
}
|
14,953 | #include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#define N 1024
// One thread per character: prints the binary representation of A[i] as a
// decimal-looking integer (e.g. 'a' == 97 -> "1100001"). A value of 0 prints
// as "0". The L parameter (string length) is currently unused by the kernel;
// it is kept for interface compatibility with the host launcher.
__global__ void CUDABinary(char *A, int *L)
{
    int i = threadIdx.x;
    int dec = (int)A[i];
    // BUG FIX: the original started filling at rem[1] and then read rem[j]
    // one slot PAST the last stored digit -- it only printed correct values
    // because the array was zero-initialized. Fill from index 0 and walk
    // back symmetrically instead.
    int rem[10]; // a char holds at most 8 bits, so 10 slots is plenty
    int j = 0;
    while (dec != 0)
    {
        rem[j++] = dec % 2;  // least-significant bit first
        dec /= 2;
    }
    // Fold the bits back most-significant first into a base-10 "binary" int.
    int bin = 0;
    while (j > 0)
    {
        bin = bin * 10 + rem[--j];
    }
    printf("%d ", bin);
}
// Reads one line from stdin and prints the binary form of each character via
// the CUDABinary kernel (one thread per character), timing the GPU work with
// CUDA events.
int main(int argc, char const *argv[])
{
    char A[N];
    char *pA;
    int *pL;
    printf("Enter string(A): ");
    // BUG FIX: bound the conversion so input longer than the buffer cannot
    // overflow A (N == 1024, so at most 1023 chars + NUL), and check that a
    // line was actually read before using the buffer.
    if (scanf("%1023[^\n]%*c", A) != 1)
    {
        printf("No input read\n");
        return 1;
    }
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start, 0);
    cudaMalloc((void**)&pA, N * sizeof(char));
    cudaMalloc((void**)&pL, sizeof(int));
    int L = strlen(A);
    cudaMemcpy(pA, A, N * sizeof(char), cudaMemcpyHostToDevice);
    cudaMemcpy(pL, &L, sizeof(int), cudaMemcpyHostToDevice);
    cudaError_t error = cudaGetLastError();
    if (error != cudaSuccess)
    {
        printf("CUDA Error1: %s\n", cudaGetErrorString(error));
    }
    printf("Binary values:\n");
    // One thread per input character.
    CUDABinary<<<1, L>>>(pA, pL);
    error = cudaGetLastError();
    if (error != cudaSuccess)
    {
        printf("CUDA Error2: %s\n", cudaGetErrorString(error));
    }
    cudaMemcpy(A, pA, N * sizeof(char), cudaMemcpyDeviceToHost);
    cudaEventRecord(stop, 0);
    cudaEventSynchronize(stop);
    float elapsedTime;
    cudaEventElapsedTime(&elapsedTime, start, stop);
    printf("\nTime Taken = %f\n", elapsedTime);
    // Release the events as well as the device buffers (the original leaked
    // both events).
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    cudaFree(pA);
    cudaFree(pL);
    return 0;
}
|
14,954 | #include <thrust/host_vector.h>
#include <thrust/device_vector.h>
#include <thrust/remove.h>
#include <thrust/random.h>
// This example generates random points in the
// unit square [0,1)x[0,1) and then removes all
// points where x^2 + y^2 > 1
//
// The x and y coordinates are stored in separate arrays
// and a zip_iterator is used to combine them together
typedef thrust::device_vector<float>::iterator DVIfloat;
typedef thrust::tuple<DVIfloat, DVIfloat> TDVIfloat;
typedef thrust::zip_iterator<TDVIfloat> zip;
//template <typename T>
// Predicate for thrust::remove_if over a zipped (x, y) range: returns true
// when the point lies strictly outside the unit circle x^2 + y^2 <= 1.
struct is_outside_circle
{
    template <typename Tuple>
    __host__ __device__
    bool operator()(const Tuple& tuple) const
    {
        // Unpack the tuple into the two coordinates.
        const float px = thrust::get<0>(tuple);
        const float py = thrust::get<1>(tuple);
        return px * px + py * py > 1;
    }
};
int main(void)
{
    const size_t N = 20;
    // Draw N points uniformly from the unit square on the host.
    thrust::default_random_engine rng;
    thrust::uniform_real_distribution<float> u01(0.0f, 1.0f);
    thrust::host_vector<float> x(N);
    thrust::host_vector<float> y(N);
    thrust::device_vector<float> dx;
    thrust::device_vector<float> dy;
    for (size_t k = 0; k < N; ++k)
    {
        x[k] = u01(rng);
        y[k] = u01(rng);
    }
    // Mirror the coordinates onto the device.
    dx = x;
    dy = y;
    // Show the points before compaction.
    std::cout << std::fixed;
    std::cout << "Generated " << N << " points" << std::endl;
    for (size_t k = 0; k < N; ++k)
        std::cout << "(" << x[k] << "," << y[k] << ")" << std::endl;
    std::cout << std::endl;
    // Zip the coordinate ranges so each element behaves like an (x, y) pair.
    TDVIfloat first = thrust::make_tuple(dx.begin(), dy.begin());
    TDVIfloat last = thrust::make_tuple(dx.end(), dy.end());
    zip zfirst = thrust::make_zip_iterator(first);
    zip zlast = thrust::make_zip_iterator(last);
    // Compact away every point with x^2 + y^2 > 1 and measure the survivors.
    zip znew_end = thrust::remove_if(zfirst, zlast, is_outside_circle());
    size_t new_size = znew_end - zfirst;
    // Shrink the device vectors (capacity is left untouched).
    dx.resize(new_size);
    dy.resize(new_size);
    // Pull the surviving points back to the host.
    thrust::copy(dx.begin(), dx.end(), x.begin());
    thrust::copy(dy.begin(), dy.end(), y.begin());
    x.resize(new_size);
    y.resize(new_size);
    x.push_back(1.0); // extra element: feeds the size/capacity printout below
    // Show the points that survived.
    std::cout << "After stream compaction, " << new_size << " points remain" << std::endl;
    for (size_t k = 0; k < new_size; ++k)
        std::cout << "(" << x[k] << "," << y[k] << ")" << std::endl;
    std::cout << "capacity " << x.capacity() << " " << dx.capacity() << std::endl;
    std::cout << "size " << x.size() << " " << dx.size() << std::endl;
    return 0;
}
|
14,955 | #include "includes.h"
// Busy-work kernel: each thread accumulates *factor into its own element
// num_iterations times. The repeated dereference of factor is intentional
// time-burning (per the original author's note), not an oversight.
__global__ void init_array(int *g_data, int *factor, int num_iterations)
{
    int tid = blockIdx.x * blockDim.x + threadIdx.x;
    for (int iter = 0; iter < num_iterations; ++iter)
        g_data[tid] += *factor;
}
14,956 | #include "includes.h"
// Element-wise conditional copy over N entries (grid-stride loop):
// wherever b[(idxb-1)*N + i] is true, overwrite slot (idxf-2) of x with slot
// (idxf-1). idxb/idxf are 1-based "register" indices into stacked N-length
// slabs -- NOTE(review): that interpretation is inferred from the indexing;
// confirm against the caller.
__global__ void IfThenElse(bool * b, float * x, size_t idxb, size_t idxf, size_t N)
{
    // BUG FIX: use size_t for the index. The original used int, which both
    // mixes signed/unsigned in `i < N` and overflows when N > INT_MAX.
    size_t stride = (size_t)blockDim.x * gridDim.x;
    for (size_t i = (size_t)blockIdx.x * blockDim.x + threadIdx.x; i < N; i += stride)
    {
        if (b[(idxb-1)*N+i])
            x[(idxf-2)*N+i] = x[(idxf-1)*N+i];
    }
    return;
}
14,957 | #include <cuda.h>
#include <stdio.h>
// Element-wise c = a + b; one thread per element with a tail guard for the
// padded final block.
__global__ void add(int *a, int *b, int *c, int n) {
    int i = threadIdx.x + blockIdx.x * blockDim.x;
    if (i >= n)
        return;
    c[i] = a[i] + b[i];
}
#define N 9
#define M 5
// Fill x[0..size-1] with pseudo-random digits in [0, 9] (uses rand(), so the
// sequence is governed by the process-wide srand() seed).
void random_ints(int* x, int size)
{
    for (int k = 0; k < size; ++k)
        x[k] = rand() % 10;
}
// Two-stream demo: two independent vector additions, each with its own async
// H2D copies, kernel launch and D2H copy, intended to overlap via streams.
int main(void) {
    int *a1, *b1, *c1, *a2, *b2, *c2; // host copies of a, b, c
    int *a1_d, *b1_d, *c1_d, *a2_d, *b2_d, *c2_d; // device copies of a, b, c
    int size = N * sizeof(int);
    cudaStream_t stream1,stream2;
    cudaError_t error;
    error = cudaStreamCreate(&stream1);
    error = cudaStreamCreate(&stream2);
    // Alloc space for device copies of a, b, c
    cudaMalloc((void **)&a1_d, size);
    cudaMalloc((void **)&b1_d, size);
    cudaMalloc((void **)&c1_d, size);
    cudaMalloc((void **)&a2_d, size);
    cudaMalloc((void **)&b2_d, size);
    cudaMalloc((void **)&c2_d, size);
    // Alloc space for host copies of a, b, c and setup input values.
    // NOTE(review): these are pageable allocations, so the Async copies below
    // will not truly overlap; cudaMallocHost would be needed for that.
    a1 = (int *)malloc(size); random_ints(a1, N);
    b1 = (int *)malloc(size); random_ints(b1, N);
    c1 = (int *)malloc(size);
    a2 = (int *)malloc(size); random_ints(a2, N);
    b2 = (int *)malloc(size); random_ints(b2, N);
    c2 = (int *)malloc(size);
    // Copy inputs to device, one pair of arrays per stream
    cudaMemcpyAsync(a1_d, a1, size, cudaMemcpyHostToDevice, stream1);
    cudaMemcpyAsync(b1_d, b1, size, cudaMemcpyHostToDevice, stream1);
    cudaMemcpyAsync(a2_d, a2, size, cudaMemcpyHostToDevice, stream2);
    cudaMemcpyAsync(b2_d, b2, size, cudaMemcpyHostToDevice, stream2);
    // Launch add() kernel on GPU, one launch per stream
    add<<<(N+M-1)/M,M,0,stream1>>>(a1_d, b1_d, c1_d,N);
    add<<<(N+M-1)/M,M,0,stream2>>>(a2_d, b2_d, c2_d,N);
    // Copy results back to host
    cudaMemcpyAsync(c1, c1_d, size, cudaMemcpyDeviceToHost, stream1);
    cudaMemcpyAsync(c2, c2_d, size, cudaMemcpyDeviceToHost, stream2);
    // BUG FIX: capture the synchronize status. The original discarded it, so
    // `error` still held the (stale) cudaStreamCreate result and launch/copy
    // failures were never reported by the check below.
    error = cudaDeviceSynchronize();
    for(int i=0; i<N; i++)printf("stream1 %d + %d = %d\t stream2 %d + %d = %d\n",a1[i],b1[i],c1[i],a2[i],b2[i],c2[i]);
    if (error != 0) {
        printf ("%s\n", cudaGetErrorString (error));
        exit (1);
    }
    // Cleanup (also destroy the streams, which the original leaked)
    cudaStreamDestroy(stream1);
    cudaStreamDestroy(stream2);
    free(a1); free(b1); free(c1);
    free(a2); free(b2); free(c2);
    cudaFree(a1_d); cudaFree(b1_d); cudaFree(c1_d);
    cudaFree(a2_d); cudaFree(b2_d); cudaFree(c2_d);
    return 0;
}
|
14,958 | /*
Implement your CUDA kernel in this file
*/
// Mirrors the ghost-cell boundary of an (m+2) x (n+2) padded grid: column 0
// gets column 2, column n+1 gets column n-1, row 0 gets row 2, row m+1 gets
// row m-1. Threads map to interior cells (1-based via the +1 offsets).
__global__ void mirror_boundaries(double *E_prev, const int n, const int m)
{
    int row = blockIdx.y*blockDim.y + threadIdx.y + 1;
    int col = blockIdx.x*blockDim.x + threadIdx.x + 1;
    // BUG FIX: guard the orthogonal coordinate. Without `row <= m` /
    // `col <= n`, threads from the padded tail of the grid (row > m or
    // col > n) still matched col == 1 / row == 1 and wrote past the array.
    if (col == 1 && row <= m) {
        E_prev[row*(n+2)] = E_prev[row*(n+2) + 2];
        E_prev[row*(n+2) + n + 1] = E_prev[row*(n+2) + n - 1];
    }
    if (row == 1 && col <= n) {
        E_prev[col] = E_prev[2*(n+2) + col];
        E_prev[(m+1)*(n+2) + col] = E_prev[(m-1)*(n+2) + col];
    }
}
// One explicit 5-point-stencil diffusion step on the interior of the padded
// (m+2) x (n+2) grid: E = E_prev + alpha * laplacian(E_prev).
__global__ void solve_for_pde (double *E, const double *E_prev, const double alpha,
const int n, const int m)
{
    int row = blockIdx.y*blockDim.y + threadIdx.y + 1;
    int col = blockIdx.x*blockDim.x + threadIdx.x + 1;
    // Skip threads from the padded tail of the launch grid.
    if (row > m || col > n)
        return;
    int idx = row * (n + 2) + col;
    double center = E_prev[idx];
    // Same summation order as the original so results are bit-identical.
    double lap = E_prev[idx + 1] + E_prev[idx - 1] - 4 * center
               + E_prev[idx + (n + 2)] + E_prev[idx - (n + 2)];
    E[idx] = center + alpha * lap;
}
// Pointwise reaction (ODE) update of the Aliev-Panfilov-style system on the
// interior cells. Note the ordering: E is updated first and the R update then
// reads the NEW E value, exactly as in the original. `alpha` is unused but
// kept for interface compatibility.
__global__ void solve_for_ode (double *E, double *R, const double alpha,
const int n, const int m, const double kk,
const double dt, const double a, const double epsilon,
const double M1,const double M2, const double b)
{
    int row = blockIdx.y*blockDim.y + threadIdx.y + 1;
    int col = blockIdx.x*blockDim.x + threadIdx.x + 1;
    if (row > m || col > n)
        return;
    int idx = row * (n + 2) + col;
    double e = E[idx];
    double r = R[idx];
    // Excitation update (uses the old e and r).
    e = e - dt*(kk*e*(e - a)*(e - 1) + e*r);
    E[idx] = e;
    // Recovery update (deliberately uses the freshly written e).
    R[idx] = r + dt*(epsilon + M1*r/(e + M2))*(-r - kk*e*(e - b - 1));
}
|
14,959 | /*
* Copyright 1993-2010 NVIDIA Corporation. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property and
* proprietary rights in and to this software and related documentation.
* Any use, reproduction, disclosure, or distribution of this software
* and related documentation without an express license agreement from
* NVIDIA Corporation is strictly prohibited.
*
* Please refer to the applicable NVIDIA end user license agreement (EULA)
* associated with this source code for terms and conditions that govern
* your use of this NVIDIA software.
*
*/
#include<bits/stdc++.h>
using namespace std;
#define pi (2.0*acos(0.0))
#define eps 1e-6
#define ll long long
#define inf (1<<29)
#define vi vector<int>
#define vll vector<ll>
#define sc(x) scanf("%d",&x)
#define scl(x) scanf("%lld",&x)
#define all(v) v.begin() , v.end()
#define me(a,val) memset( a , val ,sizeof(a) )
#define pb(x) push_back(x)
#define pii pair<int,int>
#define mp(a,b) make_pair(a,b)
#define Q(x) (x) * (x)
#define L(x) ((x<<1) + 1)
#define R(x) ((x<<1) + 2)
#define M(x,y) ((x+y)>>1)
#define fi first
#define se second
#define MOD 1000000007
#define ios ios::sync_with_stdio(0)
typedef struct {
int width;
int height;
float* elements;
} Matrix;
// Thread block size
#define BLOCK_SIZE 128
#define N 2048
// Forward declaration of the matrix multiplication kernel
__global__ void MatMulKernel(const Matrix, const Matrix, Matrix);
// Matrix multiplication - Host code
// Matrix dimensions are assumed to be multiples of BLOCK_SIZE
// Debug helper: prints the top-left 10x10 corner of A. Note it indexes with
// the global N stride rather than A.width, so it is only meaningful for the
// N x N matrices this program builds.
void print(Matrix A){
    for (int r = 0; r < 10; ++r){
        for (int c = 0; c < 10; ++c)
            printf("%.0lf ", A.elements[r * N + c]);
        printf("\n");
    }
}
// Host code: copies A and B to the device, runs MatMulKernel, copies C back
// and prints its corner. Matrix dimensions are assumed to be multiples of the
// tile size used below.
void MatMul(const Matrix A, const Matrix B, Matrix C){
    // Load A and B to device memory
    Matrix d_A;
    d_A.width = A.width; d_A.height = A.height;
    size_t size = A.width * A.height * sizeof(float);
    cudaMalloc(&d_A.elements, size);
    cudaMemcpy(d_A.elements, A.elements, size, cudaMemcpyHostToDevice);
    Matrix d_B;
    d_B.width = B.width; d_B.height = B.height;
    size = B.width * B.height * sizeof(float);
    cudaMalloc(&d_B.elements, size);
    cudaMemcpy(d_B.elements, B.elements, size, cudaMemcpyHostToDevice);
    // Allocate C in device memory
    Matrix d_C;
    d_C.width = C.width; d_C.height = C.height;
    size = C.width * C.height * sizeof(float);
    cudaMalloc(&d_C.elements, size);
    // Invoke kernel.
    // BUG FIX: the original used dimBlock(BLOCK_SIZE, BLOCK_SIZE) with
    // BLOCK_SIZE == 128, i.e. 16384 threads per block -- far above the 1024
    // threads-per-block hardware limit, so the launch failed silently and C
    // was never computed. Use a 16x16 tile (256 threads) instead.
    const int TILE = 16;
    dim3 dimBlock(TILE, TILE);
    dim3 dimGrid(B.width / dimBlock.x, A.height / dimBlock.y);
    MatMulKernel<<<dimGrid, dimBlock>>>(d_A, d_B, d_C);
    // Surface launch failures instead of swallowing them.
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess)
        printf("CUDA error: %s\n", cudaGetErrorString(err));
    // Read C from device memory
    cudaMemcpy(C.elements, d_C.elements, size , cudaMemcpyDeviceToHost);
    print(C);
    // Free device memory
    cudaFree(d_A.elements);
    cudaFree(d_B.elements);
    cudaFree(d_C.elements);
}
// Matrix multiplication kernel called by MatMul()
// Matrix multiplication kernel called by MatMul(): each thread computes one
// element of C by accumulating a full dot product into Cvalue.
__global__ void MatMulKernel(Matrix A, Matrix B, Matrix C){
    int row = blockIdx.y * blockDim.y + threadIdx.y;
    int col = blockIdx.x * blockDim.x + threadIdx.x;
    // Robustness: guard against grids padded beyond the matrix, so the kernel
    // is also correct for dimensions that are not multiples of the tile size.
    if (row >= C.height || col >= C.width)
        return;
    float Cvalue = 0;
    for (int e = 0; e < A.width; ++e)
        Cvalue += A.elements[row * A.width + e] * B.elements[e * B.width + col];
    C.elements[row * C.width + col] = Cvalue;
}
// Builds two N x N identity matrices, multiplies them on the GPU and prints
// the top-left corner of the product (which should again be the identity).
int main( void ) {
    Matrix A , B , C;
    A.width = B.width = C.width = N;
    A.height = B.height = C.height = N;
    A.elements = (float *)malloc( N * N * sizeof(float) );
    B.elements = (float *)malloc( N * N * sizeof(float) );
    C.elements = (float *)malloc( N * N * sizeof(float) );
    // Identity initialization: 1 on the diagonal, 0 elsewhere.
    for(int i = 0 ; i < N ; i++)
        for(int j = 0 ; j < N ; j++){
            A.elements[i*N + j] = (i==j);
            B.elements[i*N + j] = (i==j);
        }
    MatMul( A , B , C );
    // BUG FIX: release the host buffers (the original leaked all three).
    free(A.elements);
    free(B.elements);
    free(C.elements);
    return 0;
}
|
14,960 | /**
* Vector addition: C = A + B.
*
* This sample is a very basic sample that implements element by element
* vector addition.
*
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
// For the CUDA runtime routines (prefixed with "cuda_")
#include <cuda_runtime.h>
/**
* Get parameters from STDIN.
*/
/**
 * Get parameters from STDIN: first line is the GPU id, second the vector
 * length. A trailing '#' comment on either line is stripped. On EOF / read
 * failure both outputs are set to 0 instead of parsing an indeterminate
 * buffer (the original called atoi on whatever was left in buf).
 */
static void read_from_stdin(int *gid, int *n)
{
    char *s, buf[1024];
    *gid = 0;
    *n = 0;
    if (fgets(buf, 1023, stdin) == NULL) return;
    if ((s = strchr(buf, '#')) != NULL) *s = '\0';
    *gid = atoi(buf);
    if (fgets(buf, 1023, stdin) == NULL) return;
    if ((s = strchr(buf, '#')) != NULL) *s = '\0';
    *n = atoi(buf);
}
/**
* CUDA Kernel Device code
*
* Computes the vector addition of A and B into C. The 3 vectors have the same
* number of elements numElements.
*/
__global__ void
vectorAdd(const float *A, const float *B, float *C, int numElements)
{
int i = blockDim.x * blockIdx.x + threadIdx.x;
if (i < numElements)
{
C[i] = A[i] + B[i];
}
}
/**
* Host main routine
*/
/* Abort with the given message when a CUDA runtime call failed. The printed
 * text matches the original per-call-site messages exactly. */
static void checkCudaErr(cudaError_t err, const char *msg)
{
    if (err != cudaSuccess)
    {
        fprintf(stderr, "%s (error code %s)!\n", msg, cudaGetErrorString(err));
        exit(EXIT_FAILURE);
    }
}

/**
 * Host main routine: reads <gpu id, n> from stdin, computes C = A + B on the
 * selected GPU and verifies the result against the host-side sum.
 * (Refactor: the nine copy-pasted error-check stanzas now share checkCudaErr.)
 */
int main(int argc, char **argv)
{
    int gid, numElements;
    // Error code to check return values for CUDA calls
    cudaError_t err = cudaSuccess;
    read_from_stdin(&gid, &numElements);
    err = cudaSetDevice(gid);
    if (err != cudaSuccess) {
        printf("!!! Cannot select GPU with device ID = %d\n", gid);
        exit(1);
    }
    printf("Choose GPU with device ID = %d\n", gid);
    // Print the vector length to be used, and compute its size
    size_t size = numElements * sizeof(float);
    printf("[Vector addition of %d elements]\n", numElements);
    // Host buffers
    float *h_A = (float *)malloc(size);
    float *h_B = (float *)malloc(size);
    float *h_C = (float *)malloc(size);
    if (h_A == NULL || h_B == NULL || h_C == NULL)
    {
        fprintf(stderr, "Failed to allocate host vectors!\n");
        exit(EXIT_FAILURE);
    }
    // Random inputs, uniform in (0,1)
    for (int i = 0; i < numElements; ++i)
    {
        h_A[i] = rand()/(float)RAND_MAX;
        h_B[i] = rand()/(float)RAND_MAX;
    }
    // Device buffers
    float *d_A = NULL, *d_B = NULL, *d_C = NULL;
    checkCudaErr(cudaMalloc((void **)&d_A, size), "Failed to allocate device vector A");
    checkCudaErr(cudaMalloc((void **)&d_B, size), "Failed to allocate device vector B");
    checkCudaErr(cudaMalloc((void **)&d_C, size), "Failed to allocate device vector C");
    // Copy the input vectors A and B from the host memory to the device memory
    printf("Copy input vectors from the host memory to the CUDA device\n");
    checkCudaErr(cudaMemcpy(d_A, h_A, size, cudaMemcpyHostToDevice),
                 "Failed to copy vector A from host to device");
    checkCudaErr(cudaMemcpy(d_B, h_B, size, cudaMemcpyHostToDevice),
                 "Failed to copy vector B from host to device");
    // Launch the Vector Add CUDA Kernel
    int threadsPerBlock = 256;
    int blocksPerGrid = (numElements + threadsPerBlock - 1) / threadsPerBlock;
    printf("CUDA kernel launch with %d blocks of %d threads\n", blocksPerGrid, threadsPerBlock);
    vectorAdd<<<blocksPerGrid, threadsPerBlock>>>(d_A, d_B, d_C, numElements);
    checkCudaErr(cudaGetLastError(), "Failed to launch vectorAdd kernel");
    // Copy the result back to the host (cudaMemcpy blocks until the kernel is done)
    printf("Copy output data from the CUDA device to the host memory\n");
    checkCudaErr(cudaMemcpy(h_C, d_C, size, cudaMemcpyDeviceToHost),
                 "Failed to copy vector C from device to host");
    // Verify that the result vector is correct
    for (int i = 0; i < numElements; ++i)
    {
        if (fabs(h_A[i] + h_B[i] - h_C[i]) > 1e-5)
        {
            fprintf(stderr, "Result verification failed at element %d!\n", i);
            exit(EXIT_FAILURE);
        }
    }
    printf("Test PASSED\n");
    // Free device global memory
    checkCudaErr(cudaFree(d_A), "Failed to free device vector A");
    checkCudaErr(cudaFree(d_B), "Failed to free device vector B");
    checkCudaErr(cudaFree(d_C), "Failed to free device vector C");
    // Free host memory
    free(h_A);
    free(h_B);
    free(h_C);
    printf("Done\n");
    return 0;
}
|
14,961 | #include <stdio.h>
#include <assert.h>
#define N 100000
#define tb 512 // tamaño bloque
// Grid-stride vector add over the packed buffer D (three N-length segments):
// D[2N + i] = D[i] + D[N + i] for every i in [0, N).
__global__ void VecAdd(int* D)
{
    int stride = blockDim.x * gridDim.x;
    for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < N; i += stride){
        D[2*N + i] = D[i] + D[N + i];
    }
}
// Pass-through CUDA status checker: on success returns the status untouched;
// on failure prints the error string and aborts via assert.
cudaError_t testCuErr(cudaError_t result)
{
    if (result == cudaSuccess)
        return result;
    printf("CUDA Runtime Error: %s\n", cudaGetErrorString(result));
    assert(result == cudaSuccess); // abort the program on any CUDA failure
    return result;
}
// Driver: fills the first two N-length segments of a packed buffer, runs
// VecAdd on the GPU and checks D[2N+i] == D[i] + D[N+i] on the host.
int main()
{ cudaFree(0); // warm up / force CUDA context creation
int *H,*D;
int i, dg; int size = 3*N*sizeof(int);
H = (int*)malloc(size);
// Reserve space in pinned host memory for the packed buffer.
// NOTE(review): D comes from cudaMallocHost yet is dereferenced by the
// kernel, i.e. this relies on zero-copy access to pinned memory via UVA --
// confirm that is intended rather than cudaMalloc.
testCuErr(cudaMallocHost((void**)&D, size));
// Initialize the first segment to -i and the second to 3*i
// (the third segment is the kernel's output and is left untouched here).
for (i=0; i<3*N; i++) {
if(i < N){
H[i] = -i;
}else if((i < 2*N) && (i > N-1)){
H[i] = 3*i;
}
}
testCuErr(cudaMemcpy(D, H, size, cudaMemcpyHostToDevice));
// Grid size: enough blocks of tb threads to cover N, capped at the legacy
// 65535 limit (the kernel's grid-stride loop handles the remainder).
dg = (N+tb-1)/tb; if (dg>65535) dg=65535;
// Launch the kernel: N or more threads execute it in parallel
VecAdd <<<dg, tb>>>(D);
testCuErr(cudaGetLastError());
testCuErr(cudaMemcpy(H, D, size, cudaMemcpyDeviceToHost));
// Release the pinned buffer
testCuErr(cudaFreeHost(D));
// With the results on the host, verify each output component
for (i = 0; i < N; i++){
if (H[2*N + i] != (H[i] + H[N + i])){
printf("error en componente %d\n", i);}
}
free(H);
return 0;
}
|
14,962 | /*
========================================================================================
Name: cuda_demo.cu
Author: Mingran Peng
Class: EECS 587, Fall 2020
Description : Demo program for HW4
P.S. Fell free to use or modify this code for future terms of EECS 587 or other courses
Add you name if you modify it and preserve all author names
========================================================================================
*/
#include <algorithm>
#include <iostream>
#include <cstdio>
#include <cmath>
#include <numeric>
#include <iostream>
#include <cstring>
#include <sstream>
#include <string>
using namespace std;
cudaError_t addWithCuda(double *c, unsigned int size, int n, double *Along, int t);
// One sweep of the assignment's update rule: each interior cell of the n x n
// grid adds the second-smallest of its four diagonal neighbors to itself;
// cells on the border copy through unchanged. Reads dev_Along, writes c.
// (size and t are unused here; kept for interface compatibility.)
__global__ void addKernel(double *c, int size, int n, double *dev_Along, int t)
{
    int globalro = blockIdx.x * blockDim.x + threadIdx.x;
    int globalco = blockIdx.y * blockDim.y + threadIdx.y;
    // BUG FIX: reject out-of-domain threads up front. The original only
    // guarded the final WRITE, so padded-grid threads with globalro >= n (or
    // globalco >= n) that matched the border test (e.g. globalco == 0) still
    // READ dev_Along out of bounds. This also removes the uninitialized
    // `update` path and the unused locals / pointless __syncthreads().
    if (globalro >= n || globalco >= n)
        return;
    int globalind = globalro * n + globalco;
    double update;
    if (globalro > 0 && globalco > 0 && globalro < n - 1 && globalco < n - 1) {
        // Gather the four diagonal neighbors.
        double arr[4];
        arr[0] = dev_Along[(globalro + 1) * n + (globalco + 1)];
        arr[1] = dev_Along[(globalro + 1) * n + (globalco - 1)];
        arr[2] = dev_Along[(globalro - 1) * n + (globalco + 1)];
        arr[3] = dev_Along[(globalro - 1) * n + (globalco - 1)];
        // Selection-style sort of 4 values; arr[1] is the second smallest.
        for (int i = 0; i < 4; i++)
        {
            for (int j = i + 1; j < 4; j++)
            {
                if (arr[i] > arr[j])
                {
                    double temp = arr[i];
                    arr[i] = arr[j];
                    arr[j] = temp;
                }
            }
        }
        update = dev_Along[globalind] + arr[1];
    } else {
        // Border cells are copied through unchanged.
        update = dev_Along[globalind];
    }
    c[globalind] = update;
}
// Entry point: reads grid size n and sweep count t from the command line,
// initializes A(i,j) = (1 + cos(2i) + sin(j))^2, runs t GPU sweeps and
// reports the checksum values (sum and A(37,47)).
int main(int argc, char* argv[])
{
    // Robustness: the original dereferenced argv[1]/argv[2] unconditionally.
    if (argc < 3){
        cout<<"Usage: "<<argv[0]<<" <n> <t>"<<endl;
        return -1;
    }
    int n = atoi(argv[1]);
    int t = atoi(argv[2]);
    int size = n*n; // number of elements
    // Host-side initial grid and result buffer.
    double *Along = new double[size];
    double *c = new double[size];
    for (int i=0; i<size; i++){
        int ro = i / n;  // row index (integer division)
        int co = i % n;  // column index
        Along[i] = pow((1+cos(2*ro)+sin(co)),2);
    }
    cudaError_t cudaStatus = addWithCuda(c, size, n, Along, t);
    if (cudaStatus != cudaSuccess) {
        cout<<"addWithCuda failed!"<<endl;
        delete[] Along;
        delete[] c;
        return -1;
    }
    // Checksum / spot check used for grading.
    double initial_sum = 0;
    double sumc = accumulate(c, c+size, initial_sum);
    cout<<"Sum: "<<sumc<<endl;
    cout<<"A(37,47): "<<c[37*n+47]<<endl;
    // BUG FIX: release the host buffers (the original leaked both).
    delete[] Along;
    delete[] c;
    return 0;
}
// Helper function for using CUDA to add vectors in parallel.
// Helper function for using CUDA: runs t sweeps of addKernel, ping-ponging
// through a device-to-device copy, then returns the final grid in c.
// Returns the last CUDA status (cudaSuccess on success).
cudaError_t addWithCuda(double *c, unsigned int size, int n, double *Along, int t)
{
    double *dev_Along = 0;
    // BUG FIX: the original did `dev_c = new double[size]` and zero-filled
    // it, then immediately overwrote the pointer with cudaMalloc -- a pure
    // host-memory leak (the zeros never reached the device). The kernel
    // writes every in-range element, so no pre-initialization is needed.
    double *dev_c = 0;
    cudaError_t cudaStatus;
    cudaEvent_t start, stop;
    float gpu_time = 0.0f;
    // 32x32 thread blocks; +1 block per axis over-provisions so the (guarded)
    // kernel covers every cell even when n is not a multiple of 32.
    dim3 gridSize(ceil(n/32)+1,ceil(n/32)+1,1);
    dim3 blockSize(32,32,1);
    // Choose which GPU to run on, 0 if you have only one GPU
    cudaStatus = cudaSetDevice(0);
    if (cudaStatus != cudaSuccess) {
        cout<<"cudaSetDevice failed!"<<endl;
        goto Error;
    }
    // Malloc memory on GPU
    cudaStatus = cudaMalloc((void**)&dev_Along, size * sizeof(double));
    if (cudaStatus != cudaSuccess) {
        cout<<"cudaMalloc failed!"<<endl;
        goto Error;
    }
    cudaStatus = cudaMalloc((void**)&dev_c, size * sizeof(double));
    if (cudaStatus != cudaSuccess) {
        cout<<"cudaMalloc failed!"<<endl;
        goto Error;
    }
    // Copy memory from Host to Device
    cudaStatus = cudaMemcpy(dev_Along, Along, size * sizeof(double), cudaMemcpyHostToDevice);
    if (cudaStatus != cudaSuccess) {
        cout<<"cudaMemcpy failed!"<<endl;
        goto Error;
    }
    // Set up timing
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start);
    // t sweeps; each sweep reads dev_Along and writes dev_c, then the result
    // is copied back to dev_Along for the next sweep.
    for (int tt=0; tt<t; tt++){
        addKernel<<<gridSize, blockSize>>>(dev_c, size, n, dev_Along, t);
        // BUG FIX: the original tested a STALE status here (nothing assigned
        // cudaStatus after the launch); query the launch result explicitly.
        cudaStatus = cudaGetLastError();
        if (cudaStatus != cudaSuccess) {
            cout<<"addKernel launch failed!"<<endl;
            goto Error;
        }
        cudaStatus = cudaMemcpy(dev_Along, dev_c, size * sizeof(double), cudaMemcpyDeviceToDevice);
    }
    // Check for any errors launching the kernel
    cudaStatus = cudaGetLastError();
    if (cudaStatus != cudaSuccess) {
        cout<<"addKernel failed: "<<cudaGetErrorString(cudaStatus)<<endl;
        goto Error;
    }
    // cudaDeviceSynchronize waits for the kernel to finish, and returns
    // any errors encountered during the launch.
    cudaStatus = cudaDeviceSynchronize();
    if (cudaStatus != cudaSuccess) {
        cout<<"cudaDeviceSynchronize failed: "<<cudaGetErrorString(cudaStatus)<<endl;
        goto Error;
    }
    // Close timing
    cudaEventRecord(stop);
    cudaEventSynchronize(stop);
    cudaEventElapsedTime(&gpu_time, start, stop);
    cout<<"Time spent: "<<gpu_time<<"ms"<<endl;
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    // Copy memory from device to host
    cudaStatus = cudaMemcpy(c, dev_c, size * sizeof(double), cudaMemcpyDeviceToHost);
    if (cudaStatus != cudaSuccess) {
        cout<<"cudaMemcpy failed!"<<endl;
        goto Error;
    }
Error:
    cudaFree(dev_c);
    cudaFree(dev_Along);
    return cudaStatus;
}
|
14,963 | #include <pthread.h>
#include <cstdlib>
#include <iostream>
#include <vector>
#define COMPRESSION_BATCH_SIZE 32
using namespace std;
// Per-worker arguments for the compress/decompress pthread entry points.
struct ThreadArg {
float *original_data;          // dense source buffer, shared by all workers
long num_elements;             // total elements in original_data (split evenly across n_threads)
int thread_num;                // this worker's index in [0, n_threads)
float ***compressed_data;      // [thread][batch] -> pinned buffer of kept (positive) values
bool **compressed_data_taken;  // [thread][batch] -> whether that batch buffer has been allocated
unsigned int *mask;            // one occupancy bitmask per COMPRESSION_BATCH_SIZE-element run
};
// Cursor into a worker's chain of compressed-data batch buffers.
struct CompressedPos {
long compressed_data_batch;  // index of the batch buffer currently being filled
long offset;                 // next free element slot within that batch
};
// Compressor tuning knobs.
int n_threads = 8;                // worker threads splitting each layer
int n_compressed_data_batches = 8; // pinned sub-buffers per thread
// Per-layer activation sizes (in elements) for AlexNet-shaped workloads.
long layer_sizes_alexnet[] = {56l * 56 * 96, 28l * 28 * 96, 27l * 27 * 256,
13l * 13 * 256, 13l * 12 * 384, 13l * 12 * 384,
13l * 13 * 256, 6l * 6 * 256};
bool layer_compress_alexnet[] = {true, true, true, true,
true, true, true, true};
// NOTE(review): presumably the expected percentage of non-zero activations
// per layer -- confirm against how layer_density is consumed.
long layer_density_alexnet[] = {50, 80, 40, 60, 70, 70, 30, 60};
int num_layers_alexnet = 8;
// Same three tables for a VGG-shaped workload.
long layer_sizes_vgg[] = {224l * 224 * 64, 224l * 224 * 64, 112l * 112 * 64,
112l * 112 * 128, 112l * 112 * 128, 56l * 56 * 128,
56l * 56 * 256, 56l * 56 * 256, 56l * 56 * 256,
28l * 28 * 256, 28l * 28 * 512, 28l * 28 * 512,
28l * 28 * 512, 14l * 14 * 512, 14l * 14 * 512,
14l * 14 * 512, 14l * 14 * 512, 7l * 7 * 512};
long layer_density_vgg[] = {50, 20, 30, 20, 10, 20, 20, 20, 10,
20, 20, 10, 10, 10, 20, 20, 10, 15};
bool layer_compress_vgg[] = {true, true, true, true, true, true,
true, true, true, true, true, true,
true, true, true, true, true, true};
int num_layers_vgg = 18;
// Active configuration: switch these aliases to the *_vgg tables to run VGG.
// long *layer_sizes = layer_sizes_alexnet;
// bool *layer_compress = layer_compress_alexnet;
// long *layer_density = layer_density_alexnet;
// int num_layers = num_layers_alexnet;
long *layer_sizes = layer_sizes_alexnet;
bool *layer_compress = layer_compress_alexnet;
long *layer_density = layer_density_alexnet;
int num_layers = num_layers_alexnet;
// pthread entry point: sparsifies this worker's slice of original_data.
// Positive values are packed densely into pinned batch buffers
// (compressed_data[thread][batch]); for every COMPRESSION_BATCH_SIZE-element
// run a 32-bit bitmask records which slots held positives (MSB-first within
// the run: each step shifts left and sets bit 0 for a kept value). Assumes
// num_elements/n_threads is a multiple of both COMPRESSION_BATCH_SIZE and
// n_compressed_data_batches -- TODO confirm callers guarantee this.
void *compressThread(void *arg) {
ThreadArg *thread_arg = (ThreadArg *)arg;
float *original_data = thread_arg->original_data;
float ***compressed_data = thread_arg->compressed_data;
bool **compressed_data_taken = thread_arg->compressed_data_taken;
unsigned int *mask = thread_arg->mask;
int thread_num = thread_arg->thread_num;
long num_elements = thread_arg->num_elements;
// First element of this worker's contiguous slice.
long start = thread_num * num_elements / n_threads;
long n_compression_batches =
num_elements / n_threads / COMPRESSION_BATCH_SIZE;
long compressed_data_batch_size =
num_elements / n_threads / n_compressed_data_batches;
// Pinned (page-locked) allocations so the buffers can later be used in
// async device transfers.
cudaMallocHost((void **)&compressed_data[thread_num],
n_compressed_data_batches * sizeof(float *));
cudaMallocHost((void **)&compressed_data_taken[thread_num],
n_compressed_data_batches * sizeof(bool));
for (int i = 0; i < n_compressed_data_batches; i++) {
compressed_data_taken[thread_num][i] = false;
}
// Start "one past the end" of batch -1 so the first kept value triggers
// allocation of batch 0.
CompressedPos current_pos;
current_pos.compressed_data_batch = -1,
current_pos.offset = compressed_data_batch_size;
for (long i = 0; i < n_compression_batches; i++) {
// Index of this run's bitmask in the shared (all-threads) mask array.
long mask_pos =
(i * COMPRESSION_BATCH_SIZE + start) / COMPRESSION_BATCH_SIZE;
mask[mask_pos] = 0;
for (long j = i * COMPRESSION_BATCH_SIZE + start;
j < (i + 1) * COMPRESSION_BATCH_SIZE + start; j++) {
if (original_data[j] > 0) {
// Current batch full (or none yet): lazily allocate the next one.
if (current_pos.offset == compressed_data_batch_size) {
cudaMallocHost(
(void **)&compressed_data[thread_num]
[current_pos.compressed_data_batch + 1],
compressed_data_batch_size * sizeof(float));
compressed_data_taken[thread_num]
[current_pos.compressed_data_batch + 1] = true;
current_pos.compressed_data_batch =
current_pos.compressed_data_batch + 1;
current_pos.offset = 0;
}
// Record "kept" in the mask and append the value to the batch.
mask[mask_pos] = (mask[mask_pos] << 1) + 1;
compressed_data[thread_num][current_pos.compressed_data_batch]
[current_pos.offset] = original_data[j];
current_pos.offset += 1;
} else {
// Dropped value: shift a 0 into the mask, store nothing.
mask[mask_pos] = (mask[mask_pos] << 1);
}
}
}
return NULL;
}
/* Worker: reverse of compressThread. Walks this thread's slice of the bit
   mask (most-significant bit first, matching the shift order used while
   compressing) and scatters the stored values back into original_data,
   writing zero where the mask bit is clear. Finally frees the pinned
   buffers the compress pass allocated for this thread.
   Consumes `mask` destructively (each word is shifted left as it is read). */
void *decompressThread(void *arg) {
ThreadArg *thread_arg = (ThreadArg *)arg;
float *original_data = thread_arg->original_data;
float ***compressed_data = thread_arg->compressed_data;
bool **compressed_data_taken = thread_arg->compressed_data_taken;
unsigned int *mask = thread_arg->mask;
int thread_num = thread_arg->thread_num;
long num_elements = thread_arg->num_elements;
long start = thread_num * num_elements / n_threads;
long n_compression_batches =
num_elements / n_threads / COMPRESSION_BATCH_SIZE;
long compressed_data_batch_size =
num_elements / n_threads / n_compressed_data_batches;
CompressedPos current_pos;
current_pos.compressed_data_batch = 0, current_pos.offset = 0;
for (long i = 0; i < n_compression_batches; i++) {
long mask_pos =
(i * COMPRESSION_BATCH_SIZE + start) / COMPRESSION_BATCH_SIZE;
for (long j = i * COMPRESSION_BATCH_SIZE + start;
j < (i + 1) * COMPRESSION_BATCH_SIZE + start; j++) {
/* BUG FIX: `>` binds tighter than `&`, so the previous test
   `mask[mask_pos] & 0x80000000 > 0` parsed as
   `mask[mask_pos] & (0x80000000 > 0)` == `mask[mask_pos] & 1`,
   i.e. it examined the LOW bit. Parenthesize to test the high bit. */
if ((mask[mask_pos] & 0x80000000u) != 0) {
original_data[j] =
compressed_data[thread_num][current_pos.compressed_data_batch]
[current_pos.offset];
current_pos.offset += 1;
// End of this batch buffer: advance to the next one.
if (current_pos.offset == compressed_data_batch_size) {
current_pos.compressed_data_batch += 1;
current_pos.offset = 0;
}
} else {
original_data[j] = 0;
}
mask[mask_pos] = mask[mask_pos] << 1; // expose the next element's bit
}
}
// Release the pinned value batches (allocated contiguously from index 0,
// so stop at the first untaken slot), then the per-thread arrays.
for (int i = 0; i < n_compressed_data_batches; i++) {
if (compressed_data_taken[thread_num][i])
cudaFreeHost(compressed_data[thread_num][i]);
else
break;
}
cudaFreeHost(compressed_data_taken[thread_num]);
cudaFreeHost(compressed_data[thread_num]);
return NULL;
}
/* Benchmarks multi-threaded host-side compression and decompression of
   synthetic sparse activation layers into pinned (cudaMallocHost) memory,
   and reports per-layer timings plus compressed vs. raw footprint. */
int main() {
int batch_size = 64;
long total_space = 0;
cudaEvent_t start, stop;
cudaEventCreate(&start);
cudaEventCreate(&stop);
vector<float ***> compressed_data_vec;
vector<unsigned int *> mask_vec;
vector<bool **> compressed_data_taken_vec;
pthread_t threads[n_threads];
// Scale per-layer element counts by the batch size.
for (int i = 0; i < num_layers; i++) {
layer_sizes[i] *= batch_size;
}
vector<float> compression_times;
float total_milli = 0.0;
for (int j = 0; j < num_layers; j++) {
if (!layer_compress[j]) continue;
long num_elements = layer_sizes[j];
float *original_data, ***compressed_data;
bool **compressed_data_taken;
unsigned int *mask;
cudaMallocHost((void **)&original_data, num_elements * sizeof(float));
// Synthetic data: ~layer_density[j]% ones, rest zeros.
for (long i = 0; i < num_elements; i++) {
if (rand() % 100 < layer_density[j])
original_data[i] = 1;
else
original_data[i] = 0;
}
// Each thread's slice must be a whole number of 32-element mask words.
if (num_elements % n_threads != 0) {
cout << "bad number of threads" << endl;
exit(0);
}
if ((num_elements / n_threads) % COMPRESSION_BATCH_SIZE != 0) {
cout << "bad num_elements or n_threads" << endl;
exit(0);
}
cout << "starting " << j << endl;
cudaEventRecord(start);
cudaMallocHost((void **)&compressed_data, n_threads * sizeof(float **));
cudaMallocHost((void **)&mask, num_elements / COMPRESSION_BATCH_SIZE *
sizeof(unsigned int));
cudaMallocHost((void **)&compressed_data_taken, n_threads * sizeof(bool *));
ThreadArg thread_arg[n_threads];
for (int i = 0; i < n_threads; i++) {
thread_arg[i].original_data = original_data;
thread_arg[i].compressed_data = compressed_data;
thread_arg[i].compressed_data_taken = compressed_data_taken;
thread_arg[i].mask = mask;
thread_arg[i].thread_num = i;
thread_arg[i].num_elements = num_elements;
}
for (int i = 0; i < n_threads; i++) {
pthread_create(&threads[i], NULL, &compressThread,
(void *)&thread_arg[i]);
}
for (int i = 0; i < n_threads; i++) {
pthread_join(threads[i], NULL);
}
compressed_data_vec.push_back(compressed_data);
mask_vec.push_back(mask);
compressed_data_taken_vec.push_back(compressed_data_taken);
// BUG FIX: the raw input is freed exactly once here; the old code
// called cudaFreeHost(original_data) a second time after the timing
// block, which is a double free of pinned memory.
cudaFreeHost(original_data);
cudaEventRecord(stop);
cudaEventSynchronize(stop);
float milli;
cudaEventElapsedTime(&milli, start, stop);
compression_times.push_back(milli);
total_milli += milli;
}
for (size_t i = 0; i < compression_times.size(); i++) {
cout << compression_times[i] << endl;
}
cout << total_milli << endl;
// Space accounting: compressed layers cost their taken value batches plus
// the bit mask; uncompressed layers cost their full size.
int k = 0;
for (int j = 0; j < num_layers; j++) {
long num_elements = layer_sizes[j];
long cur_space = 0;
if (!layer_compress[j]) {
cur_space = num_elements * sizeof(float);
total_space += cur_space;
continue;
}
bool **compressed_data_taken = compressed_data_taken_vec[k];
long compressed_data_batch_size =
num_elements / n_threads / n_compressed_data_batches;
for (int thread_num = 0; thread_num < n_threads; thread_num++) {
for (int i = 0; i < n_compressed_data_batches; i++) {
if (compressed_data_taken[thread_num][i])
cur_space += compressed_data_batch_size;
else
break;
}
}
// add size of mask
cur_space += num_elements / COMPRESSION_BATCH_SIZE;
cur_space *= sizeof(float);
total_space += cur_space;
k++;
}
cout << "total_space_compressed(MB): " << total_space * 1.0 / (1024 * 1024)
<< endl;
// Decompression in reverse layer order (LIFO, matching the vectors).
cout << "decompress" << endl;
vector<float> decompression_times;
float total_milli_decompress = 0.0;
for (int j = num_layers - 1; j >= 0; j--) {
if (!layer_compress[j]) continue;
long num_elements = layer_sizes[j];
float *original_data, ***compressed_data;
bool **compressed_data_taken;
unsigned int *mask;
compressed_data = compressed_data_vec.back();
mask = mask_vec.back();
compressed_data_taken = compressed_data_taken_vec.back();
compressed_data_vec.pop_back();
mask_vec.pop_back();
compressed_data_taken_vec.pop_back();
cout << "starting " << j << endl;
cudaEventRecord(start);
cudaMallocHost((void **)&original_data, num_elements * sizeof(float));
ThreadArg thread_arg[n_threads];
for (int i = 0; i < n_threads; i++) {
thread_arg[i].original_data = original_data;
thread_arg[i].compressed_data = compressed_data;
thread_arg[i].compressed_data_taken = compressed_data_taken;
thread_arg[i].mask = mask;
thread_arg[i].thread_num = i;
thread_arg[i].num_elements = num_elements;
}
for (int i = 0; i < n_threads; i++) {
pthread_create(&threads[i], NULL, &decompressThread,
(void *)&thread_arg[i]);
}
for (int i = 0; i < n_threads; i++) {
pthread_join(threads[i], NULL);
}
cudaFreeHost(compressed_data_taken);
cudaFreeHost(compressed_data);
cudaFreeHost(mask);
cudaEventRecord(stop);
cudaEventSynchronize(stop);
float milli;
cudaEventElapsedTime(&milli, start, stop);
decompression_times.insert(decompression_times.begin(), milli);
total_milli_decompress += milli;
// LEAK FIX: the reconstructed layer was never freed before; release it
// after the timed region so timings are unaffected.
cudaFreeHost(original_data);
}
for (size_t i = 0; i < decompression_times.size(); i++) {
cout << decompression_times[i] << endl;
}
cout << total_milli_decompress << endl;
// Raw (uncompressed) footprint, for comparison.
total_space = 0;
for (int j = 0; j < num_layers; j++) {
long num_elements = layer_sizes[j];
long cur_space = 0;
cur_space = num_elements * sizeof(float);
total_space += cur_space;
}
cout << "total space(MB): " << total_space * 1.0 / (1024 * 1024) << endl;
}
14,964 | /* typedef a 32 bit type */
typedef unsigned int UINT4;
/* Data structure for MD5 (Message Digest) computation */
typedef struct {
UINT4 i[2]; /* number of _bits_ handled mod 2^64 */
UINT4 buf[4]; /* scratch buffer */
unsigned char in[64]; /* input buffer */
unsigned char digest[16]; /* actual digest after MD5Final call */
} MD5_CTX;
/* forward declaration */
static void Transform (UINT4 * buf, UINT4 * in);
/* MD5 padding: a single 0x80 marker byte followed by zeros; MD5Final
   appends between 1 and 64 of these bytes to reach 56 mod 64. */
static unsigned char PADDING[64] = {
0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
};
/* F, G and H are basic MD5 functions: selection, majority, parity */
#define F(x, y, z) (((x) & (y)) | ((~x) & (z)))
#define G(x, y, z) (((x) & (z)) | ((y) & (~z)))
#define H(x, y, z) ((x) ^ (y) ^ (z))
#define I(x, y, z) ((y) ^ ((x) | (~z)))
/* ROTATE_LEFT rotates x left n bits */
#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32-(n))))
/* FF, GG, HH, and II transformations for rounds 1, 2, 3, and 4 */
/* Rotation is separate from addition to prevent recomputation */
#define FF(a, b, c, d, x, s, ac) \
{(a) += F ((b), (c), (d)) + (x) + (UINT4)(ac); \
(a) = ROTATE_LEFT ((a), (s)); \
(a) += (b); \
}
#define GG(a, b, c, d, x, s, ac) \
{(a) += G ((b), (c), (d)) + (x) + (UINT4)(ac); \
(a) = ROTATE_LEFT ((a), (s)); \
(a) += (b); \
}
#define HH(a, b, c, d, x, s, ac) \
{(a) += H ((b), (c), (d)) + (x) + (UINT4)(ac); \
(a) = ROTATE_LEFT ((a), (s)); \
(a) += (b); \
}
#define II(a, b, c, d, x, s, ac) \
{(a) += I ((b), (c), (d)) + (x) + (UINT4)(ac); \
(a) = ROTATE_LEFT ((a), (s)); \
(a) += (b); \
}
/* Resets an MD5 context: zeroes the 64-bit bit counter and loads the four
   standard chaining-variable initialization constants (A, B, C, D). */
void MD5Init (MD5_CTX * mdContext)
{
  static const UINT4 init_state[4] = {
    (UINT4)0x67452301, (UINT4)0xefcdab89,
    (UINT4)0x98badcfe, (UINT4)0x10325476
  };
  int word;
  mdContext->i[0] = (UINT4)0;
  mdContext->i[1] = (UINT4)0;
  for (word = 0; word < 4; word++)
    mdContext->buf[word] = init_state[word];
}
/* Feeds inLen bytes from inBuf into the digest. Bytes accumulate in the
   context's 64-byte buffer; each time it fills, one Transform round runs. */
void MD5Update (MD5_CTX * mdContext, unsigned char * inBuf, unsigned int inLen)
{
UINT4 in[16];
int mdi;
unsigned int i, ii;
/* compute number of bytes mod 64 */
mdi = (int)((mdContext->i[0] >> 3) & 0x3F);
/* update number of bits (carry into the high word on 32-bit overflow) */
if ((mdContext->i[0] + ((UINT4)inLen << 3)) < mdContext->i[0])
mdContext->i[1]++;
mdContext->i[0] += ((UINT4)inLen << 3);
mdContext->i[1] += ((UINT4)inLen >> 29);
while (inLen--) {
/* add new character to buffer, increment mdi */
mdContext->in[mdi++] = *inBuf++;
/* transform if necessary: repack the 64 buffered bytes as 16
   little-endian 32-bit words and run the compression function */
if (mdi == 0x40) {
for (i = 0, ii = 0; i < 16; i++, ii += 4)
in[i] = (((UINT4)mdContext->in[ii+3]) << 24) |
(((UINT4)mdContext->in[ii+2]) << 16) |
(((UINT4)mdContext->in[ii+1]) << 8) |
((UINT4)mdContext->in[ii]);
Transform (mdContext->buf, in);
mdi = 0;
}
}
}
/* Finishes the digest: pads the message to 56 mod 64 bytes, appends the
   original bit length, runs the final Transform, and serializes the state
   into mdContext->digest (little-endian byte order). */
void MD5Final (MD5_CTX * mdContext)
{
UINT4 in[16];
int mdi;
unsigned int i, ii;
unsigned int padLen;
/* save number of bits (before padding alters the counter) */
in[14] = mdContext->i[0];
in[15] = mdContext->i[1];
/* compute number of bytes mod 64 */
mdi = (int)((mdContext->i[0] >> 3) & 0x3F);
/* pad out to 56 mod 64 */
padLen = (mdi < 56) ? (56 - mdi) : (120 - mdi);
MD5Update (mdContext, PADDING, padLen);
/* append length in bits and transform (only words 0..13 come from the
   buffer; 14..15 hold the saved length) */
for (i = 0, ii = 0; i < 14; i++, ii += 4)
in[i] = (((UINT4)mdContext->in[ii+3]) << 24) |
(((UINT4)mdContext->in[ii+2]) << 16) |
(((UINT4)mdContext->in[ii+1]) << 8) |
((UINT4)mdContext->in[ii]);
Transform (mdContext->buf, in);
/* store buffer in digest, low-order byte of each word first */
for (i = 0, ii = 0; i < 4; i++, ii += 4) {
mdContext->digest[ii] = (unsigned char)(mdContext->buf[i] & 0xFF);
mdContext->digest[ii+1] =
(unsigned char)((mdContext->buf[i] >> 8) & 0xFF);
mdContext->digest[ii+2] =
(unsigned char)((mdContext->buf[i] >> 16) & 0xFF);
mdContext->digest[ii+3] =
(unsigned char)((mdContext->buf[i] >> 24) & 0xFF);
}
}
/* Basic MD5 step. Transform buf based on in.
*/
/* MD5 compression function: mixes one 16-word (64-byte) message block `in`
   into the 4-word chaining state `buf` via 4 rounds of 16 operations each.
   Shift amounts (S11..S44) and the decimal additive constants are the
   fixed per-round parameters of the algorithm — do not alter them. */
static void Transform (UINT4 * buf, UINT4 * in)
{
UINT4 a = buf[0], b = buf[1], c = buf[2], d = buf[3];
/* Round 1 */
#define S11 7
#define S12 12
#define S13 17
#define S14 22
FF ( a, b, c, d, in[ 0], S11, 3614090360); /* 1 */
FF ( d, a, b, c, in[ 1], S12, 3905402710); /* 2 */
FF ( c, d, a, b, in[ 2], S13, 606105819); /* 3 */
FF ( b, c, d, a, in[ 3], S14, 3250441966); /* 4 */
FF ( a, b, c, d, in[ 4], S11, 4118548399); /* 5 */
FF ( d, a, b, c, in[ 5], S12, 1200080426); /* 6 */
FF ( c, d, a, b, in[ 6], S13, 2821735955); /* 7 */
FF ( b, c, d, a, in[ 7], S14, 4249261313); /* 8 */
FF ( a, b, c, d, in[ 8], S11, 1770035416); /* 9 */
FF ( d, a, b, c, in[ 9], S12, 2336552879); /* 10 */
FF ( c, d, a, b, in[10], S13, 4294925233); /* 11 */
FF ( b, c, d, a, in[11], S14, 2304563134); /* 12 */
FF ( a, b, c, d, in[12], S11, 1804603682); /* 13 */
FF ( d, a, b, c, in[13], S12, 4254626195); /* 14 */
FF ( c, d, a, b, in[14], S13, 2792965006); /* 15 */
FF ( b, c, d, a, in[15], S14, 1236535329); /* 16 */
/* Round 2 */
#define S21 5
#define S22 9
#define S23 14
#define S24 20
GG ( a, b, c, d, in[ 1], S21, 4129170786); /* 17 */
GG ( d, a, b, c, in[ 6], S22, 3225465664); /* 18 */
GG ( c, d, a, b, in[11], S23, 643717713); /* 19 */
GG ( b, c, d, a, in[ 0], S24, 3921069994); /* 20 */
GG ( a, b, c, d, in[ 5], S21, 3593408605); /* 21 */
GG ( d, a, b, c, in[10], S22, 38016083); /* 22 */
GG ( c, d, a, b, in[15], S23, 3634488961); /* 23 */
GG ( b, c, d, a, in[ 4], S24, 3889429448); /* 24 */
GG ( a, b, c, d, in[ 9], S21, 568446438); /* 25 */
GG ( d, a, b, c, in[14], S22, 3275163606); /* 26 */
GG ( c, d, a, b, in[ 3], S23, 4107603335); /* 27 */
GG ( b, c, d, a, in[ 8], S24, 1163531501); /* 28 */
GG ( a, b, c, d, in[13], S21, 2850285829); /* 29 */
GG ( d, a, b, c, in[ 2], S22, 4243563512); /* 30 */
GG ( c, d, a, b, in[ 7], S23, 1735328473); /* 31 */
GG ( b, c, d, a, in[12], S24, 2368359562); /* 32 */
/* Round 3 */
#define S31 4
#define S32 11
#define S33 16
#define S34 23
HH ( a, b, c, d, in[ 5], S31, 4294588738); /* 33 */
HH ( d, a, b, c, in[ 8], S32, 2272392833); /* 34 */
HH ( c, d, a, b, in[11], S33, 1839030562); /* 35 */
HH ( b, c, d, a, in[14], S34, 4259657740); /* 36 */
HH ( a, b, c, d, in[ 1], S31, 2763975236); /* 37 */
HH ( d, a, b, c, in[ 4], S32, 1272893353); /* 38 */
HH ( c, d, a, b, in[ 7], S33, 4139469664); /* 39 */
HH ( b, c, d, a, in[10], S34, 3200236656); /* 40 */
HH ( a, b, c, d, in[13], S31, 681279174); /* 41 */
HH ( d, a, b, c, in[ 0], S32, 3936430074); /* 42 */
HH ( c, d, a, b, in[ 3], S33, 3572445317); /* 43 */
HH ( b, c, d, a, in[ 6], S34, 76029189); /* 44 */
HH ( a, b, c, d, in[ 9], S31, 3654602809); /* 45 */
HH ( d, a, b, c, in[12], S32, 3873151461); /* 46 */
HH ( c, d, a, b, in[15], S33, 530742520); /* 47 */
HH ( b, c, d, a, in[ 2], S34, 3299628645); /* 48 */
/* Round 4 */
#define S41 6
#define S42 10
#define S43 15
#define S44 21
II ( a, b, c, d, in[ 0], S41, 4096336452); /* 49 */
II ( d, a, b, c, in[ 7], S42, 1126891415); /* 50 */
II ( c, d, a, b, in[14], S43, 2878612391); /* 51 */
II ( b, c, d, a, in[ 5], S44, 4237533241); /* 52 */
II ( a, b, c, d, in[12], S41, 1700485571); /* 53 */
II ( d, a, b, c, in[ 3], S42, 2399980690); /* 54 */
II ( c, d, a, b, in[10], S43, 4293915773); /* 55 */
II ( b, c, d, a, in[ 1], S44, 2240044497); /* 56 */
II ( a, b, c, d, in[ 8], S41, 1873313359); /* 57 */
II ( d, a, b, c, in[15], S42, 4264355552); /* 58 */
II ( c, d, a, b, in[ 6], S43, 2734768916); /* 59 */
II ( b, c, d, a, in[13], S44, 1309151649); /* 60 */
II ( a, b, c, d, in[ 4], S41, 4149444226); /* 61 */
II ( d, a, b, c, in[11], S42, 3174756917); /* 62 */
II ( c, d, a, b, in[ 2], S43, 718787259); /* 63 */
II ( b, c, d, a, in[ 9], S44, 3951481745); /* 64 */
/* Feed-forward: add the round output back into the chaining state. */
buf[0] += a;
buf[1] += b;
buf[2] += c;
buf[3] += d;
}
/*
**********************************************************************
** End of md5.c **
******************************* (cut) ********************************
*/
/*
**********************************************************************
** md5driver.c -- sample routines to test **
** RSA Data Security, Inc. MD5 message digest algorithm. **
** Created: 2/16/90 RLR **
** Updated: 1/91 SRD **
**********************************************************************
*/
/*
**********************************************************************
** Copyright (C) 1990, RSA Data Security, Inc. All rights reserved. **
** **
** RSA Data Security, Inc. makes no representations concerning **
** either the merchantability of this software or the suitability **
** of this software for any particular purpose. It is provided "as **
** is" without express or implied warranty of any kind. **
** **
** These notices must be retained in any copies of any part of this **
** documentation and/or software. **
**********************************************************************
*/
#include <stdio.h>
#include <sys/types.h>
#include <time.h>
#include <string.h>
/* Prints message digest buffer in mdContext as 32 hexadecimal digits.
Order is from low-order byte to high-order byte of digest.
Each byte is printed with high-order hexadecimal digit first.
*/
/* Renders the 16 digest bytes of mdContext as 32 lowercase hex digits,
   written (NUL-terminated) into the caller-supplied buffer `hash`.
   Caller must provide at least 33 bytes. */
static void MDPrint (MD5_CTX * mdContext,char * hash)
{
  char *cursor = hash;
  int byte;
  for (byte = 0; byte < 16; byte++) {
    /* sprintf returns the 2 characters written, advancing the cursor. */
    cursor += sprintf (cursor, "%02x", mdContext->digest[byte]);
  }
}
static void MDString (char * inString,char * hash)
{
MD5_CTX mdContext;
unsigned int len = strlen (inString);
unsigned char * uInString = reinterpret_cast<unsigned char *>( inString );
MD5Init (&mdContext);
MD5Update (&mdContext, uInString, len);
MD5Final (&mdContext);
MDPrint (&mdContext,hash);
//printf (" \"%s\"\n\n", uInString);
} |
14,965 | /*
* This sample implements a separable convolution
* of a 2D image with an arbitrary filter.
*/
#include <time.h>
#include <stdio.h>
#include <stdlib.h>
//unsigned int radius;
// Filter radius: the 1-D separable filter spans [-radius, +radius].
#define radius 16
#define FILTER_LENGTH (2 * radius + 1)
// NOTE: ABS evaluates its argument twice — do not pass expressions with
// side effects.
#define ABS(val) ((val)<0.0 ? (-(val)) : (val))
// Maximum tolerated CPU/GPU per-pixel difference before reporting an error.
#define accuracy 6
// Tile dimensions for the row (R) and column (C) convolution kernels;
// these also define the kernels' block shapes.
#define tileRH 1
#define tileRW 512
#define tileCH 16
#define tileCW 16
// Element type used throughout (switch to double here to test precision).
typedef float numid;
// Filter coefficients, broadcast to all threads from constant memory.
__constant__ numid d_Filter[FILTER_LENGTH];
/* Horizontal convolution pass over a zero-padded image.
   d_Src/d_Dst are (imageW + 2*radius) x (imageH + 2*radius) buffers with
   the image at offset (radius, radius). Launch with blocks of
   tileRW x tileRH threads; each block stages its tile plus a left/right
   halo of `radius` pixels into shared memory. */
__global__ void tiledConvRowGPU(numid *d_Dst, numid *d_Src, int imageW, int imageH){
int k;
numid sum = 0;
int tx=threadIdx.x;
int ty=threadIdx.y;
int bx=blockIdx.x;
int by=blockIdx.y;
int row = blockDim.y*by + ty ;
int col = blockDim.x*bx + tx;
int newImageW = imageW + radius * 2; // pitch of the padded buffer
__shared__ numid ShMemory[tileRH] [tileRW + 2 * radius];
if(tx-radius<0){ //Near Left Bounds
ShMemory[ty][tx] = d_Src[(row+radius) * newImageW + col];
}
ShMemory[ty][tx+radius] = d_Src[(row+radius) * newImageW + col + radius]; //Center
if(tx >= (tileRW - radius)){
ShMemory[ty] [tx + 2 * radius] = d_Src[(row+radius) * newImageW + col + 2 * radius]; //Near Right Bounds
}
// All halo/center loads must land before any thread reads the tile.
__syncthreads();
for (k = -radius; k <= radius; k++) {
sum += ShMemory[ty][tx+k+radius] * d_Filter[radius - k];
}
d_Dst[(row+radius) * newImageW + col+radius] = sum;
}
/* Vertical convolution pass over the zero-padded buffer produced by the
   row pass. Launch with blocks of tileCW x tileCH threads; each block
   stages its tile plus a top/bottom halo of `radius` rows. */
__global__ void tiledConvColGPU(numid *d_Dst, numid *d_Src, int imageW, int imageH){
int k;
numid sum = 0;
int tx=threadIdx.x;
int ty=threadIdx.y;
int bx=blockIdx.x;
int by=blockIdx.y;
int row = blockDim.y*by + ty ;
int col = blockDim.x*bx + tx;
int newImageW = imageW + radius * 2; // pitch of the padded buffer
__shared__ numid ShMemory[tileCH + 2 * radius][ tileCW];
if(ty-radius<0){ //Upper Bounds
ShMemory[ty] [tx] = d_Src[row * newImageW + col + radius];
}
ShMemory[ty + radius][ tx ] = d_Src[(row + radius) * newImageW + col + radius ]; //Center
ShMemory[ty + 2 * radius ][ tx ] = d_Src[(row + 2* radius) * newImageW + col + radius ]; //Lower Bounds
// All halo/center loads must land before any thread reads the tile.
__syncthreads();
for (k = -radius; k <= radius; k++) {
sum += ShMemory[(ty + k + radius)][tx] * d_Filter[radius - k];
}
d_Dst[ (row + radius) * newImageW + col + radius] = sum;
}
////////////////////////////////////////////////////////////////////////////////
// Reference row convolution filter
////////////////////////////////////////////////////////////////////////////////
/* Reference (CPU) horizontal 1-D convolution with implicit zero padding:
   h_Dst[y][x] = sum over k of h_Src[y][x+k] * h_Filter[filterR-k],
   skipping taps that fall outside [0, imageW). */
void convolutionRowCPU(numid *h_Dst, numid *h_Src, numid *h_Filter,
int imageW, int imageH, int filterR) {
  int x, y, k;
  for (y = 0; y < imageH; y++) {
    for (x = 0; x < imageW; x++) {
      numid sum = 0;
      for (k = -filterR; k <= filterR; k++) {
        int d = x + k;
        if (d >= 0 && d < imageW) {
          sum += h_Src[y * imageW + d] * h_Filter[filterR - k];
        }
      }
      // Store once per pixel (previously the store sat inside the k-loop,
      // rewriting the destination on every filter tap).
      h_Dst[y * imageW + x] = sum;
    }
  }
}
////////////////////////////////////////////////////////////////////////////////
// Reference column convolution filter
////////////////////////////////////////////////////////////////////////////////
/* Reference (CPU) vertical 1-D convolution with implicit zero padding:
   h_Dst[y][x] = sum over k of h_Src[y+k][x] * h_Filter[filterR-k],
   skipping taps that fall outside [0, imageH). */
void convolutionColumnCPU(numid *h_Dst, numid *h_Src, numid *h_Filter,
int imageW, int imageH, int filterR) {
  int x, y, k;
  for (y = 0; y < imageH; y++) {
    for (x = 0; x < imageW; x++) {
      numid sum = 0;
      for (k = -filterR; k <= filterR; k++) {
        int d = y + k;
        if (d >= 0 && d < imageH) {
          sum += h_Src[d * imageW + x] * h_Filter[filterR - k];
        }
      }
      // Store once per pixel (previously the store sat inside the k-loop,
      // rewriting the destination on every filter tap).
      h_Dst[y * imageW + x] = sum;
    }
  }
}
////////////////////////////////////////////////////////////////////////////////
// Main program
////////////////////////////////////////////////////////////////////////////////
/* Runs the separable convolution on a random square image of user-supplied
   size N (argv[1]), on both CPU and GPU, times both, and compares the
   results element-wise against the `accuracy` threshold. */
int main(int argc, char **argv) {
  cudaSetDevice(0);
  numid
  *h_Filter,
  *h_Input,
  *h_PadInput,
  *h_Buffer,
  *h_OutputCPU,
  *d_Input,
  *d_Buffer,
  *d_OutputGPU,
  *result;
  struct timespec tv1, tv2;
  cudaEvent_t start;
  cudaEvent_t stop;
  cudaEventCreate(&start);
  cudaEventCreate(&stop);
  int imageW;
  int imageH;
  unsigned int i,j;
  if(argc<2){
    printf("Please specify the image size as execution arguments\n");
    return 0;
  }
  imageW=atoi(argv[1]);
  // Square images: imageW == imageH == N (user supplied).
  imageH = imageW;
  printf("Image Width x Height = %i x %i\n\n", imageW, imageH);
  printf("Allocating and initializing host arrays...\n");
  h_Filter = (numid *)malloc(FILTER_LENGTH * sizeof(numid));
  h_Input = (numid *)malloc(imageW * imageH * sizeof(numid));
  h_PadInput = (numid *)malloc((imageW+radius*2 )*(2*radius+ imageH) * sizeof(numid)) ;
  h_Buffer = (numid *)malloc(imageW * imageH * sizeof(numid));
  h_OutputCPU = (numid *)malloc(imageW * imageH * sizeof(numid));
  result = (numid *)malloc((imageW+2*radius) * (imageH+2*radius) * sizeof(numid));
  // Host allocations were previously unchecked.
  if(h_Filter==NULL || h_Input==NULL || h_PadInput==NULL ||
     h_Buffer==NULL || h_OutputCPU==NULL || result==NULL){
    printf("Cuda Malloc Failed\n");
    return 0;
  }
  cudaMalloc(&d_Input,(imageW+2*radius)*(imageH+2*radius)*sizeof(numid));
  cudaMalloc(&d_Buffer,(imageW+2*radius)*(imageH+2*radius)*sizeof(numid));
  cudaMemset(d_Buffer,0,(imageW+2*radius)*(imageH+2*radius)*sizeof(numid));
  cudaMalloc(&d_OutputGPU,(imageW+2*radius)*(imageH+2*radius)*sizeof(numid));
  // d_Filter is a __constant__ symbol, not a cudaMalloc'ed pointer, so it
  // is excluded here (the old `d_Filter==NULL` test was meaningless).
  if(d_Input==NULL || d_Buffer==NULL || d_OutputGPU==NULL){
    printf("Cuda Malloc Failed\n");
    return 0;
  }
  cudaMemset(d_OutputGPU,0,(imageW+2*radius)*(imageH+2*radius)*sizeof(numid));
  // Random filter and image; fixed seed keeps runs reproducible.
  srand(200);
  for (i = 0; i < FILTER_LENGTH; i++) {
    h_Filter[i] = (numid)(rand() % 16);
  }
  for (i = 0; i < imageW * imageH; i++) {
    h_Input[i] = (numid)rand() / ((numid)RAND_MAX / 255) + (numid)rand() / (numid)RAND_MAX;
  }
  // CPU reference, used below to validate the GPU result.
  printf("CPU computation...\n");
  clock_gettime(CLOCK_MONOTONIC_RAW, &tv1);
  convolutionRowCPU(h_Buffer, h_Input, h_Filter, imageW, imageH, radius);
  convolutionColumnCPU(h_OutputCPU, h_Buffer, h_Filter, imageW, imageH, radius);
  clock_gettime(CLOCK_MONOTONIC_RAW, &tv2);
  printf ("CPU time = %10g seconds\n",
          (double) (tv2.tv_nsec - tv1.tv_nsec) / 1000000000.0 +
          (double) (tv2.tv_sec - tv1.tv_sec));
  // NOTE(review): these grids assume imageW is a multiple of tileRW (512)
  // and tileCW (16) — confirm the accepted input range.
  dim3 dimGridR(imageW/tileRW,imageH/tileRH);
  dim3 dimBlockR(tileRW,tileRH);
  dim3 dimGridC(imageW/tileCW,imageH/tileCH);
  dim3 dimBlockC(tileCW,tileCH);
  // Zero the padded copy, then paste the image into its centre.
  for(i=0;i<(imageW+2*radius)*(imageW+2*radius);i++){
    h_PadInput[i]=0;
  }
  for(i=0;i<imageW;i++){
    for(j=0;j<imageW;j++){
      h_PadInput[(i+radius)*(2*radius+imageW)+j+radius]=h_Input[i*imageW+j];
    }
  }
  printf("GPU computation... \n");
  cudaMemcpyToSymbol(d_Filter, h_Filter,FILTER_LENGTH*sizeof(numid));
  cudaMemcpy(d_Input,h_PadInput,(imageH+2*radius)*(imageW+2*radius)*sizeof(numid),cudaMemcpyHostToDevice);
  cudaEventRecord(start,0);
  tiledConvRowGPU <<< dimGridR, dimBlockR >>>(d_Buffer, d_Input, imageW, imageH);
  cudaDeviceSynchronize(); // cudaThreadSynchronize() is deprecated
  cudaError_t error=cudaGetLastError();
  if(error!=cudaSuccess){
    printf("Cuda Error:%s\n",cudaGetErrorString(error));
    cudaDeviceReset();
    return 0;
  }
  tiledConvColGPU <<< dimGridC, dimBlockC >>>(d_OutputGPU, d_Buffer , imageW, imageH);
  cudaDeviceSynchronize();
  error=cudaGetLastError();
  if(error!=cudaSuccess){
    printf("Cuda Error:%s\n",cudaGetErrorString(error));
    cudaDeviceReset();
    return 0;
  }
  cudaEventRecord(stop,0);
  cudaMemcpy(result,d_OutputGPU,(imageH+2*radius)*(imageW+2*radius)*sizeof(numid),cudaMemcpyDeviceToHost);
  float elapsed;
  cudaEventSynchronize(stop);
  cudaEventElapsedTime(&elapsed,start,stop);
  printf("GPU time :%f ms.\n",elapsed);
  // Compare the interior of the padded GPU result against the CPU
  // reference; report any element beyond the accuracy threshold.
  for(i=0;i<imageW;i++){
    for(j=0;j<imageH;j++){
      numid diff= h_OutputCPU[i*imageW+j]-result[(i+radius)*(imageW+2*radius)+j+radius];
      if(ABS(diff)>accuracy){
        printf("sfalma akriveias %f",ABS(diff));
      }
    }
  }
  // Free everything. h_PadInput and result were previously leaked, and
  // cudaFree(d_Filter) was invalid — __constant__ memory is never freed.
  free(result);
  free(h_PadInput);
  free(h_OutputCPU);
  free(h_Buffer);
  free(h_Input);
  free(h_Filter);
  cudaFree(d_OutputGPU);
  cudaFree(d_Buffer);
  cudaFree(d_Input);
  cudaDeviceReset();
  return 0;
}
|
14,966 | extern "C"{
// #include <stdio.h>
// #define QUANTIZED_LEVEL 3
#define WORKER_NUMBER 3
/* BKDR Hash Function */
/* BKDR multiplicative string hash over exactly the first 4 bytes of `str`,
   masked to 31 bits so it is usable as a non-negative table index. */
__device__ unsigned int BKDR_hash(char *str){
    const unsigned int seed = 131;
    unsigned int hash_value = 0;
    int byte = 0;
    while (byte < 4) {
        hash_value = hash_value * seed + (str[byte]);
        ++byte;
    }
    return (hash_value & 0x7FFFFFFF);
}
// change the type of keysInGroup_gpu, add quant_level,to add bit_unit
/* One thread per gradient element: quantize the element to its nearest
   cluster center, pack the quant_level-bit code into the shared key words
   (bit_uint codes per 32-bit word), and accumulate (value, count) into the
   chosen cluster's hash table for later averaging.
   keysInGroup_gpu must be zero-initialized (codes are OR-ed in). */
// change the type of keysInGroup_gpu, add quant_level,to add bit_unit
__global__ void InsertKernel(float *lsstable_gpu, unsigned int *keysInGroup_gpu, float *grad_gpu, int *lsstable_size_gpu, float * cluster_center_gpu, int grad_num, int cluster_number, int quant_level, int max_table_size){
int idx = blockIdx.x * blockDim.x + threadIdx.x;
int bit_uint = int(32/quant_level);// how many code in one longlong int, should be pass by the parameter, to do
if (idx < grad_num){
int col = idx % bit_uint;
int row = int(idx / bit_uint);// code cordination
// Linear scan for the cluster center with the smallest absolute
// difference from this gradient value.
float tmp = fabsf(cluster_center_gpu[0] - grad_gpu[idx]);
int i;
unsigned int tmp_index = 0;//closest cluster index
for(i=1;i<cluster_number;i++){
float tmp_diff = fabsf(cluster_center_gpu[i] - grad_gpu[idx]);
if(tmp_diff < tmp){
tmp = tmp_diff;
tmp_index = i;
}
}
// Shift the code into its slot and merge it atomically (several threads
// share one key word).
unsigned int tmp_bits = tmp_index;
tmp_bits <<= (quant_level * col);
atomicOr(&(keysInGroup_gpu[row]), tmp_bits);
// if(idx==15){
// printf("id %d, quant %d\n", idx,tmp_index);
// printf("id %d, tmp_bits %d\n", idx,tmp_bits);
// printf("id %d, keysInGroup_gpu %d\n", idx,keysInGroup_gpu[row]);
// }
// printf("id %d, quant %d\n", idx,keysInGroup_gpu[row]);
// Hash the element index into the cluster's table: row 2*c accumulates
// values, row 2*c+1 counts hits (averaged later by get_lsstable_kernel).
int hash_pos = (BKDR_hash((char *)&idx)) % (lsstable_size_gpu[tmp_index]);
atomicAdd(&(lsstable_gpu[tmp_index*2*max_table_size + hash_pos]), grad_gpu[idx]);
atomicAdd(&(lsstable_gpu[(tmp_index*2+1)*max_table_size + hash_pos]), 1.0);
}
}
/* Finalizes the LSS tables: one thread per table slot (blockIdx.y selects
   the cluster table) divides each accumulated sum by its hit count,
   turning (sum, count) pairs into averages. Slots with zero hits are left
   untouched. */
__global__ void get_lsstable_kernel(float *lsstable_gpu, int *lsstable_size_gpu, int cluster_number, int max_table_size){
    int slot = blockDim.x * blockIdx.x + threadIdx.x;
    int table = blockIdx.y;
    if (slot >= lsstable_size_gpu[table]) return;
    float hits = lsstable_gpu[(table * 2 + 1) * max_table_size + slot];
    if (hits != 0) {
        lsstable_gpu[(table * 2) * max_table_size + slot] /= hits;
    }
}
/****
__global__ void DecodeKernel(float *lsstable_gpu, unsigned char *keysInGroup_gpu, float *grad_gpu, int *lsstable_size_gpu, int grad_num, int cluster_number, int max_table_size){
__shared__ float cache[128][TOTAL_CLUSTER_NUMBER];
int idx = threadIdx.x + blockIdx.x * blockDim.x;
int idy = threadIdx.y;
if (idx < grad_num){
int group_times = keysInGroup_gpu[idx*cluster_number + idy];
int hash_pos = (BKDR_hash((char *)&idx)) % (lsstable_size_gpu[idy]);
cache[threadIdx.x][threadIdx.y] = lsstable_gpu[2*idy*max_table_size + hash_pos] * group_times;
}
__syncthreads();
int i = TOTAL_CLUSTER_NUMBER/2;
while(i != 0){
if(threadIdx.y<i){
cache[threadIdx.x][threadIdx.y] += cache[threadIdx.x][threadIdx.y+i];
}
__syncthreads();
i /= 2;
}
if(idx < grad_num && threadIdx.y==0){
grad_gpu[idx] = cache[threadIdx.x][0]/TOTAL_CLUSTER_NUMBER;
}
}
****/
// change the type of keysInGroup_gpu, add int quant_level, int workers_number, int keysInGroup_size, to add bit_unit
/* Decodes one gradient per threadIdx.x by averaging the table lookups of
   all workers' codes: thread (x, y) extracts worker y's quantized code for
   gradient idx, looks up that cluster's table value, and a tree reduction
   over y averages the workers_number values into grad_gpu[idx].
   Launch requirements implied by the indexing below: blockDim.y ==
   workers_number (power of two for the halving reduction) and dynamic
   shared memory of blockDim.x * workers_number * sizeof(float). */
// change the type of keysInGroup_gpu, add int quant_level, int workers_number, int keysInGroup_size, to add bit_unit
__global__ void Decode_Multi_Kernel(float *lsstable_gpu, unsigned int *keysInGroup_gpu, float *grad_gpu, int *lsstable_size_gpu, int grad_num, int workers_number, int keysInGroup_size, int quant_level, uint cluster_number, int max_table_size){
extern __shared__ float cache[]; //each row save the decode value from the concatenate keysInGroup and compute one grad, 128*WORKER_NUMBER
int idx = threadIdx.x + blockIdx.x * blockDim.x; //to find the grad id
int idy = threadIdx.y; //to find which worker's keysInGroup
int bit_uint = int(32/quant_level);// how many code in one longlong int, should be pass by the parameter, to do
if (idx < grad_num){
int col = idx % bit_uint;
int row = int(idx / bit_uint);// code cordination
// Build a mask for this code's bit slot, extract the code from worker
// idy's key word, and shift it back down.
unsigned int tmp_bits = (cluster_number-1) << (quant_level * col);//mask
tmp_bits &= keysInGroup_gpu[idy * keysInGroup_size + row];
int group = tmp_bits >> (quant_level * col);//get the code
// if(idx==(grad_num-1)){
// printf("id %d, pos %d\n", idx,idy * keysInGroup_size + row);
// printf("id %d, tmp_bits %d\n", idx,tmp_bits);
// printf("id %d, quant %d\n", idx,group);
// }
// Look up the averaged value stored for this element in that cluster.
int hash_pos = (BKDR_hash((char *)&idx)) % (lsstable_size_gpu[group]);
cache[threadIdx.x* workers_number + threadIdx.y] = lsstable_gpu[2*group*max_table_size + hash_pos];
}
__syncthreads();
// Halving tree reduction over the worker dimension of each cache row.
int i = workers_number/2;
while(i != 0){
if(threadIdx.y<i){
cache[threadIdx.x* workers_number + threadIdx.y] += cache[threadIdx.x * workers_number + (threadIdx.y+i)];
}
__syncthreads();
i /= 2;
}
if(idx < grad_num && threadIdx.y==0){
grad_gpu[idx] = cache[threadIdx.x * workers_number + 0]/workers_number;
}
}
} |
14,967 | #include<cuda_runtime.h>
#include<device_launch_parameters.h>
//#include<helper_cuda.h>
#include<cuda.h>
#include<stdio.h>
#define BLOCK_X 4
#define BLOCK_Y 4
#define N 8
/* Elementwise c = a + b over an N x N layout (i = row, j = column), plus
   two sentinel values written into dout so the host can verify execution. */
__global__ void add(double *a, double *b, double *c, double *dout){
int i = threadIdx.x + BLOCK_X * blockIdx.x;
int j = threadIdx.y + BLOCK_Y * blockIdx.y;
int idx = i * N + j;
c[idx] = a[idx] + b[idx];
__syncthreads();
// NOTE(review): every thread in every block re-stores c[0]. All writes
// carry the same value, so the result is stable, but it is a redundant
// cross-block race — confirm whether a single-thread guard was intended.
c[0] = a[0] + b[0];
dout[0] = 1919;
dout[1] = 9199;
}
/* Fills two N x N matrices on the host, adds them on the device, and
   prints the result plus the kernel's two sentinel values. */
int main(){
    double *a, *b, *out;
    double *d_a, *d_b, *d_out;
    double *iout, *dout;
    a = (double*)malloc(sizeof(double) * N * N);
    b = (double*)malloc(sizeof(double) * N * N);
    out = (double*)malloc(sizeof(double) * N * N);
    iout = (double*)malloc(sizeof(double) * 2);
    cudaMalloc((void**)&d_out, sizeof(double) * N * N);
    cudaMalloc((void**)&d_a, sizeof(double) * N * N);
    cudaMalloc((void**)&d_b, sizeof(double) * N * N);
    cudaMalloc((void**)&dout, sizeof(double) * 2);
    for (int i = 0 ; i < N * N; ++i){
        a[i] = 1.0f;
        b[i] = 2.0f;
    }
    cudaMemcpy(d_a, a, sizeof(double) * N * N, cudaMemcpyHostToDevice);
    cudaMemcpy(d_b, b, sizeof(double) * N * N, cudaMemcpyHostToDevice);
    // Ceil-divide N by the block edge so the whole matrix is covered.
    int bx = 1 + (N - 1) / BLOCK_X;
    int by = 1 + (N - 1) / BLOCK_Y;
    dim3 dimGrid(bx, by);
    dim3 dimBlock(BLOCK_X, BLOCK_Y);
    add<<<dimGrid, dimBlock>>>(d_a, d_b, d_out, dout);
    // Surface launch-configuration errors (the launch itself is async).
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess){
        printf("Cuda Error:%s\n", cudaGetErrorString(err));
    }
    // Blocking copies also synchronize with the kernel.
    cudaMemcpy(out, d_out, sizeof(double) * N * N, cudaMemcpyDeviceToHost);
    cudaMemcpy(iout, dout, sizeof(double) * 2, cudaMemcpyDeviceToHost);
    for (int i = 0; i < N * N; ++i){
        printf("%f ", out[i]);
    }
    printf("\n");
    printf("%f\n", iout[0]);
    printf("%f\n", iout[1]);
    // Release all allocations (everything was previously leaked).
    cudaFree(d_a);
    cudaFree(d_b);
    cudaFree(d_out);
    cudaFree(dout);
    free(a);
    free(b);
    free(out);
    free(iout);
    return 0;
}
|
14,968 |
// nvcc UseThrust.cu -o /tmp/a.out && /tmp/a.out
#include <sstream>
#include <iostream>
#include <thrust/version.h>
// Builds a human-readable Thrust version string "MAJOR.MINOR.SUBMINORpPATCH".
static std::string ThrustVersionString()
{
    std::ostringstream out;
    out << THRUST_MAJOR_VERSION << "."
        << THRUST_MINOR_VERSION << "."
        << THRUST_SUBMINOR_VERSION << "p"
        << THRUST_PATCH_NUMBER;
    return out.str();
}
// Prints the Thrust version string followed by a newline, then exits.
int main(int argc, char** argv)
{
    (void)argc;
    (void)argv;
    std::cout << ThrustVersionString() << std::endl;
    return 0;
}
|
14,969 | #include <stdio.h>
#include <cuda.h>
// Four trivial kernels that each print an identifying line; they exist only
// so the host code can observe cross-stream / cross-device ordering.
__global__ void K1() {
printf("in K1\n");
}
__global__ void K2() {
printf("in K2\n");
}
__global__ void K3() {
printf("in K3\n");
}
__global__ void K4() {
printf("in K4\n");
}
// Demonstrates cross-device event dependencies:
// device 0 / stream s0 runs K1, records e0, runs K2;
// device 1 / stream s1 runs K3 freely, then makes K4 wait on e0.
// Requires a system with at least two CUDA devices.
int main() {
    cudaStream_t s0, s1;
    cudaEvent_t e0, e1;
    cudaSetDevice(0);
    cudaStreamCreate(&s0);
    cudaEventCreate(&e0);
    K1<<<1, 1, 0, s0>>>();
    cudaEventRecord(e0, s0);
    K2<<<1, 1, 0, s0>>>();
    cudaSetDevice(1);
    cudaStreamCreate(&s1);
    cudaEventCreate(&e1);
    K3<<<1, 1, 0, s1>>>();
    // Cross-device event waits are legal: K4 will not start until e0 fires.
    cudaStreamWaitEvent(s1, e0, 0);
    K4<<<1, 1, 0, s1>>>();
    cudaDeviceSynchronize();
    cudaSetDevice(0);
    cudaDeviceSynchronize();
    // BUGFIX: destroy the streams and events the original leaked.
    cudaStreamDestroy(s1);
    cudaEventDestroy(e1);
    cudaStreamDestroy(s0);
    cudaEventDestroy(e0);
    return 0;
}
|
extern "C" __global__ void
fft_norm(float2* arr, int size)
{
    // Normalizes an FFT result in place by dividing each complex element
    // (real and imaginary parts) by the transform size.
    int i = blockDim.x * blockIdx.x + threadIdx.x;
    // BUGFIX: grids are typically rounded up to a block multiple, so trailing
    // threads must not touch memory past the end of arr (no guard originally).
    if (i < size) {
        arr[i].x /= size;
        arr[i].y /= size;
    }
}
14,971 | //#include "../common/book.h"
#include<iostream>
#define N 10
#define CUDA_KERNEL __global__
using namespace std;
// Generic helper that launches kernel `fun` with a single thread (<<<1,1>>>),
// forwarding all arguments by value.
// NOTE(review): the post-launch error check is commented out; consider
// restoring a cudaGetLastError() check here.
template <typename F, typename ...Args>
void cuda_launch_kernel(F fun, Args ...args) {
fun<<<1,1>>>(args...);
//cuda_check_last(typeid(F).name());
}
// Adds two length-N int vectors element by element: c[i] = a[i] + b[i].
// Written for a single-thread launch: one thread walks the whole array.
CUDA_KERNEL void add( int *a, int *b, int *c ) {
    for (int idx = 0; idx != N; ++idx) {
        c[idx] = a[idx] + b[idx];
    }
}
// Host driver: fills a[i] = i and b[i] = i*i, adds them on the GPU through the
// generic single-thread launch helper, and prints each sum.
int main( void ) {
    int a[N], b[N], c[N];
    int *dev_a, *dev_b, *dev_c;
    // Device allocations.
    cudaMalloc( (void**)&dev_a, N * sizeof(int) );
    cudaMalloc( (void**)&dev_b, N * sizeof(int) );
    cudaMalloc( (void**)&dev_c, N * sizeof(int) );
    // Populate host inputs.
    for (int i = 0; i < N; ++i) {
        a[i] = i;
        b[i] = i * i;
        c[i] = 0;
    }
    // Upload the inputs.
    cudaMemcpy( dev_a, a, N * sizeof(int), cudaMemcpyHostToDevice );
    cudaMemcpy( dev_b, b, N * sizeof(int), cudaMemcpyHostToDevice );
    cuda_launch_kernel(add, dev_a, dev_b, dev_c);
    // Download the result (a blocking copy also waits for the kernel).
    cudaMemcpy( c, dev_c, N * sizeof(int), cudaMemcpyDeviceToHost );
    for (int i = 0; i < N; ++i) {
        printf( "%d + %d = %d\n", a[i], b[i], c[i] );
    }
    // Release device memory.
    cudaFree( dev_a );
    cudaFree( dev_b );
    cudaFree( dev_c );
    return 0;
}
|
14,972 | /*
*
* PROBLEM STATEMENT:
*
* Each row of the given matrix is exponentiated as many times as the row number. parallely
* eg. row 1 is the same. row 2 is squared. row 3 is cubed and so on...
*
*
*/
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdio.h>
#include <stdlib.h>
// Raises every element of row r (0-based) to the power r+1, in parallel:
// the block index selects the row and the thread index the column, so row 0
// is unchanged, row 1 is squared, row 2 cubed, and so on.
__global__ void exponentiate(int *A, int *B){
    int col = threadIdx.x;
    int row = blockIdx.x;
    int width = blockDim.x;
    int pos = row * width + col;
    // Accumulate locally, multiplying row+1 times, then store once.
    int acc = 1;
    for (int p = 0; p <= row; ++p) {
        acc *= A[pos];
    }
    B[pos] = acc;
}
// Reads an m x n matrix from stdin, exponentiates each row on the GPU
// (row r raised to the power r+1), prints the result, and cleans up.
int main(){
    int *a, *t, m, n;
    int *d_a, *d_t;
    printf("Enter the value of m: "); scanf("%d",&m);
    printf("Enter the value of n: "); scanf("%d",&n);
    int size = sizeof(int)*m*n;
    a=(int*)malloc(size);
    t=(int*)malloc(size);
    printf("Enter input matrix: \n");
    for(int i=0; i<m*n; i++)
        scanf("%d",&a[i]);
    cudaMalloc((void**)&d_a,size);
    cudaMalloc((void**)&d_t,size);
    cudaMemcpy(d_a,a,size,cudaMemcpyHostToDevice);
    // One block per row, one thread per column.
    exponentiate<<<m,n>>>(d_a,d_t);
    cudaMemcpy(t,d_t,size,cudaMemcpyDeviceToHost);
    printf("Resultant matrix:\n");
    for(int i=0; i<m; i++){
        for(int j=0; j<n; j++){
            printf("%d ",t[i*n+j]);
        }
        printf("\n");
    }
    cudaFree(d_a);
    cudaFree(d_t);
    // BUGFIX: free the host buffers (the original leaked them).
    free(a);
    free(t);
    return 0;
}
|
14,973 | // Introduction to GPU Programming
// Module 05: Shared and Constant Memory
// Author : Justin Renga
// Date : 03/03/2017
// Bibliography (source-list):
// [1] http://cuda-programming.blogspot.com/2013/01/what-is-constant-memory-in-cuda.html
// [2] constant_memory.cu
// [3] shared_memory2.cu
// #includes
#include <time.h>
#include <stdio.h>
#include <stdlib.h>
#define ELEMENT_COUNT 2048
// Declare CONSTANT Memory here:
__constant__ static unsigned int MASKS[2];
// Declare static SHARED Memory here:
__shared__ unsigned int sharedMemory [ELEMENT_COUNT];
// Declare global host data here:
unsigned int initializedRNG;
// ---------------------------------------- DEVICE OPERATIONS -----------------------------------------
// @brief Perform a masking operation based on the evenness of the data
//
// XORs each input value with MASKS[0] (even values) or MASKS[1] (odd values)
// from constant memory. Assumes the grid exactly covers the arrays
// (elementCount == gridDim.x * blockDim.x); there is no bounds guard.
//
// @param [ in] data An array that contains input data
// @param [out] output An array that contains the resulting data
__global__ void maskBits(const unsigned int* data,
unsigned int* const output)
{
// Compute the global thread index (one thread per element).
const unsigned int threadIndex = (blockIdx.x * blockDim.x) + threadIdx.x;
const unsigned int dataValue = data[threadIndex];
// Apply the parity-selected mask value and assign to the output.
output[threadIndex] = dataValue ^ MASKS[dataValue % 2];
}
// @brief Performs an array reversal
//
// Each thread copies deviceData[elementCount - i - 1] into the staging buffer
// at slot i, barriers, then writes it back to deviceData[i].
//
// NOTE(review): `data` is a per-block __shared__ buffer but is indexed with the
// GLOBAL thread index, and deviceData reads/writes cross block boundaries while
// __syncthreads() only synchronizes within a block. With more than one block,
// another block may overwrite deviceData[rotatedIndex] before it is read here —
// a data race. Safe only for single-block launches (or with a separate output
// buffer).
//
// @param [ in] The shared memory
// @param [inout] The device memory
// @param [ in] The number of elements
// @param [ in] The current thread index
__device__ void rotateArray(unsigned int* const data,
unsigned int* const deviceData,
unsigned int elementCount,
unsigned int threadIndex)
{
// Get the next appropriately rotated thread index
unsigned int rotatedIndex = elementCount - threadIndex - 1;
data[threadIndex] = deviceData[rotatedIndex];
__syncthreads();
// Store the newly rotated memory back into global memory
deviceData[threadIndex] = data[threadIndex];
}
// @brief A kernel function used to flip the contents of an array
//
// Delegates to rotateArray, using the module-level static __shared__ buffer
// declared above as per-block scratch space.
__global__ void swapArray(unsigned int* data, const unsigned int elementCount)
{
// Get the current (global) thread index
unsigned int threadIndex = (blockIdx.x * blockDim.x) + threadIdx.x;
rotateArray(sharedMemory, data, elementCount, threadIndex);
}
// ----------------------------------------- HOST OPERATIONS -----------------------------------------
// @brief Seed the C random number generator exactly once per process run,
// guarded by the global initializedRNG flag.
void initializeRandomNumbers()
{
    if (initializedRNG != 0)
    {
        return;
    }
    srand(time(NULL));
    initializedRNG = 1;
}
// @brief Generates a series of random numbers for the provided array, given
// the number of desired numbers and the maximum (exclusive) value.
// Lazily seeds the RNG on first use.
//
// @param [inout] data The data array that will contain the random numbers
// @param [ in] elementCount The number of elements to store in the data array
// @param [ in] max The maximum random number to use (exclusive)
void generateRandomNumbers( unsigned int* data,
                            const unsigned int elementCount,
                            const unsigned int max)
{
    // Seed the RNG if nothing has done so yet.
    if (initializedRNG == 0)
    {
        initializeRandomNumbers();
    }
    unsigned int i = 0;
    while (i < elementCount)
    {
        data[i] = rand() % max;
        ++i;
    }
}
// @brief Host function to run the GPU algorithms
//
// Allocates host and device buffers of threadCount * blockCount elements,
// runs either the constant-memory masking kernel or the shared-memory
// swapping kernel, prints the first 4 and last 5 results, and frees all
// memory.
//
// @param [in] threadCount Number of threads per block
// @param [in] blockCount Number of blocks
// @param [in] maskMode Should the algorithm to run be the mask algorithm?
void runAlgorithm(unsigned int threadCount, unsigned int blockCount, bool maskMode)
{
    // BUGFIX: these were `static`, so a second call with different dimensions
    // would silently reuse the first call's sizes.
    unsigned int elementCount = threadCount * blockCount;
    unsigned int deviceSize = sizeof(unsigned int) * elementCount;
    // Allocate host memory to copy to/from the GPU.
    unsigned int* hostInput = (unsigned int*)malloc(deviceSize);
    unsigned int* hostOutput = (unsigned int*)calloc(elementCount, sizeof(unsigned int));
    // Populate the host memory with random numbers from 0 to elementCount - 1.
    generateRandomNumbers(hostInput, elementCount, elementCount);
    // Allocate global memory for the GPU.
    unsigned int* deviceInput;
    unsigned int* deviceOutput;
    cudaMalloc((void**)&deviceInput , deviceSize);
    cudaMalloc((void**)&deviceOutput, deviceSize);
    // Initialize the device output memory to zero.
    cudaMemset(deviceOutput, 0, deviceSize);
    // Copy the host input memory to GPU input memory.
    cudaMemcpy(deviceInput, hostInput, deviceSize, cudaMemcpyHostToDevice);
    if (maskMode)
    {
        // Define the constant values for the MASK.
        // Mask ZERO: 00000001 00100011 01000101 01100111
        // Mask ONE : 10111111 11101010 11001101 00010110
        unsigned int masks[2];
        masks[0] = 0x01234567;
        masks[1] = 0xBFEACD16;
        // Copy the masks symbol from host to constant GPU memory.
        cudaMemcpyToSymbol(MASKS, masks, sizeof(unsigned int) * 2);
        // Execute the kernel; the result lands in deviceOutput.
        maskBits<<<blockCount, threadCount>>>(deviceInput, deviceOutput);
        cudaMemcpy(hostOutput, deviceOutput, deviceSize, cudaMemcpyDeviceToHost);
    }
    else
    {
        // The swap runs in place, so the result lives in deviceInput.
        swapArray<<<blockCount, threadCount>>>(deviceInput, elementCount);
        // Make sure the GPU has completed before copying everything over.
        cudaThreadSynchronize();
        cudaMemcpy(hostOutput, deviceInput, deviceSize, cudaMemcpyDeviceToHost);
    }
    // Print the trimmed results (the first and last few values).
    for (unsigned int i = 0; i < 4; ++i)
    {
        printf("[INDEX %5u] Input: %4u Output: %u\n", i, hostInput[i], hostOutput[i]);
    }
    printf("\n...Trimmed output...\n\n");
    for (unsigned int i = elementCount - 5; i < elementCount; ++i)
    {
        printf("[INDEX %5u] Input: %4u Output: %u\n", i, hostInput[i], hostOutput[i]);
    }
    // Free the device memory.
    cudaFree(deviceInput );
    cudaFree(deviceOutput);
    // Free the host memory.
    free(hostInput);
    free(hostOutput);
    hostInput = NULL;
    hostOutput = NULL;
}
// @brief Display the proper program usage message on stdout.
void showUsage()
{
    fputs("Invalid arguments provided. Please see the usage below:\n", stdout);
    fputs(" module_3_jrenga2.exe <bc> <tpb>\n", stdout);
    fputs(" bc - The number of blocks to run with. Must be a positive integer.\n", stdout);
    fputs(" tpb - The number of threads per blocks. Must be a positive integer.\n", stdout);
    fputs("NOTE: The total number of threads (bc * tpb) must be greater than 64 \n", stdout);
    fputs(" and less than 2048.\n", stdout);
    fputs(" ** TERMINATING **\n", stdout);
}
// @brief Main Entry-Point
//
// Validates the <blocks> <threads-per-block> arguments, then runs the
// shared-memory swapping demo followed by the constant-memory masking demo.
int main(int argc, char* argv[])
{
    // 1. Check the number of arguments.
    if (argc != 3)
    {
        showUsage();
        return EXIT_FAILURE;
    }
    // 2. Attempt to retrieve the integer values of the parameters
    // (a value less than or equal to 0 is considered invalid).
    int numBlocks = atoi(argv[1]);
    if (numBlocks <= 0)
    {
        showUsage();
        return EXIT_FAILURE;
    }
    int numThreads = atoi(argv[2]);
    if (numThreads <= 0)
    {
        showUsage();
        return EXIT_FAILURE;
    }
    int totalThreads = numBlocks * numThreads;
    // 2.5 Enforce the allowed range for the total thread count.
    if (totalThreads < 64 || totalThreads > 2048)
    {
        showUsage();
        return EXIT_FAILURE;
    }
    // Do some pre-processing to set up the random number generation.
    initializedRNG = false;
    initializeRandomNumbers();
    printf("Swapping Algorithm (SHARED MEMORY) [BLOCKS = %2d, THREADS/BLOCK = %2d]\n", numBlocks, numThreads);
    printf("********************************************************************\n");
    // BUGFIX: runAlgorithm takes (threadCount, blockCount); the original
    // passed (numBlocks, numThreads), silently swapping the launch
    // configuration relative to the labels printed above.
    runAlgorithm(numThreads, numBlocks, false);
    printf("Masking Algorithm (CONSTANT MEMORY) [BLOCKS = %2d, THREADS/BLOCK = %2d]\n", numBlocks, numThreads);
    printf("*********************************************************************\n");
    runAlgorithm(numThreads, numBlocks, true);
    return EXIT_SUCCESS;
}
|
14,974 | #include <iostream>
#include <vector>
// In-place element-wise vector add over one block: v0[tid] += v1[tid].
__global__ void vecadd( int * v0, int * v1, std::size_t size )
{
    auto tid = threadIdx.x;
    // BUGFIX: use the previously-unused size parameter as a bounds guard so
    // launches with more threads than elements cannot write out of range.
    if (tid < size)
    {
        v0[ tid ] += v1[ tid ];
    }
}
// Splits a 100-element vector add across two streams: each stream uploads its
// half of both inputs, runs vecadd on the device halves, and downloads the
// result half.
int main()
{
    cudaError_t err;
    std::size_t const size = 100;
    std::size_t const sizeb = size * sizeof( int );
    std::vector< int > v0( size );
    std::vector< int > v1( size );
    for( std::size_t i = 0 ; i < size ; ++i )
    {
        v0[ i ] = v1[ i ] = i;
    }
    int * v0_d = nullptr;
    int * v1_d = nullptr;
    // Pin the host vectors so the async copies can actually be asynchronous.
    cudaHostRegister( v0.data(), sizeb, cudaHostRegisterDefault );
    cudaHostRegister( v1.data(), sizeb, cudaHostRegisterDefault );
    err = cudaMalloc( &v0_d, sizeb );
    if( err != cudaSuccess ) { std::cerr << "Error" << std::endl; }
    err = cudaMalloc( &v1_d, sizeb );
    if( err != cudaSuccess ) { std::cerr << "Error" << std::endl; }
    cudaStream_t streams[ 2 ];
    for( std::size_t i = 0 ; i < 2 ; ++i )
    {
        cudaStreamCreate( &streams[ i ] );
    }
    // BUGFIX: upload BOTH inputs (the original never copied v1 to the device).
    for( std::size_t i = 0 ; i < 2 ; ++i )
    {
        err = cudaMemcpyAsync( v0_d + i*size/2, v0.data() + i*size/2, sizeb/2, cudaMemcpyHostToDevice, streams[ i ] );
        if( err != cudaSuccess ) { std::cerr << "Error 3" << std::endl; }
        err = cudaMemcpyAsync( v1_d + i*size/2, v1.data() + i*size/2, sizeb/2, cudaMemcpyHostToDevice, streams[ i ] );
        if( err != cudaSuccess ) { std::cerr << "Error 3" << std::endl; }
    }
    // BUGFIX: launch with DEVICE pointers; the original passed the host
    // vectors' data() pointers straight into the kernel.
    for( std::size_t i = 0 ; i < 2 ; ++i )
    {
        vecadd<<< 1, 50, 0, streams[ i ] >>>( v0_d + i*size/2, v1_d + i*size/2, size/2 );
        err = cudaGetLastError();
        if( err != cudaSuccess ) { std::cerr << "Error 3.5" << std::endl; }
    }
    for( std::size_t i = 0 ; i < 2 ; ++i )
    {
        err = cudaMemcpyAsync( v0.data() + i*size/2, v0_d + i*size/2, sizeb/2, cudaMemcpyDeviceToHost, streams[ i ] );
        if( err != cudaSuccess ) { std::cerr << "Error 4" << std::endl; }
    }
    cudaDeviceSynchronize( );
    for( std::size_t i = 0 ; i < 2 ; ++i )
    {
        cudaStreamDestroy( streams[ i ] );
    }
    // BUGFIX: unpin the host memory and free the device buffers (leaked before).
    cudaHostUnregister( v0.data() );
    cudaHostUnregister( v1.data() );
    cudaFree( v0_d );
    cudaFree( v1_d );
    for( auto x: v0 )
    {
        std::cout << x << std::endl;
    }
    return 0;
}
14,975 | #include <stdio.h>
#include <cuda.h>
//-----------------------------------------------------------------------------
// TheKernel: every 8th thread of each block prints a greeting.
//-----------------------------------------------------------------------------
__global__ void TheKernel()
{
    // threadIdx.x % 8 == 0 selects threads 0, 8, 16, 24 of each block
    // (equivalent to the bit-mask test (threadIdx.x & 7) == 0, since
    // masking the low three bits computes mod 8 for unsigned values).
    if (threadIdx.x % 8 == 0) {
        printf("This is block %2d, thread %2d saying hello world, from the GPU.\n",
               blockIdx.x, threadIdx.x);
    }
}
//-----------------------------------------------------------------------------
// main: greets from the host, launches the kernel over 8 blocks, and waits
// for all device output before exiting.
//-----------------------------------------------------------------------------
int main()
{
    printf("This is the C layer saying hello world, from the host.\n");
    // 8 blocks of 32 threads; every 8th thread per block prints.
    TheKernel<<<8, 32>>>();
    // Block until the kernel (and its printf output) has completed.
    cudaDeviceSynchronize();
    printf("Program exits.\n");
    return 0;
}
|
14,976 | #include "includes.h"
// Empty placeholder kernel; all parameters are accepted but unused.
__global__ void kernel4( int *a, int dimx, int dimy )
{
}
// BUGFIX: parenthesize the macro arguments and the whole expansion so FOO
// composes safely inside larger expressions (e.g. 2 * FOO(1, 2) was 4, not 6).
#define FOO(X,Y) ((X) + (Y))
// ^ cuda
|
14,978 | #include "includes.h"
// Accumulates per-thread partial sums of the 23 unique normal-equation terms
// (A0..A16 fill the symmetric system matrix, A17..A22 the right-hand side)
// for optical-flow-based pose estimation, writing one partial per thread into
// d_CO for a later reduction.
// Grid layout: blockIdx.y selects a flow segment (its length and offset come
// from d_n_values_flow / d_start_ind_flow); the x-dimension threads each
// accumulate n_accum strided samples of that segment.
// NOTE(review): fx, fy look like focal lengths and ox, oy the principal
// point — inferred from naming; confirm against the caller.
__global__ void normal_eqs_flow_GPU(float *d_CO, const float2 *d_flow_compact, const float *d_Zbuffer_flow_compact, const int *d_ind_flow_Zbuffer, float fx, float fy, float ox, float oy, int n_rows, int n_cols, const int *d_n_values_flow, const int *d_start_ind_flow) {
int n_val_accum = gridDim.x * blockDim.x; // _MAX_N_VAL_ACCUM may not be
// multiple of blocksize
int n_flow = d_n_values_flow[blockIdx.y];
int n_accum = (int)ceilf((float)n_flow / (float)n_val_accum);
int start_ind = d_start_ind_flow[blockIdx.y];
// initialize accumulators
float A0 = 0.0f, A1 = 0.0f, A2 = 0.0f, A3 = 0.0f, A4 = 0.0f, A5 = 0.0f,
A6 = 0.0f, A7 = 0.0f, A8 = 0.0f, A9 = 0.0f, A10 = 0.0f, A11 = 0.0f,
A12 = 0.0f, A13 = 0.0f, A14 = 0.0f, A15 = 0.0f, A16 = 0.0f, A17 = 0.0f,
A18 = 0.0f, A19 = 0.0f, A20 = 0.0f, A21 = 0.0f, A22 = 0.0f;
// Each thread strides through its block's n_accum-sized slice of samples.
for (int in_ind = blockDim.x * blockIdx.x * n_accum + threadIdx.x;
in_ind < blockDim.x * (blockIdx.x + 1) * n_accum; in_ind += blockDim.x) {
if (in_ind < n_flow) { // is this a valid sample?
// fetch flow and Zbuffer from global memory
float2 u = d_flow_compact[in_ind + start_ind];
// disp = inverse depth (fast device division).
float disp = __fdividef(1.0f, d_Zbuffer_flow_compact[in_ind + start_ind]);
// compute coordinates
int pixel_ind = d_ind_flow_Zbuffer[in_ind + start_ind];
// Indices >= n_rows*n_cols flag augmented-reality flow; fold them back
// into the image index range.
bool is_ar_flow = (pixel_ind >= (n_rows * n_cols));
pixel_ind -= (int)is_ar_flow * n_rows * n_cols;
float y = floorf(__fdividef((float)pixel_ind, n_cols));
float x = (float)pixel_ind - y * n_cols;
// Shift to principal-point-centered coordinates.
x = x - ox;
y = y - oy;
/************************/
/* evaluate constraints */
/************************/
// unique values A-matrix
A0 += (disp * disp * fx * fx);
A1 += (-disp * disp * x * fx);
A2 += (-disp * x * y);
A3 += (disp * fx * fx + disp * x * x);
A4 += (-disp * y * fx);
A5 += (-disp * disp * y * fy);
A6 += (-disp * fy * fy - disp * y * y); //!!!!
A7 += (disp * x * fy);
A8 += (disp * disp * x * x + disp * disp * y * y);
A9 += (disp * x * x * y / fx + disp * y * fy + disp * y * y * y / fy);
A10 += (-disp * x * fx - disp * x * x * x / fx - disp * x * y * y / fy);
A11 += (x * x * y * y / (fx * fx) + fy * fy + 2.0f * y * y +
y * y * y * y / (fy * fy));
A12 += (-2.0f * x * y - x * x * x * y / (fx * fx) -
x * y * y * y / (fy * fy));
A13 += (x * y * y / fx - x * fy - x * y * y / fy);
A14 += (fx * fx + 2.0f * x * x + x * x * x * x / (fx * fx) +
x * x * y * y / (fy * fy));
A15 += (-y * fx - x * x * y / fx + x * x * y / fy);
A16 += (x * x + y * y);
// B-vector
A17 += (disp * u.x * fx);
A18 += (disp * u.y * fy);
A19 += (-disp * x * u.x - disp * y * u.y);
A20 += (-x * y * u.x / fx - u.y * fy - u.y * y * y / fy);
A21 += (u.x * fx + x * x * u.x / fx + x * y * u.y / fy);
A22 += (-y * u.x + x * u.y);
}
}
/**************************/
/* write out accumulators */
/**************************/
// d_CO layout: 23 stacked planes of n_val_accum partials per segment.
int out_ind =
23 * n_val_accum * blockIdx.y + blockDim.x * blockIdx.x + threadIdx.x;
d_CO[out_ind] = A0;
d_CO[out_ind + n_val_accum] = A1;
d_CO[out_ind + 2 * n_val_accum] = A2;
d_CO[out_ind + 3 * n_val_accum] = A3;
d_CO[out_ind + 4 * n_val_accum] = A4;
d_CO[out_ind + 5 * n_val_accum] = A5;
d_CO[out_ind + 6 * n_val_accum] = A6;
d_CO[out_ind + 7 * n_val_accum] = A7;
d_CO[out_ind + 8 * n_val_accum] = A8;
d_CO[out_ind + 9 * n_val_accum] = A9;
d_CO[out_ind + 10 * n_val_accum] = A10;
d_CO[out_ind + 11 * n_val_accum] = A11;
d_CO[out_ind + 12 * n_val_accum] = A12;
d_CO[out_ind + 13 * n_val_accum] = A13;
d_CO[out_ind + 14 * n_val_accum] = A14;
d_CO[out_ind + 15 * n_val_accum] = A15;
d_CO[out_ind + 16 * n_val_accum] = A16;
d_CO[out_ind + 17 * n_val_accum] = A17;
d_CO[out_ind + 18 * n_val_accum] = A18;
d_CO[out_ind + 19 * n_val_accum] = A19;
d_CO[out_ind + 20 * n_val_accum] = A20;
d_CO[out_ind + 21 * n_val_accum] = A21;
d_CO[out_ind + 22 * n_val_accum] = A22;
}
14,979 | //
// Created by rafa on 2/5/21.
//
#include "counter.cuh"
// Counts particle pairs per separation bin, accumulating into `paircounts`
// with atomicAdd. Each thread grid-strides over the np particles (xyz
// interleaved in `array`), walks all grid cells within index_max cells of the
// particle's cell, and tests every particle in each neighbor cell against the
// squared bin edges rbins2[0..nbins].
// NOTE(review): `key` and `iwrap` appear to map a possibly out-of-range cell
// index to its wrapped cell index and the periodic box image offset —
// confirm how they are built on the host. minsep is unused here.
__global__
void
doPairCount(long long np, const double *array, const long long *particlesInGrid, const long long *offset,
double boxsize, int nside, double minsep, double maxsep, const int nbins, const double *rbins2,
unsigned long long *paircounts, const int *key, const int *iwrap) {
long long index = blockIdx.x * blockDim.x + threadIdx.x;
long long stride = blockDim.x * gridDim.x;
double agrid = boxsize / nside; // cell edge length
int index_max = (int) (maxsep / agrid) + 1; // neighbor-cell search radius, in cells
long long i, j;
int i1, j1, k1;
double x1, y1, z1;
double x11, y11, z11;
double dx, dy, dz;
double r2;
int iwrapx, iwrapy, iwrapz;
int i2, j2, k2;
int s;
double factor = nside / boxsize; // position -> cell index scale
// __shared__ unsigned long long private_histogram[20];
// if (threadIdx.x < 20) {
// private_histogram[threadIdx.x] = 0;
// }
// __syncthreads();
for (i = index; i < np; i += stride) {
x1 = array[3 * i + 0];
y1 = array[3 * i + 1];
z1 = array[3 * i + 2];
// Cell coordinates of this particle.
i1 = (int) (x1 * factor);
j1 = (int) (y1 * factor);
k1 = (int) (z1 * factor);
i1 %= nside;
j1 %= nside;
k1 %= nside;
for (int idx = -index_max; idx <= index_max; idx++) {
i2 = i1 + idx;
iwrapx = iwrap[i2 + index_max];
i2 = key[i2 + index_max];
for (int idy = -index_max; idy <= index_max; idy++) {
j2 = j1 + idy;
iwrapy = iwrap[j2 + index_max];
j2 = key[j2 + index_max];
for (int idz = -index_max; idz <= index_max; idz++) {
k2 = k1 + idz;
iwrapz = iwrap[k2 + index_max];
k2 = key[k2 + index_max];
// Flattened index of the (wrapped) neighbor cell.
s = nside * nside * k2 + nside * j2 + i2;
// Shift this particle into the neighbor cell's periodic image.
x11 = x1 - boxsize * iwrapx;
y11 = y1 - boxsize * iwrapy;
z11 = z1 - boxsize * iwrapz;
for (j = 0; j < particlesInGrid[s]; j += 1) {
dx = x11 - array[3 * offset[s] + 3 * j + 0];
dy = y11 - array[3 * offset[s] + 3 * j + 1];
dz = z11 - array[3 * offset[s] + 3 * j + 2];
r2 = dx * dx + dy * dy + dz * dz;
// Skip pairs outside the binned separation range.
if (r2 > rbins2[nbins] || r2 < rbins2[0]) continue;
// Find the containing bin from the top down.
for (int k = nbins - 1; k >= 0; k--) {
if (r2 >= rbins2[k]) {
atomicAdd(&(paircounts[k]), 1);
// atomicAdd(&(private_histogram[k]), 1);
// paircounts[k] += 1;
break;
}
}
} //loop over neighbor grid2 particles
} //loop over z neighbors
} //loop over y neighbors
} //loop over x neighbors
} //loop over catalog1 particles
// __syncthreads();
//
// if (threadIdx.x < 20) {
// atomicAdd(&(paircounts[threadIdx.x]), private_histogram[threadIdx.x]);
// }
// return paircounts;
}
14,980 | //
// include files
//
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#define BLOCK_NUM (1024 * 32)
#define THREAD_NUM 32
#define N (BLOCK_NUM * THREAD_NUM)
// Aborts the process with file/line context when a CUDA API call fails.
static void cuda_checker(cudaError_t err, const char *file, int line ) {
    if (err == cudaSuccess) {
        return;
    }
    printf("%s in %s at line %d\n", cudaGetErrorString(err), file, line);
    exit(EXIT_FAILURE);
}
// Convenience wrapper that captures the call site automatically.
#define CUDA_CHECK(err) (cuda_checker(err, __FILE__, __LINE__ ))
//
// kernel code
//
// Even-indexed elements get a[i] + b[i]; odd-indexed get a[i] - b[i].
__global__ void add(int *a, int *b, int *c) {
    int tid = threadIdx.x + blockIdx.x * blockDim.x; // global element index
    if (tid >= N) {
        return;
    }
    c[tid] = (tid % 2 == 0) ? (a[tid] + b[tid]) : (a[tid] - b[tid]);
}
//
// host code
//
// Fills a[i] = -i and b[i] = i*i, runs the even/odd add/sub kernel, reports
// the elapsed GPU time (kernel plus result copy, as originally measured),
// and releases all resources.
int main(int argc, const char **argv) {
    int *a, *b, *c;
    a = (int*) malloc(sizeof(int) * N);
    b = (int*) malloc(sizeof(int) * N);
    c = (int*) malloc(sizeof(int) * N);
    int *dev_a, *dev_b, *dev_c;
    for(int i = 0; i < N; i++) {
        a[i] = -i;
        b[i] = i * i;
    }
    CUDA_CHECK( cudaMalloc((void**)&dev_a, N * sizeof(int)) );
    CUDA_CHECK( cudaMalloc((void**)&dev_b, N * sizeof(int)) );
    CUDA_CHECK( cudaMalloc((void**)&dev_c, N * sizeof(int)) );
    CUDA_CHECK( cudaMemcpy(dev_a, a, N * sizeof(int), cudaMemcpyHostToDevice) );
    CUDA_CHECK( cudaMemcpy(dev_b, b, N * sizeof(int), cudaMemcpyHostToDevice) );
    float time;
    cudaEvent_t start, stop;
    CUDA_CHECK(cudaEventCreate(&start));
    CUDA_CHECK(cudaEventCreate(&stop));
    CUDA_CHECK(cudaEventRecord(start, 0));
    add<<<BLOCK_NUM, THREAD_NUM>>>(dev_a, dev_b, dev_c);
    // The result copy stays inside the timed region to preserve the
    // original measurement semantics.
    CUDA_CHECK( cudaMemcpy(c, dev_c, N * sizeof(int), cudaMemcpyDeviceToHost) );
    CUDA_CHECK(cudaEventRecord(stop, 0));
    CUDA_CHECK(cudaEventSynchronize(stop));
    CUDA_CHECK(cudaEventElapsedTime(&time, start, stop));
    printf("Time to generate: %3.1f ms \n", time);
    // BUGFIX: destroy the timing events and free the host arrays the
    // original leaked.
    CUDA_CHECK(cudaEventDestroy(start));
    CUDA_CHECK(cudaEventDestroy(stop));
    CUDA_CHECK( cudaFree(dev_a) );
    CUDA_CHECK( cudaFree(dev_b) );
    CUDA_CHECK( cudaFree(dev_c) );
    free(a);
    free(b);
    free(c);
    cudaDeviceReset();
    return 0;
}
14,981 | #include "includes.h"
// Computes working + sink + div - label*iCC per element and stores the result
// back into workingBuffer, for elements within [0, size).
__global__ void kern_FindSourcePotentialAndStore(float* workingBuffer, float* sinkBuffer, float* divBuffer, float* labelBuffer, float iCC, int size)
{
    int idx = CUDASTDOFFSET;
    // BUGFIX: the original read all four buffers BEFORE checking idx < size,
    // so out-of-range threads performed out-of-bounds loads. Guard first.
    if( idx < size )
    {
        workingBuffer[idx] = workingBuffer[idx] + sinkBuffer[idx]
                           + divBuffer[idx] - labelBuffer[idx] * iCC;
    }
}
14,982 | #include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <cuda.h>
#include <string.h>
// Parallel Computing Lab 3
// Abhi Dankar
// Professor Zahran
// All experiments run on cuda1
unsigned int getmax(unsigned int *, unsigned int);
unsigned int getmaxSEQ(unsigned int *, unsigned int);
void printArr(unsigned int pr[], unsigned int box);
#define THREADS_PER_BLOCK 1024
// Block-level max reduction: each thread takes a grid-strided running max
// over arrIn, the block reduces it in shared memory, and thread 0 folds the
// block's max into res[blockIdx.x].
// Preconditions: res[blockIdx.x] must be initialized by the caller (it is
// read via max below), and blockDim.x must be a power of two for the halving
// reduction to cover all elements.
__global__ void getMaxCu(unsigned int* arrIn, unsigned int size, unsigned int* res) {
unsigned int i = 0;
__shared__ unsigned int arr[THREADS_PER_BLOCK];
i = threadIdx.x + (blockIdx.x * blockDim.x);
unsigned int gb = gridDim.x*blockDim.x; // total threads = grid stride
unsigned int o = 0;
// Per-thread running max over a grid-strided slice of the input.
unsigned int temp = 0;
while(i + o < size){
temp = max(temp, arrIn[i + o]);
o += gb;
}
arr[threadIdx.x] = temp;
__syncthreads();
// Shared-memory tree reduction within the block.
for (unsigned int y = (blockDim.x/2); y > 0; y = y/2){
if ((threadIdx.x < y)){
arr[threadIdx.x] = max(arr[threadIdx.x], arr[threadIdx.x + y]);
}
__syncthreads();
}
// Thread 0 publishes the block maximum.
if (threadIdx.x == 0)
res[blockIdx.x] = max(res[blockIdx.x],arr[0]);
}
// Parses the array size from argv, fills an array with random values in
// [0, size), prints the GPU-computed maximum, and exits.
int main(int argc, char *argv[]) {
    unsigned int size = 0;       // number of elements
    unsigned int * numbers;      // input array
    if (argc != 2)
    {
        printf("usage: maxseq num\n");
        printf("num = size of the array\n");
        exit(1);
    }
    size = atol(argv[1]);
    numbers = (unsigned int *)malloc(size * sizeof(unsigned int));
    if (numbers == NULL)
    {
        printf("Unable to allocate mem for an array of size %u\n", size);
        exit(1);
    }
    // Seed the RNG and fill with values in [0, size).
    srand(time(NULL));
    for (unsigned int idx = 0; idx < size; idx++) {
        numbers[idx] = rand() % size;
    }
    printf(" The maximum number in the array is: %u\n",
           getmax(numbers, size));
    free(numbers);
    exit(0);
}
// Prints the first `box` values of pr separated by spaces (no newline).
void printArr(unsigned int pr[], unsigned int box){
    unsigned int idx = 0;
    while (idx < box) {
        printf("%u ", pr[idx]);
        ++idx;
    }
}
// Sequential reference implementation: linear scan returning the largest
// element of num[0..size-1]. Used to cross-check the GPU result.
unsigned int getmaxSEQ(unsigned int num[], unsigned int size) {
    unsigned int best = num[0];
    for (unsigned int i = 1; i < size; ++i) {
        best = (num[i] > best) ? num[i] : best;
    }
    return best;
}
/*
input: pointer to an array of long int
number of elements in the array
output: the maximum number of the array
*/
// GPU reduction driver: pads the input with zeros up to a multiple of
// THREADS_PER_BLOCK, then repeatedly reduces (one max per block per pass)
// until a single value remains.
// BUGFIXES vs the original:
// - the per-pass output buffer was never initialized, but getMaxCu folds
//   into it via max(), so the result could contain garbage;
// - `copy = output` aliased input and output on later passes (kernel read
//   and wrote the same buffer) and caused a double cudaFree; two buffers
//   are now ping-ponged instead.
unsigned int getmax(unsigned int num[], unsigned int size) {
    unsigned int i;
    unsigned int *d_in, *d_out, *swap_tmp;
    unsigned int *newArr;
    unsigned int resize;
    unsigned int result;
    // Round the element count up to a full block.
    if (size % THREADS_PER_BLOCK != 0){
        resize = (size/THREADS_PER_BLOCK+1)*THREADS_PER_BLOCK;
    } else {
        resize = size;
    }
    // Zero-padded host copy (0 never exceeds any unsigned value).
    newArr = (unsigned int *) malloc(sizeof(unsigned int) * resize);
    for (i = 0; i < resize; i++){
        newArr[i] = (i < size) ? num[i] : 0;
    }
    unsigned int blocks = resize / THREADS_PER_BLOCK;
    cudaMalloc((void **) &d_in, sizeof(unsigned int) * resize);
    cudaMemcpy(d_in, newArr, resize * sizeof(unsigned int), cudaMemcpyHostToDevice);
    cudaMalloc((void **) &d_out, sizeof(unsigned int) * blocks);
    do {
        // Integer ceil-divide (avoids float rounding of the original ceilf).
        blocks = (resize + THREADS_PER_BLOCK - 1) / THREADS_PER_BLOCK;
        // getMaxCu folds into d_out via max(), so it must start zeroed.
        cudaMemset(d_out, 0, blocks * sizeof(unsigned int));
        getMaxCu<<<blocks, THREADS_PER_BLOCK>>>(d_in, resize, d_out);
        // Ping-pong: this pass's output becomes the next pass's input.
        // (The old input buffer is always at least as large as needed.)
        swap_tmp = d_in;
        d_in = d_out;
        d_out = swap_tmp;
        resize = blocks;
    } while (blocks > 1);
    // After the final swap the single maximum lives in d_in[0].
    cudaMemcpy(&result, d_in, sizeof(unsigned int), cudaMemcpyDeviceToHost);
    cudaFree(d_in);
    cudaFree(d_out);
    free(newArr);
    return result;
}
|
14,983 | #include "includes.h"
// Backward pass of an element-wise sqrt: accumulates out_d / out_x / 2 into
// in_d, which matches d(sqrt(x))/dx = 1/(2*sqrt(x)) when out_x = sqrt(in_x)
// (in_x itself is unused here). Grid-stride loop over `size` elements.
__global__ void elementwise_1D_1D_sqrt_grad(float* in_x, float* in_d, float* out_x, float * out_d, int size) {
    int step = gridDim.x * blockDim.x;
    for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < size; i += step) {
        in_d[i] += out_d[i] / out_x[i] / 2;
    }
}
14,984 | #include <assert.h>
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
typedef signed char schar;
typedef unsigned char uchar;
typedef short shrt;
typedef unsigned short ushrt;
typedef unsigned uint;
typedef unsigned long ulong;
typedef long long llong;
typedef unsigned long long ullong;
typedef float flt;
typedef double dbl;
typedef long double ldbl;
// Abort the process with `msg` (plus errno detail when errno is set) if
// cnd_value is true.
#define exit_if(cnd_value, msg) \
do { \
if (cnd_value) \
{ \
if (errno) \
perror(msg); \
else \
fprintf(stderr, "error: %s\n", msg); \
exit(EXIT_FAILURE); \
} \
} while (0)
// Abort with file/line context if a CUDA runtime call did not return
// cudaSuccess.
#define cudaErrorCheck(error) \
do { \
cudaError_t res = error; \
if (res != cudaSuccess) \
{ \
fprintf(stderr, "cuda %s:%d error: %s\n", __FILE__, __LINE__, \
cudaGetErrorString(res)); \
exit(EXIT_FAILURE); \
} \
} while(0)
#define NUM_BLOCKS (256)
#define BLOCK_SIZE (256)
// Grid-stride element-wise subtraction: first[i] -= second[i] for i in [0, n).
// __restrict__ promises the two arrays do not alias.
__global__ void kernel(dbl * const __restrict__ first,
                       const dbl * const __restrict__ second,
                       const size_t n)
{
    const size_t step = gridDim.x * blockDim.x;
    for (size_t i = blockDim.x * blockIdx.x + threadIdx.x; i < n; i += step)
    {
        first[i] -= second[i];
    }
}
// Reads n and two length-n double arrays from stdin, computes first - second
// on the GPU, and prints the result in scientific notation.
int main(void)
{
    size_t n;
    scanf("%zu", &n);
    dbl * const first = (dbl *) malloc(sizeof(dbl) * n),
        * const second = (dbl *) malloc(sizeof(dbl) * n);
    exit_if(!first || !second, "malloc()");
    for (size_t i = 0; i < n; ++i)
        scanf("%lf", first + i);
    for (size_t i = 0; i < n; ++i)
        scanf("%lf", second + i);
    // Mirror both arrays on the device.
    dbl *device_first, *device_second;
    cudaErrorCheck(cudaMalloc(&device_first, sizeof(dbl) * n));
    cudaErrorCheck(cudaMalloc(&device_second, sizeof(dbl) * n));
    cudaErrorCheck(cudaMemcpy(device_first, first, sizeof(dbl) * n,
                              cudaMemcpyHostToDevice));
    cudaErrorCheck(cudaMemcpy(device_second, second, sizeof(dbl) * n,
                              cudaMemcpyHostToDevice));
    kernel<<<NUM_BLOCKS, BLOCK_SIZE>>>(device_first, device_second, n);
    cudaErrorCheck(cudaGetLastError());
    // The blocking copy back doubles as synchronization with the kernel.
    cudaErrorCheck(cudaMemcpy(first, device_first, sizeof(dbl) * n,
                              cudaMemcpyDeviceToHost));
    for (size_t i = 0; i < n; ++i)
        printf("%.10e ", first[i]);
    putchar('\n');
    cudaErrorCheck(cudaFree(device_first));
    cudaErrorCheck(cudaFree(device_second));
    free(first);
    free(second);
    return 0;
}
|
14,985 | #include <stdio.h>
// Minimal host-only hello-world.
int main(void) {
    // BUGFIX: corrected the "hellow" typo in the greeting.
    printf("hello World, Cuda!\n");
    return 0;
}
|
14,986 | #include <cstdio>
#include <cstdlib>
#include <vector>
// Bucket sort over keys in [0, range): builds a histogram with atomicAdd,
// then each thread i finds the smallest k whose cumulative count exceeds i
// and stores it as its sorted key. Requires `bucket` to be zeroed on entry.
// NOTE(review): __syncthreads() only synchronizes within a block, but the
// histogram is built by ALL blocks — with more than one block, some threads
// can read `bucket` before other blocks finish adding (data race). The early
// `return` for i >= n also makes the barrier divergent within the last
// block. Correct only for single-block launches as written.
__global__ void bucket_sort(int* key, int* bucket, int n) {
int i = blockIdx.x * blockDim.x + threadIdx.x;
if (i >= n) return;
atomicAdd(&bucket[key[i]], 1);
__syncthreads();
// Walk the cumulative histogram until it passes this thread's rank.
for (int j = 0, k = 0; j <= i; k++) {
key[i] = k;
j += bucket[k];
}
}
// Driver: generates n random keys in [0, range), sorts them with the GPU
// bucket sort, and prints the keys before and after.
int main()
{
    int n = 50;
    int range = 5;
    int* key;
    cudaMallocManaged(&key, n * sizeof(int));
    for (int i = 0; i < n; i++) {
        key[i] = rand() % range;
        printf("%d ", key[i]);
    }
    printf("\n");
    int* bucket;
    cudaMallocManaged(&bucket, range * sizeof(int));
    // BUGFIX: the kernel accumulates into bucket with atomicAdd and never
    // clears it, so it must start zeroed; freshly allocated managed memory
    // is not guaranteed to be zero.
    cudaMemset(bucket, 0, range * sizeof(int));
    int M = 32;
    // (Dropped the original's unused dynamic-shared-memory launch argument.)
    bucket_sort <<<(n + M - 1) / M, M>>> (key, bucket, n);
    cudaDeviceSynchronize();
    for (int i = 0; i < n; i++)
        printf("%d ", key[i]);
    printf("\n");
    cudaFree(key);
    cudaFree(bucket);
}
14,987 | #include "includes.h"
extern "C" {
}
#define TB 256
#define EPS 0.1
#undef MIN
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#undef MAX
#define MAX(a, b) ((a) > (b) ? (a) : (b))
// For each source location id1 of the h1 x w1 grid (one thread each), scan
// every target location of the h2 x w2 grid, track the arg-max of the
// precomputed correlation `conv`, record the winning (x2, y2) coordinates in
// `correspondence`, and copy the winner's c1-channel feature vector from
// `target` into `match` (both stored channel-major).
__global__ void patchmatch_r_argmax_kernel( float *conv, float *target, float *match, int *correspondence, int c1, int h1, int w1, int h2, int w2 )
{
    int id1 = blockIdx.x * blockDim.x + threadIdx.x;
    int size1 = h1 * w1;
    int size2 = h2 * w2;
    if (id1 >= size1) {
        return;
    }
    double best = -1e20;
    for (int y2 = 0; y2 < h2; y2++) {
        for (int x2 = 0; x2 < w2; x2++) {
            int id2 = y2 * w2 + x2;
            float score = conv[id1 * size2 + id2];
            if (score > best) {
                best = score;
                correspondence[id1 * 2 + 0] = x2;
                correspondence[id1 * 2 + 1] = y2;
                // Copy the (current) winning target feature vector.
                for (int c = 0; c < c1; c++) {
                    match[c * size1 + id1] = target[c * size2 + id2];
                }
            }
        }
    }
}
14,988 | #include <stdio.h>
#include <inttypes.h>
#include <stdlib.h>
#include <sys/time.h>
#define THREADS_PER_BLOCK 192
/*
 * Scrypt core benchmark kernel (adapted from scrypt.c): each thread owns one
 * 16-word lane of B/Bx and runs NITERS passes of two salsa20/8
 * half-iterations, staging its lane in shared memory.
 *
 * Fixes relative to the previous revision:
 *  1. The cache-fill loop read B[tid*16+0]/Bx[tid*16+0] for every i, so 15
 *     of the 16 cached words were copies of word 0.
 *  2. Shared-cache slots were addressed as threadIdx.x + 16*i, which
 *     OVERLAPS between threads (thread t and thread t+16 share addresses) --
 *     a data race. SLOT(k) now strides by THREADS_PER_BLOCK, giving each
 *     thread a disjoint, bank-friendly column.
 *  3. The GETBX macro stored every register back to Bxcache slot 0 instead
 *     of slot OFFSET, clobbering word 0 and never updating the others.
 */
__global__
void hasher_kernel(uint32_t *B, uint32_t *Bx) {
    /* From scrypt.c */
    uint32_t j;
    /* Two iterations of salsa20_8 */
    uint32_t x00,x01,x02,x03,x04,x05,x06,x07,x08,x09,x10,x11,x12,x13,x14,x15;
    size_t i;
    __shared__ uint32_t Bcache[THREADS_PER_BLOCK*16];
    __shared__ uint32_t Bxcache[THREADS_PER_BLOCK*16];
    int tid = (blockIdx.x*blockDim.x) + threadIdx.x;
/* Shared-memory slot for word k of this thread's lane (disjoint per thread). */
#define SLOT(k) (threadIdx.x + (THREADS_PER_BLOCK*(k)))
    // parallel test init
#if 0
    B[tid*16+0] = tid; B[tid*16+1] = tid; B[tid*16+2] = tid;
    B[tid*16+3] = tid; B[tid*16+4] = tid; B[tid*16+5] = tid;
    B[tid*16+6] = tid; B[tid*16+7] = tid; B[tid*16+8] = tid;
    B[tid*16+9] = tid; B[tid*16+10] = tid; B[tid*16+11] = tid;
    B[tid*16+12] = tid; B[tid*16+13] = tid; B[tid*16+14] = tid;
    B[tid*16+15] = tid;
#endif
    Bx[tid*16+0] = tid; Bx[tid*16+1] = tid; Bx[tid*16+2] = tid;
    Bx[tid*16+3] = tid; Bx[tid*16+4] = tid; Bx[tid*16+5] = tid;
    Bx[tid*16+6] = tid; Bx[tid*16+7] = tid; Bx[tid*16+8] = tid;
    Bx[tid*16+9] = tid; Bx[tid*16+10] = tid; Bx[tid*16+11] = tid;
    Bx[tid*16+12] = tid; Bx[tid*16+13] = tid; Bx[tid*16+14] = tid;
    Bx[tid*16+15] = tid;
#pragma unroll
    for (int i = 0; i < 16; i++) {
        /* BUGFIX: index word i (was hard-coded to word 0 for every i). */
        Bcache[SLOT(i)]  = B[tid*16+i];
        Bxcache[SLOT(i)] = Bx[tid*16+i];
    }
    /* This is adapted at the end of the iteration now */
    x00 = (Bcache[SLOT(0)]  ^= Bxcache[SLOT(0)]);
    x01 = (Bcache[SLOT(1)]  ^= Bxcache[SLOT(1)]);
    x02 = (Bcache[SLOT(2)]  ^= Bxcache[SLOT(2)]);
    x03 = (Bcache[SLOT(3)]  ^= Bxcache[SLOT(3)]);
    x04 = (Bcache[SLOT(4)]  ^= Bxcache[SLOT(4)]);
    x05 = (Bcache[SLOT(5)]  ^= Bxcache[SLOT(5)]);
    x06 = (Bcache[SLOT(6)]  ^= Bxcache[SLOT(6)]);
    x07 = (Bcache[SLOT(7)]  ^= Bxcache[SLOT(7)]);
    x08 = (Bcache[SLOT(8)]  ^= Bxcache[SLOT(8)]);
    x09 = (Bcache[SLOT(9)]  ^= Bxcache[SLOT(9)]);
    x10 = (Bcache[SLOT(10)] ^= Bxcache[SLOT(10)]);
    x11 = (Bcache[SLOT(11)] ^= Bxcache[SLOT(11)]);
    x12 = (Bcache[SLOT(12)] ^= Bxcache[SLOT(12)]);
    x13 = (Bcache[SLOT(13)] ^= Bxcache[SLOT(13)]);
    x14 = (Bcache[SLOT(14)] ^= Bxcache[SLOT(14)]);
    x15 = (Bcache[SLOT(15)] ^= Bxcache[SLOT(15)]);
/* 32-bit rotate left. */
#define R(a,b) (((a) << (b)) | ((a) >> (32 - (b))))
#define NITERS 2048 // should be 2048 for full version
    for (j = 0; j < NITERS; j++) {
#pragma unroll
        for (i = 0; i < 4; i++) {
            /* Operate on columns. */
            x04 ^= R(x00+x12, 7); x09 ^= R(x05+x01, 7); x14 ^= R(x10+x06, 7); x03 ^= R(x15+x11, 7);
            x08 ^= R(x04+x00, 9); x13 ^= R(x09+x05, 9); x02 ^= R(x14+x10, 9); x07 ^= R(x03+x15, 9);
            x12 ^= R(x08+x04,13); x01 ^= R(x13+x09,13); x06 ^= R(x02+x14,13); x11 ^= R(x07+x03,13);
            x00 ^= R(x12+x08,18); x05 ^= R(x01+x13,18); x10 ^= R(x06+x02,18); x15 ^= R(x11+x07,18);
            /* Operate on rows. */
            x01 ^= R(x00+x03, 7); x06 ^= R(x05+x04, 7); x11 ^= R(x10+x09, 7); x12 ^= R(x15+x14, 7);
            x02 ^= R(x01+x00, 9); x07 ^= R(x06+x05, 9); x08 ^= R(x11+x10, 9); x13 ^= R(x12+x15, 9);
            x03 ^= R(x02+x01,13); x04 ^= R(x07+x06,13); x09 ^= R(x08+x11,13); x14 ^= R(x13+x12,13);
            x00 ^= R(x03+x02,18); x05 ^= R(x04+x07,18); x10 ^= R(x09+x08,18); x15 ^= R(x14+x13,18);
        }
/* B += X; X = (B ^ Bx).  BUGFIX: final store now targets slot OFFSET (was slot 0). */
#define GETBX(REG, OFFSET) \
    REG += Bcache[SLOT(OFFSET)]; \
    Bcache[SLOT(OFFSET)] = REG; \
    REG ^= Bxcache[SLOT(OFFSET)]; \
    Bxcache[SLOT(OFFSET)] = REG
        GETBX(x00, 0);
        GETBX(x01, 1);
        GETBX(x02, 2);
        GETBX(x03, 3);
        GETBX(x04, 4);
        GETBX(x05, 5);
        GETBX(x06, 6);
        GETBX(x07, 7);
        GETBX(x08, 8);
        GETBX(x09, 9);
        GETBX(x10, 10);
        GETBX(x11, 11);
        GETBX(x12, 12);
        GETBX(x13, 13);
        GETBX(x14, 14);
        GETBX(x15, 15);
#pragma unroll
        for (i = 0; i < 4; i++) {
            /* Operate on columns. */
            x04 ^= R(x00+x12, 7); x09 ^= R(x05+x01, 7); x14 ^= R(x10+x06, 7); x03 ^= R(x15+x11, 7);
            x08 ^= R(x04+x00, 9); x13 ^= R(x09+x05, 9); x02 ^= R(x14+x10, 9); x07 ^= R(x03+x15, 9);
            x12 ^= R(x08+x04,13); x01 ^= R(x13+x09,13); x06 ^= R(x02+x14,13); x11 ^= R(x07+x03,13);
            x00 ^= R(x12+x08,18); x05 ^= R(x01+x13,18); x10 ^= R(x06+x02,18); x15 ^= R(x11+x07,18);
            /* Operate on rows. */
            x01 ^= R(x00+x03, 7); x06 ^= R(x05+x04, 7); x11 ^= R(x10+x09, 7); x12 ^= R(x15+x14, 7);
            x02 ^= R(x01+x00, 9); x07 ^= R(x06+x05, 9); x08 ^= R(x11+x10, 9); x13 ^= R(x12+x15, 9);
            x03 ^= R(x02+x01,13); x04 ^= R(x07+x06,13); x09 ^= R(x08+x11,13); x14 ^= R(x13+x12,13);
            x00 ^= R(x03+x02,18); x05 ^= R(x04+x07,18); x10 ^= R(x09+x08,18); x15 ^= R(x14+x13,18);
        }
#undef R
/* Bx += X; X = (Bx ^ B): leaves the registers set for the next loop
 * iteration and the exit. */
#define SAVEBX(REG, OFFSET) \
    REG += Bxcache[SLOT(OFFSET)]; \
    Bxcache[SLOT(OFFSET)] = REG; \
    REG ^= Bcache[SLOT(OFFSET)]
        SAVEBX(x00, 0);
        SAVEBX(x01, 1);
        SAVEBX(x02, 2);
        SAVEBX(x03, 3);
        SAVEBX(x04, 4);
        SAVEBX(x05, 5);
        SAVEBX(x06, 6);
        SAVEBX(x07, 7);
        SAVEBX(x08, 8);
        SAVEBX(x09, 9);
        SAVEBX(x10, 10);
        SAVEBX(x11, 11);
        SAVEBX(x12, 12);
        SAVEBX(x13, 13);
        SAVEBX(x14, 14);
        SAVEBX(x15, 15);
    }
#undef SLOT
    /* Write the final lane state back to global memory. */
    B[tid*16+ 0] = x00;
    B[tid*16+ 1] = x01;
    B[tid*16+ 2] = x02;
    B[tid*16+ 3] = x03;
    B[tid*16+ 4] = x04;
    B[tid*16+ 5] = x05;
    B[tid*16+ 6] = x06;
    B[tid*16+ 7] = x07;
    B[tid*16+ 8] = x08;
    B[tid*16+ 9] = x09;
    B[tid*16+10] = x10;
    B[tid*16+11] = x11;
    B[tid*16+12] = x12;
    B[tid*16+13] = x13;
    B[tid*16+14] = x14;
    B[tid*16+15] = x15;
    return;
}
// Returns (end - start) in seconds, with microsecond resolution.
double timeval_diff(const struct timeval * const start, const struct timeval * const end)
{
    const double whole_seconds = (double)(end->tv_sec - start->tv_sec);
    // tv_usec is always < 1e6, so this difference is exact in double.
    const long usec_delta = (long)end->tv_usec - (long)start->tv_usec;
    return whole_seconds + usec_delta / 1000000.0;
}
// Benchmark harness: fills N 16-word lanes, runs hasher_kernel over them,
// and reports wall-clock time for copy-in + kernel + copy-out.
int main() {
#define SCRYPT_WIDTH 16
#define N 1024 * 128
    printf("hi\n");
    struct timeval tv_start, tv_end;
    uint32_t *dev_a, *dev_b;
    uint32_t *mydat = (uint32_t *)malloc(N*SCRYPT_WIDTH*sizeof(uint32_t));
    // BUGFIX: malloc result was used unchecked.
    if (mydat == NULL) {
        fprintf(stderr, "Could not allocate host array\n");
        exit(0);
    }
    printf("First malloc\n");fflush(stdout);
    printf("Foo: %lu\n", N*SCRYPT_WIDTH*sizeof(uint32_t));fflush(stdout);
    /* BUGFIX: tv_start was read in the loop below before ever being set
     * (uninitialized memory). Populate it first; it is re-recorded at the
     * actual timing start further down, as before. */
    gettimeofday(&tv_start, NULL);
    for (int i = 0; i < N*SCRYPT_WIDTH; i++) {
        mydat[i] = i+tv_start.tv_sec; // Confuse the optimizer.
    }
    if (cudaMalloc((void **) &dev_a, N*SCRYPT_WIDTH*sizeof(uint32_t)) != cudaSuccess) {
        fprintf(stderr, "Could not allocate array\n");
        exit(0);
    }
    gettimeofday(&tv_start, NULL);
    cudaMemcpy(dev_a, mydat, N*SCRYPT_WIDTH*sizeof(uint32_t), cudaMemcpyHostToDevice);
    printf("Second malloc\n");fflush(stdout);
    if (cudaMalloc((void **) &dev_b, N*SCRYPT_WIDTH*sizeof(uint32_t)) != cudaSuccess) {
        fprintf(stderr, "Could not allocate array\n");
        exit(0);
    }
    printf("Starting kernel\n");fflush(stdout);
    hasher_kernel<<<N/THREADS_PER_BLOCK, THREADS_PER_BLOCK>>>(dev_a, dev_b);
    printf("Memcpy result\n");
    // The blocking device-to-host copy also synchronizes with the kernel.
    cudaMemcpy(mydat, dev_a, N*SCRYPT_WIDTH*sizeof(uint32_t), cudaMemcpyDeviceToHost);
    gettimeofday(&tv_end, NULL);
#if 1
    for (int i = 0; i < 10; i++) {
        printf("%x\n", mydat[i*SCRYPT_WIDTH]);
    }
#endif
    cudaFree(dev_a);
    cudaFree(dev_b);
    free(mydat);
    cudaDeviceReset();
    printf("%2.2f\n", timeval_diff(&tv_start, &tv_end));
    printf("Done\n");
}
|
14,989 | #include "includes.h"
// Final warp-level stage of a shared-memory tree reduction, fully unrolled
// for the last 64 -> 1 elements. `volatile` forces every access to go to
// shared memory instead of being cached in registers, which the classic
// implicit-warp-sync idiom depends on.
// NOTE(review): this pre-Volta idiom relies on lockstep warp execution; on
// Volta+ (independent thread scheduling) it should use __syncwarp() between
// steps or warp shuffles instead -- confirm the target architecture.
template <unsigned int blockSize>
__device__ void warpReduce(volatile int* sdata, int tid) {
if (blockSize >= 64) sdata[tid] += sdata[tid + 32];
if (blockSize >= 32) sdata[tid] += sdata[tid + 16];
if (blockSize >= 16) sdata[tid] += sdata[tid + 8];
if (blockSize >= 8) sdata[tid] += sdata[tid + 4];
if (blockSize >= 4) sdata[tid] += sdata[tid + 2];
if (blockSize >= 2) sdata[tid] += sdata[tid + 1];
}
// Sum-reduction of g_idata[0..n) into one partial sum per block
// (g_odata[blockIdx.x]). Requires dynamic shared memory of
// blockDim.x * sizeof(int).
// NOTE(review): `blockSize` is not declared in this function; it is
// presumably a macro or template parameter supplied by includes.h and must
// equal blockDim.x -- confirm.
__global__ void reduce6(int *g_idata, int *g_odata, int n) {
    extern __shared__ int sdata[];
    // First level of reduction: grid-stride over global memory, two
    // elements per thread per step, accumulating into shared memory.
    unsigned int tid = threadIdx.x;
    unsigned int i = blockIdx.x*(blockSize*2) + threadIdx.x;
    unsigned int gridSize = blockSize*2*gridDim.x;
    sdata[tid] = 0;
    while (i < n) {
        sdata[tid] += g_idata[i];
        // BUGFIX: the partner element must be bounds-checked too; the old
        // code read g_idata[i+blockSize] out of bounds whenever n is not a
        // multiple of 2*blockSize.
        if (i + blockSize < n)
            sdata[tid] += g_idata[i+blockSize];
        i += gridSize;
    }
    __syncthreads();
    // Tree reduction in shared memory; branches on blockSize are resolved
    // at compile time.
    if (blockSize >= 512) {
        if (tid < 256) { sdata[tid] += sdata[tid + 256]; } __syncthreads();
    }
    if (blockSize >= 256) {
        if (tid < 128) { sdata[tid] += sdata[tid + 128]; } __syncthreads();
    }
    if (blockSize >= 128) {
        if (tid < 64) { sdata[tid] += sdata[tid + 64]; } __syncthreads();
    }
    // Last 64 elements handled warp-synchronously.
    if (tid < 32) warpReduce<blockSize>(sdata, tid);
    // Write this block's partial sum to global memory.
    if (tid == 0) g_odata[blockIdx.x] = sdata[0];
}
14,990 | #include "includes.h"
// Tiled matrix multiply: C (CRows x CCols) = A (ARows x ACols) * B (BRows x
// BCols), staging TILE_DIM x TILE_DIM tiles of A and B through shared memory.
// NOTE(review): TILE_DIM comes from includes.h and must equal blockDim.x and
// blockDim.y -- the tile loads index shared memory directly with threadIdx;
// confirm the launch configuration.
__global__ void gpu_multiply(float* A, float* B, float* C, int ARows, int ACols, int BRows, int BCols, int CRows, int CCols) {
float CValue = 0;
int Row = blockIdx.y*TILE_DIM + threadIdx.y;
int Col = blockIdx.x*TILE_DIM + threadIdx.x;
__shared__ float As[TILE_DIM][TILE_DIM];
__shared__ float Bs[TILE_DIM][TILE_DIM];
// Walk the shared K dimension one tile at a time (ceil(ACols/TILE_DIM)
// steps); out-of-range cells are zero-padded so no partial-tile special case
// is needed.
for (int k = 0; k < (TILE_DIM + ACols - 1) / TILE_DIM; k++) {
if (k*TILE_DIM + threadIdx.x < ACols && Row < ARows)
As[threadIdx.y][threadIdx.x] = A[Row*ACols + k * TILE_DIM + threadIdx.x];
else
As[threadIdx.y][threadIdx.x] = 0.0;
if (k*TILE_DIM + threadIdx.y < BRows && Col < BCols)
Bs[threadIdx.y][threadIdx.x] = B[(k*TILE_DIM + threadIdx.y)*BCols + Col];
else
Bs[threadIdx.y][threadIdx.x] = 0.0;
// Whole tile must be loaded before any thread multiplies.
__syncthreads();
for (int n = 0; n < TILE_DIM; ++n)
CValue += As[threadIdx.y][n] * Bs[n][threadIdx.x];
// And fully consumed before the next iteration overwrites it.
__syncthreads();
}
if (Row < CRows && Col < CCols) C[((blockIdx.y * blockDim.y + threadIdx.y)*CCols) + (blockIdx.x*blockDim.x) + threadIdx.x] = CValue;
}
14,991 | #include "includes.h"
// Stream compaction: chunk i's cl/cs values are written to output slot
// d_gcs[i] only when the prefix-sum array d_gcs assigns it a non-empty slot
// (d_gcs[i+1] > d_gcs[i]).
__global__ void set_packed_cl_cs(int *d_packed_cl, int *d_packed_cs, int *d_cl, int *d_cs, int *d_gcs, int chunk_num)
{
    const int chunk = blockIdx.x * blockDim.x + threadIdx.x;
    if (chunk >= chunk_num) return;

    const int dst = d_gcs[chunk];
    if (d_gcs[chunk + 1] - dst > 0) {
        d_packed_cl[dst] = d_cl[chunk];
        d_packed_cs[dst] = d_cs[chunk];
    }
}
14,992 | #include <stdio.h>
#include <stdlib.h>
#include <string.h>
#define THREADS_PER_BLOCK 1024
// Histogram array[0..n) into 10 global buckets of width 100.
// B is assumed to be zero-initialized by the caller.
__global__ void parta(int *array, int *B, int n){
    const int gid = threadIdx.x + blockIdx.x * blockDim.x;
    if (gid < n)
        atomicAdd(&B[array[gid] / 100], 1);
    __syncthreads();
}
// Block-privatized histogram: accumulate into a shared-memory histogram
// first, then have thread 0 merge the block's counts into global B.
// B is assumed zero-initialized by the caller.
__global__ void partb(int *array, int *B, int n){
    __shared__ int localB[10];
    int i;
    int index = threadIdx.x + blockIdx.x * blockDim.x;
    // BUGFIX: shared memory is uninitialized; the local histogram must be
    // zeroed before any atomicAdd or the counts start from garbage.
    if (threadIdx.x < 10)
        localB[threadIdx.x] = 0;
    __syncthreads();
    if (index < n){
        atomicAdd(&localB[array[index] / 100], 1);
    }
    __syncthreads();
    // One merge per block to cut global-memory atomic contention.
    if (threadIdx.x == 0){
        for (i = 0; i < 10; i++){
            atomicAdd(&B[i], localB[i]);
        }
    }
}
// Hillis-Steele inclusive prefix sum over B, run by a single block with one
// thread per element.
// NOTE(review): main() passes array_len for n, but the intended bound is
// the number of valid elements in B (10). Iterations with d >= blockDim.x
// are harmless no-ops, so the result is still correct -- just wasteful.
// `val` is only read under the same `threadIdx.x >= d` guard that wrote it,
// so the apparently-uninitialized read never actually happens.
__global__ void partc(int *B, int n){
int d, val;
for (d = 1; d < n; d = d*2){
// Read phase: fetch the value d slots to the left.
if (threadIdx.x >= d)
val = B[threadIdx.x-d];
__syncthreads();
// Write phase: all reads complete, safe to update in place.
if (threadIdx.x >= d)
B[threadIdx.x] += val;
__syncthreads();
}
}
// Reads comma-separated integers from inp.txt, histograms them into 10
// buckets of width 100 on the GPU (two variants), then prefix-sums the
// histogram, writing q2a/q2b/q2c output files.
int main(void) {
    int numcomma = 0;
    // BUGFIX: fgetc returns int; storing it in a char makes the EOF test
    // unreliable (infinite loop where char is unsigned, false EOF on 0xFF).
    int c;
    FILE* stream = fopen("inp.txt", "r");
    while(1){
        c = fgetc(stream);
        if (c == EOF)
            break;
        if (c == ',')
            numcomma ++;
    }
    printf("%d\n", numcomma);
    fclose(stream);
    int array[numcomma+1];
    stream = fopen("inp.txt", "r");
    int i;
    for (i = 0; i <= numcomma; i ++){
        fscanf(stream, "%d,", &array[i]);
    }
    printf("%d\n", array[3]);
    fclose(stream);
    int array_len = numcomma + 1;
    int *d_array;
    int B[10];
    int *d_B;
    int size = sizeof(array);
    // Q2a: plain global-memory histogram.
    cudaMalloc((void **)&d_array, size);
    cudaMalloc((void **)&d_B, sizeof(int)*10);
    // BUGFIX: cudaMalloc does not zero memory -- the histogram counters
    // must start at 0 or the results are garbage.
    cudaMemset(d_B, 0, sizeof(int)*10);
    cudaMemcpy(d_array, &array, size, cudaMemcpyHostToDevice);
    parta<<<(array_len + THREADS_PER_BLOCK - 1)/THREADS_PER_BLOCK,THREADS_PER_BLOCK>>>(d_array, d_B, array_len);
    cudaMemcpy(&B, d_B, sizeof(int)*10, cudaMemcpyDeviceToHost);
    cudaFree(d_B); cudaFree(d_array);
    FILE *q2a = fopen("q2a.txt", "w+");
    for (i = 0; i <= 9; i++){
        fprintf(q2a, "%d", B[i]);
        if (i < 9) fprintf(q2a, ", ");
    }
    fclose(q2a);
    // Q2b: shared-memory-privatized histogram.
    cudaMalloc((void **)&d_array, size);
    cudaMalloc((void **)&d_B, sizeof(int)*10);
    cudaMemset(d_B, 0, sizeof(int)*10);  // BUGFIX: zero before accumulating
    cudaMemcpy(d_array, &array, size, cudaMemcpyHostToDevice);
    partb<<<(array_len + THREADS_PER_BLOCK - 1)/THREADS_PER_BLOCK,THREADS_PER_BLOCK>>>(d_array, d_B, array_len);
    cudaMemcpy(&B, d_B, sizeof(int)*10, cudaMemcpyDeviceToHost);
    cudaFree(d_B); cudaFree(d_array);
    FILE *q2b = fopen("q2b.txt", "w+");
    for (i = 0; i <= 9; i++){
        fprintf(q2b, "%d", B[i]);
        if (i < 9) fprintf(q2b, ", ");
    }
    fclose(q2b);
    // Q2c: inclusive prefix sum over the 10 histogram bins.
    cudaMalloc((void **)&d_B, sizeof(int)*10);
    cudaMemcpy(d_B, &B, sizeof(int)*10, cudaMemcpyHostToDevice);
    // BUGFIX: the scan covers the 10 bins, not array_len elements.
    partc<<<1,10>>>(d_B, 10);
    cudaMemcpy(&B, d_B, sizeof(int)*10, cudaMemcpyDeviceToHost);
    cudaFree(d_B);
    FILE *q2c = fopen("q2c.txt", "w+");
    for (i = 0; i <= 9; i++){
        fprintf(q2c, "%d", B[i]);
        if (i < 9) fprintf(q2c, ", ");
    }
    fclose(q2c);
}
|
14,993 | /*
* EDDL Library - European Distributed Deep Learning Library.
* Version: 0.6
* copyright (c) 2020, Universidad Politécnica de Valencia (UPV), PRHLT Research Centre
* Date: April 2020
* Author: PRHLT Research Centre, UPV, (rparedes@prhlt.upv.es), (jon@prhlt.upv.es)
* All rights reserved
*/
#include <string.h>
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <cuda.h>
// GPU: Truth value testing
// Device-side "all elements == 1.0f" test: `result` must start true and is
// cleared by any thread that observes a non-1 value. The `result` guard in
// the condition is a best-effort early-out, not synchronization.
// NOTE(review): a __global__ parameter declared `bool &` is a reference to
// caller memory; unless the underlying bool lives in device-visible
// (managed/device) memory this dereferences a host address on the GPU --
// verify how the launcher allocates it. The plain non-atomic writes are
// benign only because every writer stores the same value (false).
__global__ void glogical_all(float *A, int size, bool &result){
long int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
// if(!result) return; // Abort if there is a result
if (thread_id_x < size && result){
if (A[thread_id_x] != 1.0f){
result = false;
// return;
}
}
}
// Device-side "any element == 1.0f" test: `result` must start false and is
// set by any thread that observes a 1.0f value.
// NOTE(review): same `bool &` caveat as glogical_all -- the referenced bool
// must live in device-visible memory; verify the launcher. Non-atomic
// writes are benign because every writer stores the same value (true).
__global__ void glogical_any(float *A, int size, bool &result){
long int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
// if(result) return; // Abort if there is a result
if (thread_id_x < size && !result){
if (A[thread_id_x] == 1.0f){
result = true;
// return;
}
}
}
// B[i] <- 1.0f when A[i] is finite (not NaN / not infinite), else 0.0f.
__global__ void gpu_isfinite(float *A, float *B, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    B[idx] = isfinite(A[idx]);
}
// B[i] <- 1.0f when A[i] is +/- infinity, else 0.0f.
__global__ void gpu_isinf(float *A, float *B, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    B[idx] = isinf(A[idx]);
}
// B[i] <- 1.0f when A[i] is NaN, else 0.0f.
__global__ void gpu_isnan(float *A, float *B, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    B[idx] = isnan(A[idx]);
}
// B[i] <- 1.0f when A[i] is negative infinity, else 0.0f.
__global__ void gpu_isneginf(float *A, float *B, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    B[idx] = (isinf(A[idx]) && A[idx] < 0.0f);
}
// B[i] <- 1.0f when A[i] is positive infinity, else 0.0f.
__global__ void gpu_isposinf(float *A, float *B, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    B[idx] = (isinf(A[idx]) && A[idx] > 0.0f);
}
// Element-wise logical AND (non-zero is truthy): C[i] <- 0.0f or 1.0f.
__global__ void glogical_and(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = ((A[idx] != 0.0f) && (B[idx] != 0.0f));
}
// Element-wise logical OR (non-zero is truthy): C[i] <- 0.0f or 1.0f.
__global__ void glogical_or(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = ((A[idx] != 0.0f) || (B[idx] != 0.0f));
}
// Element-wise logical NOT (non-zero is truthy): B[i] <- 0.0f or 1.0f.
__global__ void glogical_not(float *A, float *B, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    B[idx] = (A[idx] == 0.0f);
}
// Element-wise logical XOR (non-zero is truthy): C[i] <- 0.0f or 1.0f.
__global__ void glogical_xor(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = ((A[idx] != 0.0f) != (B[idx] != 0.0f));
}
// Device-side "all elements close" test using the numpy.allclose criterion
// |A-B| <= atol + rtol*|B|. `allclose` must start true; any thread that
// finds a non-close pair clears it.
// NOTE(review): `equal_nan` is accepted but never used, so NaN inputs
// always compare not-close regardless of the flag -- confirm against the
// CPU implementation. Same `bool &` device-visibility caveat as
// glogical_all applies to `allclose`.
__global__ void glogical_allclose(float *A, float *B, float rtol, float atol, bool equal_nan, int size, bool &allclose){
long int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
// if(!allclose) return; // Abort if there is a result
if (thread_id_x < size && allclose){
bool close = fabsf(A[thread_id_x] - B[thread_id_x]) <= (atol + rtol * fabsf(B[thread_id_x]));
if (!close){
allclose = false;
// return;
}
}
}
// Element-wise numpy.isclose: C[i] <- 1.0f when |A-B| <= atol + rtol*|B|,
// else 0.0f. When equal_nan is set, two NaNs also count as close.
__global__ void glogical_isclose(float *A, float *B, float *C, float rtol, float atol, bool equal_nan, int size){
    long int thread_id_x = blockIdx.x * blockDim.x + threadIdx.x;
    if (thread_id_x < size){
        bool close = fabsf(A[thread_id_x] - B[thread_id_x]) <= (atol + rtol * fabsf(B[thread_id_x]));
        // BUGFIX: equal_nan was accepted but ignored; honor it with numpy's
        // semantics -- NaN == NaN is "close" when the flag is set.
        if (equal_nan && isnan(A[thread_id_x]) && isnan(B[thread_id_x]))
            close = true;
        C[thread_id_x] = close;
    }
}
// Element-wise comparison: C[i] <- 1.0f when A[i] > B[i], else 0.0f.
__global__ void glogical_greater(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = (A[idx] > B[idx]);
}
// Element-wise comparison: C[i] <- 1.0f when A[i] >= B[i], else 0.0f.
__global__ void glogical_greater_equal(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = (A[idx] >= B[idx]);
}
// Element-wise comparison: C[i] <- 1.0f when A[i] < B[i], else 0.0f.
__global__ void glogical_less(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = (A[idx] < B[idx]);
}
// Element-wise comparison: C[i] <- 1.0f when A[i] <= B[i], else 0.0f.
__global__ void glogical_less_equal(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = (A[idx] <= B[idx]);
}
// Element-wise comparison: C[i] <- 1.0f when A[i] == B[i], else 0.0f.
__global__ void glogical_equal(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = (A[idx] == B[idx]);
}
// Element-wise comparison: C[i] <- 1.0f when A[i] != B[i], else 0.0f.
__global__ void glogical_not_equal(float *A, float *B, float *C, int size){
    const long int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size) return;
    C[idx] = (A[idx] != B[idx]);
}
|
14,994 |
// =================================================================================================
// This file is CUDA translation of the original part of the CLTune project, which loosely follows the Google C++ styleguide and uses
// a tab-size of two spaces and a max-width of 100 characters per line.
//
// Author: cedric.nugteren@surfsara.nl (Cedric Nugteren)
//
// This file contains an example OpenCL kernel as part of the conv.cc example. This assumes that
// the input matrix is bigger than the output matrix, as it already has padding on the borders. So
// no check is needed within the kernel. This also assumes the workgroup dimension is a multiple
// of the matrix sizes.
//
// -------------------------------------------------------------------------------------------------
//
// Copyright 2014 SURFsara
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// =================================================================================================
// Settings (synchronise these with "conv.cc", "conv.opencl" and "conv_reference.opencl")
#define HFS (3) // Half filter size
#define FS (HFS+HFS+1) // Filter size
// =================================================================================================
// Reference implementation of the 2D convolution example
// Reference 2D convolution: one thread per output pixel. `src` carries HFS
// pixels of padding on every border, so no boundary checks are needed; the
// grid must exactly cover size_x x size_y output pixels.
extern "C" __global__ void conv_reference(const int size_x, const int size_y,
                                          const float* src,
                                          const float* coeff,
                                          float* dest) {
    const int out_x = blockDim.x*blockIdx.x + threadIdx.x;  // 0 .. size_x-1
    const int out_y = blockDim.y*blockIdx.y + threadIdx.y;  // 0 .. size_y-1

    // Accumulate the FS x FS neighbourhood (same fx-outer/fy-inner order as
    // before, so the floating-point sum is bit-identical).
    float sum = 0.0f;
    for (int fx=-HFS; fx<=HFS; ++fx) {
        for (int fy=-HFS; fy<=HFS; ++fy) {
            const int in_x = out_x + HFS + fx;
            const int in_y = out_y + HFS + fy;
            sum += coeff[(fy+HFS)*FS + (fx+HFS)] * src[in_y*(size_x+2*HFS) + in_x];
        }
    }
    dest[out_y*size_x + out_x] = sum;
}
// =================================================================================================
|
14,995 | #include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <sys/types.h>
#include <cuda_runtime_api.h>
#define sizeGrid 65535
#define sizeBlock 1024
#define sizeWarp 32
// Hebbian training of a Hopfield weight matrix: entry x = (row, col) of the
// dimP x dimP matrix is the average over nP patterns of the product of the
// two bipolarized (+/-1) pattern bits; self-connections are zeroed.
__global__ void training(int dimP, int nP, int *ps, float *ws)
{
    const int x = blockIdx.x*blockDim.x + threadIdx.x;
    if (x >= dimP*dimP) return;

    const int row = x / dimP;
    const int col = x % dimP;

    float acc = 0.0f;
    for (int i = 0; i < nP; i++)
        acc += (float)((2*ps[i*dimP+row]-1) * (2*ps[i*dimP+col]-1));

    // No self-connections on the diagonal.
    if (row == col)
        acc = 0.0f;

    ws[x] = acc/nP;
}
// One warp per Hopfield neuron: warp `wid` dot-products weight row `wid`
// with the bipolarized pattern, reduces lane partials through shared
// memory, and lane 0 writes the thresholded activation (0 or 1) to at[wid].
// Requires dynamic shared memory of blockDim.x * sizeof(float).
// NOTE(review): the __syncthreads() calls sit inside `if (wid < dimP)`; if
// some threads of a block fail that test while others pass, the barrier is
// divergent (undefined behavior). The launch geometry must keep whole
// blocks in range, or the reduction should use __syncwarp()/shuffles.
__global__ void hopActivation(int dimP, float *ws, int *pt, int *at)
{
extern __shared__ float sdata [];
int tid = blockDim.x*blockIdx.x+threadIdx.x;
int wid = tid / sizeWarp;
int lane = tid % sizeWarp;
if (wid < dimP ){
// Each lane strides over this neuron's weight row, 32 apart.
int start_neuron = (wid*dimP);
int end_neuron = ((wid+1)*dimP);
sdata[threadIdx.x]=0;
for(int i=start_neuron+lane;i<end_neuron;i+=32)
sdata[threadIdx.x]+= ws[i] * (2*pt[i % dimP ] -1);
__syncthreads();
// Tree reduction of the 32 lane partials in shared memory.
if (lane < 16) sdata[threadIdx.x] += sdata[threadIdx.x+16]; __syncthreads();
if (lane < 8) sdata[threadIdx.x] += sdata[threadIdx.x+ 8]; __syncthreads();
if (lane < 4) sdata[threadIdx.x] += sdata[threadIdx.x+ 4]; __syncthreads();
if (lane < 2) sdata[threadIdx.x] += sdata[threadIdx.x+ 2]; __syncthreads();
if (lane < 1) sdata[threadIdx.x] += sdata[threadIdx.x+ 1];
// sign(sum) mapped to {0, 1}: positive -> 1, non-positive -> 0.
if (lane == 0)
at[wid] = ((sdata[threadIdx.x] > 0) - (sdata[threadIdx.x] < 0)+1)/2;
}
}
// Trains Hopfield weights on the GPU from nPatterns binary patterns of
// dimPattern bits each. Returns a heap-allocated dimPattern*dimPattern
// float matrix (caller frees), or NULL on any allocation/copy failure.
float * lState (int nPatterns, int dimPattern, int *patterns)
{
    int *ps;
    float *weights, *ws;
    int sizeP = dimPattern*sizeof(int);
    int sizeW = dimPattern*dimPattern;
    if ((weights = (float*) malloc (sizeW*sizeof(float))) == NULL ) return NULL;
    // BUGFIX: previous revision leaked weights/ps/ws on every error path.
    if ( cudaSuccess != cudaMalloc ( &ps, (sizeP*nPatterns))) {
        free(weights);
        return NULL;
    }
    if ( cudaSuccess != cudaMalloc ( &ws, (sizeW*sizeof(float)))) {
        cudaFree(ps); free(weights);
        return NULL;
    }
    if ( cudaSuccess != cudaMemcpy (ps, patterns, sizeP*nPatterns, cudaMemcpyHostToDevice)) {
        cudaFree(ps); cudaFree(ws); free(weights);
        return NULL;
    }
    dim3 GRID_DIM ((sizeW+sizeBlock-1)/sizeBlock);
    dim3 BLOCK_DIM (sizeBlock);
    training<<< GRID_DIM, BLOCK_DIM >>> (dimPattern, nPatterns, ps, ws);
    // The blocking copy-back also synchronizes with the kernel.
    if (cudaSuccess != cudaMemcpy (weights, ws, sizeW*sizeof(float), cudaMemcpyDeviceToHost)) {
        free(weights);
        weights = NULL;
    }
    cudaFree(ps);
    cudaFree(ws);
    return weights;
}
// Runs one GPU Hopfield update: multiplies the weight matrix by the
// bipolarized pattern and thresholds each neuron. Returns a heap-allocated
// activation vector of dimP ints (caller frees), or NULL on failure.
int * actFunc(int dimP, int *pattern, float *weight)
{
    float *ws;
    int *pt, *activation, *at;
    if ( (activation = (int *) malloc (dimP*sizeof(int))) == NULL) return NULL;
    // BUGFIX: previous revision leaked every earlier allocation on failure.
    if (cudaSuccess != cudaMalloc (&ws, dimP*dimP*sizeof(float))) {
        free(activation);
        return NULL;
    }
    if (cudaSuccess != cudaMalloc (&pt, dimP*sizeof(int))) {
        cudaFree(ws); free(activation);
        return NULL;
    }
    if (cudaSuccess != cudaMalloc (&at, dimP*sizeof(int))) {
        cudaFree(ws); cudaFree(pt); free(activation);
        return NULL;
    }
    if ( cudaSuccess != cudaMemcpy (ws, weight, dimP*dimP*sizeof(float), cudaMemcpyHostToDevice)) {
        cudaFree(ws); cudaFree(pt); cudaFree(at); free(activation);
        return NULL;
    }
    if ( cudaSuccess != cudaMemcpy (pt, pattern, dimP*sizeof(int), cudaMemcpyHostToDevice)) {
        cudaFree(ws); cudaFree(pt); cudaFree(at); free(activation);
        return NULL;
    }
    // One warp (32 threads) per neuron.
    dim3 GRID_DIM (((dimP*32)+sizeBlock-1)/sizeBlock);
    dim3 BLOCK_DIM (sizeBlock);
    hopActivation<<< GRID_DIM, BLOCK_DIM, sizeBlock*sizeof(float) >>> (dimP, ws, pt, at);
    if (cudaSuccess != cudaMemcpy (activation, at, dimP*sizeof(int), cudaMemcpyDeviceToHost)) {
        free(activation);
        activation = NULL;
    }
    cudaFree(ws);
    cudaFree(pt);
    cudaFree(at);
    return activation;
}
|
14,996 | /*
* Copyright 1993-2010 NVIDIA Corporation. All rights reserved.
*
* Please refer to the NVIDIA end user license agreement (EULA) associated
* with this source code for terms and conditions that govern your use of
* this software. Any use, reproduction, disclosure, or distribution of
* this software and related documentation outside the terms of the EULA
* is strictly prohibited.
*
*/
/* Vector addition: C = A + B.
*
* This sample is a very basic sample that implements element by element
* vector addition. It is the same as the sample illustrating Chapter 3
* of the programming guide with some additions like error checking.
*
*/
// Includes
#include <stdio.h>
#include <stdlib.h>
// includes CUDA
#include <cuda_runtime.h>
#define THREADS_PER_BLOCK 256
#define FIRST_PHASE_TIME 0xF//0xFFF
#define SECOND_PHASE_TIME 0xF//0xFFF
// Variables
float* h_A;
float* h_B;
float* h_C;
float* d_A;
float* d_B;
float* d_C;
bool noprompt = false;
// Functions
void CleanupResources(void);
void RandomInit(float*, int);
void ParseArguments(int, char**);
////////////////////////////////////////////////////////////////////////////////
// These are CUDA Helper functions
// This will output the proper CUDA error strings in the event that a CUDA host call returns an error
#define checkCudaErrors(err) __checkCudaErrors (err, __FILE__, __LINE__)
// Aborts the process with file/line context when a CUDA runtime call failed.
inline void __checkCudaErrors(cudaError err, const char *file, const int line )
{
    if (err == cudaSuccess)
        return;
    fprintf(stderr, "%s(%i) : CUDA Runtime API error %d: %s.\n", file, line,
            (int)err, cudaGetErrorString(err));
    exit(-1);
}
// This will output the proper error string when calling cudaGetLastError
#define getLastCudaError(msg) __getLastCudaError (msg, __FILE__, __LINE__)
// Checks the sticky last-error state (set by kernel launches and async
// calls) and aborts with context if anything failed.
inline void __getLastCudaError(const char *errorMessage, const char *file, const int line )
{
    const cudaError_t err = cudaGetLastError();
    if (err == cudaSuccess)
        return;
    fprintf(stderr, "%s(%i) : getLastCudaError() CUDA error : %s : (%d) %s.\n",
            file, line, errorMessage, (int)err, cudaGetErrorString(err));
    exit(-1);
}
// end of CUDA Helper Functions
// Device code
// Synthetic load-generator kernel: phase 1 has every thread squaring its
// shared-memory value while watching counter[0]; phase 2 keeps only the
// first 32 threads of each block busy. The A/B/C vector-add parameters are
// currently unused (the real add is commented out at the bottom).
// NOTE(review): counter[0] is only incremented by thread 0 and `counter`
// is not volatile; other threads' reads of counter[0] may be hoisted out of
// the loop by the compiler, risking an infinite spin -- presumably the
// current build happens to reload it. Consider `volatile` if this is kept.
__global__ void VecAdd(const float* A, const float* B, float* C, int N)
{
int i = blockDim.x * blockIdx.x + threadIdx.x;
int thread_id = threadIdx.x;
int k = thread_id;
__shared__ long counter[THREADS_PER_BLOCK];
__shared__ double result[THREADS_PER_BLOCK];
counter[k] = 0;
result[k] = k;
__syncthreads();
// Phase 1: all threads spin until thread 0's counter hits the limit.
while (counter[0] < FIRST_PHASE_TIME) {
counter[k] ++;
result[k] *= result[k];
}
__syncthreads();
counter[k] = 0;
__syncthreads();
// Phase 2: only the first warp of the block stays busy.
if (thread_id < 32) {
while (counter[0] < SECOND_PHASE_TIME) {
counter[k] ++;
result[k] *= result[k];
}
}
__syncthreads();
//for(k=0;k<100;k++){
//if (i < N)
// C[i] = A[i] + B[i];
//}
}
// Host code
// Launch harness for the two-phase spin kernel above. Vectors are copied in
// and out like a real vector-add, but the kernel currently ignores them.
int main(int argc, char** argv)
{
    printf("Vector Addition\n");
    int N = 8192;
    size_t size = N * sizeof(float);
    //ParseArguments(argc, argv);
    // Allocate input vectors h_A and h_B in host memory
    h_A = (float*)malloc(size);
    if (h_A == 0) CleanupResources();
    h_B = (float*)malloc(size);
    if (h_B == 0) CleanupResources();
    h_C = (float*)malloc(size);
    if (h_C == 0) CleanupResources();
    // Initialize input vectors
    RandomInit(h_A, N);
    RandomInit(h_B, N);
    // Allocate vectors in device memory
    checkCudaErrors( cudaMalloc((void**)&d_A, size) );
    checkCudaErrors( cudaMalloc((void**)&d_B, size) );
    checkCudaErrors( cudaMalloc((void**)&d_C, size) );
    // Copy vectors from host memory to device memory
    checkCudaErrors( cudaMemcpy(d_A, h_A, size, cudaMemcpyHostToDevice) );
    checkCudaErrors( cudaMemcpy(d_B, h_B, size, cudaMemcpyHostToDevice) );
    // Fixed grid of 30 blocks: this is a load generator, so the launch is
    // deliberately not sized from N.
    int threadsPerBlock = THREADS_PER_BLOCK;
    //int blocksPerGrid = (N + threadsPerBlock - 1) / threadsPerBlock;
    int blocksPerGrid = 30;
    VecAdd<<<blocksPerGrid, threadsPerBlock>>>(d_A, d_B, d_C, N);
    getLastCudaError("kernel launch failure");
#ifdef _DEBUG
    checkCudaErrors( cudaDeviceSynchronize() );
#endif
    // Copy result from device memory to host memory (blocking copy also
    // synchronizes with the kernel).
    checkCudaErrors( cudaMemcpy(h_C, d_C, size, cudaMemcpyDeviceToHost) );
    // Verification is disabled while the kernel does not compute C.
    // (FIX: removed the unused loop counter `i` that served the
    // commented-out verify loop.)
    /*for (int i = 0; i < N; ++i) {
    float sum = h_A[i] + h_B[i];
    if (fabs(h_C[i] - sum) > 1e-5)
    break;
    }*/
    CleanupResources();
}
// Releases whichever device and host buffers were successfully allocated.
void CleanupResources(void)
{
    // Device buffers first.
    if (d_A) cudaFree(d_A);
    if (d_B) cudaFree(d_B);
    if (d_C) cudaFree(d_C);
    // Then host buffers (free(NULL) would be safe, but keep the guards).
    if (h_A) free(h_A);
    if (h_B) free(h_B);
    if (h_C) free(h_C);
}
// Allocates an array with random float entries.
// Fills data[0..n) with pseudo-random floats in [0, 1] using rand().
void RandomInit(float* data, int n)
{
    for (int i = 0; i < n; ++i) {
        data[i] = (float)rand() / (float)RAND_MAX;
    }
}
// Parse program arguments
// Scans the command line for --noprompt / -noprompt and sets the global
// `noprompt` flag; stops at the first match.
void ParseArguments(int argc, char** argv)
{
    // BUGFIX: start at index 1 -- argv[0] is the program name, not an option.
    for (int i = 1; i < argc; ++i) {
        if (strcmp(argv[i], "--noprompt") == 0 ||
            strcmp(argv[i], "-noprompt") == 0)
        {
            noprompt = true;
            break;
        }
    }
}
|
14,997 | // Author: Chihiro Nakatani
// February 9th, 2021
// This script contains the inner product function with cpp.
#include <iostream>
#include <numeric>
#define SIZE_OF_ARRAY(array) (sizeof(array)/sizeof(array[0]))
// Define kernel function for inner product
// Element-wise product z[i] = x[i] * y[i]; the grid-stride loop lets any
// launch configuration cover all n elements.
__global__
void inner_product(int n, float *x, float *y, float *z)
{
    const int first = blockIdx.x * blockDim.x + threadIdx.x;
    const int step = blockDim.x * gridDim.x;
    for (int i = first; i < n; i += step) {
        z[i] = x[i] * y[i];
    }
}
// Define main function
// Computes the inner product of two length-N vectors: element-wise multiply
// on the GPU, then a host-side reduction. argv[1] = number of blocks.
int main(int argc, char *argv[])
{
    // BUGFIX: argv[1] was read unconditionally; reject a missing argument.
    if (argc < 2) {
        std::cerr << "usage: " << argv[0] << " <numBlocks>" << std::endl;
        return 1;
    }
    // Define input vector length
    int N = 10000000;
    std::cout << "Vector size : " << N << std::endl;
    // BUGFIX: the old code allocated with `new float[N]` and then
    // immediately overwrote the pointers via cudaMallocManaged, leaking
    // ~120 MB of host memory. Managed allocations are the only ones needed.
    float *x = nullptr;
    float *y = nullptr;
    float *z = nullptr;
    cudaMallocManaged(&x, N*sizeof(float));
    cudaMallocManaged(&y, N*sizeof(float));
    cudaMallocManaged(&z, N*sizeof(float));
    for (int i = 0; i < N; i++) {
        x[i] = 1;
        y[i] = 2;
        z[i] = 0;
    }
    // Execute kernel on vector on the GPU
    int blockSize = 32;
    int numBlocks = atoi(argv[1]);
    std::cout << "CUDA core numbers: " << numBlocks * blockSize << std::endl;
    inner_product<<<numBlocks, blockSize>>>(N, x, y, z);
    // Wait for GPU to finish before accessing on host
    cudaDeviceSynchronize();
    // BUGFIX: accumulate with a double initial value -- an int accumulator
    // (the old `0`) truncates every partial sum, and a float one loses
    // precision once the running total passes 2^24.
    float inner_product_value = std::accumulate(z, z + N, 0.0);
    std::cout << "Inner product (z = (x,y)): " << inner_product_value << std::endl;
    // Free memory which is used for vectors
    cudaFree(x);
    cudaFree(y);
    cudaFree(z);
    return 0;
}
14,998 | #include <stdio.h>
#include <stdlib.h>
#define BLOCK_SIZE 100
#define GRID_SIZE 100
#define N GRID_SIZE * BLOCK_SIZE
// C = A + B, one element per thread. No bounds check: the launch is exactly
// GRID_SIZE * BLOCK_SIZE threads for N = GRID_SIZE * BLOCK_SIZE elements.
__global__ void VectorAdd (int *A, int *B, int *C) {
    const int idx = blockIdx.x * blockDim.x + threadIdx.x;
    C[idx] = A[idx] + B[idx];
}
// Adds two N-element vectors on the GPU and prints inputs and result.
int main () {
    int *hA, *hB, *hC;
    int *dA, *dB, *dC;
    int size = N * sizeof(int);
    int i;
    printf ("%d", size);
    // STEP 1 : Allocate memory for Host and Device variables
    hA = (int *) malloc(size);
    hB = (int *) malloc(size);
    hC = (int *) malloc(size);
    cudaMalloc((void **)&dA, size);
    cudaMalloc((void **)&dB, size);
    cudaMalloc((void **)&dC, size);
    for (i = 0; i < N ; i++) {
        hA[i] = i;
        hB[i] = 2*i;
    }
    printf("\n Arrays to be added are:\n");
    printf("Array A:\n");
    for (i = 0; i < N ; i++) {
        printf("%d ", hA[i]);
    }
    printf("\nArray B:\n");
    for (i = 0; i < N ; i++) {
        printf("%d ", hB[i]);
    }
    // STEP 2: Copy data from Host to Device
    cudaMemcpy(dA, hA, size, cudaMemcpyHostToDevice);
    cudaMemcpy(dB, hB, size, cudaMemcpyHostToDevice);
    // STEP 3: Kernel Launch
    VectorAdd<<<GRID_SIZE, BLOCK_SIZE>>> (dA, dB, dC);
    // STEP 4: Copy results back to Host (the blocking copy synchronizes
    // with the kernel)
    cudaMemcpy(hC, dC, size, cudaMemcpyDeviceToHost);
    // STEP 5 : Print the result
    printf("\n\nVector Addition is:\n");
    for (i = 0; i < N ; i++) {
        printf("%d ", hC[i]);
    }
    // BUGFIX: release device and host memory (previously leaked).
    cudaFree(dA); cudaFree(dB); cudaFree(dC);
    free(hA); free(hB); free(hC);
    return 0;
}
|
14,999 | #include "includes.h"
// Naive matrix multiply: C (rows x cols) = a (rows x cols2) * b (cols2 x cols),
// one output element per thread.
__global__ void matrixMul(double * a,double * b, double * C, int cols,int rows,int cols2)
{
    const int r = blockIdx.x * blockDim.x + threadIdx.x;
    const int c = blockIdx.y * blockDim.y + threadIdx.y;
    if (r >= rows || c >= cols) return;

    // Accumulate locally, then store once (same summation order as before).
    double sum = 0;
    for (int k = 0; k < cols2; k++)
        sum += b[k*cols + c] * a[r*cols2 + k];
    C[r*cols + c] = sum;
}
15,000 | #include "cuda.h"
#include "cuda_runtime_api.h"
#include <inttypes.h>
// Computes a single 30-bit Sobol-like low-discrepancy value for index n by
// XOR-combining power-of-two direction numbers selected by the bits of n's
// Gray code, then normalizes into [0, 1). Only output[0] is produced
// (the grid-stride loop runs for ix < 1).
// NOTE(review): `1U << 29U - (uint32_t) i` parses as 1U << (29 - i) because
// `-` binds tighter than `<<`; that matches the 2^(29-i) direction-vector
// pattern, but the missing parentheses deserve a confirming look.
// NOTE(review): lastTestBit is only updated inside `if (testBit)`, so it
// records whether the PREVIOUS SET bit existed, and the i == 29 case shifts
// by 0 -- verify against the reference sequence this was translated from.
extern "C" __global__ void kernel0(double* output, uint32_t n)
{
for (int32_t ix = blockIdx.x * blockDim.x + threadIdx.x;
ix < 1;
ix += blockDim.x * gridDim.x) {
int32_t j;
uint32_t xa;
uint8_t lastTestBit;
xa = 0U;
lastTestBit = 1;
for (int i = 0; i < 30; ++i) {
// Gray code of n: bit i tells whether direction vector i participates.
uint32_t grayCode = n ^ n >> 1U;
uint8_t testBit = (grayCode & 1 << i) != 0;
if (testBit) {
uint32_t v;
v = 1U << 29U - (uint32_t) i; // sobol_dirVs
if (lastTestBit) {
xa = v ^ xa;
}
lastTestBit = testBit;
}
}
// Scale the 30-bit integer into [0, 1).
output[ix] = (double) (int32_t) xa / (double) (1 << 30);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.