serial_no int64 1 24.2k | cuda_source stringlengths 11 9.01M |
|---|---|
20,001 | #include "includes.h"
// Scatter-write: for each of the first n_elem entries of pos, set
// arr[pos[i] * step] = val. One thread per entry; tail threads do nothing.
__global__ void BaseNeuronSetIntPtArray(int *arr, int *pos, int n_elem, int step, int val)
{
  const int i = threadIdx.x + blockIdx.x * blockDim.x;
  if (i >= n_elem)
    return;
  arr[pos[i] * step] = val;
} |
20,002 | #include "includes.h"
// Gather the velocities of a group of atoms: output slot n receives the
// velocity of atom g_group_contents[offset + n]. One thread per atom.
__global__ void gpu_copy_velocity( const int num_atoms, const int offset, const int* g_group_contents, const double* g_vx_i, const double* g_vy_i, const double* g_vz_i, double* g_vx_o, double* g_vy_o, double* g_vz_o)
{
  const int i = threadIdx.x + blockIdx.x * blockDim.x;
  if (i >= num_atoms)
    return;
  const int src = g_group_contents[offset + i];  // source atom index
  g_vx_o[i] = g_vx_i[src];
  g_vy_o[i] = g_vy_i[src];
  g_vz_o[i] = g_vz_i[src];
} |
20,003 | #include "../headers/isinge.cuh"
|
20,004 | #include "includes.h"
// Rotate (y, x) coordinate pairs in place by the angle whose cosine/sine are
// given. Layout: coords[0 .. total-1] holds y values, coords[total .. 2*total-1]
// holds the matching x values; one thread per pair.
__global__ void rotate_2D(float* coords, size_t dim_y, size_t dim_x, float cos_angle, float sin_angle){
  size_t index = blockIdx.x * blockDim.x + threadIdx.x;
  size_t total = dim_x * dim_y;
  if (index < total) {
    // Bug fixes vs. original:
    //  1. coords[index] was read BEFORE the bounds check, so tail threads
    //     performed out-of-range reads.
    //  2. __syncthreads() was called inside this divergent branch (undefined
    //     behavior). Each thread touches only its own pair, so no barrier is
    //     needed at all.
    float old_y = coords[index];
    float old_x = coords[index + total];
    coords[index]         =  cos_angle * old_y + sin_angle * old_x;
    coords[index + total] = -sin_angle * old_y + cos_angle * old_x;
  }
} |
20,005 | #include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <stdlib.h>
#include <stdio.h>
cudaError_t forwardPass(double *x, double *y,
double *W, int row, int column);
// Computes y = W * x for a (row x column) row-major matrix W.
// Launched as <<<row, 1>>>, so blockIdx.x directly indexes the output row.
__global__ void vectorMultiplicationKernel(double *x, double *y, double *W,
int row, int column)
{
int tid = blockIdx.x;
// Guard against a grid larger than the number of rows.
if (tid >= row) {
return;
}
double result = 0;
// Dot product of row `tid` of W with x.
for (int j = 0; j < column; j++) {
result += W[tid * column + j] * x[j];
}
// NOTE(review): this 10^8-iteration increment/decrement pair is a semantic
// no-op; it looks like a deliberate busy-wait to inflate kernel runtime
// (e.g. for scheduling/timing experiments). Confirm intent before removing —
// and beware the compiler may optimize it away entirely.
for (int j = 0; j < 10000; j++) {
for (int k = 0; k < 10000; k++) {
result++;
result--;
}
}
y[tid] = result;
}
// Entry point: reads <rows> <columns> from the command line, builds constant
// test data, runs y = W*x on the GPU via forwardPass, and prints y.
int main(int argc, char *argv[])
{
// Bug fix: the original dereferenced argv[1]/argv[2] unconditionally and
// crashed when run without arguments.
if (argc < 3) {
fprintf(stderr, "Usage: %s <rows> <columns>\n", argv[0]);
return 1;
}
int row = atoi(argv[1]);
int column = atoi(argv[2]);
if (row <= 0 || column <= 0) {
fprintf(stderr, "rows and columns must be positive\n");
return 1;
}
double *W = (double*)malloc(row * column * sizeof(double));
double *x = (double*)malloc(column * sizeof(double));
double *y = (double*)malloc(row * sizeof(double));
if (W == NULL || x == NULL || y == NULL) {
fprintf(stderr, "host allocation failed\n");
return 1;
}
for (int i = 0; i < column; i++) {
x[i] = 10;
}
for (int i = 0; i < row * column; i++) {
W[i] = 10;
}
cudaError_t cudaStatus = forwardPass(x, y, W, row, column);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "vectorMultiplicationWithCuda failed!");
return 1;
}
for (int i = 0; i < row; i++) {
printf("%.2f ", y[i]);
}
printf("\n");
// cudaDeviceReset must be called before exiting in order for profiling and
// tracing tools such as Nsight and Visual Profiler to show complete traces.
cudaStatus = cudaDeviceReset();
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaDeviceReset failed!");
return 1;
}
// Bug fix: the original leaked all three host buffers.
free(W);
free(x);
free(y);
return 0;
}
// Runs y = W * x on the GPU: allocates device buffers, copies inputs, launches
// vectorMultiplicationKernel with one block per row, and copies y back.
// Returns the first CUDA error encountered (cudaSuccess otherwise).
cudaError_t forwardPass(double *x, double *y, double *W,
int row, int column)
{
double *dev_x = 0;
double *dev_y = 0;
double *dev_W = 0;
cudaError_t cudaStatus;
// Choose which GPU to run on, change this on a multi-GPU system.
cudaStatus = cudaSetDevice(0);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaSetDevice failed! Do you have a CUDA-capable GPU installed?");
goto Error;
}
// Bug fix: the original overwrote cudaStatus across all mallocs/memcpys and
// never inspected the intermediate results; a failed allocation went
// unnoticed until a later, confusing failure. Check each call.
cudaStatus = cudaMalloc((void**)&dev_x, column * sizeof(double));
if (cudaStatus != cudaSuccess) { fprintf(stderr, "cudaMalloc failed!"); goto Error; }
cudaStatus = cudaMalloc((void**)&dev_y, row * sizeof(double));
if (cudaStatus != cudaSuccess) { fprintf(stderr, "cudaMalloc failed!"); goto Error; }
cudaStatus = cudaMalloc((void**)&dev_W, row * column * sizeof(double));
if (cudaStatus != cudaSuccess) { fprintf(stderr, "cudaMalloc failed!"); goto Error; }
// Copy input vectors from host memory to GPU buffers.
cudaStatus = cudaMemcpy(dev_x, x, column * sizeof(double), cudaMemcpyHostToDevice);
if (cudaStatus != cudaSuccess) { fprintf(stderr, "cudaMemcpy failed!"); goto Error; }
cudaStatus = cudaMemcpy(dev_W, W, row * column * sizeof(double), cudaMemcpyHostToDevice);
if (cudaStatus != cudaSuccess) { fprintf(stderr, "cudaMemcpy failed!"); goto Error; }
// Launch a kernel on the GPU: one block (of one thread) per output row.
vectorMultiplicationKernel<<<row, 1>>>(dev_x, dev_y, dev_W, row, column);
// Check for any errors launching the kernel
cudaStatus = cudaGetLastError();
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "vectorMultiplicationKernel launch failed: %s\n", cudaGetErrorString(cudaStatus));
goto Error;
}
// cudaDeviceSynchronize waits for the kernel to finish, and returns
// any errors encountered during the launch.
cudaStatus = cudaDeviceSynchronize();
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
goto Error;
}
// Copy output vector from GPU buffer to host memory.
cudaStatus = cudaMemcpy(y, dev_y, row * sizeof(double), cudaMemcpyDeviceToHost);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMemcpy failed!");
goto Error;
}
Error:
cudaFree(dev_x);
cudaFree(dev_y);
cudaFree(dev_W);
return cudaStatus;
}
|
20,006 |
#include <stdio.h>
__device__
// Returns x*x + 2.0 plus the term (x-x)/(x-x), which is 0/0 = NaN for every
// finite x, so the result is always NaN.
// NOTE(review): this looks like a deliberate NaN injector (e.g. for testing
// NaN propagation or compiler folding) — confirm intent before "fixing".
double power(double x)
{
double y = (x-x)/(x-x);
return x*x + 2.0 + y;
}
__global__
// Evaluate power(x) + x on the device and have exactly one thread print it.
void compute(double x)
{
double result = power(x) + x;
const int tid = blockIdx.x * blockDim.x + threadIdx.x;
if (tid == 0) {
printf("y: %f\n", result);
}
}
|
20,007 | // We require std::filesytem, but just requireing std=c++17 does not enforce this for all compilers, so check it works. (I.e. GCC < 8 is a problem.)
// CMake doesn't appear to have knowledge of this feature.
#include <filesystem>
int main() { return 0; } |
20,008 |
#include <cstdio>
#include <cstdlib>
#include <random>
#include <sys/time.h>
/*******************************************************
****************** Device code ************************
******************************************************/
__constant__ double d_alpha;
// C[i] = alpha * A[i] + B[i], with alpha read from __constant__ d_alpha
// (set on the host via cudaMemcpyToSymbol). One thread per element.
__global__ void axpy (const double* A, const double* B, double* C, const unsigned int num_elements) {
    const unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i >= num_elements)
        return;
    C[i] = d_alpha * A[i] + B[i];
}
/*******************************************************
******************** Host code ************************
******************************************************/
#define cudaErrChk(ans) { gpuAssert((ans), __FILE__, __LINE__); }
// Report a CUDA error with its source location; terminate unless abort=false.
inline void gpuAssert(cudaError_t code, const char *file, int line, bool abort=true)
{
    if (code == cudaSuccess)
        return;
    fprintf(stderr,"CUDA assert: %s %s %d\n", cudaGetErrorString(code), file, line);
    if (abort)
        exit(code);
}
// Fill mem[0..num_elements-1] with pseudo-random non-negative doubles
// (raw rand() values cast to double).
void h_init_value(double* mem, const unsigned int num_elements) {
    for (unsigned int i = 0; i < num_elements; ++i)
        mem[i] = static_cast<double>(rand());
}
// Verify C[i] == alpha*A[i] + B[i] for every element and print one pass/fail
// line. NOTE(review): exact float equality holds only if the device evaluated
// the expression in the same order without FMA contraction — a tolerance-based
// compare would be more robust; preserved as-is.
void h_test (const double* A, const double* B, const double* C, const double alpha, const unsigned int num_elements) {
    bool ok = true;
    for (unsigned int i = 0; i < num_elements && ok; ++i)
        ok = (alpha * A[i] + B[i] == C[i]);
    printf(ok ? "[TEST] Test passed\n" : "[TEST] Test failed\n");
}
// Driver: allocates three 3e8-element vectors, runs C = alpha*A + B on the
// GPU (alpha from argv[1], default 1.0), times the kernel, and verifies.
int main(int argc, char** argv) {
    printf("\n");
    printf("=========================================================================\n");
    // Fix: "[BLAS}" brace typo in the banner.
    printf("[BLAS] axpy implementation \n");
    printf("=========================================================================\n");
    printf("\n");
    /*** Configuration ***/
    unsigned int num_elements = 3e+8;
    size_t size = num_elements*sizeof(double);
    timeval stime, etime;
    double alpha = 1.0;
    if (argc == 2)
        alpha = atof(argv[1]);
    /*** Mem allocation ***/
    double *h_A=nullptr, *h_B=nullptr, *h_C=nullptr; // host(CPU)-side variable
    double *d_A=nullptr, *d_B=nullptr, *d_C=nullptr; // device(GPU)-side variable
    h_A = (double*) malloc (size);
    h_B = (double*) malloc (size);
    h_C = (double*) malloc (size);
    // Fix: the original never checked these multi-GB host allocations.
    if (h_A == nullptr || h_B == nullptr || h_C == nullptr) {
        fprintf(stderr, "host malloc failed\n");
        return 1;
    }
    cudaErrChk ( cudaMalloc ((void**)&d_A, size) );
    cudaErrChk ( cudaMalloc ((void**)&d_B, size) );
    cudaErrChk ( cudaMalloc ((void**)&d_C, size) );
    cudaErrChk ( cudaMemcpyToSymbol (d_alpha, &alpha, sizeof(double)) );
    // Fix: "doulbe" typo in the report line.
    printf("[mem] Allocated : 3 double precision vectors[%u-D]. %.2fGB for each devices [CPU, GPU]\n"
            , num_elements, 3*(double)size/1024/1024/1024);
    /*** Program init ***/
    h_init_value (h_A, num_elements);
    h_init_value (h_B, num_elements);
    cudaErrChk ( cudaMemcpy (d_A, h_A, size, cudaMemcpyHostToDevice) );
    cudaErrChk ( cudaMemcpy (d_B, h_B, size, cudaMemcpyHostToDevice) );
    /*** Launch a kernel ***/
    unsigned int num_threads = 1024;
    unsigned int num_blocks = (num_elements + (num_threads-1))/num_threads;  // ceil-div
    printf("[kernel] <%u, %u>-size grid launched\n"
            , num_blocks, num_threads);
    gettimeofday(&stime, NULL);
    axpy<<<num_blocks, num_threads>>>(d_A, d_B, d_C, num_elements);
    // Fix: check for launch-configuration errors before synchronizing, and
    // terminate the statement (the original omitted its semicolon).
    cudaErrChk ( cudaGetLastError () );
    cudaErrChk ( cudaDeviceSynchronize () );
    gettimeofday(&etime, NULL);
    // Fix: microseconds scale by 1e-6; the original used 10e-6 (= 1e-5),
    // inflating the sub-second part of the elapsed time tenfold.
    printf("[kernel] Elapsed time: %.4f\n"
            , ((etime.tv_sec-stime.tv_sec)+(etime.tv_usec-stime.tv_usec)*1e-6) );
    /*** Test computed result ***/
    cudaErrChk ( cudaMemcpy (h_C, d_C, size, cudaMemcpyDeviceToHost) );
    h_test (h_A, h_B, h_C, alpha, num_elements);
    /*** Finalize ***/
    free (h_A);
    free (h_B);
    free (h_C);
    cudaErrChk (cudaFree (d_A));
    cudaErrChk (cudaFree (d_B));
    cudaErrChk (cudaFree (d_C));
    return 0;
}
|
20,009 |
// Increment one element in each of three non-aliasing float buffers
// (__restrict__ promises the slices do not overlap); one thread per element.
__global__ void f2d3(float * __restrict__ ptr1, float * __restrict__ ptr2, float * __restrict__ ptr3) {
    const unsigned int t = threadIdx.x;
    ptr1[t] += 1;
    ptr2[t] += 1;
    ptr3[t] += 1;
}
// __global__ void f2(float * __restrict__ ptr1, float * __restrict__ ptr2, float * __restrict__ ptr3, float * __restrict__ ptr4) {
// ptr1[threadIdx.x] += 1;
// ptr2[threadIdx.x] += 1;
// ptr3[threadIdx.x] += 1;
// ptr4[threadIdx.x] += 1;
// return;
// }
int main(int argc, char **argv)
{
    // Allocate one 96-float buffer, zero it, and let the kernel bump three
    // disjoint 32-element slices of it (one per __restrict__ argument).
    float *buf;
    // (earlier 128-float / four-slice experiment kept below for reference)
    // cudaMalloc(&buf, sizeof(float)*128);
    // cudaMemset(buf, 0, sizeof(float)*128);
    cudaMalloc(&buf, sizeof(float) * 96);
    cudaMemset(buf, 0, sizeof(float) * 96);
    f2d3<<<1, 32>>>(buf, buf + 32, buf + 64);
    // f2<<<1,32>>>(buf, buf+32, buf+64, buf+96);
    cudaFree(buf);
    return 0;
}
|
20,010 | /*
* @Author: grantmcgovern
* @Date: 2015-10-28 12:52:26
* @Last Modified by: grantmcgovern
* @Last Modified time: 2015-11-04 16:31:11
*/
#include <string.h>
#include <curand.h>
#include <curand_kernel.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <unistd.h>
#define N 9999 // number of bodies
#define WIDTH 7
#define MASS 0 // row in array for mass
#define X_POS 1 // row in array for x position
#define Y_POS 2 // row in array for y position
#define Z_POS 3 // row in array for z position
#define X_VEL 4 // row in array for x velocity
#define Y_VEL 5 // row in array for y velocity
#define Z_VEL 6 // row in array for z velocity
#define G 10 // "gravitational constant" (not really)
#define MU 0.001 // "frictional coefficient"
#define BOXL 100.0 // periodic boundary box length
#define dt 0.05
#define THREADS 768
/**
* [cuda_memory_check description]
*
* Useful when debugging memory issues.
*/
/**
 * Print the most recent CUDA error, if any. Useful when debugging memory
 * issues; note cudaGetLastError also clears the sticky error state.
 */
void cuda_memory_check() {
	cudaError_t error = cudaGetLastError();
	if (error == cudaSuccess)
		return;
	printf("CUDA -error %s (%d)\n", cudaGetErrorString(error), error);
}
/**
* [check_command_line_args description]
* @param argc [description]
* @param argv [description]
*/
/**
 * Validate the command line: exactly one argument (the timestep count) is
 * required and it must not be negative. Exits the process on failure.
 */
void check_command_line_args(int argc, char *argv[] ){
	if (argc != 2) {
		fprintf(stderr, "Format: %s { number of timesteps }\n", argv[0]);
		exit(1);
	}
	// Reject negative numeric arguments.
	for (int i = 1; i < argc; i++) {
		if (atoi(&argv[i][0]) < 0) {
			printf("Invalid Argument: Negative Number given\n");
			exit(1);
		}
	}
}
/**
* [n_body description]
* @param dev_body [description]
* @param states [description]
* @param time_random_seed [description]
*/
// One timestep of the N-body simulation: each thread owns one body, sums
// gravitational + random frictional forces from all other bodies, then
// integrates velocity and position with periodic boundary wrapping.
__global__ void n_body(float *dev_body, curandState_t *states, int time_random_seed) {
// Get the global thread Id
int globalId = blockIdx.x * blockDim.x + threadIdx.x;
// Bug fix: the grid is launched with more threads than bodies. The original
// guarded only the force loop (and with an off-by-one `<= N`), then let EVERY
// thread run the update section below, writing past the end of dev_body.
if (globalId >= N) return;
// Accumulated force components on this body
float Fx_dir = 0.0;
float Fy_dir = 0.0;
float Fz_dir = 0.0;
// Initialize curand() for this thread's friction noise
curand_init(time_random_seed, globalId, 0, &states[globalId]);
// all other bodies
for (int i = 0; i < N; i++) {
if (i == globalId) continue;
// position difference between body i and this body, per axis
float x_diff = dev_body[i * WIDTH + X_POS] - dev_body[globalId * WIDTH + X_POS];
float y_diff = dev_body[i * WIDTH + Y_POS] - dev_body[globalId * WIDTH + Y_POS];
float z_diff = dev_body[i * WIDTH + Z_POS] - dev_body[globalId * WIDTH + Z_POS];
// minimum-image convention for the periodic box
if (x_diff < -BOXL * 0.5) x_diff += BOXL;
if (x_diff >= BOXL * 0.5) x_diff -= BOXL;
if (y_diff < -BOXL * 0.5) y_diff += BOXL;
if (y_diff >= BOXL * 0.5) y_diff -= BOXL;
if (z_diff < -BOXL * 0.5) z_diff += BOXL;
if (z_diff >= BOXL * 0.5) z_diff -= BOXL;
// squared distance and distance
float rr = (x_diff * x_diff + y_diff * y_diff + z_diff * z_diff);
float r = sqrt(rr);
if (r > 2.0) {
// gravity plus random friction in [-MU/2, MU/2)
float Fg = G * dev_body[i * WIDTH + MASS] * dev_body[globalId * WIDTH + MASS] / rr;
float Fr = MU * (curand_uniform(&states[globalId]) - 0.5);
float F = Fg + Fr;
// resolve along each axis and accumulate
Fx_dir += F * x_diff / r;
Fy_dir += F * y_diff / r;
Fz_dir += F * z_diff / r;
}
else {
// if too close, weak anti-gravitational force
float F = G * 0.01 * 0.01 / rr;
Fx_dir -= F * x_diff / r;
Fy_dir -= F * y_diff / r;
Fz_dir -= F * z_diff / r;
}
}
// update velocities (F = ma  =>  dv = F*dt/m)
dev_body[globalId * WIDTH + X_VEL] += Fx_dir * dt / dev_body[globalId * WIDTH + MASS];
dev_body[globalId * WIDTH + Y_VEL] += Fy_dir * dt / dev_body[globalId * WIDTH + MASS];
dev_body[globalId * WIDTH + Z_VEL] += Fz_dir * dt / dev_body[globalId * WIDTH + MASS];
// wrap velocities into [-BOXL/2, BOXL/2) as in the original
if (dev_body[globalId * WIDTH + X_VEL] < -BOXL * 0.5) dev_body[globalId * WIDTH + X_VEL] += BOXL;
if (dev_body[globalId * WIDTH + X_VEL] >= BOXL * 0.5) dev_body[globalId * WIDTH + X_VEL] -= BOXL;
if (dev_body[globalId * WIDTH + Y_VEL] < -BOXL * 0.5) dev_body[globalId * WIDTH + Y_VEL] += BOXL;
if (dev_body[globalId * WIDTH + Y_VEL] >= BOXL * 0.5) dev_body[globalId * WIDTH + Y_VEL] -= BOXL;
if (dev_body[globalId * WIDTH + Z_VEL] < -BOXL * 0.5) dev_body[globalId * WIDTH + Z_VEL] += BOXL;
if (dev_body[globalId * WIDTH + Z_VEL] >= BOXL * 0.5) dev_body[globalId * WIDTH + Z_VEL] -= BOXL;
// update positions
dev_body[globalId * WIDTH + X_POS] += dev_body[globalId * WIDTH + X_VEL] * dt;
dev_body[globalId * WIDTH + Y_POS] += dev_body[globalId * WIDTH + Y_VEL] * dt;
dev_body[globalId * WIDTH + Z_POS] += dev_body[globalId * WIDTH + Z_VEL] * dt;
// periodic boundary conditions.
// Bug fix: the original re-wrapped the VELOCITIES here (copy-paste error),
// so positions were never confined to the box. Wrap the positions instead.
if (dev_body[globalId * WIDTH + X_POS] < -BOXL * 0.5) dev_body[globalId * WIDTH + X_POS] += BOXL;
if (dev_body[globalId * WIDTH + X_POS] >= BOXL * 0.5) dev_body[globalId * WIDTH + X_POS] -= BOXL;
if (dev_body[globalId * WIDTH + Y_POS] < -BOXL * 0.5) dev_body[globalId * WIDTH + Y_POS] += BOXL;
if (dev_body[globalId * WIDTH + Y_POS] >= BOXL * 0.5) dev_body[globalId * WIDTH + Y_POS] -= BOXL;
if (dev_body[globalId * WIDTH + Z_POS] < -BOXL * 0.5) dev_body[globalId * WIDTH + Z_POS] += BOXL;
if (dev_body[globalId * WIDTH + Z_POS] >= BOXL * 0.5) dev_body[globalId * WIDTH + Z_POS] -= BOXL;
}
// void initialize_body_array(float *body) {
// int i = 0;
// for( ; i < N; i++) {
// body[i][MASS] = 0.001;
// body[i][X_VEL] = drand48();
// body[i][Y_VEL] = drand48();
// body[i][Z_VEL] = drand48();
// body[i][X_POS] = drand48();
// body[i][Y_POS] = drand48();
// body[i][Z_POS] = drand48();
// }
// }
/**
* MAIN
* @param argc [description]
* @param argv [description]
* @return [description]
*/
// Driver: initializes N bodies with random state, then runs tmax timesteps of
// the n_body kernel, printing positions in PDB format after each step.
int main(int argc, char **argv) {
// Check if arguments are good (exits on failure)
check_command_line_args(argc, argv);
// Total number of timesteps
int tmax = atoi(argv[1]);
// Bytes copied per transfer (N bodies x WIDTH floats)
int size = N * 7 * sizeof(float);
// Seed time against system clock
srand48(time(NULL));
// Body array (padded to 10000 rows, matching the original allocation)
float *body = (float *)malloc(10000 * 7 * sizeof(float));
float *dev_body;
// Allocate memory on GPU
cudaMalloc((void**) &dev_body, 10000 * 7 * sizeof(float));
// Bug fix: the original cudaMalloc'd a fresh curandState_t block INSIDE the
// timestep loop and never freed it — a device-memory leak that grew every
// iteration. Allocate once, reuse, free at the end.
curandState_t *states;
cudaMalloc((void**) &states, N * sizeof(curandState_t));
// Initialize body array
int i = 0;
for(i = 0; i < N; i++) {
body[i*WIDTH+MASS] = 0.001;
body[i*WIDTH+X_VEL] = drand48();
body[i*WIDTH+Y_VEL] = drand48();
body[i*WIDTH+Z_VEL] = drand48();
body[i*WIDTH+X_POS] = drand48();
body[i*WIDTH+Y_POS] = drand48();
body[i*WIDTH+Z_POS] = drand48();
}
printf("MODEL %8d\n", 0);
for (i = 0; i < N; i++) {
printf("%s%7d %s %s %s%4d %8.3f%8.3f%8.3f %4.2f %4.3f\n",
"ATOM", i + 1, "CA ", "GLY", "A", i + 1, body[i*WIDTH+X_POS], body[i*WIDTH+Y_POS], body[i*WIDTH+Z_POS], 1.00, 0.00);
}
printf("TER\nENDMDL\n");
for (int t = 0; t < tmax; t++) {
// Copy variables to kernel
cudaMemcpy(dev_body, body, size, cudaMemcpyHostToDevice);
// Call kernel
n_body<<< (int)ceil(N / THREADS) + 1, THREADS >>>(dev_body, states, time(NULL));
// Bug fix: cudaThreadSynchronize() is deprecated; use the modern call.
cudaDeviceSynchronize();
// Copy memory back
cudaMemcpy(body, dev_body, size, cudaMemcpyDeviceToHost);
// positions in PDB format
printf("MODEL %8d\n", t + 1);
for (i = 0; i < N; i++) {
printf("%s%7d %s %s %s%4d %8.3f%8.3f%8.3f %4.2f %4.3f\n",
"ATOM", i + 1, "CA ", "GLY", "A", i + 1, body[i*WIDTH+X_POS], body[i*WIDTH+Y_POS], body[i*WIDTH+Z_POS], 1.00, 0.00);
}
printf("TER\nENDMDL\n");
}
// Bug fix: release device and host memory (the original leaked everything).
cudaFree(states);
cudaFree(dev_body);
free(body);
return 0;
} |
20,011 | #include <iostream>
#include <fstream>
#include <stdlib.h>
using namespace std;
// get a uniform random number between -1 and 1
// Uniform random float in [-1, 1]: scale rand()/RAND_MAX from [0,1] to [-1,1].
inline float f_rand() {
    const float unit = rand() / static_cast<float>(RAND_MAX);
    return 2 * unit - 1.f;
}
// Write nVec binary records of two Reals each to outFile:
//   z1 = t + noise,  z2 = t^3 + noise,
// where t is uniform in (-2.2, 2.2) (xRange * f_rand()) and the noise
// amplitude is xVar.
template <typename Real>
void genData(ofstream &outFile, int nVec, Real xVar)
{
    const Real xMax = 1.1;
    const Real xMin = -xMax;
    const Real xRange = xMax - xMin;
    for (int i = 0; i < nVec; ++i) {
        const Real t = xRange * f_rand();
        Real z1 = t + xVar * f_rand();
        Real z2 = t * t * t + xVar * f_rand();
        outFile.write(reinterpret_cast<char*>(&z1), sizeof(Real));
        outFile.write(reinterpret_cast<char*>(&z2), sizeof(Real));
    }
}
int main(int argc, char *argv[])
{
    // Expect an output filename and a record count on the command line.
    if (argc < 3) {
        fprintf(stderr,"Use: filename nVec\n");
        exit(1);
    }
    ofstream outFile(argv[1], ios::out | ios::binary);
    const int nVec = atoi(argv[2]);
    // Precision is chosen at compile time via USE_DBL.
#ifdef USE_DBL
    genData<double>(outFile, nVec, 0.1);
#else
    genData<float>(outFile, nVec, 0.1);
#endif
    outFile.close();
    return 0;
}
|
20,012 | /*
• Implement a matrix multiplication using a GPU to solve the operation instead of a CPU. Create the matrices in the CPU pass them to the GPU calculate the answer and show the answer in the console in an ordered way.
*/
#include "cuda_runtime.h"
#include <stdio.h>
#include <stdlib.h> // rand(), srand()
#include <time.h>
// m3 = m1 * m2 for square dim x dim row-major matrices. The launch is 1-D:
// each thread's flat id is decomposed into (row, col) of one output element.
__global__ void gpuMatrix(int *m1, int *m2, int *m3, int dim) {
	const int tid = threadIdx.x + blockIdx.x * blockDim.x;
	const int row = tid / dim;
	const int col = tid % dim;
	if (row >= dim || col >= dim)
		return;
	int acc = 0;
	for (int k = 0; k < dim; ++k)
		acc += m1[row * dim + k] * m2[k * dim + col];
	m3[tid] = acc;
}
// void cpuMatrix(){
// int m1[10][10], m2[10][10], sum[10][10];
// int m = 10;
// int n = 10;
//
// int (*a)[m][n];
// int (*b)[m][n];
//
// printf("First matrix\n");
// for (int i = 0; i < m; i++) {
// for (int j = 0; j < n; j++) {
// m1[m][n]= (rand() % 9) + 1;
// printf("%d ", m1[m][n]);
//
// }
// printf("\n");
// }
//
// printf("\nSecond matrix\n");
// for (int i = 0; i < m; i++) {
// for (int j = 0; j < n; j++) {
// m2[m][n]= (rand() % 9) + 1;
// printf("%d ", m2[m][n]);
//
// }
// printf("\n");
// }
//
// a = &m1;
// b = &m2;
//
// }
// void print_matrix(int *m, int n){
// for(int i = 0; i < n; i++){
// printf("%d\t", m[i]);
// if (i % n == n-1)
// {
// printf("\n");
// }
// }
//
// printf("\n");
// }
// Fill the n-element buffer with pseudo-random values in [1, 9].
void fillMatrixGpu(int *m, int n){
	for (int i = 0; i < n; ++i)
		m[i] = 1 + rand() % 9;
}
// Driver: builds two random dim x dim matrices on the host, multiplies them on
// the GPU, and prints both factors, the product, and the elapsed time.
int main(){
	int dim = 4;
	int threadsPerBlock = 4;
	int *m1, *m2, *m3;
	int *d_m1, *d_m2, *d_m3;
	int size = dim*dim*sizeof(int);
	// allocate dev
	cudaMalloc((void**)&d_m1, size);
	cudaMalloc((void**)&d_m2, size);
	cudaMalloc((void**)&d_m3, size);
	// allocate host
	m1 = (int*)malloc(size);
	m2 = (int*)malloc(size);
	m3 = (int*)malloc(size);
	// run clock()
	clock_t timeOnGpu = clock();
	//fillMatrix
	fillMatrixGpu(m1, dim*dim);
	fillMatrixGpu(m2, dim*dim);
	// Bug fix: the original printed with m[i + j*dim] (column-major) while the
	// kernel multiplies row-major, so the displayed factors and product were
	// mutually inconsistent (A^T and B^T next to (A*B)^T). Print row-major.
	for(int i = 0; i < dim; i++){
		for(int j = 0; j < dim; j++){
			printf("%d\t", m1[i * dim + j]);
		}
		printf("\n");
	}
	printf("\n");
	for(int i = 0; i < dim; i++){
		for(int j = 0; j < dim; j++){
			printf("%d\t", m2[i * dim + j]);
		}
		printf("\n");
	}
	printf("\n");
	//host to device
	cudaMemcpy(d_m1, m1, size, cudaMemcpyHostToDevice);
	cudaMemcpy(d_m2, m2, size, cudaMemcpyHostToDevice);
	// Kernel function.
	// Bug fix: ceil-div grid size — the original's dim*dim/threadsPerBlock
	// truncates and under-provisions threads whenever threadsPerBlock does not
	// divide dim*dim exactly.
	int blocks = (dim*dim + threadsPerBlock - 1) / threadsPerBlock;
	gpuMatrix<<<blocks, threadsPerBlock>>> (d_m1, d_m2, d_m3, dim);
	// Fix: surface launch errors instead of silently printing stale memory.
	cudaError_t err = cudaGetLastError();
	if (err != cudaSuccess) {
		printf("kernel launch failed: %s\n", cudaGetErrorString(err));
		return 1;
	}
	//device to host (synchronizes with the kernel)
	cudaMemcpy(m3, d_m3, size, cudaMemcpyDeviceToHost);
	//print result and time
	for(int i = 0; i < dim; i++){
		for(int j = 0; j < dim; j++){
			printf("%d\t", m3[i * dim + j]);
		}
		printf("\n");
	}
	printf("time on GPU %f \n", ((double)clock() - timeOnGpu)/CLOCKS_PER_SEC);
	//free memory
	cudaFree(d_m1);
	cudaFree(d_m2);
	cudaFree(d_m3);
	free(m1);
	free(m2);
	free(m3);
	return 0;
}
|
20,013 | #include <iostream>
#include <fstream>
#include <stdlib.h>
#include <math.h>
#include <vector>
#include <time.h>
#define ancho 1280
#define alto 720
#define totalPixeles ancho*alto
#define totalhilos 32
typedef int tamPixel;
using namespace std;
// Fill the pixel vector with pseudo-random values in [0, 255].
void llenarVectores(tamPixel *V){
    // Bug fix: the original called srand(time(NULL)) on EVERY iteration;
    // within the same second that reseeds with the same value, so rand()
    // returned the same number and the whole vector was one constant.
    // Seed exactly once per process instead.
    static bool seeded = false;
    if (!seeded) {
        srand(time(NULL));
        seeded = true;
    }
    for (int i = 0; i < totalPixeles; i++)
        V[i] = rand() % 256;
}
// In-place RGB -> YCoCg conversion: after the call RY holds Y, GCg holds Cg,
// BCo holds Co. One thread per pixel on a 2-D grid.
__global__ void convertirRGBtoYCoCg(tamPixel *RY,tamPixel *GCg, tamPixel *BCo, int height, int width){
    int Row = blockIdx.y*blockDim.y + threadIdx.y;
    int Col = blockIdx.x*blockDim.x + threadIdx.x;
    int index = Row*width+Col;
    if ((Row < height) && (Col < width)) {
        // Bug fixes vs. original:
        //  1. (1/4), (1/2), (-1/4) are INTEGER divisions that evaluate to 0,
        //     so every output channel was zeroed.
        //  2. RY[index] was overwritten with Y before being read to compute
        //     Cg and Co, corrupting the transform even if 1. were fixed.
        int R = RY[index];
        int G = GCg[index];
        int B = BCo[index];
        RY[index]  = (R + 2 * G + B) / 4;    // Y  =  R/4 + G/2 + B/4
        GCg[index] = (-R + 2 * G - B) / 4;   // Cg = -R/4 + G/2 - B/4
        BCo[index] = (R - B) / 2;            // Co =  R/2 - B/2
    }
}
// Element-wise sum: BCo[p] = RY[p] + GCg[p] for every in-range pixel p of an
// (height x width) image; one thread per pixel on a 2-D grid.
__global__ void suma(tamPixel *RY,tamPixel *GCg, tamPixel *BCo, int height, int width){
    const int fila    = blockIdx.y*blockDim.y + threadIdx.y;
    const int columna = blockIdx.x*blockDim.x + threadIdx.x;
    if (fila >= height || columna >= width)
        return;
    const int p = fila * width + columna;
    BCo[p] = RY[p] + GCg[p];
}
// Driver: fills three host channel buffers with random pixels, converts them
// to YCoCg on the GPU, and copies the result back.
int main(){
    tamPixel *R,*G,*B;           // host channel buffers
    tamPixel *RY, *GCg, *BCo;    // device channel buffers (converted in place)
    int size = totalPixeles * sizeof(tamPixel);
    R = (tamPixel *)malloc(size);
    G = (tamPixel *)malloc(size);
    B = (tamPixel *)malloc(size);
    llenarVectores(R);
    llenarVectores(G);
    llenarVectores(B);
    cudaMalloc((void **)&RY, size);
    cudaMalloc((void **)&GCg, size);
    cudaMalloc((void **)&BCo, size);
    cudaMemcpy(RY,R,size,cudaMemcpyHostToDevice);
    cudaMemcpy(GCg,G,size,cudaMemcpyHostToDevice);
    cudaMemcpy(BCo,B,size,cudaMemcpyHostToDevice);
    dim3 DimGrid(((ancho-1)/totalhilos)+1, ((alto-1)/totalhilos)+1, 1);  // ceil-div grid
    dim3 DimBlock(totalhilos, totalhilos, 1);
    convertirRGBtoYCoCg<<<DimGrid,DimBlock>>>(RY,GCg,BCo, alto, ancho);
    // Fixes vs. original: surface launch/execution errors, copy the result
    // back to the host, and release all memory (none of this was done).
    cudaError_t err = cudaGetLastError();
    if (err == cudaSuccess) err = cudaDeviceSynchronize();
    if (err != cudaSuccess) {
        cout << "CUDA error: " << cudaGetErrorString(err) << endl;
        return 1;
    }
    cudaMemcpy(R,RY,size,cudaMemcpyDeviceToHost);
    cudaMemcpy(G,GCg,size,cudaMemcpyDeviceToHost);
    cudaMemcpy(B,BCo,size,cudaMemcpyDeviceToHost);
    cudaFree(RY); cudaFree(GCg); cudaFree(BCo);
    free(R); free(G); free(B);
    cout<<"todo bn"<<endl;
}
|
20,014 | /*--------------------------------------------------------------------*/
/* CUDA special utility Library */
/* written by Viktor K. Decyk, UCLA */
#include <stdlib.h>
#include <stdio.h>
#include "cuda.h"
static cudaError_t crc;
/*--------------------------------------------------------------------*/
extern "C" void gpu_fallocate(float **g_f, int nsize, int *irc) {
/* allocate nsize floats of GPU global memory; store the device pointer in
   *g_f and set *irc = 1 on failure (an error message is also printed) */
   void *devmem;
   crc = cudaMalloc(&devmem,sizeof(float)*nsize);
   if (crc) {
      printf("cudaMalloc float Error=%d:%s,l=%d\n",crc,
             cudaGetErrorString(crc),nsize);
      *irc = 1;
   }
   *g_f = (float *)devmem;
}
/*--------------------------------------------------------------------*/
extern "C" void gpu_iallocate(int **g_i, int nsize, int *irc) {
/* allocate nsize ints of GPU global memory; store the device pointer in
   *g_i and set *irc = 1 on failure (an error message is also printed) */
   void *devmem;
   crc = cudaMalloc(&devmem,sizeof(int)*nsize);
   if (crc) {
      printf("cudaMalloc int Error=%d:%s,l=%d\n",crc,
             cudaGetErrorString(crc),nsize);
      *irc = 1;
   }
   *g_i = (int *)devmem;
}
/*--------------------------------------------------------------------*/
extern "C" void gpu_callocate(float2 **g_c, int nsize, int *irc) {
/* allocate nsize float2 elements of GPU global memory; store the device
   pointer in *g_c and set *irc = 1 on failure (message also printed) */
   void *devmem;
   crc = cudaMalloc(&devmem,sizeof(float2)*nsize);
   if (crc) {
      printf("cudaMalloc float2 Error=%d:%s,l=%d\n",crc,
             cudaGetErrorString(crc),nsize);
      *irc = 1;
   }
   *g_c = (float2 *)devmem;
}
/*--------------------------------------------------------------------*/
extern "C" void gpu_deallocate(void *g_d, int *irc) {
/* free GPU global memory; set *irc = 1 on failure (message also printed) */
   crc = cudaFree(g_d);
   if (crc) {
      printf("cudaFree Error=%d:%s\n",crc,cudaGetErrorString(crc));
      *irc = 1;
   }
}
/* Interfaces to Fortran */
/*--------------------------------------------------------------------*/
extern "C" void gpu_fallocate_(unsigned long *gp_f, int *nsize,
int *irc) {
/* allocate global float memory on GPU, return pointer to Fortran */
/* fix: cast the pointer through unsigned long (the pointee type of gp_f);
   the original cast to signed long, which can misrepresent addresses in the
   upper half of the address space */
   float *fptr;
   gpu_fallocate(&fptr,*nsize,irc);
   *gp_f = (unsigned long )fptr;
   return;
}
/*--------------------------------------------------------------------*/
extern "C" void gpu_iallocate_(unsigned long *gp_i, int *nsize,
int *irc) {
/* allocate global integer memory on GPU, return pointer to Fortran */
/* fix: cast through unsigned long (matches gp_i), not signed long */
   int *iptr;
   gpu_iallocate(&iptr,*nsize,irc);
   *gp_i = (unsigned long )iptr;
   return;
}
/*--------------------------------------------------------------------*/
extern "C" void gpu_callocate_(unsigned long *gp_f, int *nsize,
int *irc) {
/* allocate global float2 memory on GPU, return pointer */
/* to Fortran */
/* fix: cast through unsigned long (matches gp_f), not signed long */
   float2 *fptr;
   gpu_callocate(&fptr,*nsize,irc);
   *gp_f = (unsigned long )fptr;
   return;
}
/*--------------------------------------------------------------------*/
extern "C" void gpu_deallocate_(unsigned long *gp_d, int *irc) {
/* Fortran wrapper: free the GPU memory whose address is stored (as an
   integer) in *gp_d, then zero the handle so it cannot be reused. */
   gpu_deallocate((void *)*gp_d, irc);
   *gp_d = 0;
}
|
20,015 | #include <iostream>
#include <cstdlib>
#include <cuda.h>
#include <ctime>
#include <sys/time.h>
using namespace std;
/*the kernel code to run on the GPU device */
__global__
/* C = A * B for square M x M row-major matrices; each thread computes one
   element of C. block_size is the (square) tile edge used to map the 2-D
   grid onto matrix coordinates. */
void matrix_mult_kernel(float* A, float* B, float* C, int M, int block_size){
    const int fila = blockIdx.y * block_size + threadIdx.y;    // output row
    const int col  = blockIdx.x * block_size + threadIdx.x;    // output column
    if (fila >= M || col >= M)
        return;
    float acc = 0;
    for (int k = 0; k < M; ++k)
        acc += A[fila * M + k] * B[k * M + col];
    C[fila * M + col] = acc;
}
// Driver: multiplies two 4096x4096 matrices on the GPU with a block size
// taken from argv[1], times the kernel with CUDA events, and prints an
// 8x8 window of both the expected and computed results.
int main(int argc, char* argv[]){
	int M = 4096;
	// Fix: the original dereferenced argv[1] without checking argc.
	if (argc < 2) {
		cout << "usage: " << argv[0] << " <block_size>" << endl;
		return 1;
	}
	int B = atoi(argv[1]); //block size
	/*allocate matrixes A, B, C in host memory*/
	float* ahptr = (float*)malloc(sizeof(float)* M * M);
	float* bhptr = (float*)malloc(sizeof(float)* M * M);
	float* chptr = (float*)malloc(sizeof(float)* M * M);
	float* dhptr = (float*)malloc(sizeof(float)* M * M);
	/* initialize matrices a, b in host memory; d holds the expected product */
	for(int i = 0; i < M; i++){
		for(int j = 0; j < M; j++){
			*(ahptr + i * M + j) = ((i+1)*(j+1))/(float)M;
			*(bhptr + i * M + j) = (float)(j+1)/(i+1);
			*(chptr +i * M + j) = 0;
			*(dhptr +i * M + j) = (i+1)*(j+1);
		}
	}
	//verify result
	cout<<"result verifier"<<endl;
	for(int w = 2044; w < 2052; w++){
		for(int s = 0; s < 8; s++){
			cout<< *(dhptr + w * M + s)<<" ";
		}
		cout<<endl;
	}
	cout<<" "<<endl;
	/*allocate memory on the device*/
	float* ad;
	float* bd;
	float* cd;
	cudaMalloc((void**)&ad,sizeof(float)* M * M);
	cudaMalloc((void**)&bd,sizeof(float)* M * M);
	cudaMalloc((void**)&cd,sizeof(float)* M* M);
	/* measuring the execution time*/
	cudaEvent_t start, stop;
	cudaEventCreate(&start);
	cudaEventCreate(&stop);
	/*copy matrices from host to device */
	cudaMemcpy(ad, ahptr, sizeof(float) * M * M, cudaMemcpyHostToDevice);
	cudaMemcpy(bd, bhptr, sizeof(float) * M * M, cudaMemcpyHostToDevice);
	/*invoking the kernel */
	int block_size = B;
	dim3 threadsPerBlock(block_size, block_size);
	int numblocks = M / block_size;
	dim3 blocksPerGrid(numblocks, numblocks);
	cudaEventRecord(start);
	matrix_mult_kernel<<<blocksPerGrid, threadsPerBlock>>>(ad, bd, cd, M, B);
	// Fix: record the stop event right after the kernel (the original recorded
	// it after the device-to-host copy, so the "kernel" time included the
	// 64MB transfer).
	cudaEventRecord(stop);
	cudaEventSynchronize(stop);
	/* copy result from device to host */
	cudaMemcpy(chptr, cd, sizeof(float) * M * M, cudaMemcpyDeviceToHost);
	float milliseconds = 0.0;
	cudaEventElapsedTime(&milliseconds, start, stop);
	cout<<"the parallel execution time for block size "<< B << " is "<< milliseconds << endl;
	/*print a section of the result to verify result*/
	cout<<" a section of the GPU result;"<<endl;
	for(int h = 2044; h < 2052; h++){
		for(int t = 0; t < 8; t++){
			cout<< *(chptr + h * M + t) <<" ";
		}
		cout<<endl;
	}
	cout<<" "<<endl;
	/* free device memory*/
	cudaFree(ad);
	cudaFree(bd);
	cudaFree(cd);
	cudaEventDestroy(start);
	cudaEventDestroy(stop);
	/*free host memory*/
	// Bug fix: these buffers came from malloc(), so the original's delete[]
	// was undefined behavior; release with free(), and free dhptr too (leaked).
	free(ahptr);
	free(bhptr);
	free(chptr);
	free(dhptr);
	return 0;
}
|
20,016 | #include <stdio.h>
#include <cuda.h>
// Minimal kernel: proves device-side execution by printing from the GPU.
__global__ void MyKernel() {
printf("in mykernel\n");
}
// Host callback enqueued via cudaStreamAddCallback; `data` carries the stream
// index as an integer.
// NOTE(review): the CUDA runtime explicitly FORBIDS making CUDA API calls
// (kernel launches, cudaDeviceSynchronize) from inside a stream callback.
// The launch below is expected to fail, and printing the resulting error
// appears to be the whole point of this experiment — confirm before "fixing".
void MyCallback(cudaStream_t stream, cudaError_t status, void *data){
printf("Inside callback %d\n", (long)data);
MyKernel<<<1, 1>>>();
cudaDeviceSynchronize();
cudaError_t err = cudaGetLastError();
printf("error=%d, %s, %s\n", err, cudaGetErrorName(err), cudaGetErrorString(err));
}
// Launch MyKernel twice on each of two streams with a host callback queued
// between the launches, synchronizing after each stream's work is queued.
// NOTE(review): MyCallback itself issues CUDA calls, which the runtime
// forbids — this program appears to probe that failure mode deliberately.
int main() {
cudaStream_t stream[2];
for (long i = 0; i < 2; ++i) {
cudaStreamCreate(&stream[i]);
//cudaMemcpyAsync(devPtrIn[i], hostPtr[i], size, cudaMemcpyHostToDevice, stream[i]);
MyKernel<<<1, 1, 0, stream[i]>>>();
//cudaMemcpyAsync(hostPtr[i], devPtrOut[i], size, cudaMemcpyDeviceToHost, stream[i]);
cudaStreamAddCallback(stream[i], MyCallback, (void*)i, 0);
MyKernel<<<1, 1, 0, stream[i]>>>();
cudaDeviceSynchronize();
}
}
|
20,017 | // input: 1 2 3 4 5 6
// ouput: 3 6 12 20 30
#include <iostream>
#include "cuda.h"
using namespace std;
#define N 64
// retArray[i] = Array[i] * Array[i+1] for i < N-1; the last element is copied
// through unchanged. Input is staged through dynamic shared memory.
__global__ void myKernel(int *retArray, int *Array)
{
	extern __shared__ int sharedArray[];
	const int threadsPerBlock = blockDim.x * blockDim.y;
	const int lid = blockDim.x * threadIdx.y + threadIdx.x;   // index within block
	const int tid = blockIdx.x * threadsPerBlock + lid;       // global index
	// Bug fix: the original indexed shared memory with the GLOBAL thread id,
	// so the last thread of every block but the final one multiplied by a
	// shared slot its own block never wrote — uninitialized data. Stage by
	// LOCAL index and fetch the cross-block neighbour from global memory.
	sharedArray[lid] = Array[tid];
	__syncthreads();
	if ( tid == N-1 )
		retArray[tid] = sharedArray[lid];
	else if ( lid == threadsPerBlock-1 )
		retArray[tid] = sharedArray[lid] * Array[tid+1];
	else
		retArray[tid] = sharedArray[lid] * sharedArray[lid+1];
}
// Driver: fills an N-element array with 0..N-1, runs myKernel over a
// 2-block grid of 4x8 threads, and prints input and output arrays.
int main()
{
	dim3 grid(2,1,1), block(4,8,1);
	int *Array_host = (int*)malloc(N*sizeof(int));
	for(int i=0; i<N; i++)
		Array_host[i] = i;
	int *Array_dev;
	cudaMalloc((void**)&Array_dev, N*sizeof(int));
	cudaMemcpy(Array_dev, Array_host, N*sizeof(int), cudaMemcpyHostToDevice);
	int *retArray_host = (int*)malloc(N*sizeof(int));
	int *retArray_dev;
	cudaMalloc((void**)&retArray_dev, N*sizeof(int));
	// third launch argument = dynamic shared memory bytes
	myKernel<<<grid, block, N*sizeof(int)>>>(retArray_dev,Array_dev);
	cudaDeviceSynchronize();
	cudaMemcpy(retArray_host,retArray_dev, N*sizeof(int), cudaMemcpyDeviceToHost);
	cout << "======[Array]=====" << endl;
	for(int i=0; i<N; i++)
		cout << "Array_host[" << i <<"] = " << Array_host[i] << endl;
	cout << "======[retArray]=====" << endl;
	for(int i=0; i<N; i++)
		cout << "retArray_host[" << i <<"] = " << retArray_host[i] << endl;
	// Bug fix: release device and host memory (the original leaked all four).
	cudaFree(Array_dev);
	cudaFree(retArray_dev);
	free(Array_host);
	free(retArray_host);
	return 0;
}
|
20,018 | #include <cuda.h>
#include <stdio.h>
#include <iostream>
#include <time.h>
using namespace std;
__host__
// Fill a[0..n-1] with random values in [1, 10].
void inizializzaArray(int *a,int n){
    // Bug fix: the original reseeded with time(NULL) on EVERY call, so the
    // two calls in main (within the same second) produced identical
    // sequences. Seed exactly once per process.
    static bool seminato = false;
    if (!seminato) {
        srand((unsigned int)time(NULL));
        seminato = true;
    }
    for (int i = 0; i < n; i++)
        a[i] = 1 + rand() % 10;
}
__host__
// CPU reference: outer product c (m x n, row-major) of column vector a (m)
// and row vector b (n): c[i][j] = a[i] * b[j].
void calcolaProdEsterno(int *a,int *b,int m,int n,int *c){
    for (int riga = 0; riga < m; ++riga) {
        const int av = a[riga];
        for (int col = 0; col < n; ++col)
            c[riga * n + col] = av * b[col];
    }
}
__host__
// Print an m x n row-major matrix, preceded by a separator line.
void stampaMatrice(int *a,int m,int n){
    cout<<"-----------"<<endl;
    for (int r = 0; r < m; ++r) {
        for (int c = 0; c < n; ++c)
            cout << a[r * n + c] << " ";
        cout << endl;
    }
}
__global__
// Outer product on the GPU: one thread per output element. colEffettive is
// the PADDED row length of c in elements — only c was allocated with
// cudaMallocPitch; the input vectors a and b are plain cudaMalloc buffers.
void calcolaProdEsternoGPU (int *a,int *b,int *c,int m,int n,int colEffettive){
    const int riga = threadIdx.x + blockDim.x * blockIdx.x;
    const int col  = threadIdx.y + blockDim.y * blockIdx.y;
    if (riga < m && col < n)
        c[riga * colEffettive + col] = a[riga] * b[col];
}
//programma per il calcolo di prodotto esterno tra 2 vettori
//input : m primo vettore colonna, n vettore riga, dimensioni blocco griglia (n righe threads e n colonne threads)
// Outer product of two vectors: computes a CPU reference, then the same
// product on the GPU (pitched output matrix), and prints both.
// Args: m, n, block rows, block cols — or defaults when not all four given.
int main(int argc,char *argv[]){
    int n,m;
    dim3 dimBlocco; //default constructor
    if(argc!=5){
        n=10;
        m=5;
        dimBlocco.x = 4;
        dimBlocco.y = 3;
    }
    else {
        sscanf(argv[1],"%d",&m);
        sscanf(argv[2],"%d",&n);
        // Bug fix: dim3 members are unsigned int, so scanning them with %d
        // was undefined behavior; use %u.
        sscanf(argv[3],"%u",&dimBlocco.x);
        sscanf(argv[4],"%u",&dimBlocco.y);
    }
    // ceil-div grid covering an m x n output
    dim3 dimGriglia;
    dimGriglia.x = ((m % dimBlocco.x ==0) ? 0 : 1) + m / dimBlocco.x;
    dimGriglia.y = ((n%dimBlocco.y == 0) ? 0 : 1) + n / dimBlocco.y;
    // host-side data
    int *h_a,*h_b, *h_c;
    h_a = (int *)malloc(m*sizeof(int));
    h_b = (int *)malloc(n*sizeof(int));
    h_c = (int *)malloc ( n * m * sizeof(int));
    inizializzaArray(h_a,m);
    inizializzaArray(h_b,n);
    calcolaProdEsterno(h_a,h_b,m,n,h_c);
    stampaMatrice(h_a,m,1);
    stampaMatrice(h_b,1,n);
    stampaMatrice(h_c,m,n);
    // device-side data: only the output matrix is pitch-allocated
    int *d_a,*d_b,*d_c;
    size_t pitch;
    cudaMalloc(&d_a,m*sizeof(int));
    cudaMalloc(&d_b,n*sizeof(int));
    cudaMallocPitch(&d_c,&pitch,n*sizeof(int),m);
    cudaMemcpy (d_a,h_a,m*sizeof(int),cudaMemcpyHostToDevice) ;
    cudaMemcpy(d_b,h_b,n*sizeof(int),cudaMemcpyHostToDevice);
    // launch; pitch is passed in ELEMENTS
    calcolaProdEsternoGPU<<<dimGriglia,dimBlocco>>>(d_a,d_b,d_c,m,n,pitch/sizeof(int));
    // copy back (cudaMemcpy2D un-pads the pitched rows) and print
    int *copia=(int *)malloc(n*m*sizeof(int));
    cudaMemcpy2D(copia,n*sizeof(int),d_c,pitch,n*sizeof(int),m,cudaMemcpyDeviceToHost);
    stampaMatrice(copia,m,n);
    // Bug fix: release host and device memory (the original leaked all of it).
    free(h_a); free(h_b); free(h_c); free(copia);
    cudaFree(d_a); cudaFree(d_b); cudaFree(d_c);
    return 0;
} |
20,019 | /************************************************************************************\
* *
* Copyright (c) 2014 Advanced Micro Devices, Inc.                                   *
* Copyright (c) 2015 Mark D. Hill and David A. Wood *
* All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following are met: *
* *
* You must reproduce the above copyright notice. *
* *
* Neither the name of the copyright holder nor the names of its contributors *
* may be used to endorse or promote products derived from this software *
* without specific, prior, written permission from at least the copyright holder. *
* *
* You must include the following terms in your license and/or other materials *
* provided with the software. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" *
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE *
* IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT, AND FITNESS FOR A *
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER *
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, *
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT *
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS *
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN *
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING *
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY *
* OF SUCH DAMAGE. *
* *
* Without limiting the foregoing, the software may implement third party *
* technologies for which you must obtain licenses from parties other than AMD. *
* You agree that AMD has not obtained or conveyed to you, and that you shall *
* be responsible for obtaining the rights to use and/or distribute the applicable *
* underlying intellectual property rights related to the third party technologies. *
* These third party technologies are not licensed hereunder. *
* *
* If you use the software (in whole or in part), you shall adhere to all *
* applicable U.S., European, and other export laws, including but not limited to *
* the U.S. Export Administration Regulations ("EAR") (15 C.F.R Sections 730-774), *
* and E.U. Council Regulation (EC) No 428/2009 of 5 May 2009. Further, pursuant *
* to Section 740.6 of the EAR, you hereby certify that, except pursuant to a *
* license granted by the United States Department of Commerce Bureau of Industry *
* and Security or as otherwise permitted pursuant to a License Exception under *
* the U.S. Export Administration Regulations ("EAR"), you will not (1) export, *
* re-export or release to a national of a country in Country Groups D:1, E:1 or *
* E:2 any restricted technology, software, or source code you receive hereunder, *
* or (2) export to Country Groups D:1, E:1 or E:2 the direct product of such *
* technology or software, if such foreign produced direct product is subject to *
* national security controls as identified on the Commerce Control List (currently *
* found in Supplement 1 to Part 774 of EAR). For the most current Country Group *
* listings, or for additional information about the EAR or your obligations under *
* those regulations, please refer to the U.S. Bureau of Industry and Security's *
* website at http://www.bis.doc.gov/. *
* *
\************************************************************************************/
#define BLOCK_SIZE 16
/**
* @brief floydwarshall -- diagonal block
* @param dist distance array
* @param blk_iter block iteration
* @param dim matrix dim
*/
__global__ void
floydwarshall_dia_block(int *dist, int blk_iter, int dim)
{
// Phase 1 of blocked Floyd-Warshall: relax the diagonal tile
// (blk_iter, blk_iter) entirely in shared memory.
// Launch with a single BLOCK_SIZE x BLOCK_SIZE block.
int tx = threadIdx.x;
int ty = threadIdx.y;
int base_x = blk_iter * BLOCK_SIZE;
int base_y = blk_iter * BLOCK_SIZE;
int base = base_y * dim + base_x;
__shared__ int dia_block[BLOCK_SIZE * BLOCK_SIZE];
// stage the tile: one element per thread
dia_block[ty * BLOCK_SIZE + tx] = dist[base + ty * dim + tx];
__syncthreads();
for (int k = 0; k < BLOCK_SIZE; k++) {
// relax via intermediate vertex k
if (dia_block[ty * BLOCK_SIZE + k] + dia_block[k * BLOCK_SIZE + tx] < dia_block[ty * BLOCK_SIZE + tx]) {
dia_block[ty * BLOCK_SIZE + tx] = dia_block[ty * BLOCK_SIZE + k] + dia_block[k * BLOCK_SIZE + tx];
}
// barrier so all threads see iteration k's updates before k+1
__syncthreads();
}
dist[base + ty * dim + tx] = dia_block[ty * BLOCK_SIZE + tx];
}
/**
* @brief floydwarshall -- a strip of blocks (x-dim)
* @param dist distance array
* @param blk_iter block iteration
* @param dim matrix dim
*/
__global__ void
floydwarshall_strip_blocks_x(int *dist, int blk_iter, int dim)
{
// Phase 2a: relax the row strip of tiles (blk_iter, bx) for all bx,
// skipping the diagonal tile handled by floydwarshall_dia_block.
// The if(bx != blk_iter) branch is uniform per block, so the
// __syncthreads() calls inside it are safe (no intra-block divergence).
int bx = blockIdx.x;
int tx = threadIdx.x;
int ty = threadIdx.y;
__shared__ int dia_block[BLOCK_SIZE * BLOCK_SIZE];
__shared__ int strip_block[BLOCK_SIZE * BLOCK_SIZE];
if (bx != blk_iter) {
int base_x = blk_iter * BLOCK_SIZE;
int base_y = blk_iter * BLOCK_SIZE;
int base = base_y * dim + base_x;
// stage the already-relaxed diagonal tile
dia_block[ty * BLOCK_SIZE + tx] = dist[base + ty * dim + tx];
__syncthreads();
int strip_base_y = blk_iter * BLOCK_SIZE;
int strip_base = strip_base_y * dim;
int index = strip_base + ty * dim + bx * BLOCK_SIZE + tx;
// stage this block's strip tile
strip_block[ty * BLOCK_SIZE + tx] = dist[index];
__syncthreads();
for (int k = 0; k < BLOCK_SIZE; k++) {
if (dia_block[ty * BLOCK_SIZE + k] + strip_block[k * BLOCK_SIZE + tx] < strip_block[ty * BLOCK_SIZE + tx]) {
strip_block[ty * BLOCK_SIZE + tx] = dia_block[ty * BLOCK_SIZE + k] + strip_block[k * BLOCK_SIZE + tx];
}
__syncthreads();
}
dist[index] = strip_block[ty * BLOCK_SIZE + tx];
}
}
/**
* @brief floydwarshall -- a strip of blocks (y-dim)
* @param dist distance array
* @param blk_iter block iteration
* @param dim matrix dim
*/
__global__ void
floydwarshall_strip_blocks_y(int *dist, int blk_iter, int dim)
{
// Phase 2b: relax the column strip of tiles (by, blk_iter) for all by,
// skipping the diagonal tile. The by != blk_iter branch is uniform per
// block, so the enclosed __syncthreads() calls are safe.
int by = blockIdx.y;
int tx = threadIdx.x;
int ty = threadIdx.y;
__shared__ int dia_block[BLOCK_SIZE * BLOCK_SIZE];
__shared__ int strip_block[BLOCK_SIZE * BLOCK_SIZE];
if (by != blk_iter) {
int base_x = blk_iter * BLOCK_SIZE;
int base_y = blk_iter * BLOCK_SIZE;
int base = base_y * dim + base_x;
// stage the already-relaxed diagonal tile
dia_block[ty * BLOCK_SIZE + tx] = dist[base + ty * dim + tx];
__syncthreads();
int strip_base_x = blk_iter * BLOCK_SIZE;
int strip_base = strip_base_x;
int index = strip_base + (by * BLOCK_SIZE + ty) * dim + tx;
strip_block[ty * BLOCK_SIZE + tx] = dist[index];
__syncthreads();
for (int k = 0; k < BLOCK_SIZE; k++) {
if (strip_block[ty * BLOCK_SIZE + k] + dia_block[k * BLOCK_SIZE + tx] < strip_block[ty * BLOCK_SIZE + tx])
strip_block[ty * BLOCK_SIZE + tx] = strip_block[ty * BLOCK_SIZE + k] + dia_block[k * BLOCK_SIZE + tx];
__syncthreads();
}
dist[index] = strip_block[ty * BLOCK_SIZE + tx];
}
}
/**
* @brief floydwarshall -- the remaining blocks
* @param dist distance array
* @param blk_iter block iteration
* @param dim matrix dim
*/
__global__ void
floydwarshall_remaining_blocks(int *dist, int blk_iter, int dim)
{
// Phase 3: relax every tile outside row/column blk_iter using the two
// strip tiles (by, blk_iter) and (blk_iter, bx) staged in shared memory.
// The guard is uniform per block, so the barriers inside it are safe.
int bx = blockIdx.x;
int by = blockIdx.y;
int tx = threadIdx.x;
int ty = threadIdx.y;
__shared__ int block_y_iter[BLOCK_SIZE * BLOCK_SIZE];
__shared__ int block_iter_x[BLOCK_SIZE * BLOCK_SIZE];
__shared__ int strip_block[BLOCK_SIZE * BLOCK_SIZE];
if (by != blk_iter && bx != blk_iter) {
// tile (by, blk_iter): rows of this block vs the pivot column
int base_Y_iter_y = by * BLOCK_SIZE;
int base_Y_iter_x = blk_iter * BLOCK_SIZE;
int base_Y = base_Y_iter_y * dim + base_Y_iter_x;
block_y_iter[ty * BLOCK_SIZE + tx] = dist[base_Y + ty * dim + tx];
__syncthreads();
// tile (blk_iter, bx): the pivot row vs columns of this block
int base_X_iter_y = blk_iter * BLOCK_SIZE;
int base_X_iter_x = bx * BLOCK_SIZE;
int base_X = base_X_iter_y * dim + base_X_iter_x;
block_iter_x[ty * BLOCK_SIZE + tx] = dist[base_X + ty * dim + tx];
__syncthreads();
int index = dim * BLOCK_SIZE * by + BLOCK_SIZE * bx + dim * ty + tx;
strip_block[ty * BLOCK_SIZE + tx] = dist[index];
__syncthreads();
for (int k = 0; k < BLOCK_SIZE; k++) {
if (block_y_iter[ty * BLOCK_SIZE + k] + block_iter_x[k * BLOCK_SIZE + tx] < strip_block[ty * BLOCK_SIZE + tx]) {
strip_block[ty * BLOCK_SIZE + tx] = block_y_iter[ty * BLOCK_SIZE + k] + block_iter_x[k * BLOCK_SIZE + tx];
}
__syncthreads();
}
dist[index] = strip_block[ty * BLOCK_SIZE + tx];
}
}
|
20,020 | #include "includes.h"
__global__ void gpu_update_sign(int *G, double *w ,int *neighbors , int k , int n ,int *temp, int *flag,int it_b ,int it_t)
{
// Each (block, thread) pair covers it_b x it_t positions of the n x n
// lattice. For each position (x, y) it accumulates the weighted k x k
// neighborhood sum, derives the new sign (+1 / -1, or unchanged inside the
// +-1e-6 dead band) and writes it to temp. `buf` counts how many positions
// changed relative to the stored center value G[...+12]; per-thread counts
// are merged into *flag.
int buf=0;
for (int off1 = 0; off1 < it_b; off1++)
{
for(int off2 = 0; off2<it_t;off2++){
int result;
double sum = 0.0;
int x = blockIdx.x+off1*gridDim.x;
int y = threadIdx.x+off2*blockDim.x;
if(x<n && y<n){
for (int i = 0; i < k; i++){
for (int j = 0; j < k; j++){
sum += ((double)G[neighbors[x*n*k*k+y*k*k+i*k+j]])*w[i*k+j];
}
}
if ( sum > 1e-6){
result = 1;
if (result != G[neighbors[x*n*k*k+y*k*k+12]])
buf++;
}
else if( sum < -(1e-6)){
result = -1;
if (result != G[neighbors[x*n*k*k+y*k*k+12]])
buf++;
}
else{
result = G[neighbors[x*n*k*k+y*k*k+12]];
}
temp[x*n+y] =result;
}
}
}
// FIX: the original did `*flag += buf;` — an unsynchronized read-modify-write
// performed by every thread in the grid, losing updates. Accumulate
// atomically instead (skip the atomic when nothing changed). The trailing
// __syncthreads() after the store was useless and has been removed.
if (buf)
atomicAdd(flag, buf);
} |
20,021 |
// NOTE(review): placeholder, not a working SGEMM. Only the shared-memory
// footprint of a 128x128 TN tile kernel is declared; the single store of an
// uninitialized shared element into C presumably keeps the buffer from being
// optimized away. All other parameters are unused.
extern "C" __global__ void sgemm_tn_vec_128x128(
float* param_C,
const float* param_A,
const float* param_B,
float param_alpha,
float param_beta,
int param_flags,
int param_lda,
int param_ldb,
int param_ldc,
int param_m,
int param_n,
int param_k,
int param_ldaz,
int param_ldbz,
int param_ldcz,
int param_batch_loops
) {
__shared__ float share[128*8*2 + 128*8*2 + 4];
*param_C = share[0];
}
|
20,022 | #ifndef WINDOWS
#include <unistd.h>
#endif
#include <stdio.h>
#include <cuda.h>
/* Enumerate all CUDA devices and print their principal properties along with
 * the driver and runtime versions. Returns 1 on the first API failure,
 * otherwise the last cudaError_t value (cudaSuccess). */
int main(int argc, char *argv[])
{
int ndev=0;
int id=0;
cudaError_t error;
cudaDeviceProp features;
int rtver=0;
int dver=0;
/* Get number of devices */
error = cudaGetDeviceCount(&ndev);
if (error != cudaSuccess) {
printf("Error: problem obtaining number of CUDA devices: %d\n", error);
return 1;
}
/* Iterating over devices */
for (id=0; id<ndev; id++)
{
cudaSetDevice(id);
printf("GPU Device ID: %d\n", id);
/* FIX: the original never assigned the result of cudaGetDeviceProperties,
 * so the check below tested the stale value from cudaGetDeviceCount. */
error = cudaGetDeviceProperties(&features, id);
if (error != cudaSuccess) {
printf("Error: problem obtaining features of CUDA devices: %d\n", error);
return 1;
}
printf("GPU Name: %s\n", features.name);
printf("GPU compute capability: %d.%d\n", features.major, features.minor);
error=cudaDriverGetVersion(&dver);
if (error != cudaSuccess) {
printf("Error: problem obtaining CUDA driver version: %d\n", error);
return 1;
}
error=cudaRuntimeGetVersion(&rtver);
if (error != cudaSuccess) {
printf("Error: problem obtaining CUDA run-time version: %d\n", error);
return 1;
}
/* versions are encoded as 1000*major + 10*minor */
printf("CUDA driver version: %d.%d\n", dver/1000, (dver%100)/10);
printf("CUDA runtime version: %d.%d\n", rtver/1000, (rtver%100)/10);
printf("Global memory: %llu\n", (unsigned long long) features.totalGlobalMem);
printf("Max clock rate: %f MHz\n", features.clockRate * 0.001);
printf("Total amount of shared memory per block: %lu\n", features.sharedMemPerBlock);
printf("Total number of registers available per block: %d\n", features.regsPerBlock);
printf("Warp size: %d\n", features.warpSize);
printf("Maximum number of threads per multiprocessor: %d\n", features.maxThreadsPerMultiProcessor);
printf("Maximum number of threads per block: %d\n", features.maxThreadsPerBlock);
printf("Max dimension size of a thread block X: %d\n", features.maxThreadsDim[0]);
printf("Max dimension size of a thread block Y: %d\n", features.maxThreadsDim[1]);
printf("Max dimension size of a thread block Z: %d\n", features.maxThreadsDim[2]);
printf("Max dimension size of a grid size X: %d\n", features.maxGridSize[0]);
printf("Max dimension size of a grid size Y: %d\n", features.maxGridSize[1]);
printf("Max dimension size of a grid size Z: %d\n", features.maxGridSize[2]);
printf("\n");
}
return error;
}
|
20,023 | /*
Copyright (c) 2014 Matthew Simons
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#define colIndex(i,j,ld) (((j)*(ld))+(i))
/**
* implementation of the Hillis & Steel inclusive scan reduction method
*/
/* One step of a pairwise sum reduction: each in-range thread adds the pair
 * (v[2i], v[2i+1]) into shared scratch, then copies it back to v[i].
 * Caller halves `length` between successive invocations. */
__device__ void reduce(double *v, int length)
{
extern __shared__ double sdata[];
// FIX: global index is blockIdx.x*blockDim.x + threadIdx.x; the original
// used blockIdx.x*threadIdx.x, which collapses to threadIdx.x in block 0
// and produces duplicated/scattered indices in later blocks.
int index = blockIdx.x*blockDim.x + threadIdx.x;
int index2 = index * 2 + 1;
if(index2 < length)
{
sdata[index] = v[index2] + v[index2 - 1];
}
__syncthreads();
// FIX: guard the write-back too — the original unconditionally wrote
// sdata[index], storing uninitialized shared memory for tail threads.
if(index2 < length)
{
v[index] = sdata[index];
}
}
extern "C"
/* Multiply every element of v1[0..length-1] by `factor` in place. */
__global__ void scale(double *v1, int length, double factor)
{
// FIX: global index is blockIdx.x*blockDim.x + threadIdx.x (the original
// used blockIdx.x*threadIdx.x, leaving most elements untouched and
// scaling some twice when more than one block is launched).
int index = blockIdx.x*blockDim.x + threadIdx.x;
if(index < length)
{
v1[index] = v1[index] * factor;
}
}
extern "C"
/* Raise every element of v1[0..length-1] to the power `factor` in place. */
__global__ void power(double *v1, int length, double factor)
{
// FIX: correct global index (was blockIdx.x*threadIdx.x + threadIdx.x)
int index = blockIdx.x*blockDim.x + threadIdx.x;
if(index < length)
{
v1[index] = pow(v1[index], factor);
}
}
extern "C"
/* Inner product of v1 and v2: per-thread products go into dynamic shared
 * memory, then the sibling reduce() helper folds them pairwise; thread 0
 * publishes the final sum to ans[0].
 * NOTE(review): the shared buffer must be allocated with at least `length`
 * doubles at launch, and elements products[length..] are never written —
 * the (fixed) guarded reduce never reads past the valid pairs. */
__global__ void innerProduct(double *v1, double *v2, int length, double *ans)
{
// FIX: correct global index (was blockIdx.x*threadIdx.x + threadIdx.x,
// which duplicates indices across blocks and corrupts the result).
int index = blockIdx.x*blockDim.x + threadIdx.x;
extern __shared__ double products[];
// Find the product of each value
if(index < length)
{
products[index] = v1[index] * v2[index];
//printf("product of %f * %f = %f\n", v1[index], v2[index], products[index]);
}
__syncthreads();
// Sum all the products
while(length != 0)
{
reduce(products, length);
length = length / 2;
__syncthreads();
}
// First thread copies answer to global mem
if(index == 0) {
ans[0] = products[0];
}
}
extern "C"
// NOTE(review): unimplemented stub — only computes a (buggy) thread index
// and exits; the matrix-multiply logic sketched in the comments was never
// written. All parameters are unused.
__global__ void multiply(double *m1, double *m2, int width, int height, double *result)
{
int index = blockIdx.x*threadIdx.x + threadIdx.x;
//int corresponding =
//multiply each row and column and reduce atomic add maybe
}
extern "C"
/* Element-wise (Hadamard) product: v3[i] = v1[i] * v2[i] for i < length. */
__global__ void vectorDot(double *v1, double *v2, int length, double *v3)
{
// FIX: correct global index (was blockIdx.x*threadIdx.x + threadIdx.x)
int index = blockIdx.x*blockDim.x + threadIdx.x;
if(index < length)
{
v3[index] = v1[index] * v2[index];
}
// returns *v3
}
|
20,024 | #include "includes.h"
__global__ void lineark(int *ip,int *weight,int *op,int N,int M,int L){
// Linear layer: op[i][j] += sum_k ip[i][k] * weight[j][k], flattened so
// each thread handles exactly one (i, j, k) product and accumulates with
// an atomic add — the caller must zero-initialize op beforehand.
// NOTE(review): parameter N is unused; no bounds check is performed, so
// the launch must provide exactly (rows * M * L) threads — confirm.
unsigned int input_id = (blockIdx.x*gridDim.y + blockIdx.y + blockIdx.z*gridDim.x*gridDim.y)*blockDim.x + threadIdx.x;
int i = input_id/(M*L); // input row
input_id = input_id%(M*L);
int j = input_id/L; // output feature
int k = input_id%L; // input feature
int temp = (*(ip + i*L + k))*(*(weight + j*L + k));
atomicAdd((op + i*M + j),temp);
} |
20,025 | #include <cuda_runtime.h>
#include <float.h>
__global__
/* Initialize an M x N x K box: cells on any boundary face get 10*i,
 * interior cells get 0. Layout is row-major: ind = (i*N + j)*K + k.
 * Intended for a single-block launch with blockDim == (M, N, K). */
void solution_ker(double* A, int M, int N, int K){
int i = threadIdx.x;
int j = threadIdx.y;
int k = threadIdx.z;
int ind = (i*N+j)*K + k;
// FIX: the original tested `j == M-1` (axis mix-up — j runs over N, i over
// M) and used bitwise `|` instead of logical `||` on the last clause.
if (i == 0 || i == M-1 || j == 0 || j == N-1 || k == 0 || k == K-1) {
A[ind] = 10.*i ; //finish this
} else {
A[ind] = 0.0;
}
}
#define a(i,j,k) a[((i)*nn+(j))*kk+(k)]
#define b(i,j,k) b[((i)*nn+(j))*kk+(k)]
#define res(i,j,k) res[((i)*nn+(j))*kk+(k)]
__global__
// One Jacobi sweep: b(i,j,k) = mean of a's six face neighbours, computed
// for interior points only (indices offset by +1, clamped below dim-1).
// Uses the file-level a()/b() indexing macros over an mm x nn x kk layout.
void jac_comp(double* a, double* b, int mm, int nn, int kk, int BLOCKSIZE){
int bl_i = blockIdx.x;
int bl_j = blockIdx.y;
int bl_k = blockIdx.z;
int th_i = threadIdx.x;
int th_j = threadIdx.y;
int th_k = threadIdx.z;
int i = BLOCKSIZE*bl_i + th_i + 1;
int j = BLOCKSIZE*bl_j + th_j + 1;
int k = BLOCKSIZE*bl_k + th_k + 1;
// drop threads that land on or past the far boundary
if (i >= mm-1 || j >= nn-1 || k >= kk-1)
return;
b(i,j,k) = (a(i-1, j ,k) + a(i+1, j, k)
+ a(i, j-1, k) + a(i, j+1, k)
+ a(i, j, k-1) + a(i, j, k+1)) / 6.;
//b(i,j,k) = 1.0;
}
__global__
// Copy b into a over the interior points only (boundary cells untouched).
// NOTE(review): the `value` parameter is unused — presumably left over from
// a fill variant; confirm before removing it from the signature.
void no_bound_memcpy(double* a, double* b, int mm, int nn, int kk, int BLOCKSIZE, double value){
int bl_i = blockIdx.x;
int bl_j = blockIdx.y;
int bl_k = blockIdx.z;
int th_i = threadIdx.x;
int th_j = threadIdx.y;
int th_k = threadIdx.z;
int i = BLOCKSIZE*bl_i + th_i + 1;
int j = BLOCKSIZE*bl_j + th_j + 1;
int k = BLOCKSIZE*bl_k + th_k + 1;
if (i >= mm-1 || j >= nn-1 || k >= kk-1)
return;
a(i,j,k) = b(i,j,k);
}
__global__
/* Set every cell of the mm x nn x kk array `a` to `value`. */
void fill(double* a, double value, int mm, int nn, int kk, int BLOCKSIZE){
int bl_i = blockIdx.x;
int bl_j = blockIdx.y;
int bl_k = blockIdx.z;
int th_i = threadIdx.x;
int th_j = threadIdx.y;
int th_k = threadIdx.z;
int i = BLOCKSIZE*bl_i + th_i;
int j = BLOCKSIZE*bl_j + th_j;
int k = BLOCKSIZE*bl_k + th_k;
// FIX: bounds guard — the original wrote out of bounds whenever a
// dimension was not a multiple of BLOCKSIZE (every sibling kernel here
// guards; this one did not).
if (i >= mm || j >= nn || k >= kk)
return;
a(i,j,k) = value;
}
__global__
// res(i,j,k) = |a(i,j,k) - b(i,j,k)| over interior points only; feeds the
// jac_max reduction to compute the Jacobi convergence residual.
void jac_diff(double* res, double* a, double* b, int mm, int nn, int kk, int BLOCKSIZE){
int bl_i = blockIdx.x;
int bl_j = blockIdx.y;
int bl_k = blockIdx.z;
int th_i = threadIdx.x;
int th_j = threadIdx.y;
int th_k = threadIdx.z;
int i = BLOCKSIZE*bl_i + th_i + 1;
int j = BLOCKSIZE*bl_j + th_j + 1;
int k = BLOCKSIZE*bl_k + th_k + 1;
if (i >= mm-1 || j >= nn-1 || k >= kk-1)
return;
res(i,j,k) = fabs(a(i,j,k)-b(i,j,k));
}
__device__
// Atomic max for float, built on atomicCAS over the bit pattern: keep
// retrying while `val` still beats the currently stored value.
// Returns the value stored at *address before the final exchange.
float atomicMaxf(float* address, float val)
{
int *address_as_int =(int*)address;
int old = *address_as_int, assumed;
while (val > __int_as_float(old)) {
assumed = old;
old = atomicCAS(address_as_int, assumed,
__float_as_int(val));
}
return __int_as_float(old);
}
__global__
// Grid-wide max of d_array into *d_max: each thread strides over the input
// accumulating a partial max in shared memory, a tree reduction folds the
// block, and thread 0 merges the block winner with atomicMaxf.
// Caller must pre-initialize *d_max (e.g. to -FLT_MAX) and allocate
// blockDim.x floats of dynamic shared memory per block.
void jac_max(double* d_array, float* d_max, int elements)
{
extern __shared__ float shared[];
int tid = threadIdx.x;
int gid = (blockDim.x * blockIdx.x) + tid;
// seed with -FLT_MAX so threads that see no elements don't affect the max
shared[tid] = -FLT_MAX;
while (gid < elements) {
shared[tid] = max(shared[tid], d_array[gid]);
gid += gridDim.x*blockDim.x;
}
__syncthreads();
gid = (blockDim.x * blockIdx.x) + tid;
for (unsigned int s=blockDim.x/2; s>0; s>>=1)
{
if (tid < s && gid < elements)
shared[tid] = max(shared[tid], shared[tid + s]);
__syncthreads();
}
if (tid == 0)
// the double partial max is rounded to float for the atomic merge
atomicMaxf(d_max, __double2float_rn(shared[0]));
}
__global__
// NOTE(review): unimplemented stub — body intentionally empty, parameters unused.
void max_reduce_no_bounds(double* a, int mm, int nn, int kk, int BLOCKSIZE){
}
|
20,026 | #include <cuda.h>
#include <iostream>
#include <vector>
#include <iomanip>
#define gpuErrchk(ans) { gpuAssert((ans), __FILE__, __LINE__); }
// Report a CUDA API error with its source location on stderr; terminates
// the process with the error code when `abort` is true (the default).
inline void gpuAssert(cudaError_t code, const char *file, int line, bool abort=true)
{
if (code != cudaSuccess)
{
fprintf(stderr,"GPUassert: %s %s %d\n", cudaGetErrorString(code), file, line);
if (abort) exit(code);
}
}
// 3D float volume stored as a flat array (x fastest, then y, then z).
// NOTE(review): the destructor is commented out so flatData is never freed;
// instances are passed by value into kernels (shallow copy), so enabling it
// naively would double-free — confirm ownership before restoring it.
struct DataSet{
//~DataSet(){ delete [] flatData; }
DataSet(){}; // default: all members left uninitialized
DataSet(unsigned int x, unsigned int y, unsigned int z){
dimension = {x, y, z};
flatDataSize = x*y*z;
flatData = new float[flatDataSize];
}
dim3 dimension;
float* flatData;
unsigned int flatDataSize;
};
// Dump the volume to stdout: one row per dimension.x values, with an extra
// blank line between z-slabs.
void print(DataSet &data){
for (int i = 0; i < data.flatDataSize; i++){
if (i%data.dimension.x == 0) std::cout << std::endl;
if (i%(data.dimension.x*data.dimension.y) == 0) std::cout << std::endl;
std::cout << /*std::setprecision(2) << std::fixed <<*/ data.flatData[i] << "\t";
}
std::cout << std::endl << std::endl;
}
typedef dim3 Filter;
__device__
// Flat global thread id for a 3D grid of 3D blocks: linearize the block id,
// then add the linearized thread offset within the block.
int getGlobalIdx_3D_3D(){
int blockId = blockIdx.x + blockIdx.y * gridDim.x
+ gridDim.x * gridDim.y * blockIdx.z;
int threadId = blockId * (blockDim.x * blockDim.y * blockDim.z)
+ (threadIdx.z * (blockDim.x * blockDim.y))
+ (threadIdx.y * blockDim.x) + threadIdx.x;
return threadId;
}
/* One thread per output cell: average the filter.x*filter.y*filter.z window
 * of `input` anchored at (idx, idy, idz) and store it at the thread's flat
 * output index. The debug printfs for testid trace one cell's computation. */
__global__ void MovingAverageKernel(DataSet input, Filter filter, DataSet output){
uint64_t idx = blockDim.x * blockIdx.x + threadIdx.x;
uint64_t idy = blockDim.y * blockIdx.y + threadIdx.y;
uint64_t idz = blockDim.z * blockIdx.z + threadIdx.z;
uint64_t idglobal = getGlobalIdx_3D_3D();
uint64_t testid= 14;
if (idglobal < output.flatDataSize){
float sum = 0;
if (idglobal == testid)
printf("Output %d = (", testid);
for (uint64_t z = 0; z < filter.z; z++)
for (uint64_t y = 0; y < filter.y; y++)
for (uint64_t x = 0; x < filter.x; x++) {
// flat input index of window element (idx+x, idy+y, idz+z)
uint64_t iddd = idx+x+ input.dimension.x * ((idy+y) + input.dimension.y*(idz + z));
sum += input.flatData[iddd];
if (idglobal == testid)
printf(" %f [%d] + \n", input.flatData[iddd], iddd);
}
sum /= (float)(filter.x * filter.y * filter.z);
if (idglobal == testid)
printf(" ) / %f = %f", (float)filter.x * filter.y * filter.z, sum);
// FIX: store the computed average — the original wrote the thread index
// (output.flatData[idglobal] = idglobal), discarding `sum` entirely.
output.flatData[idglobal] = sum;
}
}
// Build a fixed 8 x 6 x 3 test volume: values encode position as
// column.slab (e.g. 3.2f = x-index 2, z-slab 1), so averaging mistakes are
// easy to spot in the printed output.
DataSet createTestDataSet(){
DataSet d;
d.dimension = { 8, 6, 3 };
d.flatDataSize = d.dimension.x*d.dimension.y*d.dimension.z;
d.flatData = new float[d.flatDataSize]{
1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f, 7.1f, 8.1f,
1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f, 7.1f, 8.1f,
1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f, 7.1f, 8.1f,
1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f, 7.1f, 8.1f,
1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f, 7.1f, 8.1f,
1.1f, 2.1f, 3.1f, 4.1f, 5.1f, 6.1f, 7.1f, 8.1f,
1.2f, 2.2f, 3.2f, 4.2f, 5.2f, 6.2f, 7.2f, 8.2f,
1.2f, 2.2f, 3.2f, 4.2f, 5.2f, 6.2f, 7.2f, 8.2f,
1.2f, 2.2f, 3.2f, 4.2f, 5.2f, 6.2f, 7.2f, 8.2f,
1.2f, 2.2f, 3.2f, 4.2f, 5.2f, 6.2f, 7.2f, 8.2f,
1.2f, 2.2f, 3.2f, 4.2f, 5.2f, 6.2f, 7.2f, 8.2f,
1.2f, 2.2f, 3.2f, 4.2f, 5.2f, 6.2f, 7.2f, 8.2f,
1.3f, 2.3f, 3.3f, 4.3f, 5.3f, 6.3f, 7.3f, 8.3f,
1.3f, 2.3f, 3.3f, 4.3f, 5.3f, 6.3f, 7.3f, 8.3f,
1.3f, 2.3f, 3.3f, 4.3f, 5.3f, 6.3f, 7.3f, 8.3f,
1.3f, 2.3f, 3.3f, 4.3f, 5.3f, 6.3f, 7.3f, 8.3f,
1.3f, 2.3f, 3.3f, 4.3f, 5.3f, 6.3f, 7.3f, 8.3f,
1.3f, 2.3f, 3.3f, 4.3f, 5.3f, 6.3f, 7.3f, 8.3f
};
return d;
}
/* Run the moving-average kernel over `input` with window `filter` and return
 * the (input - filter + 1)-sized result volume. Allocates, copies, launches,
 * copies back, and releases the device buffers. */
DataSet MovingAverage(DataSet &input, Filter &filter){
/*Initalize output dataset using the size of the input and the filter*/
DataSet output(
input.dimension.x - filter.x + 1,
input.dimension.y - filter.y + 1,
input.dimension.z - filter.z + 1
);
std::cout << "Input space: " << input.dimension.x << ", " << input.dimension.y << ", " << input.dimension.z << std::endl;
std::cout << "Creating output space: " << output.dimension.x << ", " << output.dimension.y << ", " << output.dimension.z << std::endl;
/*Initalize data on the device: shallow copies whose flatData points at
 device memory; the structs themselves are passed to the kernel by value.*/
DataSet device_input;
device_input.dimension = input.dimension;
device_input.flatData = nullptr;
device_input.flatDataSize = input.flatDataSize;
DataSet device_output;
device_output.dimension = output.dimension;
device_output.flatData = nullptr;
device_output.flatDataSize = output.flatDataSize;
gpuErrchk(cudaMalloc((void **)&device_input.flatData, sizeof(float)*device_input.flatDataSize));
gpuErrchk(cudaMalloc((void **)&device_output.flatData, sizeof(float)*device_output.flatDataSize));
gpuErrchk(cudaMemcpy(device_input.flatData, input.flatData, sizeof(float)*device_input.flatDataSize, cudaMemcpyHostToDevice));
// one block covering the whole output volume (blocksneeded is 1,1,1 here)
dim3 threadsperblock{ output.dimension.x, output.dimension.y, output.dimension.z };
dim3 blocksneeded = {
output.dimension.x / threadsperblock.x,
output.dimension.y / threadsperblock.y,
output.dimension.z / threadsperblock.z
};
std::cout << "Blocks Needed: " << blocksneeded.x << ", " << blocksneeded.y << ", " << blocksneeded.z << std::endl;
MovingAverageKernel<<< blocksneeded, threadsperblock >>>(device_input, filter, device_output);
// FIX: surface launch-configuration errors (the original never checked)
gpuErrchk(cudaGetLastError());
gpuErrchk(cudaMemcpy(output.flatData, device_output.flatData, output.flatDataSize*sizeof(float), cudaMemcpyDeviceToHost));
// FIX: release device buffers — the original leaked both allocations
gpuErrchk(cudaFree(device_input.flatData));
gpuErrchk(cudaFree(device_output.flatData));
// return by value; the original's std::move() inhibited copy elision (NRVO)
return output;
}
// Demo driver: print the fixed test volume, run a 2x2x2 moving average on
// the GPU, and print the result.
int main(){
DataSet input = createTestDataSet();
std::cout << "Input DataSet: " << std::endl;
print(input);
std::cout << "==========seperate line ==========" << std::endl;
Filter filter{2,2,2};
DataSet output = MovingAverage(input, filter);
print(output);
// NOTE(review): input.flatData / output.flatData are never freed (the
// DataSet destructor is disabled), so both host buffers leak at exit.
} |
20,027 | #include "includes.h"
// Element-wise multiply with broadcasting: full matrix when A and B have the
// same element count, otherwise B is broadcast per-row or per-column.
__global__ void Matrix_MultiplElementWise_naiveOLD(const float * A , int Acount, int Acols, const float * B , int Bcount, int Bcols, float * out0 , int out0count, int out0cols)
{
int id = blockDim.x*blockIdx.y*gridDim.x + blockDim.x*blockIdx.x + threadIdx.x;
int id_row,id_col;
if (id<Acount)
{
if (Acount==Bcount) // matrix .* matrix
{
out0[id] = A[id]*B[id];
}
else if (Bcols==1) // matrix .* row vector
{
// NOTE(review): here B is indexed by A's *row* (id/Acols), i.e. a
// column vector broadcast across each row — the "row vector" label
// looks swapped with the branch below; confirm against callers.
id_row = id/Acols;
out0[id] = A[id]*B[id_row];
}
else // matrix .* column vector
{
id_col = id%Acols;
out0[id] = A[id]*B[id_col];
}
}
} |
20,028 | #include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <iostream>
#include <cmath>
using namespace std;
extern float dz, dt;
extern int size_space, size_time;
const float C = 3e8f;
// Initialize the global grid step dz and time step dt; dt = dz/(2*C)
// ties the time step to the spatial step via the speed of light C.
// NOTE(review): dz/dt are extern floats assigned from double expressions
// (implicit narrowing).
void src_init()
{
dz = 0.015;
dt = dz / (2 * C);
}
// Debug helper: print the current global dz/dt values to stdout.
void src_checkout()
{
cout << "dz: " << dz << endl;
cout << "dt: " << dt << endl;
}
// Gaussian source: injects exp(-((t - 3T)/T)^2), T = 0.5 ns, at the left
// edge of the field at the given timestep.
// NOTE(review): every launched thread writes the same value to dev_Ex[0]
// (benign same-value race, but a <<<1,1>>> launch suffices); the
// size_space parameter is unused.
__global__ void src_cmp_kernel(int current_timestep, float dt, float* dev_Ex, int size_space)
{
float T, T0, vt, val_src, time;
time = current_timestep * dt;
T = 5e-10f;
T0 = 3 * T;
vt = (time - T0) / T;
val_src = expf(-powf(vt, 2.0f));
dev_Ex[0] = val_src;
} |
20,029 | #include "includes.h"
// Tiled matrix transpose via shared memory: a tile is read coalesced from
// `a`, then written transposed to `dest` (also coalesced). The +1 column
// pad breaks shared-memory bank conflicts on the transposed read.
// Assumes blockDim == (ADD_BLOCK_SIZE, ADD_BLOCK_SIZE).
__global__ void kTranspose(float* a, float* dest, int width, int height) {
const int bx = blockIdx.x * blockDim.x;
const int by = blockIdx.y * blockDim.y;
const int tx = bx + threadIdx.x;
const int ty = by + threadIdx.y;
// unsigned int idx = ty * width + tx;
__shared__ float smem[ADD_BLOCK_SIZE][ADD_BLOCK_SIZE + 1];
if (tx < width && ty < height) {
smem[threadIdx.y][threadIdx.x] = a[ty * width + tx];
}
__syncthreads();
// bounds re-checked with swapped roles for the transposed store
if (by + threadIdx.x < height && threadIdx.y + bx < width) {
// idx = height * (blockIdx.x * blockDim.x + threadIdx.y) + blockIdx.y * blockDim.y + threadIdx.x;
dest[(bx + threadIdx.y) * height + by + threadIdx.x] = smem[threadIdx.x][threadIdx.y];
}
} |
20,030 | #include <stdio.h>
#include <stdlib.h>
// Demo kernel: each thread prints its thread and block index from the device.
__global__ void print_from_gpu(void)
{
printf("Hello World from thread [%d, %d]! From device\n", threadIdx.x, blockIdx.x);
}
int main(void)
{
printf("Hello World from host!\n");
// launch 1 block of 2 threads; device printf output is buffered
print_from_gpu<<<1,2>>>();
// wait for the kernel and flush the device printf buffer
cudaDeviceSynchronize();
return 0;
} |
20,031 | #include <stdio.h>
#include <cuda.h>
#include <cuda_runtime.h>
#define LOG_INPUT if(1)
#define LOG_SCAN if(0)
#define LOG_OUTPUT if(1)
// Print N floats with two decimals, space separated, newline terminated.
void print_array(float *A, int N)
{
for (int idx = 0; idx < N; ++idx) {
printf("%.2f ",A[idx]);
}
printf("\n");
}
#define TILE_SIZE 512
/* 1D convolution with a shared-memory tile: P[i] = sum_j N[i-w/2+j]*M[j],
 * zero-padded at the array ends. Halo elements outside the current tile are
 * read directly from global memory. */
__global__ void Convolution1D(float *N, float *M, float *P, int Mask_Width, int Width){
int i = blockIdx.x*blockDim.x + threadIdx.x;
__shared__ float N_ds[TILE_SIZE];
// FIX: guard the tile load (and the final store below) so a grid that
// overshoots Width cannot read or write out of bounds.
if (i < Width)
N_ds[threadIdx.x] = N[i];
__syncthreads();
int This_tile_start_point = blockIdx.x * blockDim.x;
int Next_tile_start_point = (blockIdx.x + 1) * blockDim.x;
int N_start_point = i - (Mask_Width/2);
float Pvalue = 0;
for (int j = 0; j < Mask_Width; j ++) {
int N_index = N_start_point + j;
if (N_index >= 0 && N_index < Width) {
// in-tile elements come from shared memory, halo from global
if ((N_index >= This_tile_start_point) && (N_index < Next_tile_start_point))
Pvalue += N_ds[threadIdx.x+j-(Mask_Width/2)]*M[j];
else
Pvalue += N[N_index] * M[j];
}
}
if (i < Width)
P[i] = Pvalue;
}
/* Fused 1D convolution + max pooling: each block convolves two blockDim.x
 * segments (elements interleaved 16-wide between tid0/tid1 halves), keeps
 * the results in shared memory, then max-reduces each half and writes the
 * two segment maxima to A[2*blockIdx.x] and A[2*blockIdx.x+1].
 * Assumes blockDim.x is a power of two (required by the reduction). */
__global__ void ConvolutionPoolingFused(float *N, float *M, float *P, float *A, int Mask_Width, int Width){
__shared__ float N_ds[TILE_SIZE];
__shared__ float P_ds[TILE_SIZE];
unsigned int tid0 = ( threadIdx .x /16) *16*2 + threadIdx .x %16;
unsigned int tid1 = tid0 + 16;
unsigned int data_point0 = blockIdx .x *2* blockDim .x + tid0 ;
unsigned int data_point1 = blockIdx .x *2* blockDim .x + tid1 ;
N_ds[tid0] = N[data_point0] ;
N_ds[tid1] = N[data_point1];
__syncthreads ();
int cur_tile_start_point = blockIdx.x *2 * blockDim.x;
int next_tile_start_point = (blockIdx.x + 1) *2* blockDim.x;
int N_start_point0 = data_point0 - (Mask_Width/2);
int N_start_point1 = data_point1 - (Mask_Width/2);
float PValue0 = 0;
float PValue1 = 0;
for (int j = 0; j < Mask_Width; j ++) {
int N_index0 = N_start_point0 + j;
int N_index1 = N_start_point1 + j;
if (N_index0 >= 0 && N_index0 < Width) {
if ((N_index0 >= cur_tile_start_point) && (N_index0 < next_tile_start_point) )
PValue0 += N_ds[tid0 +j-(Mask_Width/2)]*M[j];
else
PValue0 += N[N_index0] * M[j];
}
if (N_index1 >= 0 && N_index1 < Width) {
if ((N_index1 >= cur_tile_start_point) && (N_index1 < next_tile_start_point) )
PValue1 += N_ds[tid1 +j-(Mask_Width/2)]*M[j];
else
PValue1 += N[N_index1] * M[j];
}
}
__syncthreads ();
P_ds[tid0] = PValue0;
P_ds[tid1] = PValue1;
__syncthreads();
tid0 = threadIdx.x;
int bound0 = blockDim.x;
tid1 = blockDim.x + threadIdx.x;
int bound1 = blockDim.x + blockDim.x;
int blockSize = blockDim.x;
// FIX: the original condition `tid%2*s == 0` parses as (tid%2)*s == 0,
// i.e. "tid is even", which breaks every reduction step after the first.
// The interleaved max-reduction requires tid % (2*s) == 0 (blockDim.x must
// be a multiple of 2*s for the tid1 half, hence the power-of-two note).
for(int s=1;s<blockSize;s*=2)
{
if(tid0 % (2*s) == 0 && tid0+s<bound0)
P_ds[tid0] = max(P_ds[tid0],P_ds[tid0+s]);
__syncthreads();
if(tid1 % (2*s) == 0 && tid1+s<bound1)
P_ds[tid1] = max(P_ds[tid1],P_ds[tid1+s]);
__syncthreads();
}
if(threadIdx.x == 0)
{
A[2*blockIdx.x] = P_ds[tid0];
A[2*blockIdx.x+1] = P_ds[tid0 + blockDim.x];
}
}
/* In-place interleaved max-reduction of each block's blockDim.x-wide segment
 * of A; thread 0 then writes the segment maximum to B[blockIdx.x].
 * Destroys the contents of A. Assumes power-of-two blockDim.x. */
__global__ void Pool1D(float* A, float* B, int N)
{
int tid = (blockIdx.x) * blockDim.x + threadIdx.x;
int bound = (blockIdx.x) * blockDim.x + blockDim.x;
int blockSize = blockDim.x;
for(int s=1;s<blockSize;s*=2)
{
// FIX: the original tested `tid%2*s == 0`, which parses as (tid%2)*s
// == 0 ("tid is even") and breaks every step after the first. The
// interleaved reduction needs tid % (2*s) == 0; this is correct as a
// global test because the segment base blockIdx.x*blockDim.x is a
// multiple of 2*s for power-of-two block sizes.
if(tid % (2*s) == 0 && tid+s<bound)
A[tid] = max(A[tid],A[tid+s]);
__syncthreads();
}
if(threadIdx.x == 0)
B[blockIdx.x] = A[(blockIdx.x) * blockDim.x];
}
// Driver: reads array size, mask size and pooling size from stdin, runs the
// separate convolution + pooling kernels, then the fused kernel, printing
// each stage's output. Inputs default to 1.0 / 0.25 (the LOG_SCAN block that
// would read real data is compiled out via if(0)).
// NOTE(review): assumes numElements is a multiple of 512 and of
// pooling_size (grid sizes use exact integer division); all host and device
// buffers are leaked at exit.
int main(void)
{
cudaError_t err = cudaSuccess;
int numElements;
int numMaskElements;
int pooling_size;
scanf("%d",&numElements);
scanf("%d",&numMaskElements);
scanf("%d",&pooling_size);
size_t size = numElements * sizeof(float);
size_t sizeMask = numMaskElements * sizeof(float);
float *h_input1 = (float *)malloc(size);
float *h_input2 = (float *)malloc(sizeMask);
float *h_output1 = (float *)malloc(size);
float *h_output2 = (float *)malloc(size);
if (h_input1 == NULL || h_input2 == NULL || h_output1 == NULL || h_output2 == NULL )
{
fprintf(stderr, "Failed to allocate host vectors!\n");
exit(EXIT_FAILURE);
}
LOG_INPUT
for (int i = 0; i < numElements; ++i){
h_input1[i] = 1.0;
}
for(int i=0;i<numMaskElements;++i){
h_input2[i] = 0.25;
}
LOG_SCAN
{
for (int i = 0; i < numElements; ++i)
{
scanf("%f",&h_input1[i]);
}
for (int i = 0; i < numMaskElements; ++i)
{
scanf("%f",&h_input2[i]);
}
}
LOG_INPUT print_array(h_input1,numElements);
LOG_INPUT print_array(h_input2,numMaskElements);
//H2D Transfers
float *d_input1 = NULL;
err = cudaMalloc((void **)&d_input1, size);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to allocate device vector d_input1 (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
float *d_input2 = NULL;
err = cudaMalloc((void **)&d_input2, sizeMask);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to allocate device vector d_input2 (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
float *d_output1 = NULL;
err = cudaMalloc((void **)&d_output1, size);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to allocate device vector h_output1 (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
float *d_output2 = NULL;
err = cudaMalloc((void **)&d_output2, size/pooling_size);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to allocate device vector h_output1 (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
err = cudaMemcpy(d_input1, h_input1, size, cudaMemcpyHostToDevice);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to copy vector h_input1 from host to device (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
err = cudaMemcpy(d_input2, h_input2, sizeMask, cudaMemcpyHostToDevice);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to copy vector h_input2 from host to device (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
int block_size=512;
int grid_size=numElements/block_size;
//Launch kernel
Convolution1D<<<grid_size, block_size>>>(d_input1, d_input2, d_output1, numMaskElements,numElements);
err = cudaGetLastError();
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to launch Convolution1D kernel (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
// D2H transfer
err = cudaMemcpy(h_output1, d_output1, size, cudaMemcpyDeviceToHost);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to copy vector d_output1 from device to host (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
LOG_OUTPUT printf("Output of convolution\n");
LOG_OUTPUT print_array(h_output1,numElements);
// pool the convolution result: one block of pooling_size threads per window
grid_size=numElements/pooling_size;
block_size=pooling_size;
Pool1D<<<grid_size,block_size>>>(d_output1,d_output2,numElements);
err = cudaGetLastError();
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to launch Pooling1D kernel (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
err = cudaMemcpy(h_output2, d_output2, size/pooling_size, cudaMemcpyDeviceToHost);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to copy vector d_output2 from device to host (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
LOG_OUTPUT printf("Output of pooling\n");
LOG_OUTPUT print_array(h_output2,numElements/pooling_size);
// fused variant: each block handles 2*pooling_size elements
// NOTE(review): launch result is not checked with cudaGetLastError here,
// unlike the two launches above.
grid_size=numElements/512;
block_size=pooling_size;
ConvolutionPoolingFused<<<grid_size,block_size>>>(d_input1, d_input2, d_output1, d_output2, numMaskElements,numElements);
err = cudaMemcpy(h_output2, d_output2, size/pooling_size, cudaMemcpyDeviceToHost);
if (err != cudaSuccess)
{
fprintf(stderr, "Failed to copy vector d_output2 from device to host (error code %s)!\n", cudaGetErrorString(err));
exit(EXIT_FAILURE);
}
LOG_OUTPUT printf("Output of pooling\n");
LOG_OUTPUT print_array(h_output2,numElements/pooling_size);
} |
20,032 | #include <iostream>
// __global__ alerts the compiler that a function should be built to run on a device, not the host
// Empty device kernel: exists only to demonstrate a kernel launch.
__global__ void kernel(void)
{
}
// This function is built for the host as normal
// Host entry point: launches the empty kernel once and prints a greeting.
// Note: the launch is asynchronous and its result is never checked.
int main(void)
{
kernel<<<1,1>>>(); // The angled brackets denote arguments to be passed to the device runtime
printf("Hello, world!\n");
return 0;
} |
20,033 | #include "includes.h"
// Set out[s .. s+nv-1] = val using a grid-stride loop, so any launch
// configuration covers the whole range.
__global__ void ker_gkylCartFieldAssignAll(unsigned s, unsigned nv, double val, double *out)
{
for (int n = blockIdx.x*blockDim.x + threadIdx.x + s; n < s + nv; n += blockDim.x * gridDim.x)
out[n] = val;
} |
20,034 | #include <iostream>
#include <sys/time.h>
#define TILE_DIM 32
using namespace std;
/* cudaMallocPitch example:
* compares normal device memory allocation and allocation using cudaMallocPitch
* for different matrix sizes.
* When matrix width is not a multiple of 16 cudaMallocPitch should be preferred.
*/
// kernel for normal device memory allocation
// kernel for normal device memory allocation
// Naive C = A * B for row-major matrices; one thread per output element.
// No bounds check: the launch grid must tile the output exactly.  The inner
// loop runs over aw-enlarge columns, i.e. only the un-enlarged width.
__global__ void matmul(double *a, double* b, double *c, int aw, int bw, int enlarge) {
int row = blockIdx.y * blockDim.y + threadIdx.y;
int col = blockIdx.x * blockDim.x + threadIdx.x;
double sum = 0.0;
for (int i = 0; i < aw-enlarge; i++) {
sum += a[row*aw+i] * b[i*bw+col];
}
c[row*bw+col] = sum;
}
// kernel for allocation with cudaMallocPitch
// kernel for allocation with cudaMallocPitch
// Same multiply but rows are addressed by `pitch` (in ELEMENTS, not bytes).
// NOTE(review): a single pitch is used for a, b and c, which is only valid
// when all three allocations share the same row width (square case).
__global__ void matmul_pitch(double *a, double* b, double *c, int aw, int bw, size_t pitch, int enlarge) {
int row = blockIdx.y * blockDim.y + threadIdx.y;
int col = blockIdx.x * blockDim.x + threadIdx.x;
double sum = 0.0;
for (int i = 0; i < aw-enlarge; i++) {
sum += a[row*pitch+i] * b[i*pitch+col];
}
c[row*pitch+col] = sum;
}
// Benchmark a naive matrix multiply with plain cudaMalloc allocations.
// A is (ah+enlarge) x (aw+enlarge), B is (bh+enlarge) x (bw+enlarge); the
// kernel accumulates only over the original aw columns and the grid covers
// only the original ah x bw output.
void run_matmul(int ah, int aw, int bw, int enlarge) {
    timeval tt1, tt2;
    int ms;
    double fms;
    int bh = aw;
    int ah1 = ah + enlarge;
    int aw1 = aw + enlarge;
    int bh1 = bh + enlarge;
    int bw1 = bw + enlarge;
    // host arrays
    double *a = (double*)malloc(ah1*aw1*sizeof(double));
    double *b = (double*)malloc(bh1*bw1*sizeof(double));
    double *c = (double*)malloc(ah1*bw1*sizeof(double));
    // Fix: row-major indexing must use the row WIDTH (aw1 / bw1), not the row
    // count; the original "a[i*ah1+j]" / "b[i*bh1+j]" was only correct for
    // square matrices and indexed out of bounds otherwise.
    for (int i = 0; i < ah1; i++)
        for (int j = 0; j < aw1; j++)
            a[i*aw1+j] = (double)(i+j);
    for (int i = 0; i < bh1; i++)
        for (int j = 0; j < bw1; j++)
            b[i*bw1+j] = (double)(i-j);
    // device arrays
    double *a_dev;
    cudaMalloc((void**) &a_dev, ah1*aw1 * sizeof(double));
    double *b_dev;
    cudaMalloc((void**) &b_dev, bh1*bw1 * sizeof(double));
    double *c_dev;
    cudaMalloc((void**) &c_dev, ah1*bw1 * sizeof(double));
    // copy to device
    cudaMemcpy(a_dev, a, ah1*aw1 * sizeof(double) , cudaMemcpyHostToDevice);
    cudaMemcpy(b_dev, b, bh1*bw1 * sizeof(double) , cudaMemcpyHostToDevice);
    // kernel run; assumes ah and bw are multiples of TILE_DIM
    dim3 nBlocks(bw/TILE_DIM, ah/TILE_DIM, 1);
    dim3 nThreads(TILE_DIM, TILE_DIM, 1);
    cudaThreadSynchronize();
    gettimeofday( &tt1, NULL );
    matmul <<< nBlocks, nThreads >>> (a_dev, b_dev, c_dev, aw1, bw1, enlarge);
    cudaThreadSynchronize();
    gettimeofday( &tt2, NULL );
    // copy from device
    cudaMemcpy(c, c_dev, ah1*bw1 * sizeof(double) , cudaMemcpyDeviceToHost);
    // timing
    cout << "-----------------------------------------------" << endl;
    cout << "normal device memory alocation using cudaMalloc:" << endl;
    ms = (tt2.tv_sec - tt1.tv_sec);
    ms = ms * 1000000 + (tt2.tv_usec - tt1.tv_usec);
    fms = ((double)ms)/1000000.0;
    cout << "Comp time = " << fms << endl;
    double dNumOps = 2.0 * (double)(aw) * (double)(ah) * (double)(bw);
    double gflops = 1.0e-9 * dNumOps/fms;
    cout << "GFlops = " << gflops << endl;
    cout << "value check = " << c[145] << endl;
    cout << "-----------------------------------------------" << endl;
    free(a);
    free(b);
    free(c);
    cudaFree(a_dev);
    cudaFree(b_dev);
    cudaFree(c_dev);
}
// Benchmark the same multiply with cudaMallocPitch allocations.
// NOTE: a single `pitch` variable is reused for all three allocations (each
// cudaMallocPitch overwrites it), so the kernel is only correct when all row
// widths match — the square case exercised by main().
void run_matmul_pitch(int ah, int aw, int bw, int enlarge) {
    timeval tt1, tt2;
    int ms;
    double fms;
    int bh = aw;
    int ah1 = ah + enlarge;
    int aw1 = aw + enlarge;
    int bh1 = bh + enlarge;
    int bw1 = bw + enlarge;
    // host arrays
    double *a = (double*)malloc(ah1*aw1*sizeof(double));
    double *b = (double*)malloc(bh1*bw1*sizeof(double));
    double *c = (double*)malloc(ah1*bw1*sizeof(double));
    // Fix: row-major indexing must use the row width (aw1 / bw1), not the
    // row count, to stay in bounds for non-square shapes.
    for (int i = 0; i < ah1; i++)
        for (int j = 0; j < aw1; j++)
            a[i*aw1+j] = (double)(i+j);
    for (int i = 0; i < bh1; i++)
        for (int j = 0; j < bw1; j++)
            b[i*bw1+j] = (double)(i-j);
    // device arrays are allocated using cudaMallocPitch
    size_t pitch;
    double *a_dev;
    cudaMallocPitch(&a_dev, &pitch, aw1 * sizeof(double), ah1);
    double *b_dev;
    cudaMallocPitch(&b_dev, &pitch, bw1 * sizeof(double), bh1);
    double *c_dev;
    cudaMallocPitch(&c_dev, &pitch, bw1 * sizeof(double), ah1);
    // Fix: cudaMemcpy2D's width parameter is in BYTES; the original passed an
    // element count, so only a fraction of every row was copied.
    cudaMemcpy2D(a_dev, pitch, a, aw1 * sizeof(double), aw1 * sizeof(double), ah1, cudaMemcpyHostToDevice);
    cudaMemcpy2D(b_dev, pitch, b, bw1 * sizeof(double), bw1 * sizeof(double), bh1, cudaMemcpyHostToDevice);
    // kernel run
    dim3 nBlocks(bw/TILE_DIM, ah/TILE_DIM, 1);
    dim3 nThreads(TILE_DIM, TILE_DIM, 1);
    cudaThreadSynchronize();
    gettimeofday( &tt1, NULL );
    matmul_pitch <<< nBlocks, nThreads >>> (a_dev, b_dev, c_dev, aw1, bw1,
        pitch/sizeof(double),enlarge);
    cudaThreadSynchronize();
    gettimeofday( &tt2, NULL );
    // data is copied back with cudaMemcpy2D (width in bytes here too)
    cudaMemcpy2D(c, bw1 * sizeof(double), c_dev, pitch, bw1 * sizeof(double), ah1, cudaMemcpyDeviceToHost);
    // timing
    cout << "-----------------------------------------------" << endl;
    cout << "device memory alocation using cudaMallocPitch:" << endl;
    ms = (tt2.tv_sec - tt1.tv_sec);
    ms = ms * 1000000 + (tt2.tv_usec - tt1.tv_usec);
    fms = ((double)ms)/1000000.0;
    cout << "Comp time = " << fms << endl;
    double dNumOps = 2.0 * (double)(aw) * (double)(ah) * (double)(bw);
    double gflops = 1.0e-9 * dNumOps/fms;
    cout << "GFlops = " << gflops << endl;
    cout << "value check = " << c[145] << endl;
    cout << "-----------------------------------------------" << endl;
    free(a);
    free(b);
    free(c);
    cudaFree(a_dev);
    cudaFree(b_dev);
    cudaFree(c_dev);
}
int main() {
    // Square matrix dimensions: A is ah x aw, B is aw x bw.
    const int ah = 2560;
    const int aw = 2560;
    const int bw = 2560;
    // Extra padding added to every dimension; the computation itself still
    // runs over the original sizes.  Try non-zero values to compare the
    // timing of plain cudaMalloc against cudaMallocPitch.
    const int enlarge = 0;
    run_matmul(ah, aw, bw, enlarge);
    run_matmul_pitch(ah, aw, bw, enlarge);
}
|
20,035 | #include<iostream>
#include<fstream>
#include<time.h>
using namespace std;
// Polygon with per-axis vertex arrays and a bounding box.
// mbr appears to be [min_x, max_x, min_y, max_y] based on how boxsize and the
// kernel grid are computed in main() — TODO confirm against the input file.
typedef struct{
int *x;
int *y;
int nr_vertice;
int mbr[4];
int *boxes;
}polygon;
// Ray-casting point-in-polygon test: casts a ray from (x,y) toward +x and
// counts crossings with the polygon edges; odd count => inside.
// Iterates consecutive vertex pairs only, so the vertex list is assumed to
// repeat the first vertex at the end to close the polygon — TODO confirm.
__device__ int PtInPolygon(int x,int y, int *poly_x, int *poly_y, int nCount){
int nCross=0,i;
int x1,x2,y1,y2;
double ix;
for(i=0;i<nCount-1;i++){
x1=poly_x[i];
y1=poly_y[i];
x2=poly_x[i+1];
y2=poly_y[i+1];
// Skip horizontal edges and edges not straddling the scanline; the
// half-open [min,max) test counts each shared vertex exactly once.
if(y1==y2)continue;
if(y<min(y1,y2))continue;
if(y>=max(y1,y2))continue;
// x coordinate where the edge crosses the scanline y.
ix=(double)(y-y1)*(double)(x2-x1)/(double)(y2-y1)+x1;
if(ix>x)nCross++;
}
return(nCross%2==1);
}
// For every grid cell (one lattice point of poly1's bounding box, offset by
// left/top), two threads test that point: even tid against poly1, odd tid
// against poly2.  result[tid] records 1 (inside) or 0 (outside).
// Launched with blockDim.x == 2 by main(), so tid pairs map to blockIdx cells.
__global__ void kernel(int nr_v1, int *poly1_x, int *poly1_y, int nr_v2, int *poly2_x, int *poly2_y, int left, int top, int *result){
int tid = threadIdx.x+blockIdx.x*blockDim.x+blockIdx.y*gridDim.x*blockDim.x;
int x = blockIdx.x + left,y = blockIdx.y + top;
int *poly_x, *poly_y, nr_v;
poly_x = (tid%2 == 0) ? poly1_x:poly2_x;
poly_y = (tid%2 == 0) ? poly1_y:poly2_y;
nr_v = (tid%2 == 0) ? nr_v1:nr_v2;
if(PtInPolygon(x, y, poly_x, poly_y, nr_v) == 1)
result[tid] = 1;
else result[tid] = 0;
}
// Parse one polygon record of the form
//   "<nr_vertice>, <mbr0> <mbr1> <mbr2> <mbr3>, x0 y0, x1 y1, ..."
// into poly, allocating the vertex arrays.  No bounds/allocation checks;
// malformed input (missing commas) walks off the end of line — assumes the
// input file is well-formed.
void parsePoly(char *line,polygon *poly){
int i, offset = 0;
sscanf(line, "%d, %d %d %d %d", &poly->nr_vertice, &poly->mbr[0], &poly->mbr[1], &poly->mbr[2], &poly->mbr[3]);
//printf("%d, %d %d %d %d\n", poly->nr_vertice, poly->mbr[0], poly->mbr[1], poly->mbr[2], poly->mbr[3]);
// Skip past the two commas that terminate the header fields.
while(line[offset++] != ',');
while(line[offset++] != ',');
poly->x = (int *)malloc(poly->nr_vertice*sizeof(int));
poly->y = (int *)malloc(poly->nr_vertice*sizeof(int));
// Each vertex is "x y" followed by a comma.
for(i=0;i<poly->nr_vertice;i++){
sscanf(line+offset, "%d %d", &poly->x[i], &poly->y[i]);
while(line[offset++] != ',');
}
}
// Quick rejection test: returns 0 when poly2's MBR fully contains poly1's MBR
// (mbr layout presumed [min_x, max_x, min_y, max_y] — see parsePoly/main),
// 1 otherwise.
int filter(polygon *poly1, polygon *poly2){
    const int *m1 = poly1->mbr;
    const int *m2 = poly2->mbr;
    int contained = (m2[0] <= m1[0]) && (m2[1] >= m1[1]) &&
                    (m2[2] <= m1[2]) && (m2[3] >= m1[3]);
    return contained ? 0 : 1;
}
// Reads two polygons from file "polygon", does an MBR containment pre-filter,
// then tests every lattice point of poly1's bounding box against both
// polygons on the GPU to decide containment.
// NOTE(review): the kernel grid uses (mbr[1]-mbr[0]) x (mbr[3]-mbr[2]) blocks
// while boxsize includes the +1 endpoints, so the last row/column of results
// is read back without ever being written — confirm intended coverage.
// Allocations are unchecked and never freed (process exit cleans up).
int main()
{
static const int read_bufsize=65536;
char polygon1[read_bufsize], polygon2[read_bufsize];
const char *filename = "polygon";
fstream polyfile;
polyfile.open(filename,fstream::in | fstream::binary);
polyfile.getline(polygon1,read_bufsize);
polyfile.getline(polygon2,read_bufsize);
polygon *poly1,*poly2;
poly1 = (polygon *)malloc(sizeof(polygon));
poly2 = (polygon *)malloc(sizeof(polygon));
parsePoly(polygon1, poly1);
parsePoly(polygon2, poly2);
// If poly2's MBR does not contain poly1's MBR, containment is impossible.
if(filter(poly1,poly2)){
cout<<"NO!"<<endl;
return 1;
}
int *dev_poly1_x, *dev_poly1_y, *dev_poly2_x, *dev_poly2_y;
int *host_result, *dev_result;
// Number of lattice points in poly1's bounding box (inclusive bounds).
int boxsize = (poly1->mbr[1]-poly1->mbr[0]+1)*(poly1->mbr[3]-poly1->mbr[2]+1);
cudaMalloc((void **)&dev_poly1_x, poly1->nr_vertice*sizeof(int));
cudaMalloc((void **)&dev_poly1_y, poly1->nr_vertice*sizeof(int));
cudaMalloc((void **)&dev_poly2_x, poly2->nr_vertice*sizeof(int));
cudaMalloc((void **)&dev_poly2_y, poly2->nr_vertice*sizeof(int));
// Two results per point: [2*i] = inside poly1, [2*i+1] = inside poly2.
cudaMalloc((void **)&dev_result, 2*boxsize*sizeof(int));
cudaMemcpy(dev_poly1_x, poly1->x, poly1->nr_vertice*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_poly1_y, poly1->y, poly1->nr_vertice*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_poly2_x, poly2->x, poly2->nr_vertice*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_poly2_y, poly2->y, poly2->nr_vertice*sizeof(int), cudaMemcpyHostToDevice);
clock_t start, end;
start = clock();
dim3 grids(poly1->mbr[1]-poly1->mbr[0],poly1->mbr[3]-poly1->mbr[2]);
kernel<<<grids, 2>>>(poly1->nr_vertice, dev_poly1_x, dev_poly1_y, poly2->nr_vertice, dev_poly2_x, dev_poly2_y, poly1->mbr[0], poly1->mbr[2], dev_result);
host_result = (int *)malloc(2*boxsize*sizeof(int));
// Blocking copy also synchronizes with the kernel above.
cudaMemcpy(host_result, dev_result, 2*boxsize*sizeof(int), cudaMemcpyDeviceToHost);
// A point inside poly1 but outside poly2 disproves containment.
for(int i=0;i<boxsize;i++){
// cout<<i % (poly1->mbr[1]-poly1->mbr[0])+poly1->mbr[0]<<" "<<i/(poly1->mbr[1]-poly1->mbr[0])+poly1->mbr[2]<<endl;
if(host_result[2*i] == 1 && host_result[2*i+1] == 0){
end = clock();
cout<<"NO! Time used: "<<end-start<<endl;
return 1;
}
}
end = clock();
cout<<"YES! Time used: "<<end-start<<endl;
return 0;
}
|
20,036 | #include <stdio.h>
#include <errno.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <sys/wait.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#include <cuda_runtime.h>
// Element-wise C = A + B.
// Fix: the original had EVERY thread loop over the whole array, so all N
// launched threads redundantly wrote all N elements.  A grid-stride loop
// gives each thread a disjoint subset and stays correct for any launch
// configuration (including <<<1,1>>>).
__global__ void VecAdd(float* A, float* B, float* C, int N)
{
    for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < N;
         i += blockDim.x * gridDim.x) {
        C[i] = A[i] + B[i];
    }
}
// CPU reference implementation: C[i] = A[i] + B[i] for the first N elements.
void HostVecAdd(float* A, float* B, float* C, int N)
{
    for (int idx = 0; idx < N; ++idx)
        C[idx] = A[idx] + B[idx];
}
// One iteration of the host-only benchmark: allocate three vectors, fill A
// and B with pseudo-random values, and add them on the CPU.
void TestHostMemory(int LoopNum)
{
    printf(" - Loop %d:\n", LoopNum+1);
    int N = 1024;
    size_t size = N * sizeof(float);
    float* h_A = (float*)malloc(size);
    float* h_B = (float*)malloc(size);
    float* h_D = (float*)malloc(size);
    if (h_A == NULL || h_B == NULL || h_D == NULL) {
        fprintf(stderr, "host allocation failed\n");
        free(h_A);
        free(h_B);
        free(h_D);
        return;
    }
    // Initialize input vectors
    for (int i = 0; i < N; i++) {
        h_A[i] = (float)((i+1)*rand()%1000)/100;
        h_B[i] = (float)((i+2)*rand()%1000)/100;
    }
    //printf(" => Array A first three data: %f, %f, %f\n", h_A[0], h_A[1], h_A[2]);
    //printf(" => Array B first three data: %f, %f, %f\n", h_B[0], h_B[1], h_B[2]);
    HostVecAdd(h_A, h_B, h_D, N);
    //printf(" => Return D first three data: %f, %f, %f\n", h_D[0], h_D[1], h_D[2]);
    // Fix: the original leaked all three buffers every loop iteration
    // (TestGPUMemory frees its host buffers; this path did not).
    free(h_A);
    free(h_B);
    free(h_D);
}
// One iteration of the explicit-device-memory benchmark: host alloc + init,
// H2D copies, VecAdd kernel, D2H copy, then free both sides.
// No CUDA error checking is performed.
void TestGPUMemory(int LoopNum)
{
printf(" - Loop %d:\n", LoopNum+1);
int N = 1024;
size_t size = N * sizeof(float);
float* h_A = (float*)malloc(size);
float* h_B = (float*)malloc(size);
float* h_D = (float*)malloc(size);
// Initialize input vectors
for (int i = 0; i < N; i++) {
h_A[i] = (float)((i+1)*rand()%1000)/100;
h_B[i] = (float)((i+2)*rand()%1000)/100;
}
//printf(" => Array A first three data: %f, %f, %f\n", h_A[0], h_A[1], h_A[2]);
//printf(" => Array B first three data: %f, %f, %f\n", h_B[0], h_B[1], h_B[2]);
float* d_A, *d_B, *d_D;
cudaMalloc(&d_A, size);
cudaMalloc(&d_B, size);
cudaMalloc(&d_D, size);
cudaMemcpy(d_A, h_A, size, cudaMemcpyHostToDevice);
cudaMemcpy(d_B, h_B, size, cudaMemcpyHostToDevice);
// Invoke kernel
int threadsPerBlock = 256;
int blocksPerGrid = (N + threadsPerBlock - 1)/ threadsPerBlock;
VecAdd<<<blocksPerGrid, threadsPerBlock>>>(d_A, d_B, d_D, N);
cudaDeviceSynchronize();
cudaMemcpy(h_D, d_D, size, cudaMemcpyDeviceToHost);
cudaFree(d_A);
cudaFree(d_B);
cudaFree(d_D);
//printf(" => Return D first three data: %f, %f, %f\n", h_D[0], h_D[1], h_D[2]);
free(h_A);
free(h_B);
free(h_D);
}
// One iteration of the unified-memory benchmark: cudaMallocManaged buffers
// are written on the host, consumed by the kernel, and freed with cudaFree.
// cudaDeviceSynchronize is required before the host may read results.
void TestUnifedMemory(int LoopNum)
{
printf(" - Loop %d:\n", LoopNum+1);
int N = 1024;
size_t size = N * sizeof(float);
float* h_A, *h_B, *h_D;
cudaMallocManaged((float**)&h_A, size);
cudaMallocManaged((float**)&h_B, size);
cudaMallocManaged((float**)&h_D, size);
// Initialize input vectors
for (int i = 0; i < N; i++) {
h_A[i] = (float)((i+1)*rand()%1000)/100;
h_B[i] = (float)((i+2)*rand()%1000)/100;
}
//printf(" => Array A first three data: %f, %f, %f\n", h_A[0], h_A[1], h_A[2]);
//printf(" => Array B first three data: %f, %f, %f\n", h_B[0], h_B[1], h_B[2]);
// Invoke kernel
int threadsPerBlock = 256;
int blocksPerGrid = (N + threadsPerBlock - 1)/ threadsPerBlock;
VecAdd<<<blocksPerGrid, threadsPerBlock>>>(h_A, h_B, h_D, N);
cudaDeviceSynchronize();
//printf(" => Return D first three data: %f, %f, %f\n", h_D[0], h_D[1], h_D[2]);
// Free memory
cudaFree(h_A);
cudaFree(h_B);
cudaFree(h_D);
}
// Print command-line help and terminate with a non-zero exit status.
void usage()
{
    fputs("Usage: [ options ]\n", stdout);
    fputs("\t-n <loops>\tRun this number of memcpy loops (default 1)\n", stdout);
    fputs("\t-m <1|2|3>\tSepecify memory allocation. 1:CPU, 2:GPU, 3: Unified (default 1)\n", stdout);
    exit(1);
}
// Driver: parses -n (loop count) and -m (memory mode), runs the selected
// benchmark LoopCount times, and reports the average per-loop wall time.
// NOTE(review): if -m is outside 1..3 no case runs and start/end are used
// uninitialized in the final timing computation.
int main(int argc, char *argv[]){
int LoopCount = 1;
int MemAllocMode = 1;
int t;
struct timeval start, end;
while (1) {
int c = getopt(argc, argv, "hn:m:");
if (c < 0)
break;
switch (c) {
case 'n':
LoopCount = atoi(optarg);
break;
case 'm':
MemAllocMode = atoi(optarg);
break;
case 'h':
usage();
break;
}
}
switch (MemAllocMode) {
case 1:
printf(">>> Testing Host Memory for %d loop(s)\n", LoopCount);
gettimeofday(&start, NULL);
for (int i = 0; i < LoopCount; i++) {
TestHostMemory(i);
}
gettimeofday(&end, NULL);
break;
case 2:
printf(">>> Testing GPU Memory for %d loop(s)\n", LoopCount);
gettimeofday(&start, NULL);
for (int i = 0; i < LoopCount; i++) {
TestGPUMemory(i);
}
gettimeofday(&end, NULL);
break;
case 3:
printf(">>> Testing Unified Memory for %d loop(s)\n", LoopCount);
gettimeofday(&start, NULL);
for (int i = 0; i < LoopCount; i++) {
TestUnifedMemory(i);
}
gettimeofday(&end, NULL);
break;
}
// Average microseconds per loop.
t = ((end.tv_sec - start.tv_sec)*1000000 + end.tv_usec - start.tv_usec)/LoopCount;
printf(">>> Average Kernel Runtime is %d uS for %d loops\n", t, LoopCount);
}
|
20,037 | #include "includes.h"
// 5x5 mean (smoothing) filter over a grayscale image of `linhas` rows by
// `cols` columns; each thread produces one output pixel in `saida`.
// NOTE: the flat index hard-codes blockDim.x == 1024 and gridDim.x == 65536.
__global__ void smoothGray (unsigned char *imagem, unsigned char *saida, unsigned int cols, unsigned int linhas)
{
    unsigned int indice = (blockIdx.y * blockDim.x * 65536) + (blockIdx.x * 1024) + threadIdx.x; // flat pixel index from block/thread ids
    if(indice >= cols*linhas)
        return;
    // Window bounds: i is the row (indice / cols), j the column (indice % cols).
    int i_begin = (indice/(int)cols) - 2, i_end = (indice/(int)cols)+2;
    int j_begin = (indice%(int)cols) - 2, j_end = (indice%(int)cols)+2;
    if(i_begin<0) i_begin = 0;
    if(j_begin<0) j_begin = 0;
    // Fix: rows must be clamped against the row count `linhas`; the original
    // clamped against `cols`, reading out of bounds on non-square images.
    if(i_end>=linhas) i_end = linhas-1;
    if(j_end>=cols) j_end = cols-1;
    // Average all pixels inside the clamped window.
    int media = 0;
    int qtd = 0;
    for (int i = i_begin; i<= i_end; ++i)
    {
        for(int j = j_begin; j<= j_end; ++j)
        {
            media += imagem[(i*cols)+j];
            qtd++;
        }
    }
    saida[indice] = (unsigned char)(media/qtd);
} |
20,038 | #include "includes.h"
// Zero every element of buffer[0..size) except buffer[index[0]], which is
// set to `value`.  index is a device pointer read by every thread.
__global__ void oneOfNSelection(float *buffer, int* index, int size, float value)
{
int threadId = blockDim.x*blockIdx.y*gridDim.x //rows preceeding current row in grid
+ blockDim.x*blockIdx.x //blocks preceeding current block
+ threadIdx.x;
if (threadId < size && threadId != index[0])
{
buffer[threadId] = 0;
}
else if (threadId < size && threadId == index[0]){
buffer[threadId] = value;
}
} |
20,039 | // includes
#include <stdio.h>
#include <stdlib.h>
// Funcion de liberacion de memoria para el vector que representa la Matriz
// Releases a matrix stored as a flat float vector.
void liberaVector(float *vec) {
    free(vec);  // free(NULL) is a no-op, so a null vector is safe
}
|
20,040 | #include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
/**
* Quickhull.cu
* Author: Michael Gruesen
* A quickhull implementation for NVIDIA GPGPU Compute Capability 2.0
*
* Serial QSort Code Adapted
* from : Aashish Barnwal
* source: http://www.geeksforgeeks.org/iterative-quick-sort/
*
* Parallel QSort Code Adapted
* from : Seiral QSort Code
**/
// 2-D integer point; equality compares both coordinates.
struct Point {
    int x, y;
    bool operator==(Point a) { return (x == a.x) && (y == a.y); }
};
// Directed hull edge from p to q; equality compares both endpoints.
struct Edge {
Point p;
Point q;
bool operator==(Edge a){return p == a.p && q == a.q;}
};
// Device kernel for the parallel quickhull; body not yet implemented.
__global__ void quickhull(Point* d_input, Point* d_output, int n){
}
// Launch the (currently empty) quickhull kernel with n*sizeof(Edge) bytes of
// dynamic shared memory and report any execution error.
// Fix: the original passed <<< dimBlock, dimGrid >>> — the grid and block
// arguments were swapped (harmless only while both default to (1,1,1)).
__host__ void launchQuickHull(Point* d_input, Point* d_output, int n){
    dim3 dimGrid;
    dim3 dimBlock;
    size_t sharedSize = n * sizeof(Edge);
    cudaError_t cErr;
    quickhull<<< dimGrid, dimBlock, sharedSize >>>(d_input, d_output, n);
    cErr = cudaDeviceSynchronize();
    if (cErr != cudaSuccess) fprintf(stderr, "%s\n", cudaGetErrorString(cErr));
}
// Allocate device buffers and upload the host input.
// NOTE(review): d_input/d_output are passed BY VALUE, so the pointers written
// by cudaMalloc never reach the caller — this needs Point**/Edge** out-params
// to work.  Also d_output (Edge*) is sized with sizeof(Point).  The call site
// in main() is currently commented out.
void allocate(Point* d_input, Point* h_input, Edge* d_output, int n){
size_t memSize = n*sizeof(Point);
cudaMalloc((void**)&d_input, memSize);
cudaMalloc((void**)&d_output, memSize);
cudaMemcpy(d_input, h_input, memSize, cudaMemcpyHostToDevice);
}
// Release the device-side input and output buffers.
void deallocate(Point* d_input, Point* d_output){
    cudaFree(d_output);
    cudaFree(d_input);
}
// Dump the hull edges to stderr as "px,py->qx,qy" separated by ", ",
// with a newline after the last edge.
void printHull(Edge* out, int n){
    for (int i = 0; i < n; i++) {
        const char* sep = (i + 1 == n) ? "\n" : ", ";
        fprintf(stderr, "%d,%d->%d,%d%s", out[i].p.x, out[i].p.y, out[i].q.x, out[i].q.y, sep);
    }
}
// Exchange two points in place.
void swap(Point* a, Point* b){
    Point tmp = *b;
    *b = *a;
    *a = tmp;
}
/**
* Modification: Added extra conditional to allow
* sorting by x then y if a.x == b.x
**/
// Lomuto partition around the last element input[r], ordering by x and
// breaking ties by y (the extra else-if branch vs. the classic "<=" form).
// Returns the pivot's final index.
int partition(Point* input, int l, int r){
int x = input[r].x;
int y = input[r].y;
int i = (l - 1);
for (int j = l; j <= r-1; j++){
//was : if(input[j].x <= x)
if(input[j].x < x){
i++;
swap(&input[i], &input[j]);
}
else if (input[j].x == x){
// Equal x: order by y so the sort is total on (x, y).
if (input[j].y < y){
i++;
swap(&input[i], &input[j]);
}
}
}
// Place the pivot between the two partitions.
swap(&input[i+1], &input[r]);
return i+1;
}
/**
* Code for iterative serial quicksort comes from
* http://www.geeksforgeeks.org/iterative-quick-sort/
* Author: Aashish Barnwal
**/
// Iterative quicksort over input[l..r] using an explicit sub-range stack
// (a VLA sized r-l+1, which is sufficient because only sub-ranges of the
// original interval are ever pushed).  Sorts by (x, y) via partition().
void quickSortIterative(Point* input, int l, int r){
int stack[r - l + 1];
int top = -1;
stack[++top] = l;
stack[++top] = r;
while (top >=0){
// Pop the next sub-range to partition.
r = stack[top--];
l = stack[top--];
int p = partition(input, l, r);
// Push each side that still has more than one element.
if (p-1 > l){
stack[++top] = l;
stack[++top] = p-1;
}
if (p+1 < r){
stack[++top] = p+1;
stack[++top] = r;
}
}
}
// Debug helper: dump every point as "x y" to stderr.
void checkSort(Point* in, int n){
    for (int idx = 0; idx < n; ++idx)
        fprintf(stderr, "%d %d\n", in[idx].x, in[idx].y);
}
// Cross product of (b-a) and (c-a): twice the signed triangle area,
// positive when c lies to the left of the directed line a->b.
int computeDistance(Point a, Point b, Point c){
    int abx = b.x - a.x, aby = b.y - a.y;
    int acx = c.x - a.x, acy = c.y - a.y;
    return abx * acy - aby * acx;
}
// Replace edge `old` (p->q) in the hull with the pair p->c and c->q,
// shifting later edges right by one.  Returns the new edge count.
// Assumes `old` is present and hull has room for one more edge.
int insert(Edge* hull, Point c, Edge old, int hullCounter){
//printHull(hull, hullCounter);
//fprintf(stderr, "Inserting %d,%d\n", c.x, c.y);
int insertIdx;
// Locate the edge being split.
for(insertIdx = 0; insertIdx < hullCounter; insertIdx++){
if (hull[insertIdx] == old) break;
}
// Make room for the second half of the split.
for(int i = hullCounter; i > insertIdx + 1; i--){
hull[i] = hull[i - 1];
}
Edge e;
e.q = old.q;
e.p = c;
old.q = c;
hull[insertIdx] = old;
hull[insertIdx + 1] = e;
//printHull(hull, hullCounter+1);
return ++hullCounter;
}
// Quickhull recursion: given the candidate set for edge a->b and the index of
// the farthest point, insert that point into the hull and recurse on the two
// sub-sets left of a->c and left of c->b.  Returns the updated edge count.
// NOTE(review): malloc((setCounter-2)*...) underflows to a huge size_t when
// setCounter == 1, and the recursive call can pass setMaxIdx == -1 when a
// sub-set is non-empty only via ties — confirm inputs make these unreachable.
int serialFindHull(Point* set, Point a, Point b, Edge* hull, int setCounter, int setMaxIdx, int hullCounter){
if (setCounter == 0){
return hullCounter;
}
// Farthest point from edge a->b becomes a hull vertex.
Point c = set[setMaxIdx];
Edge old;
old.p = a;
old.q = b;
hullCounter = insert(hull, c, old, hullCounter);
Point* s1;
Point* s2;
s1 = (Point*)malloc((setCounter-2)*sizeof(Point));
int s1counter = 0;
int s1MaxIdx = -1;
int s1MaxVal = 0;
s2 = (Point*)malloc((setCounter-2)*sizeof(Point));
int s2counter = 0;
int s2MaxIdx = -1;
int s2MaxVal = 0;
// Split the remaining points: left of a->c goes to s1, left of c->b to s2;
// points inside triangle (a,c,b) are discarded.  Track each side's farthest.
for (int i = 0; i < setCounter; i++){
Point p = set[i];
int res;
if ((res = computeDistance(a, c, p)) > 0){
s1[s1counter++] = p;
if (res > s1MaxVal){
s1MaxIdx = s1counter - 1;
s1MaxVal = res;
}
}
else if ((res = computeDistance(c, b, p)) > 0){
s2[s2counter++] = p;
if (res > s2MaxVal){
s2MaxIdx = s2counter - 1;
s2MaxVal = res;
}
}
}
hullCounter = serialFindHull(s1, a, c, hull, s1counter, s1MaxIdx, hullCounter);
hullCounter = serialFindHull(s2, c, b, hull, s2counter, s2MaxIdx, hullCounter);
free(s1);
free(s2);
return hullCounter;
}
// Quickhull top level over points sorted by (x, y): splits the set by the
// chord in[0] -> in[n-1] into upper (s1) and lower (s2) candidates, then
// recurses on each side.  Returns the number of hull edges written to out.
// NOTE(review): the split loop runs i < n-2 and so never classifies in[n-2];
// i < n-1 looks intended — confirm against a known hull.
int serialHull(Point* in, Edge* out, int n){
//memset(out, NULL, n);
int hullCounter = 0;
// Seed the hull with the chord between the extreme points.
Edge a = {in[0], in[n-1]};
a.p = in[0];
a.q = in[n-1];
out[hullCounter++] = a;
Point* s1;
Point* s2;
s1 = (Point*)malloc((n-2)*sizeof(Point));
int s1counter = 0;
int s1MaxIdx = 0;
int s1MaxVal = 0;
s2 = (Point*)malloc((n-2)*sizeof(Point));
int s2counter = 0;
int s2MaxIdx = 0;
int s2MaxVal = 0;
// Partition interior points by which side of the chord they fall on,
// tracking the farthest point of each side.
for (int i = 1; i < n-2; i++){
Point p = in[i];
int res;
if ((res = computeDistance(in[0], in[n-1], p)) > 0){
s1[s1counter++] = p;
if (res > s1MaxVal) {
s1MaxIdx = s1counter - 1;
s1MaxVal = res;
}
}
else if ((res = computeDistance(in[n-1], in[0], p)) > 0){
s2[s2counter++] = p;
if (res > s2MaxVal){
s2MaxIdx = s2counter - 1;
s2MaxVal = res;
}
}
}
hullCounter = serialFindHull(s1, in[0], in[n-1], out, s1counter, s1MaxIdx, hullCounter);
// Add the reverse chord before expanding the lower side.
a.p = in[n-1];
a.q = in[0];
out[hullCounter++] = a;
hullCounter = serialFindHull(s2, in[n-1], in[0], out, s2counter, s2MaxIdx, hullCounter);
free(s1);
free(s2);
return hullCounter;
}
// Run and time the serial quickhull: sort the points by (x, y), compute the
// hull into `out`, and report the elapsed time via CUDA events.
void doSerialQuickHull(Point* in, Edge* out, int n){
    cudaEvent_t start, stop;
    float time;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start, 0);
    quickSortIterative(in, 0, n-1);
    int hulls = serialHull(in, out, n);
    cudaEventRecord(stop, 0);
    // Fix: cudaEventElapsedTime requires the stop event to have completed;
    // without cudaEventSynchronize it can fail with cudaErrorNotReady.
    cudaEventSynchronize(stop);
    cudaEventElapsedTime(&time, start, stop);
    fprintf(stderr, "serial quickhull runtime: %f ms\n", time);
    //printHull(out, hulls);
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    (void)hulls;  // edge count only needed when printHull is enabled
}
// Reads "<n>\n<x y> x n" from the file named by argv[1] and runs the serial
// quickhull (the GPU path is stubbed out below).
// NOTE(review): argv[1]/fopen/fscanf results are unchecked, the input file is
// never fclosed, and h_input/h_output/h_ref are reclaimed only at exit.
int main(int argc, char** argv){
//get input passed as arg
FILE* input = fopen(argv[1], "r");
//get number of points
int numPoints;
fscanf(input, "%d", &numPoints);
size_t memSize = numPoints * sizeof(Point);
size_t outSize = numPoints * sizeof(Edge);
//host input/output
Point* h_input = (Point*)malloc(memSize);
Edge* h_output = (Edge*)malloc(outSize);
Edge* h_ref = (Edge*)malloc(outSize);
//initialize input
for (int i = 0; i < numPoints; i++){
Point p;
fscanf(input, "%d %d", &p.x, &p.y);
h_input[i] = p;
}
fprintf(stderr, "Read input\n");
doSerialQuickHull(h_input, h_ref, numPoints);
fprintf(stderr, "Quick Hull completed\n");
//device ptrs
//Point* d_input;
//Edge* d_output;
//allocate and copy to card
//allocate(d_input, h_input, d_output, numPoints);
//launch
//launchQuickHull(d_input, d_output, numPoints);
//copy back
//cudaMemcpy(h_output, d_output, numPoints*sizeof(Edge), cudaMemcpyDeviceToHost);
//deallocate card
//deallocate(d_input, d_output);
//print results
/*
for (int i = 0; i < numPoints; i++){
Edge e = h_output[i];
fprintf(stderr, "%d %d\n", e.x, e.y);
}
*/
return 0;
}
|
20,041 | #include <stdlib.h>
#include <time.h>
// Host-side element-wise sum: C[i] = A[i] + B[i] for i in [0, N).
void sumArraysOnHost(float* A, float* B, float* C, const int N){
    for (int i = 0; i < N; ++i)
        C[i] = A[i] + B[i];
}
// Fill ip[0..size) with pseudo-random floats in [0.0, 25.5].  The generator
// is re-seeded from the wall clock on every call, so runs are NOT
// reproducible.
void initialData(float* ip, int size){
    srand((unsigned int) time(NULL));
    for (int i = 0; i < size; ++i)
        ip[i] = (float)(rand() & 0xff) / 10.0f;
}
// Allocates matched host/device buffers, uploads the inputs, and performs the
// sum on the HOST only.
// NOTE(review): no kernel is ever launched — the device buffers are copied to
// and freed without use, and h_C is uploaded before it is computed.
int main(){
int nElem = 1024;
size_t nBytes = nElem * sizeof(float);
float *h_A, *h_B, *h_C;
h_A = (float*) malloc(nBytes);
h_B = (float*) malloc(nBytes);
h_C = (float*) malloc(nBytes);
float *d_A, *d_B, *d_C;
cudaMalloc(&d_A, nBytes);
cudaMalloc(&d_B, nBytes);
cudaMalloc(&d_C, nBytes);
initialData(h_A, nElem);
initialData(h_B, nElem);
cudaMemcpy(d_A, h_A, nBytes, cudaMemcpyHostToDevice);
cudaMemcpy(d_B, h_B, nBytes, cudaMemcpyHostToDevice);
// h_C is still uninitialized at this point.
cudaMemcpy(d_C, h_C, nBytes, cudaMemcpyHostToDevice);
sumArraysOnHost(h_A, h_B, h_C, nElem);
cudaFree(d_A);
cudaFree(d_B);
cudaFree(d_C);
free(h_A);
free(h_B);
free(h_C);
return 0;
}
|
20,042 | #include <stdio.h>
#include <cuda.h>
#include <time.h>
#include <iostream>
using namespace std;
#define H 1000
#define W 1000
// Naive H x W integer matrix multiply: one thread per output element, with a
// bounds guard so partially-filled edge blocks are safe.
__global__ void multMatCUDA(int *d_a,int *d_b,int *d_c){
int row = blockIdx.y * blockDim.y + threadIdx.y;
int col = blockIdx.x * blockDim.x + threadIdx.x;
if(row < H && col < W){
int result = 0;
for(int k = 0; k < W; k++){
result += d_a[row * W + k] * d_b[k * W + col];
}
d_c[row * W + col] = result;
}
}
// CPU reference: h_c = h_a * h_b for H x W row-major integer matrices.
void multMat(int *h_a, int *h_b, int *h_c){
    for (int row = 0; row < H; ++row){
        for (int col = 0; col < W; ++col){
            int acc = 0;
            for (int k = 0; k < W; ++k)
                acc += h_a[row * W + k] * h_b[k * W + col];
            h_c[row * W + col] = acc;
        }
    }
}
// True iff the two H x W matrices are element-wise identical.
bool compareTo(int *h_c,int *h_result){
    for (int idx = 0; idx < H * W; ++idx){
        if (h_c[idx] != h_result[idx])
            return false;
    }
    return true;
}
// Print the H x W matrix row by row, values separated by spaces.
void printMatrix(int *result){
    for (int i = 0; i < H; ++i){
        for (int j = 0; j < W; ++j)
            cout << result[i * W + j] << " ";
        cout << endl;
    }
}
// Compares a CPU and GPU H x W matrix multiply and checks the results match.
// NOTE(review): the GPU timing also includes the D2H copy, and no CUDA error
// checking is done.
int main(){
clock_t start, end;
double cpu_time_used, gpu_time_used;
float blockSize = 32;
int *h_a, *h_b, *h_c, *d_a, *d_b, *d_c, *h_result;
// Allocate host memory
h_a = (int*)malloc(sizeof(int)*H*W);
h_b = (int*)malloc(sizeof(int)*H*W);
h_c = (int*)malloc(sizeof(int)*H*W);
h_result = (int*)malloc(sizeof(int)*H*W);
// Initialize the matrices
for(int i = 0; i < H; i++){
for(int j=0; j < W; j++){
h_a[i*W+j] = i;
h_b[i*W+j] = i+1;
h_c[i*W+j] = 0;
}
}
start = clock();
// CPU reference multiply into h_c
multMat(h_a, h_b, h_c);
end = clock();
cpu_time_used = ((double) (end - start)) / CLOCKS_PER_SEC;
printf("Tiempo invertido CPU = %lf s\n", cpu_time_used);
// Allocate device memory
cudaMalloc(&d_a, sizeof(int)*H*W);
cudaMalloc(&d_b, sizeof(int)*H*W);
cudaMalloc(&d_c, sizeof(int)*H*W);
// Copy inputs from host to device
cudaMemcpy(d_a, h_a, H*W* sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(d_b, h_b, H*W* sizeof(int), cudaMemcpyHostToDevice);
// ceil() on a float blockSize rounds the grid up; the kernel's bounds
// guard handles the overhang.
dim3 dimBlock(blockSize, blockSize, 1);
dim3 dimGrid(ceil(W/blockSize),ceil(H/blockSize),1);
start = clock();
multMatCUDA<<< dimGrid, dimBlock >>>(d_a, d_b, d_c);
// Blocking copy also synchronizes with the kernel above.
cudaMemcpy(h_result, d_c, H*W*sizeof(int), cudaMemcpyDeviceToHost);
end = clock();
gpu_time_used = ((double) (end - start)) / CLOCKS_PER_SEC;
printf("Tiempo invertido GPU = %lf s\n", gpu_time_used);
if(compareTo(h_c, h_result)){
printf("Matrices Iguales");
}
else{
printf("Matrices Diferentes");
}
cudaFree(d_a);
cudaFree(d_b);
cudaFree(d_c);
free(h_a);
free(h_b);
free(h_c);
free(h_result);
return 0;
} |
20,043 | __global__ void drawAccum(unsigned int* accum, unsigned char* image_accum, int w_accum, int h_accum, int* max) {
// Render the accumulator into an RGBA image stored as four consecutive
// planes (R, G, B, A), each w_accum*h_accum bytes.
int x = blockDim.x * blockIdx.x + threadIdx.x;
int y = blockDim.y * blockIdx.y + threadIdx.y;
int tid = y * w_accum + x;
// Per-channel plane offsets.
int _r = w_accum * h_accum * 0;
int _g = w_accum * h_accum * 1;
int _b = w_accum * h_accum * 2;
int _a = w_accum * h_accum * 3;
if (x >= w_accum || y >= h_accum)
return;
int pixel_value = 0;  // NOTE(review): never used
// "!accum[tid] == 0" parses as "(!accum[tid]) == 0", i.e. accum[tid] != 0.
if (!accum[tid] == 0) {
// NOTE(review): the cast binds before "* 255", giving
// trunc((*max)/accum[tid]) * 255 — presumably a brightness ratio such as
// (accum[tid] * 255) / (*max) was intended; confirm against the caller.
image_accum[tid + _r] = (unsigned char)((*max) / accum[tid]) * 255;
image_accum[tid + _g] = (unsigned char)((*max) / accum[tid]) * 255;
image_accum[tid + _b] = (unsigned char)((*max) / accum[tid]) * 255;
image_accum[tid + _a] = 255;
}
// NOTE(review): accum is unsigned and this branch is only reached when
// accum[tid] == 0, so "== -1" can never match — dead code (and its body
// would divide by zero).
else if (accum[tid] == -1) {
image_accum[tid + _r] = 255;
image_accum[tid + _g] = (unsigned char)((*max) / accum[tid]) * 255;
image_accum[tid + _b] = (unsigned char)((*max) / accum[tid]) * 255;
image_accum[tid + _a] = 255;
}
else {
image_accum[tid + _r] = 0;
image_accum[tid + _g] = 0;
image_accum[tid + _b] = 0;
image_accum[tid + _a] = 255;
}
} |
20,044 | #include<cstdlib>
#include<stdio.h>
using namespace std;
__device__ void getGausspoints(double *gausspoints,double *weights,int nint)
{
//cout<<"nint "<<nint<<endl;
switch (nint)
{
case 1:
{
double xk[1]={0.000000000000000000000000};
double wk[1]={2.000000000000000000000000};
gausspoints[0]=xk[1];
weights[0]=wk[0];
}
break;
case 2:
{ double x[1] = {0.5773502691896257645091488};
double w[1] = {1.0000000000000000000000000};
for(int i=0;i<nint/2;i++)
{
gausspoints[2*i]=x[i];
gausspoints[2*i+1]=-x[i];
weights[2*i]=w[i];
weights[2*i+1]=w[i];
}}
break;
case 4:
{ double x[2] = {0.3399810435848562648026658,0.8611363115940525752239465};
double w[2] = {0.6521451548625461426269361,0.3478548451374538573730639};
for(int i=0;i<nint/2;i++)
{
gausspoints[2*i]=x[i];
gausspoints[2*i+1]=-x[i];
weights[2*i]=w[i];
weights[2*i+1]=w[i];
}
}
break;
case 6:
{ double x[3] = {0.2386191860831969086305017,0.6612093864662645136613996,0.9324695142031520278123016};
double w[3] = {0.4679139345726910473898703,0.3607615730481386075698335,0.1713244923791703450402961};
for(int i=0;i<nint/2;i++)
{
gausspoints[2*i]=x[i];
gausspoints[2*i+1]=-x[i];
weights[2*i]=w[i];
weights[2*i+1]=w[i];
}
}
break;
case 8:
{ double x[4] = {0.1834346424956498049394761,0.5255324099163289858177390,0.7966664774136267395915539,0.9602898564975362316835609};
double w[4] = {0.3626837833783619829651504,0.3137066458778872873379622,0.2223810344533744705443560,0.1012285362903762591525314};
for(int i=0;i<nint/2;i++)
{
gausspoints[2*i]=x[i];
gausspoints[2*i+1]=-x[i];
weights[2*i]=w[i];
weights[2*i+1]=w[i];
}
}
break;
case 10:
{ double x[5] = {0.1488743389816312108848260,0.4333953941292471907992659,0.6794095682990244062343274,0.8650633666889845107320967,0.9739065285171717200779640};
double w[5] = {0.2955242247147528701738930,0.2692667193099963550912269,0.2190863625159820439955349,0.1494513491505805931457763,0.0666713443086881375935688};
for(int i=0;i<nint/2;i++)
{
gausspoints[2*i]=x[i];
gausspoints[2*i+1]=-x[i];
weights[2*i]=w[i];
weights[2*i+1]=w[i];
}
}
break;
case 12:
{ double x[6] = {0.1252334085114689154724414,0.3678314989981801937526915,0.5873179542866174472967024,0.7699026741943046870368938,0.9041172563704748566784659,0.9815606342467192506905491};
double w[6] = {0.2491470458134027850005624,0.2334925365383548087608499,0.2031674267230659217490645,0.1600783285433462263346525,0.1069393259953184309602547,0.0471753363865118271946160};
for(int i=0;i<nint/2;i++)
{
gausspoints[2*i]=x[i];
gausspoints[2*i+1]=-x[i];
weights[2*i]=w[i];
weights[2*i+1]=w[i];
}
}
break;
case 3:
{ double x[2] = {0.0000000000000000000000000,0.7745966692414833770358531};
double w[2] = {0.8888888888888888888888889,0.5555555555555555555555556};
gausspoints[0]=x[0];weights[0]=w[0];
for(int i=0;i<(nint-1)/2;i++)
{
gausspoints[2*i+1]=x[i+1];
gausspoints[2*i+2]=-x[i+1];
weights[2*i+1]=w[i+1];
weights[2*i+2]=w[i+1];
}
}
break;
case 5:
{ double x[3] = {0.0000000000000000000000000,0.5384693101056830910363144,0.9061798459386639927976269};
double w[3] = {0.5688888888888888888888889,0.4786286704993664680412915,0.2369268850561890875142640};
gausspoints[0]=x[0];weights[0]=w[0];
for(int i=0;i<(nint-1)/2;i++)
{
gausspoints[2*i+1]=x[i+1];
gausspoints[2*i+2]=-x[i+1];
weights[2*i+1]=w[i+1];
weights[2*i+2]=w[i+1];
}
}
break;
case 7:
{ double x[4] = {0.0000000000000000000000000,0.4058451513773971669066064,0.7415311855993944398638648,0.9491079123427585245261897};
double w[4] = {0.4179591836734693877551020,0.3818300505051189449503698,0.2797053914892766679014678,0.1294849661688696932706114};
gausspoints[0]=x[0];weights[0]=w[0];
for(int i=0;i<(nint-1)/2;i++)
{
gausspoints[2*i+1]=x[i+1];
gausspoints[2*i+2]=-x[i+1];
weights[2*i+1]=w[i+1];
weights[2*i+2]=w[i+1];
}
}
break;
case 9:
{ double x[5] = {0.0000000000000000000000000,0.3242534234038089290385380,0.6133714327005903973087020,0.8360311073266357942994298,0.9681602395076260898355762};
double w[5] = {0.3302393550012597631645251,0.3123470770400028400686304,0.2606106964029354623187429,0.1806481606948574040584720,0.0812743883615744119718922};
gausspoints[0]=x[0];weights[0]=w[0];
for(int i=0;i<(nint-1)/2;i++)
{
gausspoints[2*i+1]=x[i+1];
gausspoints[2*i+2]=-x[i+1];
weights[2*i+1]=w[i+1];
weights[2*i+2]=w[i+1];
}
}
break;
case 11:
{ double x[6] = {0.0000000000000000000000000,0.2695431559523449723315320,0.5190961292068118159257257,0.7301520055740493240934163,0.8870625997680952990751578,0.9782286581460569928039380};
double w[6] = {0.2729250867779006307144835,0.2628045445102466621806889,0.2331937645919904799185237,0.1862902109277342514260976,0.1255803694649046246346943,0.0556685671161736664827537};
gausspoints[0]=x[0];weights[0]=w[0];
for(int i=0;i<(nint-1)/2;i++)
{
gausspoints[2*i+1]=x[i+1];
gausspoints[2*i+2]=-x[i+1];
weights[2*i+1]=w[i+1];
weights[2*i+2]=w[i+1];
}
}
break;
case 13:
{ double x[7] = {0.0000000000000000000000000,0.2304583159551347940655281,0.4484927510364468528779129,0.6423493394403402206439846,0.8015780907333099127942065,0.9175983992229779652065478,0.9841830547185881494728294};
double w[7] = {0.2325515532308739101945895,0.2262831802628972384120902,0.2078160475368885023125232,0.1781459807619457382800467,0.1388735102197872384636018,0.0921214998377284479144218,0.0404840047653158795200216};
gausspoints[0]=x[0];weights[0]=w[0];
for(int i=0;i<(nint-1)/2;i++)
{
gausspoints[2*i+1]=x[i+1];
gausspoints[2*i+2]=-x[i+1];
weights[2*i+1]=w[i+1];
weights[2*i+2]=w[i+1];
}
}
break;
default:
{printf("NINT not found\n");
// exit(0);
asm("trap;");
}
}
}
|
20,045 |
#define DIM 3
#define P_SCALE 30.0f
#define RIGHT P_SCALE
#define LEFT -P_SCALE
#define UP (2*P_SCALE)
#define DOWN 0
#define FRONT P_SCALE
#define BACK -P_SCALE
#define OFFSET 0.01
#define SPHERE_RADIUS 1.0
// Returns true when the two spheres overlap (or exactly touch).
// Works on squared distances so no square root is needed: the centres are
// closer than the sum of the two scaled radii iff the squared centre
// distance is <= the squared radius sum.
__device__ bool Sphere_SphereTest(float c1_x,float c1_y,float c1_z,float c1_size,
float c2_x,float c2_y,float c2_z,float c2_size)
{
    float dx = c1_x - c2_x;
    float dy = c1_y - c2_y;
    float dz = c1_z - c2_z;
    float distSq = dx*dx + dy*dy + dz*dz;
    float radiusSum = (c1_size + c2_size) * SPHERE_RADIUS;
    return distSq <= radiusSum * radiusSum;
}
// Forward declaration. BUG FIX: it previously declared sphere_size_d as
// float*, which mismatched the definition below and created a second,
// never-defined overload.
__global__ void launch_Sphere(float* sphere_poz_d,
                              float* sphere_speed_d,
                              float sphere_size_d,
                              int NR_SPHERES
                              );
// Kernel executed on the CUDA device: one thread updates one sphere.
// sphere_poz_d / sphere_speed_d hold NR_SPHERES packed xyz triplets
// (DIM == 3); sphere_size_d is the size factor shared by all spheres.
// The thread index comes from the launch's y dimension.
__global__ void launch_Sphere(float* sphere_poz_d,
                              float* sphere_speed_d,
                              float sphere_size_d,
                              int NR_SPHERES
                              )
{
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    // Guard: the grid may spawn more threads than there are spheres;
    // the original kernel had no bounds check and could write out of range.
    if (y >= (unsigned int)NR_SPHERES)
        return;
    // Bounce off the six walls of the axis-aligned box.
    if(sphere_poz_d[y*DIM] >= (RIGHT -OFFSET) || sphere_poz_d[y*DIM] <= (LEFT + OFFSET))
        sphere_speed_d[y*DIM] = -sphere_speed_d[y*DIM];
    if(sphere_poz_d[1+y*DIM] >= (UP - OFFSET) || sphere_poz_d[1+y*DIM] <= (DOWN + OFFSET))
        sphere_speed_d[1+y*DIM] = -sphere_speed_d[1+y*DIM];
    if(sphere_poz_d[2+y*DIM] >= (FRONT - OFFSET) || sphere_poz_d[2+y*DIM] <= (BACK + OFFSET))
        sphere_speed_d[2+y*DIM] = -sphere_speed_d[2+y*DIM];
    // Pairwise collision test against every later sphere. j walks the flat
    // xyz array in steps of DIM, so the bound is NR_SPHERES*DIM.
    // BUG FIX: the original compared j against NR_SPHERES, which skipped
    // every sphere whose base offset exceeded the sphere count.
    for(int j = (y+1)*DIM ; j < NR_SPHERES*DIM ; j=j+DIM)
    {
        if(Sphere_SphereTest(sphere_poz_d[y*DIM],sphere_poz_d[1+y*DIM],sphere_poz_d[2+y*DIM],sphere_size_d,
                             sphere_poz_d[j],sphere_poz_d[1+j],sphere_poz_d[2+j],sphere_size_d))
        {
            // Reflect the other sphere's velocity.
            // NOTE(review): sphere y's own velocity is intentionally left
            // unchanged here, matching the original behaviour - confirm.
            sphere_speed_d[j] = -sphere_speed_d[j];
            sphere_speed_d[1+j] = -sphere_speed_d[1+j];
            sphere_speed_d[2+j] = -sphere_speed_d[2+j];
        }
    }
    // Integrate the position by one step.
    sphere_poz_d[y*DIM] += sphere_speed_d[y*DIM];
    sphere_poz_d[1+y*DIM] += sphere_speed_d[1+y*DIM];
    sphere_poz_d[2+y*DIM] += sphere_speed_d[2+y*DIM];
}
// Host-side entry point with C linkage: launches the per-sphere update
// kernel with the supplied grid/block configuration and returns the launch
// status reported by the CUDA runtime.
extern "C"
cudaError_t launch_Sphere(float* sphere_poz_d,
                          float* sphere_speed_d,
                          float sphere_size_d,
                          int NR_SPHERES,
                          dim3 DIM_GRID,
                          dim3 DIM_BLOCK)
{
    launch_Sphere<<<DIM_GRID, DIM_BLOCK>>>(
        sphere_poz_d, sphere_speed_d, sphere_size_d, NR_SPHERES);
    return cudaGetLastError();
}
20,046 | #include "activ.hh"
#include "../ops/ops-builder.hh"
#include "../ops/vect-relu-leaky.hh"
#include "../ops/vect-relu.hh"
#include "../ops/vect-sigmoid.hh"
// Build a leaky-ReLU graph node for x through the singleton ops builder.
ops::Op* leaky_relu(ops::Op* x)
{
    return ops::OpsBuilder::instance().vect_relu_leaky(x);
}
// Build a ReLU graph node for x through the singleton ops builder.
ops::Op* relu(ops::Op* x)
{
    return ops::OpsBuilder::instance().vect_relu(x);
}
// Build a sigmoid graph node for x through the singleton ops builder.
ops::Op* sigmoid(ops::Op* x)
{
    return ops::OpsBuilder::instance().vect_sigmoid(x);
}
|
20,047 | /*
v2
for calculating maximum gain, (square root)*2 method is used to get number of parallel threads calculating maximum gain
better for large number of MODULES
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#define MAX_TH 1024
//GPU KERNEL-----------------------------------------------------------------------CALCULATE INITIAL GAIN
// One thread per element of set1 (the larger partition). For module
// set1_id[i] (and, when i < set2_size, also module set2_id[i]) the thread
// accumulates:
//   *_int_gain = connection weight to modules in the SAME set
//   *_ext_gain = connection weight to modules in the OTHER set
// and then derives the KL "D" value D = ext - int for both sets.
// NOTE(review): assumes set1_size >= set2_size (set1's row count drives the
// inner loop) and that all gain arrays were zero-initialised on the host
// before the launch - confirm against the caller.
__global__ void calc_init_gain(int *set1, int *set2, int *set1_int_gain, int *set1_ext_gain, int *set2_int_gain, int *set2_ext_gain, int *set1_d_gain, int *set2_d_gain, int *set1_id, int *set2_id, int set1_size, int set2_size, int MODULES)
{
int i = threadIdx.x + blockIdx.x * blockDim.x;
int j;
if(i < set1_size)
{
for(j=0; j<set1_size; j++)
{
// connections from set1 row j to this thread's set1 module
set1_int_gain[i] += set1[(j*MODULES) + set1_id[i]];
if (i< set2_size)
{
// connections from set1 rows to this thread's set2 module (external)
set2_ext_gain[i] += set1[(j*MODULES) + set2_id[i]];
if (j < set2_size)
set2_int_gain[i] += set2[(j*MODULES) + set2_id[i]];
}
if (j < set2_size)
{
// connections from set2 rows to this thread's set1 module (external)
set1_ext_gain[i] += set2[(j*MODULES) + set1_id[i]];
//*initial_cutset_size += set2[(j*MODULES) + set1_id[i]];
}
}
// D = external - internal gain (net benefit of moving the module across)
set1_d_gain[i] = set1_ext_gain[i] - set1_int_gain[i];
if (i < set2_size)
set2_d_gain[i] = set2_ext_gain[i] - set2_int_gain[i];
}
}
//GPU KERNEL-----------------------------------------------------------------------CALCULATE GAIN BENEFIT
// One thread per candidate swap pair (row from set1, col from set2) among
// the elements still unlocked after k completed swaps. Each thread writes a
// 3-int record {set1 index, set2 index, benefit} where
//   benefit = D1(row) + D2(col) - 2 * c(row, col)
// with c(row, col) the connection weight between the two modules.
__global__ void cal_gain_benefit(int *set1, int *set2_id, int *set1_d_gain, int *set2_d_gain, int *gain_benefit, int set1_size, int set2_size, int k, int MODULES)
{
    int row = blockDim.y * blockIdx.y + threadIdx.y;
    int col = blockDim.x * blockIdx.x + threadIdx.x;
    int rows = set1_size - k;
    int cols = set2_size - k;
    if (row < rows && col < cols)
    {
        int base = (row * cols + col) * 3;
        gain_benefit[base + 0] = row;
        gain_benefit[base + 1] = col;
        gain_benefit[base + 2] = set1_d_gain[row] + set2_d_gain[col]
                               - 2 * set1[row * MODULES + set2_id[col]];
    }
}
//GPU KERNEL-----------------------------------------------------------------------CALCULATE MAX GAIN TEMP
// First-stage (chunked) max reduction over the gain_benefit table.
//   t          - number of candidate-pair records (each record is 3 ints;
//                the benefit value lives at column 2)
//   temp       - records scanned per thread (host computes ceil(t/thread_num))
//   thread_num - number of partial maxima produced
// Each thread scans its slice [tid*temp, (tid+1)*temp), clipped to t, and
// writes its local maximum benefit and that record's index into
// max_g / max_g_location. Threads whose slice starts past t write the
// sentinel -9999 so the second-stage reduction ignores them.
// NOTE(review): the -9999 sentinel assumes no genuine benefit is ever below
// it - confirm against the possible gain range of the input graphs.
__global__ void cal_max_g_t(int *gain_benefit, int *max_g, int *max_g_location, int t, int temp, int thread_num)
{
int tid = threadIdx.x + blockIdx.x * blockDim.x;
if(tid < thread_num)
{
int i, loc;
// exclusive upper bound of this thread's slice
loc = (tid+1)*temp;
if((tid*temp) < t)
{
// seed the running max with the slice's first record
int my_max = gain_benefit[(tid*temp*3) + 2];
int my_max_location = tid*temp;
for(i=(tid*temp); i<loc; i++)
{
if(i < t)
{
if(gain_benefit[i*3 + 2] > my_max)
{
my_max = gain_benefit[i*3 + 2];
my_max_location = i;
}
}
}
max_g[tid] = my_max;
max_g_location[tid] = my_max_location;
}
else
{
// empty slice: emit the sentinel so it never wins the final reduction
max_g[tid] = -9999;
max_g_location[tid] = 0;
}
}
}
//GPU KERNEL-----------------------------------------------------------------------CALCULATE MAX GAIN
// Second-stage reduction, launched <<<1,1>>>: serially scans the per-chunk
// maxima produced by cal_max_g_t and stores the overall best benefit in
// *maxt and its record index in *max_location.
__global__ void cal_max_g(int *max_g, int *max_g_location, int *maxt, int *max_location, int thread_num)
{
    int best = max_g[0];
    int best_loc = max_g_location[0];
    for (int i = 1; i < thread_num; i++)
    {
        if (max_g[i] > best)
        {
            best = max_g[i];
            best_loc = max_g_location[i];
        }
    }
    *maxt = best;
    *max_location = best_loc;
}
//GPU KERNEL-----------------------------------------------------------------------UPDATE ITERATION
// Record the result of swap iteration k, launched <<<1,1>>>: the module ids
// of the chosen set1/set2 pair (looked up through the winning gain_benefit
// record) and the benefit that swap achieved.
__global__ void update_iteration(int *iteration, int *set1_id, int *set2_id, int *gain_benefit, int *max_location, int *maxt, int k)
{
    int best = (*max_location) * 3;
    int *rec = &iteration[k * 3];
    rec[0] = set1_id[gain_benefit[best + 0]];
    rec[1] = set2_id[gain_benefit[best + 1]];
    rec[2] = *maxt;
}
//GPU KERNEL-----------------------------------------------------------------------GAIN SWAP
// Lock in the chosen swap pair: exchange each chosen row with the last
// still-unlocked row of its own set (rows set1_size-1-k / set2_size-1-k),
// which parks the pair at the tails and removes it from later iterations.
// One thread per module column swaps that column of both connectivity rows;
// thread 0 additionally swaps the per-row bookkeeping (ids and D gains).
// NOTE(review): this permutes rows within each set rather than moving them
// across sets; the actual partition exchange is reconstructed on the host
// from the iteration log - confirm against the (commented) host-side swap.
__global__ void gain_swap(int *set1, int *set1_id, int *set1_d_gain, int *set2, int *set2_id, int *set2_d_gain, int *gain_benefit, int *max_location, int MODULES, int set1_size, int set2_size, int k)
{
int i = threadIdx.x + blockIdx.x * blockDim.x;
int temp;
if(i < MODULES)
{
// swap column i of the chosen set1 row with the tail row of set1
temp = set1[((gain_benefit[(*max_location)*3 + 0])*MODULES) + i];
set1[((gain_benefit[(*max_location)*3 + 0])*MODULES) + i] = set1[((set1_size-1-k)*MODULES) + i];
set1[((set1_size-1-k)*MODULES) + i] = temp;
// swap column i of the chosen set2 row with the tail row of set2
temp = set2[((gain_benefit[(*max_location)*3 + 1])*MODULES) + i];
set2[((gain_benefit[(*max_location)*3 + 1])*MODULES) + i] = set2[((set2_size-1-k)*MODULES) + i];
set2[((set2_size-1-k)*MODULES) + i] = temp;
if(i == 0)
{
//------------------------------------------SWAP SET1 PARAMETERS
temp = set1_id[gain_benefit[(*max_location)*3 + 0]];
set1_id[gain_benefit[(*max_location)*3 + 0]] = set1_id[set1_size-1-k];
set1_id[set1_size-1-k] = temp;
temp = set1_d_gain[gain_benefit[(*max_location)*3 + 0]];
set1_d_gain[gain_benefit[(*max_location)*3 + 0]] = set1_d_gain[set1_size-1-k];
set1_d_gain[set1_size-1-k] = temp;
//------------------------------------------SWAP SET2 PARAMETERS
temp = set2_id[gain_benefit[(*max_location)*3 + 1]];
set2_id[gain_benefit[(*max_location)*3 + 1]] = set2_id[set2_size-1-k];
set2_id[set2_size-1-k] = temp;
temp = set2_d_gain[gain_benefit[(*max_location)*3 + 1]];
set2_d_gain[gain_benefit[(*max_location)*3 + 1]] = set2_d_gain[set2_size-1-k];
set2_d_gain[set2_size-1-k] = temp;
}
}
}
//GPU KERNEL-----------------------------------------------------------------------GAIN UPDATE
// Incrementally refresh the D gains of every still-unlocked element after a
// swap: the rows just parked at the tails (set1_size-1-k / set2_size-1-k)
// are the swapped pair, so each remaining element's D changes by
// +/- 2x its connection weight to those two rows.
__global__ void gain_update(int *set1, int *set2, int *set1_id, int *set2_id, int *set1_d_gain, int *set2_d_gain, int set1_size, int set2_size, int MODULES, int k)
{
    int tid = threadIdx.x + blockIdx.x * blockDim.x;
    // Base offsets of the rows holding the just-swapped pair.
    int r1 = (set1_size - 1 - k) * MODULES;
    int r2 = (set2_size - 1 - k) * MODULES;
    if (tid < set1_size - k - 1)
    {
        set1_d_gain[tid] += 2 * set1[r1 + set1_id[tid]]
                          - 2 * set2[r2 + set1_id[tid]];
        if (tid < set2_size - k - 1)
            set2_d_gain[tid] += 2 * set2[r2 + set2_id[tid]]
                              - 2 * set1[r1 + set2_id[tid]];
    }
}
//-------------------------------------------------------------------------------MAIN
// Host driver: parses a .net netlist, splits the MODULES into two halves,
// and runs one pass of a Kernighan-Lin style min-cut bipartitioning on the
// GPU, reporting the initial and final cutset sizes.
// argv[1] = input netlist (4-line header, then one line per pin; lines
// whose trailing marker is '1' start a new net).
// NOTE(review): no CUDA API return code is checked anywhere in this driver.
int main(int argc, char *argv[])
{
int *set1_id;
int *set2_id;
int *set1_int_gain;
int *set1_ext_gain;
int *set2_int_gain;
int *set2_ext_gain;
int *set1_d_gain;
int *set2_d_gain;
int *set1;
int *set2;
int *gain_benefit;
int *iteration;
int *dev_set1_id;
int *dev_set2_id;
int *dev_set1_int_gain;
int *dev_set1_ext_gain;
int *dev_set2_int_gain;
int *dev_set2_ext_gain;
int *dev_set1_d_gain;
int *dev_set2_d_gain;
int *dev_set1;
int *dev_set2;
int *dev_gain_benefit;
int *dev_max_g;
int *dev_max_g_location;
int *dev_maxt;
int *dev_max_location;
int *dev_iteration;
FILE *fptr;
int i,j,k,t, temp, maxt, max_location, max_new;
int initial_cutset_size = 0;
int final_cutset_size = 0;
int len = 0;
int value = 0;
int location = 0;
int PINS = 0;
int NETS = 0;
int MODULES = 0;
int PADS = 0;
int CELLS = 0;
// NOTE(review): fixed capacity - a net with more than 1000 pins would
// overflow check_mat; confirm the input datasets stay below this bound.
int check_mat[1000];
int check_len = 0;
int set1_size = 0;
int set2_size = 0;
char c;
char s[20];
char IN_FILE[100];
//----------------------------------------------------------Command line arguement check
if(argc < 2)
{
printf("INPUT FORMAT: executable filename.net\n");
exit(0);
}
strcpy(IN_FILE, argv[1]);
//----------------------------------------------------------Open IN_FILE
fptr = fopen(IN_FILE, "r");
if (fptr == NULL)
{
printf("Cannot open file \n");
exit(0);
}
//----------------------------------------------------------Read #PINS #NETS #MODULES #PADS
// The four counts are parsed digit by digit up to each newline.
fgets(s, sizeof(s), fptr); //read first line
c = getc(fptr); //read next character
while(c != '\n') //read number of PINS
{
PINS = PINS*10 + ((int)c - 48);
c = getc(fptr);
}
c = getc(fptr); //read next character
while(c != '\n') //read number of NETS
{
NETS = NETS*10 + ((int)c - 48);
c = getc(fptr);
}
c = getc(fptr); //read next character
while(c != '\n') //read number of MODULES
{
MODULES = MODULES*10 + ((int)c - 48);
c = getc(fptr);
}
c = getc(fptr); //read next character
while(c != '\n') //read number of CELLS
{
CELLS = CELLS*10 + ((int)c - 48);
c = getc(fptr);
}
PADS = MODULES - CELLS; //calculate number of PADS
printf("**************DATASET DETAILS**************\n");
//printf("PINS: \t\t%d\n", PINS);
//printf("NETS: \t\t%d\n", NETS);
printf("MODULES: \t%d\n", MODULES);
printf("PADS: \t\t%d\n", PADS);
printf("CELLS: \t\t%d\n", CELLS);
//---------------------------------------------------------- HOST MEMORY ALLOCATION
//calculate the size of set1 and set2
set2_size = MODULES/2;
set1_size = MODULES - set2_size; //set1_size >= set2_size
//allocate memory for det id
set1_id = (int *)calloc(set1_size, sizeof(int));
set2_id = (int *)calloc(set2_size, sizeof(int));
//allocate memory for internal and external gain for both sets
set1_int_gain = (int *)calloc(set1_size, sizeof(int));
set1_ext_gain = (int *)calloc(set1_size, sizeof(int));
set2_int_gain = (int *)calloc(set2_size, sizeof(int));
set2_ext_gain = (int *)calloc(set2_size, sizeof(int));
set1_d_gain = (int *)calloc(set1_size, sizeof(int));
set2_d_gain = (int *)calloc(set2_size, sizeof(int));
//allocate memory for set1
// set1/set2 are row-major connectivity matrices: row r counts connections
// between that set element and every one of the MODULES columns.
set1 = (int *)calloc(set1_size*MODULES, sizeof(int));
for (i=0; i<set1_size; i++)
{
set1_id[i] = i;
}
//allocate memory for set2
set2 = (int *)calloc(set2_size*MODULES, sizeof(int));
for (i=0; i<set2_size; i++)
{
set2_id[i] = i + set1_size;
}
//allocate memory for gain benefit, 3 columns
//SET1_ID(array location ref) SET2_ID(array location ref) GAIN_BENEFIT
t = set1_size*set2_size;
gain_benefit = (int *)calloc(t*3, sizeof(int));
//allocate memory to store result of all iterations
//SET1_ID(0) SET2_ID(1) SWAP_BENEFIT(2)
iteration = (int *)calloc(set2_size*3, sizeof(int));
printf("********HOST MEMORY ALLOCATION COMPLETED********\n");
//START-----------------------------------------------------PARSER
// Each line names one pin; a trailing '1' marks the first pin of a new net.
// `value` keeps the current net's source module; every later pin on the
// same net adds a symmetric connection (value, location).
while (fgets(s, sizeof(s), fptr))
{
len = strlen(s);
if(argv[1][0] == 'i') //IBM DATASET HAS AN EXTRA SPACE
{
len -= 1;
}
if(s[len-2] == '1')
{
if(check_len > 1) //INNER CONNECTION FOR OLD NET
{
// add pairwise connections between all pins of the finished net
for(i=0; i<check_len; i++)
{
for(j=0; j<check_len; j++)
{
if(i != j)
{
if(check_mat[i] > (set1_size - 1))
set2[((check_mat[i] - set1_size)*MODULES) + check_mat[j]] += 1;
else
set1[(check_mat[i])*MODULES + check_mat[j]] += 1;
}
}
}
}
//NEW NET CONNECTION
value = 0;
check_len = 0;
// parse the module number; 'a'-prefixed names are cells (offset by PADS),
// others are pads (1-based in the file, hence the -1)
for(i=1; i<len-5; i++)
{
value = value*10 + ((int)s[i] - 48);
}
if(s[0] == 'a')
{
value += PADS;
}
else
{
value -= 1;
}
continue;
}
location = 0;
for(i=1; i<len-3; i++)
{
location = location*10 + ((int)s[i] - 48);
}
if(s[0] == 'a')
{
location += PADS;
}
else
{
location -= 1;
}
// symmetric connection between the net source (value) and this pin
if(value > (set1_size - 1))
set2[((value-set1_size)*MODULES) + location] += 1;
else
set1[(value)*MODULES + location] += 1;
if(location > (set1_size - 1))
set2[((location-set1_size)*MODULES) + value] += 1;
else
set1[(location)*MODULES + value] += 1;
check_mat[check_len] = location;
check_len++;
}
fclose(fptr);
//END-------------------------------------------------------PARSER
//START-----------------------------------------------------INITIAL GAIN CALCULATE
cudaMalloc((void**)&dev_set1_id, set1_size*sizeof(int));
cudaMalloc((void**)&dev_set2_id, set2_size*sizeof(int));
cudaMalloc((void**)&dev_set1_int_gain, set1_size*sizeof(int));
cudaMalloc((void**)&dev_set1_ext_gain, set1_size*sizeof(int));
cudaMalloc((void**)&dev_set2_int_gain, set2_size*sizeof(int));
cudaMalloc((void**)&dev_set2_ext_gain, set2_size*sizeof(int));
cudaMalloc((void**)&dev_set1_d_gain, set1_size*sizeof(int));
cudaMalloc((void**)&dev_set2_d_gain, set2_size*sizeof(int));
cudaMalloc((void**)&dev_set1, set1_size*MODULES*sizeof(int));
cudaMalloc((void**)&dev_set2, set2_size*MODULES*sizeof(int));
cudaMalloc((void**)&dev_gain_benefit, set1_size*set2_size*3*sizeof(int));
//CHANGE for v2
// v2: number of partial-max threads is 2*sqrt(#pairs) (see cal_max_g_t)
temp = ((int)sqrt(set1_size*set2_size))*2;
cudaMalloc((void**)&dev_max_g, temp*sizeof(int));
cudaMalloc((void**)&dev_max_g_location, temp*sizeof(int));
cudaMalloc((void**)&dev_maxt, sizeof(int));
cudaMalloc((void**)&dev_max_location, sizeof(int));
cudaMalloc((void**)&dev_iteration, set2_size*3*sizeof(int));
printf("*******DEVICE MEMORY ALLOCATION COMPLETED*******\n");
cudaMemcpy(dev_set1_id, set1_id, set1_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set2_id, set2_id, set2_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set1_int_gain, set1_int_gain, set1_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set1_ext_gain, set1_ext_gain, set1_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set2_int_gain, set2_int_gain, set2_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set2_ext_gain, set2_ext_gain, set2_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set1_d_gain, set1_d_gain, set1_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set2_d_gain, set2_d_gain, set2_size*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set1, set1, set1_size*MODULES*sizeof(int), cudaMemcpyHostToDevice);
cudaMemcpy(dev_set2, set2, set2_size*MODULES*sizeof(int), cudaMemcpyHostToDevice);
//START-----------------------------------------------------PRINT SET ID
/*
printf("-SET1--SET2-\n");
for(i=0; i<set1_size; i++)
{
if(i < set2_size)
{
printf("%d\t%d\n", set1_id[i], set2_id[i]);
}
else
{
printf("%d\n", set1_id[i]);
}
}
*/
//END-------------------------------------------------------PRINT SET ID
//START-----------------------------------------------------PRINT SET
/*
printf("----------SET1----------\n");
for(i=0; i<set1_size; i++)
{
for(j=0; j<MODULES; j++)
{
printf("%d ", set1[(i*MODULES)+j]);
}
printf("\n");
}
printf("----------SET2----------\n");
for(i=0; i<set2_size; i++)
{
for(j=0; j<MODULES; j++)
{
printf("%d ", set2[(i*MODULES)+j]);
}
printf("\n");
}
*/
//END-------------------------------------------------------PRINT SET
int block_num, thread_num;
//START-----------------------------------------------------INITIAL GAIN CALCULATE
block_num = ceil((double)set1_size/MAX_TH);
calc_init_gain<<<block_num,MAX_TH >>>(dev_set1, dev_set2, dev_set1_int_gain, dev_set1_ext_gain, dev_set2_int_gain, dev_set2_ext_gain, dev_set1_d_gain, dev_set2_d_gain, dev_set1_id, dev_set2_id, set1_size, set2_size, MODULES);
// the initial cutset size is the sum of set2's external gains
cudaMemcpy(set2_ext_gain, dev_set2_ext_gain, set2_size*sizeof(int), cudaMemcpyDeviceToHost);
for(i=0; i<set2_size; i++)
{
initial_cutset_size += set2_ext_gain[i];
}
//END-------------------------------------------------------INITIAL GAIN CALCULATE
// One KL pass: swap the best pair each iteration and lock it at the tails.
for(k=0; k<set2_size; k++)
{
//START-----------------------------------------------------CALCULATE GAIN BENEFIT
dim3 blocksize(32, 32, 1);
dim3 gridsize (ceil((double)(set2_size-k)/32), ceil((double)(set1_size-k)/32), 1);
cal_gain_benefit<<<gridsize,blocksize>>>(dev_set1, dev_set2_id, dev_set1_d_gain, dev_set2_d_gain, dev_gain_benefit, set1_size, set2_size, k, MODULES);
//END-------------------------------------------------------CALCULATE GAIN BENEFIT
//START-----------------------------------------------------CALCULATE MAXIMUM GAIN SWAP
t = (set1_size-k)*(set2_size-k);
//CHANGE for v2
thread_num = ((int) sqrt(t))*2;
block_num = ceil((double)thread_num/MAX_TH);
temp = ceil((double)t/thread_num);
//printf("thread_num: %d\n", thread_num);
//printf("block_num: %d\n", block_num);
//printf("temp: %d\n", temp);
cal_max_g_t<<<block_num,MAX_TH>>>(dev_gain_benefit, dev_max_g, dev_max_g_location, t, temp, thread_num);
cal_max_g <<<1,1>>>(dev_max_g, dev_max_g_location, dev_maxt, dev_max_location, thread_num);
update_iteration <<<1,1>>>(dev_iteration, dev_set1_id, dev_set2_id, dev_gain_benefit, dev_max_location, dev_maxt, k);
//END-------------------------------------------------------CALCULATE MAXIMUM GAIN SWAP
//START-----------------------------------------------------SWAP
block_num = ceil((double)MODULES/MAX_TH);
gain_swap <<<block_num,MAX_TH>>>(dev_set1, dev_set1_id, dev_set1_d_gain, dev_set2, dev_set2_id, dev_set2_d_gain, dev_gain_benefit, dev_max_location, MODULES, set1_size, set2_size, k);
//END-------------------------------------------------------SWAP
//START-----------------------------------------------------UPDATE GAIN (ONLY D)
if(k+1 < set2_size)
{
block_num = ceil((double)(set1_size-k-1)/MAX_TH);
gain_update<<<block_num,MAX_TH>>>(dev_set1, dev_set2, dev_set1_id, dev_set2_id, dev_set1_d_gain, dev_set2_d_gain, set1_size, set2_size, MODULES, k);
}
//END-------------------------------------------------------UPDATE GAIN (ONLY D)
}
//----------------------------------------------------------CALCULATE MAX CUMULATIVE GAIN
// Find the prefix of swaps with the highest cumulative gain (classic KL:
// only the first max_location+1 swaps would actually be committed).
cudaMemcpy(iteration, dev_iteration, set2_size*3*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set1_d_gain, dev_set1_d_gain, set1_size*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set2_d_gain, dev_set2_d_gain, set2_size*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set1_id, dev_set1_id, set1_size*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set2_id, dev_set2_id, set2_size*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set1, dev_set1, set1_size*MODULES*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set2, dev_set2, set2_size*MODULES*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set1_int_gain, dev_set1_int_gain, set1_size*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set1_ext_gain, dev_set1_ext_gain, set1_size*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set2_int_gain, dev_set2_int_gain, set2_size*sizeof(int), cudaMemcpyDeviceToHost);
//cudaMemcpy(set2_ext_gain, dev_set2_ext_gain, set2_size*sizeof(int), cudaMemcpyDeviceToHost);
maxt = 0;
max_new = 0;
max_location = 0;
for(i=0; i<set2_size; i++)
{
max_new += iteration[(i*3) + 2];
if(max_new > maxt)
{
maxt = max_new;
max_location = i;
}
}
//----------------------------------------------------------SWAP TO GET UPDATED SETS
/*
int set1t, set2t;
if(maxt > 0)
{
for(i=0; i<=max_location; i++)
{
for(j=0; j<set1_size; j++)
{
if(iteration[i*3 + 0] == set1_id[j])
set1t = j;
if(j < set2_size)
if(iteration[i*3 + 1] == set2_id[j])
set2t = j;
}
temp = set1_id[set1t];
set1_id[set1t] = set2_id[set2t];
set2_id[set2t] = temp;
for(j=0; j<MODULES; j++)
{
temp = set1[(set1t*MODULES)+j];
set1[(set1t*MODULES)+j] = set2[(set2t*MODULES)+j];
set2[(set2t*MODULES)+j] = temp;
}
}
}
*/
final_cutset_size = initial_cutset_size - maxt;
printf("****************FINAL RESULT***************\n");
printf("Max Cumulative Gain: \t\t%d\n", maxt);
printf("Max Cumulative Gain Iteration: \t%d\n", max_location+1);
printf("Initial Cutset Size: \t\t%d\n", initial_cutset_size);
printf("Final Cutset Size: \t\t%d\n", final_cutset_size);
//START-----------------------------------------------------PRINT SET ID
/*
printf("------------\n");
printf("-SET1--SET2-\n");
for(i=0; i<set1_size; i++)
{
if(i < set2_size)
{
printf("%d\t%d\n", set1_id[i], set2_id[i]);
}
else
{
printf("%d\n", set1_id[i]);
}
}
*/
//END-------------------------------------------------------PRINT SET ID
//START-----------------------------------------------------PRINT SET
/*
printf("----------SET1----------\n");
for(i=0; i<set1_size; i++)
{
for(j=0; j<MODULES; j++)
{
printf("%d ", set1[(i*MODULES)+j]);
}
printf("\n");
}
printf("----------SET2----------\n");
for(i=0; i<set2_size; i++)
{
for(j=0; j<MODULES; j++)
{
printf("%d ", set2[(i*MODULES)+j]);
}
printf("\n");
}
*/
//END-------------------------------------------------------PRINT SET
//START-----------------------------------------------------FREE MEMORY
free(set1);
free(set2);
free(set1_id);
free(set2_id);
free(set1_int_gain);
free(set1_ext_gain);
free(set2_int_gain);
free(set2_ext_gain);
free(set1_d_gain);
free(set2_d_gain);
free(gain_benefit);
free(iteration);
cudaFree(dev_set1_id);
cudaFree(dev_set2_id);
cudaFree(dev_set1_int_gain);
cudaFree(dev_set1_ext_gain);
cudaFree(dev_set2_int_gain);
cudaFree(dev_set2_ext_gain);
cudaFree(dev_set1_d_gain);
cudaFree(dev_set2_d_gain);
cudaFree(dev_set1);
cudaFree(dev_set2);
cudaFree(dev_gain_benefit);
cudaFree(dev_max_g);
cudaFree(dev_max_g_location);
cudaFree(dev_maxt);
cudaFree(dev_max_location);
cudaFree(dev_iteration);
return 0;
}
|
20,048 | #include <stdio.h>
#include <time.h>
#include <unistd.h>
#include <stdlib.h>
#include <math.h>
using namespace std;
#define TYPE float
// 3D 8th-order (radius 4) finite-difference wave-propagation step: a 16x16
// tile in x/y with a register pipeline marching one z-slice per iteration.
// g_input  : wavefield at time t-1
// g_output : on entry holds the t-2 wavefield; overwritten with time t
// g_vsq    : per-cell velocity-squared scaling field
// Preconditions (not checked here): the volume already carries enlarged
// halo borders (per the file comment below the kernel), and dimz >= 2*radius.
// NOTE(review): the shared-memory halo loads use the compile-time BDIMX/BDIMY
// (16), so the launch must use 16x16 blocks and dimx/dimy must be multiples
// of the tile size - the host driver's defaults (8x8) do not satisfy this;
// confirm intended launch configuration.
__global__ void fwd_3D_16x16_order8( TYPE *g_input, TYPE *g_output, TYPE *g_vsq, /* output initially contains (t-2) step*/const int dimx, const int dimy, const int dimz)
{
#define BDIMX 16 // tile (and threadblock) size in x
#define BDIMY 16 // tile (and threadblock) size in y
#define radius 4 // half of the order in space (k/2)
// Symmetric FD coefficients; index 0 is the centre tap.
float c_coeff[5] = {1.0 , 0.8 , 0.6 , 0.4, 0.2};
// Tile plus a radius-wide halo on all four sides.
__shared__ float s_data[BDIMY+2*radius][BDIMX+2*radius];
int ix = blockIdx.x*blockDim.x + threadIdx.x;
int iy = blockIdx.y*blockDim.y + threadIdx.y;
int in_idx = iy*dimx + ix; // index for reading input
int out_idx = 0; // index for writing output
int stride = dimx*dimy; // distance between 2D slices (in elements)
// Register pipeline holding this column's z-neighbourhood.
float infront1, infront2, infront3, infront4; // input "in front of" the current slice
float behind1, behind2, behind3, behind4; // input "behind" the current slice
float current; // input value in the current slice
int tx = threadIdx.x + radius; // thread's x-index into the shared tile (halo-adjusted)
int ty = threadIdx.y + radius; // thread's y-index into the shared tile (halo-adjusted)
// fill the "in-front" and "behind" data
// Prime the pipeline with the first 2*radius slices (z = 0..7); out_idx
// starts at slice radius-1 and is advanced before each write.
behind3 = g_input[in_idx]; in_idx += stride;
behind2 = g_input[in_idx]; in_idx += stride;
behind1 = g_input[in_idx]; in_idx += stride;
current = g_input[in_idx]; out_idx = in_idx; in_idx += stride;
infront1 = g_input[in_idx]; in_idx += stride;
infront2 = g_input[in_idx]; in_idx += stride;
infront3 = g_input[in_idx]; in_idx += stride;
infront4 = g_input[in_idx]; in_idx += stride;
// Interior slices only: z = radius .. dimz-radius-1.
for(int i=radius; i<dimz-radius; i++)
{
//////////////////////////////////////////
// advance the slice (move the thread-front)
behind4 = behind3;
behind3 = behind2;
behind2 = behind1;
behind1 = current;
current = infront1;
infront1 = infront2;
infront2 = infront3;
infront3 = infront4;
infront4 = g_input[in_idx];
in_idx += stride;
out_idx += stride;
// Barrier before refilling smem: protects the previous iteration's reads.
__syncthreads();
/////////////////////////////////////////
// update the data slice in smem
if(threadIdx.y<radius) // halo above/below
{
s_data[threadIdx.y][tx] = g_input[out_idx-radius*dimx];
s_data[threadIdx.y+BDIMY+radius][tx] = g_input[out_idx+BDIMY*dimx];
}
if(threadIdx.x<radius) // halo left/right
{
s_data[ty][threadIdx.x] = g_input[out_idx-radius];
s_data[ty][threadIdx.x+BDIMX+radius] = g_input[out_idx+BDIMX];
}
// update the slice in smem
s_data[ty][tx] = current;
__syncthreads();
/////////////////////////////////////////
// compute the output value
// Leapfrog time update: u(t) = 2*u(t-1) - u(t-2) + vsq * laplacian.
float temp = 2.f*current - g_output[out_idx];
float div = c_coeff[0] * current; // NOTE(review): original comment said c_coeff should be sized by the radius
// Accumulate the 6 axis neighbours at each of the 4 tap distances:
// z taps come from the register pipeline, x/y taps from shared memory.
div += c_coeff[1]*( infront1 + behind1
+ s_data[ty-1][tx] + s_data[ty+1][tx] + s_data[ty][tx-1] + s_data[ty][tx+1] );
div += c_coeff[2]*( infront2 + behind2 + s_data[ty-2][tx] + s_data[ty+2][tx] + s_data[ty][tx-2] + s_data[ty][tx+2] );
div += c_coeff[3]*( infront3 + behind3 + s_data[ty-3][tx] + s_data[ty+3][tx] + s_data[ty][tx-3] + s_data[ty][tx+3] );
div += c_coeff[4]*( infront4 + behind4 + s_data[ty-4][tx] + s_data[ty+4][tx] + s_data[ty][tx-4] + s_data[ty][tx+4] );
g_output[out_idx] = temp + div*g_vsq[out_idx];
}
}
//Tamanho do radius não é variável
//A entrada de dados ja vem com as bordas aumentadas
/*
*argumentos
*1 - n_elementos
*2 - threads por bloco
*3 - n_blocos
*4 - print
*/
/*
 * Host driver for the 8th-order 3D stencil kernel.
 * argv: [1] X size, [2] Y size, [3] Z size, [4] k (parsed for argument
 * compatibility but unused: the kernel's radius is hard-coded at 4).
 * Allocates the volumes, runs one kernel step, prints the timings.
 * Fixes vs. the original: Ticks[0] was read but never initialised (the
 * printed wall-clock time was garbage), and h_g_vsq / d_g_vsq leaked.
 */
int main(int argc, char* argv[]) {
    float *h_e, *h_r, *h_r_test, *h_g_vsq;   // host buffers
    float *d_e, *d_r, *d_g_vsq;              // device buffers
    int size, tam;
    clock_t Ticks[2];

    // Default problem and launch configuration.
    int X = 8;
    int Y = 8;
    int BX = 8;
    int BY = 8;
    int Z = 4;
    int k = 2;
    int GX = 1;
    int GY = 1;
    if (argc > 1) {
        X = atoi(argv[1]);
        BX = X;
    }
    if (argc > 2) {
        Y = atoi(argv[2]);
        BY = Y;
    }
    if (argc > 3)
        Z = atoi(argv[3]);
    if (argc > 4)
        k = atoi(argv[4]);
    (void)k;   // kept so the argv positions stay stable; the kernel ignores it
    // Cap block dimensions at 32 and grow the grid instead.
    // NOTE(review): the kernel's shared-memory tiling assumes 16x16 blocks;
    // these defaults/caps do not guarantee that - confirm intended sizes.
    if (X > 32) {
        GX = ceil((float)X / (float)32);
        BX = 32;
    }
    if (Y > 32) {
        GY = ceil((float)Y / (float)32);
        BY = 32;
    }
    dim3 block_dim(BX, BY, 1);
    dim3 grid_dim(GX, GY, 1);
    size = X * Y * Z * sizeof(float);
    tam = X * Y * Z;
    h_e = (float*) malloc(size);
    h_r = (float*) malloc(size);
    h_r_test = (float*) malloc(size);
    h_g_vsq = (float*) malloc(size);
    cudaMalloc(&d_e, size);
    cudaMalloc(&d_r, size);
    cudaMalloc(&d_g_vsq, size);
    // Random input field and velocity-squared field; output starts at zero.
    for (int i = 0; i < tam; i++) {
        h_g_vsq[i] = (float)(rand()%100)/100.0;
        h_e[i] = (float)(rand()%9000)/100.0;
        h_r[i] = 0;
    }
    /* Copy vectors from host memory to device memory */
    cudaMemcpy(d_g_vsq, h_g_vsq, size, cudaMemcpyHostToDevice);
    cudaMemcpy(d_e, h_e, size, cudaMemcpyHostToDevice);
    cudaMemcpy(d_r, h_r, size, cudaMemcpyHostToDevice);
    // BUG FIX: Ticks[0] was never set, so the CPU-clock elapsed time below
    // was computed from an uninitialised value.
    Ticks[0] = clock();
    cudaEvent_t start, stop;
    cudaEventCreate(&start);
    cudaEventCreate(&stop);
    cudaEventRecord(start, 0);
    /******************
    *** Kernel Call ***
    *******************/
    fwd_3D_16x16_order8<<<grid_dim, block_dim>>>(d_e, d_r, d_g_vsq, X, Y, Z);
    cudaError_t err = cudaGetLastError();
    if (err != cudaSuccess)
    {
        fprintf(stderr, "Failed to launch _3Dstencil_global kernel (error code %s)!\n", cudaGetErrorString(err));
    }
    /******************
    *** Kernel Call ***
    *******************/
    cudaDeviceSynchronize();
    cudaEventRecord(stop, 0);
    cudaEventSynchronize(stop);
    float elapsedTime;
    cudaEventElapsedTime(&elapsedTime, start, stop);
    cudaEventDestroy(start);
    cudaEventDestroy(stop);
    Ticks[1] = clock();
    double Tempo = (Ticks[1] - Ticks[0]) * 1000.0 / CLOCKS_PER_SEC;
    printf("X %d || Y %d \nBX %d || BY %d\nGX %d || GY %d\nZ %d \n",X,Y,BX,BY,GX,GY,Z);
    printf ("[%d,%.5f,%.5f],\n", tam,elapsedTime,Tempo/1000.0);
    cudaMemcpy(h_r, d_r, size, cudaMemcpyDeviceToHost);
    // Release every buffer (the original leaked d_g_vsq and h_g_vsq).
    cudaFree(d_e);
    cudaFree(d_r);
    cudaFree(d_g_vsq);
    std::free(h_e);
    std::free(h_r);
    std::free(h_r_test);
    std::free(h_g_vsq);
    return 0;
} /* main */
|
20,049 | /*
* Ejercicio 3 Práctica 4: CUDA
* Mariana Hernández
* Alan Córdova
*/
#include <stdio.h>
#define STRIDE 32
#define OFFSET 0
#define GROUP_SIZE 512
// tamanio
#define n 2000
#define m 2000
/* Utilidad para checar errores de CUDA */
void checkCUDAError(const char*);
// Kernel that executes on the CUDA device
// Naive square matrix multiply: res = a * b, all three w x w and row-major.
// One thread computes one output element via a dot product of a's row and
// b's column.
// BUG FIX: the bounds guard was commented out, so when the grid overshot w
// (it rarely divides evenly) out-of-range threads read and wrote past the
// ends of the buffers.
__global__ void matrix_mult(float *a, float *b, float *res, int w)
{
    int row = blockIdx.y*blockDim.y + threadIdx.y;
    int col = blockIdx.x*blockDim.x + threadIdx.x;
    if (row >= w || col >= w)
        return;
    // dot product of row of a and column of b
    float sum = 0;
    for (int i = 0; i < w; ++i)
    {
        sum += a[row * w + i] * b[i * w + col];
    }
    res[row*w + col] = sum;
}
// Naive square matrix multiply: C = A * B, all three N x N and row-major.
// One thread computes one element of C.
// BUG FIX: the store to C was outside the bounds check, so every thread
// beyond the N x N range performed an out-of-bounds write (and threads that
// skipped the loop wrote an uninitialised-looking 0 past the buffer).
__global__ void matrixMultiplicationKernel(float* A, float* B, float* C, int N) {
    int ROW = blockIdx.y*blockDim.y+threadIdx.y;
    int COL = blockIdx.x*blockDim.x+threadIdx.x;
    float tmpSum = 0;
    if (ROW < N && COL < N) {
        // each thread computes one element of the block sub-matrix
        for (int i = 0; i < N; i++) {
            tmpSum += A[ROW * N + i] * B[i * N + COL];
        }
        C[ROW * N + COL] = tmpSum;
    }
}
// main routine that executes on the host
int main(void)
{
float *a_h, *a_d; // Pointer to host & device arrays
float *mat1_h, *mat2_h, *mat_res_h, *mat1_d, *mat2_d, *mat_res_d;
const int N = 1<<10; // Make a big array with 2**N elements
size_t size = N * sizeof(float);
const int n_mat = n * m;
size_t sz = n_mat * sizeof(float);
/* Auxiliares para medir tiempos */
cudaEvent_t start, stop;
float time;
a_h = (float *)malloc(size); // Allocate array on host
cudaMalloc((void **) &a_d, size); // Allocate array on device
mat1_h = (float *)malloc(sz); // Allocate array on host
mat2_h = (float *)malloc(sz); // Allocate array on host
mat_res_h = (float *)malloc(sz); // Allocate array on host
cudaMalloc((void **) &mat1_d, sz); // Allocate array on device
cudaMalloc((void **) &mat2_d, sz); // Allocate array on device
cudaMalloc((void **) &mat_res_d, sz); // Allocate array on device
// Initialize host array and copy it to CUDA device
for (int i=0; i<N; i++){
a_h[i] = (float)i;
}
for (int i = 0; i < n_mat; ++i){
mat1_h[i] = i % 8;
mat2_h[i] = i % 8;
mat_res_h[i] = 0;
}
// printf("mats:\n");
// for (int i = 0; i < n_mat; ++i){
//
// if(i%n == 0)
// printf("\n");
// printf("%.2f ", mat1_h[i] );
// }
cudaMemcpy(mat1_d, mat1_h, sz, cudaMemcpyHostToDevice);
cudaMemcpy(mat2_d, mat2_h, sz, cudaMemcpyHostToDevice);
cudaMemcpy(mat_res_d, mat_res_h, sz, cudaMemcpyHostToDevice);
checkCUDAError("memcpy");
// Create timer for timing CUDA calculation
//PPunsigned int timer = 0;
//PPcutCreateTimer( &timer );
cudaEventCreate(&start);
cudaEventCreate(&stop);
// Set number of threads and blocks
// int n_threads_per_block = 128;//1<<9; // 512 threads per block
// int n_blocks = 256;//1<<10; // 1024 blocks
// Do calculation on device
cudaEventRecord(start,0);
//matrix_mult <<< n_blocks, n_threads_per_block >>> (mat1_d, mat2_d, mat_res_d, n);
// dim3 threadsPerBlock(1, 1024);
// dim3 blocksPerGrid(1, 1);
// if (m*n > 512){
// threadsPerBlock.x = 512;
// threadsPerBlock.y = 512;
// blocksPerGrid.x = ceil(double(m)/double(threadsPerBlock.x));
// blocksPerGrid.y = ceil(double(n)/double(threadsPerBlock.y));
// }
//invocamos kernel
int blockSize = 256;
int numBlocks = (n*m + blockSize - 1) / blockSize;
matrixMultiplicationKernel<<<numBlocks,blockSize>>>(mat1_d, mat2_d, mat_res_d, n);
cudaDeviceSynchronize(); // Wait for matrix_mult to finish on CUDA
checkCUDAError("kernel invocation");
// Retrieve result from device and store it in host array
cudaMemcpy(mat1_h, mat1_d, sz, cudaMemcpyDeviceToHost);
cudaMemcpy(mat2_h, mat2_d, sz, cudaMemcpyDeviceToHost);
cudaMemcpy(mat_res_h, mat_res_d, sz, cudaMemcpyDeviceToHost);
checkCUDAError("memcpy");
cudaEventRecord(stop,0);
cudaEventSynchronize(stop);
cudaEventElapsedTime( &time, start, stop );
// Print some of the results
//for (int i=0; i<N; i+=N/50) printf("%d %f\n", i, a_h[i]);
// Imprime tiempo de ejecución
printf("\n\nTIEMPO DE EJECUCIÓN: %f mSeg\n\n", time);
// printf("res:\n");
// for (int i = 0; i < n_mat; ++i)
// {
// if(i%n == 0)
// printf("\n");
// printf("%.2f ", mat_res_h[i] );
// }
cudaEventDestroy( start );
cudaEventDestroy( stop );
free(mat1_h);
free(mat2_h);
free(mat_res_h);
cudaFree(mat1_d);
cudaFree(mat2_d);
cudaFree(mat_res_d);
}
/* Utility function to check for and report CUDA errors */
/* Utility function to check for and report CUDA errors.
 * Prints `msg` together with the CUDA error string and aborts the
 * program when the last CUDA call did not succeed. */
void checkCUDAError(const char *msg)
{
    cudaError_t status = cudaGetLastError();
    if (cudaSuccess == status)
        return;
    fprintf(stderr, "Cuda error: %s: %s.\n", msg, cudaGetErrorString(status));
    exit(EXIT_FAILURE);
}
|
20,050 | #include <stdio.h>
#include <math.h>
#include <unistd.h>
#include <cuda_runtime_api.h>
#include <time.h>
#include <errno.h>
/*****************************************************************************
*
*
*
*
*
* Compile with:
* nvcc -o cuda_lr cuda_linear.cu
*
* Dr Kevan Buckley, University of Wolverhampton, 2018
****************************************************************************/
typedef struct point_t {
double x;
double y;
} point_t;
int n_data = 1000;
__device__ int d_n_data = 1000;
point_t data[] = {
{73.36,102.82},{66.06,118.01},{82.76,132.16},{65.89,90.31},
{72.17,103.11},{73.69,111.48},{77.36,112.67},{71.17,102.00},
{79.02,119.87},{79.18,117.60},{68.11,101.62},{12.26,49.42},
{37.47,65.48},{88.56,132.59},{82.09,119.57},{ 8.05,20.24},
{50.29,85.08},{50.10,86.40},{27.43,38.50},{64.60,105.00},
{23.45,44.54},{ 5.62,28.10},{36.72,74.88},{67.84,127.36},
{45.69,54.43},{ 5.50,49.81},{26.39,50.67},{ 7.93,31.84},
{97.77,142.30},{52.02,92.98},{23.17,57.12},{39.68,66.32},
{85.14,127.72},{48.65,78.48},{ 9.81,44.52},{26.31,66.68},
{24.51,52.53},{21.22,53.17},{64.74,112.23},{97.21,144.28},
{20.93,37.25},{97.32,125.44},{ 4.07, 9.60},{89.43,153.50},
{ 3.02, 8.75},{33.27,51.92},{65.04,87.96},{92.91,134.17},
{44.35,55.17},{79.34,101.47},{13.48,52.08},{54.10,90.03},
{96.28,133.60},{49.96,67.53},{49.15,78.44},{20.10,51.23},
{29.80,84.89},{94.12,143.10},{29.50,48.25},{15.02,26.26},
{19.23,65.86},{60.55,101.77},{20.35,36.11},{76.77,105.08},
{ 1.51, 2.44},{39.56,65.53},{58.31,95.07},{24.68,44.45},
{75.70,118.02},{73.23,98.11},{73.25,103.83},{42.73,71.01},
{58.34,94.50},{38.20,77.22},{51.48,72.97},{40.47,60.03},
{47.46,81.55},{ 4.20,40.39},{70.02,109.63},{87.89,127.68},
{12.71,56.00},{ 4.68,17.64},{80.10,123.60},{25.52,65.22},
{77.99,106.45},{24.79,67.24},{68.15,111.07},{96.66,129.97},
{46.15,69.20},{ 0.05,18.56},{99.40,126.45},{43.51,48.33},
{61.42,112.23},{94.45,131.21},{14.40,14.40},{11.08,33.78},
{31.12,72.62},{85.25,127.99},{47.50,65.42},{35.13,60.48},
{93.19,130.01},{87.56,132.17},{99.62,156.43},{37.29,53.50},
{84.52,128.35},{38.44,69.97},{29.92,49.58},{66.92,98.94},
{88.16,129.28},{74.42,134.04},{23.29,32.81},{76.28,128.97},
{ 7.86,28.02},{53.12,89.02},{86.07,120.97},{95.27,143.74},
{90.99,126.63},{83.24,113.46},{25.62,39.20},{12.74,41.73},
{74.90,112.16},{62.01,84.85},{79.60,115.12},{44.20,75.19},
{13.89,49.88},{58.17,82.05},{33.49,60.49},{82.06,142.57},
{57.99,81.95},{37.88,71.52},{84.26,113.63},{ 0.34,17.10},
{ 7.36,39.82},{53.61,80.96},{76.94,113.90},{58.50,104.85},
{71.31,110.11},{27.32,47.06},{ 3.97,40.07},{ 2.46,23.53},
{18.57,34.51},{85.18,134.45},{86.55,122.47},{84.65,139.69},
{31.02,51.27},{34.92,57.43},{75.79,112.54},{66.07,105.57},
{19.73,34.89},{17.05,37.42},{ 7.14,37.03},{47.62,82.24},
{61.83,89.38},{14.93,59.84},{32.97,52.00},{ 1.93,38.87},
{71.73,98.98},{77.19,115.10},{35.46,63.95},{ 8.15,34.40},
{21.43,36.31},{88.15,123.82},{78.82,123.96},{62.89,80.88},
{25.09,51.84},{15.08,56.73},{ 3.55, 7.59},{45.83,90.68},
{99.57,137.90},{29.03,64.03},{52.39,86.36},{69.19,107.46},
{34.44,58.79},{53.62,86.32},{21.32,41.52},{78.70,126.13},
{37.37,42.70},{69.75,114.09},{13.12,26.77},{27.83,64.35},
{70.64,103.45},{ 7.87,27.86},{99.81,131.28},{14.27,48.27},
{67.11,108.12},{51.94,87.09},{42.79,61.96},{72.75,105.87},
{25.52,56.75},{61.06,99.84},{55.35,82.00},{24.36,55.29},
{25.92,50.67},{38.23,56.34},{71.57,109.31},{45.00,78.78},
{90.13,115.60},{60.58,110.38},{98.59,132.94},{13.48,39.94},
{74.51,103.03},{81.40,121.00},{12.21,39.24},{62.10,102.23},
{67.12,107.28},{ 3.98,22.94},{35.53,69.36},{59.52,88.50},
{79.37,113.27},{37.78,79.22},{94.34,140.45},{23.68,43.81},
{65.63,106.65},{63.58,90.77},{11.01,23.22},{84.58,117.77},
{38.13,64.59},{81.53,130.57},{59.28,91.84},{90.70,130.04},
{33.28,56.89},{54.48,84.35},{45.73,84.33},{13.34,39.57},
{95.76,139.75},{46.86,66.68},{29.51,58.21},{19.98,31.60},
{48.71,93.33},{40.08,57.40},{16.44,42.14},{97.41,141.53},
{50.94,81.23},{62.02,101.97},{41.78,82.13},{83.34,117.71},
{62.83,106.73},{10.31,44.64},{82.00,118.07},{59.07,81.51},
{42.21,63.30},{16.01,35.85},{25.89,50.78},{ 9.32,32.12},
{ 4.84,45.36},{90.66,123.62},{98.07,155.17},{60.65,110.18},
{47.38,78.81},{21.96,49.62},{32.57,72.22},{50.65,89.25},
{72.61,92.25},{42.16,72.36},{68.47,100.29},{59.06,83.01},
{57.59,92.94},{11.61,42.90},{80.40,123.91},{30.00,61.66},
{89.39,133.90},{25.25,51.12},{55.95,76.05},{13.11,28.47},
{62.65,92.37},{63.68,91.48},{32.59,62.38},{80.95,134.50},
{48.89,89.70},{80.92,102.62},{ 4.69,44.09},{23.27,53.20},
{75.96,108.66},{46.45,80.13},{10.06,29.89},{30.24,50.93},
{65.62,82.45},{71.75,100.13},{81.95,120.01},{35.69,54.53},
{23.58,44.75},{98.54,139.98},{93.37,129.29},{17.07,38.95},
{11.90,32.31},{18.76,41.75},{42.37,75.18},{94.91,128.84},
{16.80,32.49},{67.47,112.89},{66.47,116.19},{37.16,40.88},
{80.53,101.62},{10.32,19.98},{ 3.80,21.73},{73.27,105.70},
{78.63,112.46},{77.27,126.23},{58.11,103.81},{40.55,70.29},
{17.09,46.69},{99.59,136.39},{33.69,71.35},{25.47,62.80},
{86.34,117.67},{78.95,124.61},{37.03,60.67},{59.43,91.46},
{71.02,103.64},{22.94,38.43},{ 1.35,26.16},{67.76,74.59},
{46.30,69.11},{24.62,70.01},{79.46,134.58},{ 2.51,28.74},
{41.27,51.27},{69.95,96.97},{45.69,48.05},{78.54,115.65},
{56.44,103.00},{12.17,38.25},{46.38,73.48},{84.10,112.86},
{85.79,118.81},{51.49,82.33},{42.56,67.96},{15.83,37.96},
{62.05,114.32},{98.73,164.21},{33.32,62.70},{72.00,105.60},
{83.63,132.81},{99.60,131.86},{27.36,64.32},{55.23,88.19},
{25.24,59.65},{61.35,88.90},{84.38,136.84},{ 3.29,29.77},
{ 7.60,33.46},{35.69,68.45},{72.18,115.15},{74.96,116.61},
{67.29,112.17},{93.14,127.57},{38.33,61.08},{37.85,78.52},
{50.37,73.41},{63.67,106.98},{ 9.11,39.99},{64.63,95.67},
{22.52,41.58},{44.79,87.35},{57.05,93.10},{ 9.97,40.10},
{34.87,50.45},{ 1.12,10.15},{49.76,99.49},{85.50,120.77},
{ 1.93,39.27},{68.44,110.73},{13.40,32.05},{34.07,56.12},
{51.31,79.19},{29.60,55.42},{32.70,57.33},{86.50,104.39},
{55.60,81.12},{43.80,73.24},{75.47,103.32},{42.63,61.06},
{55.36,88.79},{18.07,54.10},{45.44,78.38},{62.47,73.65},
{32.46,56.16},{ 8.35,47.34},{73.17,99.07},{10.41,35.99},
{58.55,91.87},{44.20,69.65},{83.75,112.69},{86.50,143.89},
{89.40,139.15},{28.44,47.16},{82.91,132.92},{61.17,109.64},
{96.91,141.97},{ 9.68,37.20},{ 4.21,26.59},{88.03,143.78},
{58.83,87.06},{25.08,46.90},{98.78,153.77},{55.99,70.21},
{ 1.10,19.36},{43.60,74.82},{35.70,82.70},{95.96,145.87},
{90.61,119.30},{15.17,40.94},{42.08,58.42},{28.82,62.43},
{93.95,134.46},{97.05,155.02},{87.82,129.50},{50.86,89.56},
{21.59,43.65},{82.12,124.39},{62.16,84.58},{23.33,43.69},
{98.58,125.82},{95.79,125.24},{68.98,98.79},{64.58,100.15},
{11.55,38.50},{17.34,27.53},{85.86,114.23},{34.88,23.88},
{37.60,54.95},{76.96,96.88},{41.37,62.39},{69.13,127.05},
{91.47,116.46},{87.63,132.71},{34.26,68.02},{68.46,104.21},
{61.30,99.65},{83.90,119.99},{23.58,41.47},{16.10,34.63},
{49.22,75.10},{61.51,95.83},{73.89,111.14},{ 2.30,25.38},
{21.66,50.29},{68.66,116.52},{21.08,35.55},{51.98,88.61},
{37.65,58.50},{87.23,147.60},{66.20,96.70},{ 8.26,40.80},
{93.34,151.55},{58.89,97.82},{84.19,116.27},{ 1.12,23.84},
{86.38,89.15},{62.55,103.79},{73.05,99.38},{30.95,30.08},
{96.00,145.96},{ 8.88,27.36},{35.91,54.17},{78.87,117.01},
{30.72,65.67},{59.94,99.63},{94.92,132.73},{43.93,74.79},
{80.58,103.06},{61.06,106.13},{16.51,41.73},{ 1.47, 7.87},
{14.17,21.41},{40.99,66.07},{36.02,59.95},{14.33,35.31},
{91.35,132.89},{ 0.16,29.96},{59.31,82.50},{ 7.70,30.65},
{75.68,106.11},{29.12,57.24},{53.31,73.66},{55.42,98.09},
{63.28,86.78},{63.76,119.81},{65.70,110.95},{79.63,120.48},
{91.62,140.18},{10.84,55.03},{35.64,74.46},{42.92,97.53},
{79.40,100.73},{28.15,54.02},{99.51,126.02},{62.25,101.89},
{20.76,39.50},{10.03,43.97},{10.23,31.80},{65.64,104.41},
{99.97,147.91},{52.09,102.00},{29.71,43.92},{ 4.03,24.96},
{68.28,99.97},{11.92,31.02},{24.39,54.70},{21.78,43.28},
{29.68,49.30},{56.77,86.72},{22.90,43.75},{93.63,125.56},
{42.37,71.52},{ 6.55,20.37},{82.96,114.20},{22.13,36.44},
{44.26,80.87},{ 7.47,29.15},{47.84,60.31},{ 6.79,33.94},
{40.36,78.12},{94.12,123.75},{89.04,132.74},{36.50,58.33},
{37.23,76.54},{11.54,43.31},{26.05,57.22},{86.85,130.49},
{ 5.24,36.17},{21.11,49.39},{ 6.08,31.64},{87.28,102.21},
{39.13,72.90},{46.00,73.78},{81.21,112.27},{ 2.72,24.94},
{18.56,39.86},{41.82,67.80},{64.02,88.90},{ 4.38,23.77},
{23.58,35.78},{75.22,126.15},{18.46,47.91},{59.09,86.91},
{41.20,68.29},{13.12,37.81},{42.29,74.76},{51.27,68.80},
{64.75,116.16},{49.37,81.76},{93.89,123.38},{51.71,81.62},
{77.78,121.73},{24.66,43.98},{29.63,63.78},{95.59,135.81},
{46.73,91.61},{27.69,57.99},{84.74,135.78},{65.86,102.23},
{ 7.06,26.16},{61.17,104.28},{20.38,29.98},{57.47,90.06},
{94.71,136.45},{90.03,124.77},{59.06,87.93},{44.61,75.89},
{33.44,73.03},{37.68,56.66},{ 6.52,30.27},{35.13,59.15},
{ 3.47,13.38},{68.90,105.98},{54.03,81.28},{84.32,110.23},
{51.99,74.26},{43.11,99.62},{72.26,126.65},{41.00,71.47},
{46.45,69.83},{19.97,48.50},{ 9.51,22.97},{27.62,54.74},
{98.61,157.02},{86.97,137.74},{42.16,70.26},{78.03,119.62},
{20.45,34.66},{67.21,91.07},{ 8.97,35.25},{99.49,132.36},
{85.16,116.22},{58.19,84.49},{37.70,68.95},{70.84,112.46},
{57.39,101.09},{45.64,72.96},{44.39,75.08},{44.93,88.27},
{16.30,35.81},{85.54,126.35},{30.12,63.73},{61.05,88.51},
{69.15,103.29},{47.85,72.88},{13.34,38.69},{62.18,106.34},
{69.63,102.48},{17.67,47.72},{58.46,89.32},{33.35,73.81},
{23.30,47.03},{32.87,86.41},{18.74,27.93},{92.10,126.62},
{27.50,41.93},{95.45,137.23},{11.96,40.54},{57.15,78.21},
{40.08,71.36},{ 4.75,11.32},{27.21,68.49},{80.44,132.76},
{27.00,64.27},{22.21,79.62},{91.86,136.38},{89.32,142.03},
{42.95,87.42},{80.74,124.29},{14.47,34.21},{34.48,67.00},
{81.41,149.44},{90.59,116.51},{80.49,92.80},{94.57,147.78},
{80.21,115.32},{92.00,125.48},{64.42,94.43},{96.35,140.76},
{ 0.46,33.35},{41.29,80.44},{96.47,129.13},{44.62,80.21},
{22.88,25.86},{25.14,54.75},{92.36,126.77},{99.99,137.55},
{82.75,98.09},{52.46,62.03},{91.80,138.97},{26.46,63.47},
{93.51,133.09},{14.56,55.23},{11.03,44.27},{35.09,64.06},
{90.60,130.74},{54.54,93.24},{18.43,46.91},{99.21,140.47},
{70.29,88.59},{54.22,87.55},{15.43,35.00},{15.26,34.13},
{59.03,98.46},{ 2.27,35.20},{ 0.75,11.33},{21.57,31.19},
{13.92,36.31},{18.52,31.37},{67.89,108.15},{82.92,132.69},
{54.63,73.24},{ 1.68,46.57},{30.86,65.28},{38.85,88.49},
{90.89,120.59},{67.00,77.89},{87.75,115.00},{27.87,55.65},
{37.53,62.11},{19.55,27.67},{ 6.63,38.49},{23.86,45.95},
{19.09,50.18},{43.38,75.23},{ 6.80,13.15},{29.31,54.48},
{49.82,73.13},{ 7.85,37.58},{22.48,51.11},{75.54,116.63},
{33.93,64.07},{12.85,41.72},{13.05,40.70},{30.44,37.80},
{ 8.19,11.89},{60.69,117.59},{69.30,109.77},{15.98,54.21},
{69.71,100.02},{71.18,102.27},{84.95,134.70},{22.84,51.67},
{76.53,126.67},{55.53,79.98},{30.95,63.50},{69.68,103.26},
{38.72,72.91},{ 0.60,10.49},{58.42,90.41},{56.71,92.53},
{ 3.54,42.83},{12.59,31.55},{25.57,59.25},{68.34,109.47},
{ 2.54,31.62},{10.11,45.46},{26.45,61.98},{29.52,55.47},
{24.09,51.15},{63.86,80.81},{74.20,133.53},{75.01,118.47},
{65.56,93.25},{85.28,121.51},{71.80,98.64},{14.78,40.31},
{71.58,104.81},{40.34,94.57},{41.26,62.68},{48.57,75.76},
{81.23,132.80},{15.28,41.55},{99.54,154.44},{87.17,132.74},
{40.46,88.69},{62.15,116.42},{28.74,44.98},{30.13,57.25},
{61.89,100.32},{77.57,113.00},{14.25,18.09},{65.76,101.93},
{29.85,45.39},{85.18,127.01},{70.49,101.53},{79.82,106.64},
{43.96,71.71},{54.68,81.92},{97.64,138.40},{72.72,122.97},
{25.52,22.33},{32.29,58.32},{ 0.96,20.17},{55.06,99.22},
{25.84,53.73},{10.49,56.67},{30.16,62.62},{58.37,87.55},
{40.21,70.89},{24.25,54.42},{36.40,50.95},{92.33,136.48},
{15.43,42.87},{72.40,111.38},{56.00,85.90},{44.80,85.96},
{14.41,31.65},{ 1.96,15.52},{93.24,126.98},{55.67,83.60},
{51.62,88.14},{64.37,86.91},{29.84,59.92},{72.24,99.11},
{81.49,113.74},{93.86,154.88},{85.65,132.06},{91.33,128.29},
{58.78,107.60},{33.09,79.07},{52.35,66.51},{24.47,40.19},
{40.82,75.63},{22.82,49.44},{77.43,124.91},{93.49,130.09},
{93.15,134.18},{ 2.36,30.56},{ 2.60,13.99},{42.20,62.35},
{96.31,126.36},{91.69,121.80},{41.35,80.21},{66.34,89.23},
{83.45,109.55},{30.46,56.24},{54.28,95.58},{72.07,102.63},
{71.49,104.41},{60.67,83.30},{19.14,53.48},{33.74,48.87},
{ 1.85,20.34},{30.82,66.88},{ 9.92,33.53},{21.63,32.69},
{43.72,76.95},{97.13,131.87},{61.74,101.18},{26.20,70.09},
{55.38,86.89},{46.82,68.76},{10.43,32.38},{63.35,113.08},
{33.60,77.18},{94.58,143.78},{70.73,110.82},{72.76,105.71},
{10.50,22.40},{46.73,60.17},{64.01,93.80},{80.47,125.45},
{81.81,125.98},{80.03,136.86},{46.10,67.97},{67.17,108.05},
{37.90,65.81},{19.03,57.35},{67.23,92.72},{47.93,80.14},
{73.59,101.99},{ 3.40,32.74},{71.44,112.60},{33.09,51.87},
{ 2.08,27.85},{30.39,49.08},{ 1.18,16.21},{69.70,115.09},
{40.93,76.23},{18.02,38.08},{50.28,77.58},{ 4.08,30.45},
{69.69,125.56},{32.36,68.65},{98.73,136.81},{57.32,91.11},
{84.37,127.82},{80.38,121.73},{82.17,109.60},{15.49,30.24},
{78.67,123.99},{14.29,66.29},{26.61,51.60},{10.39,17.71},
{66.50,104.75},{68.92,115.60},{78.67,113.40},{58.77,81.37},
{19.57,50.07},{60.80,104.77},{34.97,63.06},{ 3.11,15.27},
{49.44,78.52},{89.94,136.74},{86.96,111.62},{27.45,64.82},
{72.19,121.39},{17.09,54.79},{43.38,71.91},{30.07,54.79},
{96.42,134.12},{68.23,92.71},{22.85,63.25},{ 3.61,17.43},
{43.09,70.08},{18.37,45.69},{39.90,81.71},{14.52,48.03},
{96.08,131.54},{93.23,130.73},{32.33,69.52},{45.79,78.42},
{77.47,110.68},{66.63,117.56},{39.67,83.71},{33.01,48.46},
{58.61,99.09},{11.50,33.64},{78.49,117.62},{97.05,142.43},
{73.32,124.52},{10.56,27.04},{69.33,102.91},{63.34,100.38},
{ 2.03,23.26},{35.49,64.64},{66.36,88.94},{32.46,56.88},
{12.51,32.28},{79.14,108.70},{67.79,105.54},{79.54,116.04},
{15.34,36.67},{79.76,121.74},{70.61,103.29},{10.66,43.81},
{64.37,118.39},{69.99,97.77},{10.75,31.03},{ 7.47,30.55},
{51.33,79.49},{71.50,120.45},{52.72,91.59},{72.03,123.62},
{13.65,39.37},{59.36,82.79},{10.10,41.62},{69.96,101.38},
{12.06,44.46},{97.62,127.00},{79.09,99.99},{19.73,50.21},
{50.85,81.04},{40.13,61.30},{95.27,124.77},{30.90,62.42},
{ 6.30,17.52},{46.36,82.66},{72.92,108.28},{85.11,113.23},
{ 8.86,37.51},{50.52,80.50},{70.80,108.22},{26.49,48.10},
{46.95,83.86},{95.89,132.49},{22.82,34.71},{79.62,126.04},
{23.28,56.07},{42.27,81.15},{58.12,96.54},{88.50,133.06},
{56.78,90.03},{83.69,111.21},{71.24,99.63},{31.86,48.70},
{16.79,40.12},{90.49,130.08},{28.45,64.61},{47.24,94.87},
{36.42,68.76},{15.81,40.48},{ 3.57,13.30},{42.53,84.95},
{30.39,57.17},{57.45,80.32},{ 7.89,42.93},{89.15,121.87},
{ 3.94,28.55},{40.98,60.65},{ 1.76,23.92},{14.77,50.19},
{52.47,81.59},{38.62,64.83},{21.49,66.77},{31.25,65.17},
{ 1.09,27.47},{57.10,90.20},{51.24,77.45},{ 8.02,27.75},
{40.82,71.86},{95.88,133.37},{ 6.01,47.92},{86.57,116.76},
{28.84,55.89},{94.02,151.44},{90.05,123.63},{35.57,68.17},
{29.93,44.81},{18.71,28.51},{86.08,116.42},{48.49,78.97},
{ 4.98,25.48},{79.98,121.58},{ 1.49,16.92},{67.39,104.96},
{ 9.06,35.95},{59.89,85.66},{70.98,107.76},{50.83,77.40},
{36.75,77.66},{29.82,62.49},{ 2.77,15.50},{46.65,73.77},
{92.27,143.64},{35.35,57.38},{45.28,68.20},{72.73,108.20},
{27.21,53.92},{58.84,92.29},{99.92,138.13},{77.64,113.61},
{67.47,108.17},{10.95,20.86},{85.48,110.64},{11.00,45.11}
};
// Squared vertical residual of the point (x, y) against the line y = m*x + c.
double residual_error (double x , double y , double m , double c){
    double diff = m * x + c - y;
    return diff * diff;
}
// Device-side twin of residual_error: squared residual of (x, y)
// against the line y = m*x + c.
__device__ double d_residual_error (double x , double y , double m , double c){
    double diff = m * x + c - y;
    return diff * diff;
}
// Root-mean-square error of the line (m, c) over the global `data` set
// of `n_data` points (CPU reference implementation).
double rms_error (double m , double c){
    double error_sum = 0;
    for (int i = 0; i < n_data; i++) {
        error_sum += residual_error(data[i].x, data[i].y, m, c);
    }
    return sqrt(error_sum / n_data);
}
// One thread per data point: stores each point's squared residual into
// error_sum_arr so the host can sum and average them afterwards.
// The caller must launch exactly one thread per element of d_data.
__global__ void d_rms_error (double *m , double *c, double *error_sum_arr, point_t *d_data){
    int idx = blockIdx.x * blockDim.x + threadIdx.x;
    error_sum_arr[idx] = d_residual_error(d_data[idx].x, d_data[idx].y, *m, *c);
}
// Stores finish - start, in nanoseconds, into *difference.
// Returns 0 when the difference is positive, 1 otherwise.
int time_difference(struct timespec *start, struct timespec *finish,
                    long long int *difference) {
    long long int sec = finish->tv_sec - start->tv_sec;
    long long int nsec = finish->tv_nsec - start->tv_nsec;
    if (nsec < 0) {
        // Borrow one second so the nanosecond part is non-negative.
        sec -= 1;
        nsec += 1000000000;
    }
    *difference = sec * 1000000000 + nsec;
    return (*difference > 0) ? 0 : 1;
}
// Gradient-descent-style search for the least-squares line (m, c).
// From the current best (bm, bc) it evaluates eight neighbouring
// candidates per step (directions om/oc scaled by `step`), computing each
// candidate's RMS error on the GPU, and stops when no neighbour improves.
int main() {
    int i;
    double bm = 1.3;               // current best gradient
    double bc = 10;                // current best intercept
    double be;                     // current best error
    double dm[8];
    double dc[8];
    double e[8];
    double step = 0.01;
    double best_error = 999999999;
    int best_error_i;
    int minimum_found = 0;
    // Eight search directions around the current (m, c).
    double om[] = {0,1,1, 1, 0,-1,-1,-1};
    double oc[] = {1,1,0,-1,-1,-1, 0, 1};
    struct timespec start, finish;
    long long int time_elapsed;
    clock_gettime(CLOCK_MONOTONIC, &start);
    cudaError_t error;
    // Device variables.
    double *d_dm;
    double *d_dc;
    double *d_error_sum_arr;
    point_t *d_data;
    be = rms_error(bm, bc);
    // Allocate memory for d_dm.
    error = cudaMalloc(&d_dm, (sizeof(double) * 8));
    if(error){
        fprintf(stderr, "cudaMalloc on d_dm returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    // Allocate memory for d_dc.
    error = cudaMalloc(&d_dc, (sizeof(double) * 8));
    if(error){
        fprintf(stderr, "cudaMalloc on d_dc returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    // Allocate memory for d_error_sum_arr (one slot per data point).
    error = cudaMalloc(&d_error_sum_arr, (sizeof(double) * 1000));
    if(error){
        fprintf(stderr, "cudaMalloc on d_error_sum_arr returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    // Allocate memory for d_data.
    error = cudaMalloc(&d_data, sizeof(data));
    if(error){
        fprintf(stderr, "cudaMalloc on d_data returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    while(!minimum_found) {
        // Build the eight candidate (m, c) pairs around the current best.
        for(i=0;i<8;i++) {
            dm[i] = bm + (om[i] * step);
            dc[i] = bc + (oc[i] * step);
        }
        // Copy candidates and data to the device.
        error = cudaMemcpy(d_dm, dm, (sizeof(double) * 8), cudaMemcpyHostToDevice);
        if(error){
            fprintf(stderr, "cudaMemcpy to d_dm returned %d %s\n", error,
                cudaGetErrorString(error));
        }
        error = cudaMemcpy(d_dc, dc, (sizeof(double) * 8), cudaMemcpyHostToDevice);
        if(error){
            fprintf(stderr, "cudaMemcpy to d_dc returned %d %s\n", error,
                cudaGetErrorString(error));
        }
        error = cudaMemcpy(d_data, data, sizeof(data), cudaMemcpyHostToDevice);
        if(error){
            fprintf(stderr, "cudaMemcpy to d_data returned %d %s\n", error,
                cudaGetErrorString(error));
        }
        for(i=0;i<8;i++) {
            // Host array for the per-point squared residuals.
            double h_error_sum_arr[1000];
            // BUGFIX: this accumulator used to be declared uninitialized and
            // then read with `+=` — undefined behaviour on every iteration.
            double error_sum_total = 0;
            // Mean of the summed squared residuals.
            double error_sum_mean;
            // 1000 threads (100 blocks x 10 threads), one per data point.
            d_rms_error <<<100,10>>>(&d_dm[i], &d_dc[i], d_error_sum_arr, d_data);
            // cudaThreadSynchronize() is deprecated; use the modern call.
            cudaDeviceSynchronize();
            // Copy the residuals back to the host.
            error = cudaMemcpy(h_error_sum_arr, d_error_sum_arr, (sizeof(double) * 1000), cudaMemcpyDeviceToHost);
            if(error){
                fprintf(stderr, "cudaMemcpy to error_sum returned %d %s\n", error,
                    cudaGetErrorString(error));
            }
            // Sum the residuals, then take the RMS.
            for(int j=0; j<n_data; j++) {
                error_sum_total += h_error_sum_arr[j];
            }
            error_sum_mean = error_sum_total / n_data;
            e[i] = sqrt(error_sum_mean);
            if(e[i] < best_error) {
                best_error = e[i];
                best_error_i = i;
            }
        }
        // Accept the best neighbour if it improves, otherwise stop.
        if(best_error < be) {
            be = best_error;
            bm = dm[best_error_i];
            bc = dc[best_error_i];
        } else {
            minimum_found = 1;
        }
    }
    // Free memory for d_dm.
    error = cudaFree(d_dm);
    if(error){
        fprintf(stderr, "cudaFree on d_dm returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    // Free memory for d_dc.
    error = cudaFree(d_dc);
    if(error){
        fprintf(stderr, "cudaFree on d_dc returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    // Free memory for d_data.
    error = cudaFree(d_data);
    if(error){
        fprintf(stderr, "cudaFree on d_data returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    // Free memory for d_error_sum_arr.
    error = cudaFree(d_error_sum_arr);
    if(error){
        fprintf(stderr, "cudaFree on d_error_sum_arr returned %d %s\n", error,
            cudaGetErrorString(error));
        exit(1);
    }
    printf("minimum m,c is %lf,%lf with error %lf\n", bm, bc, be);
    // Get the system time after we have run the linear regression function.
    clock_gettime(CLOCK_MONOTONIC, &finish);
    // Calculate the time spent between the start time and end time.
    time_difference(&start, &finish, &time_elapsed);
    // Output the time spent running the program.
    printf("Time elapsed was %lldns or %0.9lfs\n", time_elapsed,
        (time_elapsed/1.0e9));
    return 0;
}
|
20,051 | #include "includes.h"
// One level of a tree-style reduction: each active thread owns the element
// at index pow_step*tid and folds the element half a stride away into it.
__global__ void vecSum(double* devIn, int pow_step, int n)
{
    // Global thread index (block offset plus lane within the block).
    int tid = blockIdx.x * blockDim.x + threadIdx.x;
    int base = pow_step * tid;
    // Guard: only threads whose slot lies inside the array participate.
    if (base < n)
        devIn[base] += devIn[base + (pow_step / 2)];
}
20,052 | #include <stdio.h>
#include <cuda.h>
__device__ int sumg = 0;
// Adds (2 * global_thread_id + 1) into the device-global counter `sumg`
// and into a per-block shared counter `sum`.
__global__ void K() {
    int num = blockIdx.x * blockDim.x + threadIdx.x;
    num += num;   // num = 2 * global id
    ++num;        // num = 2 * global id + 1
    atomicAdd(&sumg, num);
    __shared__ int sum;
    // BUGFIX: previously every thread wrote sum = 0 and then did a plain
    // (non-atomic) `sum += num` — a data race on shared memory that
    // compute-sanitizer racecheck flags. Initialize from one thread and
    // accumulate atomically.
    if (threadIdx.x == 0) sum = 0;
    __syncthreads();
    atomicAdd(&sum, num);
}
// Launches K over 100 blocks of 160 threads, then waits for completion.
int main() {
    K<<<100, 160>>>();   // 160 == 32 * 5 threads per block
    cudaDeviceSynchronize();
    return 0;
}
|
20,053 | #include "includes.h"
// Samples a 1D texture at num_samples evenly spaced normalized
// coordinates in [0, 1) and writes the fetched values to output.
__global__ void tex_kernel(cudaTextureObject_t texture_obj, int num_samples, float* output) {
    unsigned int tid = blockIdx.x*blockDim.x + threadIdx.x;
    if (tid >= num_samples)
        return;
    // Normalized sampling coordinate for this thread.
    float coord = tid / static_cast<float>(num_samples);
    output[tid] = tex1D<float>(texture_obj, coord);
}
20,054 | #include <cuComplex.h>
#include <cufft.h>
#include <vector>
#include <stdio.h>
#include <cuda.h>
#include <thrust/functional.h>
#include <thrust/device_ptr.h>
#include <thrust/transform_scan.h>
#include <thrust/sequence.h>
#include <float.h>
#include <chrono>
using std::vector;
static const int THREADS_PER_BLOCK = 1024;
// Holds a matrix-profile value and its index together in one 64-bit word
// so both can be read/updated with a single atomic operation (see
// MPatomicMin below). The float value and the unsigned index occupy the
// two 32-bit halves of `ulong`.
typedef union {
float floats[2]; // floats[0] = lowest (the profile value)
unsigned int ints[2]; // ints[1] = lowIdx (the matching index)
unsigned long long int ulong; // whole word, used for atomicCAS updates
} mp_entry;
// Functor that packs a distance value and its index into one mp_entry
// (value narrowed to float in the low half, index in the high half).
struct MPIDXCombine
{
__host__ __device__
mp_entry operator()(double x, unsigned int idx){
    mp_entry packed;
    packed.floats[0] = (float) x;
    packed.ints[1] = idx;
    return packed;
}
};
//Atomically updates the MP/idxs using a single 64-bit integer. We lose a small amount of precision in the output, if we do not do this we are unable
// to atomically update both the matrix profile and the indexes without using a critical section and dedicated locks.
// Atomically lowers the (float value, index) pair packed into the 64-bit
// word at *address when `val` is smaller than the value currently stored.
// Both halves (value + index) are swapped with one atomicCAS, so no lock
// is needed; some precision is lost by narrowing the double to float.
// Returns the last word observed at *address.
__device__ inline unsigned long long int MPatomicMin(volatile unsigned long long int* address, double val, unsigned int idx)
{
float fval = (float)val;
mp_entry loc, loctest;
loc.floats[0] = fval;
loc.ints[1] = idx;
loctest.ulong = *address;
// Retry while the stored value is still larger than ours. atomicCAS
// returns the previous word, so loctest is refreshed after every failed
// swap and the loop exits once another thread has stored something
// smaller (or our swap succeeds).
while (loctest.floats[0] > fval){
loctest.ulong = atomicCAS((unsigned long long int*) address, loctest.ulong, loc.ulong);
}
return loctest.ulong;
}
//This macro checks return value of the CUDA runtime call and exits
//the application if the call failed.
#define gpuErrchk(ans) { gpuAssert((ans), __FILE__, __LINE__); }
// Reports a CUDA error with its source location; optionally terminates
// the process (default) using the error code as the exit status.
inline void gpuAssert(cudaError_t code, const char *file, int line, bool abort=true)
{
    if (code == cudaSuccess)
        return;
    fprintf(stderr,"GPUassert: %s %s %d\n", cudaGetErrorString(code), file, line);
    if (abort) exit(code);
}
//This kernel computes a sliding mean with specified window size and a corresponding prefix sum array (A)
// Computes a sliding (windowed) mean from the prefix-sum array pref_sum.
// Thread 0 also writes the first window's mean; thread a produces
// means[a + 1] from the prefix-sum difference pref_sum[a + window] -
// pref_sum[a].
// NOTE(review): assumes pref_sum has at least (size - 1) + window elements
// and that `size` is the number of means produced — confirm with the
// host-side caller (not visible in this chunk).
template<class DTYPE>
__global__ void sliding_mean(DTYPE* pref_sum, size_t window, size_t size, DTYPE* means)
{
const DTYPE coeff = 1.0 / (DTYPE) window;
size_t a = blockIdx.x * blockDim.x + threadIdx.x;
size_t b = blockIdx.x * blockDim.x + threadIdx.x + window;
if(a == 0){
means[a] = pref_sum[window - 1] * coeff;
}
if(a < size - 1){
means[a + 1] = (pref_sum[b] - pref_sum[a]) * coeff;
}
}
//This kernel computes a sliding standard deviaiton with specified window size, the corresponding means of each element, and the prefix squared sum at each element
// Computes a sliding standard deviation using the prefix sum of squares
// (`squares`) and the precomputed sliding means: Var = E[x^2] - E[x]^2,
// stds[a] = sqrt of that variance for the window starting at a.
// NOTE(review): assumes `squares` has at least size - 1 + window elements;
// confirm with the host-side caller (not visible in this chunk).
template<class DTYPE>
__global__ void sliding_std(DTYPE* squares, size_t window, size_t size, DTYPE* means, DTYPE* stds){
const DTYPE coeff = 1 / (DTYPE) window;
size_t a = blockIdx.x * blockDim.x + threadIdx.x;
size_t b = blockIdx.x * blockDim.x + threadIdx.x + window;
// Thread 0 handles the first window directly from the prefix array.
if(a == 0){
stds[a] = sqrt((squares[window - 1] * coeff) - (means[a] * means[a]));
}
// Remaining windows use the prefix-sum difference over [a, b).
else if(b < size + window) {
stds[a] = sqrt(((squares[b - 1] - squares[a - 1]) * coeff) - (means[a] * means[a]));
}
}
// In-place elementwise product: B[i] *= A[i] for every i in [0, size).
template<class DTYPE>
__global__ void elementwise_multiply_inplace(const DTYPE* A, DTYPE *B, const int size)
{
    int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size)
        return;
    B[idx] = B[idx] * A[idx];
}
// Specialization for complex doubles: the product must go through cuCmul
// since operator* is not defined for cuDoubleComplex.
template<>
__global__ void elementwise_multiply_inplace(const cuDoubleComplex* A, cuDoubleComplex* B, const int size)
{
    int idx = blockIdx.x * blockDim.x + threadIdx.x;
    if (idx >= size)
        return;
    B[idx] = cuCmul(A[idx], B[idx]);
}
// A is input unaligned sliding dot products produced by ifft
// out is the computed vector of distances
// A is input unaligned sliding dot products produced by ifft;
// out is the computed vector of distances.
// Each thread handles one diagonal entry j = thID + 1, incrementally
// updating the sliding dot product (lastz) and atomically lowering the
// matrix-profile entries for both j and its pair i via MPatomicMin.
template<class DTYPE>
__global__ void normalized_aligned_distance(const DTYPE* A, DTYPE* out, DTYPE * lastzs,
                        const DTYPE * AMean, const DTYPE* ASigma,
                        const unsigned int windowSize, const int exclusionZone,
                        const unsigned int ProfileLength, DTYPE* profile,
                        unsigned int * profile_idx, const unsigned int scratch, mp_entry *profile_entry)
{
    int thID = blockIdx.x * blockDim.x + threadIdx.x;
    int i = 1;
    int j = thID + i;
    // BUGFIX: the grid is rounded up to a whole number of blocks, so tail
    // threads with j >= ProfileLength previously read lastzs, A, AMean,
    // ASigma and profile_entry out of bounds. Guard the tail in addition
    // to the exclusion zone, and only read lastzs inside the guard.
    if(j > exclusionZone && j < (int) ProfileLength)
    {
        DTYPE lastz = lastzs[thID];
        // Incremental sliding-dot-product update: add the new trailing
        // term, drop the old leading term.
        lastz = lastz + (A[j + windowSize - 1] * A[i + windowSize - 1]) - (A[j - 1] * A[i - 1]);
        // Normalized (z-normalized) squared distance, clamped at zero.
        DTYPE distance = max(2 * (windowSize - (lastz - AMean[j] * AMean[i] * windowSize) / (ASigma[j] * ASigma[i])), 0.0);
        if (distance < profile_entry[j].floats[0])
        {
            MPatomicMin((unsigned long long int*)&profile_entry[j], distance, i);
        }
        if (distance < profile_entry[i].floats[0])
        {
            MPatomicMin((unsigned long long int*)&profile_entry[i], distance, j);
        }
    }
}
// Seeds the sliding dot products: thread j computes the full dot product
// between the window starting at j and the window starting at 0, writes
// the resulting normalized distance into out[j], updates the matrix
// profile for j if improved (match index 0), and stores the dot product
// into lastzs_last[j] for the incremental kernel to continue from.
// NOTE(review): out[j] = DBL_MAX for excluded entries — for DTYPE = float
// this would overflow to +inf; confirm the intended element type.
template<class DTYPE>
__global__ void initialize_lastzs(const DTYPE* A, DTYPE* out, DTYPE * lastzs_last,
const DTYPE * AMean, const DTYPE* ASigma, const unsigned int windowSize, const unsigned int exclusionZone,
const unsigned int ProfileLength, DTYPE* profile,
unsigned int * profile_idx)
{
int j = blockIdx.x * blockDim.x + threadIdx.x;
if((j > exclusionZone) && (j < ProfileLength)) {
// Full O(windowSize) dot product between windows at j and at 0.
DTYPE lastz = 0;
for (int index = j; index < windowSize + j; index++)
{
lastz += A[index] * A[index-j];
}
// Normalized squared distance against subsequence 0, clamped at zero.
DTYPE distance = max(2 * (windowSize - (lastz - AMean[j] * AMean[0] * windowSize) / (ASigma[j] * ASigma[0])), 0.0);
// Update the distance profile
out[j] = distance;
// Update the matrix profile if needed
if(profile[j] > distance) {
profile[j] = distance;
profile_idx[j] = 0;
}
// (Always true here given the enclosing condition.)
if(j < ProfileLength) lastzs_last[j] = lastz;
}
else if (j < ProfileLength)
{
// Inside the exclusion zone: mark the distance as "no match".
out[j] = DBL_MAX;
}
}
// Host wrapper: launches normalized_aligned_distance with one thread per
// diagonal entry of the profile (ProfileLength threads, rounded up to
// whole blocks of THREADS_PER_BLOCK), then checks for launch errors.
template<class DTYPE>
__host__ void distance_profile(const DTYPE* A, DTYPE* QT, DTYPE * lastzs,
                DTYPE *profile, unsigned int *profile_idx, const DTYPE * AMean, const DTYPE * ASigma, const int timeSeriesLength,
                const int windowSize,const int exclusionZone, const unsigned int i, mp_entry *profile_entry)
{
    const int ProfileLength = timeSeriesLength - windowSize + 1;
    dim3 block(THREADS_PER_BLOCK, 1, 1);
    dim3 grid(ceil(ProfileLength / (float) THREADS_PER_BLOCK), 1, 1);
    normalized_aligned_distance<DTYPE><<<grid, block>>>(
        A, QT, lastzs, AMean, ASigma, windowSize,
        exclusionZone, ProfileLength, profile, profile_idx, i, profile_entry);
    gpuErrchk(cudaPeekAtLastError());
}
// Reduction kernel, upper layer.
// Adapted from the NVIDIA reduction whitepaper:
// http://developer.download.nvidia.com/compute/cuda/1.1-Beta/x86_website/projects/reduction/doc/reduction.pdf
// Each block strides over g_idata[0..ProfileLength), keeps its running
// minimum (value plus originating index) in shared memory, tree-reduces,
// and writes one <min, index> pair to g_odata[blockIdx.x]/g_oloc[blockIdx.x].
// Launch requirement: blockDim.x == blockSize (the template parameter).
// NOTE(review): the DBL_MAX sentinel overflows to +inf when DTYPE is float —
// it still loses every comparison, but confirm that is intended.
template <class DTYPE, unsigned int blockSize>
__global__ void reduce(const DTYPE *g_idata, DTYPE *g_odata, unsigned int *g_oloc, unsigned int ProfileLength) {
__shared__ DTYPE sdata[blockSize];
// FIX: locations were stored in a DTYPE (floating point) array, which
// silently loses precision for indices above 2^24 when DTYPE is float.
// Keep indices as unsigned int end to end.
__shared__ unsigned int sloc[blockSize];
unsigned int tid = threadIdx.x;
unsigned int i = blockIdx.x*(blockSize*2) + tid;
unsigned int gridSize = blockSize*2*gridDim.x;
DTYPE temp;
unsigned int temploc;
// Sentinel so threads that never enter the loop lose every comparison.
sdata[tid] = DBL_MAX;
sloc[tid] = 0; // FIX: was uninitialized for threads with i >= ProfileLength
while (i < ProfileLength) {
// First halving is free: fold a strided pair per iteration.
if (i + blockSize < ProfileLength)
{
if (g_idata[i] < g_idata[i+blockSize])
{
temp=g_idata[i];
temploc=i;
}
else
{
temp=g_idata[i+blockSize];
temploc = i+blockSize;
}
}
else
{
temp = g_idata[i];
temploc = i;
}
if (sdata[tid] > temp)
{
sdata[tid] = temp;
sloc[tid] = temploc;
}
i += gridSize;
}
__syncthreads();
// Tree reduction; blockSize guards fold away at compile time, and every
// __syncthreads() sits outside the tid guard so all threads reach it.
if (blockSize >= 1024) {
if (tid < 512 && sdata[tid] > sdata[tid + 512])
{
sdata[tid] = sdata[tid + 512];
sloc[tid] = sloc[tid + 512];
}
__syncthreads();
}
if (blockSize >= 512 ) {
if (tid < 256 && sdata[tid] > sdata[tid + 256])
{
sdata[tid] = sdata[tid + 256];
sloc[tid] = sloc[tid + 256];
}
__syncthreads();
}
if (blockSize >= 256) {
if (tid < 128 && sdata[tid] > sdata[tid + 128])
{
sdata[tid] = sdata[tid + 128];
sloc[tid] = sloc[tid + 128];
}
__syncthreads();
}
if (blockSize >= 128) {
if (tid < 64 && sdata[tid] > sdata[tid + 64])
{
sdata[tid] = sdata[tid + 64];
sloc[tid] = sloc[tid + 64];
}
__syncthreads();
}
if (blockSize >= 64) {
if (tid < 32 && sdata[tid] > sdata[tid + 32])
{
sdata[tid] = sdata[tid + 32];
sloc[tid] = sloc[tid + 32];
}
__syncthreads();
}
if (blockSize >= 32) {
if (tid < 16 && sdata[tid] > sdata[tid + 16])
{
sdata[tid] = sdata[tid + 16];
sloc[tid] = sloc[tid + 16];
}
__syncthreads();
}
if (blockSize >= 16) {
if (tid < 8 && sdata[tid] > sdata[tid + 8])
{
sdata[tid] = sdata[tid + 8];
sloc[tid] = sloc[tid + 8];
}
__syncthreads();
}
if (blockSize >= 8) {
if (tid < 4 && sdata[tid] > sdata[tid + 4])
{
sdata[tid] = sdata[tid + 4];
sloc[tid] = sloc[tid + 4];
}
__syncthreads();
}
if (blockSize >= 4) {
if (tid < 2 && sdata[tid] > sdata[tid + 2])
{
sdata[tid] = sdata[tid + 2];
sloc[tid] = sloc[tid + 2];
}
__syncthreads();
}
// Final pair: thread 0 publishes the block's winner.
if (blockSize >= 2) {
if (tid == 0)
{
if (sdata[0] <= sdata[1])
{
g_odata[blockIdx.x] = sdata[0];
g_oloc[blockIdx.x] = sloc[0];
}
else
{
g_odata[blockIdx.x] = sdata[1];
g_oloc[blockIdx.x] = sloc[1];
}
}
}
else
{
if (tid == 0)
{
g_odata[blockIdx.x] = sdata[0];
g_oloc[blockIdx.x] = sloc[0];
}
}
}
//reduction kernel, lower layer
// Folds the per-block candidates produced by reduce<> (values in g_idata,
// original indices in g_iloc) down to a single minimum within ONE block,
// then conditionally updates *profilei / *profileidxi when the new minimum
// beats the stored profile value. start_loc rebases the winning index.
// Launch requirement: gridDim == 1 and blockDim.x == blockSize.
template <class DTYPE, unsigned int blockSize>
__global__ void reducelast(DTYPE *g_idata, unsigned int *g_iloc,
unsigned int start_loc, DTYPE* profilei, unsigned int* profileidxi, unsigned int n) {
__shared__ DTYPE sdata[blockSize];
// FIX: locations were stored in a DTYPE (floating point) array, losing
// precision for large indices when DTYPE is float; keep them integral.
__shared__ unsigned int sloc[blockSize];
unsigned int tid = threadIdx.x;
unsigned int i = blockIdx.x*(blockSize*2) + tid;
unsigned int gridSize = blockSize*2*gridDim.x;
DTYPE temp;
unsigned int temploc;
// Sentinel so idle threads lose every comparison.
sdata[tid] = DBL_MAX;
sloc[tid] = 0; // FIX: was uninitialized for threads with i >= n
DTYPE minval;
unsigned int minloc;
while (i < n) {
if (i + blockSize <n)
{
if (g_idata[i] < g_idata[i+blockSize])
{
temp=g_idata[i];
temploc=g_iloc[i];
}
else
{
temp=g_idata[i+blockSize];
temploc = g_iloc[i+blockSize];
}
}
else
{
temp = g_idata[i];
temploc = g_iloc[i];
}
if (sdata[tid] > temp)
{
sdata[tid] = temp;
sloc[tid] = temploc;
}
i += gridSize;
}
__syncthreads();
// Tree reduction; guards are compile-time, barriers reached by all threads.
if (blockSize >= 1024) {
if (tid < 512 && sdata[tid] > sdata[tid + 512])
{
sdata[tid] = sdata[tid + 512];
sloc[tid] = sloc[tid + 512];
}
__syncthreads();
}
if (blockSize >= 512 ) {
if (tid < 256 && sdata[tid] > sdata[tid + 256])
{
sdata[tid] = sdata[tid + 256];
sloc[tid] = sloc[tid + 256];
}
__syncthreads();
}
if (blockSize >= 256) {
if (tid < 128 && sdata[tid] > sdata[tid + 128])
{
sdata[tid] = sdata[tid + 128];
sloc[tid] = sloc[tid + 128];
}
__syncthreads();
}
if (blockSize >= 128) {
if (tid < 64 && sdata[tid] > sdata[tid + 64])
{
sdata[tid] = sdata[tid + 64];
sloc[tid] = sloc[tid + 64];
}
__syncthreads();
}
if (blockSize >= 64) {
if (tid < 32 && sdata[tid] > sdata[tid + 32])
{
sdata[tid] = sdata[tid + 32];
sloc[tid] = sloc[tid + 32];
}
__syncthreads();
}
if (blockSize >= 32) {
if (tid < 16 && sdata[tid] > sdata[tid + 16])
{
sdata[tid] = sdata[tid + 16];
sloc[tid] = sloc[tid + 16];
}
__syncthreads();
}
if (blockSize >= 16) {
if (tid < 8 && sdata[tid] > sdata[tid + 8])
{
sdata[tid] = sdata[tid + 8];
sloc[tid] = sloc[tid + 8];
}
__syncthreads();
}
if (blockSize >= 8) {
if (tid < 4 && sdata[tid] > sdata[tid + 4])
{
sdata[tid] = sdata[tid + 4];
sloc[tid] = sloc[tid + 4];
}
__syncthreads();
}
if (blockSize >= 4) {
if (tid < 2 && sdata[tid] > sdata[tid + 2])
{
sdata[tid] = sdata[tid + 2];
sloc[tid] = sloc[tid + 2];
}
__syncthreads();
}
if (blockSize >= 2) {
if (tid == 0)
{
if (sdata[0] <= sdata[1])
{
minval = sdata[0];
minloc = sloc[0];
}
else
{
minval = sdata[1];
minloc = sloc[1];
}
}
}
else
{
if (tid == 0)
{
minval = sdata[0];
minloc = sloc[0];
}
}
// Thread 0 holds the global minimum; update the profile entry if better.
if (tid==0)
{
if (minval<(*profilei))
{
(*profilei)=minval;
(*profileidxi)=minloc+start_loc;
}
}
}
// Host driver for a two-level GPU argmin over vd[start_loc .. start_loc+n):
// an upper-level reduce<> pass emits one <min, index> candidate per block
// into reduced_result/reduced_loc, then a single-block reducelast<> pass
// folds the candidates and conditionally updates profile[i]/profileidx[i]
// (winning index rebased by start_loc). reduced_result/reduced_loc are
// caller-provided device scratch buffers sized for max_block_num entries.
template<class DTYPE>
void reducemain(DTYPE* vd, unsigned int start_loc, unsigned int max_block_num, unsigned int max_thread_num, unsigned int n, DTYPE* profile, unsigned int* profileidx, unsigned int i, DTYPE* reduced_result, unsigned int* reduced_loc)
{
if (n==0) //if this happens, there's an error
return;
if (max_thread_num>1024)
max_thread_num=1024;
unsigned int * middle_loc_pointer=reduced_loc;
unsigned int num_threads=max_thread_num;
// Each reduce<> thread folds two elements per pass, hence the factor of 2.
unsigned int num_blocks=n/(num_threads*2);
if (n%(num_threads*2)!=0)
num_blocks++;
if (num_blocks>=max_block_num)
num_blocks=max_block_num;
DTYPE *middle_pointer = NULL;
unsigned int curn;
if (num_blocks>1) //upperlevel reduction
{
middle_pointer=reduced_result;
curn=num_blocks;
// blockSize is a template parameter, so dispatch on the runtime value.
switch (num_threads)
{
case 1024:
reduce<DTYPE, 1024><<<num_blocks,1024>>>(vd + start_loc,reduced_result,reduced_loc,n); break;
case 512:
reduce<DTYPE, 512><<<num_blocks,512>>>(vd + start_loc,reduced_result,reduced_loc,n); break;
case 256:
reduce<DTYPE, 256><<<num_blocks,256>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
case 128:
reduce<DTYPE, 128><<<num_blocks,128>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
case 64:
reduce<DTYPE, 64><<<num_blocks,64>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
case 32:
reduce<DTYPE, 32><<<num_blocks,32>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
case 16:
reduce<DTYPE, 16><<<num_blocks,16>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
case 8:
reduce<DTYPE, 8><<<num_blocks,8>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
case 4:
reduce<DTYPE, 4><<<num_blocks,4>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
case 2:
reduce<DTYPE, 2><<<num_blocks,2>>>(vd+start_loc,reduced_result,reduced_loc,n); break;
default:
break;
}
gpuErrchk( cudaPeekAtLastError() );
}
else
{
// Small input: skip the upper level and feed the raw data straight to
// reducelast with identity locations 0..curn-1.
middle_pointer=vd+start_loc;
curn=n;
auto ptr = thrust::device_pointer_cast(reduced_loc);
thrust::sequence(ptr,ptr+curn);
}
// Pick 2^(ceil(log2(curn))-1) threads (each folds a pair), capped at the
// device limit; curn==1 yields 0 and is handled by the case 0 label below.
num_threads=floor(pow(2,ceil(log(curn)/log(2))-1));
if (num_threads>max_thread_num)
num_threads=max_thread_num;
switch (num_threads)
{
case 1024:
reducelast<DTYPE,1024><<<1,1024>>>(middle_pointer, middle_loc_pointer, start_loc, profile+i, profileidx+i, curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 512:
reducelast<DTYPE,512><<<1,512>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i, curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 256:
reducelast<DTYPE,256><<<1,256>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i, curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 128:
reducelast<DTYPE,128><<<1,128>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i,curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 64:
reducelast<DTYPE,64><<<1,64>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i,curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 32:
reducelast<DTYPE,32><<<1,32>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i, curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 16:
reducelast<DTYPE,16><<<1,16>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i,curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 8:
reducelast<DTYPE,8><<<1,8>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i,curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 4:
reducelast<DTYPE,4><<<1,4>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i,curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 2:
reducelast<DTYPE,2><<<1,2>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i, curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 1:
reducelast<DTYPE,1><<<1,1>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i, curn);
gpuErrchk( cudaPeekAtLastError() );
break;
case 0:
// curn == 1: a single candidate still needs the profile-update step.
reducelast<DTYPE,1><<<1,1>>>(middle_pointer,middle_loc_pointer, start_loc, profile+i, profileidx+i, curn);
gpuErrchk( cudaPeekAtLastError() );
break;
default:
break;
}
}
// Element-wise square functor, used to build the prefix sum of T^2 in
// compute_statistics. Usable from both host and device code.
template<class DTYPE>
struct square_op : public thrust::unary_function<DTYPE,DTYPE>
{
__host__ __device__
DTYPE operator()(DTYPE value) const
{
return value * value;
}
};
// Computes the sliding-window mean and standard deviation of T for every
// window of length m (n windows total). scratch must hold n + m - 1
// elements and is clobbered (used for both prefix sums).
template<class DTYPE>
void compute_statistics(const DTYPE *T, DTYPE *means, DTYPE *stds, DTYPE *scratch, size_t n, size_t m)
{
dim3 launch_block(THREADS_PER_BLOCK, 1, 1);
dim3 launch_grid(ceil(n / (double) THREADS_PER_BLOCK), 1, 1);
thrust::device_ptr<const DTYPE> ts_begin = thrust::device_pointer_cast(T);
thrust::device_ptr<DTYPE> scan_out = thrust::device_pointer_cast(scratch);
// Prefix sum of T drives the sliding means.
thrust::inclusive_scan(ts_begin, ts_begin + n + m - 1, scan_out, thrust::plus<DTYPE>());
sliding_mean<DTYPE><<<launch_grid, launch_block>>>(scratch, m, n, means);
// Prefix sum of T^2 (plus the means) drives the sliding std deviations.
square_op<DTYPE> sqr;
thrust::transform_inclusive_scan(ts_begin, ts_begin + n + m - 1, scan_out, sqr, thrust::plus<DTYPE>());
sliding_std<DTYPE><<<launch_grid, launch_block>>>(scratch, m, n, means, stds);
}
// Computes the STREAMP matrix profile of device-resident series T.
// profile / profile_idxs / profile_with_idx are device buffers of length
// timeSeriesLength - windowSize + 1 and receive the result.
template<class DTYPE>
void STREAMP(DTYPE* T, const int timeSeriesLength, const int windowSize, DTYPE* profile, unsigned int* profile_idxs, mp_entry *profile_with_idx)
{
int exclusionZone = windowSize / 4;
size_t ProfileLength = timeSeriesLength - windowSize + 1;
DTYPE * AMean, * ASigma, *QT, *lastzs, *reduced_result;
dim3 block(THREADS_PER_BLOCK,1,1);
dim3 grid(ceil(ProfileLength / (float) THREADS_PER_BLOCK), 1, 1);
unsigned int *reduced_loc;
const unsigned int max_block_num=2048;
const unsigned int max_thread_num=1024;
unsigned int middle_loc_size=max_block_num>max_thread_num?max_block_num:max_thread_num;
cudaMalloc(&QT, ProfileLength * sizeof(DTYPE));
cudaMalloc(&AMean, ProfileLength * sizeof(DTYPE));
cudaMalloc(&ASigma, ProfileLength * sizeof(DTYPE));
cudaMalloc(&lastzs, ProfileLength * sizeof(DTYPE));
cudaMalloc(&reduced_result, max_block_num * sizeof(DTYPE));
cudaMalloc(&reduced_loc, middle_loc_size * sizeof(unsigned int));
// Precompute sliding means/stds; QT doubles as scratch since it is not
// needed until the distance-profile step.
compute_statistics(T, AMean, ASigma, QT, ProfileLength, windowSize);
auto begin = std::chrono::high_resolution_clock::now();
// Initialize profile / lastzs, reduce the initial distances, and pack
// <value, index> pairs into the combined profile.
initialize_lastzs<DTYPE><<<grid, block>>>(T, QT, lastzs, AMean, ASigma, windowSize, exclusionZone,
ProfileLength, profile, profile_idxs);
reducemain(QT, 0, 2048, 1024, ProfileLength, profile, profile_idxs, 0, reduced_result, reduced_loc);
MPIDXCombine combiner;
auto ptr_prof = thrust::device_pointer_cast(profile);
auto ptr_idx = thrust::device_pointer_cast(profile_idxs);
auto ptr_comb = thrust::device_pointer_cast(profile_with_idx);
thrust::transform(ptr_prof, ptr_prof + ProfileLength, ptr_idx, ptr_comb, combiner);
cudaDeviceSynchronize();
// compute the distance profile
distance_profile<DTYPE>(T, QT, lastzs, profile, profile_idxs, AMean, ASigma, timeSeriesLength,
windowSize, exclusionZone, 1, profile_with_idx);
cudaDeviceSynchronize();
auto end = std::chrono::high_resolution_clock::now();
std::cout << "STREAMP time: "<< (float) std::chrono::duration_cast<std::chrono::microseconds>(end-begin).count() / 1000 << " ms." << std::endl;
cudaFree(QT);
cudaFree(AMean);
cudaFree(ASigma);
cudaFree(lastzs);
// FIX: the reduction scratch buffers were leaked on every call.
cudaFree(reduced_result);
cudaFree(reduced_loc);
}
//Reads input time series from file
// Appends every value parsed with format_str to v, stopping at the first
// failed parse (normally end of file). Exits with a message if the file
// cannot be opened.
// FIX: the old loop tested feof() BEFORE reading, which duplicated the last
// value when the file ended in a newline (patched over with pop_back) and
// silently DROPPED the last value when it did not; it also called
// pop_back() on an empty vector (UB) for unreadable/empty files. Testing
// the fscanf result handles all of these cases correctly.
template<class DTYPE>
void readFile(const char* filename, std::vector<DTYPE>& v, const char *format_str)
{
FILE* f = fopen( filename, "r");
if(f == NULL){
printf("Unable to open %s for reading, please make sure it exists\n", filename);
exit(0);
}
DTYPE num;
while(fscanf(f, format_str, &num) == 1){
v.push_back(num);
}
fclose(f);
}
// Entry point: <subseq length> <input file> <output file>.
// Reads the time series, computes the matrix profile on device 0, and
// writes "sqrt(distance) index" per profile entry to the output file.
int main(int argc, char **argv)
{
if (argc != 4) {
printf("Usage: <subseq length> <input file> <output file>\n");
exit(0);
}
int nDevices;
double *T, *profile;
unsigned int *idxs;
mp_entry *profile_with_idx;
int windowSize = atoi(argv[1]);
char *filename = argv[2];
vector<double> T_host;
cudaGetDeviceCount(&nDevices);
vector<cudaDeviceProp> device_info(nDevices);
cudaSetDevice(0);
cudaFree(0); // forces CUDA context creation up front
readFile<double>(filename, T_host, "%lf");
vector<double> profile_host(T_host.size() - windowSize + 1, DBL_MAX);
vector<unsigned int> index_host(profile_host.size(), 0);
vector<mp_entry> profile_with_idx_h(profile_host.size());
cudaMalloc(&T, T_host.size() * sizeof(double));
cudaMemcpy(T, T_host.data(), T_host.size() * sizeof(double), cudaMemcpyHostToDevice);
cudaMalloc(&profile, profile_host.size() * sizeof(double));
cudaMemcpy(profile, profile_host.data(), profile_host.size() * sizeof(double), cudaMemcpyHostToDevice);
cudaMalloc(&idxs, index_host.size() * sizeof(unsigned int));
cudaMalloc(&profile_with_idx, profile_host.size() * sizeof(mp_entry));
STREAMP<double>(T, T_host.size(), windowSize, profile, idxs, profile_with_idx);
cudaMemcpy(&profile_with_idx_h[0], profile_with_idx, profile_host.size() * sizeof(mp_entry), cudaMemcpyDeviceToHost);
FILE* f1 = fopen( argv[3], "w");
// FIX: the output path was never checked; a bad path crashed in fprintf.
if (f1 == NULL) {
printf("Unable to open %s for writing\n", argv[3]);
exit(0);
}
for(size_t i = 0; i < profile_host.size(); ++i){
fprintf(f1, "%.10f %u\n", sqrt(profile_with_idx_h[i].floats[0]) , profile_with_idx_h[i].ints[1]);
}
fclose(f1);
gpuErrchk(cudaDeviceSynchronize());
// FIX: device memory must be released BEFORE cudaDeviceReset() destroys
// the context (the old order freed stale pointers afterwards), and idxs
// was never freed at all.
cudaFree(T);
cudaFree(profile);
cudaFree(idxs);
cudaFree(profile_with_idx);
gpuErrchk(cudaDeviceReset());
}
|
20,055 | #ifdef _WIN32
# define NOMINMAX
#endif
#include "kernels.cu"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <string>
#include <fstream>
#include <math.h>
#include <random>
//#include <math>
#define NUMY 10
#include <time.h>
using std::ifstream;
using std::string;
using std::ofstream;
// Returns the highest set bit of n (the largest power of two <= n),
// or 0 when n == 0. Smears the top bit into every lower position, then
// subtracts the lower half of the smear.
int hibit(unsigned int n) {
for (int shift = 1; shift <= 16; shift <<= 1) {
n |= (n >> shift);
}
return n - (n >> 1);
}
// Allocates an uninitialized device buffer of `size` doubles.
// NOTE(review): the cudaMalloc result is not checked — on failure the
// returned pointer is unusable; confirm callers tolerate that.
double* generateDeviceArray(int size){
double* buf;
cudaMalloc(&buf, size * sizeof(double));
return buf;
}
// Builds `size` random weights uniform in [-scalar, scalar], scaled by
// the He factor sqrt(2/numInputs). The caller owns (and must free()) the
// returned buffer.
// NOTE(review): the engine is default-seeded, so every call in a process
// starts from the same sequence — confirm that determinism is intended.
double* generateRandomWeights(int size,int numInputs,double scalar){
double* weights = (double*) malloc(size * sizeof(double));
std::default_random_engine engine;
std::uniform_real_distribution<double> uniform(-1*scalar, scalar);
const double heScale = sqrt(2.0/numInputs);
for(int k = 0; k < size; k++){
weights[k] = uniform(engine) * heScale;
}
return weights;
}
// Prints a rows x cols row-major matrix to stdout, preceded by label s.
void printArr(double* arr,int rows,int cols,std::string s){
std::cout<<s<<std::endl;
for(int r = 0; r < rows; r++){
for(int c = 0; c < cols; c++){
printf(" %lf ", arr[r*cols + c] );
}
printf("\n");
}
}
// Copies a rows x cols matrix from device memory to a temporary host
// buffer and prints it via printArr.
void printArrFromDevice(double* darr,int rows,int cols,std::string s){
const size_t bytes = (size_t)rows * cols * sizeof(double);
double* hostCopy = (double*) malloc(bytes);
cudaMemcpy(hostCopy, darr, bytes, cudaMemcpyDeviceToHost);
printArr(hostCopy, rows, cols, s);
free(hostCopy);
}
// Reads one-hot label rows from a text file laid out as:
//   line 0: number of labels in the file
//   line i: integer class id in 0..9
// len is clamped to the file's count. Returns a malloc'd len x 10 matrix
// where row i is the one-hot encoding of label i; caller owns the buffer.
double* read_arrLabels(char* filename, int &len) {
std::ifstream infile(filename);
std::string line;
std::getline(infile, line);
int available = std::stoi(line);
if (available < len) len = available;
double *oneHot = (double*) malloc(10*len * sizeof(double));
for (int i = 0; i < len; i++) {
std::getline(infile, line);
int label = std::stoi(line);
for (int j = 0; j < 10; j++) {
oneHot[i*10 + j] = (label == j) ? 1.0 : 0.0;
}
}
infile.close();
return oneHot;
}
// Same layout as read_arrLabels, but additionally hands back the raw
// integer labels through `correct` (a malloc'd array of len ints that the
// caller owns and must free()).
// FIX: `correct` was previously passed by value, so the malloc'd label
// array never reached the caller and leaked on every call; it is now an
// out-parameter passed by reference (existing call sites compile unchanged).
double* read_arrLabelsTest(char* filename, int &len, int*& correct) {
std::ifstream infile(filename);
std::string line;
std::getline(infile, line);
int available = std::stoi(line);
if (available < len) len = available;
double *x = (double*) malloc(10*len * sizeof(double));
correct = (int*) malloc(len * sizeof(int));
for (int i = 0; i < len; i++) {
std::getline(infile, line);
int label = std::stoi(line);
correct[i] = label;
for (int j = 0; j < 10; j++) {
x[i*10 + j] = (label == j) ? 1.0 : 0.0;
}
}
infile.close();
return x;
}
// Reads a flattened image set from a text file laid out as:
//   line 0: image count; line 1: rows; line 2: cols; then one pixel/line.
// len is clamped to the file's count; rows/cols are output parameters.
// Returns a malloc'd len*rows*cols pixel array owned by the caller.
// NOTE(review): pixels are stored in (signed) char, so values above 127
// wrap negative — confirm downstream normalization expects that.
char* read_arrImage(char* filename, int &len,int &rows,int &cols) {
std::ifstream infile(filename);
std::string line;
std::getline(infile, line);
int available = std::stoi(line);
if (available < len) len = available;
std::getline(infile, line);
rows = std::stoi(line);
std::getline(infile, line);
cols = std::stoi(line);
const int total = len*(rows)*(cols);
char *pixels = (char*) malloc(total * sizeof(char));
for (int i = 0; i < total; i++) {
std::getline(infile, line);
pixels[i] = std::stoi(line);
}
infile.close();
return pixels;
}
/*
double* numToArr(char num){
double* x = (double*) malloc(10*sizeof(double));
for(int i = 0; i < 10;i++){
if(i==num)x[i]=1;
else x[i]=0;
}
}
*/
// One forward + backward pass for a single training example already on the
// device (dx = normalized input, dyCorrect = one-hot target).
// Network shape: numX inputs -> numH hidden (sigmoid) -> numY outputs
// (sigmoid). dinter is a scratch matrix for per-product terms; alpha is
// the momentum term, lrate the learning rate.
void trainingInstance(double* dx,double* dh, double* dy,double* dyCorrect,double* ddels,double* dgammas,double* dinter,double* dWeights1,double* dWeights2,double* ddeltas1,double* ddeltas2,int numX,int numH,int numY,double offset,double alpha,double lrate,int dinterSize){
// --- forward pass: hidden layer ---
forwardPropagation<<<numH,numX>>>(dx,dinter,dWeights1,dinterSize,offset);
matrixReductionToVector<<<numH,numX,numX*sizeof(double)>>>(dinter,dh,1024,hibit(1024));
sigmoidKernel<<<1,numH>>>(dh);
// --- forward pass: output layer ---
forwardPropagation<<<numY,numH>>>(dh,dinter,dWeights2,dinterSize,offset);
matrixReductionToVector<<<numY,numH,numH*sizeof(double)>>>(dinter,dy,1024,hibit(1024));
sigmoidKernel<<<1,numY>>>(dy);
// --- backpropagation ---
// Output-layer error/deltas; also fills ddels for the hidden layer.
backPropagationFirstKernel<<<numY,numH>>>(dh,dy,dyCorrect,dWeights2,ddeltas2,ddels,alpha,lrate);
// Hidden-layer gradient: per-output contributions, row-reduce, then apply.
backPropagationSecondKernelPart1<<<numY,numH>>>(dh,dgammas,dWeights1,ddels,alpha,lrate);
matrixReduction<<<numH,numY,numY*sizeof(double)>>>(dgammas,dgammas,numY,hibit(numY));
backPropagationSecondKernelPart2<<<numH,numX>>>(dx,dgammas,dWeights1,ddeltas1,alpha,lrate);
}
// Runs `epochs` passes over the training set. Each raw byte image is
// normalized to roughly [-1, 1] on the host, then the example and its
// one-hot label are copied to the device and trained on individually.
void longTraining(int len,double* trainLabels,char* trainImage,int epochs,double* dx,double* dh, double* dy,double* dyCorrect,double* ddels,double* dgammas,double* dinter,double* dWeights1,double* dWeights2,double* ddeltas1,double* ddeltas2,int numX,int numH,int numY,double offset,double alpha,double lrate,int dinterSize){
double* pixelBuf = (double*) malloc(numX*sizeof(double));
double* labelBuf = (double*) malloc(numY*sizeof(double));
for(int epoch = 0; epoch < epochs; epoch++){
for(int sample = 0; sample < len; sample++){
// Center and scale raw pixel bytes.
for(int p = 0; p < numX; p++){
pixelBuf[p] = ((double)trainImage[p + sample*numX] - 127.5) / 127.5;
}
for(int c = 0; c < numY; c++){
labelBuf[c] = trainLabels[c + sample*numY];
}
cudaMemcpy(dx, pixelBuf, numX*sizeof(double), cudaMemcpyHostToDevice);
cudaMemcpy(dyCorrect, labelBuf, numY*sizeof(double), cudaMemcpyHostToDevice);
trainingInstance(dx,dh,dy,dyCorrect,ddels,dgammas,dinter,dWeights1,dWeights2,ddeltas1,ddeltas2,numX,numH,numY,offset,alpha,lrate,dinterSize);
}
}
free(pixelBuf);
free(labelBuf);
}
// Inference-only forward pass for the example already in dx; leaves the
// output-layer activations in dy. Mirrors the forward half of
// trainingInstance.
void testingInstance(double* dx,double* dh, double* dy,double* dinter,double* dWeights1,double* dWeights2,int numX,int numH,int numY,double offset,int dinterSize){
// hidden layer
forwardPropagation<<<numH,numX>>>(dx,dinter,dWeights1,dinterSize,offset);
matrixReductionToVector<<<numH,numX,numX*sizeof(double)>>>(dinter,dh,1024,hibit(1024));
sigmoidKernel<<<1,numH>>>(dh);
// output layer
forwardPropagation<<<numY,numH>>>(dh,dinter,dWeights2,dinterSize,offset);
matrixReductionToVector<<<numY,numH,numH*sizeof(double)>>>(dinter,dy,1024,hibit(1024));
sigmoidKernel<<<1,numY>>>(dy);
}
// Runs inference over the whole test set, copying the numY output
// activations of each example into results (row-major, len x numY).
// testLabels is accepted for signature parity but not consulted here;
// scoring against labels happens in the caller.
void testing(int len,double* testLabels,char* testImage,double* results,double* dx,double* dh, double* dy,double* dinter,double* dWeights1,double* dWeights2,int numX,int numH,int numY,double offset,int dinterSize){
double* pixelBuf = (double*) malloc(numX*sizeof(double));
double* outBuf = (double*) malloc(numY*sizeof(double));
for(int sample = 0; sample < len; sample++){
// Same normalization as training.
for(int p = 0; p < numX; p++){
pixelBuf[p] = ((double)testImage[p + sample*numX] - 127.5) / 127.5;
}
cudaMemcpy(dx, pixelBuf, numX*sizeof(double), cudaMemcpyHostToDevice);
testingInstance(dx,dh,dy,dinter,dWeights1,dWeights2,numX,numH,numY,offset,dinterSize);
cudaMemcpy(outBuf, dy, numY*sizeof(double), cudaMemcpyDeviceToHost);
for(int c = 0; c < numY; c++){
results[c + sample*numY] = outBuf[c];
}
}
free(pixelBuf);
free(outBuf);
}
// Entry point. Usage: <epochs> <num training samples> <learning rate>
// <inertia alpha> <hidden-layer size>. Trains a one-hidden-layer MLP on
// the text-dumped image/label files and reports test-set accuracy.
int main(int argc,char** argv){
if(argc!=6){
// FIX: the usage message dereferenced argv[1], which does not exist
// when argc < 2; argv[0] (the program name) was intended.
printf("%s requires 5 additional variables:\nNum Epocs\nNumber of Training Samples\nLearning rate (float/double)\nInertia (float/double)\nNumber of nodes in the hidden layer\n",argv[0]);
return 0;
}
srand(time(NULL));
int epochs = std::stoi(argv[1]);
int len = std::stoi(argv[2]);
double LR = atof(argv[3]);
double alpha = atof(argv[4]);
int numH = std::stoi(argv[5]);
int rows;
int cols;
// Load training data; len/rows/cols are clamped/filled by the readers.
char* trainImage = read_arrImage((char*)"imagesTrain.txt",len,rows,cols);
double* trainLabels = read_arrLabels((char*)"labelsTrain.txt",len);
// Load test data and recover the integer class from each one-hot row.
int testLen = 10000;
char* testImage = read_arrImage((char*)"imagesTest.txt",testLen,rows,cols);
double* testLabels = read_arrLabels((char*)"labelsTest.txt",testLen);
int* correct = (int*) malloc(testLen*sizeof(int));
for(int i = 0; i < testLen;i++){
for(int j =0 ; j < NUMY;j++){
if(testLabels[j+i*NUMY]>.9){
correct[i] = j;
break;
}
}
}
int numX = rows*cols;
int numY = NUMY;
// Device working buffers.
// NOTE(review): the original copied numX*sizeof(double) bytes of CHAR
// image data into dx here, over-reading the source buffer; dx is fully
// overwritten by longTraining() before first use, so the copy is removed.
double* dx = generateDeviceArray(numX);
double* dh = generateDeviceArray(numH);
double* dy = generateDeviceArray(NUMY);
double* dyCorrect = generateDeviceArray(NUMY);
double* ddels = generateDeviceArray(NUMY);
double* dgammas = generateDeviceArray(numH*NUMY);
double* dinter = generateDeviceArray(1024*1024);
// He-initialized weights, mirrored to the device.
double* hWeights1 = generateRandomWeights(numX*numH,numX,1.0);
double* dWeights1 = generateDeviceArray(numX*numH);
double* hdeltas1 = (double*)malloc(numX*numH*sizeof(double));
for(int i = 0; i < numX*numH;i++){
hdeltas1[i] = 0;
}
double* hdeltas2 = (double*)malloc(numH*numY*sizeof(double));
for(int i = 0; i < numY*numH;i++){
hdeltas2[i] = 0;
}
cudaMemcpy(dWeights1,hWeights1,numX*numH*sizeof(double),cudaMemcpyHostToDevice);
double* hWeights2 = generateRandomWeights(numH*NUMY,numH,1.0);
double* dWeights2 = generateDeviceArray(numH*NUMY);
cudaMemcpy(dWeights2,hWeights2,numH*NUMY*sizeof(double),cudaMemcpyHostToDevice);
// Momentum buffers start at zero.
double* ddeltas1 = generateDeviceArray(numX*numH);
double* ddeltas2 = generateDeviceArray(numH*NUMY);
cudaMemcpy(ddeltas1,hdeltas1,numX*numH*sizeof(double),cudaMemcpyHostToDevice);
cudaMemcpy(ddeltas2,hdeltas2,numY*numH*sizeof(double),cudaMemcpyHostToDevice);
double lrate = LR;
int dinterSize = 1024;
double offset = 1;
double* results =(double*) malloc(testLen*NUMY*sizeof(double));
int* bestMatch = (int*)malloc(testLen*sizeof(int));
// Train, then run the whole test set through the network.
longTraining(len,trainLabels,trainImage,epochs,dx,dh,dy,dyCorrect,ddels,dgammas,dinter,dWeights1,dWeights2,ddeltas1,ddeltas2,numX,numH,numY,offset,alpha,lrate,dinterSize);
testing(testLen,testLabels,testImage,results,dx,dh,dy,dinter,dWeights1,dWeights2,numX,numH,numY,offset,dinterSize);
// Argmax of each output row, computed on the device.
int* dbestMatch;
cudaMalloc(&dbestMatch,testLen*sizeof(int));
double* dresults = generateDeviceArray(testLen*NUMY);
int numThreads = 1024;
int numBlocks = testLen/1024 + 1;
cudaMemcpy(dresults,results,sizeof(double)*testLen*NUMY,cudaMemcpyHostToDevice);
bestChoiceKernel<<<numBlocks,numThreads>>>(dresults,dbestMatch,testLen,NUMY);
cudaMemcpy(bestMatch,dbestMatch,testLen*sizeof(int),cudaMemcpyDeviceToHost);
// Score against the true labels.
int err = 0;
int right = 0;
for(int i = 0; i < testLen;i++){
if(bestMatch[i]!=correct[i]){
err++;
}
else{
right++;
}
}
printf("num training data: %d\n",len);
printf("num hidden layers: %d\n",numH);
printf("Learning Rate: %lf\n",lrate);
printf("Alpha: %lf\n",alpha);
printf("num correct: %d\n",right);
printf("num wrong: %d\n",err);
// FIX: hdeltas1/hdeltas2 were leaked; the unused hyCorrect buffer the
// original allocated has been removed entirely.
free(hdeltas1);
free(hdeltas2);
free(hWeights2);
free(hWeights1);
cudaFree(dx);
cudaFree(dh);
cudaFree(dy);
cudaFree(dyCorrect);
cudaFree(ddels);
cudaFree(dgammas);
cudaFree(dinter);
cudaFree(ddeltas1);
cudaFree(ddeltas2);
cudaFree(dWeights2);
cudaFree(dWeights1);
cudaFree(dresults);
cudaFree(dbestMatch);
free(testLabels);
free(testImage);
free(correct);
free(results);
free(bestMatch);
free(trainLabels);
free(trainImage);
}
|
20,056 | #include <stdio.h>
#include <random>
const unsigned long N = 1lu << 16;
const unsigned long block_size = 256;
const float epsilon = 1e-5;
// Fills array[0..size) with uniform random floats in [-1, 1), seeding the
// engine from the OS entropy source on every call.
void rand_array(float* const array, const unsigned long size) {
std::mt19937 engine(std::random_device{}());
std::uniform_real_distribution<float> uniform(-1.f, 1.f);
for (unsigned long idx = 0; idx < size; idx++) {
array[idx] = uniform(engine);
}
}
// Element-wise C = A + B; one thread per element, with a guard for the
// partially filled final block.
__global__ void vec_add_kernel(float* const C, const float* const A, const float* const B, const unsigned long size) {
const unsigned long i = blockIdx.x * blockDim.x + threadIdx.x;
if (i < size) {
C[i] = A[i] + B[i];
}
}
// Allocates three managed vectors, adds A + B on the GPU, and verifies the
// result against a CPU reference with an absolute tolerance.
int main() {
// Host-side reference buffer.
float* const correct = (float*)malloc(sizeof(float) * N);
if (correct == NULL) {
printf("host allocation failed\n");
return 1;
}
float *A, *B, *C;
// FIX: CUDA API results were previously ignored; a failed allocation
// would have crashed later with no diagnostic.
if (cudaMallocManaged((void**)&A, sizeof(float) * N) != cudaSuccess ||
cudaMallocManaged((void**)&B, sizeof(float) * N) != cudaSuccess ||
cudaMallocManaged((void**)&C, sizeof(float) * N) != cudaSuccess) {
printf("cudaMallocManaged failed\n");
return 1;
}
// init
rand_array(A, N);
rand_array(B, N);
for (unsigned long i = 0; i < N; i++) correct[i] = A[i] + B[i];
// run kernel
vec_add_kernel<<<(N + block_size - 1) / block_size, block_size>>>(C, A, B, N);
// FIX: surface launch-configuration and asynchronous execution errors
// instead of silently comparing garbage.
cudaError_t status = cudaGetLastError();
if (status == cudaSuccess) status = cudaDeviceSynchronize();
if (status != cudaSuccess) {
printf("CUDA error: %s\n", cudaGetErrorString(status));
return 1;
}
// check against the CPU reference
unsigned long num_passed = 0;
for (unsigned long i = 0; i < N; i++) {
if (std::abs(correct[i] - C[i]) > epsilon) {
printf("FAILED : [%7lu] C = %e, correct = %e, error = %e\n", i, C[i], correct[i], std::abs(correct[i] - C[i]));
continue;
}
num_passed++;
}
printf("%5lu / %5lu passed\n", num_passed, N);
// free
cudaFree(A);
cudaFree(B);
cudaFree(C);
free(correct);
}
|
20,057 | #include <stdio.h>
#include <math.h>
#include <time.h>
#include <unistd.h>
#include <cuda_runtime_api.h>
#include <errno.h>
#include <unistd.h>
/******************************************************************************
This program takes an initial estimate of m and c and finds the associated
rms error. It is then as a base to generate and evaluate 8 new estimates,
which are steps in different directions in m-c space. The best estimate is
then used as the base for another iteration of "generate and evaluate". This
continues until none of the new estimates are better than the base. This is
a gradient search for a minimum in mc-space.
Compile with:
nvcc -o BgnKndl_Linear_Regression BgnKndl_Linear_Regression.cu -lm
Run:
./BgnKndl_Linear_Regression
To calculate mean time execute the program 10 times and grep time in second only csv file:
./mr.py ./BgnKndl_Linear_Regression | grep Time | awk -F ' ' '{print $6}' | sed 's/s//'> BgnKndl_MeanTime_Linear_Regression.csv
=============================================================
Name: BIGYAN KANDEL University ID: "1917852"
=============================================================
*****************************************************************************/
// One observed regression sample: an (x, y) pair from the data[] table.
typedef struct point_t {
double x; // independent variable
double y; // dependent (observed) value
} point_t;
int num_value = 1000;
__device__ int d_num_value = 1000;
point_t data[] = {
{65.98,109.88},{65.61,110.09},{82.83,125.72},{79.17,142.96},
{78.06,132.83},{71.08,121.03},{82.20,104.06},{69.01,96.53},
{69.10,107.73},{78.10,135.85},{25.48,60.84},{51.23,82.52},
{41.47,86.69},{23.75,68.67},{52.45,86.18},{71.22,104.93},
{20.98,49.05},{60.31,99.66},{88.23,130.82},{12.32,43.84},
{55.15,71.16},{40.21,85.04},{73.07,114.69},{18.25,46.81},
{68.01,106.57},{41.11,69.55},{68.50,113.41},{24.38,72.11},
{31.98,69.09},{48.85,91.03},{40.32,80.00},{98.85,156.67},
{25.30,41.08},{41.25,78.36},{51.61,85.38},{95.27,144.28},
{56.44,60.70},{76.85,100.16},{79.95,126.38},{24.36,56.80},
{54.96,83.15},{90.74,122.54},{39.61,58.59},{10.89,38.93},
{59.32,107.60},{91.89,139.32},{45.89,87.85},{37.04,49.99},
{91.12,138.15},{64.88,101.42},{93.39,128.39},{71.49,133.73},
{ 1.51,43.44},{74.44,115.83},{53.49,101.21},{44.38,89.13},
{81.75,117.68},{ 5.31,17.48},{38.68,85.07},{71.66,112.16},
{87.51,116.60},{88.03,132.43},{24.79,50.64},{49.99,83.80},
{67.65,104.42},{17.15,50.03},{51.98,86.71},{40.72,67.12},
{95.47,127.69},{79.19,125.91},{53.48,80.84},{27.94,59.44},
{79.93,107.59},{46.23,78.64},{34.77,57.59},{54.69,85.62},
{18.08,29.14},{ 1.91,40.22},{79.92,123.01},{61.60,87.65},
{ 0.62,19.24},{83.97,125.60},{ 5.23,23.27},{49.63,87.89},
{ 9.23,38.10},{ 4.98,43.72},{49.16,85.50},{12.49,26.14},
{72.90,100.92},{35.31,62.41},{91.27,141.60},{47.37,86.05},
{68.00,97.98},{77.16,129.54},{69.62,106.06},{74.58,110.28},
{48.68,78.58},{14.04,19.36},{41.30,74.79},{53.61,97.70},
{87.84,120.51},{46.34,85.64},{25.68,52.69},{48.80,83.39},
{98.10,139.48},{23.54,37.75},{51.43,82.45},{24.73,78.24},
{22.39,58.14},{84.31,126.40},{80.28,130.22},{19.39,38.88},
{69.97,110.39},{29.76,69.07},{97.60,123.04},{16.10,51.30},
{20.84,75.09},{31.78,70.70},{ 7.72,37.74},{92.49,151.58},
{67.75,111.64},{74.16,99.91},{19.23,42.35},{ 0.37,23.12},
{74.66,121.43},{77.09,122.78},{89.65,147.35},{30.73,73.11},
{27.17,65.75},{30.65,72.48},{17.62,55.37},{49.22,68.43},
{32.17,61.68},{60.30,100.12},{81.66,125.18},{ 1.71,21.75},
{50.11,84.88},{71.78,106.85},{ 1.03,22.83},{59.29,101.31},
{ 4.34,25.84},{10.21,28.81},{15.35,51.74},{52.65,94.42},
{ 9.18,39.21},{37.32,70.66},{52.16,86.27},{67.12,118.78},
{37.11,66.12},{72.17,96.49},{77.50,115.55},{75.69,114.19},
{21.92,48.52},{38.76,85.90},{52.98,81.19},{76.09,117.48},
{97.88,150.13},{39.92,82.62},{48.55,93.39},{46.13,70.20},
{79.82,108.50},{12.52,24.66},{ 5.86,30.53},{99.62,141.04},
{95.38,122.20},{43.12,81.40},{13.09,67.98},{16.95,36.80},
{ 0.92,25.40},{19.97,39.08},{56.27,93.33},{48.94,98.32},
{97.92,150.58},{20.68,30.30},{55.25,103.05},{ 7.71,37.12},
{51.27,84.65},{94.78,144.94},{52.37,89.28},{96.40,120.43},
{59.72,108.90},{67.14,111.27},{39.80,83.21},{81.93,121.80},
{25.89,61.56},{14.95,52.47},{67.23,97.90},{87.48,137.96},
{57.98,84.65},{13.55,45.28},{37.87,98.66},{63.46,106.85},
{19.59,52.46},{99.13,133.87},{73.29,112.31},{24.49,37.72},
{77.31,107.16},{76.49,123.18},{59.69,93.79},{27.18,53.86},
{73.81,106.81},{71.93,116.92},{51.19,87.83},{23.84,56.82},
{96.68,147.64},{74.37,116.31},{25.31,67.52},{86.62,123.63},
{88.49,109.08},{28.18,47.68},{43.32,59.70},{47.52,78.92},
{82.68,126.46},{96.05,152.44},{51.23,92.41},{41.38,75.40},
{83.56,145.94},{93.84,133.93},{87.96,116.95},{45.68,83.90},
{84.79,109.93},{49.54,72.03},{ 6.15,40.45},{ 5.94,28.87},
{82.45,126.73},{16.91,53.35},{40.26,79.93},{33.75,60.11},
{47.91,79.41},{11.95,34.30},{83.46,137.19},{88.63,133.07},
{55.24,98.25},{28.96,72.98},{33.02,63.08},{33.65,64.66},
{92.63,134.02},{75.91,107.42},{72.45,110.57},{45.90,82.98},
{11.77,53.35},{36.73,79.86},{39.43,68.01},{61.20,99.09},
{ 8.16,25.56},{19.88,58.16},{39.00,68.26},{62.33,93.62},
{53.63,92.55},{54.69,102.15},{ 8.62,63.67},{ 7.90,45.42},
{96.03,134.21},{74.06,120.75},{55.85,72.71},{64.19,105.29},
{70.36,110.21},{59.18,105.69},{11.91,43.45},{13.20,43.79},
{93.47,122.21},{64.30,83.59},{65.28,102.91},{93.38,130.29},
{64.67,92.40},{ 9.14,31.81},{42.18,73.74},{28.13,63.07},
{64.99,106.93},{83.95,122.89},{97.51,124.01},{92.64,142.57},
{41.71,71.88},{30.05,26.71},{60.94,73.06},{89.09,141.68},
{82.92,124.25},{32.03,64.08},{86.78,108.92},{94.88,114.94},
{94.46,138.34},{32.73,73.63},{ 7.89,34.02},{63.21,87.59},
{98.70,141.20},{22.68,52.91},{37.12,80.45},{97.66,141.21},
{49.70,89.78},{39.79,78.74},{97.07,150.82},{90.02,146.52},
{80.17,120.05},{39.62,76.48},{44.90,79.49},{65.42,92.74},
{23.13,53.95},{62.35,96.56},{50.04,95.13},{73.17,91.02},
{75.20,123.73},{95.13,131.84},{ 1.14,19.71},{ 4.97,19.55},
{71.10,103.60},{38.13,88.66},{48.02,71.75},{38.47,66.62},
{59.83,100.74},{94.76,139.07},{94.92,134.21},{49.00,82.75},
{56.63,106.00},{99.18,168.96},{80.39,122.93},{40.06,74.18},
{93.53,121.61},{98.51,153.96},{87.57,134.45},{ 1.09,18.80},
{45.44,89.09},{ 9.80,23.14},{17.63,36.21},{43.84,91.30},
{69.88,110.01},{51.59,87.43},{22.34,39.12},{72.02,103.94},
{85.76,147.21},{88.59,126.81},{47.28,74.12},{13.62,50.73},
{ 3.07,35.18},{92.45,118.35},{36.86,82.33},{54.71,88.61},
{65.99,100.49},{94.98,126.89},{24.77,68.62},{43.76,68.34},
{63.65,99.25},{54.30,97.06},{34.11,73.52},{16.61,25.84},
{29.93,49.17},{92.96,122.77},{41.77,62.86},{ 3.41,32.07},
{22.45,54.35},{40.68,71.64},{ 5.40,23.03},{47.86,82.23},
{17.33,34.98},{50.99,84.08},{ 1.78, 2.95},{84.45,135.15},
{54.86,93.38},{87.32,116.34},{49.24,95.49},{47.43,100.97},
{47.27,92.94},{23.28,66.58},{25.74,54.15},{82.64,119.28},
{71.73,88.56},{95.92,152.78},{78.86,117.33},{ 1.61,33.32},
{32.06,68.46},{23.58,59.11},{46.09,76.04},{89.91,151.32},
{50.15,86.37},{42.70,69.58},{50.79,73.18},{40.66,69.55},
{81.02,122.69},{76.53,123.30},{37.80,61.23},{52.67,87.59},
{37.80,77.17},{27.84,63.10},{42.80,79.95},{98.09,161.99},
{23.86,62.92},{72.89,106.05},{21.70,59.42},{87.24,123.07},
{40.25,73.68},{11.59,10.66},{50.62,107.80},{48.56,80.18},
{ 3.71,48.88},{53.35,99.93},{10.96,44.06},{11.85,34.55},
{37.41,68.76},{54.70,88.47},{ 6.02,32.10},{32.47,64.70},
{46.58,76.54},{51.32,109.05},{37.67,72.50},{71.02,96.23},
{ 2.98,28.02},{72.53,121.83},{19.24,63.10},{46.16,87.25},
{90.36,138.24},{96.27,141.87},{82.11,134.67},{12.58,44.75},
{39.48,57.18},{97.31,139.36},{84.10,129.54},{23.83,48.31},
{11.10,36.32},{81.42,113.42},{54.35,91.08},{25.33,44.56},
{94.96,141.98},{80.58,122.50},{61.70,109.59},{78.96,116.87},
{89.26,134.81},{89.69,131.36},{53.56,80.66},{53.94,74.60},
{ 4.64,20.55},{29.49,45.56},{72.03,123.58},{79.28,123.05},
{28.78,60.79},{23.13,43.45},{48.55,82.15},{71.08,107.12},
{12.12,42.70},{ 8.63,30.00},{40.82,78.58},{ 4.43,24.84},
{33.73,68.61},{46.84,97.55},{18.91,33.06},{51.51,86.70},
{36.87,63.19},{97.67,134.69},{72.97,98.12},{84.55,127.32},
{95.58,133.86},{52.99,87.39},{28.19,55.44},{16.15,31.92},
{75.00,117.09},{66.97,103.03},{99.82,146.19},{92.75,136.01},
{52.62,94.45},{91.93,116.28},{88.14,116.14},{58.20,93.03},
{22.14,40.65},{ 6.43,33.08},{27.51,61.50},{66.33,101.67},
{67.75,120.78},{46.27,83.90},{51.76,73.86},{58.85,80.63},
{90.78,136.48},{ 1.71,30.57},{ 0.59,36.83},{23.41,55.48},
{26.82,64.57},{76.18,119.46},{59.86,107.17},{65.18,92.35},
{72.23,103.36},{89.96,134.89},{90.32,122.69},{33.42,48.94},
{62.97,104.26},{99.58,160.97},{40.36,82.29},{68.72,118.83},
{68.39,107.33},{ 6.27,21.40},{82.38,122.11},{ 8.19,29.90},
{ 5.15,17.56},{31.21,66.79},{39.32,74.38},{74.02,99.91},
{13.50,32.58},{26.41,61.86},{45.22,82.26},{95.36,143.57},
{23.16,53.48},{39.89,84.69},{ 3.56,42.95},{19.03,39.83},
{83.99,118.14},{11.39,41.43},{26.77,39.32},{25.17,65.32},
{44.96,80.99},{34.14,65.65},{96.21,148.27},{80.17,118.87},
{74.22,113.21},{90.18,125.88},{31.33,63.68},{22.52,56.53},
{76.63,118.48},{ 4.29,23.57},{49.01,74.15},{30.15,49.90},
{56.30,80.88},{49.63,80.89},{19.83,62.84},{67.76,102.38},
{41.47,74.22},{64.01,87.45},{33.41,64.62},{64.28,114.04},
{89.80,136.67},{35.36,70.68},{20.33,52.14},{36.30,53.02},
{22.73,63.55},{81.20,122.76},{50.69,110.73},{51.13,81.86},
{44.38,76.08},{94.41,150.87},{23.71,67.69},{10.27,38.51},
{82.62,125.56},{59.50,100.62},{32.93,66.31},{ 2.04,30.59},
{89.21,116.87},{51.73,92.55},{10.34,31.09},{33.43,86.80},
{32.32,62.48},{95.63,142.24},{38.91,76.00},{44.25,70.00},
{ 9.96,55.82},{93.10,130.20},{35.27,77.53},{19.59,43.23},
{54.95,96.09},{78.74,118.41},{49.07,61.42},{47.07,87.31},
{21.22,53.90},{69.19,111.37},{15.25,39.64},{76.29,104.99},
{48.09,99.03},{97.55,126.93},{64.50,107.51},{40.93,73.00},
{ 0.23,16.44},{ 3.02,26.47},{69.74,123.61},{22.05,47.02},
{ 7.70,49.25},{69.99,119.31},{87.86,117.51},{86.35,131.60},
{38.48,72.31},{92.48,155.60},{90.16,129.13},{64.69,90.96},
{23.67,60.49},{69.36,122.18},{ 1.97,34.34},{87.38,149.58},
{33.75,60.97},{38.05,82.43},{81.43,117.05},{24.59,50.66},
{99.69,128.18},{36.50,72.85},{99.28,150.14},{34.20,74.91},
{75.23,107.17},{21.49,40.61},{46.98,81.59},{18.66,63.26},
{15.21,31.82},{52.00,82.80},{ 2.35,19.45},{37.12,55.87},
{47.23,106.45},{87.26,124.95},{ 2.02,29.64},{84.58,138.52},
{ 5.24,36.32},{84.73,129.08},{ 3.96,11.38},{86.43,142.72},
{33.58,80.18},{43.52,78.01},{51.69,86.51},{72.94,114.81},
{29.07,61.84},{ 5.51,25.99},{79.47,127.82},{66.53,104.59},
{20.11,69.20},{87.10,129.55},{37.41,58.99},{85.87,141.46},
{56.75,71.42},{65.82,109.61},{ 1.75,31.92},{62.62,102.01},
{85.45,137.87},{69.44,94.12},{85.48,145.09},{69.36,123.50},
{45.73,80.67},{13.02,57.00},{89.77,128.60},{99.84,137.25},
{86.08,111.09},{63.43,124.97},{19.65,56.52},{41.49,56.63},
{18.99,54.69},{65.20,102.59},{81.01,130.07},{88.04,147.89},
{50.51,82.74},{79.09,121.97},{ 0.21,12.54},{34.47,76.87},
{ 0.72,26.80},{39.64,89.25},{37.11,80.49},{35.08,62.09},
{90.47,140.29},{72.50,142.03},{ 0.86,25.27},{96.35,140.57},
{24.41,52.99},{12.14,47.45},{14.27,36.30},{94.60,150.34},
{52.66,85.95},{49.43,104.28},{65.54,118.46},{95.97,126.34},
{98.81,148.34},{60.19,97.62},{88.64,129.58},{14.99,58.64},
{49.14,77.73},{39.41,64.09},{ 0.01,32.36},{24.92,55.46},
{67.19,119.75},{93.47,135.47},{89.45,157.39},{10.13,26.67},
{55.19,85.62},{85.63,125.97},{79.79,110.93},{44.59,68.48},
{54.38,75.35},{ 8.88,35.67},{70.24,119.20},{ 8.04,42.16},
{53.00,91.48},{28.40,62.36},{37.98,72.15},{ 2.21,31.49},
{80.96,101.41},{15.79,27.63},{86.66,135.54},{92.12,141.73},
{79.89,120.98},{38.48,64.02},{71.15,121.04},{34.94,67.78},
{85.50,134.81},{44.34,72.26},{25.78,53.14},{10.10,29.56},
{65.67,106.97},{ 1.16,23.66},{63.44,98.36},{53.95,79.86},
{82.98,110.78},{27.17,68.55},{64.28,94.22},{92.28,137.60},
{47.73,79.95},{74.14,108.55},{ 9.34,34.76},{55.48,74.43},
{32.85,65.25},{31.95,63.41},{74.21,119.53},{38.71,80.32},
{65.98,112.16},{30.76,59.95},{ 6.13,26.37},{71.39,115.23},
{48.68,90.44},{ 6.39,39.87},{37.57,55.24},{46.01,85.04},
{65.61,103.16},{92.04,141.90},{34.76,60.22},{40.01,76.93},
{62.97,88.66},{98.52,145.68},{77.33,130.80},{38.21,54.49},
{95.37,131.71},{80.42,116.97},{99.56,138.12},{75.53,111.85},
{58.26,106.43},{12.98,28.45},{61.02,89.63},{15.80,45.30},
{11.64,22.19},{81.84,134.37},{80.55,133.82},{ 0.55,36.65},
{ 6.90,38.38},{66.50,102.13},{38.74,66.68},{38.18,61.55},
{ 6.74,34.99},{32.47,52.45},{14.53,34.00},{95.63,132.33},
{60.07,101.95},{76.01,125.62},{ 5.32,36.47},{83.66,121.27},
{60.42,74.00},{80.59,126.57},{ 0.01,11.77},{48.01,84.63},
{26.26,44.73},{56.30,89.09},{72.71,105.82},{20.29,72.57},
{59.05,104.69},{15.66,39.98},{ 9.24,42.29},{64.66,111.15},
{ 5.78,41.81},{36.28,90.06},{63.16,107.89},{41.14,57.62},
{42.53,76.07},{26.77,85.91},{64.47,114.18},{91.69,127.60},
{95.16,149.07},{58.17,89.42},{19.24,59.57},{ 5.80,32.06},
{21.96,55.62},{49.91,87.86},{87.87,129.66},{36.75,68.70},
{20.12,40.45},{45.46,84.01},{88.56,137.14},{77.76,122.24},
{ 4.72,36.23},{16.89,42.96},{94.34,155.05},{31.78,61.95},
{37.55,73.85},{ 8.45,43.29},{93.18,140.42},{58.73,94.30},
{62.00,101.43},{49.41,76.06},{89.45,151.59},{60.12,95.57},
{80.33,129.96},{ 0.63,13.11},{32.55,51.13},{70.78,114.51},
{12.01,57.29},{61.46,98.15},{70.39,128.47},{48.00,96.29},
{81.36,134.64},{97.87,148.00},{47.94,81.99},{23.10,52.69},
{25.83,66.13},{91.09,148.03},{47.29,86.23},{15.08,25.42},
{63.83,98.57},{43.84,79.41},{48.83,81.91},{28.60,45.09},
{66.05,98.34},{71.19,126.79},{60.16,105.53},{29.97,59.54},
{55.89,76.53},{90.91,133.84},{13.12,38.19},{48.25,84.49},
{84.77,113.71},{67.46,109.35},{60.41,101.79},{89.53,125.98},
{46.19,88.24},{31.79,53.57},{49.90,70.35},{39.96,84.28},
{29.28,65.89},{38.86,59.42},{87.39,136.29},{49.54,76.60},
{50.71,101.66},{29.25,63.89},{47.24,87.50},{67.74,122.88},
{49.94,92.28},{27.88,46.81},{21.53,49.84},{69.62,113.11},
{15.34,35.21},{31.73,61.89},{36.96,64.05},{19.12,49.07},
{ 6.85,56.96},{80.80,125.48},{17.88,55.38},{96.84,149.55},
{ 2.19,18.21},{ 9.74,29.24},{89.80,125.16},{74.29,94.46},
{62.52,98.50},{78.41,111.96},{79.25,119.97},{ 8.11,37.74},
{ 3.96,21.28},{69.31,112.35},{62.10,104.66},{41.96,72.57},
{47.18,84.32},{62.38,86.11},{35.85,74.83},{37.71,61.66},
{58.97,98.54},{ 7.31,46.43},{27.93,55.06},{19.53,38.27},
{49.21,83.29},{64.41,85.78},{79.31,111.66},{74.02,107.72},
{68.37,113.42},{63.56,113.81},{68.64,101.86},{21.79,49.85},
{93.99,143.17},{ 8.48,43.35},{82.85,129.48},{20.82,30.19},
{56.62,80.69},{31.58,67.45},{31.82,56.24},{13.21,62.94},
{56.35,101.96},{73.73,112.99},{26.56,60.76},{78.60,121.64},
{47.72,70.15},{59.33,83.16},{73.84,111.55},{32.13,67.85},
{62.11,85.03},{46.34,103.33},{ 7.20,25.52},{98.38,152.42},
{14.23,36.92},{89.10,118.93},{65.38,112.12},{25.00,62.76},
{83.69,125.92},{59.68,88.77},{99.48,131.31},{ 6.28,32.14},
{76.60,99.82},{82.34,120.32},{28.54,59.57},{95.04,141.28},
{78.09,135.88},{39.57,60.68},{77.44,118.82},{30.07,40.16},
{43.43,69.20},{49.33,68.65},{59.93,103.75},{48.68,88.57},
{22.76,48.66},{ 2.83,13.74},{54.78,95.15},{62.32,113.62},
{74.65,112.22},{44.72,83.91},{18.33,50.01},{26.65,56.92},
{90.67,135.56},{38.66,69.11},{43.47,91.29},{18.17,45.41},
{90.23,134.53},{ 4.57,35.59},{21.27,66.70},{85.39,124.50},
{77.91,125.34},{44.73,78.31},{34.30,61.61},{61.74,109.46},
{24.83,46.20},{22.73,35.09},{20.24,35.61},{26.44,75.99},
{ 1.01,29.24},{64.88,94.06},{43.84,77.98},{48.04,88.44},
{72.45,123.23},{ 4.35,48.53},{19.61,49.04},{85.85,106.15},
{31.75,58.85},{76.12,104.76},{25.74,64.24},{39.66,76.58},
{70.82,115.36},{45.10,58.40},{78.75,112.46},{83.57,143.06},
{ 2.07,46.65},{22.64,65.16},{56.52,90.89},{39.66,56.79},
{53.78,91.69},{39.91,66.52},{65.10,98.17},{84.90,122.12},
{85.37,130.09},{45.86,101.46},{ 4.30,38.60},{53.87,87.98},
{36.79,75.51},{62.80,87.07},{ 8.76,34.75},{47.99,97.87},
{24.35,64.41},{87.90,147.07},{68.06,115.47},{60.60,117.37},
{77.40,96.83},{99.00,144.71},{ 6.21, 5.47},{29.12,58.37},
{38.77,80.06},{25.93,49.39},{73.79,116.08},{70.83,94.68},
{24.60,56.63},{54.45,82.10},{65.99,107.35},{ 5.52,19.82},
{91.30,131.77},{62.34,101.36},{60.08,96.29},{64.86,120.21},
{37.64,64.74},{65.27,116.95},{28.69,54.27},{58.69,105.92},
{56.14,84.76},{95.59,125.32},{ 5.04,30.54},{35.80,79.09}
};
/* Squared residual of the linear model y ~ m*x + c at the point (x, y). */
double residual_error(double x, double y, double m, double c) {
  double predicted = (m * x) + c;
  double diff = predicted - y;
  return diff * diff;
}
/* Device-side twin of residual_error(): squared error of (m*x + c) vs y. */
__device__ double d_residual_error(double x, double y, double m, double c) {
  double diff = ((m * x) + c) - y;
  return diff * diff;
}
/* Root-mean-square residual of the line (m, c) over the global data set
   (the first num_value entries of data[]). */
double rms_error(double m, double c) {
  double total = 0;
  for (int i = 0; i < num_value; i++) {
    total += residual_error(data[i].x, data[i].y, m, c);
  }
  return sqrt(total / num_value);
}
/*
 * One thread per data point: stores that point's squared residual for the
 * candidate line (*m, *c) into error_sum_arr[i]; the host sums the array.
 * Launch with grid*block >= d_num_value threads (the caller uses 100x10).
 *
 * Fix: added a bounds guard using the file's (previously unused) d_num_value
 * so an over-sized launch cannot write past the 1000-element device buffer.
 */
__global__ void d_rms_error(double *m, double *c, double *error_sum_arr, point_t *d_data) {
  /* Flat global thread index across the 1-D grid. */
  int i = threadIdx.x + blockIdx.x * blockDim.x;
  if (i < d_num_value) {
    error_sum_arr[i] = d_residual_error(d_data[i].x, d_data[i].y, *m, *c);
  }
}
// This function calculates the time difference between the start and end of the program
/* Stores finish - start, in nanoseconds, into *difference.
   Returns 0 when the elapsed time is positive, 1 otherwise. */
int time_difference(struct timespec *start, struct timespec *finish,
                    long long int *difference) {
  long long int sec = finish->tv_sec - start->tv_sec;
  long long int nsec = finish->tv_nsec - start->tv_nsec;
  /* Borrow one second when the nanosecond field underflows. */
  if (nsec < 0) {
    sec -= 1;
    nsec += 1000000000;
  }
  *difference = sec * 1000000000 + nsec;
  return !(*difference > 0);
}
// Main program begins:
/*
 * Gradient search for the line (m, c) with minimum RMS error over the global
 * data set. Starting from (1.3, 10), each iteration scores the 8 neighbouring
 * estimates one `step` away in m-c space on the GPU and moves the base to the
 * best neighbour; it stops when no neighbour improves on the base.
 *
 * Fixes relative to the original:
 *  - error_sum_total was read before ever being initialised (it was only
 *    reset to 0 at the END of each pass), so the first RMS values
 *    accumulated indeterminate garbage; it is now initialised before use.
 *  - best_error_i was uninitialised.
 *  - deprecated cudaThreadSynchronize() replaced by cudaDeviceSynchronize().
 *  - &h_error_sum_arr (pointer-to-array) replaced by the plain array name
 *    in the device-to-host copy.
 */
int main() {
  int i;
  double bm = 1.3;              /* base gradient estimate */
  double bc = 10;               /* base intercept estimate */
  double be;                    /* RMS error of the base estimate */
  double dm[8];                 /* candidate gradients */
  double dc[8];                 /* candidate intercepts */
  double e[8];                  /* RMS error of each candidate */
  double step = 0.01;
  double best_error = 999999999;
  int best_error_i = 0;         /* FIX: initialised */
  int minimum_found = 0;
  /* Offsets of the 8 compass directions in m-c space. */
  double om[] = {0,1,1, 1, 0,-1,-1,-1};
  double oc[] = {1,1,0,-1,-1,-1, 0, 1};
  struct timespec start, finish;
  long long int time_elapsed;

  /* System time before the linear regression begins. */
  clock_gettime(CLOCK_MONOTONIC, &start);

  cudaError_t error;
  /* Device buffers. */
  double *d_dm;
  double *d_dc;
  double *d_error_sum_arr;
  point_t *d_data;

  be = rms_error(bm, bc);

  /* Device memory for the 8 candidate gradients. */
  error = cudaMalloc(&d_dm, (sizeof(double) * 8));
  if(error){
    fprintf(stderr, "cudaMalloc on d_dm returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }
  /* Device memory for the 8 candidate intercepts. */
  error = cudaMalloc(&d_dc, (sizeof(double) * 8));
  if(error){
    fprintf(stderr, "cudaMalloc on d_dc returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }
  /* One squared-residual slot per data point. */
  error = cudaMalloc(&d_error_sum_arr, (sizeof(double) * 1000));
  if(error){
    fprintf(stderr, "cudaMalloc on d_error_sum_arr returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }
  /* Device copy of the whole data table. */
  error = cudaMalloc(&d_data, sizeof(data));
  if(error){
    fprintf(stderr, "cudaMalloc on d_data returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  while(!minimum_found) {
    /* Generate the 8 neighbouring candidates around the current base. */
    for(i=0;i<8;i++) {
      dm[i] = bm + (om[i] * step);
      dc[i] = bc + (oc[i] * step);
    }
    error = cudaMemcpy(d_dm, dm, (sizeof(double) * 8), cudaMemcpyHostToDevice);
    if(error){
      fprintf(stderr, "cudaMemcpy to d_dm returned %d %s\n", error,
        cudaGetErrorString(error));
    }
    error = cudaMemcpy(d_dc, dc, (sizeof(double) * 8), cudaMemcpyHostToDevice);
    if(error){
      fprintf(stderr, "cudaMemcpy to d_dc returned %d %s\n", error,
        cudaGetErrorString(error));
    }
    error = cudaMemcpy(d_data, data, sizeof(data), cudaMemcpyHostToDevice);
    if(error){
      fprintf(stderr, "cudaMemcpy to d_data returned %d %s\n", error,
        cudaGetErrorString(error));
    }

    for(i=0;i<8;i++) {
      /* Per-point squared residuals copied back from the kernel. */
      double h_error_sum_arr[1000];
      /* Running total of the squared residuals (FIX: initialised to 0). */
      double error_sum_total = 0;
      /* Mean of the squared residuals. */
      double error_sum_mean;

      /* 100 blocks x 10 threads = one thread per data point. */
      dim3 block_Dim(100,1,1), thread_Dim(10,1,1);
      d_rms_error <<<block_Dim,thread_Dim>>>(&d_dm[i], &d_dc[i], d_error_sum_arr, d_data);
      cudaDeviceSynchronize();

      error = cudaMemcpy(h_error_sum_arr, d_error_sum_arr, (sizeof(double) * 1000), cudaMemcpyDeviceToHost);
      if(error){
        fprintf(stderr, "cudaMemcpy to error_sum returned %d %s\n", error,
          cudaGetErrorString(error));
      }

      /* Reduce the per-point residuals into the RMS error for candidate i. */
      for(int j=0; j<num_value; j++) {
        error_sum_total += h_error_sum_arr[j];
      }
      error_sum_mean = error_sum_total / num_value;
      e[i] = sqrt(error_sum_mean);

      if(e[i] < best_error) {
        best_error = e[i];
        best_error_i = i;
      }
    }

    /* Move the base to the best neighbour, or stop if none improves. */
    if(best_error < be) {
      be = best_error;
      bm = dm[best_error_i];
      bc = dc[best_error_i];
    } else {
      minimum_found = 1;
    }
  }

  /* Release device memory. */
  error = cudaFree(d_dm);
  if(error){
    fprintf(stderr, "cudaFree on d_dm returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }
  error = cudaFree(d_dc);
  if(error){
    fprintf(stderr, "cudaFree on d_dc returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }
  error = cudaFree(d_data);
  if(error){
    fprintf(stderr, "cudaFree on d_data returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }
  error = cudaFree(d_error_sum_arr);
  if(error){
    fprintf(stderr, "cudaFree on d_error_sum_arr returned %d %s\n", error,
      cudaGetErrorString(error));
    exit(1);
  }

  /* System time after the regression; report the elapsed time. */
  clock_gettime(CLOCK_MONOTONIC, &finish);
  time_difference(&start, &finish, &time_elapsed);
  printf("Time elapsed was %lldns or %0.9lfs\n", time_elapsed,
    (time_elapsed/1.0e9));
  return 0;
}
|
20,058 | #include <thrust/host_vector.h>
#include <thrust/device_vector.h>
#include <thrust/generate.h>
#include <thrust/sort.h>
#include <thrust/copy.h>
#include <algorithm>
#include <cstdlib>
#include <iostream>
// Round-trips 32M random ints through device memory, verifies the copy is
// lossless, then sorts the device copy and pulls it back to the host.
int main( int argc, char* argv[] )
{
    std::cout << "Hello from [ " << argv[ argc - 1 ] << " ]!" << std::endl;

    // Generate 32M random numbers serially on the host.
    thrust::host_vector<int> h_vec(32 << 20);
    std::generate(h_vec.begin(), h_vec.end(), rand);
    thrust::host_vector<int> h_check = h_vec;   // reference copy for the check

    // Host -> device, then straight back to the host buffer.
    thrust::device_vector<int> d_vec = h_vec;
    thrust::copy(d_vec.begin(), d_vec.end(), h_vec.begin());

    // Compare the round-tripped data against the untouched reference.
    uint8_t flag = 0;
    for ( size_t i = 0; i < h_vec.size(); i++ )
    {
        if ( h_vec[ i ] != h_check[ i ] )
        {
            std::cerr << "Vector check [ " << argv[ argc - 1 ] << " ] error!\n";
            flag = 1;
            break;
        }
    }
    if ( flag == 0 )
        std::cout << "Vector check [ " << argv[ argc - 1 ] << " ] OK!\n";

    // Sort on the device and copy the sorted data back to the host.
    thrust::sort(d_vec.begin(), d_vec.end());
    thrust::copy(d_vec.begin(), d_vec.end(), h_vec.begin());
    return 0;
}
|
// Busy-work device routine: counts z up from 0 in 1e-5 increments until it
// reaches 7.777777 and returns the accumulated value. The `in`/`nword`
// parameters are accepted but not read.
extern __device__ double myDeviceFunc(double* in, int nword)
{
    double z = 0.;
    do {
        z += 1.0e-5;
    } while (z < 7.777777);
    return z;
}
|
20,060 | #include "includes.h"
// Adds b[i] into a[i], one thread per element. No bounds guard is present,
// so the launch's thread count must exactly match the array length.
__global__ void hello(char *a, int *b)
{
    const int i = threadIdx.x;
    a[i] += b[i];
}
20,061 | //THE SHARED MEMORY PROGRAM FOR MATRIX MULTIPLICATION
//AUTHOR : SAURAV RAI
//REGD NO: 17558
#include<cuda.h>
#include<stdio.h>
#include<stdlib.h>
#define blockD 32
#define TILE_DIM 32
__global__ void MatrixMulKernel(float* , float* , float*,int );
void MatrixMultiplication(float *, float *, float *,int );
/* Builds two Width x Width matrices of ones in pinned host memory,
   multiplies them on the GPU, and prints every element of the product.

   Fixes: pinned memory from cudaMallocHost must be released with
   cudaFreeHost, not cudaFree (which expects device pointers); a missing
   command-line argument no longer dereferences argv[1]. */
int main(int argc , const char * argv[])
{
    if (argc < 2)
    {
        fprintf(stderr, "usage: %s <matrix-width>\n", argv[0]);
        return 1;
    }
    const int Width = atoi(argv[1]);
    int size = Width * Width * sizeof(float);
    float *M, *N, *P ;

    /* Pinned (page-locked) host allocations. */
    cudaMallocHost((void **)&M , size);
    cudaMallocHost((void **)&N , size);
    cudaMallocHost((void **)&P , size);

    /* Fill both input matrices with ones. */
    for (int y=0; y < Width; y++)
    {
        for (int x=0; x < Width; x++)
        {
            M[y * Width + x] = 1;
        }
    }
    for (int y=0; y< Width; y++)
    {
        for (int x=0; x< Width; x++)
        {
            N[y * Width + x] = 1;
        }
    }

    MatrixMultiplication(M, N, P, Width);

    for(int i = 0; i < Width * Width ; i++)
    {
        printf("%f\n",P[i]);
    }

    /* FIX: release pinned host memory with the matching API. */
    cudaFreeHost( M );
    cudaFreeHost( N );
    cudaFreeHost( P );
    return 0;
}
/* Tiled matrix multiply: Pd = Md * Nd for Width x Width row-major matrices.
   Launch with TILE_DIM x TILE_DIM blocks tiling the output matrix.

   Fixes relative to the original:
   - the accumulator was an `int`, truncating every float partial product;
   - a divergent `return` sat between two __syncthreads() inside the tile
     loop, so threads outside the matrix stopped participating in barriers
     that in-range threads still reached (undefined behaviour);
   - the output store was inside the tile loop; it now happens once at the
     end, guarded so only in-range threads write. */
__global__ void MatrixMulKernel(float* Md, float* Nd, float* Pd, int Width)
{
    __shared__ float Mds[TILE_DIM][TILE_DIM];
    __shared__ float Nds[TILE_DIM][TILE_DIM];

    int Row = blockIdx.y*TILE_DIM + threadIdx.y;
    int Col = blockIdx.x*TILE_DIM + threadIdx.x;

    float CValue = 0.0f;  /* float accumulator (original used int) */

    for (int k = 0; k < (TILE_DIM + Width - 1)/TILE_DIM; k++) {
        /* Cooperative load of one tile of Md and Nd, zero-padding the
           out-of-range remainder so the inner product stays correct. */
        if (k*TILE_DIM + threadIdx.x < Width && Row < Width)
            Mds[threadIdx.y][threadIdx.x] = Md[Row * Width + k*TILE_DIM + threadIdx.x];
        else
            Mds[threadIdx.y][threadIdx.x] = 0.0f;
        if (k*TILE_DIM + threadIdx.y < Width && Col < Width)
            Nds[threadIdx.y][threadIdx.x] = Nd[(k*TILE_DIM + threadIdx.y) * Width + Col];
        else
            Nds[threadIdx.y][threadIdx.x] = 0.0f;
        __syncthreads();  /* tiles fully loaded before anyone reads them */

        for (int n = 0; n < TILE_DIM; ++n)
            CValue += Mds[threadIdx.y][n] * Nds[n][threadIdx.x];
        __syncthreads();  /* finish reading before the next load overwrites */
    }

    if (Row < Width && Col < Width)
        Pd[Row * Width + Col] = CValue;
}
/* Host wrapper: copies M and N to the device, launches the tiled
   multiplication kernel over a blockD x blockD grid covering the whole
   matrix, and copies the product back into P. */
void MatrixMultiplication(float *M, float *N, float *P, int Width)
{
    const int bytes = Width * Width * sizeof(float);
    float *Md, *Nd, *Pd;

    /* Device buffers for the two inputs and the product. */
    cudaMalloc((void**)&Md, bytes);
    cudaMalloc((void**)&Nd, bytes);
    cudaMalloc((void**)&Pd, bytes);

    /* Ship the inputs to the device. */
    cudaMemcpy(Md, M, bytes, cudaMemcpyHostToDevice);
    cudaMemcpy(Nd, N, bytes, cudaMemcpyHostToDevice);

    /* Enough blockD x blockD blocks to tile the matrix in each dimension. */
    const unsigned int blocksPerSide = (Width + blockD - 1) / blockD;
    dim3 dimBlock(blockD, blockD);
    dim3 dimGrid(blocksPerSide, blocksPerSide);

    MatrixMulKernel<<<dimGrid, dimBlock>>>(Md, Nd, Pd, Width);

    /* Retrieve the product (blocking copy also synchronises the kernel). */
    cudaMemcpy(P, Pd, bytes, cudaMemcpyDeviceToHost);

    cudaFree(Md);
    cudaFree(Nd);
    cudaFree(Pd);
}
|
20,062 | /*
autor fredy m
uaem
desonses@gmail.com para mas comentarios
*/
#include <stdio.h>
#include <stdlib.h>
#include <cuda_runtime.h>
#include <device_launch_parameters.h>
/*
ejemplo que llena un vector que invierte sus valores
*/
#define N 30 //tamano de los vectores
/* Writes into b the reverse of the N-element vector a.
   FIX: the original indexed a[N - id], which reads one element past the end
   of the array when id == 0 and never reads a[0]; the correct mirror index
   is a[N - 1 - id]. */
__global__ void invierte(float *a, float *b) {
	int id = threadIdx.x;
	//int id = threadIdx.x + blockDim.x * blockIdx.x;// for n blocks of 1 thread
	if (id < N)
	{
		b[id] = a[N - 1 - id];
	}
}
/* Synchronises with the device, then prints the last pending CUDA error
   (if any) tagged with the caller-supplied message. */
__host__ void check_CUDA_Error(const char *mensaje) {
	cudaDeviceSynchronize();
	cudaError_t error = cudaGetLastError();
	if (error != cudaSuccess) {
		printf("ERROR %d: %s (%s)\n", error, cudaGetErrorString(error), mensaje);
	}
}
/* Fills an N-element vector with random values, reverses it on the GPU
   while timing the kernel with CUDA events, then prints both vectors and
   the elapsed time.
   FIX: the original placed `return 0;` BEFORE the cudaFree/free calls, so
   all cleanup was dead code; resources (including the events, which were
   never destroyed) are now released before returning. */
int main(int argc, char** argv)
{
	float *vector1, *resultado;
	float *dev_vector1, *dev_resultado;
	// host allocations
	vector1 = (float*)malloc(N * sizeof(float));
	resultado = (float*)malloc(N * sizeof(float));
	// device allocations
	cudaMalloc((void**)&dev_vector1, N * sizeof(float));
	check_CUDA_Error("Error Malloc dev_vector");
	cudaMalloc((void**)&dev_resultado, N * sizeof(float));
	check_CUDA_Error("Error Malloc dev_resultado");
	// initialise the input with random values in [0, 1] and print it
	printf("vector inicial: \n");
	for (int i = 0; i < N; i++) {
		vector1[i] = (float)rand() / RAND_MAX;
		printf("%.2f, ", vector1[i]);
	}
	// send the input to the device
	cudaMemcpy(dev_vector1, vector1, N * sizeof(float), cudaMemcpyHostToDevice);
	check_CUDA_Error("Error CudaMemcpy");
	// events to time the kernel on the GPU
	cudaEvent_t start;
	cudaEvent_t stop;
	cudaEventCreate(&start);
	cudaEventCreate(&stop);
	cudaEventRecord(start, 0);
	// one block of N threads (switch to (N,1) for N blocks of 1 thread)
	invierte<<<1, N >>>(dev_vector1, dev_resultado);
	check_CUDA_Error("Error Kernel");
	cudaEventRecord(stop, 0);
	// wait for the stop event so the elapsed time is valid
	cudaEventSynchronize(stop);
	float elapsedTime;
	cudaEventElapsedTime(&elapsedTime, start, stop);
	// fetch and print the reversed vector
	printf("\n");
	printf("vector de regreso:\n");
	cudaMemcpy(resultado, dev_resultado, N * sizeof(float), cudaMemcpyDeviceToHost);
	check_CUDA_Error("Error CudaMemcpy2");
	for (int i = 0; i < N; i++) {
		printf("%.2f, ", resultado[i]);
	}
	printf("\n");
	printf("> Tiempo de ejecucion: %f ms\n", elapsedTime);
	// release resources (was unreachable in the original)
	cudaEventDestroy(start);
	cudaEventDestroy(stop);
	cudaFree(dev_vector1);
	cudaFree(dev_resultado);
	free(vector1);
	free(resultado);
	return 0;
}
|
20,063 | #include <stdio.h>
#include <cuda.h>
#include<math.h>
// Kernel that executes on the CUDA device
// Squares each of the first N elements of a in place, one thread per
// element, with a guard for the ragged tail of the final block.
__global__ void square_array(float *a, int N)
{
    const int i = blockDim.x * blockIdx.x + threadIdx.x;
    if (i < N) {
        a[i] *= a[i];
    }
}
/* One level of a tree reduction: at level `step`, threads whose index is a
   multiple of 2^(step+1) add in the partial sum 2^step elements ahead.
   FIX: the original tested `idx % (2 * step)`, which is modulo-by-zero on
   the first pass (step == 0) and uses the loop counter itself rather than
   the stride 2^step implied by its own pow(2, step) offset. */
__global__ void reduce_partials(float *d_in, int step)
{
    int idx = blockIdx.x * blockDim.x + threadIdx.x;
    int stride = 1 << step;            /* 2^step, exact integer arithmetic */
    if (idx % (stride << 1) == 0) {
        d_in[idx] = d_in[idx] + d_in[idx + stride];
    }
}
/* Reduces the first n elements of the device array d_in into d_in[0] via
   log2(n) tree-reduction passes. n should be a power of two.
   Fixes relative to the original:
   - `sizeof(d_in) / sizeof(float)` was applied to a POINTER, yielding a
     constant (pointer size / 4) instead of the array length; the length is
     now an explicit parameter, defaulted to 16 to match the existing
     caller, which generalizes the function to any size;
   - the block count was floor(n / 1024), i.e. ZERO blocks for n < 1024, so
     nothing was ever launched for small arrays; it is now a ceiling
     division, and enough threads are launched to cover every index. */
void reduce_array(float* d_in, int n = 16){
    int steps = (int)floor(log((double)n) / log(2.0));
    int numThreads = (n < 1024) ? n : 1024;
    int numBlocks = (n + numThreads - 1) / numThreads;   /* ceil-div */
    for (int i = 0 ; i < steps; i++){
        reduce_partials<<< numBlocks, numThreads>>>(d_in, i);
    }
}
// main routine that executes on the host
// Host driver: builds [0, 1, ..., N-1] on the host, copies it to the
// device, runs the tree reduction in place, and prints the result array.
int main(void)
{
  const int N = 16;                       // number of elements
  const size_t size = N * sizeof(float);

  float *a_h = (float *)malloc(size);     // host buffer
  float *a_d;                             // device buffer
  cudaMalloc((void **) &a_d, size);

  // Initialise the host array and ship it to the device.
  for (int i = 0; i < N; i++) a_h[i] = (float)i;
  cudaMemcpy(a_d, a_h, size, cudaMemcpyHostToDevice);

  // Reduce on the device.
  reduce_array(a_d);

  // Pull the (partially collapsed) array back and print it.
  cudaMemcpy(a_h, a_d, sizeof(float)*N, cudaMemcpyDeviceToHost);
  for (int i = 0; i < N; i++) printf("%d %f\n", i, a_h[i]);

  // Cleanup
  free(a_h); cudaFree(a_d);
}
|
20,064 | // This CUDA program implements vector addition on both the CPU & GPU
//
// To compile:
// nvcc -O3 vector_add.cu -o vector_add
// Note: if /usr/local/cuda/bin is not in your $PATH, you will need to
// specify the full path to nvcc: /usr/local/cuda/bin/nvcc
//
// To run:
// ./vector_add N
// (where N is the number of elements in each vector)
#include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>
// Function declarations
float *CPU_add_vectors(float *A, float *B, int N);
float *GPU_add_vectors(float *A, float *B, int N);
float *get_random_vector(int N);
long long start_timer();
long long stop_timer(long long start_time, char *name);
void die(char *message);
void check_error(cudaError e);
// The number of threads per blocks in the kernel
// (if we define it here, then we can use its value in the kernel,
// for example to statically declare an array in shared memory)
const int threads_per_block = 256;
/* Generates two random N-element vectors, sums them on both the GPU and the
   CPU, reports the relative speed, and checks the GPU result against the
   CPU reference with an absolute tolerance.
   FIX: the four heap vectors (A, B, C_CPU, C_GPU) were never freed and main
   had no explicit return; both are added. */
int main(int argc, char **argv) {
	// Seed the random generator (use a constant here for repeatable results)
	srand(4444);

	// Determine the vector length
	int N = 100000;  // default value
	if (argc > 1) N = atoi(argv[1]);  // user-specified value

	// Generate two random vectors
	long long vector_start_time = start_timer();
	float *A = get_random_vector(N);
	float *B = get_random_vector(N);
	stop_timer(vector_start_time, "Vector generation");

	// Compute their sum on the GPU
	long long GPU_start_time = start_timer();
	float *C_GPU = GPU_add_vectors(A, B, N);
	long long GPU_time = stop_timer(GPU_start_time, "\t Total");

	// Compute their sum on the CPU
	long long CPU_start_time = start_timer();
	float *C_CPU = CPU_add_vectors(A, B, N);
	long long CPU_time = stop_timer(CPU_start_time, "\nCPU");

	// Compute the speedup or slowdown
	if (GPU_time > CPU_time) printf("\nCPU outperformed GPU by %.2fx\n", (float) GPU_time / (float) CPU_time);
	else                     printf("\nGPU outperformed CPU by %.2fx\n", (float) CPU_time / (float) GPU_time);

	// Check the correctness of the GPU results (absolute tolerance)
	int num_wrong = 0;
	for (int i = 0; i < N; i++) {
		if (fabs(C_CPU[i] - C_GPU[i]) > 0.000001) num_wrong++;
	}

	// Report the correctness results
	if (num_wrong) printf("\n%d / %d values incorrect\n", num_wrong, N);
	else printf("\nAll values correct\n");

	// FIX: release the heap-allocated vectors and return an explicit status
	free(A);
	free(B);
	free(C_CPU);
	free(C_GPU);
	return 0;
}
// A GPU kernel that computes the vector sum A + B
// (each thread computes a single value of the result)
// Element-wise vector sum C = A + B; each thread produces one element.
// The launch uses a 2-D grid of 1-D blocks, so the block coordinates are
// first flattened back into a single linear block number.
__global__ void add_vectors_kernel(float *A, float *B, float *C, int N) {
	// Linearise the (x, y) block coordinates, then add the lane offset.
	const int linear_block = blockIdx.x + gridDim.x * blockIdx.y;
	const int idx = blockDim.x * linear_block + threadIdx.x;

	// Guard the ragged tail of the final block.
	if (idx < N) {
		C[idx] = A[idx] + B[idx];
	}
}
// Returns the vector sum A + B (computed on the GPU)
// Returns the vector sum A + B, computed on the GPU.
// Allocates device buffers, copies the inputs, launches add_vectors_kernel
// on a 2-D grid (to allow more than 64K blocks), and copies the result back.
// The caller owns the returned heap buffer.
float *GPU_add_vectors(float *A_CPU, float *B_CPU, int N) {
	long long memory_start_time = start_timer();

	// Allocate GPU memory for the inputs and the result.
	// size_t avoids overflow of N * sizeof(float) for very large N.
	size_t vector_size = (size_t) N * sizeof(float);
	float *A_GPU, *B_GPU, *C_GPU;
	check_error(cudaMalloc((void **) &A_GPU, vector_size));
	check_error(cudaMalloc((void **) &B_GPU, vector_size));
	check_error(cudaMalloc((void **) &C_GPU, vector_size));

	// Transfer the input vectors to GPU memory
	check_error(cudaMemcpy(A_GPU, A_CPU, vector_size, cudaMemcpyHostToDevice));
	check_error(cudaMemcpy(B_GPU, B_CPU, vector_size, cudaMemcpyHostToDevice));
	stop_timer(memory_start_time, "\nGPU:\t Transfer to GPU");

	// Determine the number of thread blocks in the x- and y-dimension.
	// A 2-D grid is used because each grid dimension is limited to 64K
	// blocks; the kernel flattens the 2-D block index back to 1-D.
	int num_blocks = (N + threads_per_block - 1) / threads_per_block;
	int max_blocks_per_dimension = 65535;
	int num_blocks_y = (int) ((float) (num_blocks + max_blocks_per_dimension - 1) / (float) max_blocks_per_dimension);
	int num_blocks_x = (int) ((float) (num_blocks + num_blocks_y - 1) / (float) num_blocks_y);
	dim3 grid_size(num_blocks_x, num_blocks_y, 1);

	// Execute the kernel to compute the vector sum on the GPU
	long long kernel_start_time = start_timer();
	add_vectors_kernel <<< grid_size , threads_per_block >>> (A_GPU, B_GPU, C_GPU, N);
	// cudaThreadSynchronize() is deprecated (removed in CUDA 10+);
	// cudaDeviceSynchronize() is the supported replacement. The sync is only
	// needed for timing and error-checking purposes.
	cudaDeviceSynchronize();
	stop_timer(kernel_start_time, "\t Kernel execution");

	// Check for kernel launch/execution errors
	check_error(cudaGetLastError());

	// Allocate CPU memory for the result
	float *C_CPU = (float *) malloc(vector_size);
	if (C_CPU == NULL) die("Error allocating CPU memory");

	// Transfer the result from the GPU to the CPU
	memory_start_time = start_timer();
	check_error(cudaMemcpy(C_CPU, C_GPU, vector_size, cudaMemcpyDeviceToHost));
	stop_timer(memory_start_time, "\tTransfer from GPU");

	// Free the GPU memory
	check_error(cudaFree(A_GPU));
	check_error(cudaFree(B_GPU));
	check_error(cudaFree(C_GPU));

	return C_CPU;
}
// Returns the vector sum A + B
// Returns the element-wise sum A + B, computed on the host.
// The caller owns the returned heap buffer.
float *CPU_add_vectors(float *A, float *B, int N) {
	// Allocate memory for the result vector
	float *sum = (float *) malloc(N * sizeof(float));
	if (sum == NULL) die("Error allocating CPU memory");
	// Accumulate each element independently
	for (int idx = 0; idx < N; idx++) {
		sum[idx] = A[idx] + B[idx];
	}
	return sum;
}
// Returns a randomized vector containing N elements
// Returns a heap-allocated vector of N random floats (caller frees).
// Each element is the ratio of two random draws; the denominator is
// re-drawn if it is zero so no element becomes +inf.
float *get_random_vector(int N) {
	if (N < 1) die("Number of elements must be greater than zero");
	// Allocate memory for the vector
	float *V = (float *) malloc(N * sizeof(float));
	if (V == NULL) die("Error allocating CPU memory");
	// Populate the vector with random numbers
	for (int i = 0; i < N; i++) {
		float numerator = (float) rand();
		int denominator = rand();
		// rand() may return 0; a zero denominator would yield +inf
		while (denominator == 0) denominator = rand();
		V[i] = numerator / (float) denominator;
	}
	return V;
}
// Returns the current time in microseconds
// Returns the current wall-clock time in microseconds.
long long start_timer() {
	struct timeval now;
	gettimeofday(&now, NULL);
	return (long long) now.tv_sec * 1000000 + now.tv_usec;
}
// Prints the time elapsed since the specified time
// Prints the time elapsed since start_time (microseconds) under the given
// label and returns the elapsed microseconds.
// label is now const char*: callers pass string literals, and binding a
// string literal to char* is ill-formed in modern C++ (nvcc compiles host
// code as C++). Existing char* callers remain compatible.
long long stop_timer(long long start_time, const char *label) {
	struct timeval tv;
	gettimeofday(&tv, NULL);
	long long end_time = (long long) tv.tv_sec * 1000000 + tv.tv_usec;
	printf("%s: %.5f sec\n", label, ((float) (end_time - start_time)) / (1000 * 1000));
	return end_time - start_time;
}
// Prints the specified message and quits
// Prints the specified message to stderr (errors belong on the error
// stream, not stdout) and terminates the program with a failure status.
// message is const char* so string-literal callers are well-formed C++.
void die(const char *message) {
	fprintf(stderr, "%s\n", message);
	exit(1);
}
// If the specified error code refers to a real error, report it and quit the program
// If the specified CUDA error code refers to a real error, report it on
// stderr (error output should not be interleaved with stdout timing lines)
// and quit the program.
void check_error(cudaError e) {
	if (e != cudaSuccess) {
		fprintf(stderr, "\nCUDA error: %s\n", cudaGetErrorString(e));
		exit(1);
	}
}
|
20,065 | #include "includes.h"
#define SIZ 20
#define num_inp 4
using namespace std;
// Pair of integer endpoints; presumably a graph edge (first -> second) —
// TODO confirm against the code that populates it (not visible here).
typedef struct edge {
int first, second;
} edges;
// Adds the L2-regularization contribution (W2 * reg) in place to the
// weight-gradient matrix grads_W2.
// Launch layout implied by the indexing: one block per row (blockIdx.x = i)
// and one thread per column (threadIdx.x = j), with `size` as the row
// stride of both matrices.
// NOTE(review): there is no bounds check — this assumes gridDim.x and
// blockDim.x exactly match the matrix dimensions; confirm at the launch site.
__global__ void grads_w2_kernel(double * grads_W2,double * W2,double reg, int size)
{
int i = blockIdx.x;
int j = threadIdx.x;
grads_W2[i*size + j] += W2[i*size + j] * reg;
}
20,066 | /*
Copyright 2013--2018 James E. McClure, Virginia Polytechnic & State University
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#include <math.h>
#include <stdio.h>
#include <cuda_profiler_api.h>
#define NBLOCKS 1024
#define NTHREADS 256
// Initialize the two component densities (Den, planar layout: component A
// at [n], component B at [N+n]) and the phase indicator Phi from the
// segmented image ID: id==1 -> pure A, id==2 -> pure B, anything else
// (treated as solid) -> the prescribed wetting densities das/dbs.
// Each thread processes S strided sites so the fixed NBLOCKS*NTHREADS
// launch covers the whole Nx*Ny*Nz domain.
__global__ void dvc_ScaLBL_Color_Init(char *ID, double *Den, double *Phi, double das, double dbs, int Nx, int Ny, int Nz)
{
	const int N = Nx*Ny*Nz;
	const int S = N/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		// Strided 1-D site index for this thread
		int n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
		if (n >= N) continue;
		char id = ID[n];
		if (id == 1){
			// Pure component A
			Den[n]   = 1.0;
			Den[N+n] = 0.0;
			Phi[n]   = 1.0;
		}
		else if (id == 2){
			// Pure component B
			Den[n]   = 0.0;
			Den[N+n] = 1.0;
			Phi[n]   = -1.0;
		}
		else {
			// Solid site: wetting densities and the corresponding phase value
			Den[n]   = das;
			Den[N+n] = dbs;
			Phi[n]   = (das-dbs)/(das+dbs);
		}
	}
}
// Packed-layout initializer: Den is interleaved as Den[2n] = component A,
// Den[2n+1] = component B (unlike the planar variants in this file).
// id==1 -> pure A; id==2 -> pure B; otherwise solid: wetting densities
// das/dbs plus a smoothed phase value computed from the distance map.
// NOTE(review): the first "ID[n]==1" block below looks redundant with the
// later "else if (ID[n]==1)" branch but is NOT dead code — for an ID==1
// site on a domain face it is the only place Phi[n] = 1.0 is written before
// the boundary branch zeroes the densities. Preserve the statement order.
__global__ void dvc_ScaLBL_Color_InitDistancePacked(char *ID, double *Den, double *Phi, double *Distance,
double das, double dbs, double beta, double xp, int Nx, int Ny, int Nz)
{
int i,j,k,n,N;
double d;
N = Nx*Ny*Nz;
int S = N/NBLOCKS/NTHREADS + 1;
for (int s=0; s<S; s++){
//........Get 1-D index for this thread....................
n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
if (n<N){
//.......Back out the 3-D indices for node n..............
k = n/(Nx*Ny);
j = (n-Nx*Ny*k)/Nx;
i = n-Nx*Ny*k-Nx*j;
// Pre-set pure-A values (also covers boundary ID==1 sites' Phi)
if ( ID[n] == 1){
Den[2*n] = 1.0;
Den[2*n+1] = 0.0;
Phi[n] = 1.0;
}
// Domain-face sites: both component densities forced to zero
if (i == 0 || j == 0 || k == 0 || i == Nx-1 || j == Ny-1 || k == Nz-1){
Den[2*n] = 0.0;
Den[2*n+1] = 0.0;
}
else if ( ID[n] == 1){
Den[2*n] = 1.0;
Den[2*n+1] = 0.0;
Phi[n] = 1.0;
}
else if ( ID[n] == 2){
Den[2*n] = 0.0;
Den[2*n+1] = 1.0;
Phi[n] = -1.0;
}
else{
Den[2*n] = das;
Den[2*n+1] = dbs;
Phi[n] = (das-dbs)/(das+dbs);
// Overwrite with a smoothed profile based on the distance to the interface
d = fabs(Distance[n]);
Phi[n] = (2.f*(exp(-2.f*beta*(d+xp)))/(1.f+exp(-2.f*beta*(d+xp))) - 1.f);
}
}
}
}
// Planar-layout initializer with a distance map: Den[n] = component A,
// Den[N+n] = component B. id==1 -> pure A; id==2 -> pure B; otherwise
// (solid) the wetting densities das/dbs are set and Phi takes a smoothed
// value computed from the distance to the interface.
__global__ void dvc_ScaLBL_Color_InitDistance(char *ID, double *Den, double *Phi, double *Distance,
		double das, double dbs, double beta, double xp, int Nx, int Ny, int Nz)
{
	int n,N;
	double d;
	N = Nx*Ny*Nz;
	int S = N/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		//........Get 1-D index for this thread....................
		n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
		if (n<N){
			if ( ID[n] == 1){
				Den[n] = 1.0;
				Den[N+n] = 0.0;
				Phi[n] = 1.0;
			}
			else if ( ID[n] == 2){
				Den[n] = 0.0;
				Den[N+n] = 1.0;
				Phi[n] = -1.0;
			}
			else{
				Den[n] = das;
				Den[N+n] = dbs;
				// Smoothed phase profile from the distance map. (The original
				// code first assigned Phi[n] = (das-dbs)/(das+dbs) here; that
				// was a dead store, immediately overwritten below, and has
				// been removed.)
				d = fabs(Distance[n]);
				Phi[n] = (2.f*(exp(-2.f*beta*(d+xp)))/(1.f+exp(-2.f*beta*(d+xp))) - 1.f);
			}
		}
	}
}
//*************************************************************************
// Apply a uniform component boundary condition on the `count` sites in
// `list`: densities are forced to (vA, vB) and the phase indicator is
// written directly at the site index n (no Map indirection, unlike the
// _z/_Z variants). `Map` is unused here but kept for signature parity.
__global__ void dvc_ScaLBL_Color_BC(int *list, int *Map, double *Phi, double *Den, double vA, double vB, int count, int Np)
{
	// One thread per boundary site
	int idx = blockIdx.x*blockDim.x + threadIdx.x;
	if (idx < count){
		int n = list[idx];
		Den[n]    = vA;
		Den[Np+n] = vB;
		Phi[n]    = (vA-vB)/(vA+vB);
	}
}
// Apply the component boundary condition on the z-inlet sites in `list`:
// densities are forced to (vA, vB) and the phase indicator is written
// through the Map indirection (distribution index -> phase-field index).
__global__ void dvc_ScaLBL_Color_BC_z(int *list, int *Map, double *Phi, double *Den, double vA, double vB, int count, int Np)
{
	// One thread per boundary site; extra threads exit early
	const int idx = blockIdx.x*blockDim.x + threadIdx.x;
	if (idx >= count) return;
	const int n = list[idx];
	Den[n]    = vA;
	Den[Np+n] = vB;
	// Phase value is stored at the mapped phase-field location
	const int nm = Map[n];
	Phi[nm] = (vA-vB)/(vA+vB);
}
// Apply the component boundary condition on the Z-outlet sites in `list`:
// densities are forced to (vA, vB) and the phase indicator is written
// through the Map indirection (distribution index -> phase-field index).
__global__ void dvc_ScaLBL_Color_BC_Z(int *list, int *Map, double *Phi, double *Den, double vA, double vB, int count, int Np)
{
	// One thread per boundary site; extra threads exit early
	const int idx = blockIdx.x*blockDim.x + threadIdx.x;
	if (idx >= count) return;
	const int n = list[idx];
	Den[n]    = vA;
	Den[Np+n] = vB;
	// Phase value is stored at the mapped phase-field location
	const int nm = Map[n];
	Phi[nm] = (vA-vB)/(vA+vB);
}
//*************************************************************************
// Compute the color gradient of the phase field phi on a D3Q19 stencil
// with fully periodic boundaries in x, y and z. For each site n the 18
// neighbor values f1..f18 are gathered (wrapping indices at the domain
// faces), combined into the gradient components (nx,ny,nz), and stored in
// ColorGrad in planar (structure-of-arrays) layout: x at [n], y at [N+n],
// z at [2N+n]. The gradient is NOT normalized here (see the commented-out
// normalization). ID is accepted for signature parity but not read.
__global__ void dvc_ScaLBL_D3Q19_ColorGradient(char *ID, double *phi, double *ColorGrad, int Nx, int Ny, int Nz)
{
int n,N,i,j,k,nn;
// distributions
double f1,f2,f3,f4,f5,f6,f7,f8,f9;
double f10,f11,f12,f13,f14,f15,f16,f17,f18;
double nx,ny,nz;
// non-conserved moments
// additional variables needed for computations
N = Nx*Ny*Nz;
int S = N/NBLOCKS/NTHREADS + 1;
for (int s=0; s<S; s++){
//........Get 1-D index for this thread....................
n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
if (n<N ){
//.......Back out the 3-D indices for node n..............
k = n/(Nx*Ny);
j = (n-Nx*Ny*k)/Nx;
i = n-Nx*Ny*k-Nx*j;
//........................................................................
//........Get 1-D index for this thread....................
// n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
//........................................................................
// COMPUTE THE COLOR GRADIENT
//........................................................................
//.................Read Phase Indicator Values............................
//........................................................................
nn = n-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
f1 = phi[nn]; // get neighbor for phi - 1
//........................................................................
nn = n+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
f2 = phi[nn]; // get neighbor for phi - 2
//........................................................................
nn = n-Nx; // neighbor index (get convention)
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
f3 = phi[nn]; // get neighbor for phi - 3
//........................................................................
nn = n+Nx; // neighbor index (get convention)
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
f4 = phi[nn]; // get neighbor for phi - 4
//........................................................................
nn = n-Nx*Ny; // neighbor index (get convention)
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f5 = phi[nn]; // get neighbor for phi - 5
//........................................................................
nn = n+Nx*Ny; // neighbor index (get convention)
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f6 = phi[nn]; // get neighbor for phi - 6
//........................................................................
nn = n-Nx-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
f7 = phi[nn]; // get neighbor for phi - 7
//........................................................................
nn = n+Nx+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
f8 = phi[nn]; // get neighbor for phi - 8
//........................................................................
nn = n+Nx-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
f9 = phi[nn]; // get neighbor for phi - 9
//........................................................................
nn = n-Nx+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
f10 = phi[nn]; // get neighbor for phi - 10
//........................................................................
nn = n-Nx*Ny-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f11 = phi[nn]; // get neighbor for phi - 11
//........................................................................
nn = n+Nx*Ny+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f12 = phi[nn]; // get neighbor for phi - 12
//........................................................................
nn = n+Nx*Ny-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f13 = phi[nn]; // get neighbor for phi - 13
//........................................................................
nn = n-Nx*Ny+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f14 = phi[nn]; // get neighbor for phi - 14
//........................................................................
nn = n-Nx*Ny-Nx; // neighbor index (get convention)
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f15 = phi[nn]; // get neighbor for phi - 15
//........................................................................
nn = n+Nx*Ny+Nx; // neighbor index (get convention)
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f16 = phi[nn]; // get neighbor for phi - 16
//........................................................................
nn = n+Nx*Ny-Nx; // neighbor index (get convention)
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f17 = phi[nn]; // get neighbor for phi - 17
//........................................................................
nn = n-Nx*Ny+Nx; // neighbor index (get convention)
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f18 = phi[nn]; // get neighbor for phi - 18
//............Compute the Color Gradient...................................
// Finite-difference combination of the 18 neighbor values
nx = -(f1-f2+0.5*(f7-f8+f9-f10+f11-f12+f13-f14));
ny = -(f3-f4+0.5*(f7-f8-f9+f10+f15-f16+f17-f18));
nz = -(f5-f6+0.5*(f11-f12-f13+f14+f15-f16-f17+f18));
//...........Normalize the Color Gradient.................................
// C = sqrt(nx*nx+ny*ny+nz*nz);
// nx = nx/C;
// ny = ny/C;
// nz = nz/C;
//...Store the Color Gradient (planar/SoA layout)....................
ColorGrad[n] = nx;
ColorGrad[N+n] = ny;
ColorGrad[2*N+n] = nz;
//...............................................
}
}
}
//*************************************************************************
// D3Q19 MRT (multi-relaxation-time) color-model collision step.
// For each fluid site (ID[n] > 0): reads the precomputed color gradient,
// normalizes it, reads the 19 distributions from disteven/distodd (planar
// layout, opposite arrays because of the preceding swap step), transforms
// to moment space, relaxes the moments (Toelke/Freudiger-style surface
// tension terms scaled by alpha*C), transforms back, adds the body force
// (Fx,Fy,Fz), and writes the updated distributions and the momentum
// (jx,jy,jz) into Velocity (planar layout).
// NOTE(review): when C == 0 the divisions nx/C etc. below produce NaN, but
// those values are overwritten by the "if (C == 0.0) nx = ny = nz = 1.0;"
// guard before they are used, so the result stays finite.
__global__ void dvc_ColorCollide( char *ID, double *disteven, double *distodd, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz, double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz, bool pBC)
{
int n,N;
// distributions
double f0,f1,f2,f3,f4,f5,f6,f7,f8,f9;
double f10,f11,f12,f13,f14,f15,f16,f17,f18;
// non-conserved moments
double m1,m2,m4,m6,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18;
// additional variables needed for computations
double rho,jx,jy,jz,C,nx,ny,nz;
N = Nx*Ny*Nz;
int S = N/NBLOCKS/NTHREADS + 1;
for (int s=0; s<S; s++){
//........Get 1-D index for this thread....................
n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
if (n<N && ID[n] > 0){
// Retrieve the color gradient
nx = ColorGrad[n];
ny = ColorGrad[N+n];
nz = ColorGrad[2*N+n];
//...........Normalize the Color Gradient.................................
C = sqrt(nx*nx+ny*ny+nz*nz);
nx = nx/C;
ny = ny/C;
nz = nz/C;
//......No color gradient at z-boundary if pressure BC are set.............
// if (pBC && k==0) nx = ny = nz = 0.f;
// if (pBC && k==Nz-1) nx = ny = nz = 0.f;
//........................................................................
// READ THE DISTRIBUTIONS
// (read from opposite array due to previous swap operation)
//........................................................................
f2 = distodd[n];
f4 = distodd[N+n];
f6 = distodd[2*N+n];
f8 = distodd[3*N+n];
f10 = distodd[4*N+n];
f12 = distodd[5*N+n];
f14 = distodd[6*N+n];
f16 = distodd[7*N+n];
f18 = distodd[8*N+n];
//........................................................................
f0 = disteven[n];
f1 = disteven[N+n];
f3 = disteven[2*N+n];
f5 = disteven[3*N+n];
f7 = disteven[4*N+n];
f9 = disteven[5*N+n];
f11 = disteven[6*N+n];
f13 = disteven[7*N+n];
f15 = disteven[8*N+n];
f17 = disteven[9*N+n];
//........................................................................
// PERFORM RELAXATION PROCESS
//........................................................................
//....................compute the moments...............................................
rho = f0+f2+f1+f4+f3+f6+f5+f8+f7+f10+f9+f12+f11+f14+f13+f16+f15+f18+f17;
m1 = -30*f0-11*(f2+f1+f4+f3+f6+f5)+8*(f8+f7+f10+f9+f12+f11+f14+f13+f16+f15+f18 +f17);
m2 = 12*f0-4*(f2+f1 +f4+f3+f6 +f5)+f8+f7+f10+f9+f12+f11+f14+f13+f16+f15+f18+f17;
jx = f1-f2+f7-f8+f9-f10+f11-f12+f13-f14;
m4 = 4*(-f1+f2)+f7-f8+f9-f10+f11-f12+f13-f14;
jy = f3-f4+f7-f8-f9+f10+f15-f16+f17-f18;
m6 = -4*(f3-f4)+f7-f8-f9+f10+f15-f16+f17-f18;
jz = f5-f6+f11-f12-f13+f14+f15-f16-f17+f18;
m8 = -4*(f5-f6)+f11-f12-f13+f14+f15-f16-f17+f18;
m9 = 2*(f1+f2)-f3-f4-f5-f6+f7+f8+f9+f10+f11+f12+f13+f14-2*(f15+f16+f17+f18);
m10 = -4*(f1+f2)+2*(f4+f3+f6+f5)+f8+f7+f10+f9+f12+f11+f14+f13-2*(f16+f15+f18+f17);
m11 = f4+f3-f6-f5+f8+f7+f10+f9-f12-f11-f14-f13;
m12 = -2*(f4+f3-f6-f5)+f8+f7+f10+f9-f12-f11-f14-f13;
m13 = f8+f7-f10-f9;
m14 = f16+f15-f18-f17;
m15 = f12+f11-f14-f13;
m16 = f7-f8+f9-f10-f11+f12-f13+f14;
m17 = -f7+f8+f9-f10+f15-f16+f17-f18;
m18 = f11-f12-f13+f14-f15+f16+f17-f18;
//..........Toelke, Fruediger et. al. 2006...............
// Guard: replaces the NaN normals produced above when C == 0
if (C == 0.0) nx = ny = nz = 1.0;
m1 = m1 + rlx_setA*((19*(jx*jx+jy*jy+jz*jz)/rho - 11*rho) -alpha*C - m1);
m2 = m2 + rlx_setA*((3*rho - 5.5*(jx*jx+jy*jy+jz*jz)/rho)- m2);
m4 = m4 + rlx_setB*((-0.6666666666666666*jx)- m4);
m6 = m6 + rlx_setB*((-0.6666666666666666*jy)- m6);
m8 = m8 + rlx_setB*((-0.6666666666666666*jz)- m8);
m9 = m9 + rlx_setA*(((2*jx*jx-jy*jy-jz*jz)/rho) + 0.5*alpha*C*(2*nx*nx-ny*ny-nz*nz) - m9);
m10 = m10 + rlx_setA*(-0.5*((2*jx*jx-jy*jy-jz*jz)/rho) - m10);
m11 = m11 + rlx_setA*(((jy*jy-jz*jz)/rho) + 0.5*alpha*C*(ny*ny-nz*nz)- m11);
m12 = m12 + rlx_setA*( -0.5*((jy*jy-jz*jz)/rho) - m12);
m13 = m13 + rlx_setA*( (jx*jy/rho) + 0.5*alpha*C*nx*ny - m13);
m14 = m14 + rlx_setA*( (jy*jz/rho) + 0.5*alpha*C*ny*nz - m14);
m15 = m15 + rlx_setA*( (jx*jz/rho) + 0.5*alpha*C*nx*nz - m15);
m16 = m16 + rlx_setB*( - m16);
m17 = m17 + rlx_setB*( - m17);
m18 = m18 + rlx_setB*( - m18);
//.................inverse transformation......................................................
f0 = 0.05263157894736842*rho-0.012531328320802*m1+0.04761904761904762*m2;
f1 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(jx-m4)+0.0555555555555555555555555*(m9-m10);
f2 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(m4-jx)+0.0555555555555555555555555*(m9-m10);
f3 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(jy-m6)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m11-m12);
f4 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(m6-jy)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m11-m12);
f5 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(jz-m8)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m12-m11);
f6 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(m8-jz)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m12-m11);
f7 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2+0.1*(jx+jy)+0.025*(m4+m6)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12+0.25*m13+0.125*(m16-m17);
f8 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2-0.1*(jx+jy)-0.025*(m4+m6)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12+0.25*m13+0.125*(m17-m16);
f9 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2+0.1*(jx-jy)+0.025*(m4-m6)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12-0.25*m13+0.125*(m16+m17);
f10 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2+0.1*(jy-jx)+0.025*(m6-m4)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12-0.25*m13-0.125*(m16+m17);
f11 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jx+jz)+0.025*(m4+m8)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12+0.25*m15+0.125*(m18-m16);
f12 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2-0.1*(jx+jz)-0.025*(m4+m8)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12+0.25*m15+0.125*(m16-m18);
f13 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jx-jz)+0.025*(m4-m8)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12-0.25*m15-0.125*(m16+m18);
f14 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jz-jx)+0.025*(m8-m4)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12-0.25*m15+0.125*(m16+m18);
f15 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jy+jz)+0.025*(m6+m8)
-0.0555555555555555555555555*m9-0.02777777777777778*m10+0.25*m14+0.125*(m17-m18);
f16 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2-0.1*(jy+jz)-0.025*(m6+m8)
-0.0555555555555555555555555*m9-0.02777777777777778*m10+0.25*m14+0.125*(m18-m17);
f17 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jy-jz)+0.025*(m6-m8)
-0.0555555555555555555555555*m9-0.02777777777777778*m10-0.25*m14+0.125*(m17+m18);
f18 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jz-jy)+0.025*(m8-m6)
-0.0555555555555555555555555*m9-0.02777777777777778*m10-0.25*m14-0.125*(m17+m18);
//.......................................................................................................
// incorporate external force
f1 += 0.16666666*Fx;
f2 -= 0.16666666*Fx;
f3 += 0.16666666*Fy;
f4 -= 0.16666666*Fy;
f5 += 0.16666666*Fz;
f6 -= 0.16666666*Fz;
f7 += 0.08333333333*(Fx+Fy);
f8 -= 0.08333333333*(Fx+Fy);
f9 += 0.08333333333*(Fx-Fy);
f10 -= 0.08333333333*(Fx-Fy);
f11 += 0.08333333333*(Fx+Fz);
f12 -= 0.08333333333*(Fx+Fz);
f13 += 0.08333333333*(Fx-Fz);
f14 -= 0.08333333333*(Fx-Fz);
f15 += 0.08333333333*(Fy+Fz);
f16 -= 0.08333333333*(Fy+Fz);
f17 += 0.08333333333*(Fy-Fz);
f18 -= 0.08333333333*(Fy-Fz);
//*********** WRITE UPDATED VALUES TO MEMORY ******************
// Write the updated distributions
//....EVEN.....................................
disteven[n] = f0;
disteven[N+n] = f2;
disteven[2*N+n] = f4;
disteven[3*N+n] = f6;
disteven[4*N+n] = f8;
disteven[5*N+n] = f10;
disteven[6*N+n] = f12;
disteven[7*N+n] = f14;
disteven[8*N+n] = f16;
disteven[9*N+n] = f18;
//....ODD......................................
distodd[n] = f1;
distodd[N+n] = f3;
distodd[2*N+n] = f5;
distodd[3*N+n] = f7;
distodd[4*N+n] = f9;
distodd[5*N+n] = f11;
distodd[6*N+n] = f13;
distodd[7*N+n] = f15;
distodd[8*N+n] = f17;
//...Store the Velocity (momentum, planar layout)..........................
Velocity[n] = jx;
Velocity[N+n] = jy;
Velocity[2*N+n] = jz;
/* Velocity[3*n] = jx;
Velocity[3*n+1] = jy;
Velocity[3*n+2] = jz;
*/ //...Store the Color Gradient....................
// ColorGrad[3*n] = nx*C;
// ColorGrad[3*n+1] = ny*C;
// ColorGrad[3*n+2] = nz*C;
//...............................................
//***************************************************************
} // check if n is in the solid
} // loop over n
}
__global__ void
__launch_bounds__(512,2)
dvc_ScaLBL_D3Q19_ColorCollide( char *ID, double *disteven, double *distodd, double *phi, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz, double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz)
{
int i,j,k,n,nn,N;
// distributions
double f0,f1,f2,f3,f4,f5,f6,f7,f8,f9;
double f10,f11,f12,f13,f14,f15,f16,f17,f18;
// non-conserved moments
double m1,m2,m4,m6,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18;
// additional variables needed for computations
double rho,jx,jy,jz,C,nx,ny,nz;
char id;
N = Nx*Ny*Nz;
int S = N/NBLOCKS/NTHREADS + 1;
for (int s=0; s<S; s++){
//........Get 1-D index for this thread....................
n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
if (n<N) {
id = ID[n];
if ( id > 0){
//.......Back out the 3-D indices for node n..............
k = n/(Nx*Ny);
j = (n-Nx*Ny*k)/Nx;
i = n-Nx*Ny*k-Nx*j;
//........................................................................
//........Get 1-D index for this thread....................
// n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
//........................................................................
// COMPUTE THE COLOR GRADIENT
//........................................................................
//.................Read Phase Indicator Values............................
//........................................................................
nn = n-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
f1 = phi[nn]; // get neighbor for phi - 1
//........................................................................
nn = n+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
f2 = phi[nn]; // get neighbor for phi - 2
//........................................................................
nn = n-Nx; // neighbor index (get convention)
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
f3 = phi[nn]; // get neighbor for phi - 3
//........................................................................
nn = n+Nx; // neighbor index (get convention)
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
f4 = phi[nn]; // get neighbor for phi - 4
//........................................................................
nn = n-Nx*Ny; // neighbor index (get convention)
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f5 = phi[nn]; // get neighbor for phi - 5
//........................................................................
nn = n+Nx*Ny; // neighbor index (get convention)
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f6 = phi[nn]; // get neighbor for phi - 6
//........................................................................
nn = n-Nx-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
f7 = phi[nn]; // get neighbor for phi - 7
//........................................................................
nn = n+Nx+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
f8 = phi[nn]; // get neighbor for phi - 8
//........................................................................
nn = n+Nx-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
f9 = phi[nn]; // get neighbor for phi - 9
//........................................................................
nn = n-Nx+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
f10 = phi[nn]; // get neighbor for phi - 10
//........................................................................
nn = n-Nx*Ny-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f11 = phi[nn]; // get neighbor for phi - 11
//........................................................................
nn = n+Nx*Ny+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f12 = phi[nn]; // get neighbor for phi - 12
//........................................................................
nn = n+Nx*Ny-1; // neighbor index (get convention)
if (i-1<0) nn += Nx; // periodic BC along the x-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f13 = phi[nn]; // get neighbor for phi - 13
//........................................................................
nn = n-Nx*Ny+1; // neighbor index (get convention)
if (!(i+1<Nx)) nn -= Nx; // periodic BC along the x-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f14 = phi[nn]; // get neighbor for phi - 14
//........................................................................
nn = n-Nx*Ny-Nx; // neighbor index (get convention)
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f15 = phi[nn]; // get neighbor for phi - 15
//........................................................................
nn = n+Nx*Ny+Nx; // neighbor index (get convention)
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f16 = phi[nn]; // get neighbor for phi - 16
//........................................................................
nn = n+Nx*Ny-Nx; // neighbor index (get convention)
if (j-1<0) nn += Nx*Ny; // Perioidic BC along the y-boundary
if (!(k+1<Nz)) nn -= Nx*Ny*Nz; // Perioidic BC along the z-boundary
f17 = phi[nn]; // get neighbor for phi - 17
//........................................................................
nn = n-Nx*Ny+Nx; // neighbor index (get convention)
if (!(j+1<Ny)) nn -= Nx*Ny; // Perioidic BC along the y-boundary
if (k-1<0) nn += Nx*Ny*Nz; // Perioidic BC along the z-boundary
f18 = phi[nn]; // get neighbor for phi - 18
//............Compute the Color Gradient...................................
nx = -(f1-f2+0.5*(f7-f8+f9-f10+f11-f12+f13-f14));
ny = -(f3-f4+0.5*(f7-f8-f9+f10+f15-f16+f17-f18));
nz = -(f5-f6+0.5*(f11-f12-f13+f14+f15-f16-f17+f18));
//...Store the Color Gradient....................
ColorGrad[n] = nx;
ColorGrad[N+n] = ny;
ColorGrad[2*N+n] = nz;
//...............................................
//...........Normalize the Color Gradient.................................
C = sqrt(nx*nx+ny*ny+nz*nz);
if (C == 0.0) C=1.0;
nx = nx/C;
ny = ny/C;
nz = nz/C;
//......No color gradient at z-boundary if pressure BC are set.............
// if (pBC && k==0) nx = ny = nz = 0.f;
// if (pBC && k==Nz-1) nx = ny = nz = 0.f;
//........................................................................
// READ THE DISTRIBUTIONS
// (read from opposite array due to previous swap operation)
//........................................................................
f2 = distodd[n];
f4 = distodd[N+n];
f6 = distodd[2*N+n];
f0 = disteven[n];
f1 = disteven[N+n];
f3 = disteven[2*N+n];
f5 = disteven[3*N+n];
//........................................................................
//....................compute the moments...............................................
rho = f0+f2+f1+f4+f3+f6+f5;
m1 = -30*f0-11*(f2+f1+f4+f3+f6+f5);
m2 = 12*f0-4*(f2+f1 +f4+f3+f6 +f5);
jx = f1-f2;
m4 = 4*(-f1+f2);
jy = f3-f4;
m6 = -4*(f3-f4);
jz = f5-f6;
m8 = -4*(f5-f6);
m9 = 2*(f1+f2)-f3-f4-f5-f6;
m10 = -4*(f1+f2)+2*(f4+f3+f6+f5);
m11 = f4+f3-f6-f5;
m12 = -2*(f4+f3-f6-f5);
//........................................................................
f8 = distodd[3*N+n];
f10 = distodd[4*N+n];
f7 = disteven[4*N+n];
f9 = disteven[5*N+n];
//........................................................................
rho += f8+f7+f10+f9;
m1 += 8*(f8+f7+f10+f9);
m2 += f8+f7+f10+f9;
jx += f7-f8+f9-f10;
m4 += f7-f8+f9-f10;
jy += f7-f8-f9+f10;
m6 += f7-f8-f9+f10;
m9 += f7+f8+f9+f10;
m10 += f8+f7+f10+f9;
m11 += f8+f7+f10+f9;
m12 += f8+f7+f10+f9;
m13 = f8+f7-f10-f9;
m16 = f7-f8+f9-f10;
m17 = -f7+f8+f9-f10;
//........................................................................
f11 = disteven[6*N+n];
f13 = disteven[7*N+n];
f12 = distodd[5*N+n];
f14 = distodd[6*N+n];
//........................................................................
//........................................................................
f15 = disteven[8*N+n];
f17 = disteven[9*N+n];
f16 = distodd[7*N+n];
f18 = distodd[8*N+n];
//........................................................................
//....................compute the moments...............................................
rho += f12+f11+f14+f13+f16+f15+f18+f17;
m1 += 8*(f12+f11+f14+f13+f16+f15+f18+f17);
m2 += f12+f11+f14+f13+f16+f15+f18+f17;
jx += f11-f12+f13-f14;
m4 += f11-f12+f13-f14;
jy += f15-f16+f17-f18;
m6 += f15-f16+f17-f18;
jz += f11-f12-f13+f14+f15-f16-f17+f18;
m8 += f11-f12-f13+f14+f15-f16-f17+f18;
m9 += f11+f12+f13+f14-2*(f15+f16+f17+f18);
m10 += f12+f11+f14+f13-2*(f16+f15+f18+f17);
m11 += -f12-f11-f14-f13;
m12 += -f12-f11-f14-f13;
m14 = f16+f15-f18-f17;
m15 = f12+f11-f14-f13;
m16 += -f11+f12-f13+f14;
m17 += f15-f16+f17-f18;
m18 = f11-f12-f13+f14-f15+f16+f17-f18;
//........................................................................
/* f2 = distodd[n];
f4 = distodd[N+n];
f6 = distodd[2*N+n];
f8 = distodd[3*N+n];
//........................................................................
f0 = disteven[n];
f1 = disteven[N+n];
f3 = disteven[2*N+n];
f5 = disteven[3*N+n];
f7 = disteven[4*N+n];
//........................................................................
//........................................................................
//....................compute the moments...............................................
rho = f0+f2+f1+f4+f3+f6+f5+f8+f7;
m1 = -30*f0-11*(f2+f1+f4+f3+f6+f5)+8*(f8+f7);
m2 = 12*f0-4*(f2+f1 +f4+f3+f6 +f5)+f8+f7;
jx = f1-f2+f7-f8;
m4 = 4*(-f1+f2)+f7-f8;
jy = f3-f4+f7-f8;
m6 = -4*(f3-f4)+f7-f8;
jz = f5-f6;
m8 = -4*(f5-f6);
m9 = 2*(f1+f2)-f3-f4-f5-f6+f7+f8;
m10 = -4*(f1+f2)+2*(f4+f3+f6+f5)+f8+f7;
m11 = f4+f3-f6-f5+f8+f7;
m12 = -2*(f4+f3-f6-f5)+f8+f7;
m13 = f8+f7;
m16 = f7-f8;
m17 = -f7+f8;
//........................................................................
f9 = disteven[5*N+n];
f11 = disteven[6*N+n];
f13 = disteven[7*N+n];
f15 = disteven[8*N+n];
f17 = disteven[9*N+n];
f10 = distodd[4*N+n];
f12 = distodd[5*N+n];
f14 = distodd[6*N+n];
f16 = distodd[7*N+n];
f18 = distodd[8*N+n];
//........................................................................
rho += f10+f9+f12+f11+f14+f13+f16+f15+f18+f17;
m1 += 8*(f10+f9+f12+f11+f14+f13+f16+f15+f18 +f17);
m2 += f10+f9+f12+f11+f14+f13+f16+f15+f18+f17;
jx += f9-f10+f11-f12+f13-f14;
m4 += f9-f10+f11-f12+f13-f14;
jy += -f9+f10+f15-f16+f17-f18;
m6 += -f9+f10+f15-f16+f17-f18;
jz += f11-f12-f13+f14+f15-f16-f17+f18;
m8 += f11-f12-f13+f14+f15-f16-f17+f18;
m9 += f9+f10+f11+f12+f13+f14-2*(f15+f16+f17+f18);
m10 += f10+f9+f12+f11+f14+f13-2*(f16+f15+f18+f17);
m11 += f10+f9-f12-f11-f14-f13;
m12 += f10+f9-f12-f11-f14-f13;
m13 += -f10-f9;
m14 = f16+f15-f18-f17;
m15 = f12+f11-f14-f13;
m16 += f9-f10-f11+f12-f13+f14;
m17 += f9-f10+f15-f16+f17-f18;
m18 = f11-f12-f13+f14-f15+f16+f17-f18;
*/ //........................................................................
// PERFORM RELAXATION PROCESS
//........................................................................
//..........Toelke, Fruediger et. al. 2006...............
if (C == 0.0) nx = ny = nz = 0.0;
m1 = m1 + rlx_setA*((19*(jx*jx+jy*jy+jz*jz)/rho - 11*rho) -alpha*C - m1);
m2 = m2 + rlx_setA*((3*rho - 5.5*(jx*jx+jy*jy+jz*jz)/rho)- m2);
m4 = m4 + rlx_setB*((-0.6666666666666666*jx)- m4);
m6 = m6 + rlx_setB*((-0.6666666666666666*jy)- m6);
m8 = m8 + rlx_setB*((-0.6666666666666666*jz)- m8);
m9 = m9 + rlx_setA*(((2*jx*jx-jy*jy-jz*jz)/rho) + 0.5*alpha*C*(2*nx*nx-ny*ny-nz*nz) - m9);
m10 = m10 + rlx_setA*( - m10);
m11 = m11 + rlx_setA*(((jy*jy-jz*jz)/rho) + 0.5*alpha*C*(ny*ny-nz*nz)- m11);
m12 = m12 + rlx_setA*( - m12);
m13 = m13 + rlx_setA*( (jx*jy/rho) + 0.5*alpha*C*nx*ny - m13);
m14 = m14 + rlx_setA*( (jy*jz/rho) + 0.5*alpha*C*ny*nz - m14);
m15 = m15 + rlx_setA*( (jx*jz/rho) + 0.5*alpha*C*nx*nz - m15);
m16 = m16 + rlx_setB*( - m16);
m17 = m17 + rlx_setB*( - m17);
m18 = m18 + rlx_setB*( - m18);
//.................inverse transformation......................................................
f0 = 0.05263157894736842*rho-0.012531328320802*m1+0.04761904761904762*m2;
f1 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(jx-m4)+0.0555555555555555555555555*(m9-m10);
f2 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(m4-jx)+0.0555555555555555555555555*(m9-m10);
f3 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(jy-m6)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m11-m12);
f4 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(m6-jy)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m11-m12);
f5 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(jz-m8)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m12-m11);
f6 = 0.05263157894736842*rho-0.004594820384294068*m1-0.01587301587301587*m2
+0.1*(m8-jz)+0.02777777777777778*(m10-m9)+0.08333333333333333*(m12-m11);
f7 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2+0.1*(jx+jy)+0.025*(m4+m6)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12+0.25*m13+0.125*(m16-m17);
f8 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2-0.1*(jx+jy)-0.025*(m4+m6)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12+0.25*m13+0.125*(m17-m16);
f9 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2+0.1*(jx-jy)+0.025*(m4-m6)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12-0.25*m13+0.125*(m16+m17);
f10 = 0.05263157894736842*rho+0.003341687552213868*m1+0.003968253968253968*m2+0.1*(jy-jx)+0.025*(m6-m4)
+0.02777777777777778*m9+0.01388888888888889*m10+0.08333333333333333*m11
+0.04166666666666666*m12-0.25*m13-0.125*(m16+m17);
f11 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jx+jz)+0.025*(m4+m8)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12+0.25*m15+0.125*(m18-m16);
f12 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2-0.1*(jx+jz)-0.025*(m4+m8)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12+0.25*m15+0.125*(m16-m18);
f13 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jx-jz)+0.025*(m4-m8)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12-0.25*m15-0.125*(m16+m18);
f14 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jz-jx)+0.025*(m8-m4)
+0.02777777777777778*m9+0.01388888888888889*m10-0.08333333333333333*m11
-0.04166666666666666*m12-0.25*m15+0.125*(m16+m18);
f15 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jy+jz)+0.025*(m6+m8)
-0.0555555555555555555555555*m9-0.02777777777777778*m10+0.25*m14+0.125*(m17-m18);
f16 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2-0.1*(jy+jz)-0.025*(m6+m8)
-0.0555555555555555555555555*m9-0.02777777777777778*m10+0.25*m14+0.125*(m18-m17);
f17 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jy-jz)+0.025*(m6-m8)
-0.0555555555555555555555555*m9-0.02777777777777778*m10-0.25*m14+0.125*(m17+m18);
f18 = 0.05263157894736842*rho+0.003341687552213868*m1
+0.003968253968253968*m2+0.1*(jz-jy)+0.025*(m8-m6)
-0.0555555555555555555555555*m9-0.02777777777777778*m10-0.25*m14-0.125*(m17+m18);
//.......................................................................................................
// incorporate external force
f1 += 0.16666666*Fx;
f2 -= 0.16666666*Fx;
f3 += 0.16666666*Fy;
f4 -= 0.16666666*Fy;
f5 += 0.16666666*Fz;
f6 -= 0.16666666*Fz;
f7 += 0.08333333333*(Fx+Fy);
f8 -= 0.08333333333*(Fx+Fy);
f9 += 0.08333333333*(Fx-Fy);
f10 -= 0.08333333333*(Fx-Fy);
f11 += 0.08333333333*(Fx+Fz);
f12 -= 0.08333333333*(Fx+Fz);
f13 += 0.08333333333*(Fx-Fz);
f14 -= 0.08333333333*(Fx-Fz);
f15 += 0.08333333333*(Fy+Fz);
f16 -= 0.08333333333*(Fy+Fz);
f17 += 0.08333333333*(Fy-Fz);
f18 -= 0.08333333333*(Fy-Fz);
//*********** WRITE UPDATED VALUES TO MEMORY ******************
// Write the updated distributions
//....EVEN.....................................
disteven[n] = f0;
disteven[N+n] = f2;
disteven[2*N+n] = f4;
disteven[3*N+n] = f6;
disteven[4*N+n] = f8;
disteven[5*N+n] = f10;
disteven[6*N+n] = f12;
disteven[7*N+n] = f14;
disteven[8*N+n] = f16;
disteven[9*N+n] = f18;
//....ODD......................................
distodd[n] = f1;
distodd[N+n] = f3;
distodd[2*N+n] = f5;
distodd[3*N+n] = f7;
distodd[4*N+n] = f9;
distodd[5*N+n] = f11;
distodd[6*N+n] = f13;
distodd[7*N+n] = f15;
distodd[8*N+n] = f17;
//...Store the Velocity..........................
Velocity[n] = jx;
Velocity[N+n] = jy;
Velocity[2*N+n] = jz;
//***************************************************************
}// check if n is in the solid
}
} // loop over n
}
//*************************************************************************
// D3Q7 mass-transport collision step for the two-fluid color model.
//
// For every non-solid node (ID[n] != 0) this kernel rebuilds the two
// component distributions (A_*, B_*) as equilibrium distributions based on
// the component densities already summed from the post-swap distributions,
// plus a recoloring term 'delta' that pushes the components apart along the
// normalized color gradient (Latva-Kokko & Rothman style recoloring;
// strength controlled by beta).
//
// Layout assumptions (from the indexing below):
//   A_even/B_even hold q = 0,2,4,6 (4 slots of length N),
//   A_odd /B_odd  hold q = 1,3,5   (3 slots of length N),
//   ColorGrad and Velocity are SoA with components at [n], [N+n], [2*N+n].
// Den, Phi and pBC are part of the shared kernel interface but are not used
// here. Launch configuration: NBLOCKS x NTHREADS grid-stride style loop.
__global__ void dvc_ScaLBL_D3Q7_ColorCollideMass(char *ID, double *A_even, double *A_odd, double *B_even, double *B_odd,
		double *Den, double *Phi, double *ColorGrad, double *Velocity, double beta, int N, bool pBC)
{
	int n;
	double f0,f1,f2,f3,f4,f5,f6;
	double na,nb,nab;	// density values
	double ux,uy,uz;	// flow velocity
	double nx,ny,nz,C;	// color gradient components
	double a1,a2,b1,b2;
	double delta;
	char id;
	int S = N/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		//........Get 1-D index for this thread....................
		n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
		// BUG FIX: previously only the load "id = ID[n]" was guarded by
		// (n<N); threads with n >= N then tested an UNINITIALIZED 'id' and,
		// whenever it happened to be nonzero, performed out-of-bounds reads
		// and writes at index n. The entire body now sits inside the bounds
		// check so out-of-range threads do nothing.
		if (n<N){
			id = ID[n];
			if (id != 0){
				//.....Load the Color gradient.........
				nx = ColorGrad[n];
				ny = ColorGrad[N+n];
				nz = ColorGrad[2*N+n];
				C = sqrt(nx*nx+ny*ny+nz*nz);
				if (C==0.0) C=1.0;	// avoid divide-by-zero; delta is ~0 anyway when the gradient vanishes
				nx = nx/C;
				ny = ny/C;
				nz = nz/C;
				//....Load the flow velocity...........
				ux = Velocity[n];
				uy = Velocity[N+n];
				uz = Velocity[2*N+n];
				//........................................................................
				// READ THE DISTRIBUTIONS
				// (read from opposite array due to previous swap operation)
				//........................................................................
				f2 = A_odd[n];
				f4 = A_odd[N+n];
				f6 = A_odd[2*N+n];
				f0 = A_even[n];
				f1 = A_even[N+n];
				f3 = A_even[2*N+n];
				f5 = A_even[3*N+n];
				na = f0+f1+f2+f3+f4+f5+f6;
				//........................................................................
				f2 = B_odd[n];
				f4 = B_odd[N+n];
				f6 = B_odd[2*N+n];
				f0 = B_even[n];
				f1 = B_even[N+n];
				f3 = B_even[2*N+n];
				f5 = B_even[3*N+n];
				nb = f0+f1+f2+f3+f4+f5+f6;
				nab = 1.0/(na+nb);
				//........................................................................
				//....Instantiate the density distributions
				// Generate Equilibrium Distributions and stream
				// Stationary value - distribution 0
				A_even[n] = 0.3333333333333333*na;
				B_even[n] = 0.3333333333333333*nb;
				//...............................................
				// q = 0,2,4
				// Cq = {1,0,0}, {0,1,0}, {0,0,1}
				delta = beta*na*nb*nab*0.1111111111111111*nx;
				if (!(na*nb*nab>0)) delta=0;	// also traps NaN (comparison false)
				a1 = na*(0.1111111111111111*(1+4.5*ux))+delta;
				b1 = nb*(0.1111111111111111*(1+4.5*ux))-delta;
				a2 = na*(0.1111111111111111*(1-4.5*ux))-delta;
				b2 = nb*(0.1111111111111111*(1-4.5*ux))+delta;
				A_odd[n] = a1;
				A_even[N+n] = a2;
				B_odd[n] = b1;
				B_even[N+n] = b2;
				//...............................................
				// q = 2
				// Cq = {0,1,0}
				delta = beta*na*nb*nab*0.1111111111111111*ny;
				if (!(na*nb*nab>0)) delta=0;
				a1 = na*(0.1111111111111111*(1+4.5*uy))+delta;
				b1 = nb*(0.1111111111111111*(1+4.5*uy))-delta;
				a2 = na*(0.1111111111111111*(1-4.5*uy))-delta;
				b2 = nb*(0.1111111111111111*(1-4.5*uy))+delta;
				A_odd[N+n] = a1;
				A_even[2*N+n] = a2;
				B_odd[N+n] = b1;
				B_even[2*N+n] = b2;
				//...............................................
				// q = 4
				// Cq = {0,0,1}
				delta = beta*na*nb*nab*0.1111111111111111*nz;
				if (!(na*nb*nab>0)) delta=0;
				a1 = na*(0.1111111111111111*(1+4.5*uz))+delta;
				b1 = nb*(0.1111111111111111*(1+4.5*uz))-delta;
				a2 = na*(0.1111111111111111*(1-4.5*uz))-delta;
				b2 = nb*(0.1111111111111111*(1-4.5*uz))+delta;
				A_odd[2*N+n] = a1;
				A_even[3*N+n] = a2;
				B_odd[2*N+n] = b1;
				B_even[3*N+n] = b2;
			}
		}
	}
}
//*************************************************************************
// D3Q7 density construction + streaming for the two-component color model.
//
// Reads the local component densities from Copy[2*n], Copy[2*n+1], builds
// equilibrium "mass" distributions for the six moving D3Q7 directions,
// recolors them along the normalized color gradient (strength beta), and
// accumulates them into Den at the neighbor node (streaming) or back onto
// the local node when the neighbor is solid (bounce-back).
//
// NOTE(review): neighbor indices (in,jn,kn) are used WITHOUT the (commented
// out) periodic wrap, so nodes on the domain boundary will index outside
// [0,N) — presumably the domain carries a halo layer so interior launches
// never touch the boundary; confirm against the caller.
// Phi and pBC are part of the shared kernel interface but unused here.
__global__ void dvc_DensityStreamD3Q7(char *ID, double *Den, double *Copy, double *Phi, double *ColorGrad, double *Velocity,
		double beta, int Nx, int Ny, int Nz, bool pBC)
{
	char id;
	int idx;
	int in,jn,kn,n,nn,N;
	int q,Cqx,Cqy,Cqz;
	//	int sendLoc;
	double na,nb;		// density values
	double ux,uy,uz;	// flow velocity
	double nx,ny,nz,C;	// color gradient components
	double a1,a2,b1,b2;
	double sp,delta;
	double feq[6];		// equilibrium distributions
	// Set of Discrete velocities for the D3Q7 Model (x, y, z unit vectors)
	int D3Q7[3][3]={{1,0,0},{0,1,0},{0,0,1}};
	N = Nx*Ny*Nz;
	int S = N/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		//........Get 1-D index for this thread....................
		n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
		if (n<N ){
			// BUG FIX: 'id' was tested below without ever being assigned for
			// the current node (uninitialized read). Load it here.
			id = ID[n];
			// Local Density Values
			na = Copy[2*n];
			nb = Copy[2*n+1];
			if (id > 0 && na+nb > 0.0){
				//.......Back out the 3-D indices for node n..............
				int k = n/(Nx*Ny);
				int j = (n-Nx*Ny*k)/Nx;
				int i = n-Nx*Ny*k-Nx*j;
				//.....Load the Color gradient.........
				nx = ColorGrad[n];
				ny = ColorGrad[N+n];
				nz = ColorGrad[2*N+n];
				C = sqrt(nx*nx+ny*ny+nz*nz);
				if (C == 0.0) C=1.0;	// avoid divide-by-zero when the gradient vanishes
				nx = nx/C;
				ny = ny/C;
				nz = nz/C;
				//....Load the flow velocity...........
				ux = Velocity[n];
				uy = Velocity[N+n];
				uz = Velocity[2*N+n];
				//....Instantiate the density distributions
				// Generate Equilibrium Distributions and stream
				// Stationary value - distribution 0 stays on the local node
				Den[2*n] += 0.3333333333333333*na;
				Den[2*n+1] += 0.3333333333333333*nb;
				// Non-Stationary equilibrium distributions
				feq[0] = 0.1111111111111111*(1+3*ux);
				feq[1] = 0.1111111111111111*(1-3*ux);
				feq[2] = 0.1111111111111111*(1+3*uy);
				feq[3] = 0.1111111111111111*(1-3*uy);
				feq[4] = 0.1111111111111111*(1+3*uz);
				feq[5] = 0.1111111111111111*(1-3*uz);
				// Construction and streaming for the components:
				// idx loops over the three axes; (q, q+1) are the +/- directions.
				for (idx=0; idx<3; idx++){
					// Distribution index
					q = 2*idx;
					// Associated discrete velocity
					Cqx = D3Q7[idx][0];
					Cqy = D3Q7[idx][1];
					Cqz = D3Q7[idx][2];
					// Generate the Equilibrium Distribution
					a1 = na*feq[q];
					b1 = nb*feq[q];
					a2 = na*feq[q+1];
					b2 = nb*feq[q+1];
					// Recolor the distributions: shift mass of A toward the
					// color gradient and mass of B away from it
					if (C > 0.0){
						sp = nx*double(Cqx)+ny*double(Cqy)+nz*double(Cqz);
						delta = na*nb/(na+nb)*0.1111111111111111*sp;
						a1 += beta*delta;
						a2 -= beta*delta;
						b1 -= beta*delta;
						b2 += beta*delta;
					}
					// .......Get the neighbor node in the +Cq direction.......
					in = i+Cqx;
					jn = j+Cqy;
					kn = k+Cqz;
					// Perform streaming or bounce-back as needed
					id = ID[kn*Nx*Ny+jn*Nx+in];
					if (id == 0){
						//.....Bounce-back Rule: return mass to the local node
						Den[2*n] += a1;
						Den[2*n+1] += b1;
					}
					else{
						//......Push the "distribution" to neighboring node...........
						nn = kn*Nx*Ny+jn*Nx+in;
						Den[2*nn] += a1;
						Den[2*nn+1] += b1;
					}
					// .......Get the neighbor node in the -Cq direction.......
					in = i-Cqx;
					jn = j-Cqy;
					kn = k-Cqz;
					// Perform streaming or bounce-back as needed
					id = ID[kn*Nx*Ny+jn*Nx+in];
					if (id == 0){
						//.....Bounce-back Rule...........
						Den[2*n] += a2;
						Den[2*n+1] += b2;
					}
					else{
						//......Push the "distribution" to neighboring node...........
						nn = kn*Nx*Ny+jn*Nx+in;
						Den[2*nn] += a2;
						Den[2*nn+1] += b2;
					}
				}
			}
		}
	}
}
// Rebuild the phase indicator field from the two component densities:
// Phi = (nA - nB) / (nA + nB) at every fluid (ID > 0) site.
// Den is SoA: component A at [n], component B at [N+n]. Solid sites are
// left untouched. Launch: NBLOCKS x NTHREADS grid-stride style loop.
__global__ void dvc_ScaLBL_ComputePhaseField(char *ID, double *Phi, double *Den, int N)
{
	const int chunk = N/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<chunk; s++){
		//........Get 1-D index for this thread....................
		const int site = chunk*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
		if (site >= N) continue;		// out of range
		if (!(ID[site] > 0)) continue;	// solid node: phase field not updated
		// Densities were already accumulated by the streaming step
		const double densA = Den[site];
		const double densB = Den[N+site];
		Phi[site] = (densA-densB)/(densA+densB);
	}
}
// Assign a constant value to every node of one z-plane (plane index 'Slice')
// of the field Phi. The launch must cover at least Nx*Ny threads; extra
// threads fall outside the plane and do nothing.
__global__ void dvc_ScaLBL_SetSlice_z(double *Phi, double value, int Nx, int Ny, int Nz, int Slice)
{
	// Thread's position within the slice
	const int idx = blockIdx.x*blockDim.x + threadIdx.x;
	if (idx < Nx*Ny){
		Phi[Slice*Nx*Ny + idx] = value;
	}
}
// D3Q19 MRT collision for the color model, "even" timestep of the AA
// (in-place, swap-free) update pattern: distributions are read from and
// written back to the same slots of 'dist'.
//
// Per lattice site n in [start, finish) this kernel:
//   1. reads the component densities nA, nB and derives the phase field
//      phi, local density rho0 and relaxation time tau by linear
//      interpolation between the A and B fluid properties;
//   2. computes the color gradient (nx,ny,nz) from 18 neighbor values of
//      Phi, addressed through the dense index Map[n] with strides
//      (1, strideY, strideZ);
//   3. transforms the 19 distributions to moment space, relaxes the
//      moments (Toelke/Freudiger-style MRT with interfacial-tension terms
//      proportional to alpha*C), applies the body force (Fx,Fy,Fz), and
//      transforms back;
//   4. writes the velocity (momentum / rho0) and regenerates the D3Q7
//      mass-transport distributions Aq, Bq with the beta recoloring term.
//
// NOTE(review): m1..m18 are reused first as neighbor Phi values for the
// gradient stencil and then overwritten as the MRT moments — do not reorder.
// NOTE(review): the m1 relaxation here uses -19*alpha*C, whereas the older
// swap-based D3Q19 color kernel in this file uses -alpha*C; presumably an
// intentional normalization difference — confirm against the model paper.
__global__ void dvc_ScaLBL_D3Q19_AAeven_Color(int *Map, double *dist, double *Aq, double *Bq, double *Den, double *Phi,
		double *Velocity, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
		double Fx, double Fy, double Fz, int strideY, int strideZ, int start, int finish, int Np){
	int ijk,nn,n;
	double fq;
	// conserved momemnts
	double rho,jx,jy,jz;
	// non-conserved moments
	double m1,m2,m4,m6,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18;
	double m3,m5,m7;
	double nA,nB; // number density
	double a1,b1,a2,b2,nAB,delta;
	double C,nx,ny,nz; //color gradient magnitude and direction
	double ux,uy,uz;
	double phi,tau,rho0,rlx_setA,rlx_setB;
	// Constants of the MRT inverse transformation matrix
	const double mrt_V1=0.05263157894736842;
	const double mrt_V2=0.012531328320802;
	const double mrt_V3=0.04761904761904762;
	const double mrt_V4=0.004594820384294068;
	const double mrt_V5=0.01587301587301587;
	const double mrt_V6=0.0555555555555555555555555;
	const double mrt_V7=0.02777777777777778;
	const double mrt_V8=0.08333333333333333;
	const double mrt_V9=0.003341687552213868;
	const double mrt_V10=0.003968253968253968;
	const double mrt_V11=0.01388888888888889;
	const double mrt_V12=0.04166666666666666;
	int S = Np/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		//........Get 1-D index for this thread....................
		n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
		if (n<finish) {
			// read the component number densities
			nA = Den[n];
			nB = Den[Np + n];
			// compute phase indicator field
			phi=(nA-nB)/(nA+nB);
			// local density (linear interpolation between rhoA and rhoB)
			rho0=rhoA + 0.5*(1.0-phi)*(rhoB-rhoA);
			// local relaxation time (linear interpolation between tauA and tauB)
			tau=tauA + 0.5*(1.0-phi)*(tauB-tauA);
			rlx_setA = 1.f/tau;
			rlx_setB = 8.f*(2.f-rlx_setA)/(8.f-rlx_setA);
			// Get the 1D index based on regular data layout
			ijk = Map[n];
			// COMPUTE THE COLOR GRADIENT
			//........................................................................
			//.................Read Phase Indicator Values............................
			// (m1..m18 temporarily hold the neighbor Phi values; they are
			//  overwritten with the MRT moments further below)
			//........................................................................
			nn = ijk-1;							// neighbor index (get convention)
			m1 = Phi[nn];						// get neighbor for phi - 1
			//........................................................................
			nn = ijk+1;							// neighbor index (get convention)
			m2 = Phi[nn];						// get neighbor for phi - 2
			//........................................................................
			nn = ijk-strideY;							// neighbor index (get convention)
			m3 = Phi[nn];					// get neighbor for phi - 3
			//........................................................................
			nn = ijk+strideY;							// neighbor index (get convention)
			m4 = Phi[nn];					// get neighbor for phi - 4
			//........................................................................
			nn = ijk-strideZ;						// neighbor index (get convention)
			m5 = Phi[nn];					// get neighbor for phi - 5
			//........................................................................
			nn = ijk+strideZ;						// neighbor index (get convention)
			m6 = Phi[nn];					// get neighbor for phi - 6
			//........................................................................
			nn = ijk-strideY-1;						// neighbor index (get convention)
			m7 = Phi[nn];					// get neighbor for phi - 7
			//........................................................................
			nn = ijk+strideY+1;						// neighbor index (get convention)
			m8 = Phi[nn];					// get neighbor for phi - 8
			//........................................................................
			nn = ijk+strideY-1;						// neighbor index (get convention)
			m9 = Phi[nn];					// get neighbor for phi - 9
			//........................................................................
			nn = ijk-strideY+1;						// neighbor index (get convention)
			m10 = Phi[nn];					// get neighbor for phi - 10
			//........................................................................
			nn = ijk-strideZ-1;						// neighbor index (get convention)
			m11 = Phi[nn];					// get neighbor for phi - 11
			//........................................................................
			nn = ijk+strideZ+1;						// neighbor index (get convention)
			m12 = Phi[nn];					// get neighbor for phi - 12
			//........................................................................
			nn = ijk+strideZ-1;						// neighbor index (get convention)
			m13 = Phi[nn];					// get neighbor for phi - 13
			//........................................................................
			nn = ijk-strideZ+1;						// neighbor index (get convention)
			m14 = Phi[nn];					// get neighbor for phi - 14
			//........................................................................
			nn = ijk-strideZ-strideY;					// neighbor index (get convention)
			m15 = Phi[nn];					// get neighbor for phi - 15
			//........................................................................
			nn = ijk+strideZ+strideY;					// neighbor index (get convention)
			m16 = Phi[nn];					// get neighbor for phi - 16
			//........................................................................
			nn = ijk+strideZ-strideY;					// neighbor index (get convention)
			m17 = Phi[nn];					// get neighbor for phi - 17
			//........................................................................
			nn = ijk-strideZ+strideY;					// neighbor index (get convention)
			m18 = Phi[nn];					// get neighbor for phi - 18
			//............Compute the Color Gradient...................................
			nx = -(m1-m2+0.5*(m7-m8+m9-m10+m11-m12+m13-m14));
			ny = -(m3-m4+0.5*(m7-m8-m9+m10+m15-m16+m17-m18));
			nz = -(m5-m6+0.5*(m11-m12-m13+m14+m15-m16-m17+m18));
			//...........Normalize the Color Gradient.................................
			C = sqrt(nx*nx+ny*ny+nz*nz);
			if (C==0.0) C=1.0;		// avoid divide-by-zero (direction zeroed again before relaxation)
			nx = nx/C;
			ny = ny/C;
			nz = nz/C;
			//......... Transform distributions to moment space .....................
			// (m1..m18 are overwritten here with the MRT moments)
			// q=0
			fq = dist[n];
			rho = fq;
			m1  = -30.0*fq;
			m2  = 12.0*fq;
			// q=1
			fq = dist[2*Np+n];
			rho += fq;
			m1 -= 11.0*fq;
			m2 -= 4.0*fq;
			jx = fq;
			m4 = -4.0*fq;
			m9 = 2.0*fq;
			m10 = -4.0*fq;
			// f2 = dist[10*Np+n];
			fq = dist[1*Np+n];
			rho += fq;
			m1 -= 11.0*(fq);
			m2 -= 4.0*(fq);
			jx -= fq;
			m4 += 4.0*(fq);
			m9 += 2.0*(fq);
			m10 -= 4.0*(fq);
			// q=3
			fq = dist[4*Np+n];
			rho += fq;
			m1 -= 11.0*fq;
			m2 -= 4.0*fq;
			jy = fq;
			m6 = -4.0*fq;
			m9 -= fq;
			m10 += 2.0*fq;
			m11 = fq;
			m12 = -2.0*fq;
			// q = 4
			fq = dist[3*Np+n];
			rho+= fq;
			m1 -= 11.0*fq;
			m2 -= 4.0*fq;
			jy -= fq;
			m6 += 4.0*fq;
			m9 -= fq;
			m10 += 2.0*fq;
			m11 += fq;
			m12 -= 2.0*fq;
			// q=5
			fq = dist[6*Np+n];
			rho += fq;
			m1 -= 11.0*fq;
			m2 -= 4.0*fq;
			jz = fq;
			m8 = -4.0*fq;
			m9 -= fq;
			m10 += 2.0*fq;
			m11 -= fq;
			m12 += 2.0*fq;
			// q = 6
			fq = dist[5*Np+n];
			rho+= fq;
			m1 -= 11.0*fq;
			m2 -= 4.0*fq;
			jz -= fq;
			m8 += 4.0*fq;
			m9 -= fq;
			m10 += 2.0*fq;
			m11 -= fq;
			m12 += 2.0*fq;
			// q=7
			fq = dist[8*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx += fq;
			m4 += fq;
			jy += fq;
			m6 += fq;
			m9  += fq;
			m10 += fq;
			m11 += fq;
			m12 += fq;
			m13 = fq;
			m16 = fq;
			m17 = -fq;
			// q = 8
			fq = dist[7*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx -= fq;
			m4 -= fq;
			jy -= fq;
			m6 -= fq;
			m9 += fq;
			m10 += fq;
			m11 += fq;
			m12 += fq;
			m13 += fq;
			m16 -= fq;
			m17 += fq;
			// q=9
			fq = dist[10*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx += fq;
			m4 += fq;
			jy -= fq;
			m6 -= fq;
			m9 += fq;
			m10 += fq;
			m11 += fq;
			m12 += fq;
			m13 -= fq;
			m16 += fq;
			m17 += fq;
			// q = 10
			fq = dist[9*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx -= fq;
			m4 -= fq;
			jy += fq;
			m6 += fq;
			m9 += fq;
			m10 += fq;
			m11 += fq;
			m12 += fq;
			m13 -= fq;
			m16 -= fq;
			m17 -= fq;
			// q=11
			fq = dist[12*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx += fq;
			m4 += fq;
			jz += fq;
			m8 += fq;
			m9 += fq;
			m10 += fq;
			m11 -= fq;
			m12 -= fq;
			m15 = fq;
			m16 -= fq;
			m18 = fq;
			// q=12
			fq = dist[11*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx -= fq;
			m4 -= fq;
			jz -= fq;
			m8 -= fq;
			m9 += fq;
			m10 += fq;
			m11 -= fq;
			m12 -= fq;
			m15 += fq;
			m16 += fq;
			m18 -= fq;
			// q=13
			fq = dist[14*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx += fq;
			m4 += fq;
			jz -= fq;
			m8 -= fq;
			m9 += fq;
			m10 += fq;
			m11 -= fq;
			m12 -= fq;
			m15 -= fq;
			m16 -= fq;
			m18 -= fq;
			// q=14
			fq = dist[13*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jx -= fq;
			m4 -= fq;
			jz += fq;
			m8 += fq;
			m9 += fq;
			m10 += fq;
			m11 -= fq;
			m12 -= fq;
			m15 -= fq;
			m16 += fq;
			m18 += fq;
			// q=15
			fq = dist[16*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jy += fq;
			m6 += fq;
			jz += fq;
			m8 += fq;
			m9 -= 2.0*fq;
			m10 -= 2.0*fq;
			m14 = fq;
			m17 += fq;
			m18 -= fq;
			// q=16
			fq = dist[15*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jy -= fq;
			m6 -= fq;
			jz -= fq;
			m8 -= fq;
			m9 -= 2.0*fq;
			m10 -= 2.0*fq;
			m14 += fq;
			m17 -= fq;
			m18 += fq;
			// q=17
			fq = dist[18*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jy += fq;
			m6 += fq;
			jz -= fq;
			m8 -= fq;
			m9 -= 2.0*fq;
			m10 -= 2.0*fq;
			m14 -= fq;
			m17 += fq;
			m18 += fq;
			// q=18
			fq = dist[17*Np+n];
			rho += fq;
			m1 += 8.0*fq;
			m2 += fq;
			jy -= fq;
			m6 -= fq;
			jz += fq;
			m8 += fq;
			m9 -= 2.0*fq;
			m10 -= 2.0*fq;
			m14 -= fq;
			m17 -= fq;
			m18 -= fq;
			//........................................................................
			//..............carry out relaxation process..............................
			//..........Toelke, Fruediger et. al. 2006................................
			if (C == 0.0)	nx = ny = nz = 0.0;	// no interface: drop the tension terms
			m1 = m1 + rlx_setA*((19*(jx*jx+jy*jy+jz*jz)/rho0 - 11*rho) -19*alpha*C - m1);
			m2 = m2 + rlx_setA*((3*rho - 5.5*(jx*jx+jy*jy+jz*jz)/rho0)- m2);
			m4 = m4 + rlx_setB*((-0.6666666666666666*jx)- m4);
			m6 = m6 + rlx_setB*((-0.6666666666666666*jy)- m6);
			m8 = m8 + rlx_setB*((-0.6666666666666666*jz)- m8);
			m9 = m9 + rlx_setA*(((2*jx*jx-jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(2*nx*nx-ny*ny-nz*nz) - m9);
			m10 = m10 + rlx_setA*( - m10);
			m11 = m11 + rlx_setA*(((jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(ny*ny-nz*nz)- m11);
			m12 = m12 + rlx_setA*( - m12);
			m13 = m13 + rlx_setA*( (jx*jy/rho0) + 0.5*alpha*C*nx*ny - m13);
			m14 = m14 + rlx_setA*( (jy*jz/rho0) + 0.5*alpha*C*ny*nz - m14);
			m15 = m15 + rlx_setA*( (jx*jz/rho0) + 0.5*alpha*C*nx*nz - m15);
			m16 = m16 + rlx_setB*( - m16);
			m17 = m17 + rlx_setB*( - m17);
			m18 = m18 + rlx_setB*( - m18);
			//.......................................................................................................
			//.................inverse transformation (body force folded in per direction)...........................
			// q=0
			fq = mrt_V1*rho-mrt_V2*m1+mrt_V3*m2;
			dist[n] = fq;
			// q = 1
			fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jx-m4)+mrt_V6*(m9-m10) + 0.16666666*Fx;
			dist[1*Np+n] = fq;
			// q=2
			fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m4-jx)+mrt_V6*(m9-m10) -  0.16666666*Fx;
			dist[2*Np+n] = fq;
			// q = 3
			fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jy-m6)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) + 0.16666666*Fy;
			dist[3*Np+n] = fq;
			// q = 4
			fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m6-jy)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) - 0.16666666*Fy;
			dist[4*Np+n] = fq;
			// q = 5
			fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jz-m8)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) + 0.16666666*Fz;
			dist[5*Np+n] = fq;
			// q = 6
			fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m8-jz)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) - 0.16666666*Fz;
			dist[6*Np+n] = fq;
			// q = 7
			fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx+jy)+0.025*(m4+m6)+
					mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12+0.25*m13+0.125*(m16-m17) + 0.08333333333*(Fx+Fy);
			dist[7*Np+n] = fq;
			// q = 8
			fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jy)-0.025*(m4+m6) +mrt_V7*m9+mrt_V11*m10+mrt_V8*m11
					+mrt_V12*m12+0.25*m13+0.125*(m17-m16) - 0.08333333333*(Fx+Fy);
			dist[8*Np+n] = fq;
			// q = 9
			fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx-jy)+0.025*(m4-m6)+
					mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13+0.125*(m16+m17) + 0.08333333333*(Fx-Fy);
			dist[9*Np+n] = fq;
			// q = 10
			fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jy-jx)+0.025*(m6-m4)+
					mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13-0.125*(m16+m17)- 0.08333333333*(Fx-Fy);
			dist[10*Np+n] = fq;
			// q = 11
			fq = mrt_V1*rho+mrt_V9*m1
					+mrt_V10*m2+0.1*(jx+jz)+0.025*(m4+m8)
					+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
					-mrt_V12*m12+0.25*m15+0.125*(m18-m16) + 0.08333333333*(Fx+Fz);
			dist[11*Np+n] = fq;
			// q = 12
			fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jz)-0.025*(m4+m8)+
					mrt_V7*m9+mrt_V11*m10-mrt_V8*m11-mrt_V12*m12+0.25*m15+0.125*(m16-m18)-0.08333333333*(Fx+Fz);
			dist[12*Np+n] = fq;
			// q = 13
			fq = mrt_V1*rho+mrt_V9*m1
					+mrt_V10*m2+0.1*(jx-jz)+0.025*(m4-m8)
					+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
					-mrt_V12*m12-0.25*m15-0.125*(m16+m18) + 0.08333333333*(Fx-Fz);
			dist[13*Np+n] = fq;
			// q= 14
			fq = mrt_V1*rho+mrt_V9*m1
					+mrt_V10*m2+0.1*(jz-jx)+0.025*(m8-m4)
					+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
					-mrt_V12*m12-0.25*m15+0.125*(m16+m18) - 0.08333333333*(Fx-Fz);
			dist[14*Np+n] = fq;
			// q = 15
			fq = mrt_V1*rho+mrt_V9*m1
					+mrt_V10*m2+0.1*(jy+jz)+0.025*(m6+m8)
					-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m17-m18) + 0.08333333333*(Fy+Fz);
			dist[15*Np+n] = fq;
			// q = 16
			fq = mrt_V1*rho+mrt_V9*m1
					+mrt_V10*m2-0.1*(jy+jz)-0.025*(m6+m8)
					-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m18-m17)- 0.08333333333*(Fy+Fz);
			dist[16*Np+n] = fq;
			// q = 17
			fq = mrt_V1*rho+mrt_V9*m1
					+mrt_V10*m2+0.1*(jy-jz)+0.025*(m6-m8)
					-mrt_V6*m9-mrt_V7*m10-0.25*m14+0.125*(m17+m18) + 0.08333333333*(Fy-Fz);
			dist[17*Np+n] = fq;
			// q = 18
			fq = mrt_V1*rho+mrt_V9*m1
					+mrt_V10*m2+0.1*(jz-jy)+0.025*(m8-m6)
					-mrt_V6*m9-mrt_V7*m10-0.25*m14-0.125*(m17+m18) - 0.08333333333*(Fy-Fz);
			dist[18*Np+n] = fq;
			//........................................................................
			// write the velocity (momentum divided by the interpolated density)
			ux = jx / rho0;
			uy = jy / rho0;
			uz = jz / rho0;
			Velocity[n] = ux;
			Velocity[Np+n] = uy;
			Velocity[2*Np+n] = uz;
			// Instantiate mass transport distributions
			// Stationary value - distribution 0
			nAB = 1.0/(nA+nB);
			Aq[n] = 0.3333333333333333*nA;
			Bq[n] = 0.3333333333333333*nB;
			//...............................................
			// q = 0,2,4
			// Cq = {1,0,0}, {0,1,0}, {0,0,1}
			delta = beta*nA*nB*nAB*0.1111111111111111*nx;
			if (!(nA*nB*nAB>0)) delta=0;	// also traps NaN (comparison is false)
			a1 = nA*(0.1111111111111111*(1+4.5*ux))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*ux))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*ux))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*ux))+delta;
			Aq[1*Np+n] = a1;
			Bq[1*Np+n] = b1;
			Aq[2*Np+n] = a2;
			Bq[2*Np+n] = b2;
			//...............................................
			// q = 2
			// Cq = {0,1,0}
			delta = beta*nA*nB*nAB*0.1111111111111111*ny;
			if (!(nA*nB*nAB>0)) delta=0;
			a1 = nA*(0.1111111111111111*(1+4.5*uy))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*uy))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*uy))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*uy))+delta;
			Aq[3*Np+n] = a1;
			Bq[3*Np+n] = b1;
			Aq[4*Np+n] = a2;
			Bq[4*Np+n] = b2;
			//...............................................
			// q = 4
			// Cq = {0,0,1}
			delta = beta*nA*nB*nAB*0.1111111111111111*nz;
			if (!(nA*nB*nAB>0)) delta=0;
			a1 = nA*(0.1111111111111111*(1+4.5*uz))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*uz))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*uz))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*uz))+delta;
			Aq[5*Np+n] = a1;
			Bq[5*Np+n] = b1;
			Aq[6*Np+n] = a2;
			Bq[6*Np+n] = b2;
			//...............................................
		}
	}
}
// D3Q19 MRT (multi-relaxation-time) color-model collision kernel, "AA-odd"
// timestep variant: distribution reads AND writes go through the neighborList
// indirection (AA access pattern), so the same dist array serves both halves
// of the streaming step without a second buffer.
//
// Per site n in [start, finish) this kernel:
//   1. reads the two component densities nA, nB from Den (SoA layout, stride Np)
//      and derives the phase indicator phi, local density rho0, and relaxation
//      rates rlx_setA / rlx_setB from the tauA/tauB, rhoA/rhoB inputs;
//   2. computes the color gradient (nx,ny,nz) from an 18-point stencil on Phi,
//      addressed through Map[n] with strides 1 / strideY / strideZ;
//   3. gathers the 19 distributions, accumulates the MRT moments m1..m18 and
//      conserved moments rho, jx, jy, jz;
//   4. relaxes the moments (Toelke, Fruediger et al. 2006 form, with the
//      alpha*C surface-tension forcing terms), applies the body force
//      (Fx,Fy,Fz), and scatters the post-collision distributions back;
//   5. writes the velocity (SoA, stride Np) and re-initializes the D3Q7 mass
//      transport distributions Aq/Bq with the beta recoloring term.
//
// Launch layout: 1-D grid; each thread strides over S = Np/NBLOCKS/NTHREADS+1
// chunks so any site count is covered (NBLOCKS/NTHREADS are macros defined
// elsewhere in this file — presumably the fixed launch configuration; confirm
// against the host-side launch).
__global__ void dvc_ScaLBL_D3Q19_AAodd_Color(int *neighborList, int *Map, double *dist, double *Aq, double *Bq, double *Den,
double *Phi, double *Velocity, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
double Fx, double Fy, double Fz, int strideY, int strideZ, int start, int finish, int Np){
int n,nn,ijk,nread;
// Cached neighbor indices for q=1..14: each is read once for the gather and
// reused for the scatter of the opposite direction, saving a second
// neighborList load. q=15..18 re-read neighborList instead (see nread below).
int nr1,nr2,nr3,nr4,nr5,nr6;
int nr7,nr8,nr9,nr10;
int nr11,nr12,nr13,nr14;
//int nr15,nr16,nr17,nr18;
double fq;
// conserved momemnts
double rho,jx,jy,jz;
// non-conserved moments
double m1,m2,m4,m6,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18;
// m3,m5,m7 double as scratch for the Phi stencil reads below (the color
// gradient is computed before the moment accumulation overwrites them).
double m3,m5,m7;
double nA,nB; // number density
double a1,b1,a2,b2,nAB,delta;
double C,nx,ny,nz; //color gradient magnitude and direction
double ux,uy,uz;
double phi,tau,rho0,rlx_setA,rlx_setB;
// Precomputed rational weights of the inverse MRT transform (e.g. 1/19, 1/21,
// 1/36, ...), stored as decimal constants.
const double mrt_V1=0.05263157894736842;
const double mrt_V2=0.012531328320802;
const double mrt_V3=0.04761904761904762;
const double mrt_V4=0.004594820384294068;
const double mrt_V5=0.01587301587301587;
const double mrt_V6=0.0555555555555555555555555;
const double mrt_V7=0.02777777777777778;
const double mrt_V8=0.08333333333333333;
const double mrt_V9=0.003341687552213868;
const double mrt_V10=0.003968253968253968;
const double mrt_V11=0.01388888888888889;
const double mrt_V12=0.04166666666666666;
int S = Np/NBLOCKS/NTHREADS + 1;
for (int s=0; s<S; s++){
//........Get 1-D index for this thread....................
n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
if (n<finish) {
// read the component number densities
nA = Den[n];
nB = Den[Np + n];
// compute phase indicator field
phi=(nA-nB)/(nA+nB);
// local density (linear interpolation between the pure-phase densities)
rho0=rhoA + 0.5*(1.0-phi)*(rhoB-rhoA);
// local relaxation time
tau=tauA + 0.5*(1.0-phi)*(tauB-tauA);
rlx_setA = 1.f/tau;
rlx_setB = 8.f*(2.f-rlx_setA)/(8.f-rlx_setA);
// Get the 1D index based on regular data layout
ijk = Map[n];
// COMPUTE THE COLOR GRADIENT
//........................................................................
//.................Read Phase Indicator Values............................
//........................................................................
nn = ijk-1; // neighbor index (get convention)
m1 = Phi[nn]; // get neighbor for phi - 1
//........................................................................
nn = ijk+1; // neighbor index (get convention)
m2 = Phi[nn]; // get neighbor for phi - 2
//........................................................................
nn = ijk-strideY; // neighbor index (get convention)
m3 = Phi[nn]; // get neighbor for phi - 3
//........................................................................
nn = ijk+strideY; // neighbor index (get convention)
m4 = Phi[nn]; // get neighbor for phi - 4
//........................................................................
nn = ijk-strideZ; // neighbor index (get convention)
m5 = Phi[nn]; // get neighbor for phi - 5
//........................................................................
nn = ijk+strideZ; // neighbor index (get convention)
m6 = Phi[nn]; // get neighbor for phi - 6
//........................................................................
nn = ijk-strideY-1; // neighbor index (get convention)
m7 = Phi[nn]; // get neighbor for phi - 7
//........................................................................
nn = ijk+strideY+1; // neighbor index (get convention)
m8 = Phi[nn]; // get neighbor for phi - 8
//........................................................................
nn = ijk+strideY-1; // neighbor index (get convention)
m9 = Phi[nn]; // get neighbor for phi - 9
//........................................................................
nn = ijk-strideY+1; // neighbor index (get convention)
m10 = Phi[nn]; // get neighbor for phi - 10
//........................................................................
nn = ijk-strideZ-1; // neighbor index (get convention)
m11 = Phi[nn]; // get neighbor for phi - 11
//........................................................................
nn = ijk+strideZ+1; // neighbor index (get convention)
m12 = Phi[nn]; // get neighbor for phi - 12
//........................................................................
nn = ijk+strideZ-1; // neighbor index (get convention)
m13 = Phi[nn]; // get neighbor for phi - 13
//........................................................................
nn = ijk-strideZ+1; // neighbor index (get convention)
m14 = Phi[nn]; // get neighbor for phi - 14
//........................................................................
nn = ijk-strideZ-strideY; // neighbor index (get convention)
m15 = Phi[nn]; // get neighbor for phi - 15
//........................................................................
nn = ijk+strideZ+strideY; // neighbor index (get convention)
m16 = Phi[nn]; // get neighbor for phi - 16
//........................................................................
nn = ijk+strideZ-strideY; // neighbor index (get convention)
m17 = Phi[nn]; // get neighbor for phi - 17
//........................................................................
nn = ijk-strideZ+strideY; // neighbor index (get convention)
m18 = Phi[nn]; // get neighbor for phi - 18
//............Compute the Color Gradient...................................
nx = -(m1-m2+0.5*(m7-m8+m9-m10+m11-m12+m13-m14));
ny = -(m3-m4+0.5*(m7-m8-m9+m10+m15-m16+m17-m18));
nz = -(m5-m6+0.5*(m11-m12-m13+m14+m15-m16-m17+m18));
//...........Normalize the Color Gradient.................................
C = sqrt(nx*nx+ny*ny+nz*nz);
// Guard against division by zero: a zero gradient leaves (nx,ny,nz)=(0,0,0)
// and C=1.0 so the forcing terms below vanish naturally.
if (C==0.0) C=1.0;
nx = nx/C;
ny = ny/C;
nz = nz/C;
// q=0
fq = dist[n];
rho = fq;
m1 = -30.0*fq;
m2 = 12.0*fq;
// q=1
//nread = neighborList[n]; // neighbor 2
//fq = dist[nread]; // reading the f1 data into register fq
nr1 = neighborList[n];
fq = dist[nr1]; // reading the f1 data into register fq
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jx = fq;
m4 = -4.0*fq;
m9 = 2.0*fq;
m10 = -4.0*fq;
// f2 = dist[10*Np+n];
//nread = neighborList[n+Np]; // neighbor 1 ( < 10Np => even part of dist)
//fq = dist[nread]; // reading the f2 data into register fq
nr2 = neighborList[n+Np]; // neighbor 1 ( < 10Np => even part of dist)
fq = dist[nr2]; // reading the f2 data into register fq
rho += fq;
m1 -= 11.0*(fq);
m2 -= 4.0*(fq);
jx -= fq;
m4 += 4.0*(fq);
m9 += 2.0*(fq);
m10 -= 4.0*(fq);
// q=3
//nread = neighborList[n+2*Np]; // neighbor 4
//fq = dist[nread];
nr3 = neighborList[n+2*Np]; // neighbor 4
fq = dist[nr3];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jy = fq;
m6 = -4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 = fq;
m12 = -2.0*fq;
// q = 4
//nread = neighborList[n+3*Np]; // neighbor 3
//fq = dist[nread];
nr4 = neighborList[n+3*Np]; // neighbor 3
fq = dist[nr4];
rho+= fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jy -= fq;
m6 += 4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 += fq;
m12 -= 2.0*fq;
// q=5
//nread = neighborList[n+4*Np];
//fq = dist[nread];
nr5 = neighborList[n+4*Np];
fq = dist[nr5];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jz = fq;
m8 = -4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 -= fq;
m12 += 2.0*fq;
// q = 6
//nread = neighborList[n+5*Np];
//fq = dist[nread];
nr6 = neighborList[n+5*Np];
fq = dist[nr6];
rho+= fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jz -= fq;
m8 += 4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 -= fq;
m12 += 2.0*fq;
// q=7
//nread = neighborList[n+6*Np];
//fq = dist[nread];
nr7 = neighborList[n+6*Np];
fq = dist[nr7];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jy += fq;
m6 += fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 = fq;
m16 = fq;
m17 = -fq;
// q = 8
//nread = neighborList[n+7*Np];
//fq = dist[nread];
nr8 = neighborList[n+7*Np];
fq = dist[nr8];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jy -= fq;
m6 -= fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 += fq;
m16 -= fq;
m17 += fq;
// q=9
//nread = neighborList[n+8*Np];
//fq = dist[nread];
nr9 = neighborList[n+8*Np];
fq = dist[nr9];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jy -= fq;
m6 -= fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 -= fq;
m16 += fq;
m17 += fq;
// q = 10
//nread = neighborList[n+9*Np];
//fq = dist[nread];
nr10 = neighborList[n+9*Np];
fq = dist[nr10];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jy += fq;
m6 += fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 -= fq;
m16 -= fq;
m17 -= fq;
// q=11
//nread = neighborList[n+10*Np];
//fq = dist[nread];
nr11 = neighborList[n+10*Np];
fq = dist[nr11];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jz += fq;
m8 += fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 = fq;
m16 -= fq;
m18 = fq;
// q=12
//nread = neighborList[n+11*Np];
//fq = dist[nread];
nr12 = neighborList[n+11*Np];
fq = dist[nr12];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jz -= fq;
m8 -= fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 += fq;
m16 += fq;
m18 -= fq;
// q=13
//nread = neighborList[n+12*Np];
//fq = dist[nread];
nr13 = neighborList[n+12*Np];
fq = dist[nr13];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jz -= fq;
m8 -= fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 -= fq;
m16 -= fq;
m18 -= fq;
// q=14
//nread = neighborList[n+13*Np];
//fq = dist[nread];
nr14 = neighborList[n+13*Np];
fq = dist[nr14];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jz += fq;
m8 += fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 -= fq;
m16 += fq;
m18 += fq;
// q=15 (from here on the neighbor index is re-read at scatter time
// instead of cached in a register — see nread in the writes below)
nread = neighborList[n+14*Np];
fq = dist[nread];
//fq = dist[17*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy += fq;
m6 += fq;
jz += fq;
m8 += fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 = fq;
m17 += fq;
m18 -= fq;
// q=16
nread = neighborList[n+15*Np];
fq = dist[nread];
//fq = dist[8*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy -= fq;
m6 -= fq;
jz -= fq;
m8 -= fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 += fq;
m17 -= fq;
m18 += fq;
// q=17
//fq = dist[18*Np+n];
nread = neighborList[n+16*Np];
fq = dist[nread];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy += fq;
m6 += fq;
jz -= fq;
m8 -= fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 -= fq;
m17 += fq;
m18 += fq;
// q=18
nread = neighborList[n+17*Np];
fq = dist[nread];
//fq = dist[9*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy -= fq;
m6 -= fq;
jz += fq;
m8 += fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 -= fq;
m17 -= fq;
m18 -= fq;
//........................................................................
//..............carry out relaxation process..............................
//..........Toelke, Fruediger et. al. 2006................................
// NOTE(review): this guard appears unreachable — C was set to 1.0 above
// whenever it was zero, so C can never be 0.0 here; (nx,ny,nz) are already
// zero in that case. Kept as-is; confirm intent upstream.
if (C == 0.0) nx = ny = nz = 0.0;
m1 = m1 + rlx_setA*((19*(jx*jx+jy*jy+jz*jz)/rho0 - 11*rho) -alpha*C - m1);
m2 = m2 + rlx_setA*((3*rho - 5.5*(jx*jx+jy*jy+jz*jz)/rho0)- m2);
m4 = m4 + rlx_setB*((-0.6666666666666666*jx)- m4);
m6 = m6 + rlx_setB*((-0.6666666666666666*jy)- m6);
m8 = m8 + rlx_setB*((-0.6666666666666666*jz)- m8);
m9 = m9 + rlx_setA*(((2*jx*jx-jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(2*nx*nx-ny*ny-nz*nz) - m9);
m10 = m10 + rlx_setA*( - m10);
m11 = m11 + rlx_setA*(((jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(ny*ny-nz*nz)- m11);
m12 = m12 + rlx_setA*( - m12);
m13 = m13 + rlx_setA*( (jx*jy/rho0) + 0.5*alpha*C*nx*ny - m13);
m14 = m14 + rlx_setA*( (jy*jz/rho0) + 0.5*alpha*C*ny*nz - m14);
m15 = m15 + rlx_setA*( (jx*jz/rho0) + 0.5*alpha*C*nx*nz - m15);
m16 = m16 + rlx_setB*( - m16);
m17 = m17 + rlx_setB*( - m17);
m18 = m18 + rlx_setB*( - m18);
//.................inverse transformation......................................................
// Each post-collision distribution is written to the OPPOSITE direction's
// neighbor slot (e.g. q=1 goes to nr2, q=2 to nr1) — the AA-pattern
// streaming step.
// q=0
fq = mrt_V1*rho-mrt_V2*m1+mrt_V3*m2;
dist[n] = fq;
// q = 1
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jx-m4)+mrt_V6*(m9-m10)+0.16666666*Fx;
//nread = neighborList[n+Np];
dist[nr2] = fq;
// q=2
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m4-jx)+mrt_V6*(m9-m10) - 0.16666666*Fx;
//nread = neighborList[n];
dist[nr1] = fq;
// q = 3
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jy-m6)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) + 0.16666666*Fy;
//nread = neighborList[n+3*Np];
dist[nr4] = fq;
// q = 4
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m6-jy)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) - 0.16666666*Fy;
//nread = neighborList[n+2*Np];
dist[nr3] = fq;
// q = 5
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jz-m8)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) + 0.16666666*Fz;
//nread = neighborList[n+5*Np];
dist[nr6] = fq;
// q = 6
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m8-jz)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) - 0.16666666*Fz;
//nread = neighborList[n+4*Np];
dist[nr5] = fq;
// q = 7
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx+jy)+0.025*(m4+m6)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12+0.25*m13+0.125*(m16-m17) + 0.08333333333*(Fx+Fy);
//nread = neighborList[n+7*Np];
dist[nr8] = fq;
// q = 8
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jy)-0.025*(m4+m6) +mrt_V7*m9+mrt_V11*m10+mrt_V8*m11
+mrt_V12*m12+0.25*m13+0.125*(m17-m16) - 0.08333333333*(Fx+Fy);
//nread = neighborList[n+6*Np];
dist[nr7] = fq;
// q = 9
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx-jy)+0.025*(m4-m6)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13+0.125*(m16+m17) + 0.08333333333*(Fx-Fy);
//nread = neighborList[n+9*Np];
dist[nr10] = fq;
// q = 10
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jy-jx)+0.025*(m6-m4)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13-0.125*(m16+m17)- 0.08333333333*(Fx-Fy);
//nread = neighborList[n+8*Np];
dist[nr9] = fq;
// q = 11
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jx+jz)+0.025*(m4+m8)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12+0.25*m15+0.125*(m18-m16) + 0.08333333333*(Fx+Fz);
//nread = neighborList[n+11*Np];
dist[nr12] = fq;
// q = 12
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jz)-0.025*(m4+m8)+
mrt_V7*m9+mrt_V11*m10-mrt_V8*m11-mrt_V12*m12+0.25*m15+0.125*(m16-m18) - 0.08333333333*(Fx+Fz);
//nread = neighborList[n+10*Np];
dist[nr11]= fq;
// q = 13
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jx-jz)+0.025*(m4-m8)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12-0.25*m15-0.125*(m16+m18) + 0.08333333333*(Fx-Fz);
//nread = neighborList[n+13*Np];
dist[nr14] = fq;
// q= 14
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jz-jx)+0.025*(m8-m4)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12-0.25*m15+0.125*(m16+m18) - 0.08333333333*(Fx-Fz);
//nread = neighborList[n+12*Np];
dist[nr13] = fq;
// q = 15
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jy+jz)+0.025*(m6+m8)
-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m17-m18) + 0.08333333333*(Fy+Fz);
nread = neighborList[n+15*Np];
dist[nread] = fq;
// q = 16
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2-0.1*(jy+jz)-0.025*(m6+m8)
-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m18-m17)- 0.08333333333*(Fy+Fz);
nread = neighborList[n+14*Np];
dist[nread] = fq;
// q = 17
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jy-jz)+0.025*(m6-m8)
-mrt_V6*m9-mrt_V7*m10-0.25*m14+0.125*(m17+m18) + 0.08333333333*(Fy-Fz);
nread = neighborList[n+17*Np];
dist[nread] = fq;
// q = 18
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jz-jy)+0.025*(m8-m6)
-mrt_V6*m9-mrt_V7*m10-0.25*m14-0.125*(m17+m18) - 0.08333333333*(Fy-Fz);
nread = neighborList[n+16*Np];
dist[nread] = fq;
// write the velocity (momentum scaled by the local mixture density)
ux = jx / rho0;
uy = jy / rho0;
uz = jz / rho0;
Velocity[n] = ux;
Velocity[Np+n] = uy;
Velocity[2*Np+n] = uz;
// Instantiate mass transport distributions (D3Q7 for each component);
// delta is the beta recoloring (anti-diffusion) term along the color
// gradient, zeroed when either component density is non-positive.
// Stationary value - distribution 0
nAB = 1.0/(nA+nB);
Aq[n] = 0.3333333333333333*nA;
Bq[n] = 0.3333333333333333*nB;
//...............................................
// q = 0,2,4
// Cq = {1,0,0}, {0,1,0}, {0,0,1}
delta = beta*nA*nB*nAB*0.1111111111111111*nx;
if (!(nA*nB*nAB>0)) delta=0;
a1 = nA*(0.1111111111111111*(1+4.5*ux))+delta;
b1 = nB*(0.1111111111111111*(1+4.5*ux))-delta;
a2 = nA*(0.1111111111111111*(1-4.5*ux))-delta;
b2 = nB*(0.1111111111111111*(1-4.5*ux))+delta;
// q = 1
//nread = neighborList[n+Np];
Aq[nr2] = a1;
Bq[nr2] = b1;
// q=2
//nread = neighborList[n];
Aq[nr1] = a2;
Bq[nr1] = b2;
//...............................................
// Cq = {0,1,0}
delta = beta*nA*nB*nAB*0.1111111111111111*ny;
if (!(nA*nB*nAB>0)) delta=0;
a1 = nA*(0.1111111111111111*(1+4.5*uy))+delta;
b1 = nB*(0.1111111111111111*(1+4.5*uy))-delta;
a2 = nA*(0.1111111111111111*(1-4.5*uy))-delta;
b2 = nB*(0.1111111111111111*(1-4.5*uy))+delta;
// q = 3
//nread = neighborList[n+3*Np];
Aq[nr4] = a1;
Bq[nr4] = b1;
// q = 4
//nread = neighborList[n+2*Np];
Aq[nr3] = a2;
Bq[nr3] = b2;
//...............................................
// q = 4
// Cq = {0,0,1}
delta = beta*nA*nB*nAB*0.1111111111111111*nz;
if (!(nA*nB*nAB>0)) delta=0;
a1 = nA*(0.1111111111111111*(1+4.5*uz))+delta;
b1 = nB*(0.1111111111111111*(1+4.5*uz))-delta;
a2 = nA*(0.1111111111111111*(1-4.5*uz))-delta;
b2 = nB*(0.1111111111111111*(1-4.5*uz))+delta;
// q = 5
//nread = neighborList[n+5*Np];
Aq[nr6] = a1;
Bq[nr6] = b1;
// q = 6
//nread = neighborList[n+4*Np];
Aq[nr5] = a2;
Bq[nr5] = b2;
//...............................................
}
}
}
// D3Q19 MRT color-model momentum kernel, "AA-odd" timestep variant.
// Differs from dvc_ScaLBL_D3Q19_AAodd_Color above in two ways:
//   - the color gradient is READ from the precomputed ColorGrad array
//     (SoA, stride Np) instead of being stenciled from Phi, so no Map/stride
//     arguments are needed;
//   - only the hydrodynamic step is performed (gather, MRT relaxation with
//     alpha*C forcing, body force, scatter, velocity write); the Aq/Bq mass
//     transport distributions are NOT updated here — beta is accepted for
//     signature parity but unused in the body.
// All distribution reads and writes go through neighborList indirection
// (AA access pattern); each post-collision value is written to the opposite
// direction's neighbor slot. Threads stride over S = Np/NBLOCKS/NTHREADS+1
// chunks of the [start, finish) site range (NBLOCKS/NTHREADS are macros
// defined elsewhere in this file).
__global__ void dvc_ScaLBL_D3Q19_AAodd_ColorMomentum(int *neighborList, double *dist, double *Den,
double *Velocity, double *ColorGrad, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
double Fx, double Fy, double Fz, int start, int finish, int Np){
int n,nread;
double fq;
// conserved momemnts
double rho,jx,jy,jz;
// non-conserved moments
double m1,m2,m4,m6,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18;
double nA,nB; // number density
double C,nx,ny,nz; //color gradient magnitude and direction
double ux,uy,uz;
double phi,tau,rho0,rlx_setA,rlx_setB;
// Precomputed rational weights of the inverse MRT transform.
const double mrt_V1=0.05263157894736842;
const double mrt_V2=0.012531328320802;
const double mrt_V3=0.04761904761904762;
const double mrt_V4=0.004594820384294068;
const double mrt_V5=0.01587301587301587;
const double mrt_V6=0.0555555555555555555555555;
const double mrt_V7=0.02777777777777778;
const double mrt_V8=0.08333333333333333;
const double mrt_V9=0.003341687552213868;
const double mrt_V10=0.003968253968253968;
const double mrt_V11=0.01388888888888889;
const double mrt_V12=0.04166666666666666;
int S = Np/NBLOCKS/NTHREADS + 1;
for (int s=0; s<S; s++){
//........Get 1-D index for this thread....................
n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
if (n<finish) {
// read the component number densities
nA = Den[n];
nB = Den[Np + n];
// compute phase indicator field
phi=(nA-nB)/(nA+nB);
// local density (linear interpolation between the pure-phase densities)
rho0=rhoA + 0.5*(1.0-phi)*(rhoB-rhoA);
// local relaxation time
tau=tauA + 0.5*(1.0-phi)*(tauB-tauA);
rlx_setA = 1.f/tau;
rlx_setB = 8.f*(2.f-rlx_setA)/(8.f-rlx_setA);
// read the color gradient
nx = ColorGrad[n];
ny = ColorGrad[Np+n];
nz = ColorGrad[2*Np+n];
//...........Normalize the Color Gradient.................................
C = sqrt(nx*nx+ny*ny+nz*nz);
// Guard against division by zero: a zero gradient leaves (nx,ny,nz)=(0,0,0)
// and C=1.0 so the forcing terms below vanish naturally.
if (C==0.0) C=1.0;
nx = nx/C;
ny = ny/C;
nz = nz/C;
// q=0
fq = dist[n];
rho = fq;
m1 = -30.0*fq;
m2 = 12.0*fq;
// q=1
nread = neighborList[n]; // neighbor 2 ( > 10Np => odd part of dist)
fq = dist[nread]; // reading the f1 data into register fq
//fp = dist[10*Np+n];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jx = fq;
m4 = -4.0*fq;
m9 = 2.0*fq;
m10 = -4.0*fq;
// f2 = dist[10*Np+n];
nread = neighborList[n+Np]; // neighbor 1 ( < 10Np => even part of dist)
fq = dist[nread]; // reading the f2 data into register fq
//fq = dist[Np+n];
rho += fq;
m1 -= 11.0*(fq);
m2 -= 4.0*(fq);
jx -= fq;
m4 += 4.0*(fq);
m9 += 2.0*(fq);
m10 -= 4.0*(fq);
// q=3
nread = neighborList[n+2*Np]; // neighbor 4
fq = dist[nread];
//fq = dist[11*Np+n];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jy = fq;
m6 = -4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 = fq;
m12 = -2.0*fq;
// q = 4
nread = neighborList[n+3*Np]; // neighbor 3
fq = dist[nread];
//fq = dist[2*Np+n];
rho+= fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jy -= fq;
m6 += 4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 += fq;
m12 -= 2.0*fq;
// q=5
nread = neighborList[n+4*Np];
fq = dist[nread];
//fq = dist[12*Np+n];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jz = fq;
m8 = -4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 -= fq;
m12 += 2.0*fq;
// q = 6
nread = neighborList[n+5*Np];
fq = dist[nread];
//fq = dist[3*Np+n];
rho+= fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jz -= fq;
m8 += 4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 -= fq;
m12 += 2.0*fq;
// q=7
nread = neighborList[n+6*Np];
fq = dist[nread];
//fq = dist[13*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jy += fq;
m6 += fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 = fq;
m16 = fq;
m17 = -fq;
// q = 8
nread = neighborList[n+7*Np];
fq = dist[nread];
//fq = dist[4*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jy -= fq;
m6 -= fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 += fq;
m16 -= fq;
m17 += fq;
// q=9
nread = neighborList[n+8*Np];
fq = dist[nread];
//fq = dist[14*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jy -= fq;
m6 -= fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 -= fq;
m16 += fq;
m17 += fq;
// q = 10
nread = neighborList[n+9*Np];
fq = dist[nread];
//fq = dist[5*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jy += fq;
m6 += fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 -= fq;
m16 -= fq;
m17 -= fq;
// q=11
nread = neighborList[n+10*Np];
fq = dist[nread];
//fq = dist[15*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jz += fq;
m8 += fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 = fq;
m16 -= fq;
m18 = fq;
// q=12
nread = neighborList[n+11*Np];
fq = dist[nread];
//fq = dist[6*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jz -= fq;
m8 -= fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 += fq;
m16 += fq;
m18 -= fq;
// q=13
nread = neighborList[n+12*Np];
fq = dist[nread];
//fq = dist[16*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jz -= fq;
m8 -= fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 -= fq;
m16 -= fq;
m18 -= fq;
// q=14
nread = neighborList[n+13*Np];
fq = dist[nread];
//fq = dist[7*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jz += fq;
m8 += fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 -= fq;
m16 += fq;
m18 += fq;
// q=15
nread = neighborList[n+14*Np];
fq = dist[nread];
//fq = dist[17*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy += fq;
m6 += fq;
jz += fq;
m8 += fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 = fq;
m17 += fq;
m18 -= fq;
// q=16
nread = neighborList[n+15*Np];
fq = dist[nread];
//fq = dist[8*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy -= fq;
m6 -= fq;
jz -= fq;
m8 -= fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 += fq;
m17 -= fq;
m18 += fq;
// q=17
//fq = dist[18*Np+n];
nread = neighborList[n+16*Np];
fq = dist[nread];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy += fq;
m6 += fq;
jz -= fq;
m8 -= fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 -= fq;
m17 += fq;
m18 += fq;
// q=18
nread = neighborList[n+17*Np];
fq = dist[nread];
//fq = dist[9*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy -= fq;
m6 -= fq;
jz += fq;
m8 += fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 -= fq;
m17 -= fq;
m18 -= fq;
//........................................................................
//..............carry out relaxation process..............................
//..........Toelke, Fruediger et. al. 2006................................
// NOTE(review): this guard appears unreachable — C was set to 1.0 above
// whenever it was zero, so C can never be 0.0 here; (nx,ny,nz) are already
// zero in that case. Kept as-is; confirm intent upstream.
if (C == 0.0) nx = ny = nz = 0.0;
m1 = m1 + rlx_setA*((19*(jx*jx+jy*jy+jz*jz)/rho0 - 11*rho) -alpha*C - m1);
m2 = m2 + rlx_setA*((3*rho - 5.5*(jx*jx+jy*jy+jz*jz)/rho0)- m2);
m4 = m4 + rlx_setB*((-0.6666666666666666*jx)- m4);
m6 = m6 + rlx_setB*((-0.6666666666666666*jy)- m6);
m8 = m8 + rlx_setB*((-0.6666666666666666*jz)- m8);
m9 = m9 + rlx_setA*(((2*jx*jx-jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(2*nx*nx-ny*ny-nz*nz) - m9);
m10 = m10 + rlx_setA*( - m10);
m11 = m11 + rlx_setA*(((jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(ny*ny-nz*nz)- m11);
m12 = m12 + rlx_setA*( - m12);
m13 = m13 + rlx_setA*( (jx*jy/rho0) + 0.5*alpha*C*nx*ny - m13);
m14 = m14 + rlx_setA*( (jy*jz/rho0) + 0.5*alpha*C*ny*nz - m14);
m15 = m15 + rlx_setA*( (jx*jz/rho0) + 0.5*alpha*C*nx*nz - m15);
m16 = m16 + rlx_setB*( - m16);
m17 = m17 + rlx_setB*( - m17);
m18 = m18 + rlx_setB*( - m18);
//.................inverse transformation......................................................
// Each post-collision distribution is written to the OPPOSITE direction's
// neighbor slot (q=1 uses neighborList[n+Np], q=2 uses neighborList[n], ...)
// — the AA-pattern streaming step.
// q=0
fq = mrt_V1*rho-mrt_V2*m1+mrt_V3*m2;
dist[n] = fq;
// q = 1
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jx-m4)+mrt_V6*(m9-m10)+0.16666666*Fx;
nread = neighborList[n+Np];
dist[nread] = fq;
// q=2
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m4-jx)+mrt_V6*(m9-m10) - 0.16666666*Fx;
nread = neighborList[n];
dist[nread] = fq;
// q = 3
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jy-m6)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) + 0.16666666*Fy;
nread = neighborList[n+3*Np];
dist[nread] = fq;
// q = 4
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m6-jy)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) - 0.16666666*Fy;
nread = neighborList[n+2*Np];
dist[nread] = fq;
// q = 5
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jz-m8)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) + 0.16666666*Fz;
nread = neighborList[n+5*Np];
dist[nread] = fq;
// q = 6
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m8-jz)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) - 0.16666666*Fz;
nread = neighborList[n+4*Np];
dist[nread] = fq;
// q = 7
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx+jy)+0.025*(m4+m6)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12+0.25*m13+0.125*(m16-m17) + 0.08333333333*(Fx+Fy);
nread = neighborList[n+7*Np];
dist[nread] = fq;
// q = 8
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jy)-0.025*(m4+m6) +mrt_V7*m9+mrt_V11*m10+mrt_V8*m11
+mrt_V12*m12+0.25*m13+0.125*(m17-m16) - 0.08333333333*(Fx+Fy);
nread = neighborList[n+6*Np];
dist[nread] = fq;
// q = 9
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx-jy)+0.025*(m4-m6)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13+0.125*(m16+m17) + 0.08333333333*(Fx-Fy);
nread = neighborList[n+9*Np];
dist[nread] = fq;
// q = 10
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jy-jx)+0.025*(m6-m4)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13-0.125*(m16+m17)- 0.08333333333*(Fx-Fy);
nread = neighborList[n+8*Np];
dist[nread] = fq;
// q = 11
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jx+jz)+0.025*(m4+m8)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12+0.25*m15+0.125*(m18-m16) + 0.08333333333*(Fx+Fz);
nread = neighborList[n+11*Np];
dist[nread] = fq;
// q = 12
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jz)-0.025*(m4+m8)+
mrt_V7*m9+mrt_V11*m10-mrt_V8*m11-mrt_V12*m12+0.25*m15+0.125*(m16-m18) - 0.08333333333*(Fx+Fz);
nread = neighborList[n+10*Np];
dist[nread]= fq;
// q = 13
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jx-jz)+0.025*(m4-m8)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12-0.25*m15-0.125*(m16+m18) + 0.08333333333*(Fx-Fz);
nread = neighborList[n+13*Np];
dist[nread] = fq;
// q= 14
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jz-jx)+0.025*(m8-m4)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12-0.25*m15+0.125*(m16+m18) - 0.08333333333*(Fx-Fz);
nread = neighborList[n+12*Np];
dist[nread] = fq;
// q = 15
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jy+jz)+0.025*(m6+m8)
-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m17-m18) + 0.08333333333*(Fy+Fz);
nread = neighborList[n+15*Np];
dist[nread] = fq;
// q = 16
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2-0.1*(jy+jz)-0.025*(m6+m8)
-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m18-m17)- 0.08333333333*(Fy+Fz);
nread = neighborList[n+14*Np];
dist[nread] = fq;
// q = 17
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jy-jz)+0.025*(m6-m8)
-mrt_V6*m9-mrt_V7*m10-0.25*m14+0.125*(m17+m18) + 0.08333333333*(Fy-Fz);
nread = neighborList[n+17*Np];
dist[nread] = fq;
// q = 18
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jz-jy)+0.025*(m8-m6)
-mrt_V6*m9-mrt_V7*m10-0.25*m14-0.125*(m17+m18) - 0.08333333333*(Fy-Fz);
nread = neighborList[n+16*Np];
dist[nread] = fq;
// write the velocity (momentum scaled by the local mixture density)
ux = jx / rho0;
uy = jy / rho0;
uz = jz / rho0;
Velocity[n] = ux;
Velocity[Np+n] = uy;
Velocity[2*Np+n] = uz;
}
}
}
__global__ void dvc_ScaLBL_D3Q19_AAeven_ColorMomentum(double *dist, double *Den, double *Velocity,
double *ColorGrad, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
double Fx, double Fy, double Fz, int start, int finish, int Np){
int n;
double fq;
// conserved momemnts
double rho,jx,jy,jz;
// non-conserved moments
double m1,m2,m4,m6,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18;
double nA,nB; // number density
double C,nx,ny,nz; //color gradient magnitude and direction
double ux,uy,uz;
double phi,tau,rho0,rlx_setA,rlx_setB;
const double mrt_V1=0.05263157894736842;
const double mrt_V2=0.012531328320802;
const double mrt_V3=0.04761904761904762;
const double mrt_V4=0.004594820384294068;
const double mrt_V5=0.01587301587301587;
const double mrt_V6=0.0555555555555555555555555;
const double mrt_V7=0.02777777777777778;
const double mrt_V8=0.08333333333333333;
const double mrt_V9=0.003341687552213868;
const double mrt_V10=0.003968253968253968;
const double mrt_V11=0.01388888888888889;
const double mrt_V12=0.04166666666666666;
int S = Np/NBLOCKS/NTHREADS + 1;
for (int s=0; s<S; s++){
//........Get 1-D index for this thread....................
n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
if (n<finish) {
// read the component number densities
nA = Den[n];
nB = Den[Np + n];
// compute phase indicator field
phi=(nA-nB)/(nA+nB);
// local density
rho0=rhoA + 0.5*(1.0-phi)*(rhoB-rhoA);
// local relaxation time
tau=tauA + 0.5*(1.0-phi)*(tauB-tauA);
rlx_setA = 1.f/tau;
rlx_setB = 8.f*(2.f-rlx_setA)/(8.f-rlx_setA);
// read the color gradient
nx = ColorGrad[n];
ny = ColorGrad[Np+n];
nz = ColorGrad[2*Np+n];
//...........Normalize the Color Gradient.................................
C = sqrt(nx*nx+ny*ny+nz*nz);
if (C==0.0) C=1.0;
nx = nx/C;
ny = ny/C;
nz = nz/C;
// q=0
fq = dist[n];
rho = fq;
m1 = -30.0*fq;
m2 = 12.0*fq;
// q=1
fq = dist[2*Np+n];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jx = fq;
m4 = -4.0*fq;
m9 = 2.0*fq;
m10 = -4.0*fq;
// f2 = dist[10*Np+n];
fq = dist[1*Np+n];
rho += fq;
m1 -= 11.0*(fq);
m2 -= 4.0*(fq);
jx -= fq;
m4 += 4.0*(fq);
m9 += 2.0*(fq);
m10 -= 4.0*(fq);
// q=3
fq = dist[4*Np+n];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jy = fq;
m6 = -4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 = fq;
m12 = -2.0*fq;
// q = 4
fq = dist[3*Np+n];
rho+= fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jy -= fq;
m6 += 4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 += fq;
m12 -= 2.0*fq;
// q=5
fq = dist[6*Np+n];
rho += fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jz = fq;
m8 = -4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 -= fq;
m12 += 2.0*fq;
// q = 6
fq = dist[5*Np+n];
rho+= fq;
m1 -= 11.0*fq;
m2 -= 4.0*fq;
jz -= fq;
m8 += 4.0*fq;
m9 -= fq;
m10 += 2.0*fq;
m11 -= fq;
m12 += 2.0*fq;
// q=7
fq = dist[8*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jy += fq;
m6 += fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 = fq;
m16 = fq;
m17 = -fq;
// q = 8
fq = dist[7*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jy -= fq;
m6 -= fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 += fq;
m16 -= fq;
m17 += fq;
// q=9
fq = dist[10*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jy -= fq;
m6 -= fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 -= fq;
m16 += fq;
m17 += fq;
// q = 10
fq = dist[9*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jy += fq;
m6 += fq;
m9 += fq;
m10 += fq;
m11 += fq;
m12 += fq;
m13 -= fq;
m16 -= fq;
m17 -= fq;
// q=11
fq = dist[12*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jz += fq;
m8 += fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 = fq;
m16 -= fq;
m18 = fq;
// q=12
fq = dist[11*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jz -= fq;
m8 -= fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 += fq;
m16 += fq;
m18 -= fq;
// q=13
fq = dist[14*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx += fq;
m4 += fq;
jz -= fq;
m8 -= fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 -= fq;
m16 -= fq;
m18 -= fq;
// q=14
fq = dist[13*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jx -= fq;
m4 -= fq;
jz += fq;
m8 += fq;
m9 += fq;
m10 += fq;
m11 -= fq;
m12 -= fq;
m15 -= fq;
m16 += fq;
m18 += fq;
// q=15
fq = dist[16*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy += fq;
m6 += fq;
jz += fq;
m8 += fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 = fq;
m17 += fq;
m18 -= fq;
// q=16
fq = dist[15*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy -= fq;
m6 -= fq;
jz -= fq;
m8 -= fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 += fq;
m17 -= fq;
m18 += fq;
// q=17
fq = dist[18*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy += fq;
m6 += fq;
jz -= fq;
m8 -= fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 -= fq;
m17 += fq;
m18 += fq;
// q=18
fq = dist[17*Np+n];
rho += fq;
m1 += 8.0*fq;
m2 += fq;
jy -= fq;
m6 -= fq;
jz += fq;
m8 += fq;
m9 -= 2.0*fq;
m10 -= 2.0*fq;
m14 -= fq;
m17 -= fq;
m18 -= fq;
//........................................................................
//..............carry out relaxation process..............................
//..........Toelke, Fruediger et. al. 2006................................
if (C == 0.0) nx = ny = nz = 0.0;
m1 = m1 + rlx_setA*((19*(jx*jx+jy*jy+jz*jz)/rho0 - 11*rho) -alpha*C - m1);
m2 = m2 + rlx_setA*((3*rho - 5.5*(jx*jx+jy*jy+jz*jz)/rho0)- m2);
m4 = m4 + rlx_setB*((-0.6666666666666666*jx)- m4);
m6 = m6 + rlx_setB*((-0.6666666666666666*jy)- m6);
m8 = m8 + rlx_setB*((-0.6666666666666666*jz)- m8);
m9 = m9 + rlx_setA*(((2*jx*jx-jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(2*nx*nx-ny*ny-nz*nz) - m9);
m10 = m10 + rlx_setA*( - m10);
m11 = m11 + rlx_setA*(((jy*jy-jz*jz)/rho0) + 0.5*alpha*C*(ny*ny-nz*nz)- m11);
m12 = m12 + rlx_setA*( - m12);
m13 = m13 + rlx_setA*( (jx*jy/rho0) + 0.5*alpha*C*nx*ny - m13);
m14 = m14 + rlx_setA*( (jy*jz/rho0) + 0.5*alpha*C*ny*nz - m14);
m15 = m15 + rlx_setA*( (jx*jz/rho0) + 0.5*alpha*C*nx*nz - m15);
m16 = m16 + rlx_setB*( - m16);
m17 = m17 + rlx_setB*( - m17);
m18 = m18 + rlx_setB*( - m18);
//.......................................................................................................
//.................inverse transformation......................................................
// q=0
fq = mrt_V1*rho-mrt_V2*m1+mrt_V3*m2;
dist[n] = fq;
// q = 1
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jx-m4)+mrt_V6*(m9-m10) + 0.16666666*Fx;
dist[1*Np+n] = fq;
// q=2
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m4-jx)+mrt_V6*(m9-m10) - 0.16666666*Fx;
dist[2*Np+n] = fq;
// q = 3
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jy-m6)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) + 0.16666666*Fy;
dist[3*Np+n] = fq;
// q = 4
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m6-jy)+mrt_V7*(m10-m9)+mrt_V8*(m11-m12) - 0.16666666*Fy;
dist[4*Np+n] = fq;
// q = 5
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(jz-m8)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) + 0.16666666*Fz;
dist[5*Np+n] = fq;
// q = 6
fq = mrt_V1*rho-mrt_V4*m1-mrt_V5*m2+0.1*(m8-jz)+mrt_V7*(m10-m9)+mrt_V8*(m12-m11) - 0.16666666*Fz;
dist[6*Np+n] = fq;
// q = 7
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx+jy)+0.025*(m4+m6)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12+0.25*m13+0.125*(m16-m17) + 0.08333333333*(Fx+Fy);
dist[7*Np+n] = fq;
// q = 8
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jy)-0.025*(m4+m6) +mrt_V7*m9+mrt_V11*m10+mrt_V8*m11
+mrt_V12*m12+0.25*m13+0.125*(m17-m16) - 0.08333333333*(Fx+Fy);
dist[8*Np+n] = fq;
// q = 9
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jx-jy)+0.025*(m4-m6)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13+0.125*(m16+m17) + 0.08333333333*(Fx-Fy);
dist[9*Np+n] = fq;
// q = 10
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2+0.1*(jy-jx)+0.025*(m6-m4)+
mrt_V7*m9+mrt_V11*m10+mrt_V8*m11+mrt_V12*m12-0.25*m13-0.125*(m16+m17)- 0.08333333333*(Fx-Fy);
dist[10*Np+n] = fq;
// q = 11
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jx+jz)+0.025*(m4+m8)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12+0.25*m15+0.125*(m18-m16) + 0.08333333333*(Fx+Fz);
dist[11*Np+n] = fq;
// q = 12
fq = mrt_V1*rho+mrt_V9*m1+mrt_V10*m2-0.1*(jx+jz)-0.025*(m4+m8)+
mrt_V7*m9+mrt_V11*m10-mrt_V8*m11-mrt_V12*m12+0.25*m15+0.125*(m16-m18)-0.08333333333*(Fx+Fz);
dist[12*Np+n] = fq;
// q = 13
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jx-jz)+0.025*(m4-m8)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12-0.25*m15-0.125*(m16+m18) + 0.08333333333*(Fx-Fz);
dist[13*Np+n] = fq;
// q= 14
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jz-jx)+0.025*(m8-m4)
+mrt_V7*m9+mrt_V11*m10-mrt_V8*m11
-mrt_V12*m12-0.25*m15+0.125*(m16+m18) - 0.08333333333*(Fx-Fz);
dist[14*Np+n] = fq;
// q = 15
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jy+jz)+0.025*(m6+m8)
-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m17-m18) + 0.08333333333*(Fy+Fz);
dist[15*Np+n] = fq;
// q = 16
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2-0.1*(jy+jz)-0.025*(m6+m8)
-mrt_V6*m9-mrt_V7*m10+0.25*m14+0.125*(m18-m17)- 0.08333333333*(Fy+Fz);
dist[16*Np+n] = fq;
// q = 17
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jy-jz)+0.025*(m6-m8)
-mrt_V6*m9-mrt_V7*m10-0.25*m14+0.125*(m17+m18) + 0.08333333333*(Fy-Fz);
dist[17*Np+n] = fq;
// q = 18
fq = mrt_V1*rho+mrt_V9*m1
+mrt_V10*m2+0.1*(jz-jy)+0.025*(m8-m6)
-mrt_V6*m9-mrt_V7*m10-0.25*m14-0.125*(m17+m18) - 0.08333333333*(Fy-Fz);
dist[18*Np+n] = fq;
//........................................................................
// write the velocity
ux = jx / rho0;
uy = jy / rho0;
uz = jz / rho0;
Velocity[n] = ux;
Velocity[Np+n] = uy;
Velocity[2*Np+n] = uz;
}
}
}
__global__ void dvc_ScaLBL_D3Q19_AAeven_ColorMass(double *Aq, double *Bq, double *Den,
		double *Velocity, double *ColorGrad, double beta, int start, int finish, int Np){
	// Even-timestep mass-transport (D3Q7) collision for the two-fluid color model.
	// For each site n in [start, finish): read the component densities (Den),
	// velocity and color gradient, then write the recolored distributions Aq/Bq
	// back in the direct (AA-even) layout. beta controls interface sharpening.
	int n;
	double nA,nB;					// component number densities
	double a1,b1,a2,b2,nAB,delta;	// recolored distribution values
	double C,nx,ny,nz;				// color gradient magnitude and direction
	double ux,uy,uz;				// fluid velocity
	int S = Np/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		//........Get 1-D index for this thread....................
		n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
		if (n<finish) {
			// read the component number densities
			nA = Den[n];
			nB = Den[Np + n];
			// read the velocity
			ux = Velocity[n];
			uy = Velocity[Np+n];
			uz = Velocity[2*Np+n];
			// read the color gradient
			nx = ColorGrad[n];
			ny = ColorGrad[Np+n];
			nz = ColorGrad[2*Np+n];
			//...........Normalize the Color Gradient.................................
			C = sqrt(nx*nx+ny*ny+nz*nz);
			if (C==0.0) C=1.0;	// avoid divide-by-zero in single-phase regions
			nx = nx/C;
			ny = ny/C;
			nz = nz/C;
			// Instantiate mass transport distributions
			// Stationary value - distribution 0
			nAB = 1.0/(nA+nB);
			Aq[n] = 0.3333333333333333*nA;
			Bq[n] = 0.3333333333333333*nB;
			//...............................................
			// q = 1,2: Cq = {1,0,0}
			delta = beta*nA*nB*nAB*0.1111111111111111*nx;
			if (!(nA*nB*nAB>0)) delta=0;	// no recoloring where a phase is absent
			a1 = nA*(0.1111111111111111*(1+4.5*ux))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*ux))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*ux))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*ux))+delta;
			Aq[1*Np+n] = a1;
			Bq[1*Np+n] = b1;
			Aq[2*Np+n] = a2;
			Bq[2*Np+n] = b2;
			//...............................................
			// q = 3,4: Cq = {0,1,0}
			delta = beta*nA*nB*nAB*0.1111111111111111*ny;
			if (!(nA*nB*nAB>0)) delta=0;
			a1 = nA*(0.1111111111111111*(1+4.5*uy))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*uy))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*uy))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*uy))+delta;
			Aq[3*Np+n] = a1;
			Bq[3*Np+n] = b1;
			Aq[4*Np+n] = a2;
			Bq[4*Np+n] = b2;
			//...............................................
			// q = 5,6: Cq = {0,0,1}
			delta = beta*nA*nB*nAB*0.1111111111111111*nz;
			if (!(nA*nB*nAB>0)) delta=0;
			a1 = nA*(0.1111111111111111*(1+4.5*uz))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*uz))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*uz))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*uz))+delta;
			Aq[5*Np+n] = a1;
			Bq[5*Np+n] = b1;
			Aq[6*Np+n] = a2;
			Bq[6*Np+n] = b2;
			//...............................................
		}
	}
}
__global__ void dvc_ScaLBL_D3Q19_AAodd_ColorMass(int *neighborList, double *Aq, double *Bq, double *Den,
		double *Velocity, double *ColorGrad, double beta, int start, int finish, int Np){
	// Odd-timestep mass-transport (D3Q7) collision for the two-fluid color model.
	// Same physics as the AA-even version, but the non-stationary writes go
	// through neighborList (AA-odd access pattern).
	int n,nread;
	double nA,nB;					// component number densities
	double a1,b1,a2,b2,nAB,delta;	// recolored distribution values
	double C,nx,ny,nz;				// color gradient magnitude and direction
	double ux,uy,uz;				// fluid velocity
	int S = Np/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		//........Get 1-D index for this thread....................
		n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
		if (n<finish) {
			// read the component number densities
			nA = Den[n];
			nB = Den[Np + n];
			// read the velocity
			ux = Velocity[n];
			uy = Velocity[Np+n];
			uz = Velocity[2*Np+n];
			// read the color gradient
			nx = ColorGrad[n];
			ny = ColorGrad[Np+n];
			nz = ColorGrad[2*Np+n];
			//...........Normalize the Color Gradient.................................
			C = sqrt(nx*nx+ny*ny+nz*nz);
			if (C==0.0) C=1.0;	// avoid divide-by-zero in single-phase regions
			nx = nx/C;
			ny = ny/C;
			nz = nz/C;
			// Instantiate mass transport distributions
			// Stationary value - distribution 0
			nAB = 1.0/(nA+nB);
			Aq[n] = 0.3333333333333333*nA;
			Bq[n] = 0.3333333333333333*nB;
			//...............................................
			// q = 1,2: Cq = {1,0,0}
			delta = beta*nA*nB*nAB*0.1111111111111111*nx;
			if (!(nA*nB*nAB>0)) delta=0;	// no recoloring where a phase is absent
			a1 = nA*(0.1111111111111111*(1+4.5*ux))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*ux))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*ux))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*ux))+delta;
			// q = 1
			nread = neighborList[n+Np];
			Aq[nread] = a1;
			Bq[nread] = b1;
			// q=2
			nread = neighborList[n];
			Aq[nread] = a2;
			Bq[nread] = b2;
			//...............................................
			// q = 3,4: Cq = {0,1,0}
			delta = beta*nA*nB*nAB*0.1111111111111111*ny;
			if (!(nA*nB*nAB>0)) delta=0;
			a1 = nA*(0.1111111111111111*(1+4.5*uy))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*uy))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*uy))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*uy))+delta;
			// q = 3
			nread = neighborList[n+3*Np];
			Aq[nread] = a1;
			Bq[nread] = b1;
			// q = 4
			nread = neighborList[n+2*Np];
			Aq[nread] = a2;
			Bq[nread] = b2;
			//...............................................
			// q = 5,6: Cq = {0,0,1}
			delta = beta*nA*nB*nAB*0.1111111111111111*nz;
			if (!(nA*nB*nAB>0)) delta=0;
			a1 = nA*(0.1111111111111111*(1+4.5*uz))+delta;
			b1 = nB*(0.1111111111111111*(1+4.5*uz))-delta;
			a2 = nA*(0.1111111111111111*(1-4.5*uz))-delta;
			b2 = nB*(0.1111111111111111*(1-4.5*uz))+delta;
			// q = 5
			nread = neighborList[n+5*Np];
			Aq[nread] = a1;
			Bq[nread] = b1;
			// q = 6
			nread = neighborList[n+4*Np];
			Aq[nread] = a2;
			Bq[nread] = b2;
			//...............................................
		}
	}
}
__global__ void dvc_ScaLBL_D3Q7_AAodd_PhaseField(int *neighborList, int *Map, double *Aq, double *Bq,
		double *Den, double *Phi, int start, int finish, int Np){
	// Recompute the two component number densities and the phase indicator
	// field from the D3Q7 mass-transport distributions. Odd timestep: the
	// non-stationary reads follow the neighbor list.
	int S = Np/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		// 1-D distribution index handled by this thread
		int n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
		if (n<finish) {
			// q = 0 (stationary) contribution
			double nA = Aq[n];
			double nB = Bq[n];
			// accumulate q = 1..6; the read location for direction q is
			// stored at neighborList offset (q-1)*Np
			for (int q = 1; q < 7; q++){
				int nread = neighborList[n + (q-1)*Np];
				nA += Aq[nread];
				nB += Bq[nread];
			}
			// save the number densities
			Den[n] = nA;
			Den[Np+n] = nB;
			// save the phase indicator field
			int idx = Map[n];
			Phi[idx] = (nA-nB)/(nA+nB);
		}
	}
}
__global__ void dvc_ScaLBL_D3Q7_AAeven_PhaseField(int *Map, double *Aq, double *Bq, double *Den, double *Phi,
		int start, int finish, int Np){
	// Recompute the two component number densities and the phase indicator
	// field from the D3Q7 mass-transport distributions. Even timestep: the
	// opposite-direction pairs are stored swapped in the direct layout.
	int S = Np/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		// 1-D distribution index handled by this thread
		int n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
		if (n<finish) {
			// q = 0 (stationary) contribution
			double nA = Aq[n];
			double nB = Bq[n];
			// accumulate each (q, q+1) pair; the +direction member lives at
			// slot (q+1)*Np and the -direction member at slot q*Np
			for (int q = 1; q < 7; q += 2){
				nA += Aq[(q+1)*Np+n];
				nB += Bq[(q+1)*Np+n];
				nA += Aq[q*Np+n];
				nB += Bq[q*Np+n];
			}
			// save the number densities
			Den[n] = nA;
			Den[Np+n] = nB;
			// save the phase indicator field
			int idx = Map[n];
			Phi[idx] = (nA-nB)/(nA+nB);
		}
	}
}
__global__ void dvc_ScaLBL_D3Q19_Gradient(int *Map, double *phi, double *ColorGrad, int start, int finish, int Np,
		int strideY, int strideZ){
	// Compute the color gradient of the phase field with the 18-point D3Q19
	// stencil. Map translates the compact index idx into the regular-grid
	// index ijk used to address phi. The host wrapper passes strideY = Nx and
	// strideZ = Nx*Ny, so offsets of +-1 / +-strideY / +-strideZ step in
	// +-x / +-y / +-z respectively. Face neighbors carry weight 1, edge
	// neighbors weight 0.5; the result is written to ColorGrad (SoA layout).
	int idx,ijk,nn;
	// phase-field values at the 18 stencil neighbors
	double m1,m2,m3,m4,m5,m6,m7,m8,m9;
	double m10,m11,m12,m13,m14,m15,m16,m17,m18;
	double nx,ny,nz;	// gradient components
	int S = Np/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		//........Get 1-D index for this thread....................
		idx = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
		if (idx<finish) {
			// Get the 1D index based on regular data layout
			ijk = Map[idx];
			//.......Back out the 3D indices for node n..............
			//k = n/(Nx*Ny);
			//j = (n-Nx*Ny*k)/Nx;
			//i = n-Nx*Ny*k-Nx*j;
			//........................................................................
			//........Get 1-D index for this thread....................
			// n = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x;
			//........................................................................
			// COMPUTE THE COLOR GRADIENT
			//........................................................................
			//.................Read Phase Indicator Values............................
			// NOTE: no bounds checks here -- assumes Map never points at the
			// outermost layer of the grid, so all 18 neighbor reads are in range.
			//........................................................................
			nn = ijk-1;							// neighbor index (get convention)
			m1 = phi[nn];						// get neighbor for phi - 1 (-x)
			//........................................................................
			nn = ijk+1;							// neighbor index (get convention)
			m2 = phi[nn];						// get neighbor for phi - 2 (+x)
			//........................................................................
			nn = ijk-strideY;					// neighbor index (get convention)
			m3 = phi[nn];						// get neighbor for phi - 3 (-y)
			//........................................................................
			nn = ijk+strideY;					// neighbor index (get convention)
			m4 = phi[nn];						// get neighbor for phi - 4 (+y)
			//........................................................................
			nn = ijk-strideZ;					// neighbor index (get convention)
			m5 = phi[nn];						// get neighbor for phi - 5 (-z)
			//........................................................................
			nn = ijk+strideZ;					// neighbor index (get convention)
			m6 = phi[nn];						// get neighbor for phi - 6 (+z)
			//........................................................................
			nn = ijk-strideY-1;					// neighbor index (get convention)
			m7 = phi[nn];						// get neighbor for phi - 7 (-x,-y)
			//........................................................................
			nn = ijk+strideY+1;					// neighbor index (get convention)
			m8 = phi[nn];						// get neighbor for phi - 8 (+x,+y)
			//........................................................................
			nn = ijk+strideY-1;					// neighbor index (get convention)
			m9 = phi[nn];						// get neighbor for phi - 9 (-x,+y)
			//........................................................................
			nn = ijk-strideY+1;					// neighbor index (get convention)
			m10 = phi[nn];						// get neighbor for phi - 10 (+x,-y)
			//........................................................................
			nn = ijk-strideZ-1;					// neighbor index (get convention)
			m11 = phi[nn];						// get neighbor for phi - 11 (-x,-z)
			//........................................................................
			nn = ijk+strideZ+1;					// neighbor index (get convention)
			m12 = phi[nn];						// get neighbor for phi - 12 (+x,+z)
			//........................................................................
			nn = ijk+strideZ-1;					// neighbor index (get convention)
			m13 = phi[nn];						// get neighbor for phi - 13 (-x,+z)
			//........................................................................
			nn = ijk-strideZ+1;					// neighbor index (get convention)
			m14 = phi[nn];						// get neighbor for phi - 14 (+x,-z)
			//........................................................................
			nn = ijk-strideZ-strideY;			// neighbor index (get convention)
			m15 = phi[nn];						// get neighbor for phi - 15 (-y,-z)
			//........................................................................
			nn = ijk+strideZ+strideY;			// neighbor index (get convention)
			m16 = phi[nn];						// get neighbor for phi - 16 (+y,+z)
			//........................................................................
			nn = ijk+strideZ-strideY;			// neighbor index (get convention)
			m17 = phi[nn];						// get neighbor for phi - 17 (-y,+z)
			//........................................................................
			nn = ijk-strideZ+strideY;			// neighbor index (get convention)
			m18 = phi[nn];						// get neighbor for phi - 18 (+y,-z)
			//............Compute the Color Gradient...................................
			// central differences: faces weighted 1.0, edges weighted 0.5
			nx = -(m1-m2+0.5*(m7-m8+m9-m10+m11-m12+m13-m14));
			ny = -(m3-m4+0.5*(m7-m8-m9+m10+m15-m16+m17-m18));
			nz = -(m5-m6+0.5*(m11-m12-m13+m14+m15-m16-m17+m18));
			//...............................................
			//...Store the Color Gradient....................
			ColorGrad[idx] = nx;
			ColorGrad[Np+idx] = ny;
			ColorGrad[2*Np+idx] = nz;
			//...............................................
		}
	}
}
__global__ void dvc_ScaLBL_PhaseField_Init(int *Map, double *Phi, double *Den, double *Aq, double *Bq, int start, int finish, int Np){
	// Initialize the component densities and the D3Q7 mass-transport
	// distributions (Aq/Bq) from the phase indicator field Phi. Phi is
	// clamped to [-1, 1] and mapped linearly onto (nA, nB) with nA+nB = 1.
	int S = Np/NBLOCKS/NTHREADS + 1;
	for (int s=0; s<S; s++){
		// 1-D index handled by this thread
		int idx = S*blockIdx.x*blockDim.x + s*blockDim.x + threadIdx.x + start;
		if (idx<finish) {
			int n = Map[idx];
			double phi = Phi[n];
			double nA, nB;
			if (phi > 1.f){			// pure component A
				nA = 1.0; nB = 0.f;
			}
			else if (phi < -1.f){	// pure component B
				nB = 1.0; nA = 0.f;
			}
			else{					// linear interpolation across the interface
				nA = 0.5*(phi+1.f);
				nB = 0.5*(1.f-phi);
			}
			Den[idx] = nA;
			Den[Np+idx] = nB;
			// equilibrium D3Q7 distributions: w0 = 1/3, w1..6 = 1/9
			Aq[idx] = 0.3333333333333333*nA;
			for (int q = 1; q < 7; q++)
				Aq[q*Np+idx] = 0.1111111111111111*nA;
			Bq[idx] = 0.3333333333333333*nB;
			for (int q = 1; q < 7; q++)
				Bq[q*Np+idx] = 0.1111111111111111*nB;
		}
	}
}
extern "C" void ScaLBL_SetSlice_z(double *Phi, double value, int Nx, int Ny, int Nz, int Slice){
	// Set Phi to a constant value on one z-slice (one thread per in-plane site).
	int GRID = Nx*Ny / 512 + 1;
	dvc_ScaLBL_SetSlice_z<<<GRID,512>>>(Phi,value,Nx,Ny,Nz,Slice);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ScaLBL_SetSlice_z: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_Color_Init(char *ID, double *Den, double *Phi, double das, double dbs, int Nx, int Ny, int Nz){
	// Initialize the color-model density and phase fields from the domain labels ID.
	dvc_ScaLBL_Color_Init<<<NBLOCKS,NTHREADS >>>(ID, Den, Phi, das, dbs, Nx, Ny, Nz);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ScaLBL_Color_Init: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_Color_InitDistance(char *ID, double *Den, double *Phi, double *Distance,
		double das, double dbs, double beta, double xp, int Nx, int Ny, int Nz){
	// Initialize the color-model fields using a signed-distance map.
	dvc_ScaLBL_Color_InitDistance<<<NBLOCKS,NTHREADS >>>(ID, Den, Phi, Distance, das, dbs, beta, xp, Nx, Ny, Nz);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ScaLBL_Color_InitDistance: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_D3Q19_ColorGradient(char *ID, double *phi, double *ColorGrad, int Nx, int Ny, int Nz){
	// Compute the color gradient of phi on the regular (ID-labelled) grid.
	dvc_ScaLBL_D3Q19_ColorGradient<<<NBLOCKS,NTHREADS >>>(ID, phi, ColorGrad, Nx, Ny, Nz);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ScaLBL_D3Q19_ColorGradient: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ColorCollide( char *ID, double *disteven, double *distodd, double *ColorGrad,
		double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
		double alpha, double beta, double Fx, double Fy, double Fz, bool pBC){
	// Legacy (even/odd split-storage) color collision kernel launcher.
	dvc_ColorCollide<<<NBLOCKS,NTHREADS >>>( ID, disteven, distodd, ColorGrad,Velocity, Nx, Ny, Nz,rlx_setA, rlx_setB,
			alpha, beta, Fx, Fy, Fz, pBC);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ColorCollide: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_D3Q19_ColorCollide( char *ID, double *disteven, double *distodd, double *phi, double *ColorGrad,
		double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
		double alpha, double beta, double Fx, double Fy, double Fz){
	// D3Q19 color-model collision on the regular grid (even/odd split storage).
	dvc_ScaLBL_D3Q19_ColorCollide<<<NBLOCKS,NTHREADS >>>(ID, disteven, distodd, phi, ColorGrad, Velocity, Nx, Ny, Nz, rlx_setA, rlx_setB,
			alpha, beta, Fx, Fy, Fz);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ScaLBL_D3Q19_ColorCollide: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void DensityStreamD3Q7(char *ID, double *Den, double *Copy, double *Phi, double *ColorGrad, double *Velocity,
		double beta, int Nx, int Ny, int Nz, bool pBC){
	// Stream the D3Q7 component densities on the regular grid.
	dvc_DensityStreamD3Q7<<<NBLOCKS,NTHREADS >>>(ID, Den, Copy, Phi, ColorGrad, Velocity, beta, Nx, Ny, Nz, pBC);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in DensityStreamD3Q7: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_ComputePhaseField(char *ID, double *Phi, double *Den, int N){
	// Recompute the phase indicator field Phi from the component densities.
	dvc_ScaLBL_ComputePhaseField<<<NBLOCKS,NTHREADS >>>(ID, Phi, Den, N);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ScaLBL_ComputePhaseField: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_D3Q7_ColorCollideMass(char *ID, double *A_even, double *A_odd, double *B_even, double *B_odd,
		double *Den, double *Phi, double *ColorGrad, double *Velocity, double beta, int N, bool pBC){
	// D3Q7 mass-transport collision (even/odd split storage, regular grid).
	dvc_ScaLBL_D3Q7_ColorCollideMass<<<NBLOCKS,NTHREADS >>>(ID, A_even, A_odd, B_even, B_odd, Den, Phi, ColorGrad, Velocity, beta, N, pBC);
	// check for launch-configuration errors (consistent with the other wrappers)
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		printf("CUDA error in ScaLBL_D3Q7_ColorCollideMass: %s \n",cudaGetErrorString(err));
	}
}
// Pressure Boundary Conditions Functions
extern "C" void ScaLBL_D3Q19_AAeven_Color(int *Map, double *dist, double *Aq, double *Bq, double *Den, double *Phi,
		double *Vel, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
		double Fx, double Fy, double Fz, int strideY, int strideZ, int start, int finish, int Np){
	// Host wrapper: launch the even-timestep D3Q19 color collision kernel.
	cudaProfilerStart();
	// request a larger L1 split for this kernel
	cudaFuncSetCacheConfig(dvc_ScaLBL_D3Q19_AAeven_Color, cudaFuncCachePreferL1);
	dvc_ScaLBL_D3Q19_AAeven_Color<<<NBLOCKS,NTHREADS >>>(Map, dist, Aq, Bq, Den, Phi, Vel, rhoA, rhoB, tauA, tauB,
			alpha, beta, Fx, Fy, Fz, strideY, strideZ, start, finish, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_D3Q19_AAeven_Color: %s \n",cudaGetErrorString(launchError));
	}
	cudaProfilerStop();
}
extern "C" void ScaLBL_D3Q19_AAodd_Color(int *d_neighborList, int *Map, double *dist, double *Aq, double *Bq, double *Den,
		double *Phi, double *Vel, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
		double Fx, double Fy, double Fz, int strideY, int strideZ, int start, int finish, int Np){
	// Host wrapper: launch the odd-timestep D3Q19 color collision kernel.
	cudaProfilerStart();
	// request a larger L1 split for this kernel
	cudaFuncSetCacheConfig(dvc_ScaLBL_D3Q19_AAodd_Color, cudaFuncCachePreferL1);
	dvc_ScaLBL_D3Q19_AAodd_Color<<<NBLOCKS,NTHREADS >>>(d_neighborList, Map, dist, Aq, Bq, Den, Phi, Vel,
			rhoA, rhoB, tauA, tauB, alpha, beta, Fx, Fy, Fz, strideY, strideZ, start, finish, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_D3Q19_AAodd_Color: %s \n",cudaGetErrorString(launchError));
	}
	cudaProfilerStop();
}
extern "C" void ScaLBL_D3Q7_AAodd_PhaseField(int *NeighborList, int *Map, double *Aq, double *Bq,
		double *Den, double *Phi, int start, int finish, int Np){
	// Host wrapper: launch the odd-timestep D3Q7 phase-field update kernel.
	cudaProfilerStart();
	dvc_ScaLBL_D3Q7_AAodd_PhaseField<<<NBLOCKS,NTHREADS >>>(NeighborList, Map, Aq, Bq, Den, Phi, start, finish, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_D3Q7_AAodd_PhaseField: %s \n",cudaGetErrorString(launchError));
	}
	cudaProfilerStop();
}
extern "C" void ScaLBL_D3Q7_AAeven_PhaseField(int *Map, double *Aq, double *Bq, double *Den, double *Phi,
		int start, int finish, int Np){
	// Host wrapper: launch the even-timestep D3Q7 phase-field update kernel.
	cudaProfilerStart();
	dvc_ScaLBL_D3Q7_AAeven_PhaseField<<<NBLOCKS,NTHREADS >>>(Map, Aq, Bq, Den, Phi, start, finish, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_D3Q7_AAeven_PhaseField: %s \n",cudaGetErrorString(launchError));
	}
	cudaProfilerStop();
}
extern "C" void ScaLBL_D3Q19_Gradient(int *Map, double *Phi, double *ColorGrad, int start, int finish, int Np,
		int Nx, int Ny, int Nz){
	// Compute the D3Q19 color gradient of Phi on the compact layout.
	// Row/plane strides for the regular grid that Map points into.
	int strideY=Nx;
	int strideZ=Nx*Ny;
	dvc_ScaLBL_D3Q19_Gradient<<<NBLOCKS,NTHREADS >>>(Map, Phi, ColorGrad, start, finish, Np, strideY, strideZ);
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		// fixed: message previously named the wrong function ("ColorGrad")
		printf("CUDA error in ScaLBL_D3Q19_Gradient: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_PhaseField_Init(int *Map, double *Phi, double *Den, double *Aq, double *Bq, int start, int finish, int Np){
	// Host wrapper: initialize densities and D3Q7 distributions from Phi.
	dvc_ScaLBL_PhaseField_Init<<<NBLOCKS,NTHREADS >>>(Map, Phi, Den, Aq, Bq, start, finish, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_PhaseField_Init: %s \n",cudaGetErrorString(launchError));
	}
}
extern "C" void ScaLBL_D3Q19_AAeven_ColorMomentum(double *dist, double *Den, double *Vel,
		double *ColorGrad, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
		double Fx, double Fy, double Fz, int start, int finish, int Np){
	// Host wrapper: launch the even-timestep D3Q19 momentum collision kernel.
	// request a larger L1 split for this kernel
	cudaFuncSetCacheConfig(dvc_ScaLBL_D3Q19_AAeven_ColorMomentum, cudaFuncCachePreferL1);
	dvc_ScaLBL_D3Q19_AAeven_ColorMomentum<<<NBLOCKS,NTHREADS >>>(dist, Den, Vel, ColorGrad, rhoA, rhoB, tauA, tauB,
			alpha, beta, Fx, Fy, Fz, start, finish, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_D3Q19_AAeven_ColorMomentum: %s \n",cudaGetErrorString(launchError));
	}
}
extern "C" void ScaLBL_D3Q19_AAodd_ColorMomentum(int *d_neighborList, double *dist, double *Den, double *Vel,
		double *ColorGrad, double rhoA, double rhoB, double tauA, double tauB, double alpha, double beta,
		double Fx, double Fy, double Fz, int start, int finish, int Np){
	// Host wrapper: launch the odd-timestep D3Q19 momentum collision kernel.
	// request a larger L1 split for this kernel
	cudaFuncSetCacheConfig(dvc_ScaLBL_D3Q19_AAodd_ColorMomentum, cudaFuncCachePreferL1);
	dvc_ScaLBL_D3Q19_AAodd_ColorMomentum<<<NBLOCKS,NTHREADS >>>(d_neighborList, dist, Den, Vel, ColorGrad,
			rhoA, rhoB, tauA, tauB, alpha, beta, Fx, Fy, Fz, start, finish, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_D3Q19_AAodd_ColorMomentum: %s \n",cudaGetErrorString(launchError));
	}
}
extern "C" void ScaLBL_D3Q19_AAeven_ColorMass(double *Aq, double *Bq, double *Den, double *Vel,
		double *ColorGrad, double beta, int start, int finish, int Np){
	// Even-timestep mass-transport collision for the color model.
	cudaFuncSetCacheConfig(dvc_ScaLBL_D3Q19_AAeven_ColorMass, cudaFuncCachePreferL1);
	dvc_ScaLBL_D3Q19_AAeven_ColorMass<<<NBLOCKS,NTHREADS >>>(Aq, Bq, Den, Vel, ColorGrad, beta, start, finish, Np);
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		// fixed: message previously named the wrong function ("AAeven_Color")
		printf("CUDA error in ScaLBL_D3Q19_AAeven_ColorMass: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_D3Q19_AAodd_ColorMass(int *d_neighborList, double *Aq, double *Bq, double *Den, double *Vel,
		double *ColorGrad, double beta, int start, int finish, int Np){
	// Odd-timestep mass-transport collision for the color model.
	cudaFuncSetCacheConfig(dvc_ScaLBL_D3Q19_AAodd_ColorMass, cudaFuncCachePreferL1);
	dvc_ScaLBL_D3Q19_AAodd_ColorMass<<<NBLOCKS,NTHREADS >>>(d_neighborList, Aq, Bq, Den, Vel, ColorGrad, beta, start, finish, Np);
	cudaError_t err = cudaGetLastError();
	if (cudaSuccess != err){
		// fixed: message previously named the wrong function ("AAodd_Color")
		printf("CUDA error in ScaLBL_D3Q19_AAodd_ColorMass: %s \n",cudaGetErrorString(err));
	}
}
extern "C" void ScaLBL_Color_BC_z(int *list, int *Map, double *Phi, double *Den, double vA, double vB, int count, int Np){
	// Apply the color boundary condition on the low-z boundary sites in 'list'.
	int GRID = count / 512 + 1;
	dvc_ScaLBL_Color_BC_z<<<GRID,512>>>(list, Map, Phi, Den, vA, vB, count, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_Color_BC_z: %s \n",cudaGetErrorString(launchError));
	}
}
extern "C" void ScaLBL_Color_BC_Z(int *list, int *Map, double *Phi, double *Den, double vA, double vB, int count, int Np){
	// Apply the color boundary condition on the high-z boundary sites in 'list'.
	int GRID = count / 512 + 1;
	dvc_ScaLBL_Color_BC_Z<<<GRID,512>>>(list, Map, Phi, Den, vA, vB, count, Np);
	cudaError_t launchError = cudaGetLastError();
	if (launchError != cudaSuccess){
		printf("CUDA error in ScaLBL_Color_BC_Z: %s \n",cudaGetErrorString(launchError));
	}
}
|
20,067 | #include "includes.h"
__global__ void maxKernel(float *array, int size, float* max)
{
	// Per-block max reduction over 'array'; each block writes its partial
	// maximum to max[blockIdx.x]. Requires blockDim.x to be a power of two
	// and dynamic shared memory of blockDim.x * sizeof(float).
	//
	// Fixes vs. the original:
	//  - partial max is folded across grid-stride iterations (was overwritten)
	//  - the second load array[i + blockDim.x] is bounds-checked (was OOB)
	//  - __syncthreads() only at uniform points (was inside divergent code: UB)
	//  - warp-synchronous tail replaced by a full tree (no volatile needed)
	//  - identity is -FLT_MAX, so all-negative inputs reduce correctly (was 0)
	extern __shared__ float sdata[];
	unsigned int tid = threadIdx.x;
	unsigned int stride = blockDim.x * 2 * gridDim.x;
	float v = -3.402823466e38f;	// -FLT_MAX: identity element for max
	// grid-stride load phase: two elements per thread per iteration
	for (unsigned int i = blockIdx.x * 2 * blockDim.x + threadIdx.x; i < (unsigned int)size; i += stride)
	{
		v = fmaxf(v, array[i]);
		if (i + blockDim.x < (unsigned int)size)
			v = fmaxf(v, array[i + blockDim.x]);
	}
	sdata[tid] = v;
	__syncthreads();	// all partials visible before the tree reduction
	// shared-memory tree reduction down to sdata[0]
	for (unsigned int s = blockDim.x / 2; s > 0; s >>= 1) {
		if (tid < s)
			sdata[tid] = fmaxf(sdata[tid], sdata[tid + s]);
		__syncthreads();
	}
	if (tid == 0) {
		max[blockIdx.x] = sdata[0];
	}
}
20,068 | /*
Name: Prashanth Mallyampatti
Student Id: 200250501
Unity Id: pmallya
Algorithm: Bitonic Sort
*/
#include <stdio.h>
#include<stdlib.h>
#include <time.h>
#include<assert.h>
#include<sys/time.h>
int threads_ = 0, blocks_ = 0;
#ifdef __cplusplus
extern "C"
{
#endif
__global__
void bitonic_sort(float *values, int j, int k)
{
	// One compare-exchange step of the bitonic sorting network.
	// Thread i is paired with partner i^j; the bit (i & k) selects whether
	// this pair sorts ascending or descending at the current stage.
	int i = threadIdx.x + blockDim.x * blockIdx.x;
	int partner = i ^ j;
	// only the lower index of each pair performs the exchange
	if (partner > i)
	{
		bool ascending = ((i & k) == 0);
		bool outOfOrder = ascending ? (values[i] > values[partner])
		                            : (values[i] < values[partner]);
		if (outOfOrder)
		{
			float temp = values[i];
			values[i] = values[partner];
			values[partner] = temp;
		}
	}
}
int cuda_sort(int number_of_elements, float* a)
{
	// Sort 'a' ascending on the GPU with the bitonic network.
	// number_of_elements must be a power of two (network requirement).
	// Returns 0 on success, -1 on any CUDA error (error is printed).
	//limiting thread usage
	if(number_of_elements > 512)
		threads_ = 512;
	else
		threads_ = number_of_elements;
	blocks_ = number_of_elements/threads_;
	float *values;
	size_t size = number_of_elements * sizeof(float);
	cudaError_t err = cudaMalloc((void**) &values, size);
	if (err != cudaSuccess) {
		fprintf(stderr, "cudaMalloc failed: %s\n", cudaGetErrorString(err));
		return -1;
	}
	err = cudaMemcpy(values, a, size, cudaMemcpyHostToDevice);
	if (err != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy H2D failed: %s\n", cudaGetErrorString(err));
		cudaFree(values);
		return -1;
	}
	// each (k, j) pair is one compare-exchange stage; launches on the default
	// stream serialize, which provides the ordering the network requires
	for (int k = 2; k <= number_of_elements; k <<= 1)
		for (int j = k >> 1; j > 0; j = j >> 1)
			bitonic_sort<<<blocks_, threads_>>>(values, j, k);
	err = cudaGetLastError();	// catch launch-configuration errors
	if (err != cudaSuccess) {
		fprintf(stderr, "kernel launch failed: %s\n", cudaGetErrorString(err));
		cudaFree(values);
		return -1;
	}
	err = cudaMemcpy(a, values, size, cudaMemcpyDeviceToHost);	// implicit sync
	cudaFree(values);
	if (err != cudaSuccess) {
		fprintf(stderr, "cudaMemcpy D2H failed: %s\n", cudaGetErrorString(err));
		return -1;
	}
	return 0;
}
#ifdef __cplusplus
}
#endif
|
20,069 | #include <stdio.h>
#define SIDE_SIZE 8
__device__ unsigned long long int cIndex(const unsigned long long int slicesRow, const unsigned long long int row, const unsigned long long int col) {
	// Flatten (row, col) into a 1-D index using 'slicesRow' as the row stride.
	// NOTE(review): the stride here is the row count, not the column count.
	// Every caller in this file (including the host-side readback in
	// gpuCalculate) uses the same convention, so it is internally consistent,
	// but it only matches conventional row-major layout for square grids --
	// confirm intent before changing.
	return slicesRow * row + col;
}
__device__ double cLeft(double* grid, const unsigned long long int slicesRow, const unsigned long long int row, const unsigned long long int col) {
	// Left neighbor's value; clamps at the col == 0 boundary by returning
	// the cell's own value.
	const unsigned long long int c = (col == 0) ? col : col - 1;
	return grid[cIndex(slicesRow, row, c)];
}
__device__ double cRight(double* grid, const unsigned long long int slicesRow, const unsigned long long int slicesCol, const unsigned long long int row, const unsigned long long int col) {
	// Right neighbor's value; clamps at the col == slicesCol-1 boundary by
	// returning the cell's own value.
	const unsigned long long int c = (col == slicesCol - 1) ? col : col + 1;
	return grid[cIndex(slicesRow, row, c)];
}
__device__ double cUp(double* grid, const unsigned long long int slicesRow, const unsigned long long int row, const unsigned long long int col) {
	// Upper neighbor's value; clamps at the row == 0 boundary by returning
	// the cell's own value.
	const unsigned long long int r = (row == 0) ? row : row - 1;
	return grid[cIndex(slicesRow, r, col)];
}
__device__ double cDown(double* grid, const unsigned long long int slicesRow, const unsigned long long int row, const unsigned long long int col) {
	// Lower neighbor's value; clamps at the row == slicesRow-1 boundary by
	// returning the cell's own value.
	const unsigned long long int r = (row == slicesRow - 1) ? row : row + 1;
	return grid[cIndex(slicesRow, r, col)];
}
__global__ void calculateNext(double* oldGrid, double* newGrid, const unsigned long long int slicesRow, const unsigned long long int slicesCol) {
	// One Jacobi diffusion step: each cell of newGrid becomes the average of
	// its four edge-clamped neighbors in oldGrid. 2-D launch, one thread/cell.
	const unsigned long long int col = (blockIdx.x * blockDim.x) + threadIdx.x;
	const unsigned long long int row = (blockIdx.y * blockDim.y) + threadIdx.y;
	if (row >= slicesRow || col >= slicesCol) {
		return;	// guard the grid tail
	}
	const double sum = cLeft(oldGrid, slicesRow, row, col)
	                 + cRight(oldGrid, slicesRow, slicesCol, row, col)
	                 + cUp(oldGrid, slicesRow, row, col)
	                 + cDown(oldGrid, slicesRow, row, col);
	newGrid[cIndex(slicesRow, row, col)] = sum / 4.0;
}
__global__ void initializeArray(double* cylinder, const unsigned long long int slicesRow, const unsigned long long int slicesCol, const int numImpulses, const unsigned long long int* impulses, const double* concentrations) {
	// Zero the grid, then deposit each impulse concentration at its (row, col).
	// impulses holds numImpulses pairs: impulses[2k] = row, impulses[2k+1] = col.
	const unsigned long long int col = (blockIdx.x * blockDim.x) + threadIdx.x;
	const unsigned long long int row = (blockIdx.y * blockDim.y) + threadIdx.y;
	if (row >= slicesRow || col >= slicesCol) {
		return;	// guard the grid tail
	}
	cylinder[cIndex(slicesRow, row, col)] = 0.0;
	for (int k = 0; k < numImpulses; k++) {
		// only the thread owning the impulse cell writes it
		if (impulses[k * 2] == row && impulses[k * 2 + 1] == col) {
			cylinder[cIndex(slicesRow, row, col)] = concentrations[k];
			break;
		}
	}
}
/* Runs `totalTime` relaxation steps (calculateNext) over a slicesRow x
 * slicesCol grid seeded with point impulses, then returns the value at
 * (desiredPointRow, desiredPointCol). Exits the process on any CUDA
 * allocation/copy failure, matching the file's existing error policy. */
extern "C" double gpuCalculate(const unsigned long long int slicesRow, const unsigned long long int slicesCol, const unsigned long long int totalTime, const unsigned long long int desiredPointRow, const unsigned long long int desiredPointCol, const int numImpulses, const unsigned long long int* impulses, const double* concentrations) {
    cudaError_t mallocResult;
    double* oldGrid;
    double* newGrid;
    double* temp;
    unsigned long long int* deviceImpulses;
    double* deviceConcentrations;
    mallocResult = cudaMalloc((void**) &oldGrid, slicesRow * slicesCol * sizeof(double));
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA Malloc failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaMalloc((void**) &newGrid, slicesRow * slicesCol * sizeof(double));
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA Malloc failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaMalloc((void**) &deviceImpulses, 2 * numImpulses * sizeof(unsigned long long int));
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA Malloc failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaMalloc((void**) &deviceConcentrations, numImpulses * sizeof(double));
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA Malloc failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaMemcpy(deviceImpulses, impulses, 2 * numImpulses * sizeof(unsigned long long int), cudaMemcpyHostToDevice);
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA Memcpy failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaMemcpy(deviceConcentrations, concentrations, numImpulses * sizeof(double), cudaMemcpyHostToDevice);
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA Memcpy failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    dim3 dimBlock(SIDE_SIZE, SIDE_SIZE);
    dim3 dimGrid(ceil(slicesCol / (double) SIDE_SIZE), ceil(slicesRow / (double) SIDE_SIZE));
    initializeArray<<<dimGrid, dimBlock>>>(oldGrid, slicesRow, slicesCol, numImpulses, deviceImpulses, deviceConcentrations);
    /* BUG FIX: loop counter was `int`, which overflows (UB) when totalTime
     * exceeds INT_MAX; use the parameter's own type. */
    for (unsigned long long int i = 0; i < totalTime; i++) {
        calculateNext<<<dimGrid, dimBlock>>>(oldGrid, newGrid, slicesRow, slicesCol);
        /* ping-pong the buffers so the next step reads the freshly written grid */
        temp = oldGrid;
        oldGrid = newGrid;
        newGrid = temp;
    }
    double answer;
    /* NOTE(review): this flat index assumes element (row, col) lives at
     * row * slicesRow + col, which looks inconsistent with cIndex() taking
     * slicesRow as its leading dimension -- confirm against cIndex. */
    mallocResult = cudaMemcpy(&answer, &oldGrid[slicesRow * desiredPointRow + desiredPointCol], sizeof(double), cudaMemcpyDeviceToHost);
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA Memcpy failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaFree(deviceImpulses);
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA free failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaFree(deviceConcentrations);
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA free failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    /* BUG FIX: both grid buffers were previously leaked. */
    mallocResult = cudaFree(oldGrid);
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA free failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    mallocResult = cudaFree(newGrid);
    if (mallocResult != cudaSuccess) {
        fprintf(stderr, "CUDA free failed, exiting...\n");
        exit(EXIT_FAILURE);
    }
    return answer;
} |
20,070 | #include "includes.h"
// Rearrange an NCHW input blob into a channels-last layout whose columns are
// padded by `padding` on each side ("1d": only the x dimension is padded).
// Launch: blockIdx.x/threadIdx.x cover the width*height pixels, blockIdx.y is
// the channel, blockIdx.z is the batch index.
__global__ void blob_rearrange_kernel2_1d(const float *in, float *out, int num, int channels, int width, int height, int widthheight, int padding, int pwidthheight)
{
    int xy = blockIdx.x*blockDim.x + threadIdx.x;
    if(xy>=widthheight)
        return;
    int ch = blockIdx.y;  // one channel per y-block
    int n = blockIdx.z;   // one batch image per z-block
    float value=in[(n*channels+ch)*widthheight+xy];
    // BUG FIX: removed the __syncthreads() that used to follow here. It sat
    // after a divergent early return (threads with xy >= widthheight had
    // already exited the block), which is undefined behavior, and this kernel
    // uses no shared memory, so the barrier protected nothing.
    int xpad = (xy % width + padding);   // shift columns into the padded frame
    int ypad = (xy / width + 0);         // rows are not padded in this variant
    int xypad = ypad * (width+2*padding) + xpad;
    out[(n*pwidthheight+xypad)*channels + ch] = value;
} |
20,071 | #include <stdio.h>
// Element-wise vector addition: c[i] = a[i] + b[i] for every i < num.
// Intended for a single-block launch (uses threadIdx.x only).
__global__ void add(int *a, int *b, int *c, int num) {
    const int idx = threadIdx.x;
    if (idx >= num) {
        return;
    }
    c[idx] = a[idx] + b[idx];
}
// Demo driver: fills two small host vectors, adds them on the GPU with a
// single-block launch, copies the sum back, and prints each equation.
int main(int argc, char* argv[]) {
    const int num = 10;           // fixed problem size (arrays are stack-allocated)
    int a[num], b[num], c[num];
    int *a_gpu, *b_gpu, *c_gpu;
    for (int i = 0; i < num; i++) {
        a[i] = i;
        b[i] = i*i;
    }
    cudaMalloc((void**)&a_gpu, num*sizeof(int));
    cudaMalloc((void**)&b_gpu, num*sizeof(int));
    cudaMalloc((void**)&c_gpu, num*sizeof(int));
    // copy data
    cudaMemcpy(a_gpu, a, num*sizeof(int), cudaMemcpyHostToDevice);
    cudaMemcpy(b_gpu, b, num*sizeof(int), cudaMemcpyHostToDevice);
    // do
    add<<<1, num>>>(a_gpu, b_gpu, c_gpu, num);
    // get data (blocking memcpy also synchronizes with the kernel)
    cudaMemcpy(c, c_gpu, num*sizeof(int), cudaMemcpyDeviceToHost);
    // visualization
    for (int i=0; i < num; i++) {
        printf("%d + %d = %d\n", a[i], b[i], c[i]);
    }
    // BUG FIX: device buffers were previously leaked.
    cudaFree(a_gpu);
    cudaFree(b_gpu);
    cudaFree(c_gpu);
    return 0;
}
|
20,072 | #include "includes.h"
// Single-result kernel: thread 0 writes x*x + y to *out (the multiply-add is
// eligible for FMA contraction by the compiler); all other threads exit.
__global__ void fmad_kernel(double x, double y, double *out)
{
    const int tid = threadIdx.x + blockDim.x * blockIdx.x;
    if (tid != 0)
        return;
    *out = x * x + y;
} |
20,073 | #include "includes.h"
// Dense square matrix-vector product: vector_out = matrix * vector_in,
// one thread per output row; matrix is row-major with leading dim dim_mn.
__global__ void matrix_multiply_kernel(double *matrix, double *vector_in, double *vector_out, long dim_mn){
    const long row = threadIdx.x + blockIdx.x * blockDim.x;
    if (row >= dim_mn)
        return;
    double acc = 0.;
    for (long k = 0; k < dim_mn; k++){
        acc += matrix[row*dim_mn+k] * vector_in[k];
    }
    vector_out[row] = acc;
} |
20,074 |
//#include "../include/clion_cuda.h"
#include <iostream>
#include <stdio.h>
#include <cuda_runtime.h>
// Fill: every thread writes the constant 10.0 to its own slot.
// NOTE(review): no bounds guard -- the launch must cover exactly the array
// length; confirm callers size the grid accordingly.
__global__
void add_f(float *array) {
    const int idx = blockDim.x * blockIdx.x + threadIdx.x;
    array[idx] = 10.0;
}
// Demo: fill a device array with 10.0 on the GPU, copy it back, print it.
int main() {
    const int blocks = 1024;
    const int threads_per_block = 30;
    const size_t count = static_cast<size_t>(blocks) * threads_per_block;
    float *array_host = new float[count];
    float *array_device = 0;
    std::cout << "before" << std::endl;
    // BUG FIX: the byte count must include sizeof(float); the original passed
    // just the element count and allocated/copied 4x too few bytes.
    cudaMalloc((void**)&array_device, count * sizeof(float));
    add_f<<<blocks, threads_per_block>>>(array_device);
    // BUG FIX: destination and source were swapped for this device-to-host
    // copy (it copied from the uninitialized host buffer INTO the device).
    cudaMemcpy(array_host, array_device, count * sizeof(float), cudaMemcpyDeviceToHost);
    for (size_t i = 0; i < count; ++i) {
        std::cout << array_host[i] << std::endl;
    }
    std::cout << "end" << std::endl;
    cudaFree(array_device);
    delete[] array_host;  // BUG FIX: the host buffer was leaked
}
|
20,075 | #include "includes.h"
// For each of `size` detections, scan its `class_num` class scores, record the
// best class and its probability; probabilities below conf_thresh are zeroed.
// Ties keep the LAST maximal class (<= comparison), matching prior behavior.
// NOTE(review): conf_thresh is declared int but compared against float
// probabilities -- confirm callers intend the implicit conversion.
__global__ void getMaxPorb(const int size, const float* class_prob, const int class_num, float* max_prob, int* idx, int *class_idx, const int conf_thresh)
{
    const int det = blockIdx.x * blockDim.x + threadIdx.x;
    if (det >= size)
        return;
    const float *scores = class_prob + det * class_num;
    float best = 0.0f;
    int best_class = -1;
    for (int c = 0; c < class_num; c++)
    {
        const float p = scores[c];
        if (best <= p)
        {
            best_class = c;
            best = p;
        }
    }
    // keep the score only when it clears the confidence threshold
    max_prob[det] = (best >= conf_thresh) ? best : 0.0f;
    idx[det] = det;
    class_idx[det] = best_class;
} |
20,076 | // Copyright (c) 2020 Saurabh Yadav
//
// This software is released under the MIT License.
// https://opensource.org/licenses/MIT
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <cuda_runtime.h>
// Enumerate all CUDA devices and print a short capability summary for each.
int main()
{
    int device_count = 0;
    cudaError_t status = cudaGetDeviceCount(&device_count);
    if (status != cudaSuccess)
    {
        fprintf(stderr, "Failed to get device count (error code %s)!\n", cudaGetErrorString(status));
        exit(EXIT_FAILURE);
    }
    printf("Number of GPUs:\t\t %d \n", device_count);
    for (int dev = 0; dev < device_count; ++dev)
    {
        cudaDeviceProp prop;
        status = cudaGetDeviceProperties(&prop, dev);
        if (status != cudaSuccess)
        {
            fprintf(stderr, "Failed to get device %d properties (error code %s)!\n", dev, cudaGetErrorString(status));
            exit(EXIT_FAILURE);
        }
        printf("GPU Name:\t\t %s \n", prop.name);
        printf("Clock rate:\t\t %d \n", prop.clockRate);
        printf("Max Threads per block:\t %d \n", prop.maxThreadsPerBlock);
        printf("Shared Memory per block: %lu \n", prop.sharedMemPerBlock);
    }
    return 0;
}
|
20,077 | // RUN: c-index-test -test-load-source all -x cuda %s | FileCheck %s
__attribute__((device)) void f_device();
__attribute__((global)) void f_global();
__attribute__((constant)) int* g_constant;
__attribute__((host)) void f_host();
// CHECK: attributes-cuda.cu:3:30: FunctionDecl=f_device:3:30
// CHECK-NEXT: attributes-cuda.cu:3:16: attribute(device)
// CHECK: attributes-cuda.cu:4:30: FunctionDecl=f_global:4:30
// CHECK-NEXT: attributes-cuda.cu:4:16: attribute(global)
// CHECK: attributes-cuda.cu:5:32: VarDecl=g_constant:5:32 (Definition)
// CHECK-NEXT: attributes-cuda.cu:5:16: attribute(constant)
// CHECK: attributes-cuda.cu:6:28: FunctionDecl=f_host:6:28
// CHECK-NEXT: attributes-cuda.cu:6:16: attribute(host)
|
20,078 |
#include <iostream>
#include <chrono>
#include "cuda_runtime.h"
#include "cuda.h"
#include "device_launch_parameters.h"
// Abort the program with a readable message when a CUDA call fails;
// used via the HANDLE_ERROR macro, which supplies file and line.
static void HandleError( cudaError_t err, const char *file, int line )
{
    if (err == cudaSuccess)
        return;
    printf( "%s in %s at line %d\n", cudaGetErrorString( err ),
            file, line );
    exit( EXIT_FAILURE );
}
#define HANDLE_ERROR( err ) (HandleError( err, __FILE__, __LINE__ ))
#define SIZE 10
using namespace std;
// Print a SIZE x SIZE matrix, one row per line, values space-separated.
void print_array (float array[][SIZE])
{
    for (int row = 0; row < SIZE; ++row) {
        for (int col = 0; col < SIZE; ++col) {
            cout << array[row][col] << " ";
        }
        cout << endl;
    }
}
// Fill a SIZE x SIZE matrix with pseudo-random values in [0, 1]
// (rand() is not seeded here, so the sequence repeats across runs).
void initialize_array (float array[][SIZE])
{
    for (int row = 0; row < SIZE; ++row) {
        for (int col = 0; col < SIZE; ++col) {
            array[row][col] = static_cast <float> (rand()) / static_cast <float> (RAND_MAX);
        }
    }
}
// Allocates four SIZE x SIZE host matrices, fills them with random values,
// exercises matching device allocations, and releases them again.
void array_decl ()
{
    static float A[SIZE][SIZE];
    static float B[SIZE][SIZE];
    static float C[SIZE][SIZE];
    static float D[SIZE][SIZE];
    initialize_array(A);
    initialize_array(B);
    initialize_array(C);
    initialize_array(D);
    // BUG FIX: these buffers hold float data (allocated with sizeof(float)),
    // but were declared int* -- declare them with the correct element type.
    float *d_A, *d_B, *d_C, *d_D, *d_temp;
    HANDLE_ERROR(cudaMalloc((void**)&d_A, SIZE*SIZE*sizeof(float)));
    HANDLE_ERROR(cudaMalloc((void**)&d_B, SIZE*SIZE*sizeof(float)));
    HANDLE_ERROR(cudaMalloc((void**)&d_C, SIZE*SIZE*sizeof(float)));
    HANDLE_ERROR(cudaMalloc((void**)&d_D, SIZE*SIZE*sizeof(float)));
    HANDLE_ERROR(cudaMalloc((void**)&d_temp, SIZE*SIZE*sizeof(float)));
    cudaFree (d_A);
    cudaFree (d_B);
    cudaFree (d_C);
    cudaFree (d_D);
    cudaFree (d_temp);
}
// Entry point: run the allocation demo and exit successfully.
int main (int argc, char **argv) {
    array_decl();
    return 0;
} |
20,079 | /** File: modified_SMO.cu
* Purpose: Parallel Programming 2017 Final Project: Training Support Vector Machine on multiprocessors and GPUs
* the CUDA version.
*
* Compile: nvcc -o modified_SMO modified_SMO.cu
* Run: ex: ./modified_SMO ./data/train-mnist ./data/train-mnist.model 10000 784 1 0.01 0.001
* ./data/train-mnist: input training data set
* ./data/train-mnist.model: output model data
* 10000 : number of training data
* 784 : dimension of feature space
* 1 : C
* 0.01 : gamma for gaussian kernel
* 0.001: eps
*
* Notes:
* 1. Follow the paper "Parallel Sequential Minimal Optimization for the Training of Support Vector Machines" by L.J. Cao et al. 2006
* 2. Use one-against-all method to implement multiclass version (removed)
* 3. Modify eps to balance the accuracy and speed (recommend value: 0.001)
* 4. Input file is identical with "libsvm" format
* 5. Output file includes support vector
* 6. The number of training data should be the same with the one in the input file or the program will crash
*
* Output: Lagrangian parameter alphas + support vector + b = model data
*
* Author: Wei-Hsiang Teng
* History: 2017/6/2 created
* 2017/6/13 change data type float to double
*/
#include <stdlib.h>
#include <stdio.h>
#include <sys/time.h> /* for estimate elapsed time */
#include <math.h> /* for exp() */
#include <string.h>
#include <limits.h>
#define CHECK(call) \
{ \
const cudaError_t error = call; \
if (error != cudaSuccess) \
{ \
fprintf(stderr, "Error: %s:%d, ", __FILE__, __LINE__); \
fprintf(stderr, "code: %d, reason: %s\n", error, \
cudaGetErrorString(error)); \
exit(-1); \
} \
}
#define MAX(x, y) ((x)>(y))?(x):(y)
#define MIN(x, y) ((x)<(y))?(x):(y)
#define ABS(a) (((a) < 0) ? -(a) : (a))
#define STR_SIZE 8192
/* Training problem description plus SMO solver state; shared by every
 * routine in this file. x is a dense size-by-dim row-major matrix. */
struct problem
{
double* x; /* input features, row-major: sample i starts at x[i * dim] */
double* alphas; /* output Lagrangian parameters, one per sample */
int *y; /* input labels (+1 / -1) */
int size; /* size of training data set */
int dim; /* number of dimension of coordinates */
double C; /* regularization parameter */
double gamma; /* parameter for gaussian kernel function */
double b; /* offset of decision boundary */
double tau; /* parameter for divergence (stopping tolerance on the gap) */
double eps; /* tolerance */
};
/**
* name: seconds
*
* description: for estimating execution time
*
*/
/* Wall-clock time in seconds since the epoch, with microsecond resolution;
 * used for coarse timing of solver phases. */
double seconds(void)
{
    struct timeval now;
    gettimeofday(&now, NULL);
    return (double)now.tv_sec + 1.e-6 * (double)now.tv_usec;
}
/**
* name: rbf_kernel
*
* description: generate kernel function K(X_i, X_j) of X which is gaussian.
* input: prob: needed information describing the problem
* i, j: index of kernel function K(X_i, X_j)
*
* output: K(X_i, X_j)
*
*/
/* Gaussian (RBF) kernel K(X_i, X_j) = exp(-gamma * ||X_i - X_j||^2)
 * evaluated on rows i and j of the dense feature matrix prob->x. */
double rbf_kernel(struct problem* prob, int i, int j)
{
    const double* xi = prob->x + i * prob->dim;
    const double* xj = prob->x + j * prob->dim;
    double sq_dist = 0.0;
    for (int m = 0; m < prob->dim; m++)
    {
        const double d = xi[m] - xj[m];
        sq_dist += d * d;
    }
    return exp(-1 * prob->gamma * sq_dist);
}
/**
* name: computeDualityGap
*
* description: computeDualityGap computes parameter DualityGap according to (8).
* input: Err[]: error function (6)
* prob: needed information describing the problem
*
* output: DualityGap
*
*/
/* Duality gap per equation (8): sums C * hinge(margin) plus the
 * alpha_i * y_i * Err_i contribution of every active multiplier. */
double computeDualityGap(double Err[], struct problem* prob)
{
    double gap = 0.0;
    for (int i = 0; i < prob->size; i++)
    {
        /* positive-class samples measure (b - Err), negative-class (Err - b) */
        const double margin = (prob->y[i] == 1) ? (prob->b - Err[i])
                                                : (Err[i] - prob->b);
        if (margin > 0)
            gap += prob->C * margin;
        if (prob->alphas[i] != 0)
            gap += prob->alphas[i] * prob->y[i] * Err[i];
    }
    return gap;
}
/**
* name: computeBupIup
*
* description: computeBupIup computes b_up and I_up according to page 5.
* input: Err[]: error function (6)
* prob: needed information describing the problem
* b_up: the min error function of sets which unions I0 I1 I2
* I_up: the index for min of error function of set which unions I0 I1 I2
*
* output: None
*
*/
/* b_up / I_up: minimum cached error (and its index) over the index sets
 * I0 (0 < alpha < C), I1 (alpha == 0, y == +1), I2 (alpha == C, y == -1).
 * The three membership tests are mutually exclusive for C > 0, so a single
 * combined condition is equivalent to the original sequential checks. */
void computeBupIup(double Err[], struct problem* prob, double *b_up, int *I_up)
{
    *b_up = INT_MAX;
    for (int i = 0; i < prob->size; i++)
    {
        const double a = prob->alphas[i];
        const int in_I0 = (a > 0 && a < prob->C);
        const int in_I1 = (a == 0 && prob->y[i] == 1);
        const int in_I2 = (a == prob->C && prob->y[i] == -1);
        if ((in_I0 || in_I1 || in_I2) && Err[i] < *b_up)
        {
            *b_up = Err[i];
            *I_up = i;
        }
    }
}
/**
* name: computeBlowIlow
*
* description: computeBlowIlow computes b_low and I_low according to page 5.
* input: Err[]: error function (6)
* prob: needed information describing the problem
* b_low: the max error function of sets which unions I0 I3 I4
* I_low: the index for max of error function of set which unions I0 I3 I4
*
* output: None
*
*/
/* b_low / I_low: maximum cached error (and its index) over the index sets
 * I0 (0 < alpha < C), I3 (alpha == C, y == +1), I4 (alpha == 0, y == -1).
 * Membership tests are mutually exclusive for C > 0, so one combined
 * condition matches the original sequential checks. */
void computeBlowIlow(double Err[], struct problem* prob, double *b_low, int *I_low)
{
    *b_low = INT_MIN;
    for (int i = 0; i < prob->size; i++)
    {
        const double a = prob->alphas[i];
        const int in_I0 = (a > 0 && a < prob->C);
        const int in_I3 = (a == prob->C && prob->y[i] == 1);
        const int in_I4 = (a == 0 && prob->y[i] == -1);
        if ((in_I0 || in_I3 || in_I4) && Err[i] > *b_low)
        {
            *b_low = Err[i];
            *I_low = i;
        }
    }
}
/**
* name: computeNumChaned
*
* description: computeNumChaned implements Procedure takeStep() in page 19.
* input: prob: needed information describing the problem
* I_up: index for minimum of Err in group I0, I1, I2
* I_low: index for maximum of Err in group I0, I3, I4
* alpha1: alphas[I_up]
* alpha2: alphas[I_low]
* y1, y2: Y[I_up], Y[I_low]
* F1, F2: Err[I_up], Err[I_low]
* Dual: see function (7)
* a1, a2: the renewed alpha1, alphas2
*
* output: numChanged
*
*/
/**
 * name: computeNumChaned
 *
 * description: joint optimization step (Procedure takeStep) for the working
 * pair (I_up, I_low): moves alpha2 to its clipped optimum on [L, H],
 * derives alpha1 from the linear constraint, snaps near-boundary values to
 * exactly 0 or C, and updates the dual objective value.
 * input: prob: problem description; I_up/I_low: working-pair indices;
 * alpha1/alpha2, y1/y2, F1/F2: multipliers, labels, and cached errors of
 * the pair; Dual: dual objective (updated in place); a1/a2: out params
 * receiving the renewed multipliers.
 * output: 1 if the pair changed, 0 otherwise
 */
int computeNumChaned(struct problem* prob,
                     int I_up,
                     int I_low,
                     double alpha1,
                     double alpha2,
                     int y1,
                     int y2,
                     double F1,
                     double F2,
                     double *Dual,
                     double* a1,
                     double* a2)
{
    if (I_up == I_low) return 0;
    int s = y1 * y2;
    double gamma;                 /* alpha1 +/- alpha2, conserved by the update */
    double L, H, slope, change;
    double k11, k12, k22, eta;
    if (y1 == y2)
        gamma = alpha1 + alpha2;
    else
        gamma = alpha1 - alpha2;
    /* feasible segment [L, H] for the new alpha2 */
    if (s == 1)
    {
        L = MAX(0, gamma - prob->C);
        H = MIN(prob->C, gamma);
    } else {
        L = MAX(0, -1 * gamma);
        H = MIN(prob->C, prob->C - gamma);
    }
    if (H <= L) return 0;
    k11 = rbf_kernel(prob, I_up, I_up);
    k22 = rbf_kernel(prob, I_low, I_low);
    k12 = rbf_kernel(prob, I_up, I_low);
    eta = 2 * k12 - k11 - k22;    /* second derivative along the constraint */
    if (eta < prob->eps * (k11 + k22))
    {
        /* usual case: jump to the unconstrained minimum, then clip to [L, H] */
        *a2 = alpha2 - (y2 * (F1 - F2) / eta);
        if (*a2 < L)
            *a2 = L;
        else if (*a2 > H)
            *a2 = H;
    } else {
        /* near-zero eta: objective is (almost) linear in a2,
         * move to whichever endpoint improves it */
        slope = y2 * (F1 - F2);
        change = slope * (H - L);
        if (change != 0)
        {
            if (slope > 0)
                *a2 = H;
            else
                *a2 = L;
        } else {
            *a2 = alpha2;
        }
    }
    /* snap values within eps*C of the box bounds to exactly 0 or C */
    if (*a2 > prob->C - prob->eps * prob->C) *a2 = prob->C;
    else if (*a2 < prob->eps * prob->C) *a2 = 0;
    /* no meaningful progress on this pair */
    if (ABS(*a2 - alpha2) < prob->eps * (*a2 + alpha2 + prob->eps)) return 0;
    if (s == 1) *a1 = gamma - *a2;
    else *a1 = gamma + *a2;
    if (*a1 > prob->C - prob->eps * prob->C) *a1 = prob->C;
    else if (*a1 < prob->eps * prob->C) *a1 = 0;
    /* BUG FIX: "1 / 2" is integer division and always evaluated to 0, which
     * silently dropped the whole quadratic term of the dual update; use 0.5.
     * NOTE(review): the (*a1 - alpha2) factor looks suspect (a change term
     * would normally pair *a1 with alpha1) -- confirm against the formula in
     * Cao et al. 2006 before relying on Dual for convergence decisions. */
    *Dual = *Dual - (*a1 - alpha1) * (F1 - F2) / y1 + 0.5 * eta * (*a1 - alpha2) * (*a1 - alpha2) / y1 / y1;
    return 1;
}
/**********************************************************************
* Initialize alphas and Err
*********************************************************************/
/**********************************************************************
 * Initialize the SMO error cache on the device: f_i = -y_i (alphas start
 * at zero, so the initial decision value is 0 and the error is -label).
 *********************************************************************/
__global__ void Initialization(double* devErr, int* devY, int size)
{
    const int idx = blockDim.x * blockIdx.x + threadIdx.x;
    if (idx >= size)
        return;
    devErr[idx] = -1 * devY[idx];
}
/**********************************************************************
* Update f_i
*********************************************************************/
/**********************************************************************
 * After the working pair changes, refresh every cached error:
 * f_i += (a1 - a1_old) * y1 * K(I_up, i) + (a2 - a2_old) * y2 * K(I_low, i),
 * with the Gaussian kernel recomputed inline from the raw features.
 *********************************************************************/
__global__ void update_fi(double *devErr, double *devX, double a1, double a2, double a1_old, double a2_old, int y1, int y2, int I_up, int I_low, double gamma, int dim, int size) {
    const int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i >= size)
        return;
    double sq_up = 0, sq_low = 0;
    for (int m = 0; m < dim; m++)
    {
        const double du = devX[I_up * dim + m] - devX[i * dim + m];
        const double dl = devX[I_low * dim + m] - devX[i * dim + m];
        sq_up += du * du;
        sq_low += dl * dl;
    }
    const double k1 = exp(-1 * gamma * sq_up);
    const double k2 = exp(-1 * gamma * sq_low);
    devErr[i] += (a1 - a1_old) * y1 * k1 + (a2 - a2_old) * y2 * k2;
}
/**
* name: modified_SMO
*
* description: modified_SMO implements Pseudo-code for the serial SMO in page 19.
* input: prob: needed information describing the problem
*
* output: None
*
*/
/**
 * name: modified_SMO
 *
 * description: serial SMO driver (pseudo-code p.19 of Cao et al. 2006).
 * The error cache lives on the GPU (Initialization / update_fi kernels);
 * working-pair selection and the takeStep update run on the host against a
 * host mirror of the cache. Iterates until the duality gap falls below
 * tau * |Dual| or no pair can be changed.
 * input: prob: problem description; alphas and b are written in place.
 * output: None
 */
void modified_SMO(struct problem* prob)
{
    prob->b = 0.0;
    double* Err;                       /* host mirror of the device error cache */
    double b_up, b_low, a1 = 0, a2 = 0, F1 = 0, F2 = 0;
    int I_up, I_low, y1 = 0, y2 = 0;
    int numChanged;
    double Dual = 0, DualityGap;
    double a1_old, a2_old;
    int num_iter = 0;
    double s1, s2, s3, s4;             /* phase timers */
    double t1 = 0, t2 = 0, t3 = 0, t4 = 0;
    /* device variables */
    double* devX;
    int* devY;
    double* devErr;
    Err = (double *)malloc(sizeof(double) * prob->size);
    /* allocate device memory and upload the training set */
    CHECK(cudaMalloc((void**)&devX, prob->size * prob->dim * sizeof(double)));
    CHECK(cudaMalloc((void**)&devY, prob->size * sizeof(int)));
    CHECK(cudaMalloc((void**)&devErr, prob->size * sizeof(double)));
    CHECK(cudaMemcpy(devX, prob->x, prob->size * prob->dim * sizeof(double), cudaMemcpyHostToDevice));
    CHECK(cudaMemcpy(devY, prob->y, prob->size * sizeof(int), cudaMemcpyHostToDevice));
    dim3 block(32);
    dim3 grid((prob->size + block.x - 1)/block.x);
    Initialization<<<grid, block>>>(devErr, devY, prob->size);
    memset(prob->alphas, 0, sizeof(double) * prob->size);
    /* initialize b_up, I_up, b_low, I_low, DualityGap */
    CHECK(cudaMemcpy(Err, devErr, prob->size * sizeof(double), cudaMemcpyDeviceToHost));
    DualityGap = computeDualityGap(Err, prob);
    computeBupIup(Err, prob, &b_up, &I_up);
    computeBlowIlow(Err, prob, &b_low, &I_low);
    numChanged = 1;
    while(DualityGap > prob->tau*ABS(Dual) && numChanged != 0)
    {
        /* take one SMO step on the current working pair */
        s1 = seconds();
        a1_old = prob->alphas[I_up];
        a2_old = prob->alphas[I_low];
        y1 = prob->y[I_up];
        y2 = prob->y[I_low];
        F1 = Err[I_up];
        F2 = Err[I_low];
        numChanged = computeNumChaned(prob, I_up, I_low, a1_old, a2_old, y1, y2, F1, F2, &Dual, &a1, &a2);
        prob->alphas[I_up] = a1;
        prob->alphas[I_low] = a2;
        t1 += (seconds() - s1);
        /* update Err[i] on the GPU, then pull the fresh cache back */
        s2 = seconds();
        update_fi<<<grid, block>>>(devErr, devX, a1, a2, a1_old, a2_old, y1, y2, I_up, I_low, prob->gamma, prob->dim, prob->size);
        CHECK(cudaMemcpy(Err, devErr, prob->size * sizeof(double), cudaMemcpyDeviceToHost));
        t2 += (seconds() - s2);
        s3 = seconds();
        computeBupIup(Err, prob, &b_up, &I_up);
        computeBlowIlow(Err, prob, &b_low, &I_low);
        prob->b = (b_low + b_up) / 2;
        t3 += (seconds() - s3);
        s4 = seconds();
        DualityGap = computeDualityGap(Err, prob);
        t4 += (seconds() - s4);
        num_iter++;
        //printf("itertion: %d\n", num_iter);
    }
    /* NOTE(review): the final b flips the sign of the value used during the
     * loop -- confirm this matches the decision-function convention used by
     * save_model's consumers. */
    prob->b = -1 * (b_low + b_up) / 2;
    printf("computeNumChaned : %lf secs\n", t1);
    printf("update f_i : %lf secs\n", t2);
    printf("update b_up, b_low : %lf secs\n", t3);
    printf("computeDualityGap : %lf secs\n", t4);
    printf("b = %f\n", prob->b);
    printf("block: %d, grid: %d\n", block.x, grid.x);
    free(Err);  /* BUG FIX: the host error cache was leaked */
    cudaFree(devErr);
    cudaFree(devX);
    cudaFree(devY);
}
/* Parse prob->size lines of libsvm-formatted data ("label idx:val idx:val ...")
 * from `file` into prob->y (labels) and the dense row-major prob->x.
 * The strtok/sscanf interplay is stateful: each token after a ':' split holds
 * "value next_index", so the value lands at the PREVIOUS index while the new
 * index is carried over in pre_index for the next token.
 * NOTE(review): fgets/sscanf return values are unchecked -- a short or
 * malformed file silently leaves stale/zero features. */
void read_data(char* file, struct problem* prob)
{
int i;
char s[STR_SIZE];
const char* delim = ":";
char *token;
int index = 0, pre_index = 0;
FILE *pFile;
pFile = fopen(file, "r");
if (pFile == NULL) {
printf("can't open %s\n", file);
exit(-1);
}
for (i = 0; i < prob->size; i++)
{
int cnt = 0;
/* one training sample per line */
fgets(s, sizeof(s), pFile);
/* get the first token: "label first_index" */
token = strtok(s, delim);
sscanf(token, "%d %d", &prob->y[i], &index);
/* walk through other tokens */
while( token != NULL )
{
if (cnt == 0) {
/* skip past the label token on the first pass */
token = strtok(NULL, delim);
}
/* store the value at the index announced by the previous token */
if (index >= 1 && index <= prob->dim)
sscanf(token, "%lf %d", &prob->x[i * prob->dim + index - 1], &pre_index);
index = pre_index;
token = strtok(NULL, delim);
cnt++;
}
}
fclose(pFile);
}
/* Write the trained model: a header line "num_sv gamma b", then one line per
 * support vector holding alpha*y followed by sparse "index:value" features. */
void save_model(char* filename, struct problem* prob)
{
    FILE *fp = fopen(filename, "w");
    if (fp == NULL) {
        printf("can't open %s\n", filename);
        exit(-1);
    }
    /* count the support vectors (non-zero multipliers) first */
    int total_sv = 0;
    for (int i = 0; i < prob->size; i++) {
        if (prob->alphas[i] != 0)
            total_sv++;
    }
    fprintf(fp, "%d %lf %lf\n", total_sv, prob->gamma, prob->b);
    for (int i = 0; i < prob->size; i++) {
        if (prob->alphas[i] == 0)
            continue;
        fprintf(fp, "%lf", prob->alphas[i] * prob->y[i]);
        for (int j = 0; j < prob->dim; j++) {
            /* emit only non-zero features, 1-based indices */
            if (prob->x[i * prob->dim + j] != 0)
                fprintf(fp, " %d:%lf", j + 1, prob->x[i * prob->dim + j]);
        }
        fprintf(fp, "\n");
    }
    printf("total sv: %d\n", total_sv);
    fclose(fp);
}
/* CLI driver: parse arguments, load the training set, run SMO, save the
 * model, and release all host allocations. */
int main(int argc, char* argv[])
{
    struct problem* prob = (struct problem*)malloc(sizeof(*prob));
    double start, end;
    if (argc < 8) {
        printf("%s data_file model_file data_size data_dim C gamma eps\n", argv[0]);
        exit(-1);
    }
    prob->size = atoi(argv[3]);
    prob->dim = atoi(argv[4]);
    prob->C = atof(argv[5]);
    prob->gamma = atof(argv[6]);
    prob->eps = atof(argv[7]);
    prob->x = (double *)malloc(prob->size * prob->dim * sizeof(double));
    memset(prob->x, 0, sizeof(double) * prob->size * prob->dim);
    prob->y = (int *)malloc(prob->size * sizeof(int));
    prob->alphas = (double *)malloc(prob->size * sizeof(double));
    read_data(argv[1], prob);
    /* start the SMO algorithm */
    prob->tau = 0.000001;
    start = seconds();
    modified_SMO(prob);
    end = seconds();
    printf("The total elapsed time is %lf seconds\n", end - start);
    /* save the result */
    save_model(argv[2], prob);
    free(prob->x);
    free(prob->y);
    free(prob->alphas);
    free(prob);  /* BUG FIX: the problem struct itself was leaked */
    return 0;
}
|
20,080 |
//__device__ bool inLeftBorder();
//__device__ bool inRightBorder();
//__device__ bool inTopBorder();
//__device__ bool inBottomBorder();
__device__ bool inRange(const int x, const int y, const int x_range, const int y_range);
__device__ int globalAddr(const int x, const int y, const int x_size);
__device__ int findRoot(int equivalenceArray[], int elementAddress);
__device__ void Union(int equivalenceArray[], const int elementAddress0, const int elementAddress1, int& changed);
/* Union-find label merging across 16x16 sub-block borders of a labeled image.
 * Each (blockIdx, threadIdx.x/y) pair owns one sub-block; the threadIdx.z
 * lanes sweep its bottom row and right column, unioning each border pixel's
 * label with its neighbors in the adjacent sub-block. The pass repeats until
 * a full sweep changes no root (shared flag `changed`).
 * The __syncthreads() calls are reached uniformly: every thread runs the
 * do/while body the same number of times because the loop condition reads the
 * shared flag only after the final barrier. */
__global__ void mergeTiles(
int* dLabelsData,
const int x_size,
const int y_size){
__shared__ int changed;
const int subBlockDim = 16;//x_size/blockDim.x;
/* each z-lane covers subBlockDim/blockDim.z border pixels per sweep */
const int repetitions = int(subBlockDim/blockDim.z);
const int subBlock_x = blockIdx.x*blockDim.x + threadIdx.x;
const int subBlock_y = blockIdx.y*blockDim.y + threadIdx.y;
int x, y = 0;
do {
__syncthreads();
/* one thread clears the convergence flag for this sweep */
if(threadIdx.x == 0 && threadIdx.y == 0 && threadIdx.z == 0) changed = 0;
__syncthreads();
/* sweep the bottom border row: union with the three pixels below */
for(int i=0; i < repetitions; i++) {
x = subBlock_x*subBlockDim + threadIdx.z + i*blockDim.z;
y = (subBlock_y + 1)*subBlockDim - 1;
if(inRange(x, y, x_size, y_size)) {
//if(!inLeftBorder()) {
if(inRange(x-1, y+1, x_size, y_size)) {
Union(dLabelsData, globalAddr(x, y, x_size), globalAddr(x-1, y+1, x_size), changed);
}
if(inRange(x, y+1, x_size, y_size)) {
Union(dLabelsData, globalAddr(x, y, x_size), globalAddr(x, y+1, x_size), changed);
}
//if(!inRightBorder()) {
if(inRange(x+1, y+1, x_size, y_size)) {
Union(dLabelsData, globalAddr(x, y, x_size), globalAddr(x+1, y+1, x_size), changed);
}
}
}
/* sweep the right border column: union with the three pixels to the right */
for(int i=0; i < repetitions; i++) {
x = (subBlock_x + 1)*subBlockDim -1;
y = subBlock_y*subBlockDim + threadIdx.z + i*blockDim.z;
if(inRange(x, y, x_size, y_size)){
//if(!inTopBorder()){
if(inRange(x+1, y-1, x_size, y_size)) {
Union(dLabelsData, globalAddr(x, y, x_size), globalAddr(x+1, y-1, x_size), changed);
}
if(inRange(x+1, y, x_size, y_size)) {
Union(dLabelsData, globalAddr(x, y, x_size), globalAddr(x+1, y, x_size), changed);
}
//if(!inBottomBorder()) {
if(inRange(x+1, y+1, x_size, y_size)) {
Union(dLabelsData, globalAddr(x, y, x_size), globalAddr(x+1, y+1, x_size), changed);
}
}
}
__syncthreads();
} while(changed);
}
// True iff (x, y) lies inside the x_range-by-y_range rectangle at the origin.
__device__ bool inRange(const int x, const int y, const int x_range, const int y_range) {
    if (x < 0 || y < 0)
        return false;
    return x < x_range && y < y_range;
}
/*
__device__ bool inLeftBorder(){
return (threadIdx.x == 0 && blockIdx.x == 0);
}
__device__ bool inRightBorder(){
return (blockIdx.x == (blockDim.x - 1) && threadIdx.x == BLOCK_WIDTH-1);
}
__device__ bool inTopBorder(){
return (threadIdx.y == 0 && blockIdx.y == 0);
}
__device__ bool inBottomBorder(){
return (blockIdx.y == (blockDim.y - 1) && threadIdx.y == BLOCK_HEIGHT-1);
}
*/
// Flatten 2D coordinates (x, y) into a row-major linear index.
__device__ int globalAddr(const int x, const int y, const int x_size){
    return y * x_size + x;
}
// Follow parent links until reaching a self-referencing entry (the root of
// the element's equivalence class). No path compression is performed.
__device__ int findRoot(int equivalenceArray[], int elementAddress){
    int node = elementAddress;
    for (;;) {
        const int parent = equivalenceArray[node];
        if (parent == node)
            return node;
        node = parent;
    }
}
/* Merge the equivalence classes of the two elements by pointing the larger
 * root at the smaller one, and raise `changed` so the caller's convergence
 * loop runs another sweep.
 * NOTE(review): the root update is a plain store (the atomicMin variant is
 * commented out), so concurrent unions from other threads can race; the
 * caller presumably relies on re-sweeping until nothing changes to reach a
 * fixed point -- confirm this is safe on the target GPU. */
__device__ void Union(int equivalenceArray[], const int elementAddress0, const int elementAddress1, int& changed){
int root0 = findRoot(equivalenceArray, elementAddress0);
int root1 = findRoot(equivalenceArray, elementAddress1);
/* always link the higher-numbered root under the lower-numbered one */
if(root0 < root1) {
equivalenceArray[root1] = root0;
//atomicMin(equivalenceArray + root1, root0);
changed = 1;
}
else if(root1 < root0){
equivalenceArray[root0] = root1;
//atomicMin(equivalenceArray + root0, root1);
changed = 1;
}
}
|
20,081 |
#ifndef __CUDACC__
#define __CUDACC__
#endif
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <cuda.h>
#include <device_functions.h>
#include <cuda_runtime_api.h>
#include<stdio.h>
#include<stdlib.h>
#include<math.h>
#include <string.h>
//#define WEI 11
//#define ITEN 5
//void inserirPeso(int vet[]);
//void inserirValor(int vet[]);
void info(int tam, int pes[], int val[], int n);
void encItens(int *mol, int n, int W, int pes[], int val[], int nit, int wit);
cudaError_t mochilaWithCuda(int *mochila, const int *peso, const int *valor, int iten, int pes);
// 0/1-knapsack DP on the GPU: thread w owns capacity column w of the
// (iten+1) x (wei+1) table and fills it row by row. Launched as a SINGLE
// block (<<<1, pes>>> in mochilaWithCuda), so __syncthreads() is a valid
// barrier between rows.
__global__ void mochilaKernel(int *mochila, const int *peso, const int *valor,const int wei,const int iten)
{
    int i;
    int w = threadIdx.x + 1;
    // BUG FIX: row i reads row i-1 cells (w - peso[i]) written by OTHER
    // threads, but the original had no barrier between rows -- a data race.
    // The row loop is now outside the capacity guard so every thread reaches
    // __syncthreads() uniformly once per row.
    for (i = 1; i<iten + 1; i++){
        if (w<wei+1){
            if (peso[i]>w){
                // item i does not fit: inherit the value from the row above
                mochila[i*(wei + 1) + w] = mochila[(i - 1)*(wei + 1) + w];
            }
            else{
                // best of: skip item i, or take it and add its value
                mochila[i*(wei+1)+w] = max(mochila[(i - 1)*(wei+1)+w], valor[i] + mochila[(i - 1)*(wei+1)+ w - peso[i]]);
            }
        }
        __syncthreads(); // row i must be complete before any thread starts row i+1
    }
}
// Driver: read a knapsack instance from a user-supplied file, solve the DP
// table on the GPU, then print the chosen items, the full table, and the
// optimal value. (User-facing messages kept in Portuguese, as before.)
int main()
{
    char url[80];
    // W: knapsack capacity; n: number of items; remaining ints are helpers
    int W, n, aux, aux2, q, i, j, *peso, *valor, *mochila;
    //C:/Users/Pedro/Downloads/test.in
    //---------------------File reading-----------------------------------------------
    printf("Digite o local do arquivo: ");
    printf("\n");
    // BUG FIX: pass the array itself (not its address, which has the wrong
    // type) and bound the read to the buffer size.
    scanf("%79s", url);
    FILE *arq;
    arq = fopen(url, "r");
    if (arq == NULL){
        printf("Erro, nao foi possivel abrir o arquivo\n");
        // BUG FIX: the original merely printed and carried on, then used
        // peso/valor/mochila uninitialized and called fclose(NULL).
        exit(EXIT_FAILURE);
    }
    fscanf(arq, "%d\n", &n);
    aux = n + 1;
    // allocate the item arrays (1-based indexing, hence n + 1)
    peso = (int*)malloc(aux*sizeof(int));
    valor = (int*)malloc(aux*sizeof(int));
    if (peso == NULL || valor == NULL){
        perror("Erro de alocacao de memoria vetor peso ou vetor valor");
        exit(EXIT_FAILURE);
    }
    for (i = 1; i<aux; i++){
        fscanf(arq, "%d %d %d\n", &q, &valor[i], &peso[i]);
    }
    fscanf(arq, "%d\n", &W);
    aux2 = W + 1;
    mochila = (int*)malloc(aux*aux2*sizeof(int));
    if (mochila == NULL){
        perror("Nao foi possivel alocar a mochila");
        exit(EXIT_FAILURE);
    }
    for (i = 0; i < (aux)*(aux2); i++) mochila[i] = 0;
    fclose(arq);
    //------------------------End of file handling------------------------------------------
    info(W, peso, valor, n);
    // Solve the DP table on the GPU.
    cudaError_t cudaStatus = mochilaWithCuda(mochila,peso,valor,n,W);
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "addWithCuda failed!");
        return 1;
    }
    // cudaDeviceReset must be called before exiting in order for profiling and
    // tracing tools such as Nsight and Visual Profiler to show complete traces.
    cudaStatus = cudaDeviceReset();
    if (cudaStatus != cudaSuccess) {
        fprintf(stderr, "cudaDeviceReset failed!");
        return 1;
    }
    printf("\n");
    encItens(mochila, n, W, peso, valor, n, W);
    printf("\n");
    printf("\n");
    // Print the whole DP table, one row per line.
    for (i = 0; i<n + 1; i++) {
        for (j = 0; j<W + 1; j++) {
            printf("%d ", mochila[(i*(W + 1)) + j]);
        }
        printf("\n");
    }
    printf("\n");
    printf("\n");
    printf("Valor maximo da mochila: %d\n", mochila[n*(W + 1) + W]);
    free(mochila);
    free(valor);
    free(peso);
    system("pause");
    return 0;
}
// Helper function for using CUDA to add vectors in parallel.
/* Host wrapper for the knapsack DP kernel: allocates device mirrors of the
 * table and the item arrays, uploads them, launches mochilaKernel with one
 * thread per capacity column (single block of `pes` threads), and copies the
 * finished table back. All error paths funnel through the Error label so the
 * device buffers are always released; returns the last CUDA status. */
cudaError_t mochilaWithCuda(int *host_mochila, const int *host_peso, const int *host_valor, int iten, int pes)
{
int *dev_mochila = 0;
int *dev_peso = 0;
int *dev_valor = 0;
cudaError_t cudaStatus;
// Choose which GPU to run on, change this on a multi-GPU system.
cudaStatus = cudaSetDevice(0);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaSetDevice failed! Do you have a CUDA-capable GPU installed?");
goto Error;
}
// Allocate GPU buffers: the (iten+1) x (pes+1) DP table plus the two
// 1-based item arrays.
cudaStatus = cudaMalloc((void**)&dev_mochila,(iten + 1)*(pes + 1)*sizeof(int));
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMalloc failed!");
goto Error;
}
cudaStatus = cudaMalloc((void**)&dev_peso, (iten+1) * sizeof(int));
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMalloc failed!");
goto Error;
}
cudaStatus = cudaMalloc((void**)&dev_valor, (iten+1) * sizeof(int));
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMalloc failed!");
goto Error;
}
// Copy input vectors from host memory to GPU buffers (the table upload also
// carries the zero-initialized row 0 that the kernel builds on).
cudaStatus = cudaMemcpy(dev_mochila, host_mochila, (iten + 1)*(pes + 1)* sizeof(int), cudaMemcpyHostToDevice);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMemcpy failed!");
goto Error;
}
cudaStatus = cudaMemcpy(dev_valor, host_valor, (iten+1) * sizeof(int), cudaMemcpyHostToDevice);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMemcpy failed!");
goto Error;
}
cudaStatus = cudaMemcpy(dev_peso, host_peso , (iten+1) * sizeof(int), cudaMemcpyHostToDevice);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMemcpy failed!");
goto Error;
}
// Launch a kernel on the GPU with one thread per capacity column.
mochilaKernel<<<1, pes>>>(dev_mochila, dev_peso, dev_valor, pes, iten);
// Check for any errors launching the kernel
cudaStatus = cudaGetLastError();
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "addKernel launch failed: %s\n", cudaGetErrorString(cudaStatus));
goto Error;
}
// cudaDeviceSynchronize waits for the kernel to finish, and returns
// any errors encountered during the launch.
cudaStatus = cudaDeviceSynchronize();
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaDeviceSynchronize returned error code %d after launching addKernel!\n", cudaStatus);
goto Error;
}
// Copy output vector from GPU buffer to host memory.
cudaStatus = cudaMemcpy(host_mochila, dev_mochila, (iten+1)*(pes+1)* sizeof(int), cudaMemcpyDeviceToHost);
if (cudaStatus != cudaSuccess) {
fprintf(stderr, "cudaMemcpy failed!");
goto Error;
}
Error:
cudaFree(dev_mochila);
cudaFree(dev_valor);
cudaFree(dev_peso);
return cudaStatus;
}
// Fill indices 1..5 with the hard-coded item weights of the sample instance.
void inserirPeso(int vet[]){
    const int pesos[5] = { 1, 2, 5, 6, 7 };
    for (int k = 0; k < 5; ++k)
        vet[k + 1] = pesos[k];
}
// Fill indices 1..5 with the hard-coded item values of the sample instance.
void inserirValor(int vet[]){
    const int valores[5] = { 1, 6, 18, 22, 28 };
    for (int k = 0; k < 5; ++k)
        vet[k + 1] = valores[k];
}
// Print a summary of the loaded knapsack instance: capacity, item count,
// and the value/weight of each item (1-based arrays).
void info(int tam, int pes[], int val[], int n){
    printf("=========================================================\n");
    printf(" *Dados da mochila* \n");
    printf("\n");
    printf("Capacidade total da mochila: %d\n", tam);
    printf("Numero de itens: %d itens", n);
    printf("\n");
    printf("Valor de cada item: ");
    for (int k = 1; k <= n; ++k){
        printf("%d ", val[k]);
    }
    printf("\n");
    printf("Peso de cada item: ");
    for (int k = 1; k <= n; ++k){
        printf("%d ", pes[k]);
    }
    printf("\n=========================================================\n");
}
// Backtracks through the filled knapsack DP table and prints the (1-based)
// indices of the items composing the optimal value, via printf side effects.
//   mol : flattened DP table; cell (row r, capacity w) is mol[r*(wit+1) + w]
//   n   : item row currently examined
//   W   : remaining capacity
//   pes : 1-based item weights; val : 1-based item values
//   nit : total number of items; wit : knapsack capacity (row width is wit+1)
// NOTE(review): reads row n-1, so it assumes n >= 1 on entry — confirm callers.
void encItens(int *mol, int n, int W, int pes[], int val[], int nit, int wit){
int i, pcal;
// Same value as the row above => item n was not taken; keep walking up
// (unless the value is already 0, in which case nothing remains to report).
if (mol[(n*(wit+1))+W] == mol[((n - 1)*(wit+1))+W]){
if (mol[(n*(wit+1)) + W] != 0){
encItens(mol, n - 1, W, pes, val, nit, wit);
}
}
else{
// Some item was taken at this capacity: find which item i explains the
// current cell value, print it, then recurse with the reduced capacity.
for (i = 1; i<nit + 1; i++){
pcal = W - pes[i];
if (val[i] + mol[((i - 1)*(wit+1)) + pcal] == mol[(n*(wit+1)) + W]){
printf("%d ", i);
encItens(mol, i - 1, pcal, pes, val, nit, wit);
break;
}
// Base case: the remaining value equals a single item's value.
if (mol[(n*(wit+1)) + W] == val[i]){
printf("%d ", i);
break;
}
}
}
}
|
20,082 | #ifndef _GENSPARSEMAT_
#define _GENSPARSEMAT_
void GenSparseMat(float *conVec, int rows, int clms, int* sparseVec, int* idxVec, int* nPostNeurons ) {
/* Build a compressed (CSR-like) representation of the connectivity matrix.
   conVec       : flattened matrix; element (i, j) is stored at conVec[i + clms * j]
   sparseVec    : packed column indices of the non-zero entries, row by row
   idxVec       : idxVec[i] = offset into sparseVec where row i begins
   nPostNeurons : nPostNeurons[i] = number of non-zero entries in row i
*/
unsigned long long int row, col, writePos = 0, rowCount;
for(row = 0; row < rows; ++row) {
rowCount = 0;
for(col = 0; col < clms; ++col) {
if(conVec[row + clms * col]) { /* connection row --> col */
sparseVec[writePos] = col;
++writePos;
++rowCount;
}
}
nPostNeurons[row] = rowCount;
}
/* prefix sum of the per-row counts yields the row start offsets */
idxVec[0] = 0;
for(row = 1; row < rows; ++row) {
idxVec[row] = idxVec[row-1] + nPostNeurons[row-1];
}
}
void GenSparseFeedForwardMat(float *conVec, int nff, int nNeurons, int* sparseVec, int* idxVec, int* nPostNeurons ) {
/* generate sparse representation of the feed-forward connectivity
conVec : nff*nNeurons-by-1 - input vector / flattened matrix; element (i, j) at conVec[i + j * nff]
sparseVec : - sparse vector (packed indices of target neurons, row by row)
idxVec : nff-by-1 - every element is the starting index in sparseVec for ith row in matrix conVec
nPostNeurons : nff-by-1 - number of 2/3 neurons receiving connections from feed-forward neuron i
*/
unsigned long long int i, j, counter = 0, nPost;
for(i = 0; i < nff; ++i) {
nPost = 0;
for(j = 0; j < nNeurons; ++j) {
if(conVec[i + j * nff]) {
sparseVec[counter] = j;
counter += 1;
nPost += 1;
}
}
/* BUGFIX: write the per-row count once, AFTER scanning the row (matches
   GenSparseMat above). The old code assigned inside the j-loop, doing
   redundant stores and leaving nPostNeurons[i] uninitialized when
   nNeurons == 0, which then corrupted the idxVec prefix sum. */
nPostNeurons[i] = nPost;
}
idxVec[0] = 0;
for(i = 1; i < nff; ++i) {
idxVec[i] = idxVec[i-1] + nPostNeurons[i-1];
}
}
#endif
|
20,083 | /**************************************************************
*
* --== Simple CUDA kernel ==--
* author: ampereira
*
*
* Fill the rest of the code
*
* Insert the functions for time measurement in the correct
* sections (i.e. do not account for filling the vectors with random data)
*
* Before compile choose the CPU/CUDA version by running the bash command:
* export CUDA=yes or export CUDA=no
*
**************************************************************/
#include <stdio.h>
#include <cstdlib>
#include <iostream>
#include <sys/time.h>
#define TIME_RESOLUTION 1000000 // time measuring resolution (us)
#define NUM_BLOCKS 128
#define STRIDE_SIZE 16
#define NUM_THREADS_PER_BLOCK 256
#define SIZE NUM_BLOCKS*NUM_THREADS_PER_BLOCK
#define RADIUS 2
using namespace std;
timeval t;
long long unsigned cpu_time;
cudaEvent_t start, stop;
// These are specific to measure the execution of only the kernel execution - might be useful
// Opens a GPU timing window: creates the file-global start/stop events and
// records the start event on the default stream. Paired with stopKernelTime().
void startKernelTime (void) {
	cudaEventCreate(&stop);
	cudaEventCreate(&start);
	cudaEventRecord(start);
}
// Closes the timing window opened by startKernelTime(): records and waits on
// the stop event, prints the elapsed time in ms with the given description,
// and releases both events.
void stopKernelTime (char * discription) {
	cudaEventRecord(stop);
	cudaEventSynchronize(stop);
	float milliseconds = 0;
	cudaEventElapsedTime(&milliseconds, start, stop);
	cout << milliseconds << " ms have elapsed for the kernel " << discription << " execution" << endl;
	// BUGFIX: destroy the events created by the matching startKernelTime();
	// without this, every start/stop pair leaked two cudaEvent_t objects.
	cudaEventDestroy(start);
	cudaEventDestroy(stop);
}
// Fill the input parameters and kernel qualifier
// 1D stencil of radius RADIUS over `in` (SIZE elements) into `out`, using a
// grid-stride loop so any launch configuration covers all elements exactly once.
__global__ void stencilKernelStride (float *in, float *out) {
	// BUGFIX: the stride must be the total number of launched threads
	// (gridDim.x * blockDim.x), not the constant STRIDE_SIZE — with a stride
	// of 16 every element was redundantly recomputed by many threads.
	for ( int tid = threadIdx.x + blockIdx.x * blockDim.x; tid < SIZE; tid += gridDim.x * blockDim.x ){
		float value = 0.0f;
		for ( int pos = -RADIUS; pos <= RADIUS; pos++ ){
			int idx = tid + pos;
			// BUGFIX: guard the halo reads; the old code read in[-RADIUS] at
			// the left edge and past the end at the right edge (out of bounds).
			if ( idx >= 0 && idx < SIZE ){
				value += in[idx];
			}
		}
		out[tid] = value;
	}
}
// 1D stencil of radius RADIUS using a shared-memory tile: each block stages
// NUM_THREADS_PER_BLOCK elements plus a 2*RADIUS halo, then sums the window.
// Assumes blockDim.x == NUM_THREADS_PER_BLOCK.
__global__ void stencilKernelSharedMemory (float *in, float *out){
	__shared__ float temp[NUM_THREADS_PER_BLOCK+2*RADIUS];
	int tid = threadIdx.x + blockIdx.x * blockDim.x;
	int lidx = threadIdx.x + RADIUS;	// position of this thread's element in the tile

	// main body of the tile (zero-pad past the end of the array)
	temp[lidx] = (tid < SIZE) ? in[tid] : 0.0f;

	// BUGFIX: the halo loads used wrong indices on both sides. The left halo
	// must read in[tid - RADIUS] (the old code read in[tid - threadIdx.x],
	// i.e. the block's first element, for every halo thread), and the right
	// halo must land at temp[threadIdx.x + blockDim.x + RADIUS] and read
	// in[tid + blockDim.x]. Out-of-range halo cells are zero-padded.
	if (threadIdx.x < RADIUS){
		int left = tid - RADIUS;
		temp[threadIdx.x] = (left >= 0) ? in[left] : 0.0f;
		int right = tid + blockDim.x;
		temp[threadIdx.x + blockDim.x + RADIUS] = (right < SIZE) ? in[right] : 0.0f;
	}
	__syncthreads();	// tile fully staged before anyone reads it

	if (tid < SIZE){
		float value = 0.0f;
		for (int pos = -RADIUS; pos <= RADIUS; pos++){
			value += temp[lidx + pos];
		}
		out[tid] = value;
	}
}
/*
// Fill the input parameters and kernel qualifier
void quicksortKernel (???) {
}
*/
// Fill with the code required for the GPU stencil (mem allocation, transfers, kernel launch....)
// Host driver for the GPU stencil: builds a random input vector, uploads it,
// times the strided and shared-memory stencil kernels, and copies the result
// back to the host.
void stencilGPU (void) {
	// BUGFIX: the buffers hold floats, so size them with sizeof(float)
	// (sizeof(int) happened to have the same size here, but was a latent
	// type error that would break if the element type changed).
	int bytes = SIZE*sizeof(float);
	float vector[SIZE], output_vector[SIZE];
	float *dev_vector, *dev_output;

	// fill the input with random values in [0, 1]
	for (unsigned i = 0; i<SIZE; i++){
		vector[i]=(float) rand()/RAND_MAX;
	}

	// allocate device memory
	cudaMalloc((void**)&dev_vector,bytes);
	cudaMalloc((void**)&dev_output,bytes);

	startKernelTime();
	// copy inputs to the device
	cudaMemcpy(dev_vector,vector,bytes,cudaMemcpyHostToDevice);

	dim3 dimGrid(NUM_BLOCKS);
	dim3 dimBlock(NUM_THREADS_PER_BLOCK);
	// BUGFIX: the launch syntax is <<<grid, block>>>; the arguments were
	// swapped, launching 256 blocks of 128 threads. That violated the
	// shared-memory kernel's assumption that blockDim.x == NUM_THREADS_PER_BLOCK.
	stencilKernelStride<<<dimGrid,dimBlock>>>(dev_vector,dev_output);
	cudaDeviceSynchronize();
	stopKernelTime("Stride");

	startKernelTime();
	stencilKernelSharedMemory<<<dimGrid,dimBlock>>>(dev_vector,dev_output);
	stopKernelTime("Shared Memory");

	startKernelTime();
	// copy the output back to the host
	cudaMemcpy(output_vector,dev_output,bytes,cudaMemcpyDeviceToHost);
	stopKernelTime("cudaMemcpy");

	// free the device memory
	cudaFree(dev_vector);
	cudaFree(dev_output);
}
/*
// Fill with the code required for the GPU quicksort (mem allocation, transfers, kernel launch....)
void quicksortGPU (void) {
}*/
// Entry point: command-line arguments are accepted but unused;
// simply runs the GPU stencil benchmark.
int main (int argc, char** argv){
	(void)argc;
	(void)argv;
	stencilGPU();
	return 0;
}
|
20,084 | #include "includes.h"
// Stream-compaction scatter step: for every element whose predicate is
// non-zero, copy it to its compacted slot. d_scanned holds the inclusive
// prefix sum of the predicates, so (scanned - 1) is the output address.
__global__ void pack_kernel(float *d_output, float *d_input, float *d_predicates, float *d_scanned, int length)
{
	const int tid = blockDim.x * blockIdx.x + threadIdx.x;
	if (tid < length && d_predicates[tid] != 0.f)
	{
		// destination slot comes from the inclusive scan (1-based -> 0-based)
		const int dst = (int)(d_scanned[tid] - 1);
		d_output[dst] = d_input[tid];
	}
}
20,085 | /* Small CUDA exercise to try to improve efficiency by using two
separate streams to set up a staged copying and execution (when
instead of one large copy, followed by one large kernel
computation, one does it in many small chunks, with copying and
computing done in parallel in two streams, after first chunk was
copied to the device).
For the purpose of this exercise, ignore the copying of the
results from the device to host at the end; only do the staged copy
and execute for the copying of the initial data to the device + the
kernel.
We use cudaMallocHost to allocate arrays on host in pinned memory,
which is both results in faster copying to/from GPU (compared to malloc),
and also a CUDA requirement for copying running concurrently with a kernel.
Make sure that the "Result:" value printed by the code is (almost)
identical in both original and modified versions of the code. If
not, you have a bug!
Hints: You will have to use the following CUDA functions:
- cudaStreamCreate
- cudaMemcpyAsync
- cudaStreamDestroy
- cudaDeviceSynchronize
* You will have to set up a for loop for multiple chunks copying and
kernel execution;
* Number of chunks should be a variable (or macro parameter); for
simplicity, make NMAX dividable by the number of chunks;
* In cudaMemcpyAsync the first two arguments should be "&d_A[ind],
&h_A[ind]", not "d_A, h_A", ind being the starting index for the
current chunk to copy;
* You'll have to pass two more arguments to the kernel -
ind and number of threads per chunk;
* Nblocks will be different - shoul be computed per chunk.
* You'll have to modify the kernel slightly;
At the end, you should get the timings (based on 10 runs, NMAX=1000000,
BLOCK_SIZE=128) similar to this:
NCHUNKS t, ms
- 2.76 - the original (non-staged) version of the code
1 2.72 - result is similar to the non-staged code
2 2.08 - even with only 2 chunks, we already see 33% speedup
4 1.79
5 1.75
10 1.72 - seems to be the best timing, 60% faster than the original code
20 1.87 - as NCHUNKS increases, the results get worse. Why?
100 3.53 - for too many NCHUNKS results can get even worse than in non-staged code
To compile:
nvcc -arch=sm_20 -O2 staged.cu -o staged
The best/average timings:
../best_time.sh ./staged
*/
#include <sys/time.h>
#include <ctype.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
// Number of times to run the test (for better timings accuracy):
#define NTESTS 100
// Number of threads in one block (possible range is 32...1024):
#define BLOCK_SIZE 128
// Total number of threads (total number of elements to process in the kernel):
#define NMAX 1000000
/* Subtract the `struct timeval' values X and Y,
storing the result in RESULT.
Return 1 if the difference is negative, otherwise 0. */
// It messes up with y!
// Computes the elapsed time x - y and stores it in *result as seconds
// (double, with microsecond resolution). Returns 1 if the difference is
// negative, 0 otherwise.
// WARNING: mutates *y while normalizing the microsecond carry (noted by the
// "It messes up with y!" comment above) — callers pass a throwaway copy.
int
timeval_subtract (double *result, struct timeval *x, struct timeval *y)
{
struct timeval result0;
/* Perform the carry for the later subtraction by updating y. */
if (x->tv_usec < y->tv_usec) {
int nsec = (y->tv_usec - x->tv_usec) / 1000000 + 1;
y->tv_usec -= 1000000 * nsec;
y->tv_sec += nsec;
}
if (x->tv_usec - y->tv_usec > 1000000) {
int nsec = (y->tv_usec - x->tv_usec) / 1000000;
y->tv_usec += 1000000 * nsec;
y->tv_sec -= nsec;
}
/* Compute the time remaining to wait.
tv_usec is certainly positive. */
result0.tv_sec = x->tv_sec - y->tv_sec;
result0.tv_usec = x->tv_usec - y->tv_usec;
// Fold seconds + microseconds into a single double-precision second count.
*result = ((double)result0.tv_usec)/1e6 + (double)result0.tv_sec;
/* Return 1 if result is negative. */
return x->tv_sec < y->tv_sec;
}
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// The kernel:
// One-thread-per-element kernel: reads d_A[i], performs a deliberately
// compute-heavy (but meaningless) mix of pow/divide operations, and writes
// the combined result to d_B[i]. Threads past NMAX exit immediately.
__global__ void MyKernel (double *d_A, double *d_B)
{
  const int idx = threadIdx.x + blockDim.x * blockIdx.x;
  if (idx < NMAX)
    {
      const double a = d_A[idx];
      const double x = pow(a, 2.71);
      const double y = pow(a, 0.35);
      const double z = 2*x + 5*y;
      d_B[idx] = x + y + z + x*y + x/y + y/z;
    }
}
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// Benchmark driver: runs NTESTS independent iterations; each iteration
// allocates pinned host buffers and device buffers, copies NMAX doubles to
// the device, runs MyKernel, copies the result back, and prints separate
// timings for the H2D copy and the kernel (plus a checksum for correctness).
// NOTE(review): per-iteration allocation is intentional for this exercise —
// it sits outside the timed region, which starts at the sync before tdr0.
int main (int argc,char **argv)
{
struct timeval tdr0, tdr1, tdr, tdr01;
double restime, restime0, restime1;
int devid, devcount, error, Max_gridsize;
double *h_A, *h_B, *d_A, *d_B;
/* find number of device in current "context" */
cudaGetDevice(&devid);
/* find how many devices are available */
if (cudaGetDeviceCount(&devcount) || devcount==0)
{
printf ("No CUDA devices!\n");
exit (1);
}
else
{
cudaDeviceProp deviceProp;
cudaGetDeviceProperties (&deviceProp, devid);
printf ("Device count, devid: %d %d\n", devcount, devid);
printf ("Device: %s\n", deviceProp.name);
printf("[deviceProp.major.deviceProp.minor] = [%d.%d]\n\n", deviceProp.major, deviceProp.minor);
Max_gridsize = deviceProp.maxGridSize[0];
}
// Loop to run the timing test multiple times:
int kk;
for (kk=0; kk<NTESTS; kk++)
{
// Using cudaMallocHost (intead of malloc) to accelerate data copying:
// (pinned memory is also a prerequisite for async copies in the staged version)
// Initial data array on host:
if (error = cudaMallocHost (&h_A, NMAX*sizeof(double)))
{
printf ("Error %d\n", error);
exit (error);
}
// Results array on host:
if (error = cudaMallocHost (&h_B, NMAX*sizeof(double)))
{
printf ("Error %d\n", error);
exit (error);
}
// ALlocating arrays on GPU:
if (error = cudaMalloc (&d_A, NMAX*sizeof(double)))
{
printf ("Error %d\n", error);
exit (error);
}
if (error = cudaMalloc (&d_B, NMAX*sizeof(double)))
{
printf ("Error %d\n", error);
exit (error);
}
// Initializing the input array:
// (rand() is not reseeded, so successive iterations use different data)
for (int i=0; i<NMAX; i++)
h_A[i] = (double)rand()/(double)RAND_MAX;
// Number of blocks of threads:
int Nblocks = (NMAX+BLOCK_SIZE-1) / BLOCK_SIZE;
if (Nblocks > Max_gridsize)
{
printf ("Nblocks > Max_gridsize! %d %d\n", Nblocks, Max_gridsize);
exit (1);
}
// Drain all prior GPU work so the timed region measures only copy + kernel:
if (error = cudaDeviceSynchronize())
{
printf ("Error %d\n", error);
exit (error);
}
gettimeofday (&tdr0, NULL);
//--------------------------------------------------------------------------------
// Copying the data to device (we time it):
if (error = cudaMemcpy (d_A, h_A, NMAX*sizeof(double), cudaMemcpyHostToDevice))
{
printf ("Error %d\n", error);
exit (error);
}
// Intermediate timing, to measure timings separately for copying and kernel execution
// (Should be removed in the solution code)
if (error = cudaDeviceSynchronize())
{
printf ("Error %d\n", error);
exit (error);
}
gettimeofday (&tdr01, NULL);
// The kernel call:
MyKernel <<<Nblocks, BLOCK_SIZE>>> (d_A, d_B);
//--------------------------------------------------------------------------------
if (error = cudaDeviceSynchronize())
{
printf ("Error %d\n", error);
exit (error);
}
gettimeofday (&tdr1, NULL);
// timeval_subtract clobbers its y argument, so pass a copy of tdr0:
tdr = tdr0;
timeval_subtract (&restime, &tdr1, &tdr);
// Copying the result back to host (we don't time it):
if (error = cudaMemcpy (h_B, d_B, NMAX*sizeof(double), cudaMemcpyDeviceToHost))
{
printf ("Error %d\n", error);
exit (error);
}
if (error = cudaDeviceSynchronize())
{
printf ("Error %d\n", error);
exit (error);
}
// Adding up the results, for accuracy/correctness testing:
// (the "Result:" value must match between original and staged versions)
double result = 0.0;
for (int i=0; i<NMAX; i++)
{
result += h_B[i];
}
// Split timings: tdr0->tdr01 is the H2D copy, tdr01->tdr1 is the kernel.
tdr = tdr0;
timeval_subtract (&restime0, &tdr01, &tdr);
tdr = tdr01;
timeval_subtract (&restime1, &tdr1, &tdr);
printf ("Individual timings: %e %e\n", restime0, restime1);
printf ("Result: %e\n\n", result);
printf ("Time: %e\n", restime);
// Release this iteration's buffers before the next run:
cudaFreeHost (h_A);
cudaFreeHost (h_B);
cudaFree (d_A);
cudaFree (d_B);
} // kk loop
return 0;
}
|
20,086 | float h_A[]= {
0.71258353642576, 0.9437452696564885, 0.9206953881231721, 0.8592231084651001, 0.8334756851657088, 0.9522180011198649, 0.8877285085592034, 0.5696394460435252, 0.5060703997456785, 0.7131640220581139, 0.8365663893428747, 0.67184394960691, 0.8337799946328023, 0.9468270925152771, 0.924509634091081, 0.5787146626579391, 0.8673707975414596, 0.730069948044419, 0.7073829055636107, 0.7868526510545227, 0.8050583315971139, 0.6814754229560973, 0.6421108666013022, 0.9753755099336483, 0.7889002952234283, 0.7832792895191147, 0.7568666917970004, 0.7359596242106965, 0.574816357194682, 0.8265537910580325, 0.8777737592115793, 0.8366343796423943, 0.6243995715609172, 0.5009847471963808, 0.6780733747787102, 0.6684415023415347, 0.7027646674883272, 0.5324299572073049, 0.9674946805445683, 0.5610162746036216, 0.7946587885255273, 0.9185043833919662, 0.601884148308258, 0.5435326454963854, 0.5594641002220595, 0.9233992749952352, 0.7969978222596753, 0.9439726540504675, 0.6656443694129452, 0.8663338750314528, 0.8827153479339543, 0.7154002650212522, 0.5470106221852029, 0.5549777915096932, 0.5496956849279759, 0.5618423134625589, 0.6076929824912074, 0.6915413892791744, 0.9868135873166675, 0.9703857050967521, 0.6528872036756693, 0.6520010315477578, 0.7530760854261076, 0.985449533769053, 0.901793438205563, 0.6863858953673843, 0.7718771441139334, 0.9911106478101966, 0.7179076965807161, 0.9310486541334716, 0.6096853327277735, 0.7991764709669371, 0.6416294428959843, 0.5204064588473252, 0.9682830327912828, 0.7977140939110717, 0.8226955068573667, 0.5981743655333001, 0.8311100577478979, 0.501515903180243, 0.708409299044187, 0.883528033197865, 0.6302035631546692, 0.8601226315566823, 0.6359158678868673, 0.927469855067556, 0.7002166875395283, 0.7258409081815276, 0.8687337801939524, 0.6450333239332082, 0.8942139756710968, 0.6877403129054539, 0.9547801819885797, 0.8400558584736584, 0.6571043275356192, 0.7883664694851024, 0.511519383894855, 0.7517392476284861, 0.596267414220302, 0.6423077676934194, 
0.5203111068432329, 0.6639548661560734, 0.5077009489754405, 0.61284474638332, 0.9777290851657225, 0.6576552232184298, 0.9174459385729132, 0.6127626610264075, 0.8190119210740856, 0.5212961550202527, 0.952070595948479, 0.7923895162175814, 0.5957670602751767, 0.5179063407259916, 0.5947491261638362, 0.9590013116459943, 0.8805658632651245, 0.8441419870408369, 0.9494011046236273, 0.8425538958513673, 0.7879894263643898, 0.6504892281447444, 0.8827587023474062, 0.6170808150934011, 0.5443297310216471, 0.8494691738989105, 0.9046237024347559, 0.613870171598168, 0.688161289172766, 0.5802631610221543, 0.7249807407757531, 0.69832621322955, 0.7036739773182186, 0.9456299956746025, 0.7128026820497162, 0.8843327002425498, 0.9548034065262399, 0.6990961497041266, 0.7264058084750737, 0.7887542851381751, 0.8308342091272223, 0.7533313014335732, 0.9100372312322489, 0.5034763125997175, 0.7130295260768997, 0.5869994224401824, 0.6921024837345886, 0.8963063509781776, 0.8214279635853072, 0.8097301995064923, 0.7862183877171953, 0.918979001570166, 0.7866135830021512, 0.7962558421206336, 0.8360318489694027, 0.8286114695493261, 0.6710435675092323, 0.8125473456882364, 0.884462838042755, 0.8770996185327732, 0.5417517803324943, 0.6712451592444417, 0.5860045907477631, 0.6938786473006938, 0.6072195712536603, 0.6031943010076863, 0.5253773065123979, 0.7670600300783468, 0.6593805586327417, 0.8973988396681901, 0.8725349590919004, 0.8123639372260484, 0.6430376621536917, 0.6189699743638282, 0.667128198011065, 0.5212299990742019, 0.957380109052755, 0.679248900075909, 0.6465876310575864, 0.6327854802961073, 0.9485333401744791, 0.9367491119345053, 0.691118203501712, 0.749763526278312, 0.5382590778543547, 0.7739798422558558, 0.8504005898426947, 0.7928238806828438, 0.7551335159714645, 0.8262588719515145, 0.7012880718143989, 0.7721134745452161, 0.8917225469427776, 0.9429357757033456, 0.7760954209726509, 0.8959465754109565, 0.7287038100275334, 0.7078942763165876, 0.7546505454245549, 0.6887097303397169, 
0.9569181981392547, 0.5224204798481984, 0.8847137435963709, 0.5426438138004694, 0.9117365303286484, 0.9438550008579121, 0.7433010399999342, 0.7239369969051395, 0.5266651816611452, 0.8261280258632345, 0.8786242946049334, 0.7323565133567475, 0.7163546624338524, 0.5166947296130888, 0.7847054868157, 0.6513974034207639, 0.8598661941174898, 0.8158200226811696, 0.561859857978412, 0.9842571893095778, 0.7005818489168854, 0.9303425657396649, 0.9521147911977863, 0.8893344416115201, 0.9509142982158878, 0.5792639636119324, 0.6666394254253285, 0.7886030120203201, 0.7878179940487615, 0.5635525135014681, 0.8672045594522463, 0.6948002129821609, 0.8050311522468426, 0.5636735057178186, 0.5512196963260043, 0.5400693206261364, 0.8444399238248612, 0.9015873612269429, 0.6621414559642177, 0.7134562950580214, 0.7585647182964537, 0.5827772826516637, 0.8111890828681326, 0.7192037842107485, 0.7236841470925824, 0.5378455120676691, 0.9315047824925041, 0.5727061930881729, 0.7569874843087577, 0.6863997923066211, 0.6200758990807165, 0.5142340720941587, 0.60136341819312, 0.9894402627751822, 0.6821570545040176, 0.9123406785605881, 0.9680097848141513, 0.5336691924594283, 0.7090192982045267, 0.88409692856142, 0.73474762352241, 0.893529011183892, 0.9755731425533558, 0.7020590982976413, 0.646898535069232, 0.5027260539030165, 0.8831959542730105, 0.8925262791419968, 0.728327810094765, 0.58024858502316, 0.5327603415333257, 0.9546029398755016, 0.5844092298113699, 0.8405027230661695, 0.5406575886164964, 0.7499534885916306, 0.8925339390759146, 0.9835959298580288, 0.9030955717166034, 0.6626903241795146, 0.8615128441004356, 0.5859924481805098, 0.9098872072941341, 0.530200310677194, 0.6786267576275032, 0.9397185910455172, 0.7038369367740276, 0.805270213875713, 0.8122140981437925, 0.5969701706304662, 0.7511856599205933, 0.6852381410724426, 0.8798222605963184, 0.5858732661731865, 0.5088990356254357, 0.5550031376584914, 0.8509208155091649, 0.8990969666597183, 0.8104275909486888, 0.6203560404595649, 
0.9151248995946285, 0.9029032429985959, 0.9684194013828693, 0.5903556795597626, 0.8798336881814987, 0.9252158113887137, 0.5733799685378431, 0.8337368563824179, 0.5865979211756935, 0.9885880675691048, 0.5719794387575519, 0.615103892786605, 0.7830581694555495, 0.7901700731499595, 0.9004824399804241, 0.8786361365860611, 0.7923642418825554, 0.8933585552260341, 0.8305577839679563, 0.6582890722257262, 0.8928780520175293, 0.5453314659573261, 0.8929095678958127, 0.5332328252195319, 0.8436047913048139, 0.5771493071704168, 0.9324752692521137, 0.7740823949660895, 0.9980356402534583, 0.6810813617416948, 0.9754020784683766, 0.9037152281963217, 0.582633283603563, 0.9317397022199099, 0.6612668648160884, 0.6773235907388879, 0.9116320410920093, 0.5989581419395688, 0.714992456441345, 0.7856025585105808, 0.8367477493866082, 0.819814111476, 0.817858679393882, 0.7702572832756096, 0.8925781251763694, 0.5001045300515969, 0.5849088457318519, 0.8412175440795404, 0.7582134481867373, 0.6041051240355106, 0.8479566044628457, 0.6893243444675735, 0.5889759743947528, 0.8482672268924676, 0.9541683755431909, 0.5040766940158182, 0.7448946560237617, 0.9613431586814831, 0.660023401388935, 0.920486966252434, 0.5787283953442948, 0.9552682071815595, 0.7490382850948829, 0.9662357241487874, 0.5976476724416562, 0.7458443692904115, 0.7972684918554498, 0.8567291464527123, 0.8186861475357686, 0.774083567579196, 0.6265846162757229, 0.7693771601011707, 0.5230009456363733, 0.5876529888331982, 0.5639345748289928, 0.8570187651876092, 0.9075657058487017, 0.5430685509606683, 0.602461159374339, 0.932591881014774, 0.862405571865372, 0.6077154465580991, 0.63254992094333, 0.9968075899018729, 0.8782350021270238, 0.6299884541304633, 0.92759220374661, 0.9438500850385676, 0.8419159040686454, 0.7252980497264035, 0.9443559096800993, 0.5986508591607652, 0.8335203787190659, 0.7599968066912302, 0.9742584182329284, 0.6303162082076714, 0.8187773274147472, 0.7181014254333244, 0.8102488767548341, 0.7753782508067082, 
0.7991996642502588, 0.5332088649363599, 0.7927482869720746, 0.915397414449148, 0.9771763878820501, 0.8499391696602242, 0.9009820884124069, 0.78877926099131, 0.7951108103726479, 0.7876856565480994, 0.6995987344096107, 0.9397886250602586, 0.7838729435522181, 0.8708433989047624, 0.6865366033891505, 0.9440825119404154, 0.6568467631733134, 0.9712517578682819, 0.9694246162717701, 0.6509010569733726, 0.7173327699426975, 0.8087682121424572, 0.675085751928499, 0.5910789930251856, 0.5108229707070235, 0.7961852129669247, 0.6355488279469383, 0.6625920588807883, 0.7065322299082568, 0.7464891042630759, 0.801487441722263, 0.6645093598209042, 0.5986984750927967, 0.551094877227462, 0.5420686238485513, 0.5614564379924954, 0.9920968429839983, 0.5854103137393261, 0.8555309803509398, 0.720725056383662, 0.942665365058227, 0.9596619064034269, 0.6991531748994311, 0.7575522210007021, 0.9949207817629758, 0.6300294815110885, 0.5526334057122798, 0.7904935985926167, 0.638257298474087, 0.5575940765343101, 0.5406163129636332, 0.7147273293714251, 0.9751516930653721, 0.6422629981183343, 0.8710439028620962, 0.6553498389555361, 0.971630041353194, 0.7821920229870756, 0.6688378287047458, 0.9648391965753027, 0.7112317685213165, 0.8240125190510863, 0.6215550422810514, 0.9786978265174555, 0.9623936503349972, 0.6103760376933891, 0.889601530047644, 0.6258658534862487, 0.9137406795071417, 0.5714522583410417, 0.5109140713226401, 0.6628316978642901, 0.7471241377923497, 0.5530424037565744, 0.7194441377312004, 0.8875835774443621, 0.7401764042542622, 0.7841855387961111, 0.7198612885278457, 0.5969250671315288, 0.7603691209087177, 0.6553399898272643, 0.5923338014772084, 0.7540792587515234, 0.555133172642924, 0.8514013463478712, 0.8413676201875154, 0.5161969838597282, 0.8176723958597552, 0.6329701804035187, 0.6097966042995271, 0.8691961402534989, 0.8861853860942654, 0.8649113745650447, 0.9470781267146876, 0.7271463383366784, 0.8816339392217776, 0.9712755839862515, 0.7951719998856182, 0.5986663667934412, 
0.5173242884032497, 0.8436045738240787, 0.761146024951704, 0.8003642962103743, 0.7165478490752786, 0.6789827752568929, 0.5016531467885375, 0.9189937110057875, 0.5222055128795855, 0.9760838359479165, 0.6631183237317451, 0.8951459317773066, 0.8457714943337481, 0.6299146054059197, 0.6058741999125485, 0.73902553286047, 0.786640780244537, 0.9528337985429878, 0.8727677859358682, 0.9354883227463164, 0.9405771710582622, 0.6425423706418333, 0.5612858756098058, 0.9166037973267831, 0.5237740411051877, 0.7785324955496773, 0.8454107359728535, 0.8029823873382698, 0.6204173952981962, 0.7631213475781049, 0.7007429843511221, 0.9247705452113163, 0.8860225641018549, 0.7766956364537776, 0.9479123187547875, 0.9429791417678565, 0.7726636388753103, 0.577641348955376, 0.8130384141413527, 0.5482238698394147, 0.6457076470193936, 0.768560597097963, 0.923629984901384, 0.6529684201244231, 0.995970623019518, 0.7094982593807962, 0.6698046327200005, 0.8277613919734599, 0.9337669175070893, 0.920430944120696, 0.5685115232586537, 0.6281879282371824, 0.5771907413416546, 0.8242814517292012, 0.7003688329075642, 0.5053777021509469, 0.8955256466065269, 0.9177561678137995, 0.7750997359855141, 0.772608413287879, 0.6205083610273919, 0.7110233081288355, 0.5486730270859718, 0.9807277336404925, 0.6280996954836116, 0.6986139821031161, 0.6072721529147185, 0.5690117695651646, 0.7088116486953875, 0.8379852227154454, 0.975867682102655, 0.9160829423681144, 0.9597687567725153, 0.9432417073153139, 0.6842193570007811, 0.6057495103912909, 0.7570251529104519, 0.79908735620281, 0.5016827481112256, 0.5056430253352864, 0.608891552384871, 0.5230373489317891, 0.5427066724089419, 0.5580004313910716, 0.8579177840996193, 0.8315044159057197, 0.9996531642033569, 0.6955844623076637, 0.5669596828642487, 0.9690733023549412, 0.5554098921655448, 0.5082804053133492, 0.7978738525426127, 0.6189626567943689, 0.9979454025961914, 0.7868948653295025, 0.6000035045012562, 0.6411177496942668, 0.8736537226999656, 0.5193617694329389, 
0.7829906596656928, 0.7756419050150642, 0.560962130720564, 0.7890411721086408, 0.9183988447818833, 0.6942016360182104, 0.7668714994435468, 0.5298319957922566, 0.7505933149973947, 0.8666417008306202, 0.9276864304374053, 0.7457378665999392, 0.6004537279771378, 0.58834274969226, 0.5862303736081094, 0.7234855651717269, 0.9651018057108938, 0.7991906983714627, 0.8478682857488911, 0.5454475520065369, 0.8009693003429523, 0.788350165493364, 0.5619953405296438, 0.9354266953404652, 0.8304550643170993, 0.7305745219009963, 0.7712812805264317, 0.887374286752508, 0.9983817804592995, 0.6549520687698405, 0.654130802798131, 0.8189760313971159, 0.5283466453235721, 0.8632109595358395, 0.6652600710466758, 0.705289662715719, 0.8271324889920516, 0.8803138557972723, 0.6886083942925754, 0.8514032065269317, 0.995695523420381, 0.6644132815474887, 0.8943845142856734, 0.8841145279298257, 0.8873059470770166, 0.5648661684996661, 0.6158515126467716, 0.6356973016016235, 0.9756332157017156, 0.7437042196617061, 0.5388759501200435, 0.9425770236842724, 0.65189107582094, 0.8348991833910724, 0.9837582363679704, 0.5016754921004539, 0.9492314677990115, 0.9550024431794458, 0.7971672634706622, 0.5377591169959118, 0.8451920389913687, 0.6123113503525184, 0.527054686706846, 0.8912466335869665, 0.5671633875101594, 0.7427681775811619, 0.9354675811805824, 0.7677746617456613, 0.6463878103783973, 0.9573308600631771, 0.7433130804377044, 0.5259947567478552, 0.5555872782739512, 0.6934683570175875, 0.7140759196350089, 0.8104548174071708, 0.8198840268868786, 0.5462270944471935, 0.9158398414283511, 0.636135765516701, 0.6192306088210926, 0.9487230351420807, 0.9848261750687506, 0.7446327324821456, 0.9003996740154325, 0.8859144251828615, 0.754547966160771, 0.9384675524741213, 0.7403921981623997, 0.6124357729647336, 0.7176528724933653, 0.9287863980654122, 0.8847391136608761, 0.7329810412039109, 0.9917350846135874, 0.8200029632081569, 0.810380615630325, 0.5533954862203829, 0.7011669143453637, 0.8147651177094625, 
0.6444274830498993, 0.7142596747220392, 0.8859060057617336, 0.6713452295027271, 0.7536976349254185, 0.9592465974501116, 0.7021470327341166, 0.8117328000581667, 0.9704177708645978, 0.8125265262528147, 0.5389296562689289, 0.6400708364972262, 0.9877328203324811, 0.6748607415913668, 0.9097614432846013, 0.6229131237034216, 0.656523389141547, 0.7224784344677345, 0.9918116155713941, 0.7567577253077007, 0.6112454183349776, 0.8423564338381015, 0.7042577596179276, 0.6927236494905218, 0.6382721460145249, 0.9111694696012107, 0.8716479065385577, 0.8597253966729963, 0.6297632013398569, 0.557690269706983, 0.6592820042919751, 0.7257587860751239, 0.7136247547188819, 0.9344596677854656, 0.7283566293346293, 0.8933695224543948, 0.7505864080453439, 0.8418922125400146, 0.7343882408660187, 0.5163095965748847, 0.5530927429162583, 0.767560926344675, 0.7830765312016383, 0.5260576649294654, 0.8180234374570983, 0.752791108758454, 0.5702129711076724, 0.6935839609302581, 0.5050772248383668, 0.6713426222974765, 0.6426731278543003, 0.6966164034384471, 0.6533845967278784, 0.6146315145888697, 0.7135426215435186, 0.957011051994689, 0.9712273769849447, 0.8687146142000111, 0.8429091385284435, 0.7119514423131488, 0.7694410029844769, 0.9015623287963879, 0.8742639712931204, 0.5334413879082115, 0.6997450781751013, 0.8432294854379161, 0.8923173538420839, 0.9689358044611059, 0.8192005486882692, 0.6258231322955075, 0.7846730441429192, 0.9352782650352327, 0.696014204685249, 0.5086860655291409, 0.8141924206035386, 0.8066190464373946, 0.5483281763047418, 0.5516733944498705, 0.9118464041076267, 0.6885099319351361, 0.6741390391827364, 0.745912346200908, 0.7141000224223414, 0.9253204155464514, 0.8986300612934084, 0.9203825396725289, 0.9096151101753399, 0.7745298339795352, 0.9412246596299056, 0.5734440162344137, 0.8833980048600965, 0.9302955097980272, 0.8113590461675352, 0.7821859636660238, 0.6863293765477997, 0.9423746121091494, 0.943430786366662, 0.6003664501778005, 0.9865544900046195, 0.8437131278025773, 
0.9919807856335923, 0.5480618845019778, 0.7386995546944932, 0.5233586150702574, 0.916943409448687, 0.8966364311655969, 0.6698274986347155, 0.9487474834811256, 0.5166194909579768, 0.9466304624201469, 0.7980180738793599, 0.7414200963702453, 0.8919535084788699, 0.7041640965570384, 0.8472559174448251, 0.5270986976883358, 0.5566790345094936, 0.5401229548209057, 0.550073541354525, 0.8818362131068015, 0.6347823484014132, 0.8533784938158802, 0.7711051846229502, 0.9490360325547835, 0.6196987485256156, 0.6673749301443779, 0.5932872868909324, 0.5085859102782624, 0.6265713166320779, 0.7340130334327631, 0.7152612695206612, 0.8141564091697439, 0.5108927975679174, 0.6681507207679819, 0.7021726051475609, 0.8255771183630676, 0.9638440144445725, 0.685010805130611, 0.5120269430984116, 0.7855958884869083, 0.7015669903723339, 0.9241107930493606, 0.5328396748379651, 0.9832376590720164, 0.7493768271195538, 0.8806184328567193, 0.8757431610669077, 0.5409318621317533, 0.9993739142421991, 0.6655882906417073, 0.8828949020586496, 0.9587117580218678, 0.6761241254013747, 0.9068179180997391, 0.986992171406669, 0.8278250642884979, 0.8657264645025107, 0.7538109817431431, 0.5493800595429528, 0.5437481875466024, 0.7590839204076021, 0.670802613496045, 0.8631083563640393, 0.9675965096759973, 0.7806993136173764, 0.6833957153069481, 0.9019725587224267, 0.5088749126214056, 0.9930863170294465, 0.9794484462886731, 0.6339622458246157, 0.7000347747763983, 0.555477848025677, 0.5174149371669794, 0.6971490954093836, 0.7444782064407908, 0.5898186921315699, 0.7047224818271409, 0.7761245988218037, 0.7938051786224241, 0.8089933912610798, 0.9644321545170144, 0.5317180202614642, 0.8742794286947406, 0.9288459981545946, 0.959423389508404, 0.7423343151795814, 0.5082548037364034, 0.8032622849868262, 0.7438245340537308, 0.9338334472952289, 0.6657462533139719, 0.7121360263323092, 0.8235983676530871, 0.7105179312348341, 0.5501383857533584, 0.8826814420384632, 0.713749794040426, 0.6045102308991481, 0.8039284086886775, 
0.7747766025949462, 0.9533126755547137, 0.7479344518079846, 0.5402234619398596, 0.9659421380711293, 0.9666919981989368, 0.5195449498975455, 0.9735190791956028, 0.8457407492389709, 0.5586995405277024, 0.6569773940560459, 0.7176123197068751, 0.8963947738580162, 0.6037436001381042, 0.9644371599768542, 0.8986475841685352, 0.7427166268043471, 0.7215345391793284, 0.5481606391976488, 0.5883601975867647, 0.8232812982347041, 0.5407436686023126, 0.6289910482430299, 0.7204191051073386, 0.6435063190494525, 0.8269695508603205, 0.5142920762765903, 0.8993053071861359, 0.8589546140760083, 0.6343676006576422, 0.7136222647793087, 0.5525829074944046, 0.7143336736203054, 0.595047019732668, 0.8502756568552672, 0.9753267624248951, 0.6161710819832685, 0.5578067871699023, 0.6187394725896214, 0.8963787929924669, 0.6668742113242199, 0.6976492590106183, 0.8995255473349364, 0.5662384376616449, 0.6103421707703616, 0.6938267059093658, 0.8756828648396893, 0.8635970551995704, 0.5100302521939045, 0.860083047189119, 0.8128081061671085, 0.8751648502842232, 0.7329430336799203, 0.8093053147516966, 0.5028857606578694, 0.8527953480107107, 0.6540980964700001, 0.7699367283817837, 0.6029871268577096, 0.8818247497635938, 0.934166561402479, 0.6713272670040152, 0.8108911915959802, 0.5128820000340287, 0.871769583272735, 0.8041287287879824, 0.7902929521375912, 0.5991032567616544, 0.9816940394747686, 0.5692521034648306, 0.8044551275754994, 0.7368691891918384, 0.6666959109496619, 0.5771668211988934, 0.8515991555487352, 0.6503248617433217, 0.7288760816280377, 0.8438712716493868, 0.7475484678652595, 0.77535405093933, 0.9713638094391079, 0.7219949899058298, 0.5149764928082601, 0.8786559511278028, 0.905884502397337, 0.981900726169626, 0.7928243496428572, 0.7929390156991019, 0.9494058636947551, 0.8044386704976276, 0.8370748193000963, 0.5102392965035301, 0.8692948448591203, 0.6739911492601852, 0.9947332603850796, 0.693169306713106, 0.6711295510435196, 0.6679329607486186, 0.5970591460582286, 0.6106197652804992, 
0.7914647736685942, 0.7443525767877901, 0.6626752350535249, 0.7659460876364721, 0.5675963917414331, 0.8428762119401604, 0.7558060413833382, 0.5921220050077072, 0.9757777506831953, 0.8915300768176893, 0.654742816327357, 0.9007359969068236, 0.6507630596851204, 0.9461626234882754, 0.5720427696463216, 0.9193266997416893, 0.6201953672255086, 0.7025481564022731, 0.5772887462264888, 0.9640907796211282, 0.6917415383565171, 0.8597395741823906, 0.6187001793149751, 0.8762378471864796, 0.7293477029714699, 0.845979342565996, 0.9717941790260398, 0.7292541395601781, 0.817756157639548, 0.6151464558052739, 0.9565179486016677, 0.8826991766134649, 0.6712046283436868, 0.5616515383745586, 0.5970794676198928, 0.6201494133774202, 0.930718736477353, 0.7818159126853362, 0.8427979425470693, 0.9599680010095838, 0.9241996279467779, 0.808145893977998, 0.7539729692907575, 0.771411817440594, 0.7517509317387727, 0.9309574336285121, 0.6683948909568942, 0.8808756827736964, 0.883315693343921, 0.9040978859276319, 0.7525411987934121, 0.7987744618689023, 0.8682890617353809, 0.8280358506471979, 0.5194316330018682, 0.9673418056822919, 0.6859097103654777, 0.8183771202798564, 0.7752523400547329, 0.6432266073128591, 0.691211505447832, 0.9112576906063059, 0.8577853784706957, 0.5729890091718381, 0.6007690681050026, 0.6632796338409533, 0.9801096111756856, 0.818208292047717, 0.617776176079039, 0.5326513374889009, 0.9509988294969339, 0.8475655035279077, 0.6176273133418977, 0.8487019970596213, 0.7871444346786552, 0.7736180182602779, 0.9180295773664335, 0.8435062321906225, 0.9012969003203352, 0.6510536557457411, 0.5234073192696758, 0.6265017197711804, 0.5120070117573083, 0.6345301649585274, 0.6547800231391622, 0.7765881812273325, 0.5961174559541295, 0.7669190496450939, 0.923320993647814, 0.782138417238692, 0.6134028086012522, 0.5885201651831271, 0.8321815901086287, 0.7707703328743123, 0.7060080694615528, 0.5904539979088258, 0.8192205258562927, 0.5381833581669011, 0.5243603726637902, 0.6801224179720379, 
0.515945335998915, 0.7746325545678225, 0.878954599349786, 0.8397217685876506, 0.6506084429717547, 0.5996323304242284, 0.6116977805178427, 0.7840076859902114, 0.5246285420019001, 0.6398959733869086, 0.5259783060879877, 0.5163707643509537, 0.7298477121876377, 0.5549408622321745, 0.9017545017365696, 0.742748981859947, 0.7678322545093016, 0.5678502216904877, 0.7602976280550229, 0.788445081981796, 0.8625964088040415, 0.779375791650617, 0.667045019364082, 0.5752889876337075, 0.956900515585762, 0.5843788352608916, 0.5330638323272151, 0.6539607937028273, 0.6745402300982816, 0.905687688235843, 0.773058672617819, 0.9708909502963738, 0.7098368421365422, 0.7272191651243115, 0.5140598058686214, 0.6424178019630684, 0.8747823672203894, 0.8450730616205941, 0.888684382728085, 0.8166555974916159, 0.5191810851405929, 0.7905863341590358, 0.7873559890014894, 0.8845759340610122, 0.5664745048978967, 0.9828287979133158, 0.5502096261321896, 0.5386186219000906, 0.6039118836381654, 0.6022642099366677, 0.9534482490548415, 0.7769585607990641, 0.5184543285450576, 0.949263525147064, 0.5656963399771942, 0.6646724321668421, 0.533024674524827, 0.522073771510341, 0.9847663968908342, 0.9945672843083314, 0.8192199780072003, 0.7979919778884534, 0.551599904257683, 0.8055968573234769, 0.8862942297433816, 0.5397794419954786, 0.5589972302462374, 0.8019162260257231, 0.7797213618218266, 0.6645076428417922, 0.6241176769830983, 0.5381906975851345, 0.904144567454535, 0.6745419002099373, 0.6863743141718586, 0.7524141635102054, 0.9038944936204936, 0.9425583177807788, 0.8048074886813543, 0.8794991363286475, 0.7825893426055224, 0.6421359671570326, 0.9626351838061296, 0.6282078993542483, 0.7176463708845873, 0.5683521514382626, 0.8364006858299015, 0.8333476210550076, 0.980752746214482, 0.8993548040001853, 0.7149662114044817, 0.6644374982930046, 0.5707962202313264, 0.9530771024131286, 0.5286046410249239, 0.5353072269320144, 0.8211310793573025, 0.837912889459326, 0.8949652551699476, 0.9673003675673404, 
0.7210143588025598, 0.8049044750985852, 0.6267722367105208, 0.8585675379938196, 0.7355268391755676, 0.9996600387891965, 0.9710329877453985, 0.5622463735890881, 0.61255401838754, 0.7277050797475165, 0.8162385339928873, 0.9533706055973201, 0.6674843236842282, 0.5885776367036727, 0.6197274829069264, 0.8246720016673232, 0.6058840080235935, 0.7926475862906254, 0.5033446940245083, 0.9326346663699021, 0.5330370359045363, 0.8295449020003389, 0.5137780504468303, 0.5055505265092, 0.7790721350123311, 0.8965136720497515, 0.8534208835092831, 0.951060075425187, 0.9858847277311372, 0.9511879464075592, 0.9849610117964164, 0.8771758612430496, 0.7590595235591149, 0.7422888869565519, 0.7961690442930168, 0.8368438612861459, 0.6925360881405136, 0.7526131021049893, 0.5833862260553799, 0.8382131437259994, 0.8550672321687508, 0.7616812411962632, 0.9398710039290469, 0.862420270068496, 0.6535784389159573, 0.6860681019442014, 0.6001907800995473, 0.7244998269881815, 0.7636977321509688, 0.6957347244122754, 0.8360814078654459, 0.9371228804458824, 0.5354172355226995, 0.9485748397060502, 0.5985124632562677, 0.9666592839532122, 0.5903386440968348, 0.8371202313548307, 0.8438017014941701, 0.7796588570431238, 0.8017270378191084, 0.8072686194955634, 0.7252097736910474, 0.8261912416264564, 0.9999046437450005, 0.7921513527726893, 0.9334395365733171, 0.869327663478012, 0.8879495339149239, 0.826570978605694, 0.8887682454524113, 0.62543552543386, 0.7125636515561412, 0.7036169749522231, 0.537056325788922, 0.8048485013213706, 0.5208962066986518, 0.9302339166104587, 0.9562188264866525, 0.6861451338194013, 0.7830112394319961, 0.6132083837565194, 0.6571851529149473, 0.5190320516164506, 0.7298873151793345, 0.850338796829677, 0.8196602622102767, 0.7459521913816267, 0.9087717222546937, 0.5578111141274096, 0.5751552030208811, 0.7778979841349994, 0.7182521582396033, 0.8804629073652508, 0.8501675237088896, 0.8994366039501221, 0.890795437344571, 0.9954038587368054, 0.7225654732905163, 0.639345480462703, 
0.7443500545672248, 0.5774957657230317, 0.8951989026505699, 0.9128362200012641, 0.612490801620226, 0.9449620352500374, 0.5109108962305386, 0.7706582683078309, 0.7234512119064374, 0.8858617649599068, 0.9990517425686181, 0.703211814468955, 0.8554451617386588, 0.5924268998449187, 0.7749339372444313, 0.8998430298684459, 0.7890871897981135, 0.6024062288867305, 0.9060624246213196, 0.6999861147939128, 0.9590137806859418, 0.8442368802244784, 0.686188279708511, 0.5319726705549261, 0.7945995066994181, 0.9635355504407999, 0.5970187530842406, 0.9290667999274557, 0.6002566693172886, 0.7277110739594428, 0.5207057660434757, 0.8960192005927472, 0.6792360600908927, 0.6670802833699601, 0.8755512074922864, 0.5581231751283295, 0.6496332665966141, 0.749567340074555, 0.8274100317955861, 0.8046457467781507, 0.9724361407360844, 0.5778257594202147, 0.6611036271351692, 0.9140933170270766, 0.8764736426036965, 0.7011057640623151, 0.7198624757406544, 0.8488526435331403, 0.7858103287247085, 0.6221508356376823, 0.7065555505001554, 0.5987635227742425, 0.7382572098989997, 0.8240001184819765, 0.5031494144744548, 0.6034720276762657, 0.5034197927879452, 0.5248136618074848, 0.7761571145774067, 0.9377436143986703, 0.5841696354920428, 0.9755942881884505, 0.6211148231438092, 0.7137428054501387, 0.5832492861172928, 0.6171729341395377, 0.8666950028279263, 0.9978967748585479, 0.9808143151021987, 0.8062680978237684, 0.6566834351220883, 0.5994566923722253, 0.9813521022815445, 0.98269392132334, 0.8824959274701563, 0.6145092748572905, 0.5112441290363241, 0.5487147271996367, 0.9752183554412092, 0.9491062232331586, 0.9048297429180137, 0.6882057916302396, 0.6218380883344253, 0.7295903237503347, 0.7341229018349411, 0.9983380491765581, 0.7959172834808736, 0.6442191778145372, 0.8367563448842603, 0.6976329628679048, 0.9706318096203845, 0.9930175936026913, 0.5523095882933602, 0.7045655298020687, 0.8932722976549017, 0.6090441407750429, 0.7143861629232967, 0.7012626606751333, 0.8182014734883789, 0.6606419446261524, 
0.9247023175610474, 0.6056313369588414, 0.6571028942559658, 0.6761216662891688, 0.9338228976825216, 0.6398994815588186, 0.7165212639526815, 0.7389040799928241, 0.8571539574328586, 0.6661335372271909, 0.6533179278337713, 0.7350008878263343, 0.7136219071029495, 0.8528560051258054, 0.751641367197928, 0.7652129415548827, 0.5407935198662027, 0.5683933245222302, 0.8572438767190844, 0.5858741860195995, 0.8228428384404918, 0.7607444669642238, 0.9114779128390762, 0.9316629310666302, 0.7623809449412513, 0.8943395254915469, 0.6780927381301943, 0.5083401752868353, 0.5957562572757016, 0.973190790170287, 0.9161311693349055, 0.5388292867801495, 0.7357036108303314, 0.6222965041489015, 0.872146366565469, 0.957611227495945, 0.9990170405471189, 0.9661861351276844, 0.9491142022246264, 0.7795388404237542, 0.8324689542855837, 0.8133818011611342, 0.9376772633581796, 0.5119224330874681, 0.8617285704739445, 0.7228879987592403, 0.6198055057106542, 0.760697156337614, 0.6074880593720888, 0.7798331758161476, 0.8975618111652419, 0.6896402452330366, 0.6272739068079165, 0.8904987586964119, 0.8479618399428877, 0.7326044980756212, 0.7524693973785919, 0.5002879664836983, 0.7667362297215259, 0.7823602990085546, 0.7695603395162074, 0.8137567511199904, 0.5313453993419273, 0.6702593013455829, 0.51168620271412, 0.6122823140619947, 0.7406698180858284, 0.9910096316529335, 0.7816812049146011, 0.9444753620561055, 0.9445224701494794, 0.7060067218276174, 0.6204775961580438, 0.9703137796512047, 0.6990695266395718, 0.8087447598987916, 0.6120744928534355, 0.7531459689335372, 0.5144562789046769, 0.6884566846915376, 0.8028883842320047, 0.6214495682264082, 0.9221317706772127, 0.6932729732278049, 0.986998489878083, 0.5930187082437488, 0.873215128956861, 0.5568332547639833, 0.8989012503326044, 0.6890064741122954, 0.840245121993885, 0.9363684275166283, 0.7871253961816997, 0.6463059760046133, 0.6463282287052281, 0.5295071974501699, 0.9627447159500153, 0.56903836404096, 0.6867333316730952, 0.5172083776522471, 
0.9372014778682565, 0.9753142335722738, 0.9465515409655796, 0.7893469380693585, 0.7386605925520018, 0.582434872044288, 0.97758794613937, 0.9770994509243662, 0.7080830520957238, 0.9945254784639388, 0.6864347751492392, 0.9597694907721712, 0.9598821657693313, 0.9877420702775573, 0.9528541474500252, 0.6579276524479447, 0.5317347338616016, 0.646943486656282, 0.5138809122965218, 0.5710838708608528, 0.6138138113544108, 0.7894716794715031, 0.9784148708425304, 0.9106875185157275, 0.6852399487424605, 0.5827166582999831, 0.664759540324743, 0.6107014594253368, 0.6433912903872243, 0.9633742499304065, 0.7276203006517314, 0.6359542449858765, 0.6566832663070469, 0.771518189705776, 0.9641954885680829, 0.694684293495242, 0.5174193517716869, 0.7757971845679347, 0.53831189191719, 0.814265516185686, 0.5799463980938517, 0.8598748637890019, 0.5726386135524455, 0.6797473365806284, 0.7309611527212057, 0.6392827784742439, 0.7656803272261397, 0.8379283444331087, 0.5001091681236968, 0.6857668297732477, 0.8857400155187029, 0.9576421074352847, 0.9543237930987547, 0.9496419392403539, 0.8453401383870729, 0.8252689169802908, 0.7578328977661302, 0.7857916216941423, 0.5452263578289622, 0.6570886173621884, 0.7370427332868243, 0.7617167143121543, 0.5998328644212167, 0.5058617210997116, 0.5069393684537671, 0.5802086335097907, 0.5659122006299844, 0.5786321286816241, 0.8751118972850436, 0.7340376149371088, 0.6138361671329657, 0.6544340309036897, 0.8554821460719114, 0.6822608329221935, 0.6760963202256227, 0.7157233442047779, 0.5276928841140658, 0.7969761672712519, 0.7305230331465795, 0.8036633217173477, 0.6452551559657691, 0.787396414297356, 0.6229239870759483, 0.5063817343377289, 0.7445390715507719, 0.682981883904349, 0.7204495151726669, 0.9111213781287092, 0.8924691868519705, 0.5899571646477099, 0.5713872617202402, 0.858393935161524, 0.5685636898720097, 0.5912107106083382, 0.7753881001800831, 0.8431646774194466, 0.9715115723123376, 0.8222928985267971, 0.5093851808889638, 0.5270229219880631, 
0.9408896818040107, 0.9492751417761898, 0.6848522010364042, 0.9593486812068582, 0.6695725682408635, 0.7049626389646988, 0.8718856700517885, 0.5148904562243312, 0.5386943120311469, 0.7611322504056508, 0.9289282180293601, 0.6237566098216816, 0.5040483052872575, 0.8101400635642907, 0.5701664935002722, 0.6921780601008592, 0.5939073779864366, 0.9274390686046475, 0.5920406755311794, 0.5808167567314839, 0.8264652566390418, 0.7913821342803069, 0.5071692785637805, 0.8071463619927647, 0.6096407168411677, 0.8597777286597732, 0.8583512966484677, 0.8992403327323764, 0.7571135290077085, 0.8884422612788611, 0.9971989778430387, 0.6116709071200889, 0.9888930078121123, 0.973898106229006, 0.7844380517794443, 0.7304589264540916, 0.8736273140102819, 0.8731020262317375, 0.6291880383785908, 0.5801776447193354, 0.7011309338214087, 0.5101074167539241, 0.6090094748383474, 0.5844974647465624, 0.7039300203056569, 0.5641526684247933, 0.94069357652075, 0.9522796448776234, 0.6491785940151138, 0.612558412222018, 0.7687480436285772, 0.9080120977367186, 0.6618991714941017, 0.68203604823603, 0.6408153548161938, 0.9102141440061345, 0.7169190959529876, 0.8408583284644033, 0.6691200187977031, 0.95162476925004, 0.7491680969525104, 0.8066404793280946, 0.9022742264104543, 0.754682900804293, 0.5631168069793, 0.8335013721962485, 0.6028671758873339, 0.7098685111886408, 0.7181047934319191, 0.8931758113386886, 0.7204296169787039, 0.8156969827050438, 0.503803821200643, 0.8417156706829476, 0.9875637079957222, 0.8158422869475636, 0.9632804206355676, 0.8308148384176718, 0.609775827373206, 0.7886830183612331, 0.5173575769135101, 0.90329348090128, 0.78082807201314, 0.8549472893059926, 0.9143323199468567, 0.9942398113026473, 0.7527475385234945, 0.7927615270630787, 0.5025203940686152, 0.5053058447401944, 0.6590819854124346, 0.8126056160720236, 0.6532706368851862, 0.886174706878031, 0.6805895391879944, 0.9277110804796326, 0.7546831174324251, 0.8785972620326986, 0.8164211132014155, 0.6591305311502169, 
0.5166270444180117, 0.5005478785522279, 0.5025197754633053, 0.9861994922523984, 0.7235572974171877, 0.9606228689247334, 0.5443755185712011, 0.5974713368115474, 0.5014102292804603, 0.6168657953071558, 0.7805783719458079, 0.618045888328607, 0.5738824327094925, 0.9289853824006609, 0.6773650988294466, 0.6581044716110276, 0.6629212092779304, 0.714660710133143, 0.8176639208896285, 0.5143265106287441, 0.7727415409359846, 0.824600816255011, 0.5730402498594624, 0.8478243658275637, 0.5058505054228425, 0.6384661171197543, 0.8725138029563118, 0.7370443228931365, 0.8592372458667641, 0.647531678437917, 0.7165143480017844, 0.6685442813765865, 0.8470350574955468, 0.7527380396119554, 0.5895914880323341, 0.7032772210875409, 0.6786255626921595, 0.5108472434195213, 0.9443627977189755, 0.8688771888292155, 0.9208666309376199, 0.6848943080646499, 0.7599534477930885, 0.7990014667187644, 0.8957445606003354, 0.591616002940617, 0.9588009559926592, 0.5752193431239603, 0.8179173330505518, 0.8905353325879535, 0.9906945615010826, 0.7377341687226575, 0.7187962618493299, 0.6020245269365019, 0.6077000193498658, 0.9852342051408903, 0.8463697828689472, 0.5162449834885354, 0.8781308082531969, 0.8950002857063479, 0.6845689322152079, 0.9738479222468419, 0.8731663080426543, 0.6524635137024692, 0.8858367960779697, 0.6605781310710721, 0.7886098921703026, 0.7352586231516925, 0.6274925872318371, 0.6861375857859549, 0.7398540553716976, 0.6123402049678333, 0.8432541731696429, 0.9188201170028326, 0.5141384852878117, 0.7721675951402276, 0.7071447955761442, 0.6319362152926733, 0.591782450605199, 0.9994875576845558, 0.7331053146569402, 0.5488148702813604, 0.8754846491389301, 0.8527590227974223, 0.9348191081854695, 0.9472951356834327, 0.5533919914303995, 0.6544681449852674, 0.5770945356815818, 0.5468347507236844, 0.5708039227437581, 0.7505494341410706, 0.757369762712536, 0.9522209284837945, 0.9098277741499707, 0.8899452016730216, 0.6978829220345595, 0.566607818187628, 0.5859233507720386, 0.8727078643164607, 
0.5811561921430146, 0.6950085641215169, 0.5787484098930784, 0.827305569214126, 0.5948866502785042, 0.8733623364114673, 0.5919565933977698, 0.8025954097026458, 0.5914051290031827, 0.6589836330972774, 0.5710300345719479, 0.7061424128186506, 0.920146080994803, 0.6857803002035079, 0.8593906714939876, 0.6863462843424096, 0.5979589734662203, 0.8412394335414491, 0.7303498729288638, 0.820376717635465, 0.7171353893473222, 0.9613287271465933, 0.5346286946173922, 0.8561822374414799, 0.5822632502375913, 0.9200291218467533, 0.6579127621924356, 0.7330024902119965, 0.9621592480371837, 0.692258044889268, 0.5074980712729527, 0.9512309392365335, 0.9086268528337338, 0.835448051104063, 0.643050448038017, 0.9175407470729278, 0.7823325656483537, 0.5084263508349636, 0.822877509143672, 0.6082651425965124, 0.7381381110849157, 0.5640344206475154, 0.6551545205424827, 0.8729141292700272, 0.6728910107458121, 0.6559282648417928, 0.8873944413660289, 0.6105604245746832, 0.8454338800860075, 0.5087842930595532, 0.5856354132784913, 0.783985881802624, 0.7262458403193034, 0.570883053908543, 0.5581588809360336, 0.7773295411193825, 0.9026469682380485, 0.7251715822972391, 0.5558384751336563, 0.8310602368244604, 0.6100510980084651, 0.6785089582198204, 0.8319972925138366, 0.9085606604134651, 0.9406195606372361, 0.7773949835997882, 0.9128527094806382, 0.5568870702316314, 0.5360000688105936, 0.7961686181285339, 0.7292585735098777, 0.9561611129941274, 0.7685485715083201, 0.6846337590834433, 0.631272444186098, 0.7895259976643556, 0.6499001012417426, 0.6162123321061501, 0.9588587498673423, 0.8028142717209396, 0.8496070248772183, 0.6203884544138225, 0.5803183629149589, 0.9676936190629595, 0.7078612589516876, 0.9999369727528169, 0.690945467339722, 0.7117255508176943, 0.5681393424111979, 0.6517899923481034, 0.5352981363718043, 0.9496452374456399, 0.7489438170371365, 0.6651279750051428, 0.8126147673462905, 0.7664801118523521, 0.9529516062777317, 0.9966077559686695, 0.9650180626217271, 0.7068246453735468, 
0.7452992324058477, 0.5755454726499701, 0.5766521009726014, 0.710925011704697, 0.8378223521630321, 0.9345269700907053, 0.5970288350521267, 0.6394875430484683, 0.5620226186339887, 0.9300945516105563, 0.7014490249231227, 0.5410755460556977, 0.767099600917758, 0.7909585553959118, 0.859776743397277, 0.9699504004204611, 0.6548132298507418, 0.9451222928171689, 0.8817093114495128, 0.919248671504958, 0.5713858027515903, 0.6141203228109731, 0.922317503773835, 0.7471462584855839, 0.6648046525048605, 0.9690484796237337, 0.7831186453512742, 0.8802350590757599, 0.9177016971875134, 0.6462235797814192, 0.6024205200151627, 0.8971568708516251, 0.5400569319103325, 0.774787080022818, 0.6773137066504934, 0.511560054050836, 0.7678835480360031, 0.6851715984991056, 0.6464258183565286, 0.9663394658264365, 0.9620439865242435, 0.9116094758164988, 0.5270091516203059, 0.6995988929490184, 0.9375536802390733, 0.5728241712390623, 0.7206947148503666, 0.5244262507177115, 0.8567726020159507, 0.6085672179339692, 0.7388807238362609, 0.8762648393842013, 0.7196156107815294, 0.5273391783831565, 0.8946715859614593, 0.9392933284366975, 0.7794020239230077, 0.8602930550410723, 0.7647312863745763, 0.6214681099936382, 0.5513464201928526, 0.7813931038062164, 0.9463282048041644, 0.8745739527355054, 0.7196693327065264, 0.8673677047209338, 0.9301217869459084, 0.7712906870433387, 0.8899657541553772, 0.6963501667010277, 0.5530216357294905, 0.7327232602213805, 0.6232641445605909, 0.8186445038089172, 0.5862002260809425, 0.5055510548063362, 0.8668967499221513, 0.9679828288638266, 0.978017481080748, 0.9433988796451038, 0.9162575691960841, 0.513190304484771, 0.8292153734412899, 0.6651233042054872, 0.6007829472047947, 0.6626418205963522, 0.6184286702326396, 0.5436301325641089, 0.5014771937624869, 0.7573680751433729, 0.9350384437769215, 0.6569502243241548, 0.548912881915937, 0.6516072073324327, 0.9859434576271102, 0.6881510356275797, 0.9710611320541929, 0.9651327839311454, 0.5052227147280997, 0.5262572239079404, 
0.5589710941589601, 0.7327637259825188, 0.799622232093661, 0.9241289853466677, 0.9481426217996702, 0.9322073235384385, 0.7492754184383004, 0.7285837441193371, 0.8845073654144306, 0.5869784862851803, 0.9661010871458041, 0.7701759418458194, 0.5271713037846744, 0.9493579333405515, 0.7129594374010306, 0.8180393357431985, 0.8437649434294618, 0.5088373372506763, 0.941058281270048, 0.9755760662375241, 0.7771714780113625, 0.5277312679524264, 0.9441745220070462, 0.8827818946940977, 0.6439467066519928, 0.9495237796567053, 0.833894581078467, 0.7739864209194467, 0.5438731145248927, 0.5096798496564299, 0.5578486150332156, 0.6541828277064095, 0.8008180252299602, 0.995591242030405, 0.6848086681970713, 0.5691077104683948, 0.8848916087576106, 0.7779483752496101, 0.9292141002339134, 0.864564921619546, 0.6568854985557637, 0.5287368153011229, 0.582666147819312, 0.9646181895925436, 0.5953015151357148, 0.941156768986497, 0.5357602385334173, 0.6457727280598099, 0.6229577078860378, 0.8907058360702851, 0.7933885052992239, 0.8720832513775945, 0.8330257200741508, 0.616533148970393, 0.8437873782103191, 0.9586709785848471, 0.721359478747089, 0.9827865667013234, 0.5402358985592683, 0.900137279761934, 0.5593531808855017, 0.6718894295240442, 0.7067372298253136, 0.7426994761600904, 0.7237462723768086, 0.8266864784910148, 0.9325240801553158, 0.7999880396256875, 0.9757744818346741, 0.5136082760550522, 0.5915824646146556, 0.5959766392961716, 0.9561969290866885, 0.6776044506512013, 0.9913250585447333, 0.5481536777114943, 0.7839330096188029, 0.7691329696294571, 0.9403741027649618, 0.696200103406232, 0.619614702516214, 0.8521750061119815, 0.7411455688795726, 0.7166111314570349, 0.8891305948204566, 0.5585880059348665, 0.7476751971412408, 0.9279615960192262, 0.7205246926476494, 0.7032061910283647, 0.9671495728001788, 0.6889525891326747, 0.5162873828516759, 0.7621433548574963, 0.8465219905230679, 0.92034292967594, 0.8839531044680721, 0.7340866736656382, 0.7070800455194485, 0.681268644749712, 
0.9879028008207262, 0.7971204046862972, 0.5158554804777955, 0.8063088580930954, 0.5187781587706752, 0.6539251924625815, 0.9637644691458009, 0.7214807981982228, 0.880952603038388, 0.8844017676089297, 0.5492283564077705, 0.9185836769515506, 0.6639999494549351, 0.5681110390321217, 0.7127416938548612, 0.5894885969608431, 0.8796913848277084, 0.7771244746493593, 0.9460017041401477, 0.7620986910158309, 0.5648484737797405, 0.6869279168008539, 0.8491628682308099, 0.9116340762348332, 0.8454731510319518, 0.8175088964058688, 0.9429940962329888, 0.5397449819828681, 0.6126429830123103, 0.804172779218451, 0.7486521015594441, 0.5249984336128419, 0.6531497270867272, 0.7019349527194589, 0.8561453561988882, 0.8648832912687099, 0.8456066527895304, 0.6231232706301735, 0.745099580730173, 0.9556346735914277, 0.6125925625993669, 0.7435977142215318, 0.7054532923333485, 0.5374280970370333, 0.7574062029791551, 0.9799560896223616, 0.7329715726391106, 0.9170296159362492, 0.629234650572811, 0.6850113606990642, 0.6751752753296268, 0.5309501451757931, 0.6229149203278377, 0.5017699217453147, 0.6599996407049866, 0.8311730567937213, 0.7246635603443821, 0.8366170207113098, 0.5271851136507163, 0.6031191184395621, 0.7480034164603868, 0.7132012007390993, 0.763386145113896, 0.542992360643672, 0.9404855160849218, 0.5224739402509856, 0.6155556371340956, 0.6049372299970324, 0.8194671803187212, 0.7083866587197025, 0.5295100839001308, 0.7721962803522323, 0.7005501351721484, 0.5632059293243719, 0.7381998877589063, 0.7134216060723846, 0.6684882959822632, 0.5258206390174943, 0.8370049596177673, 0.6590750195852584, 0.6486158837732612, 0.9024934034568006, 0.5757259653923485, 0.6473432800579855, 0.8115004184651521, 0.7722417280915249, 0.8666430036647048, 0.8120180113817418, 0.5819239977409649, 0.758511590696592, 0.9249948576620011, 0.9504179996483783, 0.6937353365616554, 0.8229353478114843, 0.9832857220515325, 0.6421955453879573, 0.9901579931362577, 0.6220736290198954, 0.6913873931613947, 0.9243042869914695, 
0.8988668409754277, 0.6818576695963505, 0.5365975604435076, 0.685732872073659, 0.8199028669308454, 0.8814545340970545, 0.7237152987937258, 0.9490025380044245, 0.9788051757927119, 0.6215360742991574, 0.9182290717117143, 0.9869067644575709, 0.7262888580425422, 0.9174890825194136, 0.8737534923758974, 0.541632874053712, 0.9909816505771332, 0.8944656984446, 0.7660803904423747, 0.6547558507344863, 0.5416362250979766, 0.7838403933360503, 0.8982169305683118, 0.7933898615387601, 0.5398930599741367, 0.6965994528260211, 0.9706237432763509, 0.742818077428105, 0.7215801228802753, 0.669517726258843, 0.7838174135235549, 0.6734023127787478, 0.6340326539238433, 0.6155365962352157, 0.8756829406372877, 0.8588449812316749, 0.9430616626459973, 0.7721091347688729, 0.7087530420207111, 0.6229953868250135, 0.6063734413719455, 0.5093183946270908, 0.9785341375051765, 0.6261674750961833, 0.767083568693266, 0.5767990466365591, 0.6561019161093948, 0.7091513975185797, 0.7619595877010453, 0.9541450492344645, 0.5511444922479514, 0.7718528596204832, 0.7324882224123717, 0.6988956048399035, 0.7721409917600067, 0.7497297961339658, 0.7133438531443164, 0.9126325912242543, 0.5262066742276613, 0.8671742387683647, 0.9932435512444012, 0.5772096697365221, 0.6094286628269795, 0.6675844190336726, 0.8793725296535666, 0.9367464287023122, 0.7091612933476343, 0.9671829204980908, 0.9019012734213063, 0.8233924346776265, 0.8477285622570042, 0.6682477946316892, 0.7559289713481014, 0.8117374963208664, 0.9639241142173277, 0.6690431443207008, 0.8049499534398759, 0.8871722592446816, 0.6982191218022498, 0.9989725333284643, 0.5196978958836682, 0.6227510968371284, 0.5739214319380573, 0.962889502773026, 0.948766237291764, 0.5978393471996486, 0.6367559137641197, 0.8230253076693228, 0.9242643191161839, 0.9929735887298285, 0.6382476749925763, 0.6886700425330607, 0.527081256536913, 0.5795931663634947, 0.775164569057574, 0.7058252362066568, 0.8017182620522023, 0.7510432450169964, 0.6104317758305756, 0.9048016298393491, 
0.6060696024910368, 0.7519838415487123, 0.5008916590517053, 0.7136785057506465, 0.8420686665402429, 0.7323033625883428, 0.8300039860100168, 0.623275267785952, 0.8651876098971034, 0.5819582717370442, 0.6967015070368946, 0.7921175557745956, 0.8346106908883567, 0.6369522384949768, 0.9282472051130248, 0.8225492235905938, 0.5668186341783681, 0.578176911857048, 0.6053531906523277, 0.6496371090758213, 0.7986416174137098, 0.5234798930348185, 0.7223758249933595, 0.9932486582142852, 0.9542119213821637, 0.6235998461439276, 0.8055638163797538, 0.6833388995325522, 0.7668347438325422, 0.9837518856413672, 0.7965907199524673, 0.9402945351462362, 0.6001890322375492, 0.540798854229672, 0.9959541799577754, 0.7506280020865487, 0.9627417265036835, 0.5108090997736422, 0.9262854580775941, 0.6049451509027735, 0.9153753272211138, 0.7964071978739264, 0.6694528490237619, 0.6127161204080696, 0.8836255907833509, 0.8145686651187518, 0.7775522084094764, 0.7013241208288079, 0.6092059832352597, 0.9954184461925373, 0.9658721955828966, 0.5402746071009457, 0.7221023067690004, 0.9102321773951538, 0.8478645628522222, 0.6019258725048742, 0.828827948903529, 0.6871623954603654, 0.5478439228407437, 0.7414990938692083, 0.6725099069544006, 0.5437043099778668, 0.9183442099435453, 0.8435260145954746, 0.8948341432826392, 0.883626852146441, 0.8827787404438922, 0.5869727279325985, 0.5648195054469773, 0.6998491148470936, 0.9206352066404436, 0.5583657032634851, 0.9625043097991886, 0.6640519500540973, 0.7928839588595156, 0.5539883678805002, 0.508040545146107, 0.8766912186814084, 0.5333368857508167, 0.5354116401798296, 0.5494914184911562, 0.9763227545627722, 0.958576670503418, 0.5541111130746188, 0.9133133410001002, 0.8479719492354447, 0.8602868205699475, 0.742590426833674, 0.644805196024987, 0.8933482871409415, 0.7434996899333737, 0.9947769593923649, 0.8133838679473098, 0.8166960777544244, 0.5500047744816969, 0.7086206441328778, 0.645008943026333, 0.9854833370797257, 0.7187100859640028, 0.8199584848761926, 
0.9903119659479209, 0.5784653564137763, 0.673745605867053, 0.6406440677846201, 0.6331261955062166, 0.645275092779571, 0.5490580808173187, 0.8176301427631076, 0.7742117451477866, 0.7496570537309561, 0.8543452400977773, 0.6349634663127546, 0.7956179313115882, 0.5589566816255614, 0.8958000111406785, 0.7232350806513921, 0.6965405287184434, 0.6301839483082817, 0.9728916818736861, 0.5610004172778768, 0.8150204854943939, 0.9916625327578225, 0.88805776131586, 0.6118249667571589, 0.8910429273023169, 0.9166089412938945, 0.7185929610156345, 0.9474467739346861, 0.6242730010107509, 0.5555498226953495, 0.5352003298386008, 0.5356015495508895, 0.6737472355293945, 0.6148057937834732, 0.7249969564052212, 0.8963980704834051, 0.507429762266358, 0.5636838264562187, 0.7866030031351301, 0.997260391845048, 0.652839526078948, 0.9926932614111088, 0.7388926147707857, 0.6466453218848804, 0.5928218534732519, 0.5289890554516021, 0.8384695255214774, 0.9229945575855388, 0.664267549086373, 0.9464847557203061, 0.9467981221564272, 0.8008974588658153, 0.8800746393119256, 0.9050991462258009, 0.5346344951201774, 0.5956104826260554, 0.8846670265918566, 0.5567379139336021, 0.9868542038363224, 0.9551979868878937, 0.730102794107027, 0.6917626400063044, 0.9601071139907542, 0.8464824377401804, 0.7032148913783166, 0.712924932065369, 0.6826759615009077, 0.9139818229301718, 0.8852765922912746, 0.9305828297616638, 0.9443559883072615, 0.9830974942417403, 0.9455321693246055, 0.5329784424203112, 0.6341309022657065, 0.8831737064239162, 0.9998918401343917, 0.7196091663565711, 0.5182188357534582, 0.793577084134617, 0.6793331233298117, 0.502953156721698, 0.8468540665328004, 0.8180408033397153, 0.738167605259276, 0.6254889736666911, 0.7874084125219407, 0.6520225955021561, 0.9499597065272333, 0.8480923767620225, 0.8320063907014565, 0.7223301009279199, 0.65978901828309, 0.742015131112796, 0.5512848175789706, 0.8580348162836204, 0.7068528276216139, 0.8314870107058117, 0.9170879010858494, 0.542308938521897, 
0.8726158243883391, 0.5630054393919215, 0.7816118413043199, 0.5566114349724001, 0.6181427306884125, 0.6132541263335929, 0.6899249036412227, 0.6266646726641798, 0.6179205429307499, 0.5681851309262136, 0.8598734592895445, 0.5936204801433472, 0.8342668865868637, 0.5304068329108792, 0.93562606063064, 0.9398861815001816, 0.9738018928777179, 0.7553243543422261, 0.8665135201031644, 0.9546146209327759, 0.5452783315164673, 0.9581940415104016, 0.7905855616104713, 0.7954099987170076, 0.6959246836549675, 0.8073272888527754, 0.5398261811742365, 0.8003575285114037, 0.8250683050126036, 0.617423176075587, 0.6059110611623602, 0.8423694304448991, 0.7764005441222916, 0.7817084450314296, 0.5994778805253529, 0.9778522819077893, 0.6394121060524742, 0.9252415778857899, 0.6067354400133675, 0.5715588283331647, 0.9834606496734137, 0.9372633309573047, 0.8271854318086261, 0.7534179957769392, 0.8841352349374099, 0.9554166265702956, 0.9694244400694516, 0.9525419086580003, 0.8132664077069751, 0.6659663589890319, 0.802036446556815, 0.7340735052059351, 0.8829515576684133, 0.5949458747068559, 0.9869417468674675, 0.9676630614901122, 0.6396509959716138, 0.8369625722828004, 0.799028408882442, 0.8537962708840034, 0.5955536339738061, 0.6492122058788327, 0.7972435119217862, 0.6517476041601246, 0.9795028585947443, 0.9890884256238921, 0.7869710138591259, 0.6406034811957295, 0.6976321875487392, 0.5483468700609735, 0.5500037007484837, 0.7814040181012367, 0.7688769123281994, 0.8433339028558949, 0.9408123346330743, 0.6671448838803885, 0.6177011889863602, 0.9265922765218019, 0.855060353398362, 0.9913093472470118, 0.9885329884384398, 0.5448939881747211, 0.853243031988357, 0.8582564197383559, 0.7101251514906111, 0.8965053803785814, 0.9800185041763527, 0.524274297821459, 0.8488385356695212, 0.5547534345698115, 0.6363654240248624, 0.8391692442255849, 0.675080226528554, 0.90577642248684, 0.9167628490663129, 0.5287122289419717, 0.5893335082098845, 0.7214609435466921, 0.929260759428171, 0.9906379441982979, 
0.8242774319963952, 0.9802863383910456, 0.9736383205768424, 0.8990356770518004, 0.9405823732222443, 0.706754057155393, 0.5971919059244046, 0.8916103213278308, 0.5315249725775104, 0.6314323208933048, 0.9227578603468056, 0.51120239184429, 0.8422153973627042, 0.6832711730239849, 0.7775792557739087, 0.7841709766590436, 0.6349321341694009, 0.5676403294168093, 0.936075092504636, 0.6884900704446295, 0.5832786262492131, 0.9899689303200574, 0.8080636886516381, 0.7282772096722887, 0.7476014594048495, 0.7541751154471481, 0.8033338824523049, 0.886741698930331, 0.543123843688651, 0.6773067537147901, 0.5263370072887122, 0.9972795347801944, 0.6112930745799345, 0.6586045251841017, 0.8939270429857339, 0.5011505872756257, 0.8802964531544923, 0.7124385938056566, 0.8462033052084168, 0.6222354409225255, 0.5141514224104994, 0.6412523449585039, 0.97668924428817, 0.7229490718102047, 0.8706033607056949, 0.9499884868963693, 0.8598170898158117, 0.9940478163982289, 0.5716703773224118, 0.8451332294564499, 0.5512705925442358, 0.9285994038291371, 0.7135798274907827, 0.6263501784960677, 0.6903305650740521, 0.6109469816243533, 0.641203375572881, 0.7060308447395511, 0.9867739326659943, 0.8739238270319314, 0.7294905146389635, 0.5321959969017771, 0.769561284650865, 0.6902142422817925, 0.7391228502300553, 0.7440452988123593, 0.8180401218357884, 0.8840904649178514, 0.901041440159495, 0.9273689853200962, 0.8101964500747052, 0.8624877205810825, 0.6084181654320517, 0.7604462424257274, 0.7279674160180014, 0.7712121792819157, 0.8548179212722615, 0.9932255029144832, 0.8824044250231196, 0.9194768368367213, 0.6524751945241714, 0.5303872595343666, 0.6639800112379985, 0.5770418715108387, 0.8846883132452819, 0.7022580409293371, 0.6220341046928393, 0.9141832053923126, 0.6545597143011948, 0.6567665834221745, 0.8012689228797047, 0.9374581902709318, 0.8791313158646334, 0.5606907934337766, 0.5445400246093157, 0.72791248342937, 0.9665178490260853, 0.9287531808568678, 0.8138675420456289, 0.9630042523651382, 
0.9960766577783136, 0.5422207836397286, 0.5185600638079682, 0.7669320221268532, 0.942191927150656, 0.5561711837413893, 0.8485723968312673, 0.843211265161284, 0.93328215137207, 0.9427231259716763, 0.9098764498501867, 0.9886194735781544, 0.653086744667263, 0.6204128909696853, 0.7277524836180002, 0.8199403577008866, 0.5259525382068242, 0.8766134863227535, 0.7464302530672061, 0.723649053832942, 0.7101283397531808, 0.7455350453185086, 0.8313720248973893, 0.9400169030752783, 0.970704669042358, 0.5235970907617022, 0.7797013958364306, 0.5946559113841431, 0.5462237771980276, 0.7107442158942503, 0.5119229499944182, 0.5632395776323511, 0.8542698105001801, 0.5063237996705935, 0.5288394304179254, 0.5537732064668981, 0.7318586732753474, 0.7423585250026155, 0.8154827381776076, 0.8081756129414667, 0.8130652696312818, 0.760036208765829, 0.988065061400809, 0.8417779669496293, 0.5638560469150771, 0.8734180160826273, 0.8099219312586483, 0.6718921893643408, 0.5537772692111611, 0.9663769919648619, 0.9021946025906784, 0.8474374097781333, 0.7026984728162122, 0.633236214381629, 0.9334148800834752, 0.5234186821221527, 0.92243598108488, 0.6080233182149153, 0.8162053029534733, 0.7682883325403966, 0.5986930900333194, 0.7761348244840007, 0.938950974599982, 0.9452900579310276, 0.5489281148197293, 0.931508945157288, 0.9773332679240764, 0.8715806182181323, 0.913891140447343, 0.61939185226705, 0.7027512326118737, 0.9805801001519265, 0.5904261279679583, 0.7882371246850174, 0.6983704810227438, 0.9360894480954557, 0.667430629902476, 0.6935606973210118, 0.9997424607037233, 0.5822299869585916, 0.9206601774318663, 0.6380564933922899, 0.8121096417473845, 0.9510777036164815, 0.6203079250466124, 0.8976077321853314, 0.72448665514907, 0.9103818075854899, 0.8873910046782062, 0.8687044684065506, 0.9622416277951711, 0.6991849466094167, 0.5459316458445023, 0.6875711145025423, 0.9472220315323641, 0.6097651268713481, 0.5037715452208116, 0.9272668451742797, 0.8326951139086243, 0.9221356952139319, 0.9019353078051078, 
0.8130880267384155, 0.587467389446706, 0.9340877255603671, 0.5616137775711666, 0.6451106940353606, 0.7474306890275346, 0.8078824371122075, 0.7978907650330176, 0.7667706201502364, 0.7072200059772054, 0.5079950582889187, 0.7459339589129443, 0.6539588618641243, 0.7408671650345742, 0.5329785741407534, 0.7524523471573853, 0.9023038213650727, 0.9343584366789652, 0.6589638749159938, 0.7440707893180438, 0.961207112261091, 0.7155022719704589, 0.5263562374692198, 0.6319090876722648, 0.6543427699881785, 0.7623138287450728, 0.6086356362790426, 0.5662760729711476, 0.9413129782839325, 0.9245035079027489, 0.8223398628363235, 0.7926860661403244, 0.7828166884177797, 0.9834810019099623, 0.6346806773886786, 0.9047633752176851, 0.7819159791480786, 0.875959795818863, 0.5616421771216453, 0.5408737383143141, 0.6149830565032477, 0.9691912490760748, 0.9516538730832304, 0.7867205655072584, 0.6567252938609778, 0.623962841281331, 0.5825694351628521, 0.5618131303654641, 0.781910980965574, 0.6706424919095063, 0.9018291022248881, 0.9333125171327119, 0.8228946775247286, 0.5361400272300743, 0.9538636028204309, 0.6837624098913915, 0.6906827029417608, 0.7293883565121664, 0.8170867543272449, 0.9723099866931797, 0.5205369028823585, 0.7758789047130052, 0.7123804206270932, 0.7698740764194528, 0.911301342269252, 0.5015675980299585, 0.7558222621445485, 0.5884409139880356, 0.5148551713440576, 0.6521978004608491, 0.8002096062503917, 0.6426882641191196, 0.5117700743265714, 0.5307571264851938, 0.6537610660516859, 0.995357414475114, 0.8023209287955888, 0.6305839238095311, 0.9219302680594392, 0.7461362611147275, 0.7836593653484021, 0.7179533011675208, 0.5147252443646839, 0.9409159491422302, 0.9964253097484291, 0.9153069430824848, 0.983636554196319, 0.7342980322228709, 0.8841524899632938, 0.7112953697556392, 0.5875152632611029, 0.6497534826428705, 0.9934037096843973, 0.9000508852739413, 0.8339544598000065, 0.7505803410249383, 0.5651528234280225, 0.5567829432446508, 0.8011721234585301, 0.6287976141021717, 
0.6367841497381446, 0.6278929426608822, 0.7849582582974428, 0.8094642035030268, 0.7574579269638531, 0.6627953986880784, 0.9701269259411315, 0.6279794413398055, 0.8748358346398619, 0.9580046455302882, 0.6541496657450819, 0.7232818734097934, 0.5919685512329886, 0.5114030855098559, 0.7137361356702772, 0.5255102454894667, 0.8058314241668445, 0.9600248916742158, 0.561837640150886, 0.5921912107081255, 0.8015689816409537, 0.5028183387074219, 0.8313466051454939, 0.7033968730369484, 0.7810352075673059, 0.7377002488894779, 0.6322067436220338, 0.8089947136157027, 0.8894389337479349, 0.6566049308285287, 0.8166279210603931, 0.8680463351363024, 0.5754736342241893, 0.6768970860491919, 0.8558553336755608, 0.6446706516214012, 0.9993591403428896, 0.5612749212505803, 0.6917678148302269, 0.8671516099376704, 0.7623191921065797, 0.5170520992652411, 0.5729774225596017, 0.8074227278597188, 0.5074139381710763, 0.5633620362193352, 0.9701223212064514, 0.9804188343681408, 0.6256665487620842, 0.9483396035854519, 0.6672532398406948, 0.9908390050430578, 0.6064632334296364, 0.8442917209123212, 0.7707930992586325, 0.9553957193944544, 0.9194897315929349, 0.8974348567873136, 0.6145205890130405, 0.7575210845029542, 0.7361681411976315, 0.9237999155142503, 0.9120165499196757, 0.9955723360513411, 0.5027495353883358, 0.8150147946766199, 0.8540373804362689, 0.7511650925861749, 0.9103084443593603, 0.8449698582107492, 0.727646769502398, 0.670647441115979, 0.9722645457945265, 0.6713062704713466, 0.9519432831421921, 0.6893570971242999, 0.8317412932996965, 0.7199235560205519, 0.5247046610328382, 0.5559270165157747, 0.6119368196812109, 0.9178641035949586, 0.7369205134975589, 0.8589013873445681, 0.8434977791660846, 0.9194220138622606, 0.5178130342858258, 0.7493268207778117, 0.6313397132089843, 0.7377156411792678, 0.5287618200636683, 0.5630659046188657, 0.736563747076346, 0.8471950708223861, 0.9656165145454768, 0.8430209799528348, 0.7197453470210027, 0.8867328132525358, 0.7730009603147858, 0.6533455580775731, 
0.8984744280358125, 0.6999010912295578, 0.7248158223800328, 0.9576733668623456, 0.5584045449400848, 0.5403085918517219, 0.5168044505671899, 0.7933974397300468, 0.544486010266662, 0.9449063754705909, 0.9733929417612844, 0.6146735667396503, 0.802122085546088, 0.6677661773537825, 0.7244529974833135, 0.6188393013702947, 0.7393763258229891, 0.5066551580333578, 0.899050511539611, 0.5844731814526882, 0.5648140767720704, 0.6318209798475556, 0.9178573142020412, 0.6258288516777228, 0.6054988831839907, 0.5897065375743161, 0.5276646749426924, 0.6398995966191497, 0.5441080579103388, 0.9376064870052614, 0.6551247285390711, 0.5717917158648864, 0.5344855655028313, 0.8456965872856989, 0.8313254702851421, 0.8742072630541002, 0.5180643171164077, 0.6701688351592501, 0.7677596618487428, 0.6377137630052059, 0.684612746238543, 0.975377103697775, 0.9156855889116491, 0.5148038896718989, 0.9770053464978421, 0.9326810019716731, 0.7742977453909288, 0.854873589815144, 0.8201936193612474, 0.9019055357087883, 0.7965336954030728, 0.6354701311817513, 0.6575219457750542, 0.939132167762897, 0.7734171161068026, 0.8204190184820168, 0.9578341715113772, 0.7074527502985102, 0.8867059608473483, 0.7550493863164213, 0.5420878803016844, 0.8664368288133364, 0.7888650391151149, 0.6874400675517414, 0.9465642678781747, 0.6726786236623339, 0.8025175130834072, 0.628508971211952, 0.7613465486125504, 0.7767017248055541, 0.6907770678093654, 0.8669171386600656, 0.7158945766080402, 0.6543847058900614, 0.6802308048303518, 0.7296777675044437, 0.5148207156661084, 0.8834832780568822, 0.8138063287943673, 0.5583971481575984, 0.8981950671199392, 0.5588133563479811, 0.875857426723307, 0.6143539617817971, 0.7644732379897039, 0.6501161034711782, 0.8122151969961351, 0.72876972522503, 0.5407809568778144, 0.7425643821279113, 0.6204273094762867, 0.8023118281128437, 0.5749079543079503, 0.9522438731535512, 0.5091267624689908, 0.7991755056551504, 0.8485356538108644, 0.7617226786217195, 0.7679665060786498, 0.5372488802131701, 
0.9377989334608816, 0.7207594216853723, 0.6378114869013178, 0.871628588377945, 0.8808989390743609, 0.7280301269690672, 0.9742198428451349, 0.5124339645554922, 0.86245488189193, 0.7662399623370793, 0.7740876500175606, 0.5100710945338487, 0.586284823721521, 0.9070754615153565, 0.8359850201051361, 0.5295662817749639, 0.5067992654612676, 0.6260836028893138, 0.9091984100873143, 0.8454514727081671, 0.7815705455482309, 0.7610850972495193, 0.8713509536926711, 0.7369992676658578, 0.9731643374570824, 0.5634980626987096, 0.7169993260154339, 0.7892984588106475, 0.5848817802894274, 0.9069939793026607, 0.9257914440982873, 0.7160525166643483, 0.847708579503637, 0.8777388130700015, 0.5019173852025912, 0.5864969741341197, 0.708709227960834, 0.8371556856358366, 0.6262955758040765, 0.5071211148661394, 0.5934298653985066, 0.8215918281856349, 0.7771597092017883, 0.7070697251892577, 0.5469868613040585, 0.9946993127818273, 0.5043600064098288, 0.8323317114045482, 0.540017965189334, 0.8582842481402476, 0.9714897016546523, 0.6274575835163244, 0.960318434486327, 0.5533624429080041, 0.9749357681908131, 0.6410152742065287, 0.7461638827189698, 0.9431669984632369, 0.5689544380815622, 0.8237519601504217, 0.7836505526077033, 0.8685544401212044, 0.8475792866118435, 0.9678075783122886, 0.5655732470847212, 0.5984125705581214, 0.7232530930513024, 0.8878512112359814, 0.9408623902262119, 0.6641212049515408, 0.617865786304374, 0.7389027876616495, 0.8650642437976346, 0.5241045849477624, 0.8907121947273905, 0.8678017770606106, 0.9742744595746555, 0.5466272106102307, 0.8115469192435929, 0.8665767395135258, 0.5291784866193932, 0.9657067662628547, 0.6262269930594662, 0.6887477530068706, 0.5727625427588199, 0.7671487468805132, 0.7415992995513314, 0.7060218022595856, 0.8497180822517969, 0.827038391988656, 0.8453632005514808, 0.7682034927994401, 0.7869784871068787, 0.7778085775063872, 0.5193731110110847, 0.9431800822134653, 0.9250404940492942, 0.6137584196837556, 0.8315423207921087, 0.823024486341474, 
0.7769897263643056, 0.6019917745694552, 0.7083786204805396, 0.6455042607007125, 0.5774068195164879, 0.9230473861939977, 0.9733962867285157, 0.9267868400245975, 0.5327591993922842, 0.7663993773885859, 0.6945773090954104, 0.7121444071081563, 0.7368867992580763, 0.882521293402412, 0.6339346231963765, 0.9027020012158478, 0.7239511799307012, 0.8947676072660166, 0.8706860524892257, 0.7931939479673905, 0.5531153205195788, 0.8511498608164823, 0.5652024811114709, 0.8960542020581592, 0.9399273514892557, 0.5838563698020178, 0.6043111633181969, 0.9669257918835734, 0.8877665016652752, 0.5891426899215204, 0.7365443992230789, 0.7526614214798234, 0.876781659375801, 0.7738243861166639, 0.9240423195610225, 0.5027457981949539, 0.9677179744812293, 0.6592835018681296, 0.8242682966742865, 0.5180072334575625, 0.6493483258858552, 0.8210227290309244, 0.8721705777507079, 0.8899738248983822, 0.5168125689220804, 0.5782133673341467, 0.5592722954823148, 0.5947272590652191, 0.6029603880211797, 0.861340789817671, 0.6878581490910582, 0.5337932521703528, 0.8163976965449647, 0.9965143942624364, 0.9796221563648833, 0.7366559453590833, 0.7785157294468497, 0.9508596996880703, 0.6890804663874106, 0.587726421914941, 0.9703027466367556, 0.7925613836910766, 0.6370545909213466, 0.6479432018251048, 0.562114347294701, 0.5362795972747154, 0.8435167373878778, 0.9298331650696001, 0.8466742810085397, 0.7118889010736409, 0.9589115333616596, 0.5711453778436704, 0.9422805833515551, 0.8128347087375585, 0.8508833017877724, 0.7270125777905431, 0.8125943131379687, 0.7886852630058417, 0.8846497217788545, 0.5864760931356291, 0.6663421914828496, 0.8783084853484985, 0.8325488849496308, 0.7213165712044132, 0.83811495583111, 0.8582340615996236, 0.8910413857236357, 0.6986821508170752, 0.631774301963749, 0.8164417435165021, 0.6026226292615149, 0.5767776510438576, 0.7398035046123397, 0.7794638264706029, 0.7058583384656727, 0.5691116286685762, 0.7026686528089099, 0.6377356815708831, 0.6251241411623254, 0.5444614560104133, 
0.9174787043025319, 0.8535092380109686, 0.7163852127873387, 0.7241203351108572, 0.6124137503472015, 0.936137395675886, 0.7071057876167546, 0.8057911632733792, 0.5252039928544572, 0.6032599315351234, 0.6238731988244777, 0.7231542964949343, 0.7324690617870084, 0.9029544161671499, 0.6060092401606939, 0.7864989535677992, 0.9439025212392366, 0.7209473695822362, 0.9739630452082084, 0.6066204172491838, 0.648711636365856, 0.857360956061143, 0.8695937891434016, 0.5620212875470295, 0.5652219504958683, 0.5600403999715726, 0.5187352699336074, 0.5420189117465316, 0.7738027231524003, 0.9012205429767965, 0.6726718595379264, 0.8795434255538026, 0.5301654775437661, 0.5997967114548683, 0.7754669524931628, 0.636948651440308, 0.51720681179465, 0.5076932669947846, 0.8602493903317961, 0.926045919877341, 0.8275598600810246, 0.9378387818057559, 0.5211445770319396, 0.7457911569606496, 0.8958478108659345, 0.7408217361084177, 0.9091797922165589, 0.8179407766376392, 0.9020455687235414, 0.7619655638418519, 0.8939964919179391, 0.6485024662760395, 0.5438115482380292, 0.6886931520794976, 0.8971427325729139, 0.5371261648630694, 0.8857710797802063, 0.6460642098615378, 0.5768844344531436, 0.5876976952135018, 0.6688758773400274, 0.5164356260407001, 0.7556430665452745, 0.9773295813694836, 0.5055369182379383, 0.9346338157905969, 0.609869011202796, 0.6785578236226161, 0.782788303518159, 0.7857868739201774, 0.6809841013572251, 0.9237650666124078, 0.7967983462659778, 0.8732820178935584, 0.652233485118566, 0.7038997017936364, 0.6204302162050176, 0.9928026852961702, 0.8230192131582993, 0.5175266291510321, 0.5215868910731345, 0.8920806257486674, 0.9258953884324879, 0.7410320718019283, 0.560405728929372, 0.6981619596013217, 0.5989143931130386, 0.5683227660161372, 0.7783481814520907, 0.9240348193539885, 0.7509594687899325, 0.5258783175471236, 0.7717748194761294, 0.7422494562214728, 0.8302622877195027, 0.5033568995876649, 0.7673433452367164, 0.9332340032577858, 0.7733163580801161, 0.8602310661574404, 
0.9799215910993203, 0.875306800047017, 0.9427349471916653, 0.8100916028735712, 0.8265381858812819, 0.7280928367370364, 0.7140107882003144, 0.5735682289166959, 0.5888680474709013, 0.970804219047517, 0.7944576736323476, 0.9450260442777005, 0.9133995672565608, 0.9382074920068126, 0.8656770195980997, 0.7897904938699889, 0.9470255198518768, 0.9433680558387598, 0.7833251381279005, 0.6239423539558031, 0.8231962567767255, 0.6032322595722179, 0.8413003613743713, 0.9660967988862783, 0.8228995345494865, 0.6018084123450862, 0.721284647058227, 0.7357829460166243, 0.7554770522875498, 0.9421946967496648, 0.5389047099586706, 0.8024535812746296, 0.7533022856251759, 0.6623888438320269, 0.8511609333840788, 0.7802356347590851, 0.9537946151164806, 0.6163315914948648, 0.659436890733058, 0.5324106045787337, 0.7117308381806524, 0.8854677133508257, 0.7457324703840957, 0.531102501566823, 0.800328775273857, 0.5496235253689613, 0.8686789009303024, 0.912456966649216, 0.6574931190456255, 0.5788834567921703, 0.794740049593588, 0.5194146809907971, 0.9440668916189104, 0.5264848587070421, 0.7014216738755104, 0.8981138294421228, 0.7687422160181128, 0.9092850482371476, 0.5155631319696481, 0.5582979949862292, 0.8582002560327135, 0.825271153796215, 0.9509969247466453, 0.9310300939193197, 0.9528659747419674, 0.5833229713946557, 0.6867119330277386, 0.8021908005891432, 0.840456818780774, 0.7600713152593528, 0.8820940939887416, 0.9269312928396058, 0.8578430328609953, 0.9068499242826145, 0.6921503633487534, 0.7605883746516964, 0.9152800389103024, 0.5436254027321599, 0.5444931947735987, 0.8978594453457653, 0.8620926211356146, 0.800184988765686, 0.8717266829705572, 0.8661931573385281, 0.8211681228298222, 0.8601998225257244, 0.7553251513232189, 0.7113704216483505, 0.5201430200641912, 0.8062966837845044, 0.7294705412685552, 0.8803527287564528, 0.7803896988549706, 0.8926674486237183, 0.5464639944426801, 0.8625640151914551, 0.6372274825900797, 0.8311851655777018, 0.832327557033279, 0.5333260381467687, 
0.7985380263422074, 0.8648753106967774, 0.9333113016068885, 0.8661452072135079, 0.8699867751518267, 0.9506610928285605, 0.8937403861884053, 0.6182198930871154, 0.5047745800239981, 0.6878410141622964, 0.9700143066325794, 0.8026512632034375, 0.5463117969072362, 0.7941508000393932, 0.6384065692035983, 0.9783727819995105, 0.6857724988202911, 0.5185694135346391, 0.8714947200827787, 0.5302580225847986, 0.8720082188073361, 0.8079660490781436, 0.7825432459005854, 0.6241742259821987, 0.6619152361505176, 0.6450538752930632, 0.5592950256717222, 0.5107044305266384, 0.9331150867870331, 0.9936191717509828, 0.8930750986807825, 0.8447833204090907, 0.8405614513550069, 0.7039582343730828, 0.7023530607852042, 0.9555190797317465, 0.9053128625207782, 0.5027973890807971, 0.7954814689548662, 0.8119601479499472, 0.6526713652730765, 0.8966841240943926, 0.9145015046622764, 0.8673773547600122, 0.6936406519351157, 0.8621216805293167, 0.7079381892270982, 0.8995258443030321, 0.5483135477885219, 0.763460702045409, 0.7688930525915167, 0.8400220223021694, 0.7570993101794956, 0.5203428325819017, 0.711787508343136, 0.5738235006229344, 0.5324439435476491, 0.800710427871437, 0.556176542903986, 0.757537668212183, 0.9011936081685434, 0.5151400935840875, 0.7554346238772138, 0.5754198037650167, 0.9672863177996467, 0.5896368897862724, 0.8756829699678648, 0.6116156074778178, 0.9493435423367546, 0.7528700645684927, 0.6337148636298842, 0.98239589887211, 0.9350410215168777, 0.7224893878005567, 0.757237515717599, 0.9879143365289302, 0.848622320730557, 0.6335815430545432, 0.5347155523196452, 0.7399019285338693, 0.6066440012222813, 0.5801900841502058, 0.9136044279898541, 0.8660665042499454, 0.5803977597878711, 0.9464261190749432, 0.68503000094509, 0.9688893285692481, 0.6666009689076722, 0.992988542751482, 0.7959381508046681, 0.9897956588153796, 0.5430726877897949, 0.5710724682706287, 0.8093304815617843, 0.8916533607929444, 0.9262746867132242, 0.5936694844295325, 0.7346860421089665, 0.9999188434334626, 
0.6266488238836189, 0.826289450443604, 0.7604921356364254, 0.9582405668416982, 0.6537517023725374, 0.7310812446450238, 0.7723034945285312, 0.5027071562106575, 0.817745963761139, 0.8532202556207916, 0.8011728640268294, 0.747129794558289, 0.8920687015827526, 0.5665099944109482, 0.9586263126132057, 0.617875155325821, 0.5811661201858872, 0.6679928380315082, 0.6652727613824846, 0.6820350613224079, 0.9929639223035474, 0.844063069671415, 0.6034083933638651, 0.9607857397098435, 0.5399333089058398, 0.5879376978613973, 0.682222676134318, 0.5295923031908798, 0.8595592615409927, 0.6279670510825803, 0.5091682450498479, 0.6155019266043824, 0.6214231365308932, 0.530822115321128, 0.7803506825173423, 0.7180286253868382, 0.7510365683403395, 0.8905281545899442, 0.7637678487343895, 0.9765273147061504, 0.7412955841380343, 0.617354942485318, 0.8262900037044382, 0.908135286949276, 0.9140847639888375, 0.9521497816688761, 0.7094631035222005, 0.5332444720963665, 0.8358802214677543, 0.7703907974339397, 0.8136007539516699, 0.6101813379976517, 0.9544639030178449, 0.7649622992275489, 0.7308673003249984, 0.6592351556920917, 0.7996161329298614, 0.5400285572397618, 0.8222522406310104, 0.5801013206809773, 0.7310560891982819, 0.9746586962211026, 0.7458114578211563, 0.948802635824485, 0.7519973990055768, 0.9955687749036546, 0.7471685298735793, 0.5255709963827078, 0.6391562835752937, 0.5000687744931143, 0.5169896782267862, 0.6682475105582597, 0.6541415164305587, 0.9510855349968983, 0.6606784868547848, 0.6549878584691708, 0.6234940271352266, 0.550038159833807, 0.5008815530533794, 0.7421271233240798, 0.7304739227117941, 0.8417530689342758, 0.8358393739273347, 0.5292080596071673, 0.5864469621039043, 0.8306719492051109, 0.5066175834181683, 0.6548321564492952, 0.9209073258610334, 0.5287955514940748, 0.5581585466261245, 0.629569793379076, 0.545652022865911, 0.8608493594938305, 0.7943207317427425, 0.6667102849705473, 0.965314605003206, 0.7942056411475058, 0.902911338789683, 0.8113826437147137, 
0.9744863273987543, 0.5831174615103538, 0.5725802926985757, 0.8822882192559566, 0.8391597502906669, 0.5270509576344, 0.9836026713013308, 0.9939877550838858, 0.9179764487124604, 0.5252965790220389, 0.9843020318719847, 0.7098049037947094, 0.9148663020065824, 0.7486990358202311, 0.5986451753367139, 0.8372786909926513, 0.5668651445696946, 0.9912208875803865, 0.783374176311991, 0.8818636114959915, 0.5678513900548166, 0.9977030826572798, 0.8166382932286957, 0.7054287840976774, 0.8780813047286505, 0.9207848018883098, 0.8344000769391262, 0.7541615123235232, 0.5097352919275449, 0.7104735709419545, 0.9725140655081878, 0.6495511296986403, 0.7231218474710073, 0.8780463932568379, 0.7203541532416644, 0.6520001919975069, 0.947569174888482, 0.8893680463870606, 0.6180628074379417, 0.9361161519032768, 0.9689342448372931, 0.5505478483938817, 0.6841594733249763, 0.5686761266459158, 0.921946994624943, 0.806194165794512, 0.5992609602394859, 0.93656322029666, 0.511069587208144, 0.7284287432695409, 0.7143096601820899, 0.8578325450408095, 0.686564801770378, 0.821156424074141, 0.8658726448450668, 0.7733440302306223, 0.866134978969599, 0.804067489463969, 0.7817068416738594, 0.6676682833908567, 0.7945072520068166, 0.5894519347241836, 0.7546796334610759, 0.7649374690397002, 0.8587871688560248, 0.5516366704230748, 0.860143248771993, 0.5272012120275773, 0.95631400962019, 0.7984526830633787, 0.5077973815289988, 0.7627803594207007, 0.7654459364077507, 0.7783562937068313, 0.5474133926191642, 0.7875601369292211, 0.5693622982878968, 0.7711997275423998, 0.5967302371327955, 0.7088239250484789, 0.6144547528464581, 0.6158985836914108, 0.6145447554056298, 0.7048860811300577, 0.6190092064093812, 0.7561912088064819, 0.5457504320881996, 0.7937821472412179, 0.9108070435855027, 0.5451029958840934, 0.7310896140784368, 0.8634759784588137, 0.741983949259527, 0.7970612838050708, 0.5934351732185987, 0.9436257511319365, 0.8136595786433232, 0.7093036299575014, 0.7713751345032046, 0.7800798416251247, 
0.6953242356228939, 0.8968404463234785, 0.6246657304879049, 0.5932895709513217, 0.71292946412713, 0.5922779846177362, 0.5613179906650642, 0.5514836614725593, 0.8827622030782278, 0.7043698896181414, 0.6055019604753027, 0.5844702087594051, 0.886076130601039, 0.5888924612385863, 0.8333696224004348, 0.773090295729965, 0.9258406798747223, 0.9163719658496227, 0.5926685455824611, 0.9874791300554735, 0.8217031039585168, 0.6512871413743629, 0.7678076394429745, 0.9767991957864475, 0.8536493066479742, 0.642987796604642, 0.8940008690124562, 0.5248651959611269, 0.9347463678643053, 0.6019031440906073, 0.8535588454800527, 0.611943643462265, 0.8668209487193896, 0.8373671113622922, 0.9836422586041136, 0.7764053547656597, 0.9100830546389984, 0.6508424623500727, 0.5240210693518963, 0.5273566011269221, 0.9852338191651482, 0.8045459167940949, 0.7740989609993323, 0.8520432820611713, 0.8219600038358068, 0.5309309077963974, 0.8360916306926478, 0.8183654679385424, 0.5138031021476042, 0.8700485379174575, 0.7431592299517753, 0.8733452700557998, 0.6725590020722385, 0.766326329170253, 0.5203328831745302, 0.5255137743305018, 0.6395077444229336, 0.927536012591908, 0.9919517561799758, 0.9077749822444295, 0.8320825943755177, 0.7226976319374638, 0.9055678125063779, 0.5807126288076423, 0.6784568300962641, 0.5004708318064595, 0.8042051324689309, 0.9348907595208659, 0.9543699085386081, 0.6914095815221943, 0.6176121066975244, 0.9043874972802164, 0.9694009925564959, 0.6092780520071381, 0.8572586552417276, 0.99006914532588, 0.7152048585086328, 0.9148071272142162, 0.8322062127447333, 0.7473574679406383, 0.731924100878464, 0.6592534877684276, 0.7936470993398526, 0.5942559006966377, 0.765030636309496, 0.5629539869930826, 0.9391189582280284, 0.8559914139770171, 0.950984348420772, 0.8081798901008357, 0.5672347891446904, 0.6679188116436188, 0.9157374283623569, 0.6474007172957703, 0.7907050788806559, 0.9168599289272448, 0.8421593794252915, 0.7909892904668583, 0.7132484465573039, 0.9079613039545135, 
0.952297425339033, 0.8452470058814302, 0.7117900174248001, 0.6695456354817498, 0.791304118285106, 0.8560093680265175, 0.7803469184430096, 0.8153390628883197, 0.5876501284674793, 0.7762894055893899, 0.9639000846556119, 0.5896680227098698, 0.9720399721931455, 0.5634366164708775, 0.8481716089898672, 0.8906634395952091, 0.8737284741393094, 0.7322576785708644, 0.8330032732396404, 0.58983537188717, 0.5443121615163373, 0.7274513783285708, 0.8495905904174441, 0.6781076137909767, 0.5811264525580226, 0.9222850271889924, 0.9296050184895327, 0.9787127174025423, 0.6256185748634022, 0.9009360975693239, 0.8158637639363402, 0.8546348743729426, 0.6469865219321844, 0.748212611624429, 0.7025132737379147, 0.5505728942581427, 0.6214666709634116, 0.6370598523924936, 0.5631294134417246, 0.6275691764138567, 0.956112508438704, 0.8209968261273873, 0.6749886655839351, 0.6455425680428795, 0.9729371188268457, 0.5396366638684553, 0.777555171158878, 0.5962565615182341, 0.5708634390257415, 0.663750852803477, 0.5330854020386403, 0.7791703375396091, 0.8886201088270608, 0.6537746108934839, 0.8090272996556402, 0.8325580766680724, 0.6153986719231306, 0.5812524581784777, 0.6946406700236034, 0.8161467700927623, 0.7425574309759093, 0.5152186544047764, 0.5396278398247383, 0.8190303804510234, 0.7009755137816338, 0.5268280859367392, 0.6401551575109502, 0.6473132198815457, 0.5511480010629628, 0.5771250389502638, 0.6957950711719885, 0.9309673906376699, 0.9251245540697275, 0.6933588480484852, 0.9798985630165991, 0.7745556688627269, 0.5154789857728641, 0.7905331991195229, 0.5915652417658195, 0.5308552938546949, 0.7819994383062434, 0.5506217922642724, 0.7653149489544844, 0.7229330964633333, 0.9090688490144273, 0.8573418932731357, 0.6520361358878322, 0.9085179188125605, 0.8209403668970019, 0.6051281337456296, 0.6358122168633908, 0.6639436348735022, 0.7668133167048163, 0.7060692998666105, 0.5424801618819012, 0.6918194947669012, 0.550016809424049, 0.966180860063774, 0.8362067388223122, 0.5833681423483239, 
0.8739957059803611, 0.9526889858355145, 0.8950312183895637, 0.6643651062076099, 0.6498483166732054, 0.6823408102650724, 0.7513115266407819, 0.9483392518498759, 0.9479883843604489, 0.8039059169739349, 0.6478426705711369, 0.6513802858982897, 0.5944942445435547, 0.7932571979149937, 0.8579900103575968, 0.803051136739773, 0.5408514984610686, 0.9173402007147574, 0.8983186288154794, 0.5658932154458596, 0.6193082403832162, 0.8457584534168858, 0.8979815971481384, 0.7397255020102972, 0.9008941035978049, 0.8515521515406107, 0.8436220452489637, 0.6318174143162354, 0.6505393406669626, 0.6492771463595144, 0.6609672423446517, 0.7101941208707563, 0.8376845749994033, 0.6312753838580012, 0.6544101781880982, 0.6509119368764064, 0.6778661191511741, 0.9265151960944061, 0.7487181788983247, 0.7211293329067714, 0.8935257816532405, 0.7471613870461675, 0.8273809019221171, 0.7285696572431877, 0.6787996850615905, 0.5675350140006743, 0.929035882455005, 0.8290929581817854, 0.9259006906834689, 0.8549083455828228, 0.5726248456142091, 0.7536173318737087, 0.8582821897903655, 0.5800556159802053, 0.5683870543159921, 0.889980286115873, 0.7130307575525905, 0.908170140817065, 0.7283893884932326, 0.5260174918701501, 0.792465670324046, 0.5486252763626865, 0.8981995011076946, 0.9332853141743962, 0.7002086395645941, 0.5742766486462689, 0.9570363516052751, 0.5694015308941016, 0.7611503265859513, 0.8025052761764234, 0.8488113461583128, 0.582372631924327, 0.979151080729308, 0.5647739047595869, 0.5154765495683602, 0.5168214893889584, 0.6275439925091559, 0.5963668633591017, 0.8640532353679218, 0.7443816737384079, 0.5815351372033332, 0.7986799581860327, 0.885992880418805, 0.8745534999066467, 0.6931305750976781, 0.8033757278460687, 0.6047660871845912, 0.824799655226865, 0.7867513286983585, 0.7786191706995433, 0.5861113192398049, 0.7333525532470495, 0.7474113256221981, 0.6191712053331395, 0.7930939613351311, 0.8789187439050198, 0.5209658311375058, 0.969893943750094, 0.59334296161705, 0.8459052951369359, 
0.7439659852345484, 0.624201397570365, 0.7803426429236802, 0.6425026334752144, 0.8595038945978069, 0.9985812938961061, 0.8499618576430372, 0.9850653114191712, 0.7754288471542811, 0.5042931621052754, 0.9202903508854945, 0.8010353920090783, 0.581597841654335, 0.8109312328058801, 0.7165456945918709, 0.8279888930021551, 0.895536160436002, 0.8954861558842011, 0.8808928767361571, 0.5527713588909728, 0.5458717445649481, 0.6102414808214583, 0.8567101931753778, 0.7473591213938701, 0.749010579093327, 0.7759244122689284, 0.6755821194795109, 0.9965793469026145, 0.5346548398773328, 0.9508727427187265, 0.7676929875206288, 0.5075919767446743, 0.892859660205006, 0.9525115922167273, 0.5589537979951555, 0.6833608652357857, 0.6110383735031872, 0.9219883946330181, 0.5206829549137557, 0.8037001058768551, 0.7564929609700672, 0.9195625233618371, 0.5727297102270701, 0.7666183691689268, 0.5494527502437307, 0.8528317458531793, 0.6259235955130623, 0.945790167305955, 0.9198440846778364, 0.5727425613722316, 0.8469870287146886, 0.9261076627810338, 0.6094256147425877, 0.9251820021166217, 0.7065028427756562, 0.5577531790333153, 0.650856029042771, 0.7364630240367841, 0.5715823173601077, 0.7363644092512213, 0.9586865104419695, 0.6244610435331441, 0.6512251238384834, 0.7670493005676039, 0.6761642799414092, 0.9379210453260092, 0.9205104775786308, 0.5976773806497453, 0.9542629377815357, 0.8990159795024866, 0.6896276623497789, 0.9143404752546853, 0.7207471451337288, 0.6334217328374221, 0.5043701591940835, 0.6818333413714345, 0.8530015195119811, 0.8322000738063527, 0.6627088310700169, 0.7210759683816245, 0.8727610730648975, 0.6074790736689866, 0.8997210182110538, 0.5369192606053951, 0.548991437733533, 0.6570107775728071, 0.7098738159916964, 0.8723740855602218, 0.5780544455179868, 0.7322191346028724, 0.8877796189678493, 0.5968291367061461, 0.7049692919961998, 0.7765795612634573, 0.5647302748626668, 0.8484097598853476, 0.8482446409601128, 0.8905359446586247, 0.8438558583565945, 0.5734974006927842, 
0.7771258152535616, 0.6863759639009479, 0.6416681686219037, 0.5256661769828277, 0.7643597733169984, 0.9506604974372439, 0.8050704949908448, 0.687354622375455, 0.7601109511249065, 0.7228466505992536, 0.7567989037682666, 0.7395855730697679, 0.6705043637919494, 0.856727656901741, 0.5969135578378693, 0.9179568717433744, 0.9141275516167917, 0.9037047588971564, 0.8647840512880214, 0.756593671675733, 0.7515017553719214, 0.5988810311638546, 0.6629363986587142, 0.5563572012239695, 0.8445952488114873, 0.5925690283011232, 0.7995259511962864, 0.5766934861118871, 0.6645143764543733, 0.6315451561842631, 0.7545108036438264, 0.6805065444273981, 0.6026959518529035, 0.900863282156197, 0.935106344212427, 0.5254365179388181, 0.676346550423145, 0.6628400139812817, 0.5614768925244769, 0.9055925638672435, 0.6334208196648223, 0.5446846271918109, 0.6273943443452291, 0.5760520878090246, 0.5197786411980592, 0.8156553826647601, 0.8912467059859046, 0.9022408464289489, 0.5810793011434707, 0.5096958547306751, 0.6120410895966424, 0.9834433850794968, 0.6959601323645288, 0.5870032019924301, 0.5664311436709883, 0.8726485423523226, 0.9627099105311836, 0.6348421427548848, 0.602951912067401, 0.6194572387296797, 0.532850677413603, 0.8795971601658752, 0.6501258744821292, 0.8049826047627523, 0.8476776368157266, 0.5884853501902509, 0.6728690822735417, 0.8557208247309338, 0.5619674842737346, 0.7137601240814603, 0.821553239723646, 0.6380191053250435, 0.959826518994557, 0.6043259533992714, 0.502878816198572, 0.8402263621963731, 0.7856032498881784, 0.9646623455339767, 0.8977962455548406, 0.5796291149520796, 0.863514552592904, 0.831157819800989, 0.9135224367513854, 0.6411814654495741, 0.5250859370698111, 0.5997984615961196, 0.571429797879127, 0.8896004523732046, 0.5258290700743009, 0.5848944707963346, 0.9333208001364618, 0.8562879379586308, 0.9291759637570852, 0.7528193295404069, 0.6739537459853323, 0.5249651678049466, 0.8801544935425536, 0.9546615324992367, 0.57728591475835, 0.9545398322477818, 
0.9991707310970268, 0.575997770411705, 0.8726687608364512, 0.7049488302213673, 0.8624364486049372, 0.7234918795655059, 0.7492599911225846, 0.5780568774771455, 0.6845653994564951, 0.6672756267802353, 0.976518001896922, 0.8729450853034669, 0.6410158291169407, 0.6369707458108171, 0.85010945662978, 0.8490878080915414, 0.8311873590091239, 0.8424469292122835, 0.8431023141490517, 0.8831362895254327, 0.6220818095424601, 0.8640535264591649, 0.8063012464931532, 0.7636321694462712, 0.7248753935067643, 0.5897590150355871, 0.5013809928401475, 0.5600136014344432, 0.5718605157104162, 0.9389760047751466, 0.7429707975354954, 0.8414533979448217, 0.6807404678047155, 0.664236997825465, 0.855692564244595, 0.6046861992305532, 0.8077661261962403, 0.9269273238028937, 0.525864001159852, 0.9731547647726062, 0.6954765894968001, 0.7713712878605752, 0.7128231706996841, 0.7906453970370078, 0.8809928922363947, 0.5079977055787599, 0.7782006513750942, 0.9726284400467792, 0.7928390252385771, 0.6238823549996294, 0.5665722315203239, 0.6675281846232783, 0.9540687263592442, 0.5165953804786916, 0.7157381460305896, 0.6575002399851286, 0.9378910319859919, 0.7155359884523816, 0.7283490660309105, 0.8034141098789919, 0.9960443590413913, 0.7598700159980252, 0.5175993892833233, 0.5146033178060587, 0.6726945318152608, 0.8774877647218737, 0.7928903559386247, 0.7131729029691856, 0.5865444127167242, 0.792555924527246, 0.9263400092558874, 0.9042168969534043, 0.8120722693951893, 0.6764480829436075, 0.6887572657967543, 0.9587596250613334, 0.5226472398328061, 0.8008744029287537, 0.9440222558606375, 0.8843349759258821, 0.7787992125473142, 0.7844421450716063, 0.6411125706880119, 0.9487576793217551, 0.8702155289079665, 0.8755995840326385, 0.8748404479188836, 0.8027749969347295, 0.9605839779503598, 0.6892746119496899, 0.6001190265527225, 0.5939855279908723, 0.79237959051934, 0.9550527033174021, 0.7645068627009994, 0.7608093301427681, 0.5064972807717809, 0.7203869678018345, 0.527540603630414, 0.6698635914058118, 
0.7959405669443307, 0.6459441180601551, 0.7366687834729146, 0.8321771899390937, 0.9016835353168513, 0.8945658192801025, 0.8609907685784058, 0.9627129766569454, 0.6240018590639371, 0.8343053188224043, 0.9931533079502615, 0.8656485627864066, 0.6595236260810782, 0.5240214757535878, 0.5961956808275697, 0.5853375454285149, 0.6247117396809087, 0.8766019331742354, 0.8462944643072646, 0.9838425357403381, 0.5343059630800158, 0.9400374994938047, 0.5397973101528386, 0.6519224767377982, 0.7826193170872084, 0.8854040414053885, 0.7247357195907533, 0.9729951430309209, 0.8784454901896475, 0.939831508263783, 0.9525751841971086, 0.8640460315734213, 0.9717021743154282, 0.7068310457821113, 0.6695099082499762, 0.585684628400188, 0.9851063315231022, 0.5234108766010204, 0.5156379691050597, 0.868368360322656, 0.7724549200716393, 0.83909632039782, 0.5554113699195764, 0.6518854661504634, 0.6695211605078851, 0.6566170325273577, 0.5164056664990959, 0.6441632342848078, 0.859249898597979, 0.9312770750028017, 0.8759795568012889, 0.9250126918096213, 0.9318195901566191, 0.5341105236168059, 0.5496076756766475, 0.6072183923324215, 0.7142788593727598, 0.96829987746669, 0.5808106228036037, 0.7373619595561713, 0.6747038302762518, 0.8435405639975903, 0.6424517005992338, 0.9596482551990884, 0.5936191890125296, 0.822075850030767, 0.9598903689660028, 0.5045594662758472, 0.8247681544750207, 0.5015438280848237, 0.8476717905245177, 0.7493300883887294, 0.8252242594186544, 0.6649783052425344, 0.6966567317397072, 0.5310445286845471, 0.7814811875874759, 0.948050185651895, 0.8065291474452021, 0.6340224323726034, 0.8418033600037064, 0.9179035820715697, 0.865058850007193, 0.8558035674344435, 0.9493483827416218, 0.8190388851773849, 0.5130174011796154, 0.9816079502470354, 0.657686446592523, 0.6568240798352531, 0.9415733397917536, 0.8626389566254742, 0.9407605330231296, 0.7515472238531464, 0.6965906703229641, 0.7818875700725734, 0.715608549260994, 0.7257951932673613, 0.6044155394184878, 0.8736699185163167, 
0.78002660995587, 0.9959724906084018, 0.8610882418357575, 0.8026326973490081, 0.7543533927949242, 0.6044329895419029, 0.9254861703078272, 0.7728851970565126, 0.9495692193810858, 0.8225433827787101, 0.6117801800380546, 0.6007413400581787, 0.512151295590902, 0.7136220796433084, 0.6726785754853437, 0.837051278229568, 0.8983834399968793, 0.8966189762249208, 0.5905426957483617, 0.6346561464433039, 0.8151027224376441, 0.8069014275203086, 0.5761386551976729, 0.7796551139749454, 0.7694196743320731, 0.5445035859571121, 0.9497779174119736, 0.802415202685284, 0.9034398633514062, 0.9251027433470544, 0.751258611026966, 0.7496007338761627, 0.7482120642308974, 0.68286773329689, 0.6638526821622439, 0.630999539081907, 0.7504401089732607, 0.8597898397363026, 0.9094734083820484, 0.6950293519526858, 0.5639634611089659, 0.7546279405289966, 0.5229279427510691, 0.8503848022410875, 0.6841886980747098, 0.8892401544405542, 0.5400828058196343, 0.7581190832395222, 0.6341869927814598, 0.7571338619364674, 0.9757039725842048, 0.5954822615156806, 0.918495422704128, 0.8213831645580896, 0.7629322321757108, 0.5375984262230764, 0.6841296897395255, 0.509369754543184, 0.5379385737716404, 0.578334068395886, 0.8671936747985138, 0.5241088654635915, 0.5908707145549198, 0.7458904887493875, 0.7427900936266527, 0.9443683061647723, 0.5977176681200109, 0.749414076111879, 0.5832507114723515, 0.6343106337554505, 0.6152564324567292, 0.544459415204321, 0.7286377510231854, 0.5147178557824215, 0.587430315027371, 0.7254392278607296, 0.9680658004534191, 0.9641972199939115, 0.6598862614350891, 0.6025179768936817, 0.7383544448705697, 0.5321527603871832, 0.8071388690006653, 0.6759634253960886, 0.96875374612246, 0.9785219605687917, 0.5426449011519274, 0.7154340917955045, 0.7733926954977101, 0.6051319385060132, 0.7919656685175012, 0.582783884058413, 0.7643652306804581, 0.9453506196186743, 0.910100080112977, 0.9013926088929135, 0.7282061969470649, 0.5548787406823243, 0.8897260747590936, 0.8802840549433499, 
0.9226048465848422, 0.6756495165588866, 0.5350817623052315, 0.9049424210492679, 0.7206442879569562, 0.821175964618183, 0.7616084739774063, 0.6401782674608727, 0.9661048611291407, 0.6168980250323342, 0.9492188363206916, 0.7862467956733794, 0.6452952566880065, 0.5865416835320064, 0.9895046710258321, 0.565232959235892, 0.9358809852245727, 0.5555983505255864, 0.8799738649380631, 0.9208233891239486, 0.6812541413237589, 0.8048092962784542, 0.6957204079718982, 0.8334404106493676, 0.7418420904656338, 0.8560448088451181, 0.5651072197815419, 0.9422104353621047, 0.5860618555281528, 0.6140787620207346, 0.7921684391657232, 0.5073260720408512, 0.6401163775436989, 0.5502769300803422, 0.6428309154491502, 0.8170275265907223, 0.90462642852874, 0.7272203415366845, 0.9205298159087297, 0.5420532300986789, 0.613155049374329, 0.750112248253685, 0.5229943762192848, 0.5171896164508238, 0.5659737391890066, 0.7244831311994109, 0.5147408453247706, 0.5389841820603736, 0.764324040080733, 0.9841283757757961, 0.7296937208321712, 0.7801033091702965, 0.7934505704654351, 0.6744138804166931, 0.5760814251647233, 0.5978952628790445, 0.7541446665417829, 0.6979343574632504, 0.5429448798324932, 0.9269228667582841, 0.7301335779573402, 0.5096777621505301, 0.5909212907250322, 0.5713647727436977, 0.8516848255514119, 0.772351247006394, 0.9386618704629652, 0.6425776777931338, 0.8129244931060149, 0.9829516578017232, 0.5158164577773381, 0.7503276589678881, 0.5374582713598223, 0.9889259111739777, 0.6576159843402063, 0.7032361008978025, 0.6960027153430572, 0.5753954434836623, 0.8366113954037089, 0.7211746368003152, 0.9272535000979407, 0.5443074792055225, 0.7898519094942485, 0.7489672227837985, 0.5358479711359421, 0.5644030718770892, 0.6797578770100077, 0.9021361292672068, 0.7445891346528879, 0.748427378660711, 0.807906446255156, 0.6879388833635736, 0.8336891222293232, 0.8335270099229657, 0.5566324338613295, 0.6283206390733777, 0.6662222483490997, 0.8976952395290343, 0.9851852530001031, 0.6362487280095355, 
0.6415519461415852, 0.7339396051735194, 0.8375555606672149, 0.9017714307353345, 0.5910208296920192, 0.9994853903902755, 0.7680053060873859, 0.7084192169417309, 0.719702351233847, 0.7150321761339089, 0.9199026299298587, 0.8583961642749462, 0.9123077970288265, 0.9801268510436205, 0.7694124671979803, 0.9977711768059914, 0.7808729029522172, 0.517698717815938, 0.5721440519504111, 0.5902377219075596, 0.5603847881560575, 0.6229092648346214, 0.8631637314620817, 0.6377955644455436, 0.825522179735346, 0.9272862576171985, 0.9918261220160202, 0.9469993936075171, 0.7110130749864174, 0.7035285488949544, 0.5655576482434004, 0.5478854080969313, 0.8254141537496864, 0.8455456385487965, 0.5582846687957004, 0.7502401026472436, 0.6466623879144617, 0.5069867165488944, 0.9094253048011065, 0.6066153327719361, 0.7959743649092642, 0.6354359120752822, 0.6352951429224203, 0.7772235911597587, 0.5648817319853506, 0.7373877302604825, 0.5292177086316652, 0.936449045040963, 0.8776226936691975, 0.7650921162385428, 0.9870116973098984, 0.9799814946569985, 0.6531661103044538, 0.8651601019246421, 0.5315514637650871, 0.9406306656726908, 0.8319332028212145, 0.778934327050578, 0.611648384604397, 0.8566557599172867, 0.5178464725586115, 0.836131441520288, 0.9990509949099902, 0.8123693670103296, 0.815966627090315, 0.8222110293830573, 0.5784544730164033, 0.7900377047808926, 0.807048854139029, 0.6799099263560067, 0.9588415841245569, 0.7601636389670612, 0.7636643074798406, 0.6330122412126793, 0.6309698697253822, 0.8137411528053211, 0.9143380672058543, 0.7228446646437703, 0.7091437111897982, 0.9005650999744241, 0.8293836862763921, 0.5927648983868019, 0.7736991631157731, 0.836154137349356, 0.6726132968552878, 0.8555702840533166, 0.6525023436115005, 0.7535556213509053, 0.789948018930556, 0.7722620791701673, 0.6939314071510163, 0.5771769623012735, 0.9724777103947714, 0.7267657363657725, 0.9601982757973841, 0.7262245546165893, 0.9414143907973666, 0.8662139153567912, 0.9106961802769445, 0.8749913090034203, 
0.7911981079727352, 0.9004569449872264, 0.7724840372651329, 0.9016093450065303, 0.9134015900253891, 0.56232878829791, 0.6509665490567835, 0.8248669912604385, 0.7259918699858785, 0.8821847435989747, 0.7591832466355689, 0.6106143693641999, 0.9243183356215019, 0.8847634593529877, 0.8993432365092764, 0.5629211821643018, 0.6865649860009135, 0.749740577853122, 0.7084890606722644, 0.5624662110765192, 0.8426720016757095, 0.6622153975824465, 0.8617690896667414, 0.7714947670142545, 0.9613975701483759, 0.7995340924506515, 0.7109632992238244, 0.6585034217069317, 0.6570512095073658, 0.9053192607621426, 0.6534826951414308, 0.6053607996143164, 0.7717096250121029, 0.5806726228377159, 0.8370337803325412, 0.9973802521205298, 0.8701435074018996, 0.8599414713357361, 0.504926980823057, 0.5990279903423772, 0.8581146985604826, 0.774322134461205, 0.5989047230305196, 0.5028442518216428, 0.9038973718058239, 0.5158125574629417, 0.5864407066296728, 0.657308234554759, 0.9133542386618954, 0.9443022309554223, 0.5882380623246493, 0.6117589591193298, 0.61902902374325, 0.7795780766968763, 0.9278580967535237, 0.8294202208408885, 0.5458787246445289, 0.8885634979652655, 0.9212018474725554, 0.5210193155360714, 0.6275335406240471, 0.97430399764934, 0.6015274372959327, 0.7902419791479653, 0.6477840688699652, 0.7442754880318835, 0.7034035818444324, 0.7013166294147238, 0.8129789045245819, 0.6777607303013593, 0.7219178802313524, 0.9873330603124617, 0.5529333313505975, 0.8591036760722314, 0.5819285641087644, 0.8068511666663658, 0.6738278780397275, 0.874665245188645, 0.5199959217527096, 0.5032146282267138, 0.5111660604998209, 0.9468157905749311, 0.970761504570987, 0.6465830915162406, 0.5813946227427468, 0.6008059688910286, 0.8804743774919109, 0.7000177965088477, 0.7465610992396529, 0.9595131009886428, 0.7831445471818819, 0.5116913896361619, 0.9511215620375801, 0.8802160175159288, 0.7348281906023952, 0.9941344208217588, 0.9452188162084931, 0.9758828021176591, 0.7277340360850741, 0.6966211914506797, 
0.9314571982846118, 0.6349728594969926, 0.743540416774957, 0.7094351100630181, 0.8782707137100911, 0.673768165796404, 0.6124054613212033, 0.5782439581588884, 0.516197894725021, 0.9471716177750344, 0.7726148345183977, 0.937639364527773, 0.6040706085225358, 0.5215082974070758, 0.8639553903487583, 0.9494994201064774, 0.58437122703902, 0.7626836844155824, 0.7725194555154287, 0.7254031437624855, 0.6914855310407801, 0.6499248980165173, 0.5390871386164875, 0.8781885591862397, 0.925353711628145, 0.6260396821476293, 0.571452812817681, 0.8650442953630131, 0.7798077588705778, 0.5793698258167418, 0.995904695266854, 0.8747551361987372, 0.5771900518249657, 0.8332627788391865, 0.8017629167033448, 0.8236271739680276, 0.689229098765527, 0.6626427586386291, 0.5007450384144088, 0.733420444164836, 0.7243672983644942, 0.8659719490634656, 0.7959370251374576, 0.6362495574281328, 0.6011659258072953, 0.9852072501023623, 0.9383177850021114, 0.683797602743597, 0.619787251858479, 0.8327543171462091, 0.966534065454618, 0.5720177598658012, 0.706992726858425, 0.5581509052815058, 0.6019268421461428, 0.5503154704210047, 0.7224256177627717, 0.9047594708038162, 0.8535540578617731, 0.5317050421520184, 0.8654200618379275, 0.6529712525257669, 0.7760286940955252, 0.9113510212136582, 0.5768304013121838, 0.5887363139029531, 0.5205102857712562, 0.8179177193456331, 0.9634642394304018, 0.9438980895202529, 0.6944666524722147, 0.7235604077438211, 0.8287840201995638, 0.5355681948520372, 0.6437575848523422, 0.8830882223711507, 0.9422676711310535, 0.9304466568841592, 0.7835581509522359, 0.9420025536658281, 0.5020773570598147, 0.5308579348915465, 0.6228845332537277, 0.6339603384562151, 0.9931567672977566, 0.5361221370873908, 0.8645516885970654, 0.7587614058267285, 0.7866811849234293, 0.952603881782156, 0.680860063289418, 0.7102329754911598, 0.9900124197131096, 0.537992293256691, 0.9030868893106413, 0.921719648830551, 0.8807244649095969, 0.885051527052487, 0.8477576304447113, 0.5427203836246096, 0.7735201241522, 
0.7493443413098726, 0.9603292861344646, 0.8873154158917427, 0.8968224191797244, 0.8530875951801131, 0.867781881637927, 0.8981690838915459, 0.5401194318345193, 0.9955068938018585, 0.5867964314515868, 0.8622380291081821, 0.9435682530044256, 0.5050369227574288, 0.5209493287240456, 0.7664671249732932, 0.8373531166112771, 0.7282848251916224, 0.9884448119297026, 0.9072447193560234, 0.9904605856057496, 0.5871245025927487, 0.5038290141329929, 0.889714726950144, 0.8519437653852222, 0.5339058011404392, 0.7884525383690197, 0.9607288512642234, 0.9261368141964559, 0.7905535468624836, 0.5011301258724425, 0.649438029609879, 0.7751113827643681, 0.7673198785837928, 0.747994205249525, 0.6281449155026327, 0.9388257461367895, 0.814992906855383, 0.697785717778894, 0.8564574948698167, 0.9856702162574458, 0.8249402689988383, 0.8164641030276854, 0.786576646349511, 0.8733196225695726, 0.8571905384827414, 0.603589670059715, 0.8956352531203247, 0.8458128534171705, 0.7824226840067958, 0.5838843302462676, 0.8755582782515893, 0.6408844698211044, 0.7607896943104036, 0.8251933954615642, 0.6146059522249956, 0.5076659046959615, 0.7320461528774465, 0.6604490903238776, 0.8242185250279589, 0.811294225829003, 0.539983749206328, 0.9827632704069376, 0.9952087049508938, 0.7846754830309182, 0.8237671304710477, 0.7705596948309629, 0.8614414204289182, 0.797761679808185, 0.520648612543088, 0.7279084584502983, 0.8922538778245068, 0.6156394034556336, 0.7139009055241594, 0.8210722522060306, 0.8903819593289566, 0.5872709459473362, 0.608336436315555, 0.6558238799003251, 0.6341817643419478, 0.7101972762615347, 0.7715393646066939, 0.910028051246778, 0.5498716866362121, 0.6923905945573573, 0.8205632362415862, 0.7312134373827541, 0.7343440507101482, 0.7059633988864285, 0.8741770438388372, 0.5503984270623388, 0.503042858129094, 0.6360451274323204, 0.7364944731733445, 0.9361882835187677, 0.9996864694437768, 0.6362089554904448, 0.9953520100332663, 0.6930817202154262, 0.7769924641436625, 0.7317694846187353, 
0.7822755819379591, 0.5460021769895691, 0.9523695769691674, 0.9598566561013606, 0.935157374971827, 0.5235476600623583, 0.9773681154783058, 0.505507625725735, 0.502247464577892, 0.5873178080014742, 0.5082704644326295, 0.7971525231108532, 0.785323373902998, 0.7863041642135743, 0.6799859387811807, 0.6915936357581286, 0.6391799037372303, 0.7194506944498889, 0.9643246123524412, 0.9578034702344362, 0.6868348163698194, 0.9740181992006678, 0.8233728145059058, 0.6090455995069652, 0.9919213667069353, 0.8225918190943015, 0.5722153559515416, 0.6466971640584545, 0.8188835412159523, 0.635352270044736, 0.606858916375445, 0.8086000272665491, 0.655719252284851, 0.6227769673619772, 0.6974808336519753, 0.8499070971419187, 0.601380143101638, 0.6749651183753537, 0.8760828528226896, 0.5578696371688032, 0.753789750050286, 0.615865925712267, 0.5617806364913916, 0.6955363968196353, 0.8722026530832309, 0.5346976818168937, 0.7391828093708055, 0.873394672842703, 0.6470347870115213, 0.5424983891483468, 0.9319429020602201, 0.5897503262913022, 0.8893369725019158, 0.798959269076549, 0.8484577992184688, 0.9430346628704371, 0.6349722534159709, 0.6447164616917106, 0.6055261048026652, 0.565708943093664, 0.5482088524580484, 0.7910540548306486, 0.9610567986209879, 0.7566005519802097, 0.824501709408838, 0.9383729632091219, 0.9606613314078527, 0.9411620872234758, 0.9272264209073104, 0.5514526848680157, 0.9963010981692335, 0.8581258008940429, 0.599065832770186, 0.6889601561255582, 0.7454988218064776, 0.5629343373604588, 0.8688092578778374, 0.5581593740439847, 0.6360599153005609, 0.6371510517666517, 0.6537778576594295, 0.6249803417479908, 0.8522052965303617, 0.6001920762166961, 0.8992721623717591, 0.9919097336835425, 0.8950436011196043, 0.7160756236475578, 0.8711297964806644, 0.6348238989727513, 0.6304816053092042, 0.6362250867938167, 0.6131915560696263, 0.9940759321267136, 0.6885060770487422, 0.6960961563933001, 0.7625133496847913, 0.6597340877733213, 0.7478834701642584, 0.8998512478567494, 
0.6852637542733634, 0.9566357178710628, 0.6280222855917093, 0.5913517861032932, 0.8412331350753671, 0.8688645090597189, 0.8753083500627209, 0.7386835350533971, 0.5406821942138791, 0.7987905846955892, 0.5710651075392195, 0.6355619476220505, 0.9721328977571808, 0.6970660862705734, 0.8749148766814232, 0.905706753342763, 0.7864036294690988, 0.5246670357349397, 0.9321077941779283, 0.5218160179944462, 0.8064764602392189, 0.6389540122604964, 0.963559734637439, 0.7428108407038007, 0.8671327506479165, 0.9107925183119818, 0.9135977597361349, 0.8082634232241097, 0.5379220338640927, 0.5165255350185272, 0.8021531926314007, 0.8716271725220442, 0.5305082699319386, 0.915559209222054, 0.8840820056173866, 0.9471516941651124, 0.926557098715256, 0.9718477179759468, 0.6590817563887217, 0.5913675722394167, 0.6369696790017609, 0.9106384387391555, 0.719528091157075, 0.8009579947080794, 0.7523304603432603, 0.7331044938243771, 0.5530409628142281, 0.6666887270683064, 0.9657379509138455, 0.8178473733340665, 0.6852914425556762, 0.7192666342970415, 0.7073912639055471, 0.8725909961250637, 0.605176269245173, 0.7237311575293854, 0.5365040350284151, 0.7709504698436517, 0.6214983482222878, 0.6240349487964467, 0.9996690594482227, 0.8205551205066512, 0.826862589267795, 0.5423302766184221, 0.9842278237667413, 0.773091211672985, 0.8196319509567942, 0.9761850616479957, 0.5764936137121155, 0.9683214205656352, 0.9316593091838121, 0.5345914612595577, 0.7727059178172788, 0.943850277306174, 0.9407340048034383, 0.5353041058911965, 0.7695986773464334, 0.7466608808825244, 0.9399240231293675, 0.8844037599470556, 0.6504970761769442, 0.9863928507259703, 0.6989293558059742, 0.9265078284245098, 0.5281693417372859, 0.8252836986692402, 0.7234138100035288, 0.8441236213071256, 0.5545009812258646, 0.5786700253472867, 0.7690452516853952, 0.5658561788048722, 0.5959351711058287, 0.7968114057538936, 0.6052474543512414, 0.6395851802885959, 0.7506283767017303, 0.502194217052345, 0.8741121983909836, 0.696078830486254, 
0.8965970678718375, 0.6748747514331819, 0.9733677855138607, 0.7596998691460154, 0.7264587390606567, 0.948832749630691, 0.7544402354091189, 0.8242394368732722, 0.8597521513764534, 0.5555220463073187, 0.7892311331626973, 0.7257274666952468, 0.5765030432832039, 0.6192704886803155, 0.8079141477627929, 0.6674654960148632, 0.7272847785503033, 0.6756974842548271, 0.5996125443930935, 0.5122462792811866, 0.9174085549058488, 0.5943268654772108, 0.9103536038775519, 0.9221452208003482, 0.5870631033667202, 0.891766263408569, 0.857145532592291, 0.9311680829138571, 0.8755344666034666, 0.9012567580101645, 0.5210707191573762, 0.9422137102267337, 0.8123514494757624, 0.9301981090015636, 0.9263445053447201, 0.771548555179675, 0.7841879465260464, 0.6722672605527822, 0.7370825204958562, 0.635465459392013, 0.8063026163063913, 0.5514159505057342, 0.9349081305163724, 0.6446201526983965, 0.5127423971596378, 0.5870938502625589, 0.5743253467093243, 0.5591090734487358, 0.8455064424854835, 0.9350283496028226, 0.9592196254762237, 0.8098288535599051, 0.9537005089852649, 0.5823111988632454, 0.8578752818253397, 0.585768066370915, 0.8831680427539519, 0.7021139544810788, 0.8384425908295075, 0.7913260673908126, 0.5664336691762595, 0.7197853700556349, 0.9824579843863046, 0.6757424707905111, 0.9832603467263322, 0.9104384262941277, 0.7303431034840833, 0.935055071863091, 0.6420054777502311, 0.5567148658980781, 0.9851596146096849, 0.6889664017114291, 0.9297839713014391, 0.9080834804120185, 0.5229862858435828, 0.708621678573212, 0.604537503718062, 0.5843847038227242, 0.9248096610814802, 0.7191418376142588, 0.5673323164578594, 0.7794142481468961, 0.9888720936491682, 0.9919139294481889, 0.8746232562174905, 0.9644814603271044, 0.5567400004509351, 0.7724384433853828, 0.6645488726918798, 0.9450158465947951, 0.8238149498153546, 0.7229629852133392, 0.5678948778694267, 0.7256584060730948, 0.9302689219722006, 0.8194021587402984, 0.8753507324956966, 0.9733550000951381, 0.7420595002188588, 0.8353603805060739, 
0.5640112268064146, 0.5352740396420815, 0.7379825355793026, 0.8738624699489599, 0.8382046319265071, 0.8480996544352566, 0.57654030875328, 0.5239772598439156, 0.9453230393701757, 0.617961777181045, 0.9304148797994815, 0.6022674833023568, 0.589605988765367, 0.7214539333741001, 0.654968652418474, 0.7283038589099768, 0.6179940181161131, 0.6937547277491053, 0.6854939616631658, 0.8493544218248839, 0.5369209707878804, 0.6391040907805651, 0.6001439495362031, 0.5597223772330879, 0.8031506974010745, 0.6642261303387131, 0.9868069222930591, 0.6384491510468828, 0.8553708625139232, 0.6989386923345899, 0.9404665130119751, 0.7832728762267307, 0.8278859599962497, 0.6262111905196429, 0.9136448848656207, 0.7801233676219956, 0.7721861929508815, 0.8273261069025927, 0.933777770661287, 0.7040727573546305, 0.8641011968307513, 0.5957033405739773, 0.9939760034511478, 0.9335005239290722, 0.5332094646999134, 0.5103742920575367, 0.723977892041942, 0.6251335109704081, 0.7220989215793179, 0.6308672324013286, 0.5029941280631471, 0.7357537280270583, 0.6736872049940814, 0.6187408898007409, 0.8041537356167526, 0.675853548512441, 0.9499363317635884, 0.6351896818600573, 0.5522411529760733, 0.8459164104460313, 0.9520157857947174, 0.5526102486056337, 0.7494358568867495, 0.9860840784269347, 0.7942858726589566, 0.7478374111036192, 0.8719438584228076, 0.9227365279903159, 0.8165025650483779, 0.6360474751136839, 0.573765415531176, 0.800247845618399, 0.9870374544729446, 0.9726930677076759, 0.884846080187987, 0.9414255555496662, 0.9034060926442355, 0.5903508598245627, 0.9717357354405942, 0.8679502023477232, 0.6336911781145449, 0.7946460747053499, 0.9613690951409468, 0.8692908497845447, 0.7403370704444519, 0.7406054277877585, 0.7223175516584591, 0.899752956040943, 0.5165240131066341, 0.50204354641688, 0.8146501602149094, 0.5792530140891923, 0.9993211620697843, 0.6806795913384456, 0.9942649650562598, 0.5414041628658534, 0.719562643026995, 0.6551491633501487, 0.5914218791481687, 0.5880675297834395, 
0.6822876255807723, 0.5519074748394075, 0.9730619365916178, 0.624458041601203, 0.6988180582769736, 0.8466415825895969, 0.9070635033548773, 0.9337187634637032, 0.9164819579333904, 0.8022082588447716, 0.797644960114219, 0.8634379578035718, 0.7447844799652571, 0.8774546874779121, 0.6431931853253061, 0.5230252307400471, 0.8632300279411491, 0.8012085205315975, 0.5024510393761998, 0.8552811032593752, 0.505443572650951, 0.5574458310741685, 0.9309626285808071, 0.6872912678633983, 0.7430444962837403, 0.8407051425696945, 0.5573257524099416, 0.8631553674007912, 0.6412404446334794, 0.7285425521158087, 0.9400707190841502, 0.8477115492459115, 0.5614554502920657, 0.619633179424081, 0.6126091621061225, 0.9382399773276356, 0.9099507176906099, 0.8277377726447337, 0.5307322921333122, 0.955536327198598, 0.9649699322942493, 0.8097984681635391, 0.8735330192697458, 0.7688589599308842, 0.7392546858048454, 0.6122423093913484, 0.8073344233441012, 0.6434278940491902, 0.5375007188405646, 0.7923394188478565, 0.8091434528106487, 0.9440338912058159, 0.8115872019102313, 0.7753655147856608, 0.6924738409012603, 0.8699461788587695, 0.5170479667116278, 0.992146915361816, 0.9995269521394223, 0.7805937885745025, 0.5291556392622393, 0.7517601685364261, 0.7961817139297116, 0.6391590234871372, 0.6095484491076342, 0.867301395310123, 0.9533276727033977, 0.587411903589989, 0.66792272538054, 0.9523345214857284, 0.6148894925927268, 0.8158719449208995, 0.756059727642439, 0.5009081372511728, 0.5012472284962648, 0.737247299950565, 0.9415680820750083, 0.6554686033487663, 0.6842166272296817, 0.6458488782875154, 0.7273054273504622, 0.6632435837345323, 0.987943082225055, 0.5826464927106112, 0.7158966102436626, 0.6553691381476071, 0.7289242868098997, 0.7889811331204526, 0.9517755353145807, 0.5339089505375244, 0.6548245336374527, 0.8733338225221887, 0.5203099759363916, 0.56236803822315, 0.9652009000561731, 0.7588894914511056, 0.5784175075080198, 0.6339122700348616, 0.6590615386263019, 0.7955495039625022, 
0.9554078324269487, 0.865149779680215, 0.8034385728620793, 0.7350106366686651, 0.7572071931848392, 0.6163167112002167, 0.866135881430524, 0.6061617949018, 0.989394015874587, 0.5549434642884974, 0.617084430256432, 0.8675993852716162, 0.7515785921951659, 0.7084039705887266, 0.9071995194653251, 0.6758807890615847, 0.7025641953029929, 0.5289786029471281, 0.5151513380537904, 0.9202151392185343, 0.9038527815506181, 0.6637331126752628, 0.6862173802741992, 0.9710921194676476, 0.8952428809228756, 0.8580801860100827, 0.575743973130131, 0.7853020932582342, 0.5341098525262933, 0.7571721725580749, 0.8813121510947759, 0.5405830668800213, 0.9347868488546452, 0.7337457207835676, 0.9011142170165534, 0.5910348847231306, 0.5471478503970024, 0.9519217906894581, 0.767251831443715, 0.6041231850849308, 0.998900064253295, 0.7472350349728045, 0.7185139724830945, 0.8322199578790063, 0.7043869510138521, 0.8582290014819363, 0.9751090045146935, 0.5735406704571773, 0.5256843340683633, 0.6964604985684065, 0.9526418555162575, 0.5477750115625999, 0.5973600579587193, 0.8521007009835788, 0.5931424015014031, 0.9467078969779139, 0.7288784102303474, 0.9887563833408415, 0.7385706341822376, 0.9130024039183007, 0.657478917459555, 0.8352886283115846, 0.8427407299811147, 0.8229561599147421, 0.6096160192419359, 0.5357218703799154, 0.6683600019882516, 0.8634987525931513, 0.6458605300010933, 0.5018876339927323, 0.9627305016274751, 0.6161814918341494, 0.8275363479460646, 0.5117445531074762, 0.581559861257529, 0.8950908076700244, 0.9683687299353383, 0.5510586890209821, 0.6638286620099064, 0.9240057908313225, 0.8769369530670927, 0.8389735884293803, 0.7848930322651494, 0.8345005366709617, 0.6860191129934217, 0.7551220110987271, 0.7013147694023386, 0.8575566652056583, 0.7380734292992517, 0.863154290602175, 0.5828372616884099, 0.9099417183997924, 0.9626057457947856, 0.8905133374172609, 0.680759758056823, 0.7039009140246736, 0.7198725197771925, 0.5117210541105777, 0.9546846995845888, 0.5288632417102436, 
0.880176684452145, 0.9894010707397338, 0.7330513111882395, 0.5963460937723953, 0.5564915628920803, 0.837146394996737, 0.5160754525120426, 0.5686577290536003, 0.9756353350811161, 0.9985730314607663, 0.8633490677526926, 0.6244810936491085, 0.987506366711443, 0.560585990614486, 0.550845168642973, 0.8694366917813132, 0.8203688551572812, 0.715956542313152, 0.6144202196380414, 0.5774523633242221, 0.9537814047125743, 0.5790428076375542, 0.7796715861384553, 0.9023104980652343, 0.8227416691790517, 0.8574455489002122, 0.9425359308055201, 0.516845639847886, 0.9204864171913631, 0.6961091767164143, 0.8387924115158569, 0.6471023853195853, 0.8678148448797449, 0.9087616641555555, 0.8243253529482056, 0.5358392087152326, 0.924934987706379, 0.7371224458043093, 0.999268794564987, 0.9973061305888389, 0.5441225052538691, 0.8389019360363947, 0.884342343428402, 0.7322295386492843, 0.7968938165122266, 0.5092679065541165, 0.7484090764480738, 0.5427028339347797, 0.6859299785724274, 0.7281427237967777, 0.774453938437313, 0.8010683018755631, 0.5717844684486955, 0.9992780839362846, 0.6979626462477668, 0.6889754586917087, 0.9302602162143062, 0.8757838572055301, 0.646839660421571, 0.5884576989916769, 0.5867389871805289, 0.7503887008057454, 0.9035621131805441, 0.6064782810846077, 0.7778843988728577, 0.9170359766666525, 0.7301957390489748, 0.6933170884677745, 0.5585263178097403, 0.7137111778367289, 0.6529991620185698, 0.5654723525058238, 0.9884062252974346, 0.6394536248751955, 0.7736395473703896, 0.6323189279341378, 0.9177689391925734, 0.7898677018117501, 0.8988621858050794, 0.5998172523557341, 0.6317006906632736, 0.8332873836553158, 0.7711633376865864, 0.8631884315457896, 0.6244091936458203, 0.7340025010624667, 0.7245979343205423, 0.9555158216929912, 0.5213077901131763, 0.7405089780148952, 0.8833659529912998, 0.5881172129040733, 0.926330367478251, 0.6746004033193174, 0.9084904679454786, 0.7896002291868531, 0.7598353901602235, 0.5706948623033774, 0.8254972985495006, 0.7352449786942404, 
0.5120856684113388, 0.5315003444453832, 0.5616595195126723, 0.8706061463112675, 0.6814984614426904, 0.875190833984115, 0.9926650147143579, 0.7672845229018712, 0.7937750968238775, 0.8283666271558211, 0.5308035498437738, 0.772119778757026, 0.9100577371601554, 0.5328450239174466, 0.979669607734148, 0.6610792911627558, 0.8162201621292682, 0.5871150324385919, 0.7613446685032714, 0.8713981957744954, 0.6445526462049653, 0.6658427732204781, 0.7944804634646253, 0.920314806880153, 0.952936418526781, 0.5649095569817495, 0.5487141975973385, 0.7132957245555249, 0.5091927211718152, 0.7007915432098168, 0.9244027801955063, 0.6584457601441271, 0.6922616108017079, 0.9171302210780846, 0.9414056060902245, 0.5513856535266255, 0.8419190752280072, 0.8778396440453488, 0.7239131489091031, 0.9560367566478314, 0.8304561988591725, 0.9139517500823218, 0.7894413429808342, 0.9029395193664302, 0.990940775664438, 0.6593118495401235, 0.9486892655033976, 0.9768233104909887, 0.9301826368635776, 0.6124381488990407, 0.5441660281081113, 0.5093765713396958, 0.6711250213502493, 0.798314807064079, 0.6573056690471669, 0.7170732211739163, 0.8535751960556612, 0.8444215430976928, 0.9813491620826227, 0.5369586867397191, 0.860114198671005, 0.7815189422408508, 0.7624155907509259, 0.6729122468099822, 0.6840224909160406, 0.9021325141994361, 0.7731095334794715, 0.6504360530962451, 0.7031117842872686, 0.5215670838501183, 0.6924330444452, 0.6168228241697655, 0.646004220711135, 0.6382001872162537, 0.8517230468431767, 0.8634880590591774, 0.7396428821210087, 0.5997860156359345, 0.7921996172057345, 0.6896543476819358, 0.6395201841436029, 0.6155956541405854, 0.7263563784538201, 0.5390182459925098, 0.69331595278687, 0.6385423786748617, 0.6617555889871267, 0.5102366541568008, 0.7724615895910922, 0.7636289041486937, 0.5184411885086964, 0.8541316261758002, 0.7747680386583431, 0.6390633779317545, 0.7731410934142613, 0.8536951968108772, 0.8853054875461037, 0.6255540823561775, 0.9407867223668411, 0.6315503615721917, 
0.7778722654843064, 0.6524969902375071, 0.8876012196591614, 0.8214356220619887, 0.6377968304331385, 0.8183472874540201, 0.9028225383818062, 0.5996058730285235, 0.7593161560984718, 0.575992862804927, 0.924465600159188, 0.8589127389772402, 0.7486607231736101, 0.5915538108676446, 0.9062367607599304, 0.5433255372022414, 0.6595653502172145, 0.9557792245253739, 0.8508571061146291, 0.5834454998750074, 0.9546960375861332, 0.5967025657711924, 0.7734242953860511, 0.970843901624217, 0.6060780057333263, 0.9320846761569805, 0.5668448281834433, 0.8301792971408364, 0.8490016682306271, 0.6170405716693426, 0.8959907036548195, 0.9361999163714092, 0.6336240181211032, 0.6277263932596173, 0.5124254561640167, 0.7648448874135448, 0.989661819613592, 0.7252213785749106, 0.8545788488436239, 0.7043756053261256, 0.5756218741196866, 0.871260205507764, 0.87381413501619, 0.5948141005916745, 0.635427336055595, 0.7782309844416029, 0.6840288721011681, 0.6315369255477354, 0.6951665943339383, 0.6254526793308657, 0.8320113089243022, 0.6135750542094365, 0.6833237054617922, 0.8150935668696533, 0.994889259645769, 0.6483681100208303, 0.5851808095114677, 0.5437423670805535, 0.8903838814915281, 0.8272551920333365, 0.8838936656969443, 0.7288832685151663, 0.5605485888884207, 0.7602747867298909, 0.8574120002789631, 0.5335794896631173, 0.815033438224527, 0.6094643784776088, 0.7460030634204053, 0.5876663206186197, 0.6348354127444049, 0.848504440226718, 0.8919211976025943, 0.6992533135266592, 0.7415072038752718, 0.5639297615839439, 0.7596938676374698, 0.8792570720658492, 0.6740590894147045, 0.8455572996782195, 0.8880168043199572, 0.814225342259349, 0.7315047814953104, 0.6614914785355068, 0.7762059882960279, 0.7575969940598501, 0.6383702961597786, 0.7844473358271989, 0.6633185142583427, 0.8342689388167852, 0.9414065671602974, 0.9179553272225416, 0.5701517455013294, 0.6936762853978828, 0.7573796597044875, 0.7527751235508133, 0.6688354197185848, 0.5196997502584553, 0.7176404726892684, 0.9652011722755022, 
0.9708751063762607, 0.7674831123731418, 0.6240088291162796, 0.505090541827045, 0.9780640005970426, 0.7545638833012638, 0.5509102680682512, 0.5828106028627774, 0.8912679311135547, 0.7017480653743106, 0.7331998836151055, 0.8518953102046496, 0.5714581741555148, 0.550909757303863, 0.6730993681943074, 0.7054791984520665, 0.5767283710838983, 0.9296766907485283, 0.8365070831460203, 0.661130232945691, 0.9540858591562655, 0.7225515212540341, 0.7277173784664888, 0.8453899720410105, 0.6622832553227549, 0.7516877832647164, 0.9356106880917298, 0.5573294276378735, 0.7328016306488547, 0.5638754436813347, 0.9520966342213749, 0.8920189497816349, 0.8380646922218815, 0.6725100298452646, 0.5421900083681157, 0.9373436245611221, 0.6679121011219129, 0.8086249945060171, 0.7535540330713151, 0.7852778456636447, 0.790888306473177, 0.9757599792523581, 0.9642829785057687, 0.9586969954254465, 0.8835665835198057, 0.9496497518404878, 0.6385839003136378, 0.9878483809061412, 0.8153050523632945, 0.9304284077248157, 0.6964423106059063, 0.7102660138457473, 0.8044331702062312, 0.9508903571099662, 0.7326894989985919, 0.774159673418922, 0.818915779460676, 0.7744522539608831, 0.8077157787019906, 0.9866379318249698, 0.5927689912897689, 0.9991450570014321, 0.8654538111015284, 0.5977933445901655, 0.5747938099223273, 0.9413170414582781, 0.6193844299984156, 0.6463953776850195, 0.8430962153076926, 0.5764414603914474, 0.570962660625452, 0.9179172014488464, 0.5316456831027562, 0.9595266292527473, 0.6063268322738932, 0.8232062589807712, 0.5660651711105849, 0.6485587625587348, 0.5059116338446665, 0.5688547387576752, 0.7355642688308717, 0.7851614267922912, 0.9227712332430009, 0.57045826843964, 0.609966425304332, 0.817014162621649, 0.5707625253590509, 0.7215645438610719, 0.8684301882150207, 0.7176642465394139, 0.9605157428603137, 0.7311140593071636, 0.866535382007537, 0.7885091848034721, 0.7516081629732061, 0.6074686141036252, 0.5624116042154514, 0.6045816747657915, 0.5676002789849917, 0.5427044880098473, 
0.8663204244462486, 0.8963890445192917, 0.8893996980675272, 0.6515596972854836, 0.9952099835464322, 0.7716944945412094, 0.8149798546924985, 0.8943160113402251, 0.5088059251519701, 0.532459274332486, 0.9050873697384183, 0.6104669604714952, 0.6515399480088291, 0.7549170941932117, 0.9061486694369036, 0.553611305734802, 0.8856359032010247, 0.6145840657705401, 0.7641110683329642, 0.7150015630916995, 0.7926489923581513, 0.8204384072332189, 0.771232697573623, 0.8226086973045528, 0.7839874711770528, 0.9282934285380748, 0.5008312349452013, 0.5418049670665133, 0.6313272263795774, 0.549117519293226, 0.5719803029024055, 0.6100464353139263, 0.8761765159736936, 0.6429483942957441, 0.6196038378846589, 0.7609029122473085, 0.9214576945584977, 0.9645497848247164, 0.7369835256458757, 0.6216957310225268, 0.9786840966534742, 0.9751111497041811, 0.7892226669769602, 0.629820587906426, 0.6200789584233202, 0.6791704347270491, 0.9134147666637352, 0.9729130379986044, 0.5517889029969911, 0.7115658171862862, 0.7252061554369221, 0.7138246412706165, 0.9849652084070071, 0.7827845605816215, 0.5296755737564074, 0.9671138433072439, 0.8502192933213482, 0.7493136440188322, 0.8674747815914826, 0.798318840391611, 0.9896010171298834, 0.6809497456599645, 0.9197768368391976, 0.6504686126640813, 0.5358762214453237, 0.832830083827018, 0.7110409716588864, 0.635078606213124, 0.7558052663657242, 0.8921789420442451, 0.8487454996494218, 0.8641075396087056, 0.9800124565795845, 0.5000559721552484, 0.6065085015867893, 0.6582789911791302, 0.6406264564379013, 0.8870931520624259, 0.9894145586589953, 0.645635146105423, 0.755491820730436, 0.8895081783427561, 0.6874905304653735, 0.689533443825578, 0.7927853045235663, 0.8750278958429307, 0.5605027432341125, 0.7587664578311641, 0.9875234727151312, 0.936000672086516, 0.5951125123026333, 0.8783917955142768, 0.68758425164619, 0.8433129345947887, 0.7175049447368512, 0.8117172070496694, 0.6369571733259263, 0.7190107581201763, 0.7624178356078095, 0.6904457660565789, 
0.9519617344769022, 0.6566936771848639, 0.7582753895913427, 0.9817337293032052, 0.500080006824439, 0.7497836465441834, 0.8367733452195928, 0.6543391464255732, 0.6359994142496221, 0.9194168914851195, 0.9244422407297501, 0.6660510204138014, 0.9288512290495021, 0.9146788525587133, 0.6426105995088313, 0.7144386854085156, 0.9437820259416208, 0.9558328907843214, 0.954366235484343, 0.9385991639496123, 0.6023793717581377, 0.5974258537869948, 0.7811538486507303, 0.736984610291792, 0.7671914390027867, 0.9503837035755609, 0.8142306131613228, 0.8343212176586222, 0.5631142071511115, 0.8979790530290099, 0.874173618358426, 0.6871130898575205, 0.5185619478074466, 0.7478081326938273, 0.6554609463921683, 0.8922228757756877, 0.6795019753061504, 0.5645629715254767, 0.6059904219680993, 0.7414008817305552, 0.6680742863635135, 0.6863845668998695, 0.8642156362622928, 0.5981523809676895, 0.7918183505915263, 0.5671282333810108, 0.9441367227992512, 0.6732490613235761, 0.8215712557264803, 0.6582216620876005, 0.9521003968287074, 0.8638482535219381, 0.5690008230598235, 0.6608346343717719, 0.6420428710055248, 0.9021221029813375, 0.524402579923368, 0.7456752315863353, 0.7155901809402152, 0.8952348895473125, 0.7978919452674176, 0.7545986458902445, 0.63578230770617, 0.5408472120569778, 0.6527944960513558, 0.6837518661956224, 0.8830131677033306, 0.6221532872749751, 0.7667388229234762, 0.9848950197534263, 0.6501881666800187, 0.9519520150048292, 0.5487888734456635, 0.6653561207964898, 0.5308596754366892, 0.6996120075633679, 0.5267772815516115, 0.6234443349382139, 0.8620337298902546, 0.856596845014818, 0.5024998274567096, 0.8522919470863339, 0.7849478329537662, 0.8208647448398487, 0.9982808205289218, 0.7980655184123273, 0.7256188426574154, 0.5077688273101739, 0.7808265080379737, 0.6025540698792263, 0.6918947110981011, 0.6109700817600172, 0.678993065245532, 0.8829655860220751, 0.8837752579161613, 0.7574441252285893, 0.870754158442457, 0.5022633023889127, 0.9358340979997148, 0.5674821379134618, 
0.9095576782902091, 0.7718076324325522, 0.999756528972937, 0.7703482758575615, 0.6430475595959688, 0.8637006067770885, 0.563658121084998, 0.6210464824400493, 0.7952384000906114, 0.6723816109212228, 0.5277760837401848, 0.8078466684611402, 0.5803571819445605, 0.8417755712958603, 0.9722994623557875, 0.7866580716665681, 0.8565240162583267, 0.9927960107381547, 0.9889071566665536, 0.6415724711628291, 0.5646575023384757, 0.7922501210260146, 0.9557313399602778, 0.9091807057424097, 0.8379572475529509, 0.8708083835979041, 0.589577319403417, 0.9090787723883196, 0.7504313954290216, 0.9147562021959416, 0.993692875488213, 0.677387428454445, 0.9226431215985085, 0.6553238335762699, 0.549509142885388, 0.5486011295031906, 0.7020553777851175, 0.7237979737901383, 0.723669562360168, 0.5771303827707688, 0.9806523763241399, 0.6526879538709627, 0.6482851801841928, 0.6229604825103068, 0.5977003698192891, 0.7703076860276437, 0.6175749420021998, 0.8097708002255222, 0.9662671174359538, 0.9043354473254901, 0.6387816878537749, 0.9552972421706176, 0.8161664661515777, 0.9309985700408483, 0.6500369676936437, 0.5812443184922517, 0.7909130588069917, 0.9776325516056859, 0.9054624943183023, 0.864535773853826, 0.6002545295186092, 0.7448822345064816, 0.5362230621160828, 0.688758601031672, 0.971836841195679, 0.6865164456221627, 0.5388450756922325, 0.5773557648457891, 0.7815565021144992, 0.8176529547539346, 0.9510778865726575, 0.569122207614216, 0.96313859336764, 0.9783310820501129, 0.6648420856187144, 0.6241624983397844, 0.9760697728045902, 0.6538788045102955, 0.9629329055953695, 0.74425868556308, 0.9291936326460746, 0.8354103383442933, 0.8843808057311033, 0.9095777097897808, 0.9401207657419168, 0.5616179231494967, 0.8628156275747954, 0.6073448602173697, 0.8508102170915143, 0.7078387289051243, 0.7258850929824697, 0.5205311960703491, 0.8988691424519363, 0.93615396606798, 0.8326248997891011, 0.6757509651661018, 0.5665614891874233, 0.6226916914959164, 0.8405993661147282, 0.6515818784408864, 
0.7110778350728437, 0.7554359792263752, 0.8687386595377649, 0.5001902412097599, 0.7964130426338334, 0.9303340289970154, 0.7759433141136296, 0.846585580258258, 0.6033990721481306, 0.9413285884935536, 0.5413971860029716, 0.6257970463081501, 0.6755476041019361, 0.6019143414536647, 0.5696316613629091, 0.8550403337834482, 0.8525767646381681, 0.9565781576860086, 0.8491081851987208, 0.8250728927115919, 0.6709507643090785, 0.774935394525021, 0.7235401833688564, 0.6235966288800994, 0.6328910679763902, 0.9484806168172337, 0.7161679218539456, 0.8210717753405995, 0.5439375048751992, 0.5304810455592698, 0.8119810916835811, 0.9644995483839338, 0.8741621343132255, 0.5455657915959617, 0.869685199245947, 0.5912716224499379, 0.7051620354131545, 0.8388951750013531, 0.5158565540630072, 0.610205656731832, 0.8300667480475091, 0.8573503116807768, 0.7057672969286046, 0.9159604331293216, 0.7173407429580148, 0.6339455051293346, 0.9808703345807535, 0.9733823257212734, 0.7604664097423224, 0.5129993499512808, 0.8671968707949897, 0.5365741213605325, 0.6168921247835548, 0.6880944334908004, 0.9871064536713507, 0.7734120121635125, 0.6242907903305939, 0.538306378781372, 0.6802472413326192, 0.6323253056192311, 0.7335469822804119, 0.5353524684077403, 0.9727977194554296, 0.506895597262834, 0.6277063867113679, 0.5245653575180587, 0.5907262924168901, 0.6632479535646121, 0.9772142417752963, 0.5616144225747579, 0.8531719766579451, 0.5931451039738543, 0.5721106700047913, 0.8755097539136604, 0.9104665541252905, 0.7549753075084722, 0.6617001863826524, 0.8178495470871461, 0.542245457273187, 0.5470862744335719, 0.8663679504672593, 0.998326955856168, 0.8045462627830399, 0.5854665970523644, 0.6741864058637215, 0.9951874851156981, 0.9330357760878578, 0.6044133076910604, 0.7427322146942776, 0.9412165364422151, 0.6398983582939577, 0.7344881466458819, 0.773332090889497, 0.9547492572352527, 0.5083405233970665, 0.5779899375912815, 0.6991050567050283, 0.6205916073721164, 0.6056357293467758, 0.8345659303437994, 
0.6729320238031301, 0.6061673550828377, 0.8648717448227128, 0.7413910361594924, 0.7829219247288526, 0.7540959227697169, 0.7151463861209262, 0.7974096902355978, 0.7739528720019456, 0.6563624262003962, 0.7140480141715536, 0.9419037161874184, 0.5587708745152524, 0.6389351702647315, 0.8602413334611787, 0.824927643700302, 0.8942570050665345, 0.71379883380052, 0.5685073507522647, 0.8430280855424881, 0.583059440363771, 0.8676531944334638, 0.7362683662463755, 0.8465168759580199, 0.5198989470876668, 0.6238397385105181, 0.8339993009601113, 0.8483853345347845, 0.7811814165540996, 0.7863362267566205, 0.8730368021568462, 0.621753808386192, 0.6514606115043966, 0.7137141949200607, 0.8104010016446057, 0.9769529052133872, 0.7256309439562421, 0.6824984209005515, 0.8295016317839301, 0.5251956693414451, 0.9864158666509952, 0.5772301399625873, 0.9875782000455907, 0.7191168070078253, 0.7234769132324198, 0.7372753560007737, 0.771738742286141, 0.7629948200716083, 0.7258424771076999, 0.7839074693600454, 0.7767065540760667, 0.9812427660444494, 0.969865332874206, 0.7082239742742146, 0.9903062014635321, 0.9128234526938969, 0.9368833275105366, 0.7289130643644749, 0.7556827443323716, 0.7682478444474018, 0.75980742853592, 0.8291794001070514, 0.8496240076817969, 0.6255923629223423, 0.8265051581166337, 0.5959184153193282, 0.9670051775566576, 0.6809657671717968, 0.9757321082280281, 0.6945695821238158, 0.7827790368879024, 0.9053598285720763, 0.8991833943113972, 0.6155742981539738, 0.9536512425431225, 0.7908514690288, 0.5037332597324491, 0.6858020767818784, 0.8742505544263284, 0.9450357191339444, 0.7068723593586657, 0.6486352657637666, 0.8876419236078792, 0.5268979355600686, 0.9341286390950982, 0.6011839024125996, 0.8207111552436646, 0.7401921268650421, 0.7887619418443532, 0.523956558604113, 0.770264698131278, 0.9082358915246818, 0.9654067092599821, 0.906296990897494, 0.5039557907747881, 0.6008561678107764, 0.604394865995223, 0.8088001913499139, 0.7927232600700231, 0.7671335874184606, 
0.5630460311034273, 0.9506989607617382, 0.785609302729967, 0.5101612945594375, 0.6067251838158287, 0.5127263544195717, 0.6834158029687761, 0.9395270316546953, 0.8063458218161524, 0.633908346357819, 0.8376939944666062, 0.7944021096262497, 0.5549154533196559, 0.6982975860713625, 0.8439523991179256, 0.680171268461726, 0.5788034869838906, 0.7019055473746338, 0.5412075349761283, 0.7859355306823858, 0.6558450064576099, 0.5302014895265901, 0.6148566984818946, 0.6887587634098675, 0.6594569013958811, 0.9170656101439103, 0.9737415823904696, 0.7087425605307122, 0.9101983902879333, 0.9658771660963614, 0.6835860193915324, 0.8750874356486786, 0.681662631958729, 0.8481467766737314, 0.5880519082631828, 0.8203495523787578, 0.8015248540740145, 0.6981299441186393, 0.7081281371954384, 0.8196100731677938, 0.9703320818720874, 0.509461757240379, 0.9943232999673288, 0.8844766551670455, 0.5467485376286707, 0.8792482975023211, 0.5634763843572344, 0.5482589425719138, 0.6901244756810097, 0.9740788701238245, 0.8697009797471662, 0.5864465235684261, 0.8010031416216071, 0.9250421535216236, 0.6348268788697667, 0.6434678646579195, 0.5889966979322467, 0.6670546302163707, 0.8627571210602751, 0.8722687225953956, 0.9004175034218971, 0.5935895503930033, 0.8559795698340189, 0.5077662829860846, 0.620058014440481, 0.5163958881317496, 0.8666977090874687, 0.6808186992580899, 0.8440424531502704, 0.8741968034822576, 0.7992956235754738, 0.799261848787018, 0.6826274867805695, 0.8283285312255879, 0.6006065804586413, 0.809438016871797, 0.7587371875019433, 0.5630875847783551, 0.9295606186802705, 0.935601035940265, 0.5430469177671382, 0.8980398445636784, 0.5417282336228656, 0.9574654039231186, 0.8690928794456307, 0.7644216769873693, 0.716813083400157, 0.7069567804360728, 0.6197870463055837, 0.7967049404960627, 0.8717378759355833, 0.7279528127447579, 0.8768612050777398, 0.7506679063522612, 0.7134660464918008, 0.780077166272757, 0.655946014136134, 0.5280198175905362, 0.9939030163334119, 0.7429407471717188, 
0.7469776000088644, 0.8376164901432406, 0.6094686321634675, 0.9477088639391489, 0.5766331514146295, 0.7532452673921928, 0.9203474061511454, 0.9966576398629334, 0.9492402381358382, 0.9090140461519864, 0.714007725483506, 0.6144628421456717, 0.679685446786062, 0.8684838977306113, 0.6193131844320424, 0.8935486582845376, 0.5664141137294072, 0.8106868445144895, 0.7140046164829956, 0.7047221991293801, 0.986342087129952, 0.7717969244229994, 0.7391605894941138, 0.8553249091218393, 0.5583003467985519, 0.5886530881238241, 0.5121536666943505, 0.6788584989881076, 0.9578499024393627, 0.6148512758386527, 0.8191606608569187, 0.5168880650229227, 0.8315595811856531, 0.9575686041035664, 0.5137384928391516, 0.5106819648576597, 0.7886244256716753, 0.7910878441706821, 0.7404645928889197, 0.9758455911969199, 0.7851334954287759, 0.5706029329715465, 0.5074152634582181, 0.8462374312248466, 0.7788780323502821, 0.8183331258747355, 0.6902538465681798, 0.6707895835753073, 0.7973028319522545, 0.5063524106810142, 0.6580053699544799, 0.596515337246516, 0.7868397849912903, 0.6994890855870946, 0.5937344952583283, 0.9932996317120621, 0.9840837900365083, 0.9405537824392072, 0.9117346867493494, 0.8740409340340765, 0.9936265396830009, 0.8705311284402826, 0.9099220316616122, 0.9055449101057935, 0.8623898619543964, 0.9791032309139369, 0.914461014614841, 0.6444304679436672, 0.7150132632746078, 0.7058768006472431, 0.7202201252126308, 0.8695937829693159, 0.79188607290727, 0.5034995421833053, 0.7449869621501994, 0.9343861735375243, 0.5758205441658621, 0.5392529122598739, 0.8629134995873893, 0.8791982597790581, 0.7601598293506338, 0.6692730806311594, 0.5183657640203589, 0.7940034406460567, 0.8769394891367321, 0.8477641758079717, 0.5056848843379858, 0.5492613233035433, 0.9212499046825329, 0.5316364526687535, 0.7646816997245015, 0.6788823087299005, 0.7863321904209051, 0.9898066636660356, 0.7108556147442388, 0.6612060709277119, 0.5700169656819173, 0.5215933940036153, 0.7333554456437458, 0.9609663805251559, 
0.9231929138734121, 0.9337371669457257, 0.8670775325289848, 0.7314566525314543, 0.5467782555700698, 0.5705936791661985, 0.6234410119440137, 0.8815242350194572, 0.7679519209752783, 0.5371027687463987, 0.522037551991223, 0.7137993711253314, 0.6520880426434443, 0.5408784959508292, 0.9153095992639101, 0.8762551958531081, 0.6363404340954004, 0.8147626316497365, 0.5543575018033272, 0.9886443026111833, 0.9076949872824994, 0.5926609161408057, 0.6374067082678001, 0.6115289328620721, 0.6815715551040169, 0.8471066522142261, 0.6468866673919186, 0.7750541203251822, 0.7316038588297746, 0.9700857170855068, 0.7490634752068355, 0.8068685831070006, 0.5839494070463915, 0.7496030935078123, 0.9007745574643156, 0.5102686038791264, 0.8438527561282501, 0.9930130926984095, 0.6468716059317183, 0.9752869440543016, 0.8731601641747315, 0.5483120300887341, 0.5953613920035372, 0.5104349749140829, 0.7311296409742496, 0.974301203051281, 0.811799781571604, 0.6097140674901507, 0.6797058026524148, 0.5144526275849111, 0.5903831796675205, 0.848562886864056, 0.5892925915140157, 0.9821578179029494, 0.6683998373761568, 0.5892800265000978, 0.5959639190603867, 0.5814636734124988, 0.5953027728528983, 0.7557420825423358, 0.9949805042631438, 0.9176148587815305, 0.9900150150191829, 0.9193615861585676, 0.7737622396672961, 0.8414469002006981, 0.5535279094153006, 0.8678345829647095, 0.611264107468184, 0.8640925884351307, 0.5060537630597941, 0.7358460342244464, 0.723510013216413, 0.7401298872831905, 0.7378804005228252, 0.9269241435137228, 0.577352149151625, 0.8541362583126486, 0.9666399343230403, 0.7187804920427878, 0.9503534067744577, 0.5087445020294419, 0.8085377674287706, 0.7704565562360006, 0.7734220111854782, 0.601109897351751, 0.7903568449004019, 0.5353557658973362, 0.5561758212535487, 0.8722133041474303, 0.5320648589774879, 0.5637251211449286, 0.7314589167746395, 0.7035825265968116, 0.5712156088898348, 0.9774144516851367, 0.5384041925295441, 0.7345506883069897, 0.9836169625421645, 0.7950522138295567, 
0.7897242717036588, 0.9183827531731419, 0.6229721037445977, 0.6746804045584598, 0.7565765582462101, 0.9266101925928852, 0.7298409475026295, 0.5704956270615525, 0.9799473769481859, 0.6538063497093778, 0.8964725691935138, 0.682302664188223, 0.6921317902184793, 0.7304867819967408, 0.7547805181625152, 0.5829412852949875, 0.569872103406133, 0.8807288802767499, 0.7509706685836235, 0.9660426084595501, 0.9217901327810607, 0.8456605759909811, 0.8934460227506991, 0.6710167988250784, 0.5979633842848059, 0.6798207423035745, 0.9130384589568533, 0.5132997829445594, 0.8503031588341262, 0.6434871853418189, 0.6006570816449182, 0.9198694158774043, 0.5425505687291619, 0.6521798736026094, 0.6836914973658679, 0.8205298829843898, 0.5781853469164084, 0.840431941275729, 0.9751913879650309, 0.6485494394927421, 0.6562435763672974, 0.5775776260846277, 0.6513004972420562, 0.9886271295032967, 0.9298343399947526, 0.8573268339027899, 0.7325026374156536, 0.6188055372284378, 0.7746551011859772, 0.8751964161937282, 0.555905519714358, 0.8852795148513938, 0.636620133352392, 0.8967131867907963, 0.8460072727130885, 0.8078969869820216, 0.572146891839379, 0.6907859516289473, 0.7452111823596044, 0.6178622458073662, 0.8102002241078541, 0.630587439590538, 0.5934953952749475, 0.9373016966186146, 0.5054610015028231, 0.7099131068564407, 0.5714760025937884, 0.5621453439407077, 0.7462634863641237, 0.6463455377038514, 0.7758606747794017, 0.91754104403023, 0.5183575939451247, 0.6255371500569755, 0.6901913354180865, 0.6298047961573544, 0.6875869451381393, 0.7062429869683298, 0.7077796492074167, 0.5193328811071523, 0.5438666202880804, 0.9359411408850066, 0.9520268441638357, 0.9684753086214691, 0.9871883875699745, 0.7386504529875202, 0.7522102539682822, 0.6521095480663308, 0.9205002129051287, 0.9044487460008785, 0.6329307164109058, 0.5909183136674853, 0.6271436440446326, 0.6774196544422912, 0.846861137240982, 0.8342703616125807, 0.9681464830489246, 0.6660704101588089, 0.5676060413443387, 0.7481699211997752, 
0.590522376810963, 0.5827564610197038, 0.8737223379947909, 0.691794608676941, 0.8666372964817817, 0.9558309674396461, 0.9657506527751494, 0.8875698683663412, 0.7547077006974585, 0.7290726346682792, 0.7710625123507272, 0.9057406489217421, 0.7633444911055577, 0.9476441475882756, 0.542562888597435, 0.6333260906455465, 0.7848750243642019, 0.6359716924505678, 0.7187282724263773, 0.7652140891685351, 0.5260324284115959, 0.5230472851149174, 0.6761576863865053, 0.9225611119583732, 0.5634242949612384, 0.6562942110642864, 0.9592643146757249, 0.9823090032608259, 0.6139094180864593, 0.7906136714936169, 0.6228931977024228, 0.6579681919955609, 0.8295998543562044, 0.9325071006763894, 0.9902568174534989, 0.7019565323386201, 0.9199509589023591, 0.9245437157128957, 0.6598810327506761, 0.6986123559424948, 0.5300365458624923, 0.728903140103657, 0.830874691176252, 0.6047918039723696, 0.5568193238516881, 0.9202933397074484, 0.712874652313106, 0.9149449243532493, 0.5833193045840612, 0.8610493144663511, 0.5191110378633998, 0.9894191971309938, 0.6094902080695475, 0.9555562240296971, 0.741214605426687, 0.5290185115055004, 0.79728789357286, 0.6882797342798401, 0.8923522982409217, 0.8057345869926738, 0.9599051969579523, 0.8407598472179947, 0.589645864655787, 0.9176530347316918, 0.7300850535181439, 0.7853355196748435, 0.7454322117785979, 0.5532127537511888, 0.9785962272261043, 0.5905537264000956, 0.8432487612895594, 0.8203017993685966, 0.6710378243392128, 0.6909397107501671, 0.7211292132781495, 0.527314906095141, 0.8116194976160014, 0.7441041077237811, 0.9477476022940986, 0.6293735888466868, 0.5419512190976233, 0.5651802924634752, 0.706220675302992, 0.5074632508893993, 0.9787699250095376, 0.8740561954036175, 0.8294225343949846, 0.8227191162638525, 0.6417033135078625, 0.7437792114909827, 0.6403612838762727, 0.9328183227889411, 0.7029914009638571, 0.8681413089201897, 0.9442671896675905, 0.7585418703321896, 0.6794518010053563, 0.772322846871085, 0.9528765905520373, 0.9664977563272539, 
0.7562522089810153, 0.6392991945232034, 0.6222808935334542, 0.8279570579235013, 0.6341060324302766, 0.9031021301069986, 0.802002400676671, 0.505578653739801, 0.912962160572334, 0.8713173429408001, 0.6390428468904029, 0.5908936204735615, 0.7945709519129285, 0.9475333288230099, 0.778745643493481, 0.8170960985575497, 0.7508662626677216, 0.9599242311616736, 0.7744327947214878, 0.9167070878739147, 0.8300788941293703, 0.9778463504769739, 0.9871798717886135, 0.6122368003825923, 0.5666307290151955, 0.9783927879680387, 0.9822706668823967, 0.6649322801002437, 0.5430965451364691, 0.9986544455202371, 0.6952814837732141, 0.7349170497998216, 0.6278094086632011, 0.802045458980239, 0.5497174261644576, 0.9523121792539233, 0.6241935852817637, 0.9587884917291114, 0.689824548257564, 0.857121555888116, 0.9740057758862213, 0.979265754670857, 0.6913518422150862, 0.99760284509281, 0.9983855024438831, 0.6706661380032899, 0.9547088131750132, 0.5060909313854629, 0.6867253673199448, 0.7951345738804387, 0.7243264702950678, 0.5721265068932961, 0.7430520396055388, 0.6469365213888953, 0.5889760924962494, 0.9551158089662939, 0.6913681092315056, 0.5922313899588452, 0.8664042141266572, 0.8115308667024341, 0.9806699989278861, 0.5694569606354845, 0.9072070380871445, 0.6173462826116789, 0.8341765058307501, 0.9682730258857101, 0.545241452688916, 0.9034264152283727, 0.7824720497613445, 0.9184924211034589, 0.5831003825515858, 0.6075705747967812, 0.864027745407216, 0.7580604268454465, 0.517160893502495, 0.8868836957454981, 0.7847689148147282, 0.527400419576874, 0.693190446325883, 0.5765461755990934, 0.9437406832210771, 0.8772430069991488, 0.8033066667622429, 0.6184577066189134, 0.6395587020056827, 0.7418359932019427, 0.8547438666726384, 0.7272225880134492, 0.718704480968376, 0.9345825281302411, 0.6957076442967451, 0.5387639655468093, 0.7592059146683239, 0.8748777726216762, 0.855987430511104, 0.749010541588795, 0.8634206586310182, 0.8495443343064946, 0.9465650124882212, 0.6351505679149587, 
0.9461441205415717, 0.889295758708957, 0.6675679723296711, 0.8420677433210874, 0.9337191909392202, 0.7468835785309835, 0.8839129268465203, 0.7946723451939854, 0.8895466491376695, 0.8961626368468458, 0.5121029779440802, 0.849147766597251, 0.7682102448571397, 0.7989519083300534, 0.7865117434772482, 0.8950231869991041, 0.7876088568034689, 0.85554912186893, 0.7214374591687175, 0.5187812224209014, 0.5240874195396423, 0.6824291334504814, 0.7652741734215883, 0.8064787847090141, 0.9147460547460837, 0.7710145811444251, 0.924252674326156, 0.8500682606395646, 0.6171421537968234, 0.9342781180791884, 0.8580221078886219, 0.7111138192045275, 0.866976614931233, 0.8916171055640963, 0.6355119270308696, 0.9074952009757923, 0.927569699346803, 0.5705601256450781, 0.6922342946244342, 0.7115993536010479, 0.7897991062145038, 0.7942725973474504, 0.7513025384214407, 0.695926334200474, 0.5702259344660491, 0.5724711988093297, 0.6649219748280832, 0.7842877263817938, 0.5363461489871306, 0.9425163131176608, 0.7252391330415715, 0.915897109260265, 0.5542377127591511, 0.6600757278030294, 0.7415296026004143, 0.6617392346763933, 0.9004632526631084, 0.6796498830420972, 0.6708497040095693, 0.9983793991670797, 0.9328940065831414, 0.817015594694765, 0.6401262618513379, 0.8333572123274824, 0.9655367639171866, 0.8342146472708187, 0.5047611733565573, 0.6099391566905511, 0.8296892313182594, 0.8019627049337127, 0.969760386140785, 0.6438986652959446, 0.7578500704963014, 0.9905670774036427, 0.9797504848466887, 0.5887129479586061, 0.6722512769949606, 0.6369585741052902, 0.759382723763201, 0.6799309217458427, 0.9116229821577054, 0.7232473903246877, 0.5893634641598794, 0.7037857078621659, 0.8642607344856321, 0.6342940580965062, 0.9570447944368172, 0.8108900498813527, 0.9182740013092765, 0.6751874997906469, 0.5269469784492896, 0.6085946021664639, 0.7679999253009596, 0.6465075790681712, 0.751573502721812, 0.74940667522129, 0.5581564148794014, 0.6388995994117443, 0.9181876791589225, 0.8084159248601714, 
0.6773384743956117, 0.8241663663475938, 0.8319784406531555, 0.7183551520422901, 0.9237708533781789, 0.8466696226279593, 0.8658717689256948, 0.7975082229396816, 0.571663340606704, 0.5813970019949055, 0.9113874233685182, 0.5293755928608965, 0.5256980716063653, 0.5017446175981466, 0.7913435302798484, 0.8517348013100761, 0.598679354954965, 0.7440470979717769, 0.5199427750441246, 0.8535962987996475, 0.6069397374337346, 0.7150646174654964, 0.7529658384910554, 0.7043338854024099, 0.9741418240007701, 0.8552643855824491, 0.9634483838519525, 0.8489492626927924, 0.7546066235541978, 0.8785389637774085, 0.9198787718832461, 0.6787680030036713, 0.5198083082183431, 0.9485779182169143, 0.5085586596569871, 0.8575360159069498, 0.8166193606453238, 0.5196621646661814, 0.6470623110070439, 0.5226509362765877, 0.5900816572277021, 0.8699453462763788, 0.767514360335483, 0.5313129431129221, 0.8132824014333131, 0.7658625378594875, 0.6014067099767318, 0.7531334135826995, 0.9139648020200914, 0.5465839591741937, 0.7406508424996027, 0.6490701192791335, 0.9777356062487055, 0.890958068795684, 0.8895901446230301, 0.7908151614344257, 0.7230041706027575, 0.8231403268716235, 0.9531762883496021, 0.9106062195026217, 0.6228319876693438, 0.6218308775504738, 0.9063527931777632, 0.9964941707290418, 0.5480071916940161, 0.7032188763838997, 0.5873050931496897, 0.6630234790525704, 0.9661254629837557, 0.5142567242302978, 0.7645334415164511, 0.6177847873613345, 0.9483037715831025, 0.8015527192599182, 0.9625755109454361, 0.9930495867483418, 0.6227671884482655, 0.7324720980806865, 0.8113202890440456, 0.6218843806336298, 0.6202209011968242, 0.5613952568263465, 0.6153642895382844, 0.7131324277317246, 0.7523692948199576, 0.7396593336370267, 0.8131913880148962, 0.9037721880476797, 0.9150096149347242, 0.6585771877529025, 0.9466271082598279, 0.8907215713702149, 0.5118170661278311, 0.9757206913419789, 0.5565211863691255, 0.8732124176415287, 0.6674955103759167, 0.8872445817037674, 0.5232781897355445, 0.9162691974796877, 
0.7128557682990595, 0.539912316569634, 0.9886003998044786, 0.5582596367981942, 0.7427243757269651, 0.6267930679059905, 0.6354555581917156, 0.9811928317300935, 0.6651886633379088, 0.6977262578794653, 0.5348836199007428, 0.6553608721574518, 0.7423123868232128, 0.9928638982988087, 0.7991858919181479, 0.7812327645108456, 0.7470059334203096, 0.9635822133326402, 0.9694753842086898, 0.5134490042298839, 0.5838363944321778, 0.767689793430596, 0.6591673941622938, 0.8545620397469404, 0.7821712576064366, 0.7240788217934258, 0.7433176828836832, 0.8750653931392751, 0.5765097383197615, 0.6361090748461231, 0.7112080580374573, 0.549557737487159, 0.9738512377043128, 0.539523980921929, 0.6716662456489315, 0.6935337052919881, 0.6456315144182497, 0.5290645867648422, 0.894261216884999, 0.6685408987987108, 0.6655189830622653, 0.7001779575099594, 0.7132700163012219, 0.9764399216838492, 0.9033452934410335, 0.6407629377114712, 0.9860885420640633, 0.8019515316602209, 0.6222261628616605, 0.6809813456720559, 0.9816032855863401, 0.9161120107450793, 0.8371153992220716, 0.9483846775256416, 0.5124452547120203, 0.7225938306585353, 0.852526289334464, 0.6847655345962924, 0.5959525125230868, 0.5299318071333041, 0.6670427649285655, 0.8690891503441056, 0.5923726283978662, 0.7128045977419701, 0.7625276686136393, 0.625929581417299, 0.5652516338050293, 0.6426608295410114, 0.995699581335271, 0.6737603918667412, 0.6843146231943231, 0.6337717228276332, 0.6711096220467289, 0.5648029212056591, 0.776863961829641, 0.9066077452704311, 0.8128291196802694, 0.9462397914991955, 0.9722128540823919, 0.9399883998992268, 0.7033623226097039, 0.5626162966448176, 0.96695890848151, 0.7758622728436593, 0.5723056414512113, 0.93922360392649, 0.8994424312492398, 0.5080377891754017, 0.8275472004236022, 0.955603193255905, 0.6931847894368811, 0.7761630501381329, 0.6218517082408739, 0.6873571932009659, 0.795516379540289, 0.6803685748839416, 0.9804504703422925, 0.6462944345111931, 0.5081893097204986, 0.8691597214994289, 
0.8838067645996932, 0.5456480317006549, 0.5950199425226022, 0.5181668200726851, 0.7850480544161145, 0.6360825359355856, 0.9499647231425721, 0.7128709218121763, 0.6548127941522107, 0.6511721086987323, 0.6652895090619155, 0.7954092252293646, 0.9578776563615983, 0.775916915864207, 0.829976060697744, 0.5789110300118435, 0.8352881829399423, 0.9702606267030025, 0.7802688821094172, 0.6695088637136226, 0.5750087629851326, 0.7545230746296789, 0.5858758941047959, 0.5690290866845167, 0.8952916706891628, 0.5219558282397825, 0.5946266042284032, 0.8544613056111535, 0.8965340854312234, 0.6228129474269233, 0.944437560530557, 0.5314323892140806, 0.5916399350084192, 0.6302222831508997, 0.6878335211099054, 0.5849060777797022, 0.9499714824897154, 0.6992610627994175, 0.8659185795869534, 0.7551895310837757, 0.6878587900444297, 0.9377760556285135, 0.834623391390326, 0.8467315738682699, 0.5291111656061869, 0.7501040564627416, 0.8016543453362529, 0.5920951724243292, 0.5500136588726485, 0.666811968495526, 0.5711435233016371, 0.5904158886115318, 0.6725586204577417, 0.90198222767453, 0.9502099543082565, 0.8470790523280353, 0.6242187631059095, 0.9024211811959547, 0.6159475346183614, 0.6176108231332735, 0.7386105764496378, 0.6625159618180698, 0.8031160634828163, 0.6487926469013334, 0.9501940803451178, 0.9425658992210723, 0.8861968188942257, 0.6328790769309508, 0.59340830547932, 0.8216995687528705, 0.909311046441329, 0.8932791817436441, 0.5132120464726435, 0.7608079319594474, 0.5155267330021667, 0.5092095475243361, 0.862179267397635, 0.6256192728205885, 0.9809753081687194, 0.8949477671324134, 0.9593642370094123, 0.8651226844480817, 0.7884572155384264, 0.734744145576124, 0.7742856094086499, 0.9517766451752766, 0.9276076398832886, 0.670077509445093, 0.7632008325135392, 0.8618494655616495, 0.9109121208552686, 0.6443377303296304, 0.8955081493776202, 0.51731294136897, 0.6007993958555099, 0.5458401458810198, 0.5137266062364265, 0.6924359894664367, 0.7614430751665677, 0.6012357732304892, 
0.6195952145331016, 0.9186915342764346, 0.782742021632167, 0.8237458397995374, 0.69640907588066, 0.6483473632091574, 0.5599652445397691, 0.9350188178472796, 0.7087109971445016, 0.712850347815372, 0.8271739374967204, 0.734275832478656, 0.7401413651237085, 0.5991186769893461, 0.8175800810975173, 0.5008638075495012, 0.9939843491781227, 0.90489315376946, 0.7709306854624349, 0.9835295938551804, 0.8480715680898916, 0.7223348385049042, 0.9536696701881286, 0.7843431572065018, 0.6586215588219373, 0.5907432789239357, 0.5693180219380188, 0.7146653564155254, 0.9332152536813454, 0.8530933981154935, 0.926906534122903, 0.9420222804053167, 0.8028238068473967, 0.595417770784451, 0.8120284649279297, 0.6863581170362573, 0.5970254822717731, 0.9804672306580717, 0.7739418004108239, 0.9402689458206686, 0.9502939234125307, 0.7687410640562264, 0.65660951822674, 0.7937274228574114, 0.8823547854499252, 0.8076599532145059, 0.8946708923073687, 0.7050311655452377, 0.5765701241095607, 0.6205336976280684, 0.6198754899606117, 0.7603211646907875, 0.5183199214515348, 0.6012708330074977, 0.9480765023941007, 0.8294632620428553, 0.8702304995829456, 0.9911287583885566, 0.6700538655460215, 0.5781375989147626, 0.859067128670368, 0.6948510518972867, 0.9283983325437464, 0.8693311055891553, 0.6190275901585365, 0.9149742860515866, 0.5944739525426304, 0.8782985442829271, 0.5743436714534731, 0.5992852470382621, 0.6832342290776159, 0.8324958379616685, 0.7168854912087665, 0.6063559068265988, 0.6455254134374562, 0.7955481290760057, 0.8412998502400431, 0.9916067164381382, 0.9237525671438356, 0.8307948076973568, 0.9726804655569681, 0.6762845162840283, 0.5361316150767967, 0.5005711828058386, 0.9554002562295898, 0.5360774413407717, 0.8014155355266699, 0.6241362030621489, 0.6833086279093877, 0.6053230746500022, 0.962119732309127, 0.6518729338009841, 0.7193126724788127, 0.9096209973694134, 0.897697516311974, 0.7051137476570414, 0.8473496054336787, 0.7466073470269557, 0.8298801443725504, 0.9510611044447337, 
0.6817383296444388, 0.5130492073137576, 0.7505564445120791, 0.6852888832051347, 0.550939473869557, 0.8547333547383891, 0.9227306322413524, 0.6487642408996843, 0.8909228463734833, 0.6445528287777342, 0.6268396887319996, 0.7574121669734872, 0.8921266969721058, 0.8423743412533167, 0.5123466036775046, 0.8471894186192879, 0.8532988774484345, 0.5065489628104339, 0.990263457513439, 0.8943995733520101, 0.8301698190607809, 0.9976170643782316, 0.5510597207342556, 0.5821295371452987, 0.7431719294280579, 0.9018036220075891, 0.9211500097798228, 0.6326378322383319, 0.677551262857065, 0.8228231869495276, 0.5886278737168401, 0.8696472056794755, 0.6733578438009721, 0.6267571051563964, 0.8820094419736886, 0.9353752094622305, 0.6704820643285987, 0.7106454338356627, 0.7135777381381889, 0.7427215464667138, 0.530434274814855, 0.5681508197859824, 0.7156043186655001, 0.7992037578117456, 0.9486218169932861, 0.7814263226813429, 0.6439059087373202, 0.5717490931895569, 0.5473527658935254, 0.8282376913980285, 0.8028555870049627, 0.628169800302576, 0.9880510184727862, 0.7487738384219551, 0.6917121831218305, 0.6593201351400373, 0.6439186605156049, 0.7554717129085864, 0.7630544813650183, 0.8121609728023234, 0.6305191658904397, 0.8905406146991565, 0.7541258976717424, 0.8281193307849621, 0.7446297201411589, 0.7252931643290985, 0.9909924512828439, 0.8060102826133053, 0.6212341837751765, 0.5637079099676242, 0.797792654434947, 0.8355109989126472, 0.7644714069975632, 0.645585668419173, 0.8189054577525324, 0.7857903872026908, 0.5914928313601078, 0.5976386003690091, 0.8652336233027127, 0.9914850697648452, 0.9086456152188989, 0.8704830455578684, 0.6276075138528714, 0.52914298792594, 0.6195097408434715, 0.5885502351304424, 0.6608741369192093, 0.5866597752868904, 0.6919754202093273, 0.6927078202304044, 0.7972031140440152, 0.5697109297434009, 0.6953058978593024, 0.608403246286292, 0.7166494533995618, 0.5198599154330195, 0.8115547519336773, 0.9791278669054138, 0.928118412733935, 0.6590175559346291, 
0.8679086650424018, 0.7360603458616535, 0.8710231325396811, 0.5180368278838094, 0.975020480920893, 0.5142449952396251, 0.7911652513627483, 0.8769839045774863, 0.6610394515441891, 0.9310348564280464, 0.6799965691836011, 0.6732872157898673, 0.8349605377530334, 0.9834237531769703, 0.9235930851723169, 0.6767464446848138, 0.835843282460293, 0.8668014117698113, 0.6021685635572162, 0.7258776705614188, 0.8951130473333039, 0.62300484492394, 0.8798407792621468, 0.7267583229106322, 0.9700888366762808, 0.8886013685775583, 0.754716151515249, 0.8239832159326868, 0.7881822551921112, 0.8603441773482214, 0.5888134806446903, 0.6779197043528884, 0.8097284335973631, 0.5809631637905689, 0.5273509055677443, 0.898579839374415, 0.941652782462451, 0.929589495687299, 0.8378962612962453, 0.6126717472416345, 0.9921297180408164, 0.703759546659913, 0.5966530965467078, 0.6378934262622764, 0.5765731353106781, 0.9640399571498555, 0.9713406566277575, 0.9825713713592038, 0.7451287261287436, 0.8542391545954193, 0.6454418795547616, 0.5861760423350318, 0.8188983311257081, 0.6674865858658641, 0.8639709574977847, 0.9994063067348345, 0.5846749075493848, 0.8644985462470287, 0.9854578342075038, 0.862903555605723, 0.5003378789902059, 0.6883518017647419, 0.9030172331695439, 0.9247272104883573, 0.9588028882553716, 0.9789884826311752, 0.852668437206129, 0.8168705590196959, 0.6976501429408454, 0.9342788334477232, 0.5875421041894149, 0.714092959407634, 0.7646700568335592, 0.9058448822961994, 0.5146738044530863, 0.6603811461767091, 0.6092342436191933, 0.5132137065788493, 0.6239426396466639, 0.640883066142616, 0.85272347917719, 0.6897648743357301, 0.6192642161324101, 0.9952279223179481, 0.7650356815644315, 0.7865297189835554, 0.6695430453662721, 0.5182983843682736, 0.9168750466736839, 0.6755096187302005, 0.7114556634376008, 0.5057939209288003, 0.5370033077665864, 0.9778463664653512, 0.6332871584924726, 0.7912353744832437, 0.9763246639682768, 0.6867854290672684, 0.5617803395816848, 0.6393682579085034, 
0.9738631476638406, 0.9780031745781824, 0.822257524273144, 0.8157066337825956, 0.5164705338207201, 0.5513299404248634, 0.853655391307778, 0.8779292224836985, 0.8472924566845708, 0.7697266257157449, 0.5642599237712325, 0.7737976536135589, 0.8706118341672465, 0.5366320901373511, 0.9008567188038847, 0.7054724682993907, 0.6113179937098443, 0.5876026883245187, 0.9742078858307466, 0.7608212019685795, 0.7604782195471025, 0.5282461248675484, 0.620405572505111, 0.8333154304834924, 0.8870137900885434, 0.826422450767583, 0.9184601436405122, 0.8456279074121822, 0.7821380703762446, 0.7068817606021603, 0.9107523732868413, 0.9318889680403144, 0.8654418685642673, 0.6793807167085084, 0.612160178701372, 0.8842703188100205, 0.873598145269701, 0.6497215786109007, 0.931213346116476, 0.6149022615487212, 0.6444430081426185, 0.6860674397890315, 0.5821402728781437, 0.8298407722541518, 0.8277469797251489, 0.8979713171812418, 0.9719555912526902, 0.7338945127339414, 0.6014202947192002, 0.5996411374935335, 0.6875396867713843, 0.6194227565714142, 0.976116816008839, 0.6191253737444731, 0.6270397102879768, 0.6096836324840562, 0.964172713244203, 0.9765487416653424, 0.5012542756996514, 0.9853121654734658, 0.7588402051858953, 0.6913137713460542, 0.8173645047640308, 0.7927132675599271, 0.5637200278496961, 0.9278666904151456, 0.9223921215224923, 0.9605473696266573, 0.8004861656751482, 0.7846748249847542, 0.7997726390026219, 0.759722410668708, 0.8707174461655023, 0.8484383699703109, 0.7771136847442852, 0.7490747023760962, 0.8374325166397312, 0.6797280749504526, 0.5094589745317621, 0.5774042663400778, 0.799135702576284, 0.9978946313458136, 0.6976765298446879, 0.5325088921798913, 0.8979688761871206, 0.9439145157704123, 0.9853786738893369, 0.7244910120228047, 0.8162834024997014, 0.7363181470790152, 0.9335992583972548, 0.8949327281474738, 0.7036800425802852, 0.8492598601196545, 0.6059856691436523, 0.5673179036081584, 0.9494205954682107, 0.7173811595295181, 0.6442003116111723, 0.621354656878832, 
0.5288075666863968, 0.6255678383872645, 0.7147315269088537, 0.7705661855995328, 0.8952816799899129, 0.7803951271543091, 0.8109590743596506, 0.5065763582180836, 0.7537692895645269, 0.6751783848693034, 0.7129734354996993, 0.9786346359328832, 0.5199900297093932, 0.923264790970669, 0.7708853583278132, 0.5558448682591703, 0.974072118832112, 0.8335501048463222, 0.9586804908873681, 0.8822644617555269, 0.6896995887387583, 0.9861778868379506, 0.521824542176839, 0.7276511345991714, 0.888586028044792, 0.5203101117779306, 0.5790974199428722, 0.6012777423187119, 0.938000600142089, 0.8498914493178635, 0.786800744548739, 0.5893644306958297, 0.650491999031011, 0.6465311880390288, 0.7441021114291638, 0.734150012487329, 0.7962981301126105, 0.9852242295673475, 0.7268978546554093, 0.7280172364920512, 0.8224751064581635, 0.957028404103943, 0.9817905560609668, 0.5369959622380341, 0.8851414437107447, 0.7929848647862605, 0.807733844961908, 0.7705802218344472, 0.683696344304934, 0.7864658199232268, 0.6529224200640622, 0.6485483226572991, 0.9226476936338444, 0.9108097307296873, 0.749987958388814, 0.5340881910880257, 0.5743099409946999, 0.6834955134875677, 0.6332320592091154, 0.9472666311929288, 0.8632035565699447, 0.5496392256823733, 0.9225772922897166, 0.5932527539937337, 0.5579673345133628, 0.9042606707220053, 0.7759484123832183, 0.8132800869401332, 0.6951648648603059, 0.6466080216231732, 0.5545075707601956, 0.8907637020995536, 0.7721959963931204, 0.9371775334993759, 0.5472016957589538, 0.9827815566175959, 0.7103844939514767, 0.9562510645727873, 0.7427363060905783, 0.7591947003887387, 0.85239092809422, 0.6746616016735691, 0.8887936411809347, 0.8137214413615869, 0.9609913026595587, 0.9542977940386217, 0.8293422313746968, 0.6730371874261274, 0.8305182561369495, 0.7344726966956477, 0.5297834057638697, 0.8852817463492392, 0.981109104229754, 0.9771910943206247, 0.8246471323810557, 0.6465816566979541, 0.5185803594569404, 0.9160379373392784, 0.9790886807563529, 0.5609047849425232, 
0.9778775396250454, 0.6603141662832859, 0.9778851081856152, 0.9346784703423079, 0.6445556890237055, 0.9132693404266976, 0.6573880588826733, 0.6270206650422874, 0.7986380270306527, 0.5129796320686948, 0.6452812788186566, 0.8100569736208453, 0.6483221033063677, 0.6323232543101662, 0.6495963502841746, 0.5062028796151388, 0.915129589082625, 0.7941088677900563, 0.7513661420014064, 0.993339794528106, 0.635076502267028, 0.7729634199494155, 0.8293048610242163, 0.750513141171602, 0.5303449527549902, 0.7010304783190036, 0.7554963329341913, 0.6259107608920911, 0.5349013480268159, 0.5827284995939663, 0.8263151156361411, 0.5298323842573325, 0.5946158521914476, 0.9754955643513266, 0.9868279007227565, 0.9822323475895809, 0.7778159126061002, 0.8340734307910702, 0.8705956153127026, 0.9123992191312413, 0.9011754782912376, 0.7888230855118635, 0.9598534768210858, 0.5835878685745621, 0.6666254354876704, 0.7855861210421888, 0.6196351934231503, 0.7058713257874311, 0.7009958890966677, 0.858755515751394, 0.967243060850965, 0.9742052190664889, 0.8769455426032402, 0.8548811864988783, 0.8217109913428968, 0.5039482513343911, 0.776669430812695, 0.9583196506784865, 0.9042069648792446, 0.8645100553615441, 0.9754850490785422, 0.8216024510464697, 0.6279274217654651, 0.5322829293201456, 0.5058638587372077, 0.7460724924613097, 0.9871406004353476, 0.8493758396379559, 0.8585729798406558, 0.770108896534936, 0.9650246170385749, 0.8348820461929722, 0.60299740449321, 0.9319782660342892, 0.6268648263997225, 0.6773845902943788, 0.907752697881359, 0.9782705332329722, 0.6728167846282175, 0.6400517857926313, 0.6062783133743771, 0.5205097222533196, 0.5637524728412673, 0.8736634880872969, 0.8736429969737349, 0.8685729838861147, 0.576031582312912, 0.8561096627763827, 0.8687854157072612, 0.6521478671680432, 0.6184318914381468, 0.7531819156927135, 0.5712781510153637, 0.7623846887943988, 0.940599434233417, 0.7421861771092866, 0.8117321796850808, 0.7690124748076372, 0.8365313525118758, 0.7312482908719278, 
0.995285166396167, 0.905168791804503, 0.8410865717767019, 0.6628847675859183, 0.9892132508665801, 0.6034828558646433, 0.7579436731662544, 0.8219378041611534, 0.6212196114144277, 0.6688487755419619, 0.9977676025686084, 0.7274792502222478, 0.659635070687246, 0.6261802567230732, 0.8842574360130357, 0.9888965698147376, 0.8078023682623976, 0.5118095312159286, 0.5129849204534497, 0.6526435713082869, 0.8056304404959924, 0.7915808929124195, 0.7918384343262539, 0.8737286916089619, 0.8960291116398265, 0.8816681699247096, 0.504664801259557, 0.5215889552940515, 0.8627626747193922, 0.706742306441511, 0.7139829497476511, 0.9148624985455633, 0.5695928069862761, 0.862868265396909, 0.7377691880221662, 0.5249107014233279, 0.7115159414749305, 0.6241963826413459, 0.9217004465952123, 0.8490823642501144, 0.6808014826709099, 0.9946269349702261, 0.8621524721925775, 0.6080065744849723, 0.6013647523212218, 0.8689148801331339, 0.9398339072349008, 0.7438180218187744, 0.816002922297372, 0.8615670236000187, 0.53608568131713, 0.5640950734030019, 0.6859198341795024, 0.9919194499178707, 0.8336428896804791, 0.9015540497683754, 0.7404822105604493, 0.9188961623724864, 0.9233726720608525, 0.9514711274036023, 0.8890186086921745, 0.6147398542101986, 0.8487767436880712, 0.5005282834222371, 0.8401860401965184, 0.7300428418851678, 0.849591239224952, 0.549387674869787, 0.5265894123235442, 0.6004406585565386, 0.575484271527253, 0.8056397375419742, 0.5796497950906796, 0.7042862413577691, 0.5291701184536199, 0.9224110884209167, 0.776218760764015, 0.9931010694442037, 0.5093446217481975, 0.5743131129918393, 0.6105828631943347, 0.8632721637832277, 0.8817064628143183, 0.7608735920549523, 0.685643838877565, 0.6118845479171254, 0.9851324980727088, 0.7756526181240012, 0.9956248501949077, 0.8987438543257749, 0.9272288042343748, 0.5265215628441791, 0.8029871009833407, 0.7526064168693669, 0.9864025258191482, 0.739281317975619, 0.9715788393824061, 0.7648472759065864, 0.6995531137746249, 0.7120063267721094, 
0.9718099424398359, 0.680753995739255, 0.635625280798557, 0.9007197634811972, 0.9303705548798495, 0.5156369904636888, 0.6366345340799231, 0.5576873139123852, 0.9577903624717886, 0.8936854399284058, 0.7088547571653638, 0.7693662968482091, 0.6795442842139074, 0.7928807573797967, 0.9254757996505798, 0.9823579234454809, 0.9110325701620141, 0.9948385167229596, 0.9379603112090108, 0.6316143512324224, 0.9705343487643872, 0.6326778071125656, 0.999883239624098, 0.5648533819024387, 0.7218007843164547, 0.9928605197276925, 0.9969408494009111, 0.62107548376426, 0.5783013062079394, 0.8557480155075801, 0.7926068282548712, 0.6923457502345177, 0.9846985745857866, 0.8194656829130611, 0.5234257390160213, 0.7289817640967325, 0.8149169396688574, 0.8571161300134367, 0.5062567341204259, 0.6461174786050496, 0.982104444947651, 0.5171364702992487, 0.865225699315552, 0.7335623782154561, 0.7593827899789366, 0.7688179798430392, 0.9112711297142877, 0.8275509702043653, 0.8112953745616628, 0.7180093499639324, 0.9795228920848251, 0.6140403389057019, 0.8108141321835675, 0.9313799524772324, 0.5251099539920631, 0.6401007757650061, 0.7730867562104369, 0.67652192628378, 0.8863931308827434, 0.6942988310877849, 0.5189666715584043, 0.8642089913389904, 0.7376473754313014, 0.5299818435493173, 0.993840187451696, 0.9849511229950383, 0.9746108998385011, 0.9251893184697364, 0.7837338752549284, 0.8048222907540676, 0.8109319156211123, 0.770940155249388, 0.743862905148438, 0.7131536953895057, 0.7561001196144376, 0.7991535963966074, 0.8906691680214338, 0.9658544571612939, 0.514746999002548, 0.6957975756196262, 0.5634565323242247, 0.7180931148280274, 0.5994101319076797, 0.5840221641321031, 0.7938320213863823, 0.7844818055481654, 0.5878463776547185, 0.997536233090176, 0.8435756146636667, 0.5460204760751011, 0.9875382427137148, 0.651383030957468, 0.7784108779505365, 0.5590861376788707, 0.9377230331340949, 0.6560592318043796, 0.5094739617627955, 0.6379332578296175, 0.7859756530559701, 0.6255609940095445, 
0.7345072835570805, 0.501503705898837, 0.5457940051994392, 0.6554708807302758, 0.8338719454132826, 0.7777572255844079, 0.5913669152983028, 0.938606169209789, 0.9189525554434103, 0.5985006584296071, 0.5546991442534287, 0.7670459848585671, 0.6118260621211402, 0.8055494711240188, 0.8395065735164626, 0.8488177406156179, 0.5885592212652937, 0.9244869431157141, 0.9705054788958072, 0.6719746338438259, 0.6484257718755128, 0.6922972407270068, 0.9583933013365113, 0.5714689858816085, 0.6526756805121818, 0.81774327842093, 0.7699150059452289, 0.8665762926081576, 0.7111409645988419, 0.8849356896082234, 0.5825019982181783, 0.9185779884151599, 0.7676787878330578, 0.8128951636022201, 0.633675931243713, 0.7824568473579775, 0.8589392401789775, 0.5657010379121655, 0.8827950808870118, 0.603359500678919, 0.7081542687559742, 0.8964647831069328, 0.7147053796336547, 0.7744067743825938, 0.7960779392018194, 0.9204293735280673, 0.6294035029855071, 0.9122983094017776, 0.7950050914147266, 0.9106454709815011, 0.6015472267157314, 0.9979069481992328, 0.6113997057890141, 0.7538692621585846, 0.9403921394990424, 0.8272071174946076, 0.6150835006248065, 0.942813466607736, 0.6243938195574261, 0.7311572908621525, 0.7479018100833977, 0.6931064280059327, 0.896126776929506, 0.5346782439892053, 0.7039335974246629, 0.6310466929251495, 0.6876581492552243, 0.6886932481804044, 0.8847106935732849, 0.5654512935922635, 0.5727220871046435, 0.8289228817101367, 0.9249306802135316, 0.79633484219617, 0.6889302748041759, 0.9879042680284793, 0.8039178919720367, 0.7658379853042198, 0.8821783622686075, 0.930238191702977, 0.818597629814331, 0.7316623924191255, 0.5894441497960976, 0.9060658357804723, 0.897381925918505, 0.7446603869933135, 0.8455703805228844, 0.9546716392589578, 0.6361294007326204, 0.569866251934618, 0.9091768735973151, 0.9377845226011138, 0.8874791641685716, 0.6592303951969358, 0.891567999513368, 0.6897698821085527, 0.6325877346351865, 0.8858108233937549, 0.6387720603330902, 0.6302296548389184, 
0.5489963773619555, 0.8099469001148923, 0.7205930594907279, 0.6149589666956001, 0.8827865098330481, 0.8308290822299433, 0.967829214217026, 0.6154532467876384, 0.6273035180467585, 0.6895309321810392, 0.9675574207371941, 0.534600045455246, 0.598047250851556, 0.6196084033262734, 0.693303522315621, 0.7572942932835808, 0.5730427126403741, 0.9080670854125947, 0.5263638313740037, 0.6222298583632762, 0.795400135637337, 0.7969951135325606, 0.9174571741860016, 0.988474218021812, 0.6825084798145928, 0.9661736587445102, 0.5071641055869592, 0.7403191536566893, 0.9236967242221514, 0.8194012941239425, 0.6938068385015705, 0.8652535406294477, 0.8389041209625907, 0.9778094617844368, 0.5671060754373496, 0.5534375027175932, 0.5964649192712613, 0.6411234222803035, 0.6375687408273797, 0.6255932865439637, 0.7193166196030888, 0.5394461585364465, 0.8059393649355786, 0.6853254416984582, 0.6149183590588428, 0.5790393413333546, 0.8992350962553372, 0.9087035600121067, 0.6890898817413582, 0.8154956619066446, 0.7135330179978667, 0.7143098550614394, 0.8906230223036155, 0.798853549300406, 0.5599898515151249, 0.5792123402733262, 0.673056472526651, 0.9953079748784541, 0.7829061329651172, 0.8102069111414774, 0.6179783889940409, 0.527071134738089, 0.5244121610703681, 0.808117022024541, 0.6344001437598015, 0.6257826971010652, 0.7726820907042393, 0.932103542202771, 0.8689062865244706, 0.977484681322511, 0.6618760649079587, 0.993267807154227, 0.842930530551219, 0.5374393905122407, 0.5721399357637784, 0.6077042785960042, 0.7301252167721946, 0.7084311380496862, 0.8910886341397835, 0.7345882176776177, 0.9132275517943809, 0.6026380190795577, 0.8196719557990215, 0.7518152296123968, 0.5096129260612331, 0.9850075048661511, 0.8362150582116841, 0.5431500472844265, 0.5594800001858937, 0.5928085598653962, 0.626924231421614, 0.6691924569737737, 0.7095763837623448, 0.5154148804174882, 0.5228297831893199, 0.7240717504231475, 0.9418510127747313, 0.7590548756897166, 0.5463767281042421, 0.9572985211334778, 
0.7579614134329704, 0.5820873398744931, 0.6634422681344438, 0.5562322130828758, 0.9547190606801086, 0.5002003484152409, 0.87334002491291, 0.6019356192582546, 0.5794965926139274, 0.8978271362932682, 0.5638345231778942, 0.9030743796858851, 0.9151914178206361, 0.5716408561095492, 0.628945050086583, 0.7934862065164143, 0.8561772739736881, 0.7645416167131035, 0.742866929179399, 0.6346396255057883, 0.6552558726626371, 0.9981310689266456, 0.9175916930612011, 0.6245758916628614, 0.6583396237695678, 0.951394549361007, 0.754946424343629, 0.603317845031764, 0.5282801259611107, 0.9481366905741669, 0.8102606646242777, 0.6087605055852958, 0.658026945047013, 0.7556756111408467, 0.6692499567809378, 0.9885249197906365, 0.8984343750979731, 0.5734452436124857, 0.5335241469063996, 0.8058631106474681, 0.9927301982675054, 0.5215146454073922, 0.9063871776069825, 0.8999640239342048, 0.7338003163984808, 0.9158640283219296, 0.506616564423393, 0.5042441497203942, 0.7502302459154004, 0.5043161996992194, 0.5711169534963243, 0.758852688338316, 0.5520774489963227, 0.5374572196030165, 0.5834771229806732, 0.586355086210834, 0.5159338627450696, 0.6061405441904972, 0.7671889234229722, 0.9137739350801666, 0.6307158069571319, 0.7281133953624085, 0.5187964848767732, 0.9219567287407582, 0.5151055557790802, 0.5465001743982982, 0.7654165976033411, 0.981993670506943, 0.6767453853198728, 0.5344013912071528, 0.6640720296074774, 0.860185992056777, 0.5489948850163331, 0.8624620809775372, 0.8989656213063, 0.6832296702234486, 0.863914137099483, 0.77723514093558, 0.5450993945372753, 0.5024989748932527, 0.5525214218779371, 0.9207876217433147, 0.7039111154514816, 0.5335503445343456, 0.7333494979729538, 0.9023033428138516, 0.544682133772614, 0.8101495183985734, 0.5775684636752135, 0.8437231749901263, 0.6787110028790858, 0.7908112234277245, 0.6489491545781547, 0.6865784440902077, 0.5220197528532842, 0.8638123893476357, 0.5133012360174454, 0.6064255117540142, 0.8036311735685501, 0.785573759991024, 0.8019090848932591, 
0.9478543252912759, 0.859348473741329, 0.89726641326805, 0.5617227452700158, 0.8823915669076141, 0.5362500267428163, 0.5568696065995894, 0.7905311503542153, 0.9877797054623356, 0.9523565445100739, 0.9531582879224669, 0.8572424779556032, 0.6680071271874065, 0.5232288882121054, 0.9832485598769024, 0.7531311328508193, 0.9421975154524806, 0.6640729867614114, 0.5528573285007017, 0.9215048865515443, 0.6873369521555186, 0.7418509223264074, 0.5198371594994323, 0.7635048794651234, 0.9097979901901952, 0.7667713526757609, 0.867400284165702, 0.5250334476654059, 0.7419556879676836, 0.8221358789072098, 0.8582938528133006, 0.8118857920410345, 0.5958266015015772, 0.7482746162602381, 0.536228329706911, 0.9870599419004715, 0.9224551899061908, 0.5067334319980614, 0.9594929953077582, 0.8912901074349018, 0.7201873484860879, 0.7040264733209198, 0.5509048510353249, 0.7492095760179136, 0.5041418315662656, 0.5262730841202634, 0.6910829115818162, 0.817899620069202, 0.9256052763077467, 0.8758132565573261, 0.7598115976811975, 0.7536947308281199, 0.5011594678156721, 0.6148113528723971, 0.7790133610298089, 0.6891917525983311, 0.7012464170838515, 0.7270887250033318, 0.7773981739544438, 0.754631434800277, 0.7184149958635819, 0.6469504731430287, 0.8235039975196892, 0.6889357845360696, 0.7816516719262471, 0.5970731913307725, 0.6018403036098314, 0.747890857696534, 0.6614344887175958, 0.7339823066022173, 0.6891720433696379, 0.9454464981951998, 0.9054591515535171, 0.7088506271665045, 0.5891679210661617, 0.586742343534782, 0.5538097784991378, 0.9889759816344426, 0.5360789963278647, 0.7035741372826185, 0.7416133210580937, 0.6105596382736578, 0.9464483109770694, 0.7134850113375828, 0.8265623262532441, 0.9899323489407355, 0.6346669909417313, 0.8335297875941989, 0.7483639346932054, 0.5063493928473892, 0.6161264536650596, 0.8848230202608369, 0.6507425727592122, 0.9188842780650947, 0.7345288167257873, 0.8188292712519527, 0.8307194744901041, 0.9210665523614134, 0.5168247768221741, 0.7720434201386402, 
0.8570210464184038, 0.6181217362118312, 0.5887654049033096, 0.9657230477172232, 0.9575314715548007, 0.5951699343023586, 0.5590737969461447, 0.8896075934562713, 0.6414573467926039, 0.6020192080879981, 0.8345283467903198, 0.839097455267774, 0.966241311917039, 0.9830525889093882, 0.6259029532294955, 0.5877323544514845, 0.7684484273526273, 0.5877668119882993, 0.993733825194794, 0.6786552353284994, 0.5360504398451713, 0.7820962225936695, 0.9293905352650026, 0.5863454371113651, 0.6907414540567892, 0.8940802290871641, 0.6587865134835178, 0.8700147145551167, 0.6549978290173204, 0.759859723933286, 0.5261884765050104, 0.5828609945298147, 0.6445241308378324, 0.8808941526723264, 0.9262243404706649, 0.7215757792703958, 0.5996149655088523, 0.9797286238261118, 0.537735873620546, 0.5887806289618314, 0.7570953763802515, 0.702176488691318, 0.5751253434246595, 0.8948965169532697, 0.5473055914819531, 0.5626791608860124, 0.7908219726690175, 0.6949854652250429, 0.6754681113809047, 0.7171467908630458, 0.5206675526079327, 0.7224717662515149, 0.5152718857335219, 0.6117146737922758, 0.623863395282704, 0.8436394791721626, 0.54957168726346, 0.8872326341825979, 0.6713181426588349, 0.8001330697523115, 0.9095118549475805, 0.5134273351346348, 0.9082852556448828, 0.6844956381150311, 0.9406293554745777, 0.9467771564113715, 0.9545698987320441, 0.691735507249782, 0.8497242299715768, 0.6999958606743965, 0.5401132407445146, 0.5780338544429767, 0.8870121665466242, 0.5784406377153488, 0.9927469885925824, 0.9848168071589416, 0.6136207011785109, 0.7202719028602171, 0.7197430130237071, 0.5805031357408508, 0.8195889494953336, 0.6921119404870886, 0.5525949484009401, 0.532480286295208, 0.6520492688707731, 0.9371068508240226, 0.7189542159942852, 0.5240218085752559, 0.9184010132907554, 0.5568894444171528, 0.5223368256872758, 0.6337347635860533, 0.8459308304287485, 0.5698380625088562, 0.9368287488260878, 0.7217103635853193, 0.7527391812708972, 0.9428293541588535, 0.7709068556350549, 0.6426295768044404, 
0.6760654212613266, 0.9843516224963117, 0.7235910926808036, 0.9949752949944504, 0.5586378640685176, 0.9623373284512811, 0.7454406671492223, 0.7586028374964042, 0.8332442118633823, 0.8489548610256359, 0.534200541974334, 0.8381035193147015, 0.5890648774928746, 0.9868369589608299, 0.5161821183081391, 0.8169560426177476, 0.6524846094779834, 0.6969587608853749, 0.8341319157559267, 0.9137693513188956, 0.5320171552714645, 0.7429044091316547, 0.9395855118943186, 0.9831137047639765, 0.85191867538874, 0.9804068934931656, 0.820823634337425, 0.6077859719910985, 0.9452667035506483, 0.505826307545698, 0.9314090264397952, 0.7118962745871116, 0.7178235975100803, 0.7130745646936075, 0.8481978506477454, 0.9928799396375515, 0.7328322597831287, 0.9232325070617664, 0.6570682472968008, 0.7998974764543723, 0.6880162555590266, 0.9407573400895896, 0.8673561841788952, 0.6803807835772268, 0.7979625913789983, 0.540735332479933, 0.7415630472374211, 0.9179706639856343, 0.5794084527845946, 0.7278629921953569, 0.961969845878268, 0.6197817311794773, 0.5231271574368441, 0.8679502479275114, 0.9866032862736531, 0.7895521574516248, 0.7401943585312034, 0.7113143406390694, 0.7154688067112676, 0.5506669049177123, 0.8046126832140872, 0.6439797758125838, 0.8431182968182965, 0.5281084164146633, 0.6102765970119532, 0.6449589477381902, 0.5838927735719177, 0.9260997494976642, 0.6494117229995571, 0.9348618693023125, 0.932520677349661, 0.5624970117607075, 0.7209291568252365, 0.9926735048832045, 0.6166250159155053, 0.7533799390420638, 0.9134358543309674, 0.764221763650425, 0.6834840923290021, 0.9462267320512144, 0.5586095726626757, 0.7444736563042065, 0.8889037920838192, 0.7625907142967507, 0.5050087378796815, 0.7263123316644409, 0.5972203310611263, 0.506477061320628, 0.5321944953197504, 0.6980471381435711, 0.7761566041666235, 0.6374339063973231, 0.709544149292455, 0.706446603009067, 0.7841919388886525, 0.9205818009164475, 0.7115512488698488, 0.7553115806297279, 0.5129284392360897, 0.7521309689535673, 
0.5133961020532811, 0.9726726636263785, 0.9547259286862544, 0.6042900769372668, 0.6218899803892581, 0.9509486224948204, 0.6059671409400227, 0.5936803335614809, 0.9813933141173071, 0.9640796043048614, 0.8550458602520585, 0.9568467027192429, 0.9570931747234002, 0.5951061533876034, 0.755989613159569, 0.625472173203746, 0.5705409453442298, 0.719733157504769, 0.9650115415988918, 0.7260245468582671, 0.9454749060121973, 0.7939749420683677, 0.8317705036529718, 0.7682133978671, 0.5489038157412258, 0.5844209259946905, 0.8257011344551184, 0.9630770793590319, 0.7723344871833253, 0.8040708561573062, 0.6485905057782368, 0.5206935768652425, 0.7412430729626016, 0.9660629646046635, 0.7627594535468001, 0.5171755728968142, 0.6151612044781721, 0.5457223491085579, 0.5862406629142031, 0.5461848141537364, 0.8481724412659264, 0.7430326711366473, 0.6608739971617141, 0.5953943529956385, 0.5846747478466583, 0.606766714398552, 0.6188874120452241, 0.605481916187754, 0.7815566108687669, 0.9211020284422324, 0.6136457882650339, 0.9928222283523525, 0.5091029955790949, 0.5792909549181862, 0.8051246210108001, 0.8738034156330458, 0.732783480081792, 0.9689093022313217, 0.7672926629839008, 0.9149897934755709, 0.6006154636571039, 0.813903673068713, 0.968716403934349, 0.5637695765680577, 0.7573991811993912, 0.5004579832217595, 0.7557002587981664, 0.8835673715207079, 0.7995207789835731, 0.9051198137689692, 0.9939275095159334, 0.897095473800662, 0.5366826697082349, 0.5358855813327252, 0.9800938643979646, 0.9094704087395798, 0.7379442039583994, 0.890952571580262, 0.8737997870920091, 0.9483129243175785, 0.5821045335542847, 0.8986256544311527, 0.5626697931342801, 0.5278290199739462, 0.840000293089227, 0.7969906489961074, 0.8714780881387354, 0.8404899589266508, 0.5025451458339566, 0.6494538058740104, 0.9505019301906628, 0.7398831175816367, 0.8947553538472942, 0.8633671160233354, 0.9587287813939693, 0.9017604625644309, 0.648689538652612, 0.5840017269467649, 0.6288547808428511, 0.8727157234563561, 
0.6695630990265533, 0.8281405447200438, 0.8911701846469884, 0.909750916536005, 0.8161430274568622, 0.89377781378817, 0.80882012211441, 0.813128695188692, 0.5595346906521864, 0.9968144101679457, 0.8223291696769097, 0.8756686806999955, 0.8286324610469593, 0.8095242695956094, 0.9735267309412049, 0.7321373521165797, 0.5811663821182826, 0.5841585475006349, 0.917371607846394, 0.7539035104755887, 0.5194359219342091, 0.7867628517612746, 0.9543054754787684, 0.7888227245851245, 0.951480471166434, 0.7206601036107341, 0.932175868788558, 0.8850522878466878, 0.924069490382869, 0.8609969790236933, 0.6616126643024397, 0.8403203297981612, 0.85488908333498, 0.5574163346992489, 0.8736462452436744, 0.5156542123618743, 0.8327639470426216, 0.6060484914253987, 0.9907384106741438, 0.648146730624312, 0.998618027905157, 0.775769115893397, 0.9085648463279152, 0.9950923494395599, 0.5486060989882622, 0.8912076335268708, 0.7897845170808033, 0.6126292254212649, 0.8417784959018078, 0.9291521330135335, 0.5101560726849164, 0.9885752189273493, 0.9194896772845892, 0.9312574280001343, 0.8822247259033704, 0.9094464398523381, 0.9420102450914463, 0.918437010219903, 0.8753945038810458, 0.6578986237107418, 0.872229725337279, 0.9030596898524113, 0.8848979542198676, 0.7208920513667691, 0.511776636596811, 0.855260393798039, 0.5902477592762265, 0.6581712702817564, 0.9015579577018892, 0.7142456276263041, 0.7572950528657283, 0.5462475608155781, 0.5000786417221623, 0.5373888464835148, 0.7859801275110689, 0.5008981885452968, 0.762513901742253, 0.6006877981739418, 0.9710072971360155, 0.5971440102535412, 0.5960827244592352, 0.5820437841380925, 0.7714984891103441, 0.5108109383514781, 0.9276823603734585, 0.7743160433157491, 0.5140363505542195, 0.8760168995930766, 0.9611600439842278, 0.918900125789564, 0.9911694768475607, 0.8214816310240092, 0.6232304169317402, 0.970882235448933, 0.9884365540050629, 0.7794351102944277, 0.6745405331736694, 0.602702154032092, 0.6017661386410345, 0.903780750642698, 0.6459595828362853, 
0.7657053091820556, 0.6928546092578323, 0.6669451856244035, 0.8769482378780203, 0.8244856241080796, 0.7972292624957529, 0.5967474522384612, 0.9189645711739616, 0.942516825288894, 0.8449574708900496, 0.8683951350515529, 0.7638782777214503, 0.616875334987939, 0.63413826415906, 0.6088282265850395, 0.6314250636723298, 0.6904332146988206, 0.6442258436636552, 0.52204229660931, 0.9909538232748485, 0.512898700630541, 0.6952969316495028, 0.8537453755226072, 0.63468093027349, 0.7153589427269695, 0.8562562550618144, 0.51784000437774, 0.6886256046387663, 0.6776035915688039, 0.9203244504936738, 0.6119054766303262, 0.57124620298642, 0.8014459271388481, 0.8920234776824107, 0.6259844703509418, 0.8934328023776047, 0.8257083392059856, 0.8595634511824359, 0.9670581309144124, 0.92147760848203, 0.8144232309560255, 0.8006190542523965, 0.5180254838907512, 0.6830532468822188, 0.7892314134525233, 0.5428867665940142, 0.9885087519109435, 0.5934756734634438, 0.844950043952224, 0.9766537076767758, 0.9980360950428239, 0.971037363068107, 0.7080477244822156, 0.7222666961464113, 0.6716263438350798, 0.8924411086074779, 0.9820718352374875, 0.8530946763162284, 0.5200145972035004, 0.8476582828113424, 0.5402874774155775, 0.7740999647535847, 0.9960148127514891, 0.8243094320980939, 0.5926317220676371, 0.6817487502505383, 0.6348180165310942, 0.5788601923322744, 0.6342013216438502, 0.7774387407492109, 0.5371191723398732, 0.840657087916743, 0.5848778230731824, 0.5376798738965985, 0.6471151333879328, 0.6830995934102179, 0.5285033053470656, 0.7999684957021047, 0.637835968816542, 0.7801918869084797, 0.5284934181776583, 0.7238583116948336, 0.5796005900925745, 0.7529112034301403, 0.9087616286118214, 0.9051317616299739, 0.7906195220048484, 0.6870761654123307, 0.8723847061939001, 0.6842212173851846, 0.7470014548762441, 0.861428370596651, 0.5101995095417442, 0.7239951739068767, 0.8648354348082998, 0.5460712721459823, 0.879134114487079, 0.8493591533489047, 0.7032901028312493, 0.8711506205030182, 0.5279967662867309, 
0.6397417270305398, 0.9505131041020156, 0.5548177673947112, 0.8616257331731578, 0.7643767891391137, 0.8199660904575492, 0.6852161865446308, 0.6989683782491993, 0.5830601847099822, 0.6901411970322235, 0.7852837972624387, 0.5489769083347875, 0.8102810584179128, 0.6210675497408131, 0.6482116314289647, 0.8615803855997839, 0.6542127309817778, 0.8577923528382004, 0.663252416179948, 0.8090672987276755, 0.9421688685306959, 0.7270985017435645, 0.9647397867963364, 0.6645107089635399, 0.8530317034939832, 0.6336882309556067, 0.9491180928643298, 0.7946259004322276, 0.572589634155648, 0.9286969720343036, 0.8318914858402537, 0.6074034247161184, 0.7743616756298486, 0.5974184962933906, 0.6924797714261068, 0.6329702520828449, 0.8903691178498909, 0.5609882634077474, 0.9220495940774649, 0.506045395053826, 0.7117533637174737, 0.7486476927740024, 0.6068837629077415, 0.6441313632621941, 0.6711239920378264, 0.6578280634102246, 0.5188285441166911, 0.6317956047280586, 0.7268750013360536, 0.6855729043535046, 0.7522474804344224, 0.9775598600168411, 0.7644409834751076, 0.7636031732566341, 0.716617934879924, 0.9649635429171118, 0.5173854456981981, 0.5882082130867479, 0.894828754495546, 0.6665883127813165, 0.9393325177166012, 0.7788012990600068, 0.5057956469530591, 0.6826665929582698, 0.6395300916668845, 0.703970779776991, 0.9261834581589978, 0.9693970437873553, 0.7602166273344645, 0.9135122477634794, 0.8044637921676023, 0.6516617650603526, 0.6639959528302959, 0.9845923648826156, 0.9193296699400658, 0.6308542974245064, 0.7078973977468203, 0.6423730305211837, 0.9414970829629907, 0.7363548495098085, 0.7549873589884211, 0.8358153058190192, 0.787806838109455, 0.9281843214197614, 0.6716317912447982, 0.9650754315479685, 0.8813201195213956, 0.6966033432980997, 0.9550062865411368, 0.5177470864359441, 0.5465539085840389, 0.5997958382574129, 0.8420190331083833, 0.5868005829237108, 0.9229085716753012, 0.8218173030110676, 0.939126844443911, 0.8070984587066998, 0.6866218776909487, 0.6171378234996079, 
0.8034010655935437, 0.9159646296042729, 0.9186985699757941, 0.5288646563116259, 0.8615459296825971, 0.9628561393606673, 0.9986726991574307, 0.90789864258118, 0.5431541781057858, 0.6785239818188169, 0.6009259261728589, 0.6463543631201403, 0.9185786025920872, 0.5528782022266275, 0.6596576754797618, 0.6683458559441963, 0.7554024136182627, 0.8720828092359243, 0.7643902329401369, 0.9195010681223807, 0.7310178132255717, 0.7848386571332613, 0.8882147586796365, 0.7019527142083755, 0.683381762673692, 0.6443445218655992, 0.6997087445480437, 0.7032251612184979, 0.8665736433418496, 0.5811725426135786, 0.8123614113961777, 0.6881017065126898, 0.9676216165583196, 0.9931642835797114, 0.5882959133589045, 0.6553377136500855, 0.8517975333513832, 0.8079074539352457, 0.6027194599774748, 0.7567925989633764, 0.8239427952778317, 0.5637794614891383, 0.8437610488710302, 0.792316651491932, 0.9199278797610349, 0.78873940912969, 0.620725920615568, 0.9711076429251442, 0.5988950687969297, 0.8420597394152036, 0.5349088097002886, 0.961666619354273, 0.54280410212559, 0.6371904921752667, 0.5656367630162825, 0.7326480274499749, 0.7411356985047979, 0.6491534421350788, 0.5101645310467686, 0.598237689612025, 0.5148250996729435, 0.565446302699699, 0.9129760261065571, 0.5439084949070278, 0.7501550329254698, 0.9839457352728846, 0.8302918925307976, 0.570550110607652, 0.9706899067548193, 0.7682431691417324, 0.9743731583822531, 0.8660546992744587, 0.6087652002772169, 0.9569820711285666, 0.6634470400498514, 0.7039281931993453, 0.634679959491903, 0.9502738979418888, 0.69845530199918, 0.5566156922994812, 0.9764559204740708, 0.5605986023053766, 0.7255523018251273, 0.5982323798218484, 0.9596551454887671, 0.6345023016259079, 0.7984606021749754, 0.9612732937648956, 0.8914489113568156, 0.8849174083992476, 0.7832137535822299, 0.9878132653198735, 0.7681761186707101, 0.8409377289746198, 0.5637860632313221, 0.6185934069426777, 0.7117115465692989, 0.9827768209713899, 0.8079031255834113, 0.8601488289262955, 
0.6983739206525243, 0.5827935008524068, 0.8122985025241877, 0.6326799430198426, 0.9600395707626478, 0.9266938695828513, 0.5762099493907603, 0.5694292297316867, 0.5566486153413043, 0.739811251824867, 0.6880355906863176, 0.5235303173353826, 0.7795254638849153, 0.5802329190189521, 0.9962828969051116, 0.8285452661555279, 0.6748922056329716, 0.7958749623299267, 0.951267749683171, 0.8559841466259084, 0.736592913052575, 0.5474103928390852, 0.6478624124415742, 0.8870317467961699, 0.7356776908358738, 0.9043469022780433, 0.8921602040476153, 0.7001867537536337, 0.5617290931041206, 0.9873830145285172, 0.72471533370047, 0.5599229239172292, 0.6415902666421328, 0.5367455008212174, 0.8043874330589348, 0.8261008912815997, 0.5929276956475263, 0.8098215975224767, 0.8533978526811794, 0.7676143025063243, 0.5984049149152426, 0.9302193502724436, 0.6828179450259839, 0.8473214433505032, 0.8412244666960097, 0.6660701153891166, 0.7449361951561962, 0.9161975760115503, 0.5741860942935231, 0.8071624669502624, 0.8919612935237079, 0.7736660862632555, 0.6912304520866435, 0.7881751717635368, 0.8068152379681974, 0.8282023629284929, 0.5429146612773872, 0.8087525285645822, 0.9311548995157843, 0.6485805864673051, 0.7128492791862204, 0.8953033378932789, 0.9303884547959155, 0.813314153789853, 0.7278689947348493, 0.514012472283099, 0.5513342601410394, 0.6808965653807906, 0.70319807618035, 0.664172988055049, 0.7529632418987424, 0.9591175680481253, 0.5768422376252706, 0.9698740743945493, 0.700060122998643, 0.8682384747650651, 0.7366711020364631, 0.7041607655488975, 0.7166733496344654, 0.7166486079353165, 0.6569032481175181, 0.5559256042443486, 0.9439907746029268, 0.6692193895383018, 0.9494058189724879, 0.7646541478074, 0.7346155298938073, 0.6439811430200274, 0.8436696010455349, 0.9716300218551475, 0.9750905163895467, 0.5884336149734453, 0.81138480208325, 0.7705982183194633, 0.5277616321251611, 0.7894044403939382, 0.9321950343673463, 0.5867821434528877, 0.7752523898086516, 0.9260869121938289, 
0.513334107126047, 0.8143740559763588, 0.5423311659106962, 0.550448156788141, 0.9461717712409385, 0.807056504742443, 0.542534718201001, 0.8413893291655505, 0.6935670915063925, 0.6081465608594018, 0.9147446848325281, 0.5282310778072699, 0.5945557349395765, 0.8530438787287852, 0.9469249638229, 0.6739546386634876, 0.7531765201067011, 0.8968591267928206, 0.8603826911156228, 0.8519164180486455, 0.7513162438571392, 0.5754350266490649, 0.683220122819043, 0.781825004555446, 0.9501260228424344, 0.970722975660788, 0.8774200900749523, 0.5894687127014253, 0.7690368838721411, 0.892484001718724, 0.6857869451439348, 0.516461076653788, 0.9304277615627503, 0.6480631329647237, 0.7768343899645874, 0.504530219880658, 0.7495252986669827, 0.5266931746496339, 0.7573446139575464, 0.694733937899511, 0.7857908758728794, 0.7833108272510071, 0.909635073950984, 0.9685431867530298, 0.6501851734668354, 0.7890480404189242, 0.60594882739195, 0.6639556364182129, 0.6820849662116637, 0.9577358091430936, 0.7847499330851351, 0.6290369967505831, 0.8788143267914008, 0.6286058844335497, 0.7039937540153789, 0.9536343048217847, 0.866705702728548, 0.7298105383244797, 0.729083124938237, 0.6794192725499718, 0.5969054138436867, 0.6187741877500292, 0.5165067762144937, 0.7533041447733224, 0.6662890840140144, 0.7746128447679088, 0.9315479132158491, 0.6408312306141185, 0.9430616305715208, 0.9075181266273745, 0.8409914479005434, 0.8822361151899761, 0.8351473978540447, 0.6273194936582158, 0.6539691627702393, 0.9199236811537965, 0.5090524811032924, 0.8739143174365627, 0.9753416270341605, 0.5175191160196897, 0.7178915791837718, 0.5384373892931409, 0.6609406371882951, 0.6342070667504023, 0.5166329404664268, 0.9663284207977816, 0.5514980472496173, 0.8087082680150375, 0.8942615279371138, 0.851464654453691, 0.6024915363648367, 0.860776930935545, 0.7043409277899826, 0.8272003908265977, 0.6610936958329336, 0.9434105286237643, 0.8217826227152694, 0.7296949818329208, 0.8918720722676441, 0.9886957858967971, 0.7233407670570102, 
0.7737072905244016, 0.8490524879810849, 0.623436565028092, 0.9237149822105852, 0.7933306004673272, 0.795206097090836, 0.675019561591913, 0.6112415130546803, 0.9445253742795321, 0.930161787225776, 0.8859631081018917, 0.7507512467628283, 0.8805869627076925, 0.873028159886029, 0.933032391557209, 0.8643024417690718, 0.6326414864593819, 0.6862264740739836, 0.6556347609405295, 0.9020310530746674, 0.5253672475022577, 0.7297099437383254, 0.6814879238726494, 0.503756172736181, 0.6711484434739092, 0.7917308444280334, 0.7841022087224196, 0.6646338891188452, 0.5576986117907909, 0.5108789712837276, 0.9625538977396751, 0.8019859681389362, 0.7752570575840037, 0.5265639862072902, 0.764765847074311, 0.7443171121309176, 0.8303748626916099, 0.8184748080893272, 0.9960905303694885, 0.7547538941384716, 0.5477621416704201, 0.5123182349394436, 0.6779066518752346, 0.8382340662460874, 0.6547904964273414, 0.8529061187118427, 0.7720610987419586, 0.821503881746419, 0.9220164412399143, 0.9585390664524295, 0.5463322252955307, 0.6547366183799199, 0.6872524853277814, 0.9808976730376754, 0.6008034011641665, 0.7191449579181097, 0.6414520220388803, 0.8744898606127482, 0.6138614177172423, 0.9133250896085549, 0.5781929116845215, 0.985883886108502, 0.9803053277634877, 0.9417136354363632, 0.677509616589322, 0.9784998315524225, 0.9045041791381632, 0.5202589555376531, 0.7100669625704583, 0.8284490373264868, 0.661146977815007, 0.8531741463266075, 0.8475374238896562, 0.8725979056032426, 0.6750641850147603, 0.5741403592149584, 0.8243307401878637, 0.929835321623675, 0.5793192875625431, 0.9709542758605519, 0.8677749981647931, 0.8958725193511176, 0.5877201972009408, 0.6905678924131828, 0.5966440062775965, 0.6553646275787401, 0.6693547759006294, 0.809602840459702, 0.6723143601663004, 0.8301149723323452, 0.51109477393247, 0.913096500062419, 0.983284260927682, 0.817151879002863, 0.8979703609694105, 0.9147978285037528, 0.7570854553689248, 0.6814640710222379, 0.6594957643167556, 0.8026159755133766, 
0.9637132997075455, 0.5501791665285045, 0.5535188640731304, 0.50309186687027, 0.8606999510768857, 0.5138343190680205, 0.9471455052474815, 0.7139742330988037, 0.9264512589374287, 0.9015549364329859, 0.5774308693893466, 0.7904560432747711, 0.5026149767707578, 0.9047969986024884, 0.5426756402779274, 0.5252424668109834, 0.9090769546787252, 0.5769457923088585, 0.7970465000393991, 0.7172938458122418, 0.7286175674974995, 0.6052973178234793, 0.7994641380308685, 0.9981300972582834, 0.9304369670477544, 0.5376269747967284, 0.9084036110958795, 0.6663792100271485, 0.7367275912344189, 0.9433467712910748, 0.8261733250453459, 0.8502771509218167, 0.5812651799441128, 0.6754274520863056, 0.901124264560127, 0.7603634619599736, 0.7293914674800595, 0.7643582280896262, 0.5357180957493783, 0.6287462840620569, 0.8954316339228217, 0.6660779458085067, 0.7552774848986948, 0.5593315241097212, 0.8514552831217537, 0.5055080279175532, 0.9511303429603926, 0.5409485022857321, 0.953598777287028, 0.864698345368492, 0.8064981152249568, 0.8543717828733106, 0.6599377948871625, 0.9973600603176396, 0.8067810556824992, 0.8066302245075965, 0.9451145421942755, 0.738681850011147, 0.6554037262058907, 0.8910107606556688, 0.7818476281706608, 0.6399288701456325, 0.5985392499316977, 0.8456344343314758, 0.7700174745335207, 0.8364295966517761, 0.5161636816801866, 0.7121601919569736, 0.9190341413372439, 0.9520123759336308, 0.8436798093138076, 0.9987502236856691, 0.5831990125677827, 0.7499633297208932, 0.886422208411717, 0.9875191288532756, 0.6777095972805767, 0.7217037444099025, 0.5002833808831526, 0.7885558179021244, 0.5438313002691162, 0.7115009136343335, 0.510682004644325, 0.6893451614472603, 0.6320575241221584, 0.8477869054470325, 0.5190818742447312, 0.5220838778721524, 0.6822461187621631, 0.512610785377074, 0.9399690738146694, 0.5257854240732205, 0.8825600794641719, 0.9100665508208555, 0.5728983363088747, 0.8892484507541545, 0.6079949018538373, 0.5508592783532543, 0.8237662607541991, 0.6807692579536453, 
0.9333360363547205, 0.6047642070466824, 0.6009572909311247, 0.6982087101497279, 0.9081517642902058, 0.77844971777054, 0.6085828017932422, 0.7852660429265791, 0.6852157832118678, 0.9758978933691312, 0.8933231531556483, 0.5912783786204336, 0.5892250523925329, 0.9142073456607371, 0.7530531340331603, 0.7530023917382668, 0.7516696053093819, 0.9657091568849359, 0.7834447021060484, 0.5811183154338344, 0.689492153341815, 0.5617653628320767, 0.9514648505316319, 0.815509947281801, 0.5784479131815445, 0.8330488148958596, 0.9747544868632937, 0.7563948741207045, 0.9607363718032869, 0.6158030459776194, 0.7788797321335578, 0.8192435063273261, 0.7834967798842976, 0.6086904926499086, 0.5342668689458996, 0.7414271725314849, 0.8532789331175883, 0.6532505307744954, 0.5794898668309681, 0.716730304409906, 0.8230016651922893, 0.6922559089430522, 0.9331015820071131, 0.6715973742531963, 0.8371935792850804, 0.7968623334936431, 0.5699959291565764, 0.6856712145092354, 0.939316737460952, 0.9808733799421738, 0.759919655090111, 0.7966912784464756, 0.7329418487878926, 0.8174243726112934, 0.7065063565187909, 0.8070849062425928, 0.6255270746478542, 0.8542664201099313, 0.9101634676917333, 0.711009342462948, 0.53737799151688, 0.715801952175237, 0.7061047157957836, 0.7252037087812566, 0.6325231047854267, 0.8627431262102347, 0.891969265342491, 0.7915577888368853, 0.8971907634992953, 0.6541917308188606, 0.723644335456151, 0.5336530216601721, 0.9554340004154239, 0.6265738244214228, 0.8296282950016749, 0.5477581233704952, 0.5068736977113326, 0.8874549955852243, 0.5991879836608052, 0.6563999287658498, 0.6674070817144986, 0.5527899440757109, 0.9239531478985907, 0.5847292530549745, 0.9972061197180697, 0.597160918969941, 0.5486865044714594, 0.7661088290626432, 0.6075790512325183, 0.8172657383245531, 0.801695484245152, 0.830333758913063, 0.6649241647279224, 0.7950060461754744, 0.8252607573302759, 0.7718776602817093, 0.7589772064014673, 0.8380331410967975, 0.7881566331911487, 0.8857414639190613, 
0.5052025080409103, 0.6279713706624336, 0.9577646585206672, 0.9151696198689997, 0.7753063435062415, 0.8193423175144061, 0.9158933482205157, 0.8845700430360397, 0.5840964156658524, 0.9134428741180247, 0.6616834447974479, 0.6211285285750974, 0.510231067063228, 0.9497687155502316, 0.9684911797621936, 0.8497785304889354, 0.8820313333307868, 0.6616052369724854, 0.8298396568557651, 0.9934201210649313, 0.7644177342962286, 0.8264370780496173, 0.8431692653824938, 0.7842782600522412, 0.7003586519360709, 0.7639545463880432, 0.6114403547417966, 0.5846186807113583, 0.8711414047056744, 0.6895913079264251, 0.5503331038733645, 0.8884023873540761, 0.6196171728446209, 0.7790556851876582, 0.5787275408131747, 0.8249903088369214, 0.6001801535841108, 0.9582346960728735, 0.973349719296112, 0.5756963133737225, 0.7562411819200151, 0.7223183378759885, 0.5888118613183784, 0.7538573571133045, 0.8465410012807488, 0.5494256756536423, 0.5770298975303452, 0.7846800628806687, 0.523491627924876, 0.5218484158701193, 0.5046725468444977, 0.7302634989012375, 0.6808393183790181, 0.5238114315509897, 0.5481941836737201, 0.7693645757832448, 0.8432261512866625, 0.8969438919083907, 0.923982600226516, 0.7758093969356222, 0.5813739656637658, 0.6002925077277455, 0.5176072082161991, 0.5540920986071802, 0.6112449525910346, 0.7750465211349076, 0.7757570264484692, 0.5595551503649814, 0.933622131818469, 0.9822939698837142, 0.5388996077369952, 0.758317245032939, 0.8674924796316189, 0.8688823028933186, 0.9627117511783732, 0.8692829690661623, 0.6977482463563727, 0.6646061524997122, 0.8602896102869811, 0.8899252096248015, 0.9314871699230536, 0.9843628604048241, 0.5219254340577966, 0.5152655877875192, 0.8217985473346672, 0.7268852782249375, 0.7297986544345785, 0.7353750646419545, 0.6826534681296252, 0.5509639507076872, 0.8361641010816186, 0.704275504895332, 0.8184471185003184, 0.7149727785144728, 0.7970082723774135, 0.6775334612699022, 0.8873035932424234, 0.8423027618571185, 0.8577652303138219, 0.7259835098099359, 
0.9249069652449188, 0.5985633741863599, 0.8340282754988835, 0.7956664474780734, 0.6676300061449163, 0.6886598637259937, 0.7777435664478444, 0.7250595668498319, 0.6889644301564272, 0.7534922679105016, 0.7139656028684318, 0.741973113483928, 0.7513459290988134, 0.5421137737442362, 0.8979720991118663, 0.514721705741735, 0.752288647240846, 0.6698232665815483, 0.5072833346303469, 0.5173166547233621, 0.5384827275844801, 0.9197280587470387, 0.8268171062767617, 0.9372829604584267, 0.6886430729743043, 0.6590357407443684, 0.5728227253258028, 0.9223237616959826, 0.5390416574641101, 0.5059446160302334, 0.9106041054701892, 0.6018148977580441, 0.9706494512768452, 0.7524342226731245, 0.7579041570562159, 0.9210789866361268, 0.5422301301770539, 0.7547239548512256, 0.5012329241540773, 0.9774355398279864, 0.8344384827514462, 0.5332525362516518, 0.9528252105971496, 0.8793588983332717, 0.5039228967204781, 0.7344713033944713, 0.9228004294470153, 0.7042483415727847, 0.985523608149969, 0.6017088052022289, 0.9687129495531563, 0.9230071590058848, 0.6282812498843506, 0.9491009282019476, 0.8051251689986949, 0.9219836950552206, 0.7291769057078219, 0.9170427932954682, 0.8609687458564482, 0.9637918129614031, 0.7415289175330502, 0.9289522732832547, 0.8905712037701112, 0.9555092654718715, 0.9458212362426633, 0.7648418315524448, 0.6558321590886003, 0.8222899105241213, 0.5241912534587416, 0.6270329590802184, 0.7498906958677417, 0.668905946338469, 0.8001217973519169, 0.8621701247564032, 0.5739497121835685, 0.8385692425462266, 0.7359453739262032, 0.8392996016953591, 0.8071100771322036, 0.7431455571614551, 0.927727771258414, 0.9131138011702704, 0.7793699282909043, 0.820876123004067, 0.892750553204579, 0.6393218899715086, 0.9330296230593116, 0.8925373394950034, 0.6529642934441802, 0.8733479957596078, 0.9243981367172018, 0.5355951342701861, 0.5929722442985198, 0.777162184225711, 0.6192292679754104, 0.700627809663563, 0.9469262509283884, 0.5634112026079467, 0.6151240867911083, 0.6333844603209837, 
0.7533569706217403, 0.778447222801905, 0.5757042501493157, 0.5136808373961703, 0.8575605168137014, 0.876087430497845, 0.7799160071119606, 0.7183752045260576, 0.6169051622470454, 0.9323369896288042, 0.8700608528604061, 0.9451986655903231, 0.6384916115723503, 0.5185332530381801, 0.730002538884224, 0.8122699703467172, 0.8493331036041616, 0.673277333280506, 0.6359523109702488, 0.7214813885998668, 0.905230565717156, 0.6375479642933422, 0.942399398947359, 0.8519214160386597, 0.6432404486555461, 0.624016872456743, 0.5175132170451486, 0.9591175621885903, 0.8990481896412579, 0.6158518678925187, 0.5592891640024632, 0.8885500382288389, 0.910448587557042, 0.8245821125041584, 0.8931513164362586, 0.9423951478098049, 0.7788279717784385, 0.7452757575105078, 0.852773619505852, 0.6573991159234867, 0.7175650414979626, 0.5916474264155935, 0.6529388414134587, 0.7171812134752837, 0.745672159082402, 0.7373127825335852, 0.7670550671203178, 0.9691553394474439, 0.6367773322871597, 0.5719429265683893, 0.5745150267816841, 0.8064280974989292, 0.5647388674923154, 0.7451500108983269, 0.9684510319916105, 0.7764498934258155, 0.7768667539896723, 0.6812109794708487, 0.7184821506952508, 0.6468794252060615, 0.6566293893106709, 0.9950606641886591, 0.5789878659666128, 0.9587341216026204, 0.5127693010596299, 0.7977834786351672, 0.7989911317136664, 0.609978947394459, 0.8121770806514742, 0.9272287753488029, 0.9935189068452434, 0.6199641607547012, 0.7100440523777369, 0.7343042747183883, 0.7934913053327022, 0.5138698970732933, 0.8897320092152262, 0.7097309724861927, 0.8081823456174131, 0.7802784448062627, 0.921336372079672, 0.5831191550363857, 0.5120783154625981, 0.7447041500871848, 0.8956700598157445, 0.7017392445098963, 0.8722651755614773, 0.7766931448606265, 0.9666718856379313, 0.7473171334271823, 0.9703552064521179, 0.9107489531975462, 0.5456182410559896, 0.8828433293765963, 0.8182517140530873, 0.6312984236974378, 0.7525170533086345, 0.8383103756921146, 0.7989563643374373, 0.7576162078305235, 
0.9200674496958938, 0.7343629898024697, 0.7796191597501069, 0.5915573371785137, 0.7967496397871344, 0.8080659770084593, 0.6808025095078809, 0.6188670704067333, 0.7495760633260984, 0.6095853491153249, 0.9248610171242448, 0.5156419339834262, 0.5064840739976058, 0.6041400466409435, 0.9564524761905364, 0.5501721780015589, 0.9124385614820479, 0.839702963408464, 0.7737946719524449, 0.7503696490075045, 0.6794100640539968, 0.7918905405177837, 0.5776523134000733, 0.6992323897654393, 0.8095063826777888, 0.7589425621063379, 0.8071781966383254, 0.6186891931695206, 0.6368634878536605, 0.9199126762377283, 0.791909504819039, 0.73557398773538, 0.7464261377842148, 0.7398750029536619, 0.8684590286454339, 0.5355353482239544, 0.9221211535863637, 0.9467645638694067, 0.8029248642758935, 0.9563715458509966, 0.7962386962529105, 0.653913545961665, 0.7450792321032129, 0.8661155794766646, 0.5231336441490942, 0.5385233575075048, 0.5418351843355118, 0.8303837395972855, 0.8337320913512061, 0.987317152268013, 0.7828919510397276, 0.8284570414577526, 0.6249962563117621, 0.6531518651507953, 0.974486546309391, 0.550591094976413, 0.6100350791721402, 0.8606562583265809, 0.8221904145678179, 0.5698732047152022, 0.653266194336974, 0.8064075840987676, 0.9186362939814369, 0.9467477253735906, 0.5369429323249217, 0.9293442592260581, 0.8991564560079248, 0.7884041395191708, 0.6865645970108285, 0.6059427923622156, 0.5238219977291545, 0.7615220480651452, 0.875284283975185, 0.8729303356550996, 0.988519497144114, 0.9665308519593745, 0.8499149272100581, 0.94090547197794, 0.6911254854486043, 0.6837183384821515, 0.5870513510752859, 0.764340195222579, 0.8093828350153445, 0.6941512702726154, 0.9981064389071289, 0.7016006721521265, 0.558137169276649, 0.6703971637813064, 0.9852093730494573, 0.961192716022914, 0.6484367448159312, 0.5825594339184328, 0.6874787272277016, 0.7563398863842533, 0.6269101867860427, 0.7660100308600897, 0.7531190705112367, 0.7043435293931837, 0.7009325109282865, 0.7482778802541242, 
0.5481405379209443, 0.7224393057378109, 0.7800491937845856, 0.8010138560754063, 0.91629222737315, 0.6913950730253537, 0.5574177590164633, 0.8425990942975665, 0.9354863527997269, 0.5276951828953107, 0.9350444590097021, 0.5464287583479721, 0.6404266925968616, 0.9143429291339844, 0.9409723996575179, 0.7678409081967268, 0.9111410303355842, 0.926294575469322, 0.8611044399180439, 0.7024076215661174, 0.9396366123598121, 0.9404065355546105, 0.7456278545388724, 0.7736450072622403, 0.9968800583780466, 0.827418088171207, 0.79082128208032, 0.786727135175465, 0.6747960046659838, 0.7412715488308472, 0.5116303028558697, 0.5328934878065753, 0.7458483200922799, 0.5077020213975356, 0.8220030188279186, 0.6766574536070752, 0.5725822686113561, 0.6053435045225222, 0.7094868726646217, 0.6766672773150189, 0.8411375039139795, 0.8167555585371169, 0.838836672573425, 0.8577709265002269, 0.970866154362014, 0.8003302602121203, 0.6209443605189096, 0.5052338934275085, 0.7091792426583331, 0.6147008569684345, 0.6316134899975194, 0.5810746651124139, 0.635492251393428, 0.6463253027381892, 0.9870918405597184, 0.7657260128895131, 0.970533322256957, 0.7630124562997289, 0.5081932164501801, 0.7590175830907722, 0.8039233527146321, 0.6724259775640027, 0.9170875674418935, 0.8067134974557337, 0.9185521789805471, 0.6029981924374888, 0.7173194632959746, 0.6977692445813111, 0.9986490612358803, 0.5019836658614591, 0.7970069027241683, 0.7717038449412323, 0.8828840244046752, 0.6728160016525071, 0.8140715602404598, 0.6636648285826103, 0.6203427170886834, 0.8504007463635426, 0.5966202708950672, 0.6580820366355268, 0.8130990891815146, 0.7295485007295024, 0.7907497214295864, 0.5935886531487177, 0.6628176272350312, 0.5504116147213027, 0.7642864545307708, 0.6344249607254551, 0.5731908139086128, 0.5678748194589236, 0.7368545967968592, 0.62973993561267, 0.9234067716169134, 0.838693251089057, 0.7164855275738656, 0.7245699621699159, 0.6236310527032661, 0.8207075555620293, 0.8779726019308114, 0.7666332463033028, 
0.6527605210346719, 0.7601253251662072, 0.6359102935361064, 0.6026366689103102, 0.7234602201681353, 0.8517804783495737, 0.773878917869188, 0.9215294457347931, 0.5357177423158078, 0.5157615050481208, 0.7676470619407454, 0.5981826697529385, 0.9429841767933813, 0.5116144419305162, 0.7020579071099492, 0.8825279638551222, 0.5399272756464595, 0.8912896101806322, 0.6463888532447271, 0.5785281980576727, 0.8405046095906981, 0.7475123451670094, 0.6328023902713371, 0.9168655672654042, 0.88621306086152, 0.65191931291862, 0.7626757853905666, 0.9035701897053834, 0.7722503384517627, 0.6489068891979337, 0.818013717311225, 0.9206667237260553, 0.8978168327954779, 0.883588427060723, 0.5758892448816737, 0.8078038517920276, 0.5413074279662486, 0.9796478494945166, 0.7727291089834984, 0.9391994165457906, 0.503987012671705, 0.620560697355186, 0.5973807453510176, 0.7036344315800415, 0.8558823420144719, 0.9412423935599321, 0.7005173222398873, 0.9990735093378857, 0.8852858946143427, 0.7611576744426141, 0.8051299933062286, 0.9653697547748622, 0.6476103799346271, 0.7274751376671845, 0.9073033760116487, 0.8985043144825652, 0.533078399679815, 0.5286682742564943, 0.7611737455870538, 0.794271613178312, 0.9955722016149294, 0.5622097792737504, 0.897430709147769, 0.6921439013252573, 0.8210569164230899, 0.6444482615393012, 0.7221661465823336, 0.9499030774236426, 0.6331979038374209, 0.91543269438054, 0.8555285012521426, 0.9548765109556829, 0.8727352148203187, 0.7556444025041991, 0.8282157684283026, 0.7335478673402797, 0.676247237631094, 0.733888939938057, 0.5508318354516244, 0.7472708388419892, 0.5386079773624559, 0.6352631263871495, 0.821126612224069, 0.7513198593508599, 0.6127849949861568, 0.9562188469727204, 0.8597308847243488, 0.9669575306451057, 0.755889496618334, 0.7051782386705745, 0.5736922396096353, 0.642065226514319, 0.9542850147444959, 0.791918487214628, 0.8216401924459454, 0.7414018660003612, 0.90186588696906, 0.7606437695538002, 0.5347319670171946, 0.889479897454116, 0.9049571039986823, 
0.6639623721809582, 0.6887487984682923, 0.6321421590077488, 0.931423627850855, 0.6881617579693198, 0.5469870042346187, 0.7383372870505791, 0.5785977596568654, 0.750159700365589, 0.7171727251085426, 0.9485509043700102, 0.9390693953476386, 0.8862203385225418, 0.8521348832917972, 0.7814896660394083, 0.5918656748480045, 0.879319777807057, 0.7136790713591727, 0.6803671702974822, 0.601628410791893, 0.5151196024358333, 0.833694740278752, 0.547457422306619, 0.9387671147233221, 0.7757055653657725, 0.9466260311617959, 0.8898637225269058, 0.5215160180952999, 0.7257770690477776, 0.6001631143963488, 0.795625504195495, 0.6564252871706034, 0.9297113745636447, 0.6149060554202427, 0.6568503828005847, 0.6847483145621616, 0.5865213055507471, 0.9620201364938625, 0.567933948333215, 0.5812384916142879, 0.6370920444052324, 0.5010047979985541, 0.8661454019685788, 0.981522628624437, 0.7898184606110086, 0.9992462661384856, 0.7532472299314281, 0.6034415129695625, 0.8666911215349871, 0.6915671331213543, 0.7455803410470806, 0.5327146425606516, 0.9419427710991087, 0.6987353635014277, 0.8866977467310333, 0.6624850510364708, 0.6458882348813069, 0.9221153747328263, 0.6241104146997551, 0.872537887534437, 0.5117343298932915, 0.834820416953454, 0.677139699892374, 0.9912151295180935, 0.8499116293654445, 0.8240636262828774, 0.7615903003374043, 0.9129512299811282, 0.8500214385909941, 0.504900044743861, 0.6751154693313166, 0.6417097152455999, 0.6679109455716208, 0.6845822381846286, 0.9516701807680967, 0.8157728116286687, 0.6136550187371879, 0.8263186663520177, 0.6437261706236387, 0.748870230272313, 0.5469828292603638, 0.7071410327478522, 0.9096149609852904, 0.5559391495688852, 0.8535093267157989, 0.5314594446102543, 0.9405689706925434, 0.6196767865722914, 0.7484718790158924, 0.6993648858981407, 0.663877239088323, 0.6619451838372181, 0.7077757668959079, 0.8820838346998877, 0.884865247387109, 0.5089340926904962, 0.6376634172652771, 0.6942357417527127, 0.8965896001755023, 0.5864011398607731, 
0.9919974305826833, 0.8186863207477799, 0.746556613280982, 0.7403513158523746, 0.6753065234621275, 0.5155471967074023, 0.7388535910314673, 0.7891852255478183, 0.5139816380759346, 0.8491301668089417, 0.9358164623685468, 0.8711764330015488, 0.9974609766070998, 0.909519551219379, 0.5915258735495681, 0.567625122291107, 0.7589619539268198, 0.9024678846771257, 0.6071269194509638, 0.8102085066964145, 0.7226136131782694, 0.5417735494564064, 0.5021940398782163, 0.6888206069296474, 0.9959575711821567, 0.9701830928619795, 0.9225005450284066, 0.6194057982871678, 0.8018568499759038, 0.8640725055717403, 0.5174257113323735, 0.7948315310942939, 0.6194600134994133, 0.6434723557482066, 0.6311707851673517, 0.6333889930224467, 0.9425860207557608, 0.602055261173888, 0.5807712276371997, 0.9939879979826541, 0.6491840876280763, 0.8628697488373203, 0.9578533109449152, 0.7420947178374142, 0.6142949836044843, 0.8652005542288967, 0.8786462865875523, 0.573355794347346, 0.9509534618224041, 0.8503859790222765, 0.9381895422746396, 0.6214418341169876, 0.7218451194113092, 0.5570697253774426, 0.776314441490965, 0.9853166701722438, 0.8953760214790139, 0.7996849502935799, 0.8478289749327352, 0.7028984500037343, 0.8795998694716184, 0.8104144141883207, 0.7156724100581938, 0.8505248022408394, 0.7758695697167441, 0.9350733954101169, 0.8789267335451644, 0.7550748653121064, 0.5462713466073628, 0.7840476091431494, 0.9417521218246018, 0.5419711555349487, 0.5786568802555321, 0.7693771535260154, 0.6265014396388997, 0.6655931847751663, 0.9773283280199695, 0.9546044723781018, 0.8340724897434157, 0.9683060866634476, 0.5001399107023636, 0.8774858152267193, 0.9294748971645981, 0.5402826586359877, 0.7833760651943742, 0.5463793496684042, 0.9765151230389668, 0.6963325811109191, 0.5740945948869673, 0.6059034366947902, 0.976363795512181, 0.9586744785219797, 0.8617346240929775, 0.7461143683823421, 0.5048120030683891, 0.7600366944201111, 0.7766833194677001, 0.6272265349659516, 0.6405638673199578, 0.9588256756263582, 
0.8503197690142028, 0.9839424731618136, 0.8124000334716612, 0.8050736038127854, 0.7462128035225153, 0.6248603836889024, 0.5327609012301722, 0.8339128417001244, 0.9886656763086086, 0.9208984408245298, 0.8860130331451068, 0.9866752627778754, 0.6009913774615068, 0.68328557269886, 0.8446975838893065, 0.9888107293671462, 0.9683890381363305, 0.977739755064111, 0.6524379117615284, 0.998911318026726, 0.6799927780341344, 0.8775484020179402, 0.6560357973751016, 0.7451599856222448, 0.9870849946102881, 0.5880973273511096, 0.8050526384730623, 0.6748244958451983, 0.6297393190211804, 0.596897531755493, 0.5940670973911517, 0.9634572207257754, 0.5706660248402509, 0.653013558592765, 0.9189781326891979, 0.9393690583186667, 0.7976595351735143, 0.6203475520474494, 0.8384480996887431, 0.529598171410532, 0.9259501504698707, 0.6900429725938853, 0.5030750576970209, 0.9704446499496828, 0.9395365951187312, 0.6548678031173893, 0.945377143252258, 0.7979962867124291, 0.6069503242792441, 0.5068480256583795, 0.5041881991695869, 0.535492614347675, 0.6641413349253429, 0.6492660211272339, 0.7560884455928757, 0.8920482823499201, 0.931058456437271, 0.5706285012541792, 0.681703744322389, 0.8824121501979917, 0.6269499941859034, 0.8750983369606002, 0.9098766808840533, 0.7339902769939712, 0.5683473598024874, 0.8684907916366487, 0.7960418316487713, 0.7172006417398389, 0.8240284005062627, 0.6431203873916461, 0.5191764292204233, 0.567995671918784, 0.5225409798058036, 0.8872325172189863, 0.929368125336101, 0.6642636695084851, 0.7273704863752025, 0.5592179359027629, 0.5173656609032258, 0.9936514440368452, 0.976054697053124, 0.6053540552095398, 0.6026691454999007, 0.6543383433776387, 0.5405175429396117, 0.8668557542566296, 0.7709823289876689, 0.5546005370584073, 0.9110819803139214, 0.5460346684988737, 0.8077390618922178, 0.6195528528873089, 0.673396806873974, 0.9531676851681437, 0.5896744187445115, 0.728279903946831, 0.7951980111733349, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 
50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0, 50000.0};
int h_B[]= {
0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250, 252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296, 298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342, 344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 374, 376, 378, 380, 382, 384, 386, 388, 390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434, 436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480, 482, 484, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526, 528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572, 574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618, 620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664, 666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 698, 700, 702, 704, 706, 708, 710, 712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756, 758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802, 804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 
824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 844, 846, 848, 850, 852, 854, 856, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894, 896, 898, 900, 902, 904, 906, 908, 910, 912, 914, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940, 942, 944, 946, 948, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986, 988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1022, 1024, 1026, 1028, 1030, 1032, 1034, 1036, 1038, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1060, 1062, 1064, 1066, 1068, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1090, 1092, 1094, 1096, 1098, 1100, 1102, 1104, 1106, 1108, 1110, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, 1140, 1142, 1144, 1146, 1148, 1150, 1152, 1154, 1156, 1158, 1160, 1162, 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178, 1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216, 1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330, 1332, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368, 1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406, 1408, 1410, 1412, 1414, 1416, 1418, 1420, 1422, 1424, 1426, 1428, 1430, 1432, 1434, 1436, 1438, 1440, 1442, 1444, 1446, 1448, 1450, 1452, 1454, 1456, 1458, 1460, 1462, 1464, 1466, 1468, 1470, 1472, 1474, 1476, 1478, 1480, 1482, 1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 
1520, 1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1556, 1558, 1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1580, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596, 1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634, 1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710, 1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1738, 1740, 1742, 1744, 1746, 1748, 1750, 1752, 1754, 1756, 1758, 1760, 1762, 1764, 1766, 1768, 1770, 1772, 1774, 1776, 1778, 1780, 1782, 1784, 1786, 1788, 1790, 1792, 1794, 1796, 1798, 1800, 1802, 1804, 1806, 1808, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824, 1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1854, 1856, 1858, 1860, 1862, 1864, 1866, 1868, 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900, 1902, 1904, 1906, 1908, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938, 1940, 1942, 1944, 1946, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976, 1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014, 2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052, 2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090, 2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2108, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128, 2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166, 2168, 2170, 2172, 2174, 2176, 2178, 2180, 2182, 2184, 
2186, 2188, 2190, 2192, 2194, 2196, 2198, 2200, 2202, 2204, 2206, 2208, 2210, 2212, 2214, 2216, 2218, 2220, 2222, 2224, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242, 2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280, 2282, 2284, 2286, 2288, 2290, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318, 2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356, 2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394, 2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432, 2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470, 2472, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508, 2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546, 2548, 2550, 2552, 2554, 2556, 2558, 2560, 2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576, 2578, 2580, 2582, 2584, 2586, 2588, 2590, 2592, 2594, 2596, 2598, 2600, 2602, 2604, 2606, 2608, 2610, 2612, 2614, 2616, 2618, 2620, 2622, 2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2658, 2660, 2662, 2664, 2666, 2668, 2670, 2672, 2674, 2676, 2678, 2680, 2682, 2684, 2686, 2688, 2690, 2692, 2694, 2696, 2698, 2700, 2702, 2704, 2706, 2708, 2710, 2712, 2714, 2716, 2718, 2720, 2722, 2724, 2726, 2728, 2730, 2732, 2734, 2736, 2738, 2740, 2742, 2744, 2746, 2748, 2750, 2752, 2754, 2756, 2758, 2760, 2762, 2764, 2766, 2768, 2770, 2772, 2774, 2776, 2778, 2780, 2782, 2784, 2786, 2788, 2790, 2792, 2794, 2796, 2798, 2800, 2802, 2804, 2806, 2808, 2810, 2812, 2814, 2816, 2818, 2820, 2822, 2824, 2826, 2828, 2830, 2832, 2834, 2836, 2838, 2840, 2842, 2844, 2846, 2848, 2850, 
2852, 2854, 2856, 2858, 2860, 2862, 2864, 2866, 2868, 2870, 2872, 2874, 2876, 2878, 2880, 2882, 2884, 2886, 2888, 2890, 2892, 2894, 2896, 2898, 2900, 2902, 2904, 2906, 2908, 2910, 2912, 2914, 2916, 2918, 2920, 2922, 2924, 2926, 2928, 2930, 2932, 2934, 2936, 2938, 2940, 2942, 2944, 2946, 2948, 2950, 2952, 2954, 2956, 2958, 2960, 2962, 2964, 2966, 2968, 2970, 2972, 2974, 2976, 2978, 2980, 2982, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002, 3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040, 3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3064, 3066, 3068, 3070, 3072, 3074, 3076, 3078, 3080, 3082, 3084, 3086, 3088, 3090, 3092, 3094, 3096, 3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116, 3118, 3120, 3122, 3124, 3126, 3128, 3130, 3132, 3134, 3136, 3138, 3140, 3142, 3144, 3146, 3148, 3150, 3152, 3154, 3156, 3158, 3160, 3162, 3164, 3166, 3168, 3170, 3172, 3174, 3176, 3178, 3180, 3182, 3184, 3186, 3188, 3190, 3192, 3194, 3196, 3198, 3200, 3202, 3204, 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, 3230, 3232, 3234, 3236, 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, 3256, 3258, 3260, 3262, 3264, 3266, 3268, 3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, 3294, 3296, 3298, 3300, 3302, 3304, 3306, 3308, 3310, 3312, 3314, 3316, 3318, 3320, 3322, 3324, 3326, 3328, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344, 3346, 3348, 3350, 3352, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382, 3384, 3386, 3388, 3390, 3392, 3394, 3396, 3398, 3400, 3402, 3404, 3406, 3408, 3410, 3412, 3414, 3416, 3418, 3420, 3422, 3424, 3426, 3428, 3430, 3432, 3434, 3436, 3438, 3440, 3442, 3444, 3446, 3448, 3450, 3452, 3454, 3456, 3458, 3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3496, 3498, 3500, 3502, 3504, 3506, 3508, 3510, 3513, 3515, 3517, 3519, 
3521, 3523, 3525, 3527, 3529, 3531, 3533, 3535, 3537, 3539, 3541, 3543, 3545, 3547, 3549, 3551, 3553, 3555, 3557, 3559, 3562, 3564, 3566, 3568, 3570, 3572, 3575, 3577, 3579, 3581, 3584, 3586, 3589, 3591, 3596, 3598, 3607, 3609, 3611, 3613, 3616, 3618, 3621, 3623, 3629, 3631, 3633, 3635, 3638, 3640, 3643, 3645, 3651, 3653, 3655, 3657, 3659, 3661, 3663, 3665, 3667, 3669, 3671, 3673, 3675, 3677, 3679, 3681, 3683, 3685, 3687, 3689, 3692, 3694, 3696, 3698, 3701, 3703, 3705, 3707, 3709, 3711, 3713, 3715, 3717, 3719, 3721, 3723, 3725, 3727, 3730, 3732, 3734, 3736, 3738, 3740, 3742, 3744, 3746, 3748, 3750, 3752, 3754, 3756, 3758, 3760, 3762, 3764, 3766, 3768, 3770, 3772, 3774, 3776, 3778, 3780, 3782, 3784, 3787, 3789, 3792, 3794, 3797, 3799, 3802, 3804, 3807, 3809, 3811, 3813, 3816, 3818, 3821, 3823, 3828, 3830, 3833, 3835, 3839, 3841, 3844, 3846, 3849, 3851, 3853, 3855, 3858, 3860, 3862, 3864, 3868, 3870, 3873, 3875, 3878, 3880, 3883, 3885, 3887, 3889, 3891, 3893, 3895, 3897, 3899, 3901, 3903, 3905, 3907, 3909, 3912, 3914, 3916, 3918, 3920, 3922, 3924, 3926, 3928, 3930, 3932, 3934, 3936, 3938, 3940, 3942, 3944, 3946, 3948, 3950, 3952, 3954, 3956, 3958, 3960, 3962, 3964, 3966, 3968, 3970, 3972, 3974, 3977, 3979, 3981, 3983, 3985, 3987, 3989, 3991, 3994, 3996, 3999, 4001, 4007, 4009, 4011, 4013, 4016, 4018, 4020, 4022, 4025, 4027, 4030, 4032, 4037, 4039, 4041, 4043, 4046, 4048, 4051, 4053, 4059, 4061, 4067, 4069, 4072, 4074, 4076, 4078, 4080, 4082, 4084, 4086, 4088, 4090, 4092, 4094, 4096, 4098, 4100, 4102, 4107, 4109, 4111, 4113, 4115, 4117, 4120, 4122, 4125, 4127, 4133, 4135, 4140, 4142, 4145, 4147, 4149, 4151, 4153, 4155, 4157, 4159, 4161, 4163, 4165, 4167, 4173, 4175, 4178, 4180, 4183, 4185, 4188, 4190, 4196, 4198, 4200, 4202, 4204, 4206, 4208, 4210, 4212, 4214, 4216, 4218, 4220, 4222, 4224, 4226, 4228, 4230, 4232, 4234, 4236, 4238, 4240, 4242, 4244, 4246, 4248, 4250, 4252, 4254, 4256, 4258, 4260, 4262, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 
4286, 4288, 4290, 4292, 4294, 4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332, 4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370, 4372, 4374, 4376, 4378, 4380, 4382, 4385, 4387, 4389, 4391, 4393, 4395, 4397, 4399, 4401, 4403, 4405, 4407, 4409, 4411, 4413, 4415, 4417, 4419, 4421, 4423, 4425, 4427, 4429, 4431, 4433, 4435, 4437, 4439, 4441, 4443, 4446, 4448, 4450, 4452, 4455, 4457, 4459, 4461, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4481, 4483, 4485, 4487, 4490, 4492, 4495, 4497, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4519, 4521, 4523, 4525, 4528, 4530, 4533, 4535, 4540, 4542, 4544, 4546, 4548, 4550, 4553, 4555, 4558, 4560, 4563, 4565, 4568, 4570, 4573, 4575, 4578, 4580, 4583, 4585, 4006, 4004, 4590, 4592, 4594, 4596, 4598, 4600, 4602, 4604, 4606, 4608, 4182, 4177, 4612, 4614, 4616, 4618, 4620, 4622, 4624, 4626, 4628, 4630, 4632, 4634, 4636, 4638, 4640, 4642, 4644, 4646, 4648, 4650, 4652, 4654, 4656, 4658, 4660, 4662, 4664, 4666, 4681, 4683, 4685, 4687, 4689, 4691, 4693, 4695, 4697, 4699, 4701, 4703, 4705, 4707, 4709, 4711, 4713, 4715, 4717, 4719, 4721, 4723, 4725, 4727, 4729, 4731, 4733, 4735, 4737, 4739, 4006, 4004, 4006, 4004, 4036, 4024, 4006, 4004, 4006, 4004, 4775, 4777, 4779, 4781, 4783, 4785, 4787, 4789, 4791, 4793, 4795, 4797, 4799, 4801, 4803, 4805, 3604, 3602, 3604, 3602, 3604, 3602, 3604, 3602, 3604, 3602, 3604, 3602, 3626, 3626, 3650, 3648, 3650, 3648, 3628, 3628, 3650, 3648, 3650, 3648, 4066, 4064, 4006, 4004, 4195, 4193, 4006, 4004, 4066, 4064, 4066, 4064, 4006, 4004, 4058, 4056, 4172, 4170, 4182, 4177, 4172, 4170, 4182, 4177, 3628, 3626, 3650, 3648, 3650, 3648, 4066, 4064, 4066, 4064, 4066, 4064, 4006, 4004, 4058, 4056, 4058, 4066, 4064, 4066, 4064, 4172, 4170, 4182, 4177, 4172, 4170, 4182, 4177, 4172, 4170, 3628, 3626, 3628, 3626, 3848, 3848, 3574, 3574, 3628, 3626, 3628, 3626, 3650, 3648, 3650, 3648, 3806, 
3801, 3806, 3801, 3604, 3602, 3604, 3602, 3604, 3602, 3604, 3602, 3604, 3602, 3604, 3602, 3628, 3626, 3628, 3626, 3650, 3648, 3650, 3648, 4006, 4004, 4006, 4004, 4036, 4024, 4036, 4024, 4058, 4056, 4058, 4056, 4006, 4004, 4006, 4004, 4006, 4004, 4006, 4004, 4036, 4024, 4058, 4056, 4195, 4193, 4036, 4024, 4036, 4024, 4066, 4064, 4172, 4170, 4036, 4024, 4036, 4024, 4058, 4056, 4058, 4056, 4066, 4064, 4066, 4064, 4006, 4004, 4036, 4024, 4036, 4024, 4066, 4064, 4066, 4064, 4132, 4130, 4132, 4130, 4172, 4170, 4172, 4170, 4172, 4170, 5250, 5252, 5254, 5256, 5258, 5260, 5262, 5264, 5266, 5268, 5270, 5272, 5274, 5276, 5278, 5280, 5282, 5284, 4582, 4587, 4587, 4582, 5339, 5341, 5343, 5345, 5347, 5349, 5351, 5353, 5355, 5357, 5359, 5361, 5363, 5365, 5367, 5369, 5371, 5373, 5375, 5377, 5379, 5381, 5383, 5385, 5387, 5389, 5391, 5393, 5395, 5397, 5399, 5401, 5403, 5405, 5407, 5409, 5411, 5413, 5415, 5417, 4587, 4582, 4587, 4582, 5478, 5480, 5482, 5484, 5486, 5488, 5490, 5492, 5494, 5496, 5498, 5500, 5502, 5504, 5506, 5508, 5511, 5513, 5515, 5517, 5519, 5521, 4006, 4004, 4066, 4064, 5542, 5544, 5546, 5548, 5550, 5552, 5554, 5556, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586, 5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 3628, 3626, 3628, 3626, 3628, 3626, 3806, 3801, 3806, 3801, 3604, 3602, 3604, 3602, 3604, 3602, 3604, 3602, 3628, 3626, 3806, 3801, 3806, 3801, 3628, 3626, 4006, 4004, 4036, 4024, 4066, 4064, 4036, 4024, 4024, 4036, 4066, 4064, 4066, 4064, 4066, 4064, 4066, 4064, 4036, 4024, 4058, 4056, 4058, 4056, 4066, 4064, 4036, 4024, 4036, 4024, 4066, 4064, 4006, 4004, 4006, 4004, 4066, 4064, 4066, 4064, 4006, 4004, 4006, 4004, 4066, 4064, 4006, 4004, 4006, 4004, 4036, 4024, 4006, 4004, 4006, 4004, 4066, 4064, 4066, 4064, 4006, 4004, 4006, 4004, 4066, 4064, 4172, 4170, 4172, 4170, 4195, 4193, 3628, 3626, 3628, 3626, 3650, 3648, 3628, 3626, 4006, 4004, 4172, 4170, 4006, 4004, 4066, 4064, 4058, 4056, 4066, 4064, 4195, 4193, 
4195, 4193, 4006, 4004, 4058, 4056, 4066, 4064, 4172, 4170, 4195, 4193, 4195, 4193, 4587, 4582, 4587, 4582, 6051, 6053, 6055, 6057, 6059, 6061, 6063, 6065, 6067, 6069, 6071, 6073, 6075, 6077, 6079, 6081, 6083, 6085, 4587, 4582, 6109, 6111, 6113, 6115, 6117, 6119, 6121, 6123, 6125, 6127, 6129, 6131, 6133, 6135, 6137, 6139, 4587, 4582, 4587, 4582, 4587, 4582, 6155, 6157, 6159, 6161, 4587, 4582, 4587, 4582, 6171, 6173, 6175, 6177, 6179, 6181, 6183, 6185, 6187, 6189, 6191, 6193, 6195, 6197, 6199, 6201, 4587, 4582, 4587, 4582, 4587, 4582, 6278, 6280, 6282, 6284, 6286, 6288, 6290, 6292, 6294, 6296, 6298, 6300, 6302, 6304, 6306, 6308, 3604, 3602, 3604, 3602, 3604, 3602, 3628, 3626, 3882, 3604, 3602, 3604, 3602, 3600, 3604, 3602, 3604, 3602, 3600, 3604, 3602, 3604, 3602, 3604, 3602, 3628, 3626, 3628, 3626, 3882, 3628, 3626, 3628, 3626, 4006, 4004, 4036, 4024, 4066, 4064, 4036, 4024, 4036, 4024, 4066, 4064, 4172, 4170, 4172, 4170, 4006, 4004, 4036, 4024, 4006, 4004, 4056, 4056, 4066, 4064, 4006, 4004, 4006, 4004, 4036, 4024, 4066, 4064, 4063, 4066, 4064, 4063, 4066, 4064, 4172, 4170, 4195, 4193, 4006, 4004, 4006, 4004, 4036, 4024, 4006, 4004, 4006, 4004, 4036, 4024, 4066, 4064, 4066, 4064, 4006, 4004, 4006, 4004, 4036, 4024, 4036, 4024, 4058, 4056, 4058, 4056, 4066, 4064, 4132, 4130, 4132, 4130, 4172, 4170, 4172, 4170, 4172, 4170, 4193, 4195, 3604, 3602, 3604, 3602, 3604, 3602, 3595, 3595, 3604, 3602, 3604, 3602, 3606, 3604, 3602, 3606, 3628, 3626, 3628, 3626, 3650, 3648, 3650, 3648, 3786, 3786, 3806, 3801, 3806, 3801, 3827, 3827, 3838, 3838, 3867, 3867, 4036, 4024, 4058, 4056, 4144, 4144, 4106, 4139, 4172, 4170, 4195, 4193, 4006, 4004, 4006, 4004, 4036, 4024, 4036, 4024, 4066, 4064, 4006, 4004, 4006, 4004, 4036, 4024, 4024, 4036, 4058, 4056, 4058, 4056, 4066, 4064, 4066, 4064, 4130, 4130, 4132, 4132, 4137, 4137, 4106, 4132, 4130, 4132, 4130, 4139, 4172, 4170, 4172, 4170, 4193, 4195, 4172, 4170, 4195, 4193, 6901, 6903, 6905, 6907, 6909, 6911, 4587, 4582, 4587, 4582, 6942, 
6944, 6946, 6948, 6950, 6952, 6954, 6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992, 4587, 4582, 4567, 4567, 4552, 4552, 4587, 4582, 4480, 4480, 4501, 4501, 4518, 4518, 4539, 4539, 7081, 7083, 7085, 7087, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106, 7108, 7110, 7112, 7114, 7116, 7119, 7121, 7126, 7128, 7130, 7132, 7135, 7137, 7139, 7141, 7145, 7147, 7150, 7152, 7155, 7157, 7165, 7167, 7169, 7171, 7173, 7175, 7178, 7180, 7183, 7185, 7188, 7190, 7197, 7199, 7203, 7205, 7209, 7211, 7213, 7215, 7233, 7235, 7253, 7255, 7123, 7118, 7123, 7118, 7293, 7295, 7123, 7118, 7302, 7304, 7306, 7308, 7123, 7118, 7313, 7315, 7123, 7118, 7177, 7192, 7634, 7636, 7638, 7640, 7673, 7675, 7677, 7679, 7681, 7683, 7685, 7687, 7689, 7691, 7693, 7695, 7159, 7159, 7713, 7715, 7717, 7719, 7123, 7118, 7123, 7118, 7123, 7118, 7123, 7118, 7754, 7756, 7758, 7760, 7762, 7764, 7771, 7773, 7123, 7118, 7123, 7118, 7160, 7123, 7118, 7123, 7118, 7164, 7897, 7899, 7123, 7118, 7123, 7118, 7914, 7916, 7918, 7920, 7922, 7924, 7123, 7118, 7089, 7162, 7123, 7118, 7089, 7162, 7162, 7089, 7089, 7162, 8429, 8431, 8433, 8435, 8437, 8439, 8441, 8443, 8445, 8447, 8449, 8451, 8453, 8455, 7123, 7118, 6276, 8479, 8481, 8483, 8485, 8487, 8489, 8491, 8493, 7123, 7118, 8544, 8546, 8587, 8589, 7154, 7149, 7154, 7149, 8599, 8601, 7123, 8621, 8623, 8625, 8627, 8638, 8640, 8642, 8644, 8677, 8679, 8681, 8683, 8685, 8687, 8689, 8691, 7118, 6276, 7154, 7149, 7154, 7149, 7177, 7192, 9044, 9046, 9048, 9050, 9052, 9054, 9056, 9058, 9060, 9062, 9064, 9066, 7123, 7118, 7089, 7162, 9086, 9088, 9091, 9093, 9106, 9108, 9111, 9113, 7123, 7118, 7125, 7089, 7162, 7162, 7089, 9155, 9157, 9159, 9161, 9163, 9165, 9168, 9170, 9185, 9187, 9189, 9191, 9194, 9196, 9199, 9201, 7089, 7125, 7144, 7144, 7160, 7162, 7164, 9172, 9198, 9172, 9172, 9198, 9172, 9172, 9198, 9198, 9172, 9172, 9198, 9203, 9203, 9193, 9193, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 
100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 11008, 11010, 11012, 11014, 11016, 11018, 11020, 11022, 11024, 11026, 11028, 11030, 11032, 11034, 11036, 11038, 11040, 11042, 11044, 11046, 11048, 11050, 11052, 11054, 11056, 11058, 11060, 11062, 11064, 11066, 11068, 11070, 11072, 11074, 11076, 11078, 11080, 11082, 11084, 11086, 11088, 11090, 11092, 11094, 11096, 11098, 11100, 11102, 11104, 11106, 11108, 11110, 11112, 11114, 11116, 11118, 11120, 11122, 11124, 11126, 11128, 11130, 11132, 11134, 11136, 11138, 11140, 11142, 11144, 11146, 11148, 11150, 11152, 11154, 11156, 11158, 11160, 11162, 11164, 11166, 11168, 11170, 11172, 11174, 11176, 11178, 11180, 11182, 11184, 11186, 11188, 11190, 11192, 11194, 11196, 11198, 11200, 11202, 11204, 11206, 11208, 11210, 11212, 11214, 11216, 11218, 11220, 11222, 11224, 11226, 11228, 11230, 11232, 11234, 11236, 11238, 11240, 11242, 11244, 11246, 11248, 11250, 11252, 11254, 11256, 11258, 11260, 11262, 11264, 11266, 11268, 11270, 11272, 11274, 11276, 11278, 11280, 11282, 11284, 11286, 11288, 11290, 11292, 11294, 11296, 11298, 11300, 11302, 11304, 11306, 11308, 11310, 11312, 11314, 11316, 11318, 11320, 11322, 11324, 11326, 11328, 11330, 11332, 11334, 11336, 11338, 11340, 11342, 11344, 11346, 11348, 11350, 11352, 11354, 
11356, 11358, 11360, 11362, 11364, 11366, 11368, 11370, 11372, 11374, 11376, 11378, 11380, 11382, 11384, 11386, 11388, 11390, 11392, 11394, 11396, 11398, 11400, 11402, 11404, 11406, 11408, 11410, 11412, 11414, 11416, 11418, 11420, 11422, 11424, 11426, 11428, 11430, 11432, 11434, 11436, 11438, 11440, 11442, 11444, 11446, 11448, 11450, 11452, 11454, 11456, 11458, 11460, 11462, 11464, 11466, 11468, 11470, 11472, 11474, 11476, 11478, 11480, 11482, 11484, 11486, 11488, 11490, 11492, 11494, 11496, 11498, 11500, 11502, 11504, 11506, 11508, 11510, 11512, 11514, 11516, 11518, 11520, 11522, 11524, 11526, 11528, 11530, 11532, 11534, 11536, 11538, 11540, 11542, 11544, 11546, 11548, 11550, 11552, 11554, 11556, 11558, 11560, 11562, 11564, 11566, 11568, 11570, 11572, 11574, 11576, 11578, 11580, 11582, 11584, 11586, 11588, 11590, 11592, 11594, 11596, 11598, 11600, 11602, 11604, 11606, 11608, 11610, 11612, 11614, 11616, 11618, 11620, 11622, 11624, 11626, 11628, 11630, 11632, 11634, 11636, 11638, 11640, 11642, 11644, 11646, 11648, 11650, 11652, 11654, 11656, 11658, 11660, 11662, 11664, 11666, 11668, 11670, 11672, 11674, 11676, 11678, 11680, 11682, 11684, 11686, 11688, 11690, 11692, 11694, 11696, 11698, 11700, 11702, 11704, 11706, 11708, 11710, 11712, 11714, 11716, 11718, 11720, 11722, 11724, 11726, 11728, 11730, 11732, 11734, 11736, 11738, 11740, 11742, 11744, 11746, 11748, 11750, 11752, 11754, 11756, 11758, 11760, 11762, 11764, 11766, 11768, 11770, 11772, 11774, 11776, 11778, 11780, 11782, 11784, 11786, 11788, 11790, 11792, 11794, 11796, 11798, 11800, 11802, 11804, 11806, 11808, 11810, 11812, 11814, 11816, 11818, 11820, 11822, 11824, 11826, 11828, 11830, 11832, 11834, 11836, 11838, 11840, 11842, 11844, 11846, 11848, 11850, 11852, 11854, 11856, 11858, 11860, 11862, 11864, 11866, 11868, 11870, 11872, 11874, 11876, 11878, 11880, 11882, 11884, 11886, 11888, 11890, 11892, 11894, 11896, 11898, 11900, 11902, 11904, 11906, 11908, 11910, 11912, 11914, 11916, 11918, 11920, 11922, 11924, 
11926, 11928, 11930, 11932, 11934, 11936, 11938, 11940, 11942, 11944, 11946, 11948, 11950, 11952, 11954, 11956, 11958, 11960, 11962, 11964, 11966, 11968, 11970, 11972, 11974, 11976, 11978, 11980, 11982, 11984, 11986, 11988, 11990, 11992, 11994, 11996, 11998, 12000, 12002, 12004, 12006, 12008, 12010, 12012, 12014, 12016, 12018, 12020, 12022, 12024, 12026, 12028, 12030, 12032, 12034, 12036, 12038, 12040, 12042, 12044, 12046, 12048, 12050, 12052, 12054, 12056, 12058, 12060, 12062, 12064, 12066, 12068, 12070, 12072, 12074, 12076, 12078, 12080, 12082, 12084, 12086, 12088, 12090, 12092, 12094, 12096, 12098, 12100, 12102, 12104, 12106, 12108, 12110, 12112, 12114, 12116, 12118, 12120, 12122, 12124, 12126, 12128, 12130, 12132, 12134, 12136, 12138, 12140, 12142, 12144, 12146, 12148, 12150, 12152, 12154, 12156, 12158, 12160, 12162, 12164, 12166, 12168, 12170, 12172, 12174, 12176, 12178, 12180, 12182, 12184, 12186, 12188, 12190, 12192, 12194, 12196, 12198, 12200, 12202, 12204, 12206, 12208, 12210, 12212, 12214, 12216, 12218, 12220, 12222, 12224, 12226, 12228, 12230, 12232, 12234, 12236, 12238, 12240, 12242, 12244, 12246, 12248, 12250, 12252, 12254, 12256, 12258, 12260, 12262, 12264, 12266, 12268, 12270, 12272, 12274, 12276, 12278, 12280, 12282, 12284, 12286, 12288, 12290, 12292, 12294, 12296, 12298, 12300, 12302, 12304, 12306, 12308, 12310, 12312, 12314, 12316, 12318, 12320, 12322, 12324, 12326, 12328, 12330, 12332, 12334, 12336, 12338, 12340, 12342, 12344, 12346, 12348, 12350, 12352, 12354, 12356, 12358, 12360, 12362, 12364, 12366, 12368, 12370, 12372, 12374, 12376, 12378, 12380, 12382, 12384, 12386, 12388, 12390, 12392, 12394, 12396, 12398, 12400, 12402, 12404, 12406, 12408, 12410, 12412, 12414, 12416, 12418, 12420, 12422, 12424, 12426, 12428, 12430, 12432, 12434, 12436, 12438, 12440, 12442, 12444, 12446, 12448, 12450, 12452, 12454, 12456, 12458, 12460, 12462, 12464, 12466, 12468, 12470, 12472, 12474, 12476, 12478, 12480, 12482, 12484, 12486, 12488, 12490, 12492, 12494, 
12496, 12498, 12500, 12502, 12504, 12506, 12508, 12510, 12512, 12514, 12516, 12518, 12520, 12522, 12524, 12526, 12528, 12530, 12532, 12534, 12536, 12538, 12540, 12542, 12544, 12546, 12548, 12550, 12552, 12554, 12556, 12558, 12560, 12562, 12564, 12566, 12568, 12570, 12572, 12574, 12576, 12578, 12580, 12582, 12584, 12586, 12588, 12590, 12592, 12594, 12596, 12598, 12600, 12602, 12604, 12606, 12608, 12610, 12612, 12614, 12616, 12618, 12620, 12622, 12624, 12626, 12628, 12630, 12632, 12634, 12636, 12638, 12640, 12642, 12644, 12646, 12648, 12650, 12652, 12654, 12656, 12658, 12660, 12662, 12664, 12666, 12668, 12670, 12672, 12674, 12676, 12678, 12680, 12682, 12684, 12686, 12688, 12690, 12692, 12694, 12696, 12698, 12700, 12702, 12704, 12706, 12708, 12710, 12712, 12714, 12716, 12718, 12720, 12722, 12724, 12726, 12728, 12730, 12732, 12734, 12736, 12738, 12740, 12742, 12744, 12746, 12748, 12750, 12752, 12754, 12756, 12758, 12760, 12762, 12764, 12766, 12768, 12770, 12772, 12774, 12776, 12778, 12780, 12782, 12784, 12786, 12788, 12790, 12792, 12794, 12796, 12798, 12800, 12802, 12804, 12806, 12808, 12810, 12812, 12814, 12816, 12818, 12820, 12822, 12824, 12826, 12828, 12830, 12832, 12834, 12836, 12838, 12840, 12842, 12844, 12846, 12848, 12850, 12852, 12854, 12856, 12858, 12860, 12862, 12864, 12866, 12868, 12870, 12872, 12874, 12876, 12878, 12880, 12882, 12884, 12886, 12888, 12890, 12892, 12894, 12896, 12898, 12900, 12902, 12904, 12906, 12908, 12910, 12912, 12914, 12916, 12918, 12920, 12922, 12924, 12926, 12928, 12930, 12932, 12934, 12936, 12938, 12940, 12942, 12944, 12946, 12948, 12950, 12952, 12954, 12956, 12958, 12960, 12962, 12964, 12966, 12968, 12970, 12972, 12974, 12976, 12978, 12980, 12982, 12984, 12986, 12988, 12990, 12992, 12994, 12996, 12998, 13000, 13002, 13004, 13006, 13008, 13010, 13012, 13014, 13016, 13018, 13020, 13022, 13024, 13026, 13028, 13030, 13032, 13034, 13036, 13038, 13040, 13042, 13044, 13046, 13048, 13050, 13052, 13054, 13056, 13058, 13060, 13062, 13064, 
13066, 13068, 13070, 13072, 13074, 13076, 13078, 13080, 13082, 13084, 13086, 13088, 13090, 13092, 13094, 13096, 13098, 13100, 13102, 13104, 13106, 13108, 13110, 13112, 13114, 13116, 13118, 13120, 13122, 13124, 13126, 13128, 13130, 13132, 13134, 13136, 13138, 13140, 13142, 13144, 13146, 13148, 13150, 13152, 13154, 13156, 13158, 13160, 13162, 13164, 13166, 13168, 13170, 13172, 13174, 13176, 13178, 13180, 13182, 13184, 13186, 13188, 13190, 13192, 13194, 13196, 13198, 13200, 13202, 13204, 13206, 13208, 13210, 13212, 13214, 13216, 13218, 13220, 13222, 13224, 13226, 13228, 13230, 13232, 13234, 13236, 13238, 13239, 13240, 13242, 13244, 13246, 13248, 13250, 13251, 13252, 13254, 13256, 13258, 13260, 13262, 13264, 13266, 13268, 13270, 13272, 13274, 13276, 13278, 13280, 13282, 13284, 13286, 13288, 13290, 13292, 13294, 13296, 13298, 13300, 13302, 13304, 13306, 13308, 13310, 13311, 13312, 13313, 13314, 13315, 13316, 13317, 13318, 13319, 13320, 13322, 13324, 13326, 13328, 13330, 13332, 13334, 13336, 13337, 13338, 13339, 13340, 13341, 13342, 13343, 13344, 13345, 13346, 13347, 13348, 13349, 13350, 13351, 13352, 13353, 13354, 13355, 13356, 13357, 13358, 13359, 13360, 13361, 13362, 13363, 13364, 13365, 13366, 13367, 13368, 13369, 13370, 13371, 13372, 13373, 13374, 13375, 13376, 13377, 13378, 13379, 13380, 13381, 13382, 13383, 13384, 13385, 13386, 13387, 13388, 13389, 13390, 13391, 13392, 13393, 13394, 13395, 13396, 13397, 13398, 13399, 13400, 13401, 13402, 13403, 13404, 13405, 13406, 13407, 13408, 13409, 13410, 13411, 13412, 13413, 13414, 13415, 13416, 13417, 13418, 13419, 13420, 13421, 13422, 13423, 13424, 13425, 13426, 13427, 13428, 13429, 13430, 13431, 13432, 13433, 13434, 13435, 13436, 13437, 13438, 13439, 13440, 13441, 13442, 13443, 13444, 13445, 13446, 13447, 13448, 13449, 13450, 13451, 13452, 13453, 13454, 13455, 13456, 13457, 13458, 13459, 13460, 13461, 13462, 13463, 13464, 13465, 13466, 13467, 13468, 13469, 13470, 13471, 13472, 13473, 13474, 13475, 13476, 13477, 13478, 
13479, 13480, 13481, 13482, 13483, 13484, 13485, 13486, 13487, 13488, 13489, 13490, 13491, 13492, 13493, 13494, 13495, 13496, 13497, 13498, 13499, 13500, 13501, 13502, 13503, 13504, 13505, 13506, 13507, 13508, 13509, 13510, 13511, 13512, 13513, 13514, 13515, 13516, 13517, 13518, 13519, 13520, 13521, 13523, 13525, 13527, 13529, 13531, 13533, 13535, 13537, 13539, 13540, 13541, 13542, 13543, 13545, 13547, 13549, 13551, 13553, 13555, 13557, 13559, 13561, 13563, 13565, 13567, 13569, 13571, 13573, 13575, 13577, 13579, 13581, 13583, 13584, 13585, 13586, 13587, 13589, 13591, 13593, 13595, 13597, 13599, 13601, 13603, 13605, 13607, 13609, 13610, 13611, 13612, 13613, 13615, 13617, 13619, 13621, 13623, 13625, 13627, 13629, 13631, 13633, 13635, 13637, 13639, 13641, 13643, 13645, 13646, 13647, 13648, 13649, 13650, 13651, 13652, 13653, 13654, 13655, 13656, 13657, 13658, 13659, 13660, 13661, 13662, 13663, 13664, 13665, 13666, 13667, 13668, 13669, 13670, 13671, 13672, 13673, 13674, 13675, 13676, 13677, 13678, 13679, 13680, 13681, 13682, 13683, 13684, 13685, 13686, 13687, 13688, 13689, 13690, 13691, 13692, 13693, 13694, 13695, 13696, 13697, 13698, 13699, 13700, 13701, 13702, 13703, 13704, 13705, 13706, 13707, 13708, 13709, 13710, 13711, 13712, 13713, 13714, 13715, 13716, 13717, 13718, 13719, 13720, 13721, 13722, 13723, 13724, 13725, 13726, 13727, 13728, 13729, 13730, 13731, 13732, 13733, 13734, 13735, 13736, 13737, 13738, 13739, 13740, 13741, 13742, 13743, 13744, 13745, 13746, 13747, 13748, 13749, 13750, 13751, 13752, 13753, 13754, 13755, 13756, 13757, 13758, 13759, 13760, 13761, 13762, 13763, 13764, 13765, 13766, 13767, 13768, 13769, 13770, 13771, 13772, 13773, 13774, 13775, 13776, 13777, 13778, 13779, 13780, 13781, 13782, 13783, 13785, 13787, 13789, 13791, 13793, 13795, 13797, 13799, 13801, 13802, 13803, 13805, 13807, 13809, 13811, 13813, 13815, 13817, 13819, 13820, 13821, 13822, 13823, 13824, 13825, 13827, 13829, 13830, 13831, 13832, 13833, 13835, 13837, 13839, 13841, 13843, 
13845, 13847, 13849, 13850, 13851, 13852, 13853, 13854, 13855, 13857, 13859, 13861, 13863, 13865, 13867, 13869, 13871, 13872, 13873, 13874, 13875, 13876, 13877, 13878, 13879, 13880, 13881, 13882, 13883, 13884, 13885, 13886, 13887, 13888, 13889, 13890, 13891, 13892, 13893, 13894, 13895, 13896, 13897, 13898, 13899, 13900, 13901, 13902, 13903, 13904, 13905, 13906, 13907, 13908, 13909, 13910, 13911, 13912, 13913, 13914, 13915, 13916, 13917, 13918, 13919, 13920, 13921, 13922, 13923, 13924, 13925, 13926, 13927, 13928, 13929, 13930, 13931, 13932, 13933, 13934, 13935, 13936, 13937, 13938, 13939, 13940, 13941, 13942, 13943, 13944, 13945, 13946, 13947, 13948, 13949, 13950, 13951, 13952, 13953, 13954, 13955, 13956, 13957, 13958, 13959, 13960, 13961, 13962, 13963, 13964, 13965, 13966, 13967, 13968, 13969, 13970, 13971, 13972, 13973, 13974, 13975, 13976, 13977, 13978, 13979, 13980, 13981, 13982, 13983, 13984, 13985, 13986, 13987, 13988, 13989, 13990, 13991, 13992, 13993, 13994, 13995, 13996, 13997, 13998, 13999, 14000, 14001, 14002, 14003, 14004, 14005, 14006, 14007, 14008, 14009, 14010, 14011, 14012, 14013, 14014, 14015, 14016, 14017, 14018, 14019, 14020, 14021, 14022, 14023, 14024, 14025, 14026, 14027, 14028, 14029, 14030, 14031, 14032, 14033, 14034, 14035, 14036, 14037, 14038, 14039, 14040, 14041, 14042, 14043, 14044, 14045, 14046, 14047, 14048, 14049, 14050, 14051, 14052, 14053, 14054, 14055, 14056, 14057, 14058, 14059, 14060, 14061, 14062, 14063, 14064, 14065, 14066, 14067, 14068, 14069, 14070, 14071, 14072, 14073, 14074, 14075, 14076, 14077, 14078, 14079, 14080, 14081, 14082, 14083, 14084, 14085, 14086, 14087, 14089, 14091, 14093, 14094, 14095, 14096, 14097, 14099, 14101, 14103, 14105, 14107, 14109, 14111, 14113, 14115, 14117, 14119, 14121, 14123, 14124, 14125, 14126, 14127, 14128, 14129, 14130, 14131, 14132, 14133, 14134, 14135, 14136, 14137, 14138, 14139, 14141, 14143, 14145, 14147, 14149, 14151, 14153, 14155, 14157, 14159, 14161, 14163, 14165, 14167, 14169, 14171, 
14173, 14175, 14177, 14179, 14181, 14183, 14185, 14187, 14189, 14191, 14193, 14195, 14197, 14198, 14199, 14200, 14201, 14203, 14204, 14205, 14207, 14209, 14210, 14211, 14213, 14214, 14215, 14216, 14217, 14219, 14221, 14223, 14225, 14227, 14229, 14231, 14233, 14234, 14235, 14237, 14239, 14240, 14241, 14242, 14243, 14244, 14245, 14246, 14247, 14249, 14251, 14253, 14255, 14256, 14257, 14258, 14259, 14260, 14261, 14262, 14263, 14264, 14265, 14267, 14268, 14269, 14270, 14271, 14273, 14275, 14277, 14278, 14279, 14280, 14281, 14282, 14283, 14284, 14285, 14286, 14287, 14288, 14289, 14291, 14293, 14295, 14297, 14299, 14301, 14303, 14304, 14305, 14306, 14308, 14310, 14312, 14314, 14315, 14316, 14318, 14320, 14321, 14322, 14323, 14324, 14326, 14327, 14329, 14331, 14333, 14335, 14337, 14339, 14341, 14343, 14344, 14345, 14346, 14347, 14348, 14349, 14350, 14351, 14353, 14355, 14357, 14359, 14361, 14363, 14364, 14365, 14366, 14367, 14369, 14371, 14373, 14375, 14376, 14377, 14378, 14379, 14380, 14381, 14382, 14384, 14386, 14388, 14390, 14392, 14394, 14396, 14398, 14399, 14400, 14401, 14402, 14403, 14404, 14405, 14406, 14407, 14408, 14409, 14410, 14411, 14412, 14413, 14414, 14415, 14416, 14417, 14418, 14419, 14420, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 
184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 15707, 15714, 4499, 4494, 4480, 4499, 4494, 4501, 4537, 4532, 4532, 4537, 4537, 4532, 4518, 4003, 3998, 15745, 4003, 3998, 15747, 4034, 4029, 15749, 4003, 3998, 15751, 4003, 3998, 15753, 4034, 4029, 4034, 4029, 4499, 4494, 4499, 4494, 4499, 4494, 4499, 4494, 4557, 4557, 3593, 3588, 3574, 3588, 3593, 3593, 3588, 15763, 3600, 15765, 15767, 3593, 3588, 3574, 3588, 3593, 3593, 3588, 15769, 3600, 15771, 15773, 3625, 3620, 3625, 3620, 3647, 3642, 15777, 3647, 3642, 15779, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 3604, 3602, 3600, 3604, 3602, 3604, 3602, 3625, 3620, 3625, 3620, 3647, 3642, 15783, 3647, 3642, 15785, 15787, 15789, 4071, 4063, 4129, 4124, 4192, 4187, 15791, 15793, 15795, 15797, 15799, 15801, 15803, 15805, 15807, 15809, 4192, 4187, 15811, 3625, 3620, 3647, 3642, 15813, 3647, 3642, 15815, 4003, 3998, 4055, 4050, 4055, 4050, 15817, 15819, 15821, 4071, 15823, 4055, 4050, 15825, 4055, 4050, 4055, 4050, 15828, 4063, 15830, 4071, 15832, 15834, 15836, 15838, 4192, 4187, 4192, 4187, 15840, 3574, 3604, 3602, 15842, 15844, 3825, 3820, 3796, 3791, 3806, 3801, 3825, 3820, 3843, 3843, 3593, 3588, 3593, 3588, 3604, 3602, 3604, 3602, 3600, 3625, 3620, 15850, 3625, 3620, 15852, 3647, 3642, 15854, 3647, 3642, 15856, 3796, 3791, 3806, 3801, 3820, 3825, 3827, 3729, 3729, 3848, 3796, 3791, 3806, 3801, 3825, 3820, 3796, 3791, 15858, 3796, 3791, 15860, 3825, 3820, 3843, 3848, 3877, 3872, 3877, 3872, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 15862, 15864, 15866, 3600, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 15868, 15870, 15872, 3600, 3625, 3620, 15874, 3625, 3620, 15876, 3647, 3642, 15878, 3647, 3642, 15880, 3825, 3820, 3786, 
3786, 4003, 3998, 15882, 4003, 3998, 15884, 4034, 4029, 15886, 4034, 4029, 15888, 4055, 4050, 15890, 4055, 4050, 15892, 4003, 3998, 15894, 4003, 3998, 15896, 4034, 4029, 4034, 4029, 4003, 3998, 15898, 4003, 3998, 15900, 4034, 4029, 15902, 15904, 4063, 15906, 4003, 3998, 4006, 4004, 4034, 4029, 15908, 4003, 3998, 4006, 4004, 4034, 4029, 15910, 15912, 4192, 4187, 15914, 4182, 4177, 4192, 4187, 4003, 3998, 4006, 4004, 4029, 15916, 4003, 3998, 4006, 4004, 4034, 15918, 4003, 3998, 4006, 4004, 4034, 4029, 4034, 4029, 4055, 4050, 15920, 4055, 4050, 15922, 15924, 4063, 15926, 4071, 4003, 3998, 15928, 15930, 15932, 4055, 4050, 4058, 4056, 15934, 4063, 15936, 4071, 15938, 15940, 4139, 4139, 15942, 4182, 4177, 4187, 4192, 15944, 4182, 4177, 4187, 4192, 15946, 4177, 4192, 4187, 4192, 4187, 4499, 4494, 4499, 4494, 4494, 4499, 4480, 4537, 4532, 4518, 4537, 4532, 4537, 4532, 4562, 4557, 4562, 4557, 4557, 4562, 4552, 4577, 4572, 4577, 4572, 4499, 4494, 4499, 4494, 4494, 4499, 4501, 4532, 4537, 4532, 4537, 4532, 4537, 4537, 4532, 4562, 4557, 4562, 4557, 4562, 4557, 4567, 4577, 4572, 15959, 4499, 4494, 4480, 4499, 4494, 4501, 4537, 4532, 4532, 4537, 4537, 4532, 4518, 4562, 4557, 4562, 4557, 4562, 4557, 4562, 4557, 14903, 15981, 4499, 4494, 4480, 4494, 4499, 4501, 4532, 4537, 4537, 4532, 4532, 4537, 4518, 4562, 4557, 4562, 4557, 4562, 4557, 4552, 14919, 15983, 4562, 4557, 4562, 4557, 4557, 4562, 4557, 4562, 4577, 4572, 4587, 4582, 3625, 3620, 3806, 3801, 3882, 4055, 4050, 15996, 4055, 4050, 4056, 15998, 4106, 4129, 4124, 4106, 4106, 16016, 16018, 3796, 3791, 3825, 3820, 3729, 3604, 3602, 3647, 3642, 3650, 3648, 3791, 3801, 3791, 3806, 3825, 3820, 3825, 3820, 3791, 3806, 3801, 3843, 3843, 3625, 3620, 16020, 3647, 3642, 3796, 3791, 16022, 3796, 3791, 16024, 3729, 3882, 3593, 3588, 16026, 16028, 16030, 3593, 3588, 16032, 3625, 3620, 3625, 3620, 16034, 3647, 3642, 3650, 3647, 3642, 3648, 3796, 3791, 16036, 3796, 3791, 16038, 3729, 3604, 3602, 16040, 3796, 3796, 3838, 4106, 16042, 4056, 
4058, 4063, 16044, 4058, 4056, 16046, 4129, 4124, 4182, 4177, 4182, 4177, 16048, 4034, 4029, 4034, 4029, 4055, 4050, 4055, 4050, 16052, 4071, 16054, 16056, 16058, 4071, 4063, 4124, 4124, 4124, 4124, 4144, 4124, 4124, 4182, 4177, 4003, 3998, 16060, 4055, 4050, 16062, 4055, 4050, 16064, 16066, 4071, 4063, 4003, 3998, 4034, 4029, 16068, 16070, 16072, 4063, 16074, 16076, 4055, 4050, 4056, 16078, 16080, 16082, 16084, 4055, 4050, 16086, 4106, 4192, 16088, 4003, 3998, 16090, 16092, 4003, 3998, 16094, 16096, 4034, 4029, 4055, 4050, 4058, 4055, 4050, 4056, 16098, 4071, 16100, 4063, 4003, 3998, 16102, 16104, 4055, 4050, 4058, 4056, 16106, 4071, 4063, 4129, 4129, 4129, 4129, 16108, 4182, 4177, 4187, 16110, 4182, 4177, 16112, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 3604, 3602, 3604, 3602, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 3604, 3602, 3604, 3602, 3625, 3620, 16114, 16116, 3647, 3642, 3647, 3642, 16118, 3796, 3791, 3806, 3801, 3825, 3820, 3796, 3791, 3806, 3801, 3825, 3820, 3729, 3843, 3843, 3838, 3867, 3593, 3588, 3574, 3593, 3588, 3604, 3602, 16120, 3806, 3801, 3827, 3729, 3877, 3872, 3882, 3877, 3872, 3843, 3848, 3843, 3838, 3877, 3872, 3877, 3872, 3867, 16122, 4058, 4056, 4129, 4124, 4137, 4144, 4177, 4192, 4187, 4192, 4187, 16124, 4182, 4177, 16126, 4058, 4056, 16128, 4071, 4063, 16130, 16132, 4177, 4182, 4192, 4187, 16134, 4182, 4177, 4192, 4187, 16136, 4003, 3998, 16138, 4034, 4029, 4034, 4029, 4055, 4050, 4071, 4063, 4003, 3998, 4006, 4004, 4055, 4050, 16140, 4055, 4050, 16142, 16144, 4192, 4187, 16146, 4192, 4187, 16148, 4499, 4494, 4499, 4494, 4499, 4494, 4499, 4494, 4537, 4532, 4537, 4532, 4518, 4537, 4532, 4562, 4557, 4562, 4557, 4562, 4557, 4552, 4577, 4572, 16150, 4499, 4494, 4499, 4494, 4499, 4494, 4499, 4494, 4537, 4532, 4537, 4532, 4518, 4537, 4532, 4562, 4557, 4562, 4557, 4562, 4557, 4552, 4577, 4572, 16152, 4499, 4494, 4499, 4494, 4499, 4494, 4501, 4532, 4537, 4532, 4537, 4532, 4537, 4518, 4557, 4557, 4557, 4552, 4572, 4577, 16163, 16173, 4480, 4499, 
4494, 4577, 4572, 16175, 4562, 15214, 16177, 4577, 4572, 16181, 4577, 4572, 16183, 4494, 4499, 4480, 4587, 4582, 4499, 4494, 4499, 4494, 4499, 4494, 4501, 4532, 4537, 4532, 4537, 4532, 4537, 4518, 4557, 4557, 4577, 4572, 16193, 4499, 4494, 4499, 4494, 4494, 4499, 4501, 4537, 4532, 4532, 4537, 4537, 4532, 4518, 4557, 4557, 4572, 4577, 16195, 4499, 4494, 4499, 4494, 4494, 4499, 4501, 4537, 4532, 4537, 4532, 4537, 4532, 4518, 4562, 4562, 4562, 4562, 4577, 4572, 16197, 4499, 4494, 4480, 4494, 4499, 4501, 3593, 3588, 16207, 16209, 3574, 16211, 16213, 3825, 3820, 3848, 3729, 3877, 3872, 3593, 3588, 3593, 3588, 3593, 3588, 3574, 16216, 16218, 3593, 3588, 3593, 3588, 3593, 3588, 3574, 16221, 16223, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 16226, 16228, 16230, 3600, 3625, 3620, 16232, 16234, 3647, 3642, 3648, 3647, 3642, 3650, 3796, 3791, 3806, 3801, 3825, 3820, 3796, 3791, 3806, 3801, 3825, 3820, 3796, 3791, 3806, 3801, 3820, 3825, 3820, 3825, 3729, 3848, 3729, 3877, 3872, 3593, 3588, 3593, 3588, 3593, 3588, 3574, 3604, 3602, 3604, 3602, 3604, 3602, 3600, 3625, 3620, 16237, 3625, 3620, 16239, 3647, 3642, 3650, 3648, 3806, 3801, 3825, 3820, 3806, 3801, 3825, 3820, 3825, 3820, 3848, 3843, 3838, 3877, 3872, 3877, 3872, 3882, 4063, 4003, 3998, 4063, 4129, 4124, 4182, 4177, 4187, 4182, 4177, 4187, 16241, 16243, 4034, 4029, 4055, 4050, 4058, 16245, 4071, 16247, 16249, 4055, 4050, 4058, 4056, 16251, 4063, 4129, 4124, 4129, 4124, 4144, 16253, 4187, 4182, 4177, 4187, 16255, 4003, 3998, 16257, 4034, 4029, 16259, 4003, 3998, 4003, 3998, 16261, 4055, 4050, 4055, 4050, 4055, 4050, 16265, 4063, 16267, 4003, 3998, 16269, 4034, 4029, 16271, 4055, 4050, 4058, 4056, 16273, 16276, 16279, 4071, 4129, 4124, 4106, 4144, 4139, 4139, 16281, 4182, 4177, 4187, 4187, 4187, 16283, 4003, 3998, 16285, 4003, 3998, 16287, 4034, 4029, 16289, 4003, 3998, 16291, 4003, 3998, 16293, 4034, 4029, 16295, 4055, 4050, 4058, 4055, 4050, 4056, 16297, 4063, 16299, 4071, 4003, 3998, 16301, 4003, 3998, 16303, 4034, 
4029, 16305, 4034, 4029, 16307, 4055, 4050, 16309, 4055, 4050, 16311, 16313, 4071, 4063, 4129, 4124, 4106, 4144, 4106, 4106, 16315, 4129, 4124, 16317, 4139, 4137, 4139, 4144, 16319, 4182, 4177, 4192, 16321, 4182, 4177, 4192, 16323, 4182, 4177, 4192, 4192, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 16327, 3600, 16329, 16331, 3593, 3588, 3574, 3593, 3588, 3593, 3588, 16335, 3600, 16337, 16340, 3625, 3620, 16343, 3625, 3620, 16345, 3647, 3642, 16347, 3647, 3642, 16349, 3796, 3791, 3806, 3801, 3825, 3820, 3825, 3820, 3796, 3791, 16353, 3796, 3791, 16355, 3820, 3825, 3827, 3729, 3729, 3848, 3729, 3877, 3872, 3882, 3877, 3872, 3877, 3872, 3796, 3791, 3806, 3801, 3825, 3820, 3786, 3796, 3791, 3806, 3801, 3825, 3820, 3825, 3820, 3843, 3843, 3843, 3848, 3877, 3872, 3877, 3872, 3877, 3872, 3882, 16363, 4058, 4056, 4071, 4063, 16365, 4137, 4129, 4124, 16371, 4192, 4187, 4182, 4177, 16373, 4003, 3998, 16375, 4003, 3998, 16377, 4034, 4029, 16379, 4034, 4029, 16381, 4055, 4050, 4058, 4056, 16383, 4071, 4063, 4003, 3998, 16385, 4003, 3998, 16387, 4034, 4029, 16389, 4034, 4029, 4034, 4029, 4055, 4050, 16393, 4055, 4050, 16395, 16397, 4063, 16399, 4071, 4129, 4124, 4129, 4124, 4129, 4124, 4129, 4124, 4144, 4129, 4124, 16408, 4129, 4124, 16410, 4137, 4144, 16413, 4177, 16415, 4182, 4192, 4187, 4192, 4187, 16419, 4182, 4177, 4192, 4187, 16421, 4499, 4494, 4499, 4494, 4501, 4577, 4572, 4499, 4494, 4499, 4494, 4499, 4494, 4501, 4537, 4532, 4537, 4532, 4518, 4562, 4557, 4562, 4557, 4577, 4572, 16426, 4577, 4572, 16428, 4499, 4494, 4499, 4494, 4499, 4494, 4501, 4532, 4537, 4532, 4537, 4532, 4537, 4518, 4562, 4557, 4562, 4557, 4562, 4557, 4552, 4577, 4572, 16443, 4499, 4494, 4480, 4494, 4499, 4501, 4537, 4532, 4537, 4532, 4532, 4537, 4518, 4562, 4557, 4562, 4557, 4557, 4562, 4557, 4562, 15680, 16449, 4494, 4499, 4494, 4499, 4499, 4494, 4499, 4494, 4537, 4532, 4537, 4532, 4537, 4532, 4537, 4532, 4557, 4562, 4552, 4562, 4557, 4567, 4577, 4572, 4587, 4582, 7123, 7118, 7154, 7149, 7159, 16406, 
16405, 7154, 7149, 7144, 7154, 7149, 7154, 7149, 7162, 7089, 7089, 7162, 7187, 7177, 7187, 16488, 16490, 7162, 7089, 7177, 7154, 7149, 7159, 7154, 7149, 7149, 7154, 7187, 7187, 7154, 7149, 7149, 7154, 7187, 7192, 7177, 16493, 7089, 7187, 7182, 16497, 7089, 7154, 7149, 7162, 7162, 16500, 16406, 16405, 16406, 16405, 16368, 16367, 16406, 16405, 16368, 16367, 16368, 16367, 16406, 16405, 16368, 16367, 7123, 7118, 7154, 7149, 7089, 7162, 7162, 7089, 7182, 7123, 7118, 7154, 7149, 7154, 7149, 7162, 7089, 7089, 7162, 7182, 7177, 7182, 7192, 16516, 16518, 7162, 7089, 16520, 16522, 7159, 7162, 7089, 7182, 16528, 16530, 7154, 7149, 7159, 7162, 7089, 7182, 7177, 7182, 7192, 16533, 16535, 7154, 7149, 7159, 7162, 7089, 7182, 7192, 7177, 16539, 7154, 7149, 7154, 7149, 7159, 7089, 7089, 16541, 7162, 7162, 16546, 7154, 7149, 7159, 7089, 7162, 7162, 7089, 16548, 16550, 16552, 16554, 7123, 7118, 16406, 16405, 16368, 16367, 16406, 16405, 16406, 16405, 16368, 16367, 16406, 16405, 16368, 16367, 16406, 16405, 16406, 16405, 16368, 16367, 16565, 7154, 7149, 7144, 7089, 7162, 7162, 7089, 7187, 7182, 16572, 7154, 7149, 7162, 7162, 7187, 7182, 7187, 7182, 16576, 16578, 7187, 7182, 7154, 7149, 7154, 7149, 7089, 7187, 7182, 7089, 16592, 16594, 7089, 7089, 7187, 7182, 7089, 7089, 7187, 7182, 16406, 16405, 16368, 16367, 16406, 16405, 16406, 16405, 16368, 16367, 16406, 16405, 16604, 7154, 7149, 7144, 16606, 16612, 7154, 7149, 7154, 7149, 7154, 7149, 7159, 16615, 16617, 7187, 7182, 7177, 7187, 7182, 7192, 7123, 7118, 7154, 7149, 7154, 7149, 7154, 7149, 7159, 7187, 7182, 7177, 7187, 7182, 7192, 7123, 7118, 7154, 7149, 7154, 7149, 7154, 7149, 7159, 7187, 7182, 7177, 7187, 7182, 7192, 9203, 9198, 9172, 9198, 9172, 9172, 9198, 9172, 9193, 9193, 9172, 9172, 9172, 9203, 9198, 9193, 9198, 9203, 9172, 9172, 9172, 9203, 9198, 9193, 9198, 9203, 9198, 9172, 9198, 9203, 9198, 9172, 9193, 9198, 9172, 9193, 9198, 9172, 9203, 9172, 9193, 9172, 9203, 9198, 9193, 9198, 9203, 9172, 9193, 9172, 9203, 9198, 9193, 9198, 
9203, 9172, 9172, 9203, 9198, 9172, 9193, 16634, 16633, 16631, 16633, 16631, 9198, 9172, 9198, 9172, 9198, 9172, 9203, 9172, 9172, 9172, 9203, 9198, 9193, 9198, 9203, 9203, 9203, 9198, 9193, 9198, 9203, 9198, 9193, 9198, 9203, 9172, 9172, 9172, 9203, 16591, 9198, 9193, 9198, 9203, 9172, 9172, 9172, 9203, 16591, 9198, 9172, 9193, 9172, 9198, 9203, 16638, 9193, 16640, 9203, 16642, 9193, 16644, 9203, 9172, 9172, 9172, 9172, 9198, 9193, 9198, 9203, 16633, 16631, 16633, 16631, 16649, 16648, 16649, 16648, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 16898, 16899, 16900, 16901, 16902, 16903, 16904, 16905, 16906, 16907, 16908, 16909, 16910, 16911, 16912, 16914, 16915, 16917, 16918, 16920, 16921, 16923, 16924, 16926, 16927, 16928, 16929, 16930, 16931, 16932, 16933, 16934, 16935, 16936, 16937, 16938, 16939, 16940, 16941, 16942, 16943, 16944, 16945, 16946, 16948, 16951, 16952, 16953, 16954, 16955, 16956, 16957, 16959, 16962, 16963, 16964, 16965, 16966, 16967, 16969, 16970, 16972, 16973, 16974, 16975, 16976, 16977, 16978, 16979, 16980, 16981, 16982, 16983, 16984, 16985, 16986, 16987, 16988, 16989, 16990, 16991, 16993, 16994, 16998, 16999, 17000, 17001, 17002, 17003, 17014, 17015, 17017, 17018, 17019, 17020, 17022, 17023, 17025, 17026, 17027, 17028, 17029, 17030, 17034, 17036, 17037, 17039, 17040, 17041, 17042, 17044, 17046, 17051, 17052, 17053, 17054, 17056, 17057, 17058, 17061, 17062, 17063, 17064, 17065, 17066, 17067, 17068, 17069, 17070, 17071, 17072, 17073, 17074, 17075, 17076, 17077, 17078, 17079, 17080, 17081, 17083, 17084, 17086, 17087, 17089, 17090, 17092, 17093, 17094, 17095, 17096, 
17097, 17098, 17099, 17100, 17101, 17102, 17103, 17104, 17105, 17106, 17107, 17108, 17109, 17111, 17112, 17114, 17115, 17116, 17117, 17118, 17119, 17120, 17121, 17122, 17123, 17124, 17125, 17126, 17127, 17128, 17132, 17133, 17134, 17135, 17136, 17137, 17138, 17139, 17143, 17144, 17145, 17147, 17148, 17150, 17151, 17153, 17154, 17156, 17157, 17158, 17159, 17160, 17161, 17163, 17164, 17166, 17167, 17169, 17170, 17172, 17173, 17175, 17176, 17178, 17179, 17181, 17182, 17184, 17185, 17186, 17187, 17188, 17189, 17191, 17192, 17194, 17195, 17198, 17200, 17201, 17202, 17203, 17204, 17205, 17207, 17208, 17209, 17210, 17211, 17212, 17215, 17216, 17218, 17219, 17220, 17221, 17222, 17223, 17224, 17225, 17226, 17228, 17229, 17230, 17231, 17232, 17234, 17235, 17236, 17237, 17238, 17239, 17240, 17241, 17242, 17243, 17245, 17246, 17249, 17251, 17252, 17253, 17257, 17258, 17259, 17260, 17262, 17264, 17267, 17268, 17270, 17271, 17272, 17273, 17275, 17276, 17277, 17278, 17280, 17281, 17282, 17283, 17284, 17285, 17286, 17287, 17288, 17289, 17290, 17291, 17292, 17293, 17294, 17295, 17296, 17297, 17298, 17299, 17300, 17301, 17302, 17303, 17304, 17305, 17306, 17307, 17308, 17309, 17310, 17311, 17312, 17313, 17314, 17315, 17316, 17317, 17318, 17319, 17320, 17321, 17322, 17323, 17324, 17325, 17326, 17327, 17328, 17329, 17330, 17331, 17332, 17333, 17335, 17336, 17337, 17338, 17339, 17340, 17341, 17342, 17343, 17344, 17345, 17346, 17347, 17348, 17349, 17350, 17351, 17352, 17353, 17354, 17355, 17356, 17358, 17359, 17360, 17361, 17362, 17363, 17364, 17365, 17366, 17367, 17368, 17369, 17370, 17371, 17372, 17373, 17374, 17375, 17376, 17377, 17378, 17380, 17381, 17382, 17383, 17384, 17385, 17386, 17387, 17388, 17389, 17390, 17391, 17392, 17393, 17394, 17395, 17396, 17397, 17398, 17400, 17401, 17402, 17404, 17405, 17406, 17407, 17408, 17411, 17412, 17413, 17414, 17415, 17416, 17417, 17418, 17419, 17420, 17421, 17422, 17423, 17424, 17425, 17426, 17427, 17428, 17429, 17430, 17431, 17432, 17433, 
17434, 17435, 17436, 17438, 17439, 17440, 17441, 17443, 17444, 17446, 17447, 17448, 17449, 17453, 17454, 17456, 17457, 17458, 17459, 17461, 17462, 17463, 17464, 17465, 17466, 17467, 17468, 17470, 17471, 17473, 17474, 17475, 17477, 17478, 17479, 17480, 17482, 17483, 17484, 17486, 17487, 17489, 17490, 17491, 17492, 17493, 17494, 17496, 17497, 17498, 17499, 17500, 17501, 17502, 17503, 17505, 17509, 17510, 17511, 17512, 17513, 17514, 17515, 17516, 17517, 17518, 17519, 17520, 17521, 17523, 17524, 17526, 17527, 17530, 17531, 17532, 17533, 17534, 17535, 17539, 17542, 17543, 17544, 17549, 17550, 17552, 17553, 17555, 17556, 17559, 17560, 17563, 17564, 17565, 17566, 17567, 17568, 17569, 17570, 17572, 17574, 17575, 17576, 17579, 17580, 17581, 17582, 17584, 17585, 17586, 17587, 17588, 17589, 17591, 17592, 17593, 17595, 17596, 17598, 17599, 17600, 17601, 17602, 17603, 17604, 17605, 17606, 17607, 17608, 17609, 17610, 17611, 17612, 17613, 17614, 17615, 17616, 17617, 17618, 17619, 17620, 17621, 17624, 17625, 17626, 17627, 17629, 17630, 17631, 17632, 17633, 17634, 17635, 17636, 17637, 17638, 17639, 17640, 17641, 17642, 17643, 17644, 17645, 17646, 17647, 17648, 17649, 17650, 17651, 17652, 17654, 17655, 17656, 17657, 17658, 17659, 17660, 17661, 17662, 17663, 17664, 17665, 17666, 17667, 17668, 17669, 17670, 17671, 17673, 17674, 17675, 17676, 17677, 17678, 17679, 17680, 17681, 17682, 17683, 17685, 17686, 17688, 17689, 17691, 17692, 17695, 17696, 17697, 17698, 17700, 17701, 17702, 17703, 17705, 17706, 17708, 17709, 17710, 17711, 17712, 17713, 17714, 17715, 17716, 17717, 17718, 17719, 17720, 17721, 17723, 17724, 17727, 17728, 17730, 17731, 17733, 17734, 17735, 17736, 17737, 17738, 17739, 17740, 17741, 17742, 17743, 17744, 17745, 17746, 17747, 17748, 17749, 17750, 17751, 17752, 17753, 17754, 17755, 17756, 17758, 17759, 17760, 17761, 17762, 17763, 17764, 17765, 17766, 17767, 17768, 17769, 17770, 17771, 17772, 17773, 17774, 17775, 17776, 17777, 17778, 17779, 17780, 17781, 17783, 17784, 
17785, 17786, 17787, 17788, 17789, 17790, 17791, 17792, 17793, 17794, 17795, 17796, 17797, 17798, 17799, 17800, 17801, 17802, 17805, 17806, 17807, 17808, 17809, 17811, 17812, 17814, 17815, 17817, 17818, 17820, 17821, 17822, 17823, 17824, 17825, 17826, 17827, 17828, 17829, 17830, 17831, 17832, 17833, 17834, 17835, 17836, 17837, 17838, 17839, 17840, 17841, 17842, 17844, 17845, 17846, 17847, 17848, 17849, 17850, 17851, 17852, 17853, 17854, 17855, 17856, 17857, 17858, 17859, 17860, 17861, 17863, 17864, 17865, 17866, 17867, 17868, 17869, 17870, 17871, 17872, 17873, 17874, 17875, 17876, 17877, 17878, 17879, 17880, 17881, 17882, 17884, 17885, 17886, 17887, 17888, 17889, 17890, 17891, 17894, 17897, 17898, 17899, 17900, 17901, 17902, 17903, 17904, 17905, 17906, 17907, 17908, 17909, 17912, 17913, 17914, 17915, 17916, 17917, 17918, 17921, 17922, 17923, 17924, 17925, 17926, 17927, 17931, 17932, 17933, 17936, 17937, 17938, 17939, 17940, 17941, 17942, 17943, 17944, 17945, 17946, 17947, 17948, 17949, 17950, 17951, 17952, 17953, 17954, 17955, 17956, 17957, 17958, 17959, 17960, 17961, 17962, 17963, 17964, 17965, 17966, 17967, 17968, 17969, 17970, 17971, 17972, 17973, 17974, 17975, 17976, 17977, 17978, 17979, 17980, 17981, 17982, 17984, 17985, 17987, 17988, 17989, 17990, 17991, 17992, 17993, 17994, 17995, 17996, 17997, 17998, 17999, 18000, 18001, 18002, 18003, 18004, 18005, 18006, 18007, 18008, 18009, 18010, 18011, 18012, 18013, 18014, 18015, 18016, 18017, 18018, 18019, 18020, 18023, 18024, 18025, 18026, 18027, 18029, 18032, 18033, 18034, 18035, 18037, 18038, 18039, 18040, 18041, 18042, 18044, 18045, 18046, 18047, 18049, 18050, 18052, 18053, 18055, 18056, 18057, 18058, 18060, 18061, 18062, 18063, 18064, 18065, 18067, 18069, 18070, 18072, 18073, 18075, 18076, 18077, 18078, 18082, 18083, 18084, 18085, 18086, 18087, 18088, 18090, 18091, 18092, 18093, 18094, 18096, 18097, 18099, 18100, 18102, 18103, 18105, 18106, 18108, 18109, 18111, 18112, 18114, 18115, 18116, 18117, 18118, 18119, 
18121, 18123, 18124, 18125, 18127, 18128, 18130, 18131, 18133, 18134, 18136, 18137, 18139, 18140, 18143, 18144, 18145, 18146, 18147, 18148, 18149, 18150, 18152, 18153, 18155, 18156, 18157, 18158, 18160, 18161, 18162, 18164, 18165, 18166, 18168, 18169, 18170, 18171, 18172, 18173, 18174, 18175, 18176, 18177, 18178, 18180, 18183, 18184, 18185, 18186, 18187, 18188, 18189, 18191, 18194, 18195, 18197, 18198, 18200, 18201, 18203, 18204, 18206, 18207, 18208, 18209, 18210, 18211, 18212, 18213, 18214, 18215, 18217, 18218, 18220, 18221, 18222, 18223, 18224, 18225, 18226, 18227, 18228, 18229, 18230, 18231, 18232, 18233, 18234, 18235, 18236, 18237, 18238, 18239, 18240, 18241, 18242, 18243, 18244, 18245, 18246, 18247, 18248, 18249, 18250, 18251, 18252, 18253, 18254, 18255, 18256, 18257, 18258, 18259, 18261, 18262, 18263, 18264, 18266, 18267, 18268, 18270, 18271, 18272, 18273, 18275, 18276, 18278, 18279, 18281, 18282, 18284, 18285, 18287, 18288, 18289, 18290, 18292, 18293, 18294, 18295, 18297, 18298, 18300, 18301, 18303, 18304, 18305, 18306, 18307, 18308, 18310, 18311, 18314, 18316, 18317, 18318, 18319, 18320, 18321, 18322, 18323, 18324, 18325, 18326, 18327, 18329, 18330, 18332, 18333, 18335, 18337, 18338, 18339, 18340, 18341, 18343, 18344, 18345, 18346, 18348, 18349, 18350, 18351, 18352, 18353, 18354, 18355, 18356, 18357, 18358, 18359, 18360, 18361, 18362, 18363, 18364, 18365, 18366, 18367, 18368, 18369, 18370, 18371, 18372, 18374, 18375, 18377, 18378, 18379, 18380, 18381, 18382, 18383, 18384, 18385, 18386, 18387, 18388, 18389, 18390, 18391, 18392, 18393, 18394, 18395, 18396, 18397, 18398, 18399, 18401, 18402, 18403, 18404, 18405, 18406, 18407, 18408, 18409, 18410, 18411, 18412, 18413, 18414, 18415, 18416, 18417, 18418, 18419, 18420, 18421, 18422, 18424, 18425, 18426, 18427, 18428, 18429, 18430, 18431, 18432, 18433, 18434, 18435, 18436, 18437, 18438, 18439, 18440, 18441, 18442, 18443, 18444, 18445, 18446, 18447, 18448, 18449, 18450, 18451, 18452, 18453, 18454, 16278, 16275, 
18455, 18456, 17013, 16897, 18457, 18458, 18459, 18460, 18461, 18462, 18463, 18464, 18465, 18466, 18467, 18468, 18469, 18470, 18473, 18474, 18475, 18476, 18477, 18478, 18479, 18480, 18481, 18482, 18483, 18484, 18485, 18486, 18487, 18488, 18489, 18490, 18491, 18493, 18494, 18495, 18497, 18498, 18499, 18500, 18501, 16342, 16339, 16342, 16339, 16278, 16275, 18503, 18504, 17013, 17011, 16278, 16275, 18505, 18506, 18507, 18508, 17013, 17011, 16278, 16275, 18509, 18510, 18511, 18512, 17050, 17048, 16342, 16339, 16342, 16339, 18513, 18514, 18515, 18516, 18517, 18518, 18519, 18520, 18521, 18522, 18523, 18524, 18525, 18526, 18527, 18528, 18529, 18530, 18531, 18532, 18533, 18534, 18535, 18536, 18537, 18538, 18539, 18540, 18541, 18544, 18545, 18548, 18549, 18550, 18551, 18554, 18555, 18556, 18557, 18558, 18559, 18560, 18561, 18562, 18565, 18566, 18567, 18568, 18569, 18570, 18571, 18572, 16220, 16339, 16225, 16342, 16342, 16339, 18574, 18575, 18576, 18577, 18578, 18579, 18580, 18582, 18583, 18585, 18586, 18587, 18588, 18589, 18590, 18591, 18596, 18597, 16339, 16220, 16225, 16342, 16342, 16339, 16220, 16339, 16342, 16225, 16339, 16342, 16220, 16339, 16342, 16225, 16342, 16339, 18598, 18599, 18600, 18601, 16278, 16275, 18602, 18603, 18604, 18605, 18606, 18607, 18608, 18609, 18610, 18611, 18612, 18613, 18614, 18615, 18616, 18617, 18619, 18620, 18621, 18622, 18623, 18624, 18625, 18626, 18627, 18629, 18630, 18631, 18632, 18633, 18634, 18635, 18636, 18639, 18640, 18641, 18642, 18643, 18644, 18645, 18646, 18647, 18648, 18651, 18652, 18653, 18654, 18655, 18656, 18657, 18658, 16220, 16339, 16225, 16342, 16339, 16342, 16220, 16339, 16225, 16342, 16342, 16339, 18659, 18660, 18661, 18662, 18663, 18664, 18665, 18666, 16278, 16275, 16342, 16339, 16342, 16339, 18667, 18668, 18669, 18670, 18672, 18673, 18674, 18677, 18678, 18679, 18680, 18681, 18682, 18683, 18686, 18687, 18688, 18689, 18690, 18691, 18692, 18693, 18694, 18695, 18696, 18697, 18698, 18699, 18700, 18701, 18702, 18703, 18704, 
18705, 18706, 18707, 18708, 18709, 18710, 18711, 18712, 18713, 18714, 18715, 18716, 18717, 18718, 18719, 18720, 18721, 18722, 18723, 18724, 18725, 18726, 18727, 18728, 18729, 18730, 18731, 16628, 16591, 18732, 18733, 18734, 18735, 18736, 18737, 18738, 18739, 16591, 18740, 18741, 18742, 18743, 18744, 18745, 18746, 18747, 16591, 18748, 16567, 18749, 18750, 18751, 18752, 18753, 18754, 18755, 18756, 18757, 18758, 18759, 18760, 18761, 18762, 18763, 18764, 18765, 18766, 18767, 18768, 16628, 16591, 16628, 16591, 18769, 18770, 18771, 18772, 18773, 18774, 18775, 18776, 16628, 16591, 16628, 16591, 18777, 18778, 18779, 16591, 16591, 18780, 18781, 18782, 16567, 16633, 16631, 16614, 16633, 16631, 18784, 18785, 18786, 18787, 18788, 18789, 18790, 18791, 18792, 18793, 18794, 16567, 18795, 18796, 18797, 18798, 18799, 18800, 18801, 18802, 16591, 18803, 18804, 18805, 18806, 18807, 18808, 16630, 16629, 18809, 18810, 18811, 18812, 18813, 18814, 18815, 18816, 18817, 18818, 18819, 18820, 18821, 18822, 18823, 18824, 18825, 18826, 16630, 16629, 18827, 18828, 18829, 18830, 18831, 18832, 16614, 16633, 16631, 18834, 18836, 18838, 18840, 16614, 16633, 16631, 18841, 18842, 18843, 18844, 18845, 18846, 18847, 18848, 18849, 18850, 18851, 18852, 18853, 18854, 18855, 18856, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18944, 18947, 18950, 18952, 18954, 18957, 18959, 18961, 18963, 18965, 18967, 18969, 18971, 18973, 18975, 18977, 18981, 18984, 18986, 18989, 18992, 18994, 18997, 18999, 19001, 19003, 19005, 19008, 19010, 19012, 19015, 19017, 19019, 19021, 19023, 19025, 19027, 19029, 19031, 19033, 19035, 19037, 19039, 19041, 19043, 19045, 19048, 19050, 19052, 19056, 19058, 19061, 19063, 19065, 19067, 19069, 19073, 19075, 19077, 19079, 19082, 19084, 19086, 19088, 19090, 19092, 19094, 19100, 19102, 19104, 19106, 19108, 19110, 19114, 19116, 19118, 19121, 19123, 19126, 19129, 
19131, 19134, 19136, 19138, 19140, 19142, 19146, 19148, 19150, 19152, 19154, 19156, 19158, 19160, 19162, 19164, 19166, 19168, 19170, 19173, 19175, 19177, 19179, 19181, 19183, 19185, 19187, 19189, 19191, 19193, 19196, 19198, 19201, 19203, 19205, 19207, 19209, 19211, 19215, 19217, 19219, 19225, 19227, 19229, 19231, 19234, 19236, 19238, 19240, 19242, 19245, 19248, 19250, 19252, 19254, 19256, 19259, 19261, 19263, 19265, 19267, 19270, 19272, 19274, 19276, 19278, 19280, 19282, 19285, 19287, 19290, 19293, 19295, 19297, 19300, 19302, 19304, 19306, 19309, 19312, 19315, 19317, 19319, 19322, 19324, 19326, 19330, 19332, 19334, 19336, 19338, 19340, 19342, 19344, 19347, 19349, 19353, 19357, 19359, 19362, 19364, 19366, 19372, 19374, 19377, 19381, 19383, 19385, 19387, 19391, 19393, 19395, 19397, 19399, 19402, 19405, 19407, 19410, 19419, 19421, 19423, 19425, 19427, 19429, 19431, 19433, 19436, 19445, 19447, 19449, 19451, 19453, 19455, 19457, 19460, 19463, 19467, 19469, 19471, 19473, 19476, 19481, 19483, 19485, 19487, 19493, 19496, 19498, 19501, 19503, 19505, 19507, 19509, 19512, 19514, 19516, 19518, 19520, 19522, 19524, 19526, 19528, 19530, 19532, 19534, 19536, 19543, 19546, 19548, 19550, 19554, 19557, 19563, 19565, 19568, 19570, 19575, 19577, 19579, 19581, 19583, 19587, 19589, 19591, 19593, 19595, 19597, 19599, 19601, 19603, 19605, 19607, 19609, 19611, 19613, 19615, 19617, 19619, 19621, 19623, 19625, 19628, 19630, 19632, 19634, 19637, 19639, 19641, 19643, 19645, 19647, 19649, 19652, 19654, 19656, 19658, 19661, 19663, 19665, 19667, 19670, 19672, 19674, 19681, 19684, 19686, 19690, 19692, 19694, 19697, 19699, 19701, 19703, 19706, 19708, 19710, 19715, 19717, 19719, 19721, 19724, 19726, 19728, 19733, 19735, 19737, 19739, 19742, 19744, 19746, 19753, 19755, 19758, 19761, 19764, 19768, 19770, 19772, 19774, 19777, 19779, 19781, 19784, 19787, 19789, 19792, 19794, 19797, 19800, 19802, 19804, 19806, 19808, 19810, 19812, 19814, 19816, 19818, 19823, 19825, 19827, 19829, 19832, 19834, 19836, 
19839, 19841, 19843, 19845, 19847, 19849, 19851, 19853, 19855, 19860, 19862, 19866, 19869, 19871, 19874, 19877, 19879, 19883, 19885, 19888, 19890, 19894, 19897, 19899, 19901, 19903, 19905, 19907, 19909, 19912, 19914, 19916, 19918, 19921, 19927, 19932, 19934, 19936, 19938, 19940, 19942, 19944, 19947, 19952, 19954, 19956, 19958, 19960, 19962, 19964, 19966, 19972, 19978, 19981, 19984, 19988, 19991, 19993, 19996, 19999, 20001, 20004, 20006, 20008, 20010, 20012, 20014, 20016, 20018, 20020, 20022, 20024, 20031, 20034, 20036, 20038, 20040, 20042, 20045, 20047, 20049, 20051, 20057, 20059, 20061, 20064, 20066, 20069, 20071, 20073, 20075, 20077, 20079, 20081, 20083, 20085, 20087, 20089, 20091, 20093, 20095, 20097, 20099, 20101, 20105, 20107, 20109, 20111, 20114, 20116, 20122, 20124, 20126, 20128, 20130, 20132, 20135, 20137, 20139, 20141, 20144, 20146, 20149, 20151, 20153, 20155, 20157, 20159, 20161, 20164, 20166, 20168, 20171, 20173, 20175, 20178, 20180, 20183, 20186, 20188, 20190, 20193, 20195, 20197, 20199, 20202, 20204, 20206, 20208, 20210, 20212, 20214, 20216, 20218, 20221, 20224, 20226, 20228, 20230, 20233, 20234, 19951, 19924, 20119, 19975, 20237, 20238, 20239, 20242, 20244, 20246, 20248, 20253, 20256, 20259, 20261, 20265, 20267, 20270, 16447, 16445, 20273, 20276, 20280, 20281, 18988, 20282, 20283, 18996, 20284, 20285, 19882, 19892, 20286, 20119, 20118, 20288, 20289, 19951, 20290, 20291, 19924, 20118, 20296, 20297, 19047, 20298, 20299, 19055, 19054, 20300, 20302, 20119, 20118, 20304, 20305, 19099, 16360, 16359, 19562, 15847, 15846, 19099, 16360, 16359, 19113, 19562, 19125, 20306, 20307, 19133, 20308, 20309, 16359, 20029, 16360, 19859, 19857, 19172, 20104, 19572, 19586, 19574, 19213, 19214, 20312, 20314, 19586, 19585, 17227, 17233, 19214, 19213, 19222, 19221, 19977, 16406, 16405, 19233, 20121, 20316, 20318, 20320, 20322, 20325, 20327, 20329, 20331, 20333, 20339, 20342, 17357, 17379, 20345, 20348, 20354, 20357, 20360, 20362, 20363, 20364, 20365, 19791, 20366, 20367, 
16360, 16359, 19766, 19865, 19882, 16406, 16405, 19892, 20119, 20118, 16417, 16418, 18095, 20104, 20103, 20113, 20068, 20119, 20118, 20120, 20121, 20368, 20370, 20377, 20380, 20382, 20384, 20386, 20387, 20388, 20389, 19791, 20390, 20391, 16360, 19766, 16359, 19371, 19369, 19857, 16360, 16359, 20392, 20393, 20394, 20395, 19791, 20396, 20397, 16360, 20029, 16359, 20398, 20399, 20400, 20401, 19791, 20402, 20403, 16360, 19766, 16359, 19857, 19414, 19951, 19950, 16406, 19969, 16405, 19977, 19975, 16326, 16325, 19418, 19435, 20404, 20406, 20119, 20118, 16417, 16418, 18095, 20408, 20409, 19435, 16401, 16402, 20410, 16401, 16402, 19442, 16402, 16401, 20412, 20414, 16417, 16418, 17597, 19951, 19459, 16406, 16405, 19924, 20119, 20118, 16417, 16418, 18095, 19950, 19951, 16406, 16405, 19969, 19977, 19975, 16417, 16418, 19480, 19479, 16401, 16402, 20416, 20418, 16402, 16401, 20119, 16417, 16418, 17597, 16359, 20029, 16360, 19541, 20056, 16360, 16359, 20029, 19562, 19560, 20103, 20104, 19573, 19572, 19586, 19574, 20103, 20104, 20422, 20424, 19586, 19585, 20104, 20103, 20118, 19977, 20121, 20120, 20426, 20429, 20431, 20433, 19680, 16446, 16445, 20435, 20439, 20441, 16447, 16445, 16448, 16446, 16446, 16445, 16448, 16447, 17813, 16447, 16445, 16448, 16446, 16446, 16445, 16448, 16447, 17813, 20443, 20445, 20447, 20450, 16447, 16445, 16448, 16446, 16446, 16448, 16447, 16445, 20455, 20459, 20461, 20462, 20463, 20464, 19791, 20465, 20466, 16360, 16359, 19766, 20467, 20468, 20469, 20470, 19791, 20471, 20472, 16360, 19821, 16359, 19859, 19857, 19865, 19882, 19920, 19868, 20473, 20475, 20119, 16417, 16418, 18095, 19882, 19911, 19920, 19887, 19892, 20479, 20119, 20118, 16417, 16418, 18095, 19951, 19911, 19920, 20481, 20482, 19924, 16406, 16405, 20119, 20118, 16417, 16418, 18095, 19951, 19950, 16406, 16405, 19969, 19977, 19975, 16417, 16418, 16326, 16325, 20483, 20484, 19995, 20485, 20486, 20003, 16359, 20029, 16360, 20056, 16360, 16359, 20104, 20103, 20068, 20119, 20118, 20120, 20121, 
20104, 20103, 20113, 20119, 20118, 20121, 20120, 20491, 20494, 20496, 20498, 20501, 20504, 18423, 20507, 20509, 20511, 20513, 20516, 20519, 20522, 20524, 20526, 20528, 20531, 20534, 20538, 20540, 20543, 20338, 20251, 20338, 20251, 20547, 20548, 20353, 20255, 20353, 20351, 20557, 16633, 16631, 16633, 16631, 20566, 16633, 16631, 16633, 16631, 16633, 16631, 20568, 20569, 20572, 20338, 20336, 20575, 20578, 20338, 20336, 20589, 20590, 20353, 20351, 20591, 20592, 20601, 20602, 20353, 20351, 20603, 20604, 20608, 16633, 16631, 20609, 16633, 16631, 20610, 20613, 20614, 20615, 20616, 20617, 20618, 20619, 20621, 20623, 20625, 20627, 20630, 20639, 16633, 16631, 20646, 20647, 16631, 16633, 16633, 16631, 16633, 16631, 16633, 16631, 20666, 20667, 16631, 16633, 16633, 16631, 20668, 20671, 20674, 20675, 20676, 20681, 20682, 20683, 20692, 20694, 20584, 20546, 20588, 20586, 20596, 20594, 20600, 20598, 20561, 16649, 16648, 20565, 20563, 20634, 16649, 16648, 20638, 20636, 20584, 20546, 20588, 20586, 20596, 20594, 20600, 20598, 20552, 16649, 16648, 20556, 20554, 20561, 16649, 16648, 20565, 20563, 20634, 16649, 16648, 20638, 20636, 20584, 20582, 20588, 20586, 20596, 20594, 20600, 20598, 20607, 16649, 16648, 20691, 20689, 20678, 20677, 20679, 20680, 16649, 16648, 16647, 16646, 20691, 20689, 20634, 16649, 16648, 20638, 20636, 20640, 20641, 20643, 20645, 20649, 20651, 20655, 16649, 16648, 20658, 20660, 20664, 16649, 16648, 20678, 20677, 20680, 20679, 16649, 16648, 16647, 16646, 20691, 20689, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 
225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 17707, 16896, 18283, 17256, 21262, 21260, 20772, 21263, 21264, 21265, 21266, 16417, 18949, 18946, 18956, 16458, 16457, 16916, 16913, 16919, 16925, 16922, 16051, 16050, 16452, 16451, 16454, 16453, 21280, 21281, 16334, 16333, 18983, 21286, 21284, 16334, 16333, 18991, 21289, 21287, 15776, 15775, 16971, 16968, 16334, 16333, 19007, 16342, 16339, 19014, 15782, 15781, 16995, 16992, 18021, 17540, 18104, 18110, 17005, 18113, 16264, 16263, 19881, 21292, 21290, 16997, 18068, 16392, 16391, 18302, 17009, 15827, 16263, 20772, 16401, 16404, 21293, 18331, 21295, 21296, 21297, 16417, 21154, 16418, 21115, 17004, 18101, 17540, 18054, 18110, 17005, 18113, 16264, 16263, 19881, 21299, 17035, 17008, 16392, 16391, 18302, 17009, 15827, 16263, 21150, 16403, 16402, 21302, 18154, 21303, 21304, 16417, 21154, 16418, 21155, 18347, 16334, 16333, 19545, 16339, 19081, 16342, 17986, 17016, 17024, 17021, 18101, 18098, 18104, 18107, 18110, 18113, 19946, 16264, 16263, 21306, 17035, 18126, 15827, 16263, 17038, 21309, 21310, 21311, 21313, 21314, 21315, 16417, 21154, 16418, 21155, 16334, 19060, 16333, 19838, 16342, 16339, 17060, 17059, 17088, 17091, 20801, 19096, 21317, 21318, 21319, 16362, 16361, 20804, 16357, 20790, 16358, 21320, 21321, 21322, 19567, 16333, 16334, 15849, 15848, 19081, 16339, 16342, 17085, 17082, 17091, 17088, 20801, 19096, 21323, 21324, 21325, 19556, 20804, 16357, 17113, 17110, 16358, 21326, 21327, 16236, 16215, 16334, 16333, 19120, 21328, 16334, 16333, 19128, 21331, 17149, 17146, 17155, 17152, 21167, 19144, 21334, 21335, 21336, 16361, 16362, 20033, 21177, 19145, 21337, 21338, 19864, 19542, 17165, 17162, 17171, 17168, 17177, 17174, 21197, 17183, 17180, 16392, 16391, 17193, 17190, 17196, 17244, 17197, 21339, 21340, 16404, 16403, 16402, 16401, 21341, 21342, 21343, 17199, 20998, 17704, 20836, 17206, 20839, 17213, 17247, 17244, 21344, 
21345, 18277, 17254, 17256, 17255, 21186, 21004, 16404, 16403, 16402, 16401, 21346, 21348, 21349, 17729, 20842, 17732, 20845, 21350, 20847, 21351, 20849, 16392, 16391, 17247, 17244, 21352, 21353, 17254, 17707, 17256, 17255, 20856, 21354, 21355, 17266, 17265, 21356, 21357, 21358, 20857, 16325, 20859, 16326, 21359, 21360, 16418, 16417, 19244, 16454, 16453, 16458, 16457, 19247, 19258, 16446, 16445, 15958, 15957, 19269, 16452, 16451, 16458, 16457, 16456, 16455, 19284, 16448, 16447, 17334, 19292, 19289, 19299, 16458, 16457, 16448, 16447, 16446, 16445, 21372, 19314, 19311, 19321, 16458, 16457, 19328, 16446, 16445, 21373, 16448, 16447, 16446, 16445, 20907, 19776, 16334, 16333, 21379, 19783, 16334, 16333, 21381, 19763, 16334, 16333, 21383, 17896, 17935, 19799, 19796, 20909, 16351, 21088, 16352, 21091, 16358, 16357, 21386, 21387, 21388, 16362, 16361, 19346, 17578, 17548, 18030, 18135, 20934, 20942, 17399, 17557, 18104, 18059, 17562, 17537, 19351, 19946, 21389, 21390, 16402, 16401, 21391, 21392, 21393, 16402, 16401, 21394, 21395, 20961, 21396, 21122, 21397, 20962, 21398, 18280, 18277, 18286, 18283, 21186, 21187, 18299, 18296, 16392, 16391, 18302, 18312, 18309, 21399, 21400, 16404, 16403, 16402, 16401, 21401, 21402, 18331, 18328, 21403, 21404, 21405, 21406, 16418, 16417, 21190, 17732, 16334, 19776, 16333, 21413, 19783, 16334, 16333, 21415, 19763, 16334, 16333, 21417, 17410, 17409, 19404, 19401, 17472, 17469, 16351, 21088, 16352, 21420, 21421, 21422, 16362, 16236, 19831, 16334, 16333, 19838, 16342, 16339, 17983, 17986, 20917, 21423, 21424, 16352, 16351, 20920, 16358, 16357, 21425, 21426, 21427, 19864, 16362, 16361, 19776, 16334, 16333, 21428, 19783, 16334, 16333, 21430, 19763, 16334, 16333, 21432, 17437, 17476, 19799, 19796, 17445, 17442, 16358, 16357, 21435, 21436, 21437, 16362, 16361, 19390, 16334, 19776, 16333, 21438, 19783, 16334, 16333, 21440, 19763, 16334, 16333, 21442, 17460, 17935, 19404, 19401, 17472, 17469, 16351, 21088, 16352, 21445, 21446, 21447, 16361, 16215, 
19831, 16334, 16333, 19838, 16339, 16342, 17986, 17476, 21104, 21105, 20044, 21107, 16358, 16357, 21448, 21449, 19864, 16362, 16361, 18101, 18098, 18104, 18110, 17541, 18113, 19949, 19946, 21450, 21451, 17547, 18129, 18030, 18135, 18141, 18138, 21150, 16404, 16403, 16402, 16401, 21452, 21453, 21454, 18331, 18154, 21455, 21456, 21155, 21457, 21458, 17557, 17481, 17495, 18059, 17562, 18113, 19417, 19416, 21459, 21460, 17578, 17548, 18135, 17485, 20934, 20947, 16402, 16401, 21461, 16402, 16401, 21463, 21464, 20936, 21465, 21154, 21466, 20937, 21467, 17557, 17554, 17495, 17562, 17561, 16051, 16050, 19475, 16264, 16263, 21470, 21468, 17578, 17577, 18030, 18135, 20959, 20942, 21471, 21472, 21474, 21475, 21476, 21477, 21478, 21479, 20943, 21481, 21122, 21482, 20962, 21483, 17548, 17578, 17522, 18031, 17528, 17525, 20947, 17557, 18021, 17536, 18059, 17562, 17537, 19949, 19946, 21484, 21485, 16403, 16404, 21486, 21487, 21488, 16404, 16403, 21489, 21490, 21114, 21491, 21154, 21492, 20962, 21493, 18101, 17540, 18104, 18110, 17541, 18113, 19946, 19462, 21494, 21495, 17548, 17547, 18030, 18135, 18138, 18141, 21150, 16404, 16403, 16402, 16401, 21496, 21497, 21498, 18331, 18154, 21499, 21500, 21153, 21501, 21154, 21502, 17557, 17554, 17558, 17562, 17561, 18113, 19478, 19475, 21503, 21504, 17578, 17577, 18030, 18135, 20959, 20960, 21505, 21506, 21507, 21509, 21510, 21511, 20961, 21512, 21154, 21513, 20962, 21514, 16334, 16333, 19500, 16220, 16339, 16334, 16333, 19511, 16225, 16342, 17623, 17622, 17628, 18205, 20977, 16351, 20980, 16352, 21515, 21516, 21517, 16361, 16362, 20033, 21177, 20044, 21518, 21519, 19864, 19542, 16333, 19545, 16334, 19838, 16342, 16339, 18199, 17653, 18205, 18202, 20985, 19552, 21520, 21521, 21522, 16361, 19556, 16362, 21107, 16358, 16357, 21523, 21524, 19567, 20063, 17687, 17672, 18286, 18283, 20990, 21187, 21006, 16392, 16391, 18302, 18312, 18265, 21525, 21526, 16404, 16403, 16402, 16401, 21527, 21528, 21529, 21530, 16418, 16417, 20994, 17704, 18280, 
17687, 18283, 18286, 20995, 20996, 21006, 16392, 16391, 18302, 18312, 17693, 21531, 21532, 16404, 16403, 16402, 16401, 21533, 21535, 21536, 17699, 20998, 17704, 18280, 17707, 18283, 18286, 21186, 21004, 21006, 16392, 16391, 18302, 18312, 17722, 21537, 21538, 18331, 18328, 21539, 21540, 21541, 21542, 17729, 21213, 17732, 16452, 16451, 16454, 16453, 16458, 19627, 16457, 19636, 16446, 16445, 17757, 16452, 16451, 16454, 16453, 16458, 19651, 16457, 19660, 16446, 16445, 17782, 19669, 16452, 16451, 19676, 16458, 16457, 21547, 21548, 21549, 17803, 19705, 16452, 16451, 19712, 16458, 16457, 21553, 21554, 17810, 19723, 16452, 16451, 19730, 16458, 16457, 21555, 21556, 17804, 19741, 16452, 16451, 19748, 16458, 16457, 21557, 21558, 21559, 21560, 17819, 19760, 19683, 20192, 16458, 16457, 16448, 16447, 16446, 16445, 21561, 19705, 16452, 16451, 19712, 16458, 16457, 21562, 21563, 17810, 19723, 16452, 16451, 19730, 16458, 16457, 21564, 21565, 17862, 19741, 16452, 16451, 19748, 16458, 16457, 21566, 21567, 21568, 21569, 17819, 19760, 19757, 20192, 16458, 16457, 16448, 16447, 16446, 16445, 21570, 17816, 17819, 19696, 16454, 16453, 16458, 16457, 20148, 20223, 20220, 21045, 19696, 16454, 16453, 16458, 16457, 20148, 20223, 20220, 21045, 19705, 16452, 16451, 19712, 16458, 16457, 21575, 21576, 17843, 19723, 16452, 16451, 19730, 16458, 16457, 21577, 21578, 17862, 19741, 16452, 16451, 19748, 16458, 16457, 21579, 21580, 21581, 21582, 17883, 19760, 19757, 19776, 16334, 16333, 21585, 19783, 16334, 16333, 21587, 19763, 16334, 16333, 21589, 17935, 17896, 19799, 19796, 21085, 16351, 21088, 16352, 21091, 16358, 16357, 21592, 21593, 21594, 16361, 16215, 19776, 16334, 16333, 21595, 19783, 16334, 16333, 21597, 16334, 16333, 19786, 21599, 17935, 17934, 19799, 19796, 21085, 16351, 21088, 16352, 21091, 16358, 16357, 21602, 21603, 21604, 16362, 16236, 19831, 16334, 16333, 19838, 16342, 16339, 17986, 17983, 21104, 21105, 20044, 21107, 16358, 16357, 21605, 21606, 19864, 16362, 16361, 18021, 18051, 18104, 
18110, 18059, 18113, 16264, 16263, 19881, 21607, 21608, 18071, 18068, 18030, 18031, 21119, 21609, 21610, 16402, 16401, 21611, 16402, 16401, 21613, 21114, 21614, 21122, 21615, 21115, 21616, 18051, 18021, 18022, 18110, 18059, 18113, 16264, 16263, 19881, 21617, 21618, 18071, 18126, 18031, 18030, 21119, 21619, 21620, 16401, 16402, 21621, 16402, 16401, 21623, 21624, 21135, 21625, 21122, 21626, 21155, 21627, 18101, 18051, 18054, 18059, 18110, 18113, 16264, 16263, 19946, 21628, 21629, 18071, 18068, 18074, 18132, 21133, 21630, 16403, 16404, 21633, 21634, 21635, 16403, 16404, 21636, 21637, 21135, 21638, 21154, 21639, 21155, 21640, 18101, 18098, 18104, 18110, 18107, 18113, 19949, 19946, 21641, 21642, 18129, 18126, 18135, 18132, 18141, 18138, 21150, 16404, 16403, 16402, 16401, 21643, 21644, 21645, 18154, 18151, 21646, 21647, 21153, 21648, 21154, 21649, 21155, 21650, 21651, 16334, 16333, 19990, 21654, 21652, 16334, 16333, 19998, 21657, 21655, 18199, 18196, 18205, 18202, 21167, 16352, 16351, 18219, 18216, 20026, 21658, 21659, 21660, 16362, 16361, 20033, 21177, 20044, 21180, 16358, 16357, 21661, 21662, 21663, 20063, 16362, 16361, 18280, 18277, 18283, 18260, 21186, 21187, 18299, 18296, 16392, 16391, 18302, 18312, 18265, 21664, 21665, 16404, 16403, 16402, 16401, 21666, 18331, 18328, 21667, 21668, 21669, 21670, 16418, 16417, 21190, 18274, 18280, 18277, 18286, 18283, 21196, 21197, 18299, 18296, 16392, 16391, 18302, 18312, 18309, 21671, 21672, 16404, 16403, 16402, 16401, 21673, 18331, 18328, 21674, 21675, 21676, 21677, 16418, 16417, 21213, 18347, 16452, 16451, 16454, 16453, 16458, 20148, 16457, 20220, 16446, 16445, 18376, 18373, 16452, 16451, 20134, 16458, 20148, 16457, 20220, 16446, 16445, 18376, 18373, 20143, 16452, 16451, 16458, 20148, 16457, 20220, 16446, 16445, 18376, 18373, 20163, 16452, 16451, 20170, 16458, 16457, 20177, 16446, 16445, 18400, 20185, 20182, 20192, 16458, 16457, 16448, 16447, 16446, 16445, 21684, 16454, 16453, 16452, 16451, 16458, 16457, 16456, 16455, 20223, 
20220, 21257, 20241, 16614, 16630, 16629, 16630, 16629, 16630, 16629, 16630, 16629, 16630, 16629, 20379, 16596, 16597, 20232, 16512, 20241, 16513, 21700, 21701, 16513, 16512, 16630, 16629, 16532, 16537, 21702, 21703, 21704, 16630, 16629, 20258, 16532, 21706, 21707, 20356, 16630, 16629, 16537, 21279, 16630, 16629, 20258, 21708, 21709, 20356, 16630, 16629, 21279, 16630, 16629, 21711, 21712, 16502, 21713, 21714, 16503, 20372, 21716, 21717, 16502, 21718, 21719, 16503, 16630, 16629, 20530, 21720, 21721, 16502, 16503, 16503, 16502, 16567, 16512, 16513, 16537, 16532, 21725, 21726, 16614, 16513, 16512, 16532, 16537, 21729, 21730, 21731, 20347, 16532, 21733, 21734, 21735, 20341, 16537, 21378, 21737, 20347, 16532, 21739, 21740, 21741, 20356, 16537, 21378, 20372, 16630, 16629, 21744, 21745, 21747, 21748, 20379, 20428, 16633, 16631, 20506, 20503, 21751, 21754, 16628, 20428, 16633, 16631, 16597, 16596, 16630, 16629, 21763, 21764, 16596, 16597, 21765, 21767, 21768, 16596, 21769, 21770, 16597, 16630, 16629, 21771, 21772, 16596, 21773, 21774, 16597, 21775, 21777, 21778, 16596, 21779, 21780, 16597, 20500, 20493, 21784, 20506, 20503, 20500, 16630, 16629, 21787, 20506, 20503, 16628, 20515, 16630, 16629, 20521, 20518, 16628, 20530, 16630, 16629, 20536, 20533, 20537, 20574, 20580, 20545, 21791, 21792, 21793, 21794, 21795, 21796, 21797, 21798, 20629, 16649, 16648, 21799, 21800, 21801, 21802, 21803, 21804, 21805, 21806, 21807, 21808, 20574, 20571, 20580, 20545, 21809, 21810, 21811, 21812, 21813, 21814, 21815, 21816, 21817, 21818, 21819, 21820, 21821, 21822, 21823, 21824, 21825, 21826, 21827, 21828, 21829, 21830, 21831, 20629, 16649, 16648, 20574, 20571, 20580, 20577, 21832, 21833, 21834, 21835, 21836, 21837, 21838, 21839, 21840, 21841, 21842, 21843, 21844, 20612, 20629, 20673, 20670, 21845, 21846, 21847, 21848, 21849, 21850, 21851, 21852, 21853, 21854, 20629, 16649, 16648, 21855, 21856, 21857, 21858, 21859, 21860, 21861, 21862, 21863, 21864, 21865, 21866, 21867, 21868, 21869, 21870, 
21871, 21872, 21873, 20673, 20670, 21874, 21875, 21876, 21877, 21878, 21879, 21880, 21881, 21882, 21883, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 22016, 22017, 22018, 22019, 22021, 22022, 22023, 22024, 22027, 22028, 22029, 22030, 22031, 22032, 22033, 22034, 22035, 22036, 22037, 22038, 22039, 22040, 22041, 22042, 22043, 22044, 22046, 22047, 22048, 22050, 22051, 22052, 22053, 22055, 22056, 22057, 22058, 22059, 22060, 22061, 22062, 22063, 22064, 22065, 22066, 22067, 22068, 22069, 22070, 22071, 22072, 22073, 22074, 22075, 22076, 22077, 22078, 22080, 22081, 22082, 22083, 22084, 22085, 22086, 22087, 22088, 22089, 22090, 22091, 21294, 22093, 22094, 22097, 22098, 22099, 22100, 22101, 22102, 22103, 22104, 22105, 22106, 22107, 22108, 22109, 22110, 22111, 22112, 22113, 22114, 22115, 22116, 22117, 22118, 22119, 22120, 22121, 22122, 22123, 22124, 22125, 22127, 22128, 22129, 22130, 22131, 22132, 22133, 22134, 22135, 22136, 22137, 22138, 22139, 22140, 22141, 22142, 22143, 22144, 22145, 22146, 22147, 22148, 22149, 22150, 22151, 22152, 22153, 22154, 22155, 22156, 22157, 22160, 22163, 22164, 22165, 22166, 22167, 22168, 22169, 22170, 22171, 22172, 22173, 22174, 22175, 22176, 22177, 22178, 22179, 22182, 22183, 22184, 22185, 22186, 22187, 22188, 22191, 22192, 22193, 22194, 22195, 22196, 22197, 22198, 22199, 22200, 22201, 22202, 22203, 22204, 22205, 22208, 22209, 22210, 22211, 22212, 22213, 22214, 22216, 22217, 22218, 22219, 22220, 22221, 22222, 22223, 22224, 22225, 22226, 22227, 22228, 22229, 22230, 22231, 22232, 22235, 22236, 22237, 22238, 22239, 22240, 22242, 22243, 22244, 22245, 22246, 
22247, 22248, 22249, 22250, 22251, 22252, 22253, 22254, 22255, 22256, 22257, 22258, 22259, 22260, 22262, 22263, 22264, 22265, 22266, 22267, 22269, 22270, 22271, 22272, 22273, 22274, 22275, 22276, 22277, 22278, 22280, 22281, 22282, 22283, 22284, 22285, 22286, 22287, 22288, 22289, 22291, 22293, 22294, 22295, 22296, 22298, 22300, 22301, 22302, 22303, 22304, 22305, 22307, 22308, 22309, 22310, 22311, 22312, 22314, 22315, 22316, 22319, 22320, 22321, 22322, 22323, 22325, 22326, 22327, 22328, 22329, 22330, 22331, 22332, 22333, 22334, 22335, 22336, 22337, 22338, 22339, 22340, 22341, 22342, 22343, 22344, 22345, 22346, 22347, 22348, 22349, 22350, 22351, 22352, 22353, 22354, 22355, 22356, 22357, 22359, 22360, 22361, 22362, 22363, 22364, 22365, 22366, 22368, 22369, 22370, 22371, 22372, 22373, 22374, 22375, 22377, 22378, 22379, 22381, 22382, 22383, 22384, 22385, 22386, 22387, 22388, 22389, 22390, 22391, 22392, 22393, 22394, 22395, 22396, 22399, 22400, 22401, 22402, 22403, 22404, 22405, 22406, 22407, 22408, 22409, 22410, 22411, 22412, 22413, 22414, 22415, 22416, 22418, 22419, 22420, 22423, 22424, 22425, 22427, 22429, 22431, 22433, 22434, 22435, 22436, 22437, 22438, 22439, 22440, 22441, 22442, 22443, 22444, 22445, 22446, 22448, 22449, 22450, 22451, 22452, 22454, 22455, 22456, 22458, 22460, 22461, 22462, 22463, 22464, 22465, 22466, 22468, 22469, 22470, 22472, 22473, 22474, 22475, 22476, 22477, 22478, 22479, 22480, 22481, 22482, 22483, 22484, 22485, 22488, 22489, 22490, 22491, 22492, 22493, 22494, 22495, 22496, 22497, 22498, 22499, 22501, 22502, 22503, 22504, 22505, 22506, 22509, 22510, 22511, 22512, 22513, 22514, 22516, 22517, 22518, 22520, 22521, 22522, 22523, 22524, 22525, 22526, 22527, 22528, 22529, 22530, 22531, 22532, 22535, 22536, 22537, 22538, 22539, 22540, 22542, 22543, 22544, 22546, 22547, 22548, 22549, 22550, 22551, 22552, 22553, 22554, 22555, 22556, 22557, 22558, 22559, 22562, 22563, 22564, 22565, 22566, 22567, 22568, 22569, 22570, 22571, 22572, 22573, 22574, 22575, 
22576, 22577, 22578, 22580, 22581, 22582, 22583, 22584, 22585, 22586, 22587, 22588, 22589, 22590, 22591, 22593, 22594, 22595, 22596, 22597, 22598, 22599, 22600, 22601, 22602, 22603, 22604, 22607, 22608, 22609, 22611, 22612, 22614, 22615, 22616, 22617, 22618, 22619, 22620, 22621, 22622, 22624, 22625, 22626, 22627, 22628, 22629, 22630, 22631, 22633, 22634, 22635, 22637, 22639, 22641, 22643, 22644, 22645, 22646, 22647, 22648, 22649, 22650, 22651, 22652, 22654, 22655, 22656, 22657, 22658, 22659, 22660, 22661, 22663, 22666, 22669, 22671, 22673, 22675, 22676, 22677, 22678, 22679, 22680, 22681, 22682, 22683, 22684, 22685, 22686, 22687, 22688, 22689, 22690, 22692, 22693, 22694, 22697, 22698, 22699, 22701, 22703, 22705, 22707, 22708, 22709, 22710, 22711, 22712, 22713, 22714, 22715, 22717, 22718, 22719, 22720, 22721, 22722, 22723, 22724, 22725, 22726, 22727, 22728, 22731, 22732, 22733, 22735, 22737, 22739, 22740, 22741, 22742, 22743, 22744, 22745, 22746, 22747, 22749, 22750, 22751, 22752, 22753, 22754, 22755, 22758, 22760, 22761, 22763, 22765, 22767, 22768, 22769, 22770, 22771, 22772, 22773, 22774, 22775, 22776, 22777, 22778, 22779, 22780, 22781, 22782, 22783, 22784, 22785, 22788, 22789, 22790, 22791, 22792, 22793, 22795, 22796, 22797, 22798, 22799, 22800, 22801, 22802, 22803, 22804, 22805, 22806, 22807, 22808, 22809, 22812, 22813, 22814, 22815, 22816, 22817, 22818, 22820, 22821, 22822, 22823, 22824, 22825, 22826, 22827, 22828, 22829, 22830, 22831, 22832, 22833, 22834, 22836, 22837, 22838, 22839, 22840, 22842, 22844, 22845, 22846, 22847, 22848, 22849, 22850, 22851, 22852, 22853, 22854, 22855, 22856, 22857, 22858, 22859, 22860, 22862, 22863, 22864, 22865, 22867, 22869, 22870, 22871, 22872, 22873, 22874, 22875, 22876, 22877, 22878, 22879, 22880, 22881, 22882, 22883, 22884, 22886, 22887, 22888, 22890, 22892, 22893, 22894, 22895, 22896, 22897, 22898, 22899, 22900, 22901, 22902, 22903, 22904, 22905, 22906, 22907, 22908, 22909, 22910, 22911, 22912, 22913, 22914, 22915, 22916, 
22917, 22918, 22919, 22920, 22921, 22922, 22923, 22926, 22927, 22928, 22929, 22930, 22931, 22932, 22933, 22935, 22936, 22937, 22938, 22939, 22940, 22941, 22942, 22944, 22945, 22946, 22947, 22948, 22949, 22950, 22951, 22953, 22955, 22956, 22957, 22958, 22959, 22960, 22961, 22962, 22963, 22964, 22966, 22967, 22968, 22969, 22970, 22971, 22972, 22974, 22975, 22976, 22977, 22978, 22979, 22980, 22981, 22983, 22984, 22985, 22986, 22987, 22988, 22989, 22990, 22992, 22994, 22995, 22996, 22997, 22998, 22999, 23000, 23001, 23002, 23003, 23005, 23006, 23007, 23008, 23009, 23010, 23011, 23012, 23013, 23014, 23015, 23016, 23017, 23018, 23019, 23020, 23021, 23022, 23023, 23024, 23025, 23026, 23027, 23028, 23029, 23030, 23031, 23033, 23034, 23035, 23036, 23037, 23038, 23039, 23040, 23042, 23043, 23044, 23045, 23046, 23047, 23048, 23049, 23051, 23053, 23054, 23055, 23056, 23057, 23058, 23060, 23061, 23062, 23064, 23065, 23066, 23067, 23068, 23069, 23070, 23071, 23072, 23073, 23074, 23075, 23076, 23077, 23078, 23079, 23082, 23083, 23084, 23085, 23086, 23088, 23089, 23090, 23092, 23093, 23094, 23095, 23096, 23097, 23098, 23099, 23100, 23101, 23102, 23103, 23104, 23105, 23106, 23107, 23110, 23111, 23112, 23113, 23114, 23115, 23116, 23117, 23118, 23119, 23120, 23121, 23122, 23123, 23124, 23125, 23126, 23128, 23129, 23130, 23131, 23132, 23133, 23134, 23135, 23136, 23137, 23138, 23139, 23140, 23142, 23143, 23144, 23145, 23146, 23147, 23149, 23150, 23152, 23153, 23154, 23155, 23157, 23159, 23161, 23162, 23163, 23164, 23165, 23166, 23167, 23168, 23169, 23170, 23172, 23173, 23174, 23175, 23176, 23177, 23179, 23180, 21622, 23182, 23183, 23184, 23186, 23188, 23190, 23192, 23193, 23194, 23195, 23196, 23197, 23198, 23199, 23200, 23201, 23203, 23204, 23205, 23206, 23207, 23208, 23209, 23210, 23211, 23214, 23215, 23216, 23218, 23220, 23222, 23224, 23225, 23226, 23227, 23228, 23229, 23230, 23231, 23232, 23234, 23235, 23236, 23237, 23238, 23239, 23240, 23241, 23242, 23243, 23244, 23245, 23248, 
23249, 23250, 23252, 23254, 23256, 23257, 23259, 23260, 23261, 23263, 23264, 23265, 23266, 23268, 23269, 23270, 23271, 23272, 23273, 23274, 23275, 23276, 23277, 23278, 23279, 23282, 23283, 23284, 23285, 23286, 23287, 23288, 23289, 23290, 23293, 23294, 23295, 23296, 23297, 23298, 23299, 23300, 23301, 23302, 23303, 23304, 23305, 23306, 23307, 23308, 23309, 23311, 23312, 23313, 23314, 23315, 23316, 23317, 23318, 23320, 23322, 23323, 23324, 23325, 23326, 23327, 23328, 23329, 23330, 23331, 23332, 23333, 23334, 23335, 23336, 23337, 23338, 23339, 23341, 23342, 23343, 23344, 23345, 23346, 23347, 23348, 23350, 23352, 23353, 23354, 23355, 23356, 23357, 23358, 23359, 23360, 23361, 23362, 23363, 23364, 23365, 23366, 23367, 23368, 23369, 23370, 23371, 23372, 23373, 23374, 23375, 23376, 23377, 23378, 23379, 23380, 23381, 23382, 23383, 23384, 23385, 23386, 23387, 23388, 23389, 23390, 23391, 23392, 23393, 23394, 23395, 23396, 23397, 23398, 23399, 23400, 23401, 23402, 23403, 23404, 23405, 23406, 23407, 23408, 23410, 23411, 23412, 23413, 23414, 23415, 23416, 23417, 23418, 23419, 23420, 23421, 23422, 23423, 23424, 23425, 23426, 23427, 23428, 23429, 23430, 23431, 23432, 23433, 23434, 23435, 23436, 23437, 23438, 23439, 23440, 23442, 23443, 23444, 23445, 23446, 23447, 23448, 23451, 23452, 23453, 23454, 23455, 23457, 23458, 23459, 23460, 23461, 23462, 23463, 23464, 23465, 23467, 23468, 23469, 23470, 23471, 23472, 23473, 23475, 23476, 23478, 23479, 23480, 23482, 23483, 23485, 23486, 23487, 23488, 23489, 23491, 23492, 23493, 23494, 23495, 23496, 23497, 23498, 23499, 23500, 23502, 23503, 23504, 23505, 23506, 23507, 23510, 23511, 23512, 23515, 23516, 23517, 23519, 23520, 23521, 23524, 23525, 23526, 23527, 23528, 23529, 23530, 23532, 23534, 23535, 23536, 23537, 23538, 23539, 23542, 23543, 23544, 23545, 23546, 23547, 23548, 23549, 23550, 23552, 23553, 23555, 23557, 23558, 23560, 23561, 23562, 23563, 23565, 23566, 23568, 23570, 23572, 23573, 23575, 23576, 23577, 23579, 23580, 23581, 23582, 
23583, 23585, 23586, 23587, 23588, 23589, 23590, 23591, 23592, 23593, 23594, 23595, 23596, 23597, 23598, 23599, 23600, 23601, 23602, 23603, 23605, 23607, 23609, 23611, 23612, 23613, 23614, 23617, 23619, 23622, 23624, 23625, 23626, 23627, 23628, 23630, 23632, 23634, 23636, 23639, 23641, 23644, 23646, 23649, 23651, 23652, 23653, 23654, 23655, 23656, 23657, 23658, 23660, 23662, 23664, 23666, 23669, 23671, 23672, 23673, 23674, 23675, 23677, 23679, 23681, 23683, 23685, 23686, 23687, 23688, 23691, 23693, 23694, 23554, 23699, 23704, 23569, 23707, 23708, 23709, 23711, 23713, 23715, 23717, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23808, 23810, 23814, 23817, 23819, 23822, 23825, 23827, 23829, 23831, 23834, 23838, 23842, 23844, 23846, 23849, 23852, 23854, 23856, 23859, 23862, 23866, 23868, 23871, 23875, 23885, 23888, 23891, 23894, 23895, 23897, 23900, 23904, 23906, 23908, 23914, 23917, 23920, 23922, 23924, 23927, 23930, 23933, 23934, 23936, 23945, 23948, 23951, 23953, 23957, 23958, 23964, 23966, 23968, 23970, 23973, 23975, 23979, 23983, 23987, 23989, 23992, 23993, 23996, 23997, 23999, 24003, 24004, 24010, 24012, 24014, 24016, 24019, 24021, 24023, 24026, 24029, 24031, 24033, 24042, 24045, 24047, 24051, 24053, 24062, 24064, 24067, 24069, 24073, 24075, 24081, 24083, 24086, 24089, 24092, 24094, 24097, 24099, 24101, 24105, 24107, 24110, 24112, 24114, 24116, 24119, 24122, 24124, 24127, 24130, 24133, 24136, 24137, 24139, 24146, 24148, 24149, 24152, 24154, 24158, 24161, 24164, 24167, 24169, 24170, 24176, 
24178, 24182, 24184, 24187, 24190, 24192, 24195, 24199, 24203, 24206, 24209, 24212, 24213, 24215, 24217, 24222, 24223, 24225, 24228, 24231, 24235, 24238, 24240, 24241, 24244, 24247, 24250, 24253, 24254, 24256, 24258, 24260, 24262, 24263, 24266, 24269, 24272, 24275, 24276, 24278, 24280, 24285, 24286, 24288, 24291, 24294, 24300, 24303, 24306, 24309, 24312, 24315, 24317, 24319, 24322, 24324, 24326, 24327, 24332, 24335, 24338, 24341, 24343, 24347, 24349, 24355, 24358, 24360, 24362, 24366, 24368, 24378, 24380, 24382, 24385, 24388, 24391, 24394, 24396, 24397, 24403, 24406, 24409, 24412, 24414, 24416, 24419, 24421, 24423, 24424, 24429, 24432, 24435, 24438, 24440, 24446, 24450, 24453, 24455, 24458, 24460, 24462, 24468, 24469, 24475, 24477, 24480, 24483, 24485, 24489, 24490, 24494, 24497, 24499, 24501, 24506, 24509, 24512, 24514, 24518, 24522, 24524, 24529, 24532, 24535, 24537, 24543, 24545, 24550, 24553, 24556, 24563, 24565, 24567, 24570, 24574, 24576, 24578, 24581, 24585, 24588, 24591, 24593, 24596, 24601, 24604, 24609, 24612, 24615, 24618, 24620, 24623, 24625, 24627, 24630, 24635, 24638, 24643, 24646, 24649, 24652, 24654, 24657, 24659, 24663, 24666, 24669, 24672, 24675, 24678, 24681, 24684, 24689, 24692, 24697, 24700, 24703, 24706, 24708, 24711, 24714, 24717, 24718, 24720, 24727, 24729, 24730, 24732, 24735, 24738, 24741, 24742, 24744, 24751, 24753, 24754, 24756, 24759, 24762, 24768, 24771, 24774, 24777, 24780, 24784, 24786, 24790, 24792, 24794, 24798, 24801, 24804, 24808, 24810, 24814, 24817, 24823, 24826, 24829, 24833, 24835, 24838, 24839, 24841, 24842, 24848, 24851, 24854, 24857, 24859, 24861, 24864, 24866, 24868, 24869, 24876, 24880, 24884, 24886, 24889, 24891, 24894, 24895, 24901, 24903, 24904, 24907, 24909, 24913, 24915, 24918, 24921, 24923, 24925, 24926, 24930, 24934, 24936, 24940, 24942, 24945, 24948, 24950, 24952, 24953, 24957, 24961, 24963, 24965, 24968, 24971, 24973, 24976, 24979, 24982, 24984, 24987, 24990, 24993, 24995, 24998, 25001, 25005, 25007, 25010, 
25012, 25014, 25016, 25018, 25020, 25022, 25027, 25029, 25031, 25033, 25035, 24202, 23943, 23816, 25041, 25045, 25047, 25049, 25052, 25057, 25062, 25066, 24661, 24696, 25070, 25081, 25085, 25087, 24899, 23879, 23884, 23882, 23880, 23913, 23911, 23909, 24899, 24202, 23943, 23941, 23956, 23963, 23961, 23978, 23982, 24002, 24008, 24037, 24035, 24041, 24039, 24050, 24058, 24056, 22299, 22297, 24072, 24079, 24077, 25090, 25092, 25096, 25098, 24144, 24142, 24157, 22432, 22430, 22428, 24181, 24202, 25113, 25118, 25120, 25122, 24221, 24284, 24298, 24331, 24346, 22642, 22640, 22638, 24371, 22668, 22665, 21473, 22674, 22672, 22670, 22706, 22704, 22702, 22738, 22736, 24443, 22757, 22766, 22764, 22762, 24467, 24465, 24473, 24488, 24504, 24521, 24527, 24542, 24540, 24548, 24562, 24560, 25126, 25128, 25130, 25133, 24600, 24608, 24634, 24642, 24661, 24696, 25139, 24688, 24696, 24725, 24723, 24749, 24747, 24766, 24789, 23160, 23158, 23156, 24813, 23191, 23189, 23187, 23223, 23221, 23219, 24875, 23255, 23253, 24899, 24912, 24933, 24939, 24960, 25149, 25151, 25153, 25156, 25159, 25162, 25165, 25168, 25170, 25025, 25172, 25103, 25106, 25109, 25112, 25178, 25037, 25181, 25076, 25039, 25038, 25183, 25040, 25185, 25187, 25056, 25061, 25065, 25069, 25193, 25075, 25073, 25195, 25076, 25080, 25078, 25197, 25199, 25202, 25204, 25101, 25103, 25104, 25106, 25107, 25109, 25110, 25112, 25210, 25212, 25214, 25218, 25221, 25125, 25224, 25226, 25227, 25228, 25138, 25136, 25229, 25144, 25142, 25230, 25231, 25148, 25146, 25232, 25236, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 
211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 25348, 25352, 25354, 25355, 25358, 25359, 25364, 25366, 25367, 25371, 25374, 25375, 25379, 25380, 25385, 25388, 25389, 25390, 25396, 25398, 25404, 25406, 25411, 25420, 25426, 25435, 25436, 25437, 25439, 25440, 25442, 25444, 25445, 25448, 25449, 25450, 25452, 25453, 25454, 25460, 25472, 25474, 25478, 25479, 25480, 25487, 25488, 25493, 25494, 25495, 25496, 25503, 25504, 25505, 25506, 25513, 25514, 25517, 25524, 25538, 25556, 25566, 25568, 25573, 25575, 25576, 25580, 25585, 25587, 25592, 25594, 25598, 25601, 25603, 25604, 25605, 25607, 25608, 25609, 25610, 25612, 25613, 25614, 25615, 25616, 25617, 25620, 25621, 25623, 25624, 25625, 25626, 25627, 25628, 25631, 25632, 25634, 25635, 25637, 25638, 25640, 25641, 25642, 25643, 25644, 25645, 25648, 25649, 25650, 25657, 25658, 25659, 25666, 25667, 25670, 25673, 25681, 25688, 25701, 25705, 25706, 25712, 25715, 25719, 25721, 25729, 25731, 25736, 25738, 25739, 25741, 25742, 25743, 25745, 25746, 25747, 25749, 25750, 25751, 25753, 25754, 25756, 25758, 25345, 23929, 23926, 23815, 25766, 25767, 25768, 25769, 25770, 25773, 25774, 25775, 25776, 25351, 23824, 25777, 25778, 24705, 25024, 24592, 24662, 25024, 25780, 25357, 25361, 24893, 25709, 25713, 25783, 23861, 23858, 25784, 23877, 25785, 25786, 25787, 23890, 23887, 25378, 25377, 25788, 25789, 25790, 25382, 24893, 25709, 25713, 25791, 23929, 23926, 23940, 25792, 25793, 25794, 25392, 25795, 25394, 25796, 25797, 23965, 25400, 25798, 23980, 23985, 25799, 25403, 25409, 25800, 25801, 25412, 25414, 24018, 24025, 25417, 24028, 25802, 25803, 25804, 25805, 24044, 25425, 25806, 25807, 25808, 25428, 25809, 25810, 24066, 25431, 25811, 25433, 25434, 25812, 25813, 25457, 25458, 25818, 25819, 25462, 25820, 24163, 24160, 24166, 24172, 25467, 25821, 25822, 25823, 25470, 25824, 
24189, 24197, 25825, 25477, 25826, 25483, 25830, 24219, 25486, 24233, 25491, 25490, 25499, 25501, 25509, 25831, 24282, 25512, 24296, 25516, 25832, 24311, 24308, 24314, 25522, 24321, 24329, 25833, 24337, 24334, 24340, 25532, 25834, 24351, 22632, 25835, 25836, 25837, 25537, 24357, 25540, 25838, 25839, 25840, 25841, 25842, 25843, 25844, 25542, 24384, 24390, 24387, 24393, 24399, 25548, 25845, 25846, 25847, 24408, 24405, 24411, 25554, 24418, 24426, 25848, 25849, 24434, 24431, 24437, 25564, 25850, 25565, 25851, 25852, 25853, 25854, 25571, 25855, 25856, 25857, 25574, 25578, 25858, 25581, 25582, 25584, 25859, 24511, 25860, 25589, 25591, 25861, 24534, 25862, 25863, 25597, 25864, 24555, 24558, 25865, 25866, 24592, 24705, 25024, 25871, 25872, 24617, 25873, 25874, 24651, 24671, 24680, 24671, 24680, 25875, 25876, 24662, 24671, 24680, 25878, 25879, 24705, 25653, 25654, 25880, 25881, 25656, 25662, 25663, 25882, 25883, 25665, 24764, 25669, 25884, 24779, 24776, 25675, 25885, 25678, 23151, 25886, 25887, 25888, 24803, 24800, 25683, 25889, 24819, 24816, 25890, 25891, 25892, 24828, 24825, 25690, 25691, 24844, 25693, 25893, 25894, 25895, 24853, 24850, 24856, 25699, 24863, 24871, 25896, 25897, 25898, 25708, 24893, 25709, 25713, 25899, 25717, 25900, 24920, 24928, 25901, 25725, 25727, 25902, 24947, 24955, 25903, 25735, 25906, 25024, 25908, 25910, 25913, 25094, 25761, 25100, 25762, 25915, 25763, 25916, 25764, 25917, 25765, 25918, 25919, 25920, 25868, 25922, 25923, 25924, 25926, 25870, 25044, 25051, 25929, 25930, 25931, 25932, 25779, 25934, 25935, 25937, 25938, 25939, 25781, 25941, 25827, 25782, 25814, 25094, 25816, 25100, 25944, 25945, 25946, 25947, 25948, 25949, 25950, 25951, 25909, 25911, 25827, 25829, 25904, 25905, 25907, 25909, 25911, 25956, 25957, 25868, 25869, 25870, 25962, 25963, 25877, 25965, 25966, 25969, 25970, 25904, 25905, 25907, 25909, 25911, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 
42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 26255, 26256, 26257, 23812, 26152, 23813, 26258, 25346, 26259, 26112, 26268, 26269, 26213, 26215, 26217, 26272, 26251, 23409, 26254, 26273, 26191, 26274, 26217, 26275, 26251, 23409, 26254, 26276, 23841, 23837, 26278, 26117, 26279, 26280, 26281, 26233, 26282, 26234, 26284, 26285, 23865, 26119, 23874, 26287, 26288, 26291, 26292, 25372, 26122, 23903, 26293, 26294, 26295, 26125, 26298, 26299, 26300, 26233, 26301, 26234, 26303, 26304, 25386, 26152, 23939, 26305, 22159, 26306, 26129, 26309, 26311, 26312, 26314, 26131, 26315, 26317, 26318, 26320, 25407, 25405, 26321, 26134, 26324, 26325, 26326, 26327, 26328, 26329, 25422, 26330, 26332, 26334, 26335, 22290, 26337, 26339, 26342, 26343, 26345, 26346, 26138, 25438, 26141, 24104, 26138, 25438, 26141, 24104, 26143, 22358, 26145, 22367, 26254, 24126, 26143, 22358, 26145, 22367, 26254, 24126, 25455, 22380, 22376, 26349, 26350, 26151, 26353, 26355, 26356, 26357, 26358, 26359, 26360, 26363, 26152, 26365, 26366, 24194, 26368, 26240, 25740, 
25481, 22471, 22467, 26370, 26372, 26373, 26158, 26374, 26375, 26376, 26159, 25497, 22519, 22515, 26377, 26378, 26163, 25507, 22545, 22541, 26379, 26381, 26382, 26168, 26383, 26384, 26169, 26386, 26387, 26388, 26389, 26390, 26391, 25526, 26393, 26394, 26395, 26396, 26398, 26399, 26400, 26403, 26404, 24365, 26405, 26407, 26410, 26413, 26414, 26415, 26416, 26417, 26418, 26419, 26420, 26423, 26424, 26425, 26426, 26427, 26428, 25558, 26429, 26431, 26432, 26433, 26434, 26436, 26438, 25569, 25567, 26441, 26442, 26175, 26445, 26177, 26446, 26178, 26448, 26449, 26450, 26179, 26452, 24516, 26454, 26455, 26181, 26457, 22866, 26458, 26460, 26183, 26462, 26463, 26464, 26185, 24573, 26188, 24584, 26191, 26466, 26217, 26467, 26251, 23409, 26254, 26468, 26193, 26195, 26197, 26471, 26198, 22965, 26201, 26203, 26205, 26474, 26206, 23004, 26209, 26475, 26211, 26476, 26209, 26477, 26211, 26478, 26213, 26215, 26217, 26481, 26251, 23409, 26209, 26482, 26211, 26483, 26213, 26215, 26217, 26486, 26251, 23409, 25651, 23063, 23059, 26487, 26488, 26491, 25660, 23091, 23087, 26492, 26493, 26496, 26225, 26497, 26498, 26226, 26500, 26501, 24783, 26502, 26504, 26505, 26506, 26509, 26510, 24807, 26511, 26513, 26514, 26515, 26518, 26519, 24832, 26520, 26521, 26522, 26523, 26524, 26527, 26528, 26529, 26530, 26531, 26532, 25703, 26533, 24883, 24879, 26536, 26537, 26538, 26233, 26539, 26234, 26541, 26235, 26543, 26544, 25723, 26546, 26547, 26237, 26549, 26550, 25733, 26552, 26240, 25740, 26243, 25744, 26246, 25748, 26249, 25004, 26251, 23409, 26254, 26554, 26558, 26559, 26560, 26561, 26563, 26565, 26567, 26571, 26573, 26576, 26262, 26577, 26263, 26578, 26264, 26265, 26266, 26267, 26583, 26584, 26587, 26277, 26589, 26591, 26592, 26593, 26594, 26595, 26596, 26369, 26605, 26556, 26606, 26607, 26608, 26609, 26610, 26553, 26611, 26555, 26612, 26556, 26613, 26616, 26617, 26618, 26619, 26621, 26622, 26624, 26626, 26627, 26553, 26628, 26555, 26629, 26556, 26630, 26600, 26598, 26604, 26602, 168, 169, 170, 
171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 26881, 26883, 26884, 26885, 26887, 26888, 26889, 26890, 26892, 26893, 26894, 26896, 26897, 26898, 26900, 26902, 26904, 26905, 26906, 26908, 26909, 26911, 26913, 26915, 26916, 26917, 26918, 26920, 26921, 26922, 26286, 26924, 26925, 26927, 26928, 26929, 26930, 26932, 26933, 26935, 26937, 26938, 26939, 26940, 26942, 26943, 26944, 26946, 26947, 26948, 26953, 26956, 26958, 26959, 26961, 26965, 26968, 26973, 26975, 26979, 26980, 26981, 26982, 26983, 26984, 26985, 26986, 26987, 26988, 26989, 26990, 26991, 26992, 26993, 26994, 26995, 26996, 26997, 26998, 26999, 27000, 27001, 27002, 27004, 27005, 27007, 27010, 27012, 27014, 27017, 26367, 27019, 27020, 27021, 27022, 27023, 26371, 27027, 27029, 27031, 27032, 27033, 27034, 27037, 27038, 27039, 27040, 26380, 27044, 27046, 27047, 27048, 27054, 27055, 27059, 27061, 27062, 27064, 27066, 27067, 27070, 27073, 27075, 27076, 27082, 27084, 27088, 27089, 27090, 27091, 27094, 27096, 27098, 27102, 27104, 26453, 27107, 27109, 27112, 27116, 27117, 27118, 27119, 27120, 27122, 27124, 27125, 27126, 27128, 27129, 27130, 27132, 27133, 27134, 27135, 27136, 27138, 27139, 27140, 27142, 27144, 27146, 27148, 27149, 27150, 27152, 27153, 27154, 27156, 27158, 27159, 27160, 27162, 27163, 27164, 27165, 27166, 27168, 27170, 27171, 27172, 27174, 27176, 27178, 27179, 27180, 27182, 27184, 27186, 27187, 27189, 27191, 27193, 27194, 27196, 27199, 27201, 27202, 27208, 27209, 27210, 27211, 27213, 27215, 27216, 27217, 27219, 27222, 26545, 27225, 27228, 26551, 27230, 27231, 27232, 27233, 27234, 27235, 27236, 27237, 27238, 27239, 27240, 26952, 26950, 
26955, 26962, 26964, 26336, 26971, 26344, 26435, 26406, 26397, 27069, 27206, 27095, 27100, 26451, 26456, 26461, 27115, 26952, 26950, 26955, 26962, 26964, 26336, 26971, 26344, 27252, 27254, 27256, 27257, 27258, 27259, 26435, 27069, 26397, 27206, 26406, 27095, 27100, 26451, 26456, 26461, 27115, 27263, 26952, 26950, 26955, 26962, 26964, 26336, 26971, 26344, 26354, 27069, 27206, 26364, 27271, 27273, 26503, 26512, 27198, 27206, 26542, 26548, 27279, 27281, 27283, 27052, 26397, 26406, 27069, 27080, 26435, 27095, 27100, 26451, 26456, 26461, 27115, 26503, 26512, 27198, 27206, 26542, 26548, 27294, 27296, 27298, 27242, 27244, 26564, 26562, 26568, 26566, 27249, 27250, 27251, 27261, 27262, 27266, 27268, 27270, 27300, 27301, 27302, 27303, 27276, 27278, 27285, 27287, 27288, 27290, 27291, 27293, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 26886, 27411, 26945, 27444, 27450, 27451, 27472, 27475, 27016, 27485, 27492, 27496, 27053, 27081, 27520, 27566, 27569, 27570, 27573, 27207, 27592, 27221, 27227, 26912, 27417, 27415, 27421, 27419, 27423, 27427, 27425, 27429, 26934, 27434, 27432, 27438, 27436, 26949, 27615, 27616, 26954, 26957, 27617, 27618, 27446, 27619, 26967, 26969, 27620, 27621, 26974, 
27622, 27455, 27453, 27459, 27457, 27463, 27461, 27465, 27469, 27467, 27471, 27045, 27502, 27495, 27028, 27491, 27026, 27043, 27623, 27086, 27519, 27624, 27509, 27511, 27625, 27057, 27507, 27072, 27626, 27514, 27627, 27204, 27628, 27522, 27097, 27629, 27524, 27103, 27630, 27527, 27108, 27631, 27110, 27113, 27632, 27633, 27484, 27409, 26903, 26901, 26907, 27538, 27123, 27121, 27127, 26912, 27417, 27415, 27395, 27393, 26934, 27434, 27432, 27438, 27436, 26949, 27634, 27635, 26954, 26957, 27636, 27637, 27446, 26967, 27638, 26969, 27639, 27640, 26974, 27641, 27455, 27453, 27459, 27457, 27463, 27461, 27465, 27469, 27467, 27471, 27028, 27491, 27045, 27502, 27495, 27043, 27026, 27648, 27086, 27519, 27072, 27649, 27514, 27650, 27057, 27507, 27651, 27204, 27652, 27509, 27511, 27653, 27522, 27097, 27654, 27524, 27103, 27655, 27527, 27108, 27656, 27110, 27113, 27657, 27658, 27404, 26895, 26271, 26270, 26899, 27409, 26903, 26901, 26907, 27538, 27123, 27121, 27127, 27484, 26912, 27417, 27415, 27421, 27419, 27423, 27427, 27425, 27429, 26934, 27434, 27432, 27438, 27436, 26949, 27660, 27661, 26954, 26957, 27662, 27663, 27446, 26967, 27664, 26969, 27665, 27666, 26974, 27667, 27455, 27453, 27459, 27457, 27463, 27461, 27465, 27469, 27467, 27471, 27177, 27576, 27009, 27668, 27479, 27072, 27669, 27514, 27670, 27204, 27597, 27595, 27015, 27671, 27613, 27611, 27241, 27484, 27177, 27576, 27674, 27578, 27580, 27675, 27582, 27584, 27676, 27586, 27588, 27677, 27204, 27597, 27595, 27220, 27678, 27226, 27679, 27605, 27607, 27609, 27613, 27611, 27241, 27026, 27028, 27491, 27495, 27043, 27045, 27502, 27683, 27050, 27684, 27057, 27507, 27685, 27509, 27511, 27072, 27686, 27514, 27687, 27078, 27688, 27086, 27519, 27689, 27522, 27097, 27690, 27524, 27103, 27691, 27527, 27108, 27692, 27110, 27113, 27693, 27694, 27534, 27532, 27538, 27123, 27121, 27127, 27544, 27131, 26470, 26469, 27549, 27137, 26473, 26472, 27141, 27143, 27145, 27147, 27558, 27151, 26480, 26479, 27155, 27157, 27565, 27161, 26485, 
26484, 27177, 27576, 27695, 27578, 27580, 27696, 27582, 27584, 27697, 27586, 27588, 27698, 27204, 27597, 27595, 27220, 27699, 27226, 27700, 27605, 27607, 27609, 27613, 27611, 27241, 27704, 27705, 27706, 27707, 27708, 27709, 27710, 27711, 27712, 27253, 27255, 26580, 26579, 26582, 26581, 27713, 27714, 27264, 27715, 27716, 27717, 27718, 27720, 27274, 27272, 27722, 27723, 27280, 27284, 27282, 27724, 27725, 27726, 27727, 27728, 27729, 27295, 27299, 27297, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 27910, 27913, 27914, 27915, 27919, 27921, 27927, 26910, 27928, 27929, 27930, 27931, 27932, 27933, 27934, 27935, 27936, 27937, 27938, 27939, 27940, 27440, 27941, 27942, 27944, 27945, 26960, 27948, 27950, 27951, 27952, 27954, 26976, 27909, 27956, 27957, 27958, 27959, 27960, 27961, 27962, 27963, 27964, 27965, 27966, 27967, 27968, 27969, 27970, 27971, 27972, 27974, 27975, 27977, 27978, 27980, 27981, 27982, 27984, 27986, 27591, 27092, 27988, 27989, 27991, 27992, 27994, 27995, 27997, 27998, 28001, 28002, 28003, 28004, 28005, 28006, 28007, 28008, 28009, 28010, 26910, 28011, 28012, 28013, 28014, 27397, 28015, 28016, 28017, 28018, 28019, 27440, 28020, 28021, 28023, 28024, 26960, 28027, 28028, 28030, 28031, 28033, 26976, 27909, 28035, 28036, 28037, 28038, 28039, 28040, 28041, 28042, 28043, 28044, 28045, 28046, 28047, 28048, 28049, 28050, 28051, 28053, 28054, 28055, 28057, 28059, 28060, 28062, 27591, 28064, 28065, 27092, 28067, 28068, 28070, 28071, 28073, 28074, 28076, 28077, 28080, 
28081, 28082, 28083, 28084, 28085, 28086, 28087, 28088, 28089, 28090, 28091, 28092, 28093, 28094, 26910, 28095, 28096, 28097, 28098, 28099, 28100, 28101, 28102, 28103, 28104, 28105, 28106, 28107, 27440, 28108, 28109, 28111, 28112, 26960, 28115, 28116, 28118, 28119, 28121, 26976, 27909, 28123, 28124, 28125, 28126, 28127, 28128, 28129, 28130, 28131, 28132, 27476, 28133, 28134, 28135, 28137, 28138, 28140, 28142, 27591, 27212, 28143, 28144, 28145, 27482, 28147, 28148, 28149, 28150, 27169, 27175, 28151, 28152, 28154, 28155, 28157, 28158, 28160, 28161, 28163, 27591, 27212, 28164, 28165, 28166, 27600, 28168, 27603, 28170, 28171, 28172, 28173, 28174, 28175, 28176, 28177, 28178, 28179, 28180, 28181, 28182, 28184, 26392, 28186, 28187, 28189, 28190, 28191, 28193, 28195, 27083, 28197, 28198, 27092, 28200, 28201, 28203, 28204, 28206, 28207, 28209, 28210, 28213, 28214, 28215, 28216, 28217, 28218, 28219, 28220, 28221, 28222, 28223, 28224, 28225, 28226, 28227, 28228, 28229, 28230, 28231, 28232, 28233, 28234, 28235, 28236, 28237, 28238, 28239, 28240, 27169, 27175, 28241, 28242, 28244, 28245, 28247, 28248, 28250, 28251, 28253, 27591, 27212, 28254, 28255, 28256, 27600, 28258, 27603, 28260, 28261, 28262, 28263, 28264, 28265, 28268, 28270, 28275, 28276, 28277, 28278, 28279, 28280, 28283, 28289, 28290, 28293, 28294, 28295, 28302, 28303, 28304, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 
212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 28423, 28424, 28426, 28429, 28433, 28435, 28437, 28441, 28442, 27947, 27949, 28448, 28449, 28450, 28452, 28454, 28457, 27035, 27024, 27041, 27973, 27976, 27979, 28473, 27985, 28476, 28477, 27987, 27990, 28481, 28483, 28485, 28487, 28491, 28496, 28497, 28499, 28501, 28503, 28505, 28507, 28511, 28512, 28026, 28514, 28518, 28519, 28520, 28522, 28524, 28527, 27035, 27041, 27024, 28052, 28539, 28058, 28061, 28544, 28063, 28547, 28066, 28069, 28551, 28553, 28555, 28556, 28558, 28561, 28565, 28571, 28572, 28574, 28577, 28581, 28583, 28585, 28589, 28590, 28114, 28592, 28596, 28597, 28598, 28600, 28602, 28605, 27003, 28608, 28611, 28613, 28141, 28616, 28617, 28618, 28620, 28621, 28622, 27167, 28626, 27173, 28627, 28153, 28156, 28159, 28162, 28637, 28638, 28639, 28641, 28642, 28643, 28644, 28648, 27024, 27035, 27041, 28183, 28659, 28185, 28188, 28664, 28194, 28667, 28196, 28670, 28199, 28202, 28674, 28676, 28678, 28679, 28681, 28685, 28687, 28689, 28691, 28697, 28699, 28703, 28705, 27167, 28707, 27173, 28708, 28243, 28246, 28249, 28252, 28718, 28719, 28720, 28722, 28723, 28724, 28725, 28729, 28439, 28447, 25175, 25177, 28461, 28464, 26569, 25182, 25184, 28509, 28517, 25190, 28736, 25192, 28738, 28531, 28533, 25194, 25196, 25198, 26590, 28587, 28595, 25207, 25209, 28610, 25211, 28741, 25953, 28629, 25954, 25217, 25216, 25220, 28744, 28653, 28657, 25225, 23696, 23695, 23698, 23697, 23703, 23702, 28710, 25971, 25235, 25234, 25238, 28747, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 28422, 27955, 28945, 28946, 28947, 28960, 28961, 28495, 28034, 28979, 28980, 28981, 
28994, 28996, 28997, 28570, 28122, 29015, 29026, 29028, 29042, 29043, 29044, 29060, 29061, 29063, 29065, 29067, 29069, 29071, 28431, 28428, 28932, 28934, 28937, 28935, 29085, 29086, 28445, 25912, 25914, 29087, 25174, 29088, 25176, 29089, 29090, 28474, 28953, 28468, 28470, 28472, 28956, 28955, 28000, 28484, 28482, 29091, 29092, 29093, 28965, 28966, 28968, 28971, 28969, 29094, 29095, 28515, 25927, 25928, 29096, 25189, 29098, 25191, 29100, 29101, 28986, 28546, 28542, 28538, 28540, 28990, 28989, 28079, 28554, 28552, 29102, 29103, 29104, 29105, 28579, 28576, 29002, 29004, 29007, 29005, 29106, 29107, 28593, 25942, 25943, 29108, 25206, 29109, 25208, 29110, 29020, 28614, 28612, 29022, 29024, 29111, 25952, 29113, 29114, 29034, 28635, 28633, 28631, 29036, 29040, 29038, 29115, 29116, 29117, 29118, 25955, 29120, 29121, 28661, 28669, 28665, 29046, 28663, 29051, 29055, 29054, 28212, 28677, 28675, 26614, 29122, 29123, 29124, 29125, 29126, 29127, 29128, 29129, 29077, 28716, 28714, 28712, 29079, 29083, 29081, 29130, 29131, 29132, 29133, 25972, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 28929, 29214, 29215, 29216, 29217, 29218, 29219, 28940, 29222, 29223, 29224, 29226, 29228, 28466, 28462, 28465, 29231, 29232, 29233, 29234, 29235, 29236, 29237, 29238, 29239, 29240, 25921, 25925, 28963, 29244, 29245, 29246, 29247, 29248, 28974, 29251, 29252, 29253, 29255, 29257, 28536, 28535, 28534, 29260, 29261, 29262, 29263, 29264, 29265, 29266, 29267, 29268, 29269, 25933, 25936, 25940, 28999, 29274, 29275, 29276, 29277, 29278, 29279, 29010, 29282, 29283, 29284, 29286, 
29288, 29016, 29290, 29291, 29292, 29293, 29294, 29296, 29029, 29027, 29299, 29300, 29301, 29302, 29303, 29304, 29305, 29307, 29310, 28654, 28651, 28655, 29311, 29313, 29314, 29315, 29316, 29317, 29318, 29319, 29320, 29321, 29322, 29323, 29324, 25958, 25960, 25959, 29326, 25964, 25967, 29072, 29070, 29333, 29334, 29335, 29336, 29337, 29338, 29339, 29341, 29344, 28272, 28284, 28291, 28292, 28301, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29440, 29441, 29445, 29447, 29225, 29227, 29453, 29454, 29455, 29456, 29458, 29461, 29463, 29466, 29467, 29468, 29472, 29474, 29254, 29256, 29480, 29481, 29482, 29483, 29485, 29488, 29490, 29493, 29494, 29495, 29496, 29497, 29501, 29503, 29285, 29287, 29509, 29510, 29295, 29516, 29517, 29518, 29520, 29523, 29309, 29527, 29528, 29529, 29531, 29533, 29535, 29537, 29539, 29543, 29544, 29545, 29547, 29548, 29549, 29550, 29551, 29553, 29556, 29343, 29444, 28267, 28266, 29560, 29471, 28735, 28734, 29561, 29500, 28286, 28285, 29514, 29562, 28743, 29563, 28296, 28746, 29564, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 
160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29698, 29699, 29229, 29703, 29705, 29708, 29242, 29243, 29712, 29713, 29716, 29717, 29719, 29722, 29270, 29271, 29272, 29728, 29729, 29289, 29733, 29298, 29737, 29741, 29744, 29748, 29325, 29750, 29752, 29753, 29332, 29756, 29760, 29697, 28733, 28732, 29761, 29762, 29764, 29469, 29099, 29097, 29765, 29766, 29768, 29727, 28288, 28287, 29769, 29770, 29771, 29112, 29739, 29119, 29773, 29775, 29758, 29134, 29776, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29953, 29954, 29956, 29961, 29962, 29964, 29970, 29973, 29975, 29976, 29546, 29980, 29981, 29982, 29985, 29986, 29987, 29988, 29957, 28274, 28273, 29991, 29992, 
29993, 29994, 29965, 28740, 28282, 28281, 29997, 29998, 29999, 30000, 29972, 30003, 30004, 30005, 29977, 28297, 30008, 30009, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 30209, 30212, 30216, 30208, 30223, 30226, 30227, 30228, 30211, 30230, 30233, 30234, 30235, 30236, 30214, 30238, 30241, 29772, 29974, 30244, 30245, 30246, 28299, 28300, 28298, 29983, 30248, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 
175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 30467, 30468, 30210, 30470, 30472, 30473, 30213, 29767, 30476, 30478, 30479, 30002, 30482, 30483, 30217, 30486, 30487, 30488, 30489, 30490, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 30720, 30722, 30723, 30724, 30726, 30727, 30729, 30243, 30734, 30485, 30736, 30247, 30481, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 
79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 30976, 30469, 30979, 30474, 30982, 30484, 30985, 30988, 30733, 30739, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 
241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 31238, 30730, 30978, 30725, 30721, 31240, 31241, 30981, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 31488, 31489, 31490, 31491, 31492, 31495, 31493, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 
161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 31744, 31239, 31747, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 32000, 32002, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 
86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 32256, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 
254, 255, 32512, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 32768, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 
184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255};
int h_C[]= {
1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113, 115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, 151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251, 253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 287, 289, 291, 293, 295, 297, 299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343, 345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 375, 377, 379, 381, 383, 385, 387, 389, 391, 393, 395, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435, 437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 475, 477, 479, 481, 483, 485, 487, 489, 491, 493, 495, 497, 499, 501, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527, 529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 559, 561, 563, 565, 567, 569, 571, 573, 575, 577, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619, 621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 641, 643, 645, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665, 667, 669, 671, 673, 675, 677, 679, 681, 683, 685, 687, 689, 691, 693, 695, 697, 699, 701, 703, 705, 707, 709, 711, 713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757, 759, 761, 763, 765, 767, 769, 771, 773, 775, 777, 779, 781, 783, 785, 787, 789, 791, 793, 795, 797, 799, 801, 803, 805, 807, 809, 811, 813, 815, 817, 819, 821, 823, 
825, 827, 829, 831, 833, 835, 837, 839, 841, 843, 845, 847, 849, 851, 853, 855, 857, 859, 861, 863, 865, 867, 869, 871, 873, 875, 877, 879, 881, 883, 885, 887, 889, 891, 893, 895, 897, 899, 901, 903, 905, 907, 909, 911, 913, 915, 917, 919, 921, 923, 925, 927, 929, 931, 933, 935, 937, 939, 941, 943, 945, 947, 949, 951, 953, 955, 957, 959, 961, 963, 965, 967, 969, 971, 973, 975, 977, 979, 981, 983, 985, 987, 989, 991, 993, 995, 997, 999, 1001, 1003, 1005, 1007, 1009, 1011, 1013, 1015, 1017, 1019, 1021, 1023, 1025, 1027, 1029, 1031, 1033, 1035, 1037, 1039, 1041, 1043, 1045, 1047, 1049, 1051, 1053, 1055, 1057, 1059, 1061, 1063, 1065, 1067, 1069, 1071, 1073, 1075, 1077, 1079, 1081, 1083, 1085, 1087, 1089, 1091, 1093, 1095, 1097, 1099, 1101, 1103, 1105, 1107, 1109, 1111, 1113, 1115, 1117, 1119, 1121, 1123, 1125, 1127, 1129, 1131, 1133, 1135, 1137, 1139, 1141, 1143, 1145, 1147, 1149, 1151, 1153, 1155, 1157, 1159, 1161, 1163, 1165, 1167, 1169, 1171, 1173, 1175, 1177, 1179, 1181, 1183, 1185, 1187, 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, 1207, 1209, 1211, 1213, 1215, 1217, 1219, 1221, 1223, 1225, 1227, 1229, 1231, 1233, 1235, 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, 1287, 1289, 1291, 1293, 1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1325, 1327, 1329, 1331, 1333, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1355, 1357, 1359, 1361, 1363, 1365, 1367, 1369, 1371, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1387, 1389, 1391, 1393, 1395, 1397, 1399, 1401, 1403, 1405, 1407, 1409, 1411, 1413, 1415, 1417, 1419, 1421, 1423, 1425, 1427, 1429, 1431, 1433, 1435, 1437, 1439, 1441, 1443, 1445, 1447, 1449, 1451, 1453, 1455, 1457, 1459, 1461, 1463, 1465, 1467, 1469, 1471, 1473, 1475, 1477, 1479, 1481, 1483, 1485, 1487, 1489, 1491, 1493, 1495, 1497, 1499, 1501, 1503, 1505, 1507, 1509, 1511, 1513, 1515, 1517, 1519, 
1521, 1523, 1525, 1527, 1529, 1531, 1533, 1535, 1537, 1539, 1541, 1543, 1545, 1547, 1549, 1551, 1553, 1555, 1557, 1559, 1561, 1563, 1565, 1567, 1569, 1571, 1573, 1575, 1577, 1579, 1581, 1583, 1585, 1587, 1589, 1591, 1593, 1595, 1597, 1599, 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, 1633, 1635, 1637, 1639, 1641, 1643, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1661, 1663, 1665, 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1693, 1695, 1697, 1699, 1701, 1703, 1705, 1707, 1709, 1711, 1713, 1715, 1717, 1719, 1721, 1723, 1725, 1727, 1729, 1731, 1733, 1735, 1737, 1739, 1741, 1743, 1745, 1747, 1749, 1751, 1753, 1755, 1757, 1759, 1761, 1763, 1765, 1767, 1769, 1771, 1773, 1775, 1777, 1779, 1781, 1783, 1785, 1787, 1789, 1791, 1793, 1795, 1797, 1799, 1801, 1803, 1805, 1807, 1809, 1811, 1813, 1815, 1817, 1819, 1821, 1823, 1825, 1827, 1829, 1831, 1833, 1835, 1837, 1839, 1841, 1843, 1845, 1847, 1849, 1851, 1853, 1855, 1857, 1859, 1861, 1863, 1865, 1867, 1869, 1871, 1873, 1875, 1877, 1879, 1881, 1883, 1885, 1887, 1889, 1891, 1893, 1895, 1897, 1899, 1901, 1903, 1905, 1907, 1909, 1911, 1913, 1915, 1917, 1919, 1921, 1923, 1925, 1927, 1929, 1931, 1933, 1935, 1937, 1939, 1941, 1943, 1945, 1947, 1949, 1951, 1953, 1955, 1957, 1959, 1961, 1963, 1965, 1967, 1969, 1971, 1973, 1975, 1977, 1979, 1981, 1983, 1985, 1987, 1989, 1991, 1993, 1995, 1997, 1999, 2001, 2003, 2005, 2007, 2009, 2011, 2013, 2015, 2017, 2019, 2021, 2023, 2025, 2027, 2029, 2031, 2033, 2035, 2037, 2039, 2041, 2043, 2045, 2047, 2049, 2051, 2053, 2055, 2057, 2059, 2061, 2063, 2065, 2067, 2069, 2071, 2073, 2075, 2077, 2079, 2081, 2083, 2085, 2087, 2089, 2091, 2093, 2095, 2097, 2099, 2101, 2103, 2105, 2107, 2109, 2111, 2113, 2115, 2117, 2119, 2121, 2123, 2125, 2127, 2129, 2131, 2133, 2135, 2137, 2139, 2141, 2143, 2145, 2147, 2149, 2151, 2153, 2155, 2157, 2159, 2161, 2163, 2165, 2167, 2169, 2171, 2173, 2175, 2177, 2179, 2181, 2183, 2185, 
2187, 2189, 2191, 2193, 2195, 2197, 2199, 2201, 2203, 2205, 2207, 2209, 2211, 2213, 2215, 2217, 2219, 2221, 2223, 2225, 2227, 2229, 2231, 2233, 2235, 2237, 2239, 2241, 2243, 2245, 2247, 2249, 2251, 2253, 2255, 2257, 2259, 2261, 2263, 2265, 2267, 2269, 2271, 2273, 2275, 2277, 2279, 2281, 2283, 2285, 2287, 2289, 2291, 2293, 2295, 2297, 2299, 2301, 2303, 2305, 2307, 2309, 2311, 2313, 2315, 2317, 2319, 2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 2343, 2345, 2347, 2349, 2351, 2353, 2355, 2357, 2359, 2361, 2363, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395, 2397, 2399, 2401, 2403, 2405, 2407, 2409, 2411, 2413, 2415, 2417, 2419, 2421, 2423, 2425, 2427, 2429, 2431, 2433, 2435, 2437, 2439, 2441, 2443, 2445, 2447, 2449, 2451, 2453, 2455, 2457, 2459, 2461, 2463, 2465, 2467, 2469, 2471, 2473, 2475, 2477, 2479, 2481, 2483, 2485, 2487, 2489, 2491, 2493, 2495, 2497, 2499, 2501, 2503, 2505, 2507, 2509, 2511, 2513, 2515, 2517, 2519, 2521, 2523, 2525, 2527, 2529, 2531, 2533, 2535, 2537, 2539, 2541, 2543, 2545, 2547, 2549, 2551, 2553, 2555, 2557, 2559, 2561, 2563, 2565, 2567, 2569, 2571, 2573, 2575, 2577, 2579, 2581, 2583, 2585, 2587, 2589, 2591, 2593, 2595, 2597, 2599, 2601, 2603, 2605, 2607, 2609, 2611, 2613, 2615, 2617, 2619, 2621, 2623, 2625, 2627, 2629, 2631, 2633, 2635, 2637, 2639, 2641, 2643, 2645, 2647, 2649, 2651, 2653, 2655, 2657, 2659, 2661, 2663, 2665, 2667, 2669, 2671, 2673, 2675, 2677, 2679, 2681, 2683, 2685, 2687, 2689, 2691, 2693, 2695, 2697, 2699, 2701, 2703, 2705, 2707, 2709, 2711, 2713, 2715, 2717, 2719, 2721, 2723, 2725, 2727, 2729, 2731, 2733, 2735, 2737, 2739, 2741, 2743, 2745, 2747, 2749, 2751, 2753, 2755, 2757, 2759, 2761, 2763, 2765, 2767, 2769, 2771, 2773, 2775, 2777, 2779, 2781, 2783, 2785, 2787, 2789, 2791, 2793, 2795, 2797, 2799, 2801, 2803, 2805, 2807, 2809, 2811, 2813, 2815, 2817, 2819, 2821, 2823, 2825, 2827, 2829, 2831, 2833, 2835, 2837, 2839, 2841, 2843, 2845, 2847, 2849, 2851, 
2853, 2855, 2857, 2859, 2861, 2863, 2865, 2867, 2869, 2871, 2873, 2875, 2877, 2879, 2881, 2883, 2885, 2887, 2889, 2891, 2893, 2895, 2897, 2899, 2901, 2903, 2905, 2907, 2909, 2911, 2913, 2915, 2917, 2919, 2921, 2923, 2925, 2927, 2929, 2931, 2933, 2935, 2937, 2939, 2941, 2943, 2945, 2947, 2949, 2951, 2953, 2955, 2957, 2959, 2961, 2963, 2965, 2967, 2969, 2971, 2973, 2975, 2977, 2979, 2981, 2983, 2985, 2987, 2989, 2991, 2993, 2995, 2997, 2999, 3001, 3003, 3005, 3007, 3009, 3011, 3013, 3015, 3017, 3019, 3021, 3023, 3025, 3027, 3029, 3031, 3033, 3035, 3037, 3039, 3041, 3043, 3045, 3047, 3049, 3051, 3053, 3055, 3057, 3059, 3061, 3063, 3065, 3067, 3069, 3071, 3073, 3075, 3077, 3079, 3081, 3083, 3085, 3087, 3089, 3091, 3093, 3095, 3097, 3099, 3101, 3103, 3105, 3107, 3109, 3111, 3113, 3115, 3117, 3119, 3121, 3123, 3125, 3127, 3129, 3131, 3133, 3135, 3137, 3139, 3141, 3143, 3145, 3147, 3149, 3151, 3153, 3155, 3157, 3159, 3161, 3163, 3165, 3167, 3169, 3171, 3173, 3175, 3177, 3179, 3181, 3183, 3185, 3187, 3189, 3191, 3193, 3195, 3197, 3199, 3201, 3203, 3205, 3207, 3209, 3211, 3213, 3215, 3217, 3219, 3221, 3223, 3225, 3227, 3229, 3231, 3233, 3235, 3237, 3239, 3241, 3243, 3245, 3247, 3249, 3251, 3253, 3257, 3259, 3261, 3263, 3265, 3267, 3269, 3271, 3273, 3275, 3277, 3279, 3281, 3283, 3285, 3287, 3289, 3291, 3293, 3295, 3297, 3299, 3301, 3303, 3305, 3307, 3309, 3311, 3313, 3315, 3317, 3319, 3321, 3323, 3325, 3327, 3329, 3331, 3333, 3335, 3337, 3339, 3341, 3343, 3345, 3347, 3349, 3351, 3353, 3355, 3357, 3359, 3361, 3363, 3365, 3367, 3369, 3371, 3373, 3375, 3377, 3379, 3381, 3383, 3385, 3387, 3389, 3391, 3393, 3395, 3397, 3399, 3401, 3403, 3405, 3407, 3409, 3411, 3413, 3415, 3417, 3419, 3421, 3423, 3425, 3427, 3429, 3431, 3433, 3435, 3437, 3439, 3441, 3443, 3445, 3447, 3449, 3451, 3453, 3455, 3457, 3459, 3461, 3463, 3465, 3467, 3469, 3471, 3473, 3475, 3477, 3479, 3481, 3483, 3485, 3487, 3489, 3491, 3493, 3495, 3497, 3499, 3501, 3503, 3505, 3507, 3509, 3511, 3514, 3516, 3518, 3520, 
3522, 3524, 3526, 3528, 3530, 3532, 3534, 3536, 3538, 3540, 3542, 3544, 3546, 3548, 3550, 3552, 3554, 3556, 3558, 3560, 3563, 3565, 3567, 3569, 3571, 3573, 3576, 3578, 3580, 3582, 3585, 3587, 3590, 3592, 3597, 3599, 3608, 3610, 3612, 3614, 3617, 3619, 3622, 3624, 3630, 3632, 3634, 3636, 3639, 3641, 3644, 3646, 3652, 3654, 3656, 3658, 3660, 3662, 3664, 3666, 3668, 3670, 3672, 3674, 3676, 3678, 3680, 3682, 3684, 3686, 3688, 3690, 3693, 3695, 3697, 3699, 3702, 3704, 3706, 3708, 3710, 3712, 3714, 3716, 3718, 3720, 3722, 3724, 3726, 3728, 3731, 3733, 3735, 3737, 3739, 3741, 3743, 3745, 3747, 3749, 3751, 3753, 3755, 3757, 3759, 3761, 3763, 3765, 3767, 3769, 3771, 3773, 3775, 3777, 3779, 3781, 3783, 3785, 3788, 3790, 3793, 3795, 3798, 3800, 3803, 3805, 3808, 3810, 3812, 3814, 3817, 3819, 3822, 3824, 3829, 3831, 3834, 3836, 3840, 3842, 3845, 3847, 3850, 3852, 3854, 3856, 3859, 3861, 3863, 3865, 3869, 3871, 3874, 3876, 3879, 3881, 3884, 3886, 3888, 3890, 3892, 3894, 3896, 3898, 3900, 3902, 3904, 3906, 3908, 3910, 3913, 3915, 3917, 3919, 3921, 3923, 3925, 3927, 3929, 3931, 3933, 3935, 3937, 3939, 3941, 3943, 3945, 3947, 3949, 3951, 3953, 3955, 3957, 3959, 3961, 3963, 3965, 3967, 3969, 3971, 3973, 3975, 3978, 3980, 3982, 3984, 3986, 3988, 3990, 3992, 3995, 3997, 4000, 4002, 4008, 4010, 4012, 4014, 4017, 4019, 4021, 4023, 4026, 4028, 4031, 4033, 4038, 4040, 4042, 4044, 4047, 4049, 4052, 4054, 4060, 4062, 4068, 4070, 4073, 4075, 4077, 4079, 4081, 4083, 4085, 4087, 4089, 4091, 4093, 4095, 4097, 4099, 4101, 4103, 4108, 4110, 4112, 4114, 4116, 4118, 4121, 4123, 4126, 4128, 4134, 4136, 4141, 4143, 4146, 4148, 4150, 4152, 4154, 4156, 4158, 4160, 4162, 4164, 4166, 4168, 4174, 4176, 4179, 4181, 4184, 4186, 4189, 4191, 4197, 4199, 4201, 4203, 4205, 4207, 4209, 4211, 4213, 4215, 4217, 4219, 4221, 4223, 4225, 4227, 4229, 4231, 4233, 4235, 4237, 4239, 4241, 4243, 4245, 4247, 4249, 4251, 4253, 4255, 4257, 4259, 4261, 4263, 4265, 4267, 4269, 4271, 4273, 4275, 4277, 4279, 4281, 4283, 4285, 
4287, 4289, 4291, 4293, 4295, 4297, 4299, 4301, 4303, 4305, 4307, 4309, 4311, 4313, 4315, 4317, 4319, 4321, 4323, 4325, 4327, 4329, 4331, 4333, 4335, 4337, 4339, 4341, 4343, 4345, 4347, 4349, 4351, 4353, 4355, 4357, 4359, 4361, 4363, 4365, 4367, 4369, 4371, 4373, 4375, 4377, 4379, 4381, 4383, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408, 4410, 4412, 4414, 4416, 4418, 4420, 4422, 4424, 4426, 4428, 4430, 4432, 4434, 4436, 4438, 4440, 4442, 4444, 4447, 4449, 4451, 4453, 4456, 4458, 4460, 4462, 4465, 4467, 4469, 4471, 4473, 4475, 4477, 4479, 4482, 4484, 4486, 4488, 4491, 4493, 4496, 4498, 4503, 4505, 4507, 4509, 4511, 4513, 4515, 4517, 4520, 4522, 4524, 4526, 4529, 4531, 4534, 4536, 4541, 4543, 4545, 4547, 4549, 4551, 4554, 4556, 4559, 4561, 4564, 4566, 4569, 4571, 4574, 4576, 4579, 4581, 4584, 4586, 4005, 4005, 4591, 4593, 4595, 4597, 4599, 4601, 4603, 4605, 4607, 4609, 372, 372, 4613, 4615, 4617, 4619, 4621, 4623, 4625, 4627, 4629, 4631, 4633, 4635, 4637, 4639, 4641, 4643, 4645, 4647, 4649, 4651, 4653, 4655, 4657, 4659, 4661, 4663, 4665, 4667, 4682, 4684, 4686, 4688, 4690, 4692, 4694, 4696, 4698, 4700, 4702, 4704, 4706, 4708, 4710, 4712, 4714, 4716, 4718, 4720, 4722, 4724, 4726, 4728, 4730, 4732, 4734, 4736, 4738, 4740, 3993, 3993, 4005, 4005, 4035, 4035, 4005, 4005, 3993, 3993, 4776, 4778, 4780, 4782, 4784, 4786, 4788, 4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 3561, 3561, 3561, 3561, 3561, 3561, 3603, 3603, 3603, 3603, 3603, 3603, 3615, 3627, 3637, 3637, 3649, 3649, 3615, 3627, 3637, 3637, 3649, 3649, 3976, 3976, 3993, 3993, 4194, 4194, 4005, 4005, 3976, 3976, 3976, 3976, 4005, 4005, 4057, 4057, 3512, 3512, 372, 372, 3512, 3512, 373, 373, 3627, 3627, 3637, 3637, 3649, 3649, 3976, 3976, 3976, 3976, 3976, 3976, 3993, 3993, 4057, 4057, 4045, 4065, 4065, 4065, 4065, 3512, 3512, 372, 372, 3512, 3512, 373, 373, 3512, 3512, 3615, 3615, 3627, 3627, 3832, 3837, 3583, 3594, 3627, 3627, 3615, 3615, 3649, 3649, 3637, 3637, 3691, 3691, 3700, 
3700, 3561, 3561, 3561, 3561, 3561, 3561, 3603, 3603, 3603, 3603, 3603, 3603, 3627, 3627, 3615, 3615, 3637, 3637, 3649, 3649, 4005, 4005, 3993, 3993, 4035, 4035, 4015, 4015, 4045, 4045, 4057, 4057, 3993, 3993, 4005, 4005, 3993, 3993, 4005, 4005, 4015, 4015, 4045, 4045, 4169, 4169, 4035, 4035, 4015, 4015, 3976, 3976, 4171, 4171, 4015, 4015, 4015, 4015, 4057, 4057, 4045, 4045, 4065, 4065, 4065, 4065, 4005, 4005, 4015, 4015, 4035, 4035, 3976, 3976, 3976, 3976, 4119, 4119, 4131, 4131, 4171, 4171, 4171, 4171, 4171, 4171, 5251, 5253, 5255, 5257, 5259, 5261, 5263, 5265, 5267, 5269, 5271, 5273, 5275, 5277, 5279, 5281, 5283, 5285, 4384, 4384, 4463, 4463, 5340, 5342, 5344, 5346, 5348, 5350, 5352, 5354, 5356, 5358, 5360, 5362, 5364, 5366, 5368, 5370, 5372, 5374, 5376, 5378, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396, 5398, 5400, 5402, 5404, 5406, 5408, 5410, 5412, 5414, 5416, 5418, 4384, 4384, 4463, 4463, 5479, 5481, 5483, 5485, 5487, 5489, 5491, 5493, 5495, 5497, 5499, 5501, 5503, 5505, 5507, 5509, 5512, 5514, 5516, 5518, 5520, 5522, 4005, 4005, 3976, 3976, 5543, 5545, 5547, 5549, 5551, 5553, 5555, 5557, 5559, 5561, 5563, 5565, 5567, 5569, 5571, 5573, 5575, 5577, 5579, 5581, 5583, 5585, 5587, 5589, 5591, 5593, 5595, 5597, 5599, 5601, 5603, 5605, 3627, 3627, 3615, 3615, 3615, 3615, 3691, 3691, 3700, 3700, 3561, 3561, 3561, 3561, 3561, 3561, 3603, 3603, 3615, 3615, 3691, 3691, 3700, 3700, 3627, 3627, 4005, 4005, 4035, 4035, 4065, 4065, 4035, 4035, 4015, 4015, 3976, 3976, 3976, 3976, 3976, 3976, 4065, 4065, 4035, 4035, 4045, 4045, 4057, 4057, 4065, 4065, 4035, 4035, 4015, 4015, 3976, 3976, 3993, 3993, 4005, 4005, 3976, 3976, 3976, 3976, 4005, 4005, 3993, 3993, 4065, 4065, 4005, 4005, 3993, 3993, 4035, 4035, 4005, 4005, 3993, 3993, 3976, 3976, 3976, 3976, 3993, 3993, 4005, 4005, 4065, 4065, 3512, 3512, 3512, 3512, 4194, 4194, 3615, 3615, 3627, 3627, 3637, 3637, 3615, 3615, 4005, 4005, 4171, 4171, 3993, 3993, 3976, 3976, 4045, 4045, 4065, 4065, 4169, 4169, 4194, 4194, 
3993, 3993, 4045, 4045, 4065, 4065, 4171, 4171, 4169, 4169, 4194, 4194, 4384, 4384, 4463, 4463, 6052, 6054, 6056, 6058, 6060, 6062, 6064, 6066, 6068, 6070, 6072, 6074, 6076, 6078, 6080, 6082, 6084, 6086, 4384, 4384, 6110, 6112, 6114, 6116, 6118, 6120, 6122, 6124, 6126, 6128, 6130, 6132, 6134, 6136, 6138, 6140, 4384, 4384, 4384, 4384, 4463, 4463, 6156, 6158, 6160, 6162, 4384, 4384, 4384, 4384, 6172, 6174, 6176, 6178, 6180, 6182, 6184, 6186, 6188, 6190, 6192, 6194, 6196, 6198, 6200, 6202, 4384, 4384, 4384, 4384, 4384, 4384, 6279, 6281, 6283, 6285, 6287, 6289, 6291, 6293, 6295, 6297, 6299, 6301, 6303, 6305, 6307, 6309, 3561, 3561, 3561, 3561, 3603, 3603, 3615, 3615, 3857, 3561, 3561, 3561, 3561, 3601, 3561, 3561, 3561, 3561, 3605, 3603, 3603, 3603, 3603, 3603, 3603, 3615, 3615, 3627, 3627, 3866, 3627, 3627, 3615, 3615, 4005, 4005, 4035, 4035, 3976, 3976, 4035, 4035, 4015, 4015, 4065, 4065, 3512, 3512, 3512, 3512, 3993, 3993, 4035, 4035, 4005, 4005, 4045, 4057, 3976, 3976, 4005, 4005, 3993, 3993, 4015, 4015, 4065, 4065, 3254, 4065, 4065, 3255, 4065, 4065, 3512, 3512, 4194, 4194, 3993, 3993, 4005, 4005, 4035, 4035, 4005, 4005, 3993, 3993, 4015, 4015, 3976, 3976, 3976, 3976, 4005, 4005, 3993, 3993, 4035, 4035, 4015, 4015, 4045, 4045, 4057, 4057, 4065, 4065, 4131, 4131, 4119, 4119, 3512, 3512, 3512, 3512, 3512, 3512, 4194, 4194, 3561, 3561, 3561, 3561, 3561, 3561, 3583, 3594, 3603, 3603, 3603, 3603, 3601, 3603, 3603, 3605, 3615, 3615, 3627, 3627, 3637, 3637, 3649, 3649, 3815, 3826, 3691, 3691, 3700, 3700, 3815, 3826, 3832, 3837, 3857, 3866, 4035, 4035, 4045, 4045, 4104, 4105, 3911, 3911, 4171, 4171, 4194, 4194, 3993, 3993, 4005, 4005, 4015, 4015, 4035, 4035, 3976, 3976, 3993, 3993, 4005, 4005, 4015, 4015, 4035, 4035, 4045, 4045, 4057, 4057, 4065, 4065, 4065, 4065, 4131, 4119, 4119, 4131, 4104, 4105, 4138, 4119, 4119, 4131, 4131, 4138, 4171, 4171, 4171, 4171, 4169, 4169, 4171, 4171, 4194, 4194, 6902, 6904, 6906, 6908, 6910, 6912, 4384, 4384, 4463, 4463, 6943, 6945, 6947, 
6949, 6951, 6953, 6955, 6957, 6959, 6961, 6963, 6965, 6967, 6969, 6971, 6973, 6975, 6977, 6979, 6981, 6983, 6985, 6987, 6989, 6991, 6993, 4384, 4384, 4445, 4454, 4445, 4454, 4463, 4463, 4489, 4500, 4489, 4500, 4527, 4538, 4527, 4538, 7082, 7084, 7086, 7088, 7091, 7093, 7095, 7097, 7099, 7101, 7103, 7105, 7107, 7109, 7111, 7113, 7115, 7117, 7120, 7122, 7127, 7129, 7131, 7133, 7136, 7138, 7140, 7142, 7146, 7148, 7151, 7153, 7156, 7158, 7166, 7168, 7170, 7172, 7174, 7176, 7179, 7181, 7184, 7186, 7189, 7191, 7198, 7200, 7204, 7206, 7210, 7212, 7214, 7216, 7234, 7236, 7254, 7256, 6900, 6900, 6900, 6900, 7294, 7296, 6900, 6900, 7303, 7305, 7307, 7309, 6900, 6900, 7314, 7316, 6900, 6900, 4807, 4807, 7635, 7637, 7639, 7641, 7674, 7676, 7678, 7680, 7682, 7684, 7686, 7688, 7690, 7692, 7694, 7696, 7134, 7143, 7714, 7716, 7718, 7720, 6900, 6900, 6900, 6900, 6900, 6900, 6900, 6900, 7755, 7757, 7759, 7761, 7763, 7765, 7772, 7774, 6940, 6940, 6940, 6940, 5510, 6940, 6940, 6940, 6940, 5510, 7898, 7900, 6940, 6940, 6940, 6940, 7915, 7917, 7919, 7921, 7923, 7925, 6940, 6940, 5606, 5606, 6940, 6940, 5606, 5606, 5606, 5606, 5606, 5606, 8430, 8432, 8434, 8436, 8438, 8440, 8442, 8444, 8446, 8448, 8450, 8452, 8454, 8456, 6900, 6900, 6941, 8480, 8482, 8484, 8486, 8488, 8490, 8492, 8494, 6900, 6900, 8545, 8547, 8588, 8590, 6154, 6154, 6154, 6154, 8600, 8602, 6900, 8622, 8624, 8626, 8628, 8639, 8641, 8643, 8645, 8678, 8680, 8682, 8684, 8686, 8688, 8690, 8692, 6900, 7124, 6277, 6277, 6277, 6277, 6310, 6310, 9045, 9047, 9049, 9051, 9053, 9055, 9057, 9059, 9061, 9063, 9065, 9067, 6900, 6900, 7161, 7161, 9087, 9089, 9092, 9094, 9107, 9109, 9112, 9114, 6940, 6940, 6941, 7161, 7161, 7161, 7161, 9156, 9158, 9160, 9162, 9164, 9166, 9169, 9171, 9186, 9188, 9190, 9192, 9195, 9197, 9200, 9202, 7161, 7124, 7134, 7143, 7163, 7161, 7163, 9110, 9110, 9090, 9110, 9090, 9090, 9090, 9090, 9110, 9110, 9110, 9110, 9167, 9173, 9167, 9173, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 
102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 11009, 11011, 11013, 11015, 11017, 11019, 11021, 11023, 11025, 11027, 11029, 11031, 11033, 11035, 11037, 11039, 11041, 11043, 11045, 11047, 11049, 11051, 11053, 11055, 11057, 11059, 11061, 11063, 11065, 11067, 11069, 11071, 11073, 11075, 11077, 11079, 11081, 11083, 11085, 11087, 11089, 11091, 11093, 11095, 11097, 11099, 11101, 11103, 11105, 11107, 11109, 11111, 11113, 11115, 11117, 11119, 11121, 11123, 11125, 11127, 11129, 11131, 11133, 11135, 11137, 11139, 11141, 11143, 11145, 11147, 11149, 11151, 11153, 11155, 11157, 11159, 11161, 11163, 11165, 11167, 11169, 11171, 11173, 11175, 11177, 11179, 11181, 11183, 11185, 11187, 11189, 11191, 11193, 11195, 11197, 11199, 11201, 11203, 11205, 11207, 11209, 11211, 11213, 11215, 11217, 11219, 11221, 11223, 11225, 11227, 11229, 11231, 11233, 11235, 11237, 11239, 11241, 11243, 11245, 11247, 11249, 11251, 11253, 11255, 11257, 11259, 11261, 11263, 11265, 11267, 11269, 11271, 11273, 11275, 11277, 11279, 11281, 11283, 11285, 11287, 11289, 11291, 11293, 11295, 11297, 11299, 11301, 11303, 11305, 11307, 11309, 11311, 11313, 11315, 11317, 11319, 11321, 11323, 11325, 11327, 11329, 11331, 11333, 11335, 11337, 11339, 11341, 11343, 11345, 11347, 11349, 11351, 11353, 11355, 11357, 
11359, 11361, 11363, 11365, 11367, 11369, 11371, 11373, 11375, 11377, 11379, 11381, 11383, 11385, 11387, 11389, 11391, 11393, 11395, 11397, 11399, 11401, 11403, 11405, 11407, 11409, 11411, 11413, 11415, 11417, 11419, 11421, 11423, 11425, 11427, 11429, 11431, 11433, 11435, 11437, 11439, 11441, 11443, 11445, 11447, 11449, 11451, 11453, 11455, 11457, 11459, 11461, 11463, 11465, 11467, 11469, 11471, 11473, 11475, 11477, 11479, 11481, 11483, 11485, 11487, 11489, 11491, 11493, 11495, 11497, 11499, 11501, 11503, 11505, 11507, 11509, 11511, 11513, 11515, 11517, 11519, 11521, 11523, 11525, 11527, 11529, 11531, 11533, 11535, 11537, 11539, 11541, 11543, 11545, 11547, 11549, 11551, 11553, 11555, 11557, 11559, 11561, 11563, 11565, 11567, 11569, 11571, 11573, 11575, 11577, 11579, 11581, 11583, 11585, 11587, 11589, 11591, 11593, 11595, 11597, 11599, 11601, 11603, 11605, 11607, 11609, 11611, 11613, 11615, 11617, 11619, 11621, 11623, 11625, 11627, 11629, 11631, 11633, 11635, 11637, 11639, 11641, 11643, 11645, 11647, 11649, 11651, 11653, 11655, 11657, 11659, 11661, 11663, 11665, 11667, 11669, 11671, 11673, 11675, 11677, 11679, 11681, 11683, 11685, 11687, 11689, 11691, 11693, 11695, 11697, 11699, 11701, 11703, 11705, 11707, 11709, 11711, 11713, 11715, 11717, 11719, 11721, 11723, 11725, 11727, 11729, 11731, 11733, 11735, 11737, 11739, 11741, 11743, 11745, 11747, 11749, 11751, 11753, 11755, 11757, 11759, 11761, 11763, 11765, 11767, 11769, 11771, 11773, 11775, 11777, 11779, 11781, 11783, 11785, 11787, 11789, 11791, 11793, 11795, 11797, 11799, 11801, 11803, 11805, 11807, 11809, 11811, 11813, 11815, 11817, 11819, 11821, 11823, 11825, 11827, 11829, 11831, 11833, 11835, 11837, 11839, 11841, 11843, 11845, 11847, 11849, 11851, 11853, 11855, 11857, 11859, 11861, 11863, 11865, 11867, 11869, 11871, 11873, 11875, 11877, 11879, 11881, 11883, 11885, 11887, 11889, 11891, 11893, 11895, 11897, 11899, 11901, 11903, 11905, 11907, 11909, 11911, 11913, 11915, 11917, 11919, 11921, 11923, 11925, 11927, 
11929, 11931, 11933, 11935, 11937, 11939, 11941, 11943, 11945, 11947, 11949, 11951, 11953, 11955, 11957, 11959, 11961, 11963, 11965, 11967, 11969, 11971, 11973, 11975, 11977, 11979, 11981, 11983, 11985, 11987, 11989, 11991, 11993, 11995, 11997, 11999, 12001, 12003, 12005, 12007, 12009, 12011, 12013, 12015, 12017, 12019, 12021, 12023, 12025, 12027, 12029, 12031, 12033, 12035, 12037, 12039, 12041, 12043, 12045, 12047, 12049, 12051, 12053, 12055, 12057, 12059, 12061, 12063, 12065, 12067, 12069, 12071, 12073, 12075, 12077, 12079, 12081, 12083, 12085, 12087, 12089, 12091, 12093, 12095, 12097, 12099, 12101, 12103, 12105, 12107, 12109, 12111, 12113, 12115, 12117, 12119, 12121, 12123, 12125, 12127, 12129, 12131, 12133, 12135, 12137, 12139, 12141, 12143, 12145, 12147, 12149, 12151, 12153, 12155, 12157, 12159, 12161, 12163, 12165, 12167, 12169, 12171, 12173, 12175, 12177, 12179, 12181, 12183, 12185, 12187, 12189, 12191, 12193, 12195, 12197, 12199, 12201, 12203, 12205, 12207, 12209, 12211, 12213, 12215, 12217, 12219, 12221, 12223, 12225, 12227, 12229, 12231, 12233, 12235, 12237, 12239, 12241, 12243, 12245, 12247, 12249, 12251, 12253, 12255, 12257, 12259, 12261, 12263, 12265, 12267, 12269, 12271, 12273, 12275, 12277, 12279, 12281, 12283, 12285, 12287, 12289, 12291, 12293, 12295, 12297, 12299, 12301, 12303, 12305, 12307, 12309, 12311, 12313, 12315, 12317, 12319, 12321, 12323, 12325, 12327, 12329, 12331, 12333, 12335, 12337, 12339, 12341, 12343, 12345, 12347, 12349, 12351, 12353, 12355, 12357, 12359, 12361, 12363, 12365, 12367, 12369, 12371, 12373, 12375, 12377, 12379, 12381, 12383, 12385, 12387, 12389, 12391, 12393, 12395, 12397, 12399, 12401, 12403, 12405, 12407, 12409, 12411, 12413, 12415, 12417, 12419, 12421, 12423, 12425, 12427, 12429, 12431, 12433, 12435, 12437, 12439, 12441, 12443, 12445, 12447, 12449, 12451, 12453, 12455, 12457, 12459, 12461, 12463, 12465, 12467, 12469, 12471, 12473, 12475, 12477, 12479, 12481, 12483, 12485, 12487, 12489, 12491, 12493, 12495, 12497, 
12499, 12501, 12503, 12505, 12507, 12509, 12511, 12513, 12515, 12517, 12519, 12521, 12523, 12525, 12527, 12529, 12531, 12533, 12535, 12537, 12539, 12541, 12543, 12545, 12547, 12549, 12551, 12553, 12555, 12557, 12559, 12561, 12563, 12565, 12567, 12569, 12571, 12573, 12575, 12577, 12579, 12581, 12583, 12585, 12587, 12589, 12591, 12593, 12595, 12597, 12599, 12601, 12603, 12605, 12607, 12609, 12611, 12613, 12615, 12617, 12619, 12621, 12623, 12625, 12627, 12629, 12631, 12633, 12635, 12637, 12639, 12641, 12643, 12645, 12647, 12649, 12651, 12653, 12655, 12657, 12659, 12661, 12663, 12665, 12667, 12669, 12671, 12673, 12675, 12677, 12679, 12681, 12683, 12685, 12687, 12689, 12691, 12693, 12695, 12697, 12699, 12701, 12703, 12705, 12707, 12709, 12711, 12713, 12715, 12717, 12719, 12721, 12723, 12725, 12727, 12729, 12731, 12733, 12735, 12737, 12739, 12741, 12743, 12745, 12747, 12749, 12751, 12753, 12755, 12757, 12759, 12761, 12763, 12765, 12767, 12769, 12771, 12773, 12775, 12777, 12779, 12781, 12783, 12785, 12787, 12789, 12791, 12793, 12795, 12797, 12799, 12801, 12803, 12805, 12807, 12809, 12811, 12813, 12815, 12817, 12819, 12821, 12823, 12825, 12827, 12829, 12831, 12833, 12835, 12837, 12839, 12841, 12843, 12845, 12847, 12849, 12851, 12853, 12855, 12857, 12859, 12861, 12863, 12865, 12867, 12869, 12871, 12873, 12875, 12877, 12879, 12881, 12883, 12885, 12887, 12889, 12891, 12893, 12895, 12897, 12899, 12901, 12903, 12905, 12907, 12909, 12911, 12913, 12915, 12917, 12919, 12921, 12923, 12925, 12927, 12929, 12931, 12933, 12935, 12937, 12939, 12941, 12943, 12945, 12947, 12949, 12951, 12953, 12955, 12957, 12959, 12961, 12963, 12965, 12967, 12969, 12971, 12973, 12975, 12977, 12979, 12981, 12983, 12985, 12987, 12989, 12991, 12993, 12995, 12997, 12999, 13001, 13003, 13005, 13007, 13009, 13011, 13013, 13015, 13017, 13019, 13021, 13023, 13025, 13027, 13029, 13031, 13033, 13035, 13037, 13039, 13041, 13043, 13045, 13047, 13049, 13051, 13053, 13055, 13057, 13059, 13061, 13063, 13065, 13067, 
13069, 13071, 13073, 13075, 13077, 13079, 13081, 13083, 13085, 13087, 13089, 13091, 13093, 13095, 13097, 13099, 13101, 13103, 13105, 13107, 13109, 13111, 13113, 13115, 13117, 13119, 13121, 13123, 13125, 13127, 13129, 13131, 13133, 13135, 13137, 13139, 13141, 13143, 13145, 13147, 13149, 13151, 13153, 13155, 13157, 13159, 13161, 13163, 13165, 13167, 13169, 13171, 13173, 13175, 13177, 13179, 13181, 13183, 13185, 13187, 13189, 13191, 13193, 13195, 13197, 13199, 13201, 13203, 13205, 13207, 13209, 13211, 13213, 13215, 13217, 13219, 13221, 13223, 13225, 13227, 13229, 13231, 13233, 13235, 13237, 4588, 4589, 13241, 13243, 13245, 13247, 13249, 4610, 4611, 13253, 13255, 13257, 13259, 13261, 13263, 13265, 13267, 13269, 13271, 13273, 13275, 13277, 13279, 13281, 13283, 13285, 13287, 13289, 13291, 13293, 13295, 13297, 13299, 13301, 13303, 13305, 13307, 13309, 4743, 4744, 4747, 4748, 4751, 4752, 4755, 4756, 4759, 4760, 13321, 13323, 13325, 13327, 13329, 13331, 13333, 13335, 4815, 4816, 4818, 4819, 4820, 4821, 4829, 4830, 4832, 4833, 4834, 4835, 4838, 4841, 4844, 4845, 4848, 4849, 4866, 4869, 4872, 4873, 4876, 4877, 4878, 4879, 4880, 4881, 4888, 4889, 4890, 4891, 4892, 4893, 4894, 4895, 4896, 4897, 4898, 4899, 4900, 4901, 4902, 4903, 4904, 4905, 4906, 4907, 4910, 4911, 4916, 4917, 4920, 4921, 4928, 4929, 4930, 4931, 4932, 4933, 4935, 4936, 4939, 4940, 4945, 4946, 4947, 4949, 4950, 4952, 4953, 4954, 4955, 4956, 4957, 4958, 4959, 4964, 4965, 4969, 4970, 4971, 4972, 4982, 4984, 4987, 4990, 4998, 4999, 5002, 5003, 5006, 5007, 5010, 5011, 5030, 5031, 5034, 5035, 5051, 5052, 5053, 5054, 5055, 5056, 5065, 5066, 5067, 5068, 5069, 5070, 5074, 5075, 5078, 5079, 5082, 5083, 5086, 5087, 5094, 5095, 5098, 5099, 5102, 5103, 5106, 5107, 5110, 5111, 5114, 5115, 5118, 5119, 5122, 5123, 5130, 5131, 5134, 5135, 5138, 5139, 5140, 5141, 5143, 5144, 5151, 5152, 5159, 5160, 5161, 5162, 5165, 5166, 5176, 5177, 5183, 5184, 5195, 5196, 5199, 5200, 5201, 5202, 5204, 5205, 5209, 5210, 5211, 5212, 5213, 5214, 
5219, 5220, 5222, 5223, 5225, 5226, 5227, 5228, 5231, 5232, 5237, 5238, 5243, 5244, 13522, 13524, 13526, 13528, 13530, 13532, 13534, 13536, 13538, 5309, 5312, 5337, 5338, 13544, 13546, 13548, 13550, 13552, 13554, 13556, 13558, 13560, 13562, 13564, 13566, 13568, 13570, 13572, 13574, 13576, 13578, 13580, 13582, 5441, 5442, 5464, 5465, 13588, 13590, 13592, 13594, 13596, 13598, 13600, 13602, 13604, 13606, 13608, 5530, 5531, 5535, 5536, 13614, 13616, 13618, 13620, 13622, 13624, 13626, 13628, 13630, 13632, 13634, 13636, 13638, 13640, 13642, 13644, 5607, 5608, 5609, 5610, 5637, 5638, 5643, 5644, 5647, 5648, 5653, 5654, 5655, 5656, 5657, 5658, 5661, 5662, 5667, 5668, 5677, 5678, 5681, 5682, 5686, 5687, 5692, 5693, 5697, 5698, 5701, 5702, 5709, 5710, 5713, 5716, 5721, 5722, 5724, 5725, 5726, 5727, 5728, 5729, 5743, 5744, 5747, 5748, 5751, 5752, 5753, 5754, 5761, 5762, 5763, 5764, 5765, 5766, 5768, 5769, 5770, 5771, 5775, 5776, 5777, 5778, 5779, 5780, 5781, 5782, 5785, 5786, 5789, 5790, 5793, 5794, 5795, 5796, 5799, 5800, 5801, 5802, 5811, 5812, 5814, 5815, 5819, 5820, 5821, 5822, 5827, 5828, 5835, 5836, 5840, 5841, 5844, 5845, 5870, 5871, 5872, 5873, 5878, 5879, 5904, 5905, 5924, 5925, 5937, 5938, 5941, 5942, 5945, 5946, 5949, 5950, 5951, 5952, 5957, 5958, 5963, 5964, 5967, 5968, 5983, 5984, 5987, 5988, 5989, 5990, 5993, 5994, 5997, 5998, 6023, 6024, 6049, 6050, 13784, 13786, 13788, 13790, 13792, 13794, 13796, 13798, 13800, 6107, 6108, 13804, 13806, 13808, 13810, 13812, 13814, 13816, 13818, 6141, 6142, 6148, 6149, 6152, 6153, 13826, 13828, 6165, 6166, 6169, 6170, 13834, 13836, 13838, 13840, 13842, 13844, 13846, 13848, 6226, 6227, 6246, 6247, 6268, 6269, 13856, 13858, 13860, 13862, 13864, 13866, 13868, 13870, 6313, 6314, 6315, 6316, 6318, 6319, 6320, 6321, 6326, 6336, 6337, 6338, 6339, 6340, 6348, 6349, 6350, 6351, 6352, 6360, 6361, 6362, 6363, 6364, 6365, 6369, 6370, 6371, 6372, 6404, 6421, 6422, 6425, 6426, 6461, 6462, 6463, 6464, 6470, 6471, 6473, 6474, 6475, 6476, 6481, 
6482, 6489, 6490, 6495, 6496, 6499, 6500, 6503, 6504, 6509, 6510, 6515, 6518, 6519, 6520, 6522, 6523, 6526, 6527, 6530, 6531, 6536, 6537, 6538, 6539, 6540, 6541, 6542, 6543, 6551, 6552, 6558, 6559, 6562, 6563, 6566, 6567, 6570, 6571, 6574, 6575, 6578, 6579, 6582, 6583, 6590, 6591, 6593, 6594, 6598, 6599, 6602, 6603, 6606, 6607, 6610, 6611, 6614, 6615, 6618, 6619, 6620, 6621, 6630, 6631, 6634, 6635, 6640, 6641, 6645, 6646, 6650, 6651, 6655, 6657, 6665, 6666, 6668, 6669, 6670, 6671, 6677, 6680, 6681, 6682, 6684, 6685, 6686, 6687, 6688, 6689, 6692, 6693, 6696, 6697, 6700, 6701, 6704, 6705, 6712, 6715, 6718, 6719, 6722, 6723, 6751, 6754, 6756, 6758, 6763, 6766, 6770, 6771, 6776, 6777, 6778, 6779, 6780, 6784, 6785, 6786, 6791, 6792, 6795, 6796, 6799, 6800, 6803, 6804, 6807, 6808, 6813, 6814, 6819, 6820, 6823, 6824, 6827, 6828, 6831, 6834, 6837, 6838, 6841, 6842, 6843, 6844, 6846, 6847, 6851, 6854, 6857, 6860, 6861, 6862, 6863, 6867, 6868, 6871, 6872, 6874, 6876, 6877, 6879, 6880, 6884, 6887, 6888, 6889, 6894, 6895, 14088, 14090, 14092, 6934, 6935, 6938, 6939, 14098, 14100, 14102, 14104, 14106, 14108, 14110, 14112, 14114, 14116, 14118, 14120, 14122, 7017, 7018, 7034, 7037, 7040, 7043, 7045, 7046, 7049, 7052, 7055, 7058, 7061, 7064, 7067, 7070, 14140, 14142, 14144, 14146, 14148, 14150, 14152, 14154, 14156, 14158, 14160, 14162, 14164, 14166, 14168, 14170, 14172, 14174, 14176, 14178, 14180, 14182, 14184, 14186, 14188, 14190, 14192, 14194, 14196, 7257, 7258, 7259, 7260, 14202, 7297, 7298, 14206, 14208, 7310, 7311, 14212, 7321, 7322, 7323, 7324, 14218, 14220, 14222, 14224, 14226, 14228, 14230, 14232, 7701, 7704, 14236, 14238, 7721, 7722, 7723, 7724, 7727, 7728, 7729, 7730, 14248, 14250, 14252, 14254, 7775, 7776, 7777, 7778, 7784, 7789, 7790, 7791, 7792, 7798, 14266, 7901, 7902, 7910, 7911, 14272, 14274, 14276, 7926, 7927, 7935, 7936, 7937, 7938, 7939, 7940, 7941, 7942, 7945, 7946, 14290, 14292, 14294, 14296, 14298, 14300, 14302, 8457, 8458, 8459, 14307, 14309, 14311, 14313, 
8495, 8496, 14317, 14319, 8591, 8592, 8593, 8594, 14325, 8603, 14328, 14330, 14332, 14334, 14336, 14338, 14340, 14342, 8693, 8694, 8695, 8696, 8697, 8698, 8703, 8708, 14352, 14354, 14356, 14358, 14360, 14362, 9068, 9069, 9073, 9074, 14368, 14370, 14372, 14374, 9115, 9116, 9117, 9125, 9126, 9127, 9128, 14383, 14385, 14387, 14389, 14391, 14393, 14395, 14397, 9213, 9222, 9225, 9228, 9232, 9233, 9234, 9610, 9611, 9791, 9801, 9980, 9981, 9983, 9984, 9988, 9989, 9991, 9992, 10007, 10009, 10011, 10013, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 15708, 15715, 14593, 14592, 14594, 14596, 14595, 14597, 14599, 14598, 14601, 14600, 14603, 14602, 14604, 14606, 14605, 15746, 14608, 14607, 15748, 14610, 14609, 15750, 14612, 14611, 15752, 14614, 14613, 15754, 14616, 14615, 14618, 14617, 14620, 14619, 14622, 14621, 14624, 14623, 14626, 14625, 14627, 14628, 14630, 14629, 14631, 14633, 14632, 14635, 14634, 
15764, 14636, 15766, 15768, 14638, 14637, 14639, 14641, 14640, 14643, 14642, 15770, 14644, 15772, 15774, 14665, 14645, 15326, 14646, 14667, 14647, 15778, 14669, 14648, 15780, 14650, 14649, 14651, 14653, 14652, 14655, 14654, 15323, 15322, 14708, 15133, 14656, 15321, 15320, 14665, 14664, 15326, 15325, 14667, 14666, 15784, 14669, 14668, 15786, 15788, 15790, 14658, 14657, 14660, 14659, 15051, 14661, 15792, 15794, 15796, 15798, 15800, 15802, 15804, 15806, 15808, 15810, 14663, 14662, 15812, 14665, 14664, 14667, 14666, 15814, 14669, 14668, 15816, 14671, 14670, 14673, 14672, 14675, 14674, 15818, 15820, 15822, 14676, 15824, 14678, 14677, 15826, 14680, 14679, 14682, 14681, 15829, 14683, 15831, 14684, 15833, 15835, 15837, 15839, 14686, 14685, 15468, 15415, 15841, 14687, 14689, 14688, 15843, 15845, 14691, 14690, 14693, 14692, 14695, 14694, 14697, 14696, 14698, 14699, 14701, 14700, 14703, 14702, 14705, 14704, 14707, 14706, 14708, 14710, 14709, 15851, 14712, 14711, 15853, 14714, 14713, 15855, 14716, 14715, 15857, 14718, 14717, 14720, 14719, 14722, 14721, 14723, 14724, 14725, 14726, 14728, 14727, 14730, 14729, 14732, 14731, 14734, 14733, 15859, 14736, 14735, 15861, 14738, 14737, 14739, 14740, 14742, 14741, 14744, 14743, 14746, 14745, 14747, 14749, 14748, 14751, 14750, 15863, 15865, 15867, 14752, 14754, 14753, 14755, 14757, 14756, 14759, 14758, 15869, 15871, 15873, 14760, 14762, 14761, 15875, 14764, 14763, 15877, 14766, 14765, 15879, 14768, 14767, 15881, 15336, 14769, 14770, 14771, 14773, 14772, 15883, 14775, 14774, 15885, 14777, 14776, 15887, 14779, 14778, 15889, 14781, 14780, 15891, 14783, 14782, 15893, 14785, 14784, 15895, 14787, 14786, 15897, 14789, 14788, 14791, 14790, 14793, 14792, 15899, 14795, 14794, 15901, 14797, 14796, 15903, 15905, 14798, 15907, 14800, 14799, 14802, 14801, 14804, 14803, 15909, 14806, 14805, 14808, 14807, 14810, 14809, 15911, 15913, 14812, 14811, 15915, 14814, 14813, 14816, 14815, 14818, 14817, 14820, 14819, 14821, 15917, 14823, 14822, 14825, 14824, 
14826, 15919, 14828, 14827, 14830, 14829, 14832, 14831, 14834, 14833, 14836, 14835, 15921, 14838, 14837, 15923, 15925, 14839, 15927, 14840, 14842, 14841, 15929, 15931, 15933, 14844, 14843, 15404, 14845, 15935, 14846, 15937, 14847, 15939, 15941, 14848, 14849, 15943, 14851, 14850, 14853, 14852, 15945, 14855, 14854, 14857, 14856, 15947, 14858, 14860, 14859, 14862, 14861, 14864, 14863, 14866, 14865, 15217, 15216, 14867, 15630, 15669, 15631, 15694, 15693, 15696, 15695, 14869, 14868, 14871, 14870, 15657, 15697, 15699, 14873, 14872, 14875, 14874, 14877, 14876, 14879, 14878, 15666, 15665, 14880, 14882, 14881, 14884, 14883, 14908, 14907, 15696, 15235, 14886, 14885, 14888, 14887, 15701, 14889, 14890, 14892, 14891, 15960, 15663, 15217, 14905, 15645, 15233, 14906, 15694, 15693, 15695, 15648, 15630, 15669, 14893, 14895, 14894, 14897, 14896, 14899, 14898, 14901, 14900, 14902, 15982, 15663, 14904, 14905, 15233, 15665, 14906, 14908, 14907, 15696, 15235, 14909, 15668, 14910, 14912, 14911, 14914, 14913, 14916, 14915, 14917, 14918, 15984, 14921, 14920, 14923, 14922, 14925, 14924, 14927, 14926, 14929, 14928, 14931, 14930, 14933, 14932, 14935, 14934, 14936, 14938, 14937, 15997, 14940, 14939, 14941, 15999, 14942, 15075, 15028, 14943, 14944, 16017, 16019, 14946, 14945, 14948, 14947, 14949, 14951, 14950, 14953, 14952, 14955, 14954, 14956, 14957, 14958, 14959, 14961, 14960, 14963, 14962, 15530, 14965, 14964, 14966, 14967, 14969, 14968, 16021, 14971, 14970, 14973, 14972, 16023, 14975, 14974, 16025, 14976, 14977, 14979, 14978, 16027, 16029, 16031, 14981, 14980, 16033, 14983, 14982, 14985, 14984, 16035, 14987, 14986, 14988, 14990, 14989, 14991, 14993, 14992, 16037, 14995, 14994, 16039, 14996, 14998, 14997, 16041, 15524, 15531, 14999, 15000, 16043, 15001, 15002, 15003, 16045, 15005, 15004, 16047, 15007, 15006, 15009, 15008, 15011, 15010, 16049, 15013, 15012, 15015, 15014, 15017, 15016, 15019, 15018, 16053, 15020, 16055, 16057, 16059, 15022, 15021, 15023, 15024, 15025, 15026, 15027, 15028, 
15029, 15031, 15030, 15033, 15032, 16061, 15035, 15034, 16063, 15037, 15036, 16065, 16067, 15039, 15038, 15041, 15040, 15043, 15042, 16069, 16071, 16073, 15044, 16075, 16077, 15046, 15045, 15047, 16079, 16081, 16083, 16085, 15049, 15048, 16087, 15050, 15051, 16089, 15053, 15052, 16091, 16093, 15055, 15054, 16095, 16097, 15057, 15056, 15059, 15058, 15060, 15062, 15061, 15063, 16099, 15064, 16101, 15065, 15067, 15066, 16103, 16105, 15069, 15068, 15071, 15070, 16107, 15073, 15072, 15074, 15377, 15075, 15076, 16109, 15078, 15077, 15079, 16111, 15081, 15080, 16113, 15083, 15082, 15084, 15086, 15085, 15088, 15087, 15090, 15089, 15092, 15091, 15094, 15093, 15095, 15097, 15096, 15099, 15098, 15101, 15100, 15103, 15102, 15105, 15104, 16115, 16117, 15107, 15106, 15109, 15108, 16119, 15111, 15110, 15113, 15112, 15115, 15114, 15117, 15116, 15119, 15118, 15121, 15120, 15122, 15123, 15124, 15125, 15126, 15128, 15127, 15129, 15131, 15130, 15133, 15132, 16121, 15135, 15134, 15136, 15137, 15139, 15138, 15140, 15142, 15141, 15143, 15144, 15145, 15146, 15148, 15147, 15150, 15149, 15151, 16123, 15153, 15152, 15155, 15154, 15156, 15157, 15158, 15160, 15159, 15162, 15161, 16125, 15164, 15163, 16127, 15166, 15165, 16129, 15168, 15167, 16131, 16133, 15169, 15170, 15172, 15171, 16135, 15174, 15173, 15176, 15175, 16137, 15178, 15177, 16139, 15180, 15179, 15182, 15181, 15184, 15183, 15450, 15185, 15187, 15186, 15189, 15188, 15191, 15190, 16141, 15193, 15192, 16143, 16145, 15195, 15194, 16147, 15197, 15196, 16149, 15616, 15615, 15618, 15617, 15623, 15198, 15625, 15624, 15628, 15627, 15630, 15629, 15199, 15696, 15695, 15633, 15632, 15635, 15634, 15202, 15698, 15699, 15621, 15200, 16151, 15616, 15615, 15618, 15617, 15623, 15622, 15625, 15624, 15628, 15627, 15630, 15201, 15631, 15696, 15695, 15633, 15632, 15635, 15634, 15202, 15698, 15203, 15639, 15638, 16153, 15641, 15640, 15643, 15642, 15645, 15644, 15204, 15647, 15646, 15649, 15648, 15651, 15650, 15652, 15205, 15655, 15657, 15659, 15207, 
15206, 16164, 16174, 15208, 15210, 15209, 15212, 15211, 16176, 15654, 15213, 16178, 15229, 15215, 16182, 15661, 15660, 16184, 15217, 15216, 15218, 15220, 15219, 15641, 15640, 15643, 15642, 15645, 15644, 15225, 15647, 15646, 15649, 15648, 15651, 15650, 15228, 15653, 15698, 15222, 15221, 16194, 15641, 15223, 15643, 15642, 15224, 15232, 15225, 15646, 15226, 15649, 15648, 15650, 15227, 15228, 15655, 15657, 15230, 15229, 16196, 15641, 15231, 15643, 15642, 15233, 15232, 15667, 15694, 15234, 15696, 15235, 15630, 15669, 15652, 15236, 15237, 15238, 15656, 15240, 15239, 16198, 15242, 15241, 15243, 15245, 15244, 15246, 15248, 15247, 16208, 16210, 15249, 16212, 16214, 15251, 15250, 15252, 15253, 15255, 15254, 15257, 15256, 15259, 15258, 15261, 15260, 15262, 16217, 16219, 15264, 15263, 15266, 15265, 15268, 15267, 15269, 16222, 16224, 15271, 15270, 15272, 15274, 15273, 15276, 15275, 16227, 16229, 16231, 15277, 15279, 15278, 16233, 16235, 15281, 15280, 15282, 15284, 15283, 15285, 15287, 15286, 15289, 15288, 15291, 15290, 15293, 15292, 15295, 15294, 15297, 15296, 15299, 15298, 15301, 15300, 15303, 15302, 15305, 15304, 15306, 15307, 15308, 15310, 15309, 15312, 15311, 15314, 15313, 15316, 15315, 15317, 15319, 15318, 15321, 15320, 15323, 15322, 15324, 15326, 15325, 16238, 15328, 15327, 16240, 15330, 15329, 15332, 15331, 15334, 15333, 15336, 15335, 15338, 15337, 15340, 15339, 15342, 15341, 15343, 15344, 15345, 15347, 15346, 15349, 15348, 15350, 15351, 15353, 15352, 15354, 15356, 15355, 15358, 15357, 15359, 15361, 15360, 15362, 16242, 16244, 15364, 15363, 15366, 15365, 15367, 16246, 15368, 16248, 16250, 15370, 15369, 15372, 15371, 16252, 15373, 15375, 15374, 15377, 15376, 15378, 16254, 15379, 15381, 15380, 15382, 16256, 15384, 15383, 16258, 15386, 15385, 16260, 15388, 15387, 15390, 15389, 16262, 15392, 15391, 15394, 15393, 15396, 15395, 16266, 15397, 16268, 15399, 15398, 16270, 15401, 15400, 16272, 15569, 15402, 15404, 15403, 16274, 16277, 16280, 15405, 15407, 15406, 15408, 15409, 
15410, 15411, 16282, 15413, 15412, 15414, 15415, 15416, 16284, 15418, 15417, 16286, 15420, 15419, 16288, 15422, 15421, 16290, 15424, 15423, 16292, 15426, 15425, 16294, 15428, 15427, 16296, 15430, 15429, 15431, 15433, 15432, 15434, 16298, 15435, 16300, 15436, 15438, 15437, 16302, 15440, 15439, 16304, 15442, 15441, 16306, 15444, 15443, 16308, 15446, 15445, 16310, 15448, 15447, 16312, 16314, 15450, 15449, 15452, 15451, 15453, 15454, 15455, 15456, 16316, 15458, 15457, 16318, 15459, 15460, 15461, 15462, 16320, 15464, 15463, 15465, 16322, 15467, 15466, 15468, 16324, 15470, 15469, 15471, 15472, 15474, 15473, 15475, 15477, 15476, 15479, 15478, 16328, 15480, 16330, 16332, 15482, 15481, 15483, 15485, 15484, 15487, 15486, 16336, 15488, 16338, 16341, 15490, 15489, 16344, 15492, 15491, 16346, 15494, 15493, 16348, 15496, 15495, 16350, 15498, 15497, 15500, 15499, 15502, 15501, 15504, 15503, 15506, 15505, 16354, 15508, 15507, 16356, 15510, 15509, 15511, 15512, 15513, 15514, 15515, 15517, 15516, 15518, 15520, 15519, 15522, 15521, 15524, 15523, 15526, 15525, 15528, 15527, 15529, 15531, 15530, 15533, 15532, 15535, 15534, 15537, 15536, 15538, 15539, 15540, 15541, 15543, 15542, 15545, 15544, 15547, 15546, 15548, 16364, 15550, 15549, 15552, 15551, 16366, 15553, 15555, 15554, 16372, 15557, 15556, 15559, 15558, 16374, 15561, 15560, 16376, 15563, 15562, 16378, 15565, 15564, 16380, 15567, 15566, 16382, 15569, 15568, 15571, 15570, 16384, 15573, 15572, 15575, 15574, 16386, 15577, 15576, 16388, 15579, 15578, 16390, 15581, 15580, 15583, 15582, 15585, 15584, 16394, 15587, 15586, 16396, 16398, 15588, 16400, 15589, 15591, 15590, 15593, 15592, 15595, 15594, 15597, 15596, 15598, 15600, 15599, 16409, 15602, 15601, 16411, 15603, 15604, 16414, 15605, 16416, 15606, 15608, 15607, 15610, 15609, 16420, 15612, 15611, 15614, 15613, 16422, 15616, 15615, 15618, 15617, 15619, 15621, 15620, 15623, 15622, 15625, 15624, 15645, 15644, 15626, 15628, 15627, 15630, 15629, 15631, 15633, 15632, 15635, 15634, 15637, 
15636, 16427, 15639, 15638, 16429, 15641, 15640, 15643, 15642, 15645, 15644, 15667, 15647, 15646, 15649, 15648, 15651, 15650, 15652, 15654, 15653, 15656, 15655, 15658, 15657, 15659, 15661, 15660, 16444, 15663, 15662, 15664, 15666, 15665, 15667, 15694, 15693, 15696, 15695, 15669, 15668, 15670, 15672, 15671, 15674, 15673, 15676, 15675, 15678, 15677, 15679, 16450, 15682, 15681, 15684, 15683, 15686, 15685, 15688, 15687, 15690, 15689, 15692, 15691, 15694, 15693, 15696, 15695, 15698, 15697, 15699, 15701, 15700, 15702, 15704, 15703, 15706, 15705, 15710, 15709, 15712, 15711, 15713, 16369, 16369, 15717, 15716, 15718, 15720, 15719, 15722, 15721, 15724, 15723, 15726, 15725, 15727, 15728, 15729, 16489, 16491, 15731, 15730, 15732, 16470, 15733, 15734, 16470, 15735, 15741, 15740, 15736, 15737, 15739, 15738, 15741, 15740, 15742, 15744, 15743, 16494, 15755, 15757, 15756, 16498, 15758, 15760, 15759, 15761, 15762, 16501, 16369, 16369, 16369, 16369, 16370, 16370, 16407, 16407, 16407, 16407, 16369, 16369, 16407, 16407, 16407, 16407, 15949, 15948, 15951, 15950, 15953, 15952, 15955, 15954, 15956, 15962, 15961, 15964, 15963, 15966, 15965, 15968, 15967, 15970, 15969, 15971, 15972, 15973, 15974, 16517, 16519, 15976, 15975, 16521, 16523, 15977, 15979, 15978, 15980, 16529, 16531, 16474, 16473, 16009, 16007, 15985, 15986, 15987, 15988, 15989, 16534, 16536, 16474, 15990, 15991, 16008, 15992, 15993, 15995, 15994, 16540, 16001, 16000, 16003, 16002, 16004, 16005, 16006, 16542, 16007, 16008, 16547, 16474, 16473, 16009, 16011, 16010, 16013, 16012, 16549, 16551, 16553, 16555, 16015, 16014, 16407, 16407, 16407, 16407, 16369, 16369, 16370, 16370, 16370, 16370, 16369, 16369, 16369, 16369, 16370, 16370, 16407, 16407, 16407, 16407, 16566, 16155, 16154, 16156, 16158, 16157, 16160, 16159, 16162, 16161, 16573, 16166, 16165, 16167, 16168, 16170, 16169, 16172, 16171, 16577, 16579, 16180, 16179, 16186, 16185, 16188, 16187, 16189, 16191, 16190, 16192, 16593, 16595, 16199, 16200, 16202, 16201, 16203, 16204, 
16206, 16205, 16369, 16369, 16369, 16369, 16370, 16370, 16407, 16407, 16369, 16369, 16407, 16407, 16605, 16424, 16423, 16425, 16607, 16613, 16431, 16430, 16433, 16432, 16435, 16434, 16436, 16616, 16618, 16438, 16437, 16439, 16441, 16440, 16442, 16468, 16459, 16470, 16460, 16472, 16471, 16474, 16473, 16475, 16462, 16461, 16463, 16465, 16464, 16466, 16468, 16467, 16470, 16469, 16472, 16471, 16474, 16473, 16475, 16477, 16476, 16478, 16480, 16479, 16481, 16603, 16599, 16598, 16482, 16602, 16483, 16485, 16484, 16486, 16487, 16586, 16587, 16588, 16589, 16492, 16496, 16625, 16626, 16495, 16622, 16588, 16589, 16623, 16496, 16625, 16626, 16499, 16602, 16601, 16504, 16599, 16505, 16600, 16507, 16506, 16508, 16510, 16509, 16511, 16514, 16515, 16588, 16570, 16623, 16624, 16625, 16626, 16524, 16525, 16569, 16526, 16571, 16624, 16625, 16527, 16538, 16569, 16570, 16544, 16543, 16545, 16635, 16556, 16556, 16557, 16557, 16559, 16558, 16561, 16560, 16563, 16562, 16564, 16568, 16587, 16569, 16570, 16571, 16624, 16625, 16626, 16574, 16575, 16582, 16583, 16584, 16585, 16582, 16583, 16584, 16585, 16580, 16587, 16588, 16589, 16581, 16582, 16583, 16584, 16585, 16586, 16587, 16588, 16589, 16590, 16599, 16598, 16600, 16602, 16601, 16603, 16639, 16608, 16641, 16609, 16643, 16610, 16645, 16611, 16619, 16620, 16621, 16622, 16623, 16624, 16625, 16626, 16627, 16627, 16632, 16632, 16636, 16636, 16637, 16637, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 4668, 4669, 4670, 4671, 4672, 4673, 4674, 4675, 4676, 4677, 4678, 4679, 4680, 4741, 4742, 4745, 4746, 4749, 4750, 4753, 4754, 4757, 4758, 4761, 4762, 4763, 4764, 
4765, 4766, 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, 4808, 4809, 4810, 4811, 4812, 4813, 4814, 4817, 4822, 4823, 4824, 4825, 4826, 4827, 4828, 4831, 4836, 4837, 4839, 4840, 4842, 4843, 4846, 4847, 4850, 4851, 4852, 4853, 4854, 4855, 4856, 4857, 4858, 4859, 4860, 4861, 4862, 4863, 4864, 4865, 4867, 4868, 4870, 4871, 4874, 4875, 4882, 4883, 4884, 4885, 4886, 4887, 4908, 4909, 4912, 4913, 4914, 4915, 4918, 4919, 4922, 4923, 4924, 4925, 4926, 4927, 4934, 4937, 4938, 4941, 4942, 4943, 4944, 4948, 4951, 4960, 4961, 4962, 4963, 4966, 4967, 4968, 4973, 4974, 4975, 4976, 4977, 4978, 4979, 4980, 4981, 4983, 4985, 4986, 4988, 4989, 4991, 4992, 4993, 4994, 4995, 4996, 4997, 5000, 5001, 5004, 5005, 5008, 5009, 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5020, 5021, 5022, 5023, 5024, 5025, 5026, 5027, 5028, 5029, 5032, 5033, 5036, 5037, 5038, 5039, 5040, 5041, 5042, 5043, 5044, 5045, 5046, 5047, 5048, 5049, 5050, 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5071, 5072, 5073, 5076, 5077, 5080, 5081, 5084, 5085, 5088, 5089, 5090, 5091, 5092, 5093, 5096, 5097, 5100, 5101, 5104, 5105, 5108, 5109, 5112, 5113, 5116, 5117, 5120, 5121, 5124, 5125, 5126, 5127, 5128, 5129, 5132, 5133, 5136, 5137, 5142, 5145, 5146, 5147, 5148, 5149, 5150, 5153, 5154, 5155, 5156, 5157, 5158, 5163, 5164, 5167, 5168, 5169, 5170, 5171, 5172, 5173, 5174, 5175, 5178, 5179, 5180, 5181, 5182, 5185, 5186, 5187, 5188, 5189, 5190, 5191, 5192, 5193, 5194, 5197, 5198, 5203, 5206, 5207, 5208, 5215, 5216, 5217, 5218, 5221, 5224, 5229, 5230, 5233, 5234, 5235, 5236, 5239, 5240, 5241, 5242, 5245, 5246, 5247, 5248, 5249, 5286, 5287, 5288, 5289, 5290, 5291, 5292, 5293, 5294, 5295, 5296, 5297, 5298, 5299, 5300, 5301, 5302, 5303, 5304, 5305, 5306, 5307, 5308, 5310, 5311, 5313, 5314, 5315, 5316, 5317, 5318, 5319, 5320, 5321, 5322, 5323, 5324, 5325, 5326, 5327, 5328, 5329, 5330, 5331, 5332, 5333, 5334, 5335, 5336, 5419, 5420, 5421, 5422, 5423, 5424, 5425, 5426, 5427, 5428, 5429, 5430, 5431, 5432, 5433, 5434, 5435, 
5436, 5437, 5438, 5439, 5440, 5443, 5444, 5445, 5446, 5447, 5448, 5449, 5450, 5451, 5452, 5453, 5454, 5455, 5456, 5457, 5458, 5459, 5460, 5461, 5462, 5463, 5466, 5467, 5468, 5469, 5470, 5471, 5472, 5473, 5474, 5475, 5476, 5477, 5523, 5524, 5525, 5526, 5527, 5528, 5529, 5532, 5533, 5534, 5537, 5538, 5539, 5540, 5541, 5611, 5612, 5613, 5614, 5615, 5616, 5617, 5618, 5619, 5620, 5621, 5622, 5623, 5624, 5625, 5626, 5627, 5628, 5629, 5630, 5631, 5632, 5633, 5634, 5635, 5636, 5639, 5640, 5641, 5642, 5645, 5646, 5649, 5650, 5651, 5652, 5659, 5660, 5663, 5664, 5665, 5666, 5669, 5670, 5671, 5672, 5673, 5674, 5675, 5676, 5679, 5680, 5683, 5684, 5685, 5688, 5689, 5690, 5691, 5694, 5695, 5696, 5699, 5700, 5703, 5704, 5705, 5706, 5707, 5708, 5711, 5712, 5714, 5715, 5717, 5718, 5719, 5720, 5723, 5730, 5731, 5732, 5733, 5734, 5735, 5736, 5737, 5738, 5739, 5740, 5741, 5742, 5745, 5746, 5749, 5750, 5755, 5756, 5757, 5758, 5759, 5760, 5767, 5772, 5773, 5774, 5783, 5784, 5787, 5788, 5791, 5792, 5797, 5798, 5803, 5804, 5805, 5806, 5807, 5808, 5809, 5810, 5813, 5816, 5817, 5818, 5823, 5824, 5825, 5826, 5829, 5830, 5831, 5832, 5833, 5834, 5837, 5838, 5839, 5842, 5843, 5846, 5847, 5848, 5849, 5850, 5851, 5852, 5853, 5854, 5855, 5856, 5857, 5858, 5859, 5860, 5861, 5862, 5863, 5864, 5865, 5866, 5867, 5868, 5869, 5874, 5875, 5876, 5877, 5880, 5881, 5882, 5883, 5884, 5885, 5886, 5887, 5888, 5889, 5890, 5891, 5892, 5893, 5894, 5895, 5896, 5897, 5898, 5899, 5900, 5901, 5902, 5903, 5906, 5907, 5908, 5909, 5910, 5911, 5912, 5913, 5914, 5915, 5916, 5917, 5918, 5919, 5920, 5921, 5922, 5923, 5926, 5927, 5928, 5929, 5930, 5931, 5932, 5933, 5934, 5935, 5936, 5939, 5940, 5943, 5944, 5947, 5948, 5953, 5954, 5955, 5956, 5959, 5960, 5961, 5962, 5965, 5966, 5969, 5970, 5971, 5972, 5973, 5974, 5975, 5976, 5977, 5978, 5979, 5980, 5981, 5982, 5985, 5986, 5991, 5992, 5995, 5996, 5999, 6000, 6001, 6002, 6003, 6004, 6005, 6006, 6007, 6008, 6009, 6010, 6011, 6012, 6013, 6014, 6015, 6016, 6017, 6018, 6019, 6020, 
6021, 6022, 6025, 6026, 6027, 6028, 6029, 6030, 6031, 6032, 6033, 6034, 6035, 6036, 6037, 6038, 6039, 6040, 6041, 6042, 6043, 6044, 6045, 6046, 6047, 6048, 6087, 6088, 6089, 6090, 6091, 6092, 6093, 6094, 6095, 6096, 6097, 6098, 6099, 6100, 6101, 6102, 6103, 6104, 6105, 6106, 6143, 6144, 6145, 6146, 6147, 6150, 6151, 6163, 6164, 6167, 6168, 6203, 6204, 6205, 6206, 6207, 6208, 6209, 6210, 6211, 6212, 6213, 6214, 6215, 6216, 6217, 6218, 6219, 6220, 6221, 6222, 6223, 6224, 6225, 6228, 6229, 6230, 6231, 6232, 6233, 6234, 6235, 6236, 6237, 6238, 6239, 6240, 6241, 6242, 6243, 6244, 6245, 6248, 6249, 6250, 6251, 6252, 6253, 6254, 6255, 6256, 6257, 6258, 6259, 6260, 6261, 6262, 6263, 6264, 6265, 6266, 6267, 6270, 6271, 6272, 6273, 6274, 6275, 6311, 6312, 6317, 6322, 6323, 6324, 6325, 6327, 6328, 6329, 6330, 6331, 6332, 6333, 6334, 6335, 6341, 6342, 6343, 6344, 6345, 6346, 6347, 6353, 6354, 6355, 6356, 6357, 6358, 6359, 6366, 6367, 6368, 6373, 6374, 6375, 6376, 6377, 6378, 6379, 6380, 6381, 6382, 6383, 6384, 6385, 6386, 6387, 6388, 6389, 6390, 6391, 6392, 6393, 6394, 6395, 6396, 6397, 6398, 6399, 6400, 6401, 6402, 6403, 6405, 6406, 6407, 6408, 6409, 6410, 6411, 6412, 6413, 6414, 6415, 6416, 6417, 6418, 6419, 6420, 6423, 6424, 6427, 6428, 6429, 6430, 6431, 6432, 6433, 6434, 6435, 6436, 6437, 6438, 6439, 6440, 6441, 6442, 6443, 6444, 6445, 6446, 6447, 6448, 6449, 6450, 6451, 6452, 6453, 6454, 6455, 6456, 6457, 6458, 6459, 6460, 6465, 6466, 6467, 6468, 6469, 6472, 6477, 6478, 6479, 6480, 6483, 6484, 6485, 6486, 6487, 6488, 6491, 6492, 6493, 6494, 6497, 6498, 6501, 6502, 6505, 6506, 6507, 6508, 6511, 6512, 6513, 6514, 6516, 6517, 6521, 6524, 6525, 6528, 6529, 6532, 6533, 6534, 6535, 6544, 6545, 6546, 6547, 6548, 6549, 6550, 6553, 6554, 6555, 6556, 6557, 6560, 6561, 6564, 6565, 6568, 6569, 6572, 6573, 6576, 6577, 6580, 6581, 6584, 6585, 6586, 6587, 6588, 6589, 6592, 6595, 6596, 6597, 6600, 6601, 6604, 6605, 6608, 6609, 6612, 6613, 6616, 6617, 6622, 6623, 6624, 6625, 6626, 6627, 
6628, 6629, 6632, 6633, 6636, 6637, 6638, 6639, 6642, 6643, 6644, 6647, 6648, 6649, 6652, 6653, 6654, 6656, 6658, 6659, 6660, 6661, 6662, 6663, 6664, 6667, 6672, 6673, 6674, 6675, 6676, 6678, 6679, 6683, 6690, 6691, 6694, 6695, 6698, 6699, 6702, 6703, 6706, 6707, 6708, 6709, 6710, 6711, 6713, 6714, 6716, 6717, 6720, 6721, 6724, 6725, 6726, 6727, 6728, 6729, 6730, 6731, 6732, 6733, 6734, 6735, 6736, 6737, 6738, 6739, 6740, 6741, 6742, 6743, 6744, 6745, 6746, 6747, 6748, 6749, 6750, 6752, 6753, 6755, 6757, 6759, 6760, 6761, 6762, 6764, 6765, 6767, 6768, 6769, 6772, 6773, 6774, 6775, 6781, 6782, 6783, 6787, 6788, 6789, 6790, 6793, 6794, 6797, 6798, 6801, 6802, 6805, 6806, 6809, 6810, 6811, 6812, 6815, 6816, 6817, 6818, 6821, 6822, 6825, 6826, 6829, 6830, 6832, 6833, 6835, 6836, 6839, 6840, 6845, 6848, 6849, 6850, 6852, 6853, 6855, 6856, 6858, 6859, 6864, 6865, 6866, 6869, 6870, 6873, 6875, 6878, 6881, 6882, 6883, 6885, 6886, 6890, 6891, 6892, 6893, 6896, 6897, 6898, 6899, 6913, 6914, 6915, 6916, 6917, 6918, 6919, 6920, 6921, 6922, 6923, 6924, 6925, 6926, 6927, 6928, 6929, 6930, 6931, 6932, 6933, 6936, 6937, 6994, 6995, 6996, 6997, 6998, 6999, 7000, 7001, 7002, 7003, 7004, 7005, 7006, 7007, 7008, 7009, 7010, 7011, 7012, 7013, 7014, 7015, 7016, 7019, 7020, 7021, 7022, 7023, 7024, 7025, 7026, 7027, 7028, 7029, 7030, 7031, 7032, 7033, 7035, 7036, 7038, 7039, 7041, 7042, 7044, 7047, 7048, 7050, 7051, 7053, 7054, 7056, 7057, 7059, 7060, 7062, 7063, 7065, 7066, 7068, 7069, 7071, 7072, 7073, 7074, 7075, 7076, 7077, 7078, 7079, 7080, 7201, 7202, 7207, 7208, 7217, 17007, 16996, 7223, 7224, 17012, 17010, 7230, 7231, 7232, 7237, 7238, 7239, 7240, 7241, 7242, 7243, 7244, 7245, 7246, 7247, 7261, 7262, 7263, 7264, 7265, 7266, 7267, 7268, 7269, 7270, 7271, 7272, 7273, 7274, 7275, 7276, 7277, 7278, 7279, 7299, 7300, 7301, 7312, 7317, 7318, 7319, 7320, 16950, 16949, 16961, 16960, 17007, 16996, 7374, 7375, 17012, 17010, 17007, 17006, 7411, 7412, 7415, 7416, 17012, 17010, 17032, 17031, 
7453, 7454, 7455, 7456, 17049, 17047, 17130, 17129, 17141, 17140, 7569, 7570, 7594, 7595, 7596, 7597, 7642, 7643, 7644, 7645, 7646, 7647, 7648, 7649, 7650, 7697, 7698, 7699, 7700, 7702, 7703, 7705, 7706, 7707, 7708, 7709, 7710, 7711, 7712, 7725, 7726, 7731, 7732, 7733, 7734, 7779, 7780, 7781, 7782, 7783, 7785, 7786, 7787, 7788, 7793, 7794, 7795, 7796, 7797, 7799, 7800, 7801, 17911, 17910, 17920, 17893, 17455, 17895, 7903, 7904, 7905, 7906, 7907, 7908, 7909, 7912, 7913, 7928, 7929, 7930, 7931, 7932, 7933, 7934, 7943, 7944, 17910, 17450, 17920, 17452, 17455, 17895, 17450, 17892, 17452, 17451, 17895, 17929, 17450, 17892, 17452, 17451, 17455, 17895, 8127, 8128, 8129, 8130, 17507, 17506, 8162, 8163, 8169, 8170, 8171, 8172, 8261, 8262, 8263, 8264, 8268, 8269, 8375, 8376, 8377, 8378, 8460, 8461, 8462, 8463, 8464, 8465, 8466, 8467, 8468, 8497, 8498, 8499, 8500, 8501, 8502, 8503, 8504, 8595, 8596, 8604, 8605, 8606, 8607, 8608, 8609, 8610, 8611, 8699, 8700, 8701, 8702, 8704, 8705, 8706, 8707, 17911, 17892, 17920, 17893, 17895, 17929, 17911, 17910, 17920, 17919, 17929, 17928, 8812, 8813, 8814, 8815, 8819, 8820, 8847, 8848, 18080, 18079, 18182, 18181, 18193, 18192, 8988, 8989, 9020, 9021, 9070, 9071, 9072, 9118, 9119, 9120, 9121, 9122, 9123, 9124, 9129, 9130, 9131, 9132, 9133, 9134, 9204, 9205, 9206, 9207, 9208, 9209, 9210, 9211, 9212, 9214, 9215, 9216, 9217, 9218, 9219, 9220, 9221, 9223, 9224, 9226, 9227, 9229, 9230, 9231, 9235, 9236, 9237, 9238, 9239, 9240, 9242, 9244, 9245, 9246, 9247, 9251, 9260, 9261, 9281, 9291, 18472, 18471, 9324, 9325, 9326, 9327, 9330, 9331, 9332, 9333, 18492, 9349, 9350, 9351, 9352, 9355, 9356, 9357, 9358, 18496, 9367, 18502, 9478, 9479, 9480, 9481, 9482, 9483, 9495, 9496, 9497, 9498, 9499, 9500, 9512, 9513, 9514, 9515, 9518, 9519, 9520, 9521, 18543, 18542, 18547, 18546, 9537, 9538, 9539, 9540, 9543, 9544, 9545, 9546, 18553, 18552, 18564, 18563, 9584, 9585, 9586, 18573, 18581, 9598, 9599, 9600, 18584, 18595, 18592, 18593, 18595, 18594, 9616, 9617, 
9618, 9619, 9747, 9748, 9749, 9750, 9751, 9752, 9753, 18618, 9766, 9767, 9768, 9769, 9772, 9773, 9774, 9775, 18628, 9792, 9802, 9805, 9806, 9809, 9810, 18638, 18637, 9821, 9822, 9825, 9826, 9835, 9836, 9837, 9838, 9839, 9850, 9851, 9854, 9855, 9864, 9865, 9866, 9867, 9868, 18650, 18649, 9965, 9966, 9967, 9968, 9969, 9970, 18671, 18685, 18675, 9982, 9985, 9990, 9993, 18676, 18685, 18684, 10006, 10008, 10010, 10012, 10016, 10017, 10018, 10019, 10024, 10025, 10032, 10033, 10543, 10544, 10550, 10551, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 18945, 18948, 18951, 18953, 18955, 18958, 18960, 18962, 18964, 18966, 18968, 18970, 18972, 18974, 18976, 18978, 18982, 18985, 18987, 18990, 18993, 18995, 18998, 19000, 19002, 19004, 19006, 19009, 19011, 19013, 19016, 19018, 19020, 19022, 19024, 19026, 19028, 19030, 19032, 19034, 19036, 19038, 19040, 19042, 19044, 19046, 19049, 19051, 19053, 19057, 19059, 19062, 19064, 19066, 19068, 19070, 19074, 19076, 19078, 19080, 19083, 19085, 19087, 19089, 19091, 19093, 19095, 19101, 19103, 19105, 19107, 19109, 19111, 19115, 19117, 19119, 19122, 19124, 19127, 19130, 19132, 19135, 19137, 19139, 19141, 19143, 19147, 19149, 19151, 19153, 19155, 19157, 19159, 19161, 19163, 19165, 19167, 19169, 19171, 19174, 19176, 19178, 19180, 19182, 19184, 19186, 19188, 19190, 19192, 19194, 19197, 19199, 19202, 19204, 19206, 19208, 19210, 19212, 19216, 19218, 19220, 19226, 19228, 19230, 19232, 19235, 19237, 19239, 19241, 19243, 19246, 19249, 19251, 19253, 19255, 19257, 19260, 19262, 19264, 19266, 19268, 19271, 19273, 19275, 19277, 19279, 19281, 19283, 19286, 19288, 19291, 19294, 19296, 19298, 19301, 19303, 19305, 19307, 19310, 19313, 19316, 19318, 19320, 19323, 19325, 19327, 19331, 19333, 19335, 19337, 19339, 19341, 19343, 19345, 19348, 19350, 19354, 19358, 19360, 19363, 19365, 19367, 19373, 19375, 19378, 19382, 19384, 19386, 
19388, 19392, 19394, 19396, 19398, 19400, 19403, 19406, 19408, 19411, 19420, 19422, 19424, 19426, 19428, 19430, 19432, 19434, 19437, 19446, 19448, 19450, 19452, 19454, 19456, 19458, 19461, 19464, 19468, 19470, 19472, 19474, 19477, 19482, 19484, 19486, 19488, 19494, 19497, 19499, 19502, 19504, 19506, 19508, 19510, 19513, 19515, 19517, 19519, 19521, 19523, 19525, 19527, 19529, 19531, 19533, 19535, 19537, 19544, 19547, 19549, 19551, 19555, 19558, 19564, 19566, 19569, 19571, 19576, 19578, 19580, 19582, 19584, 19588, 19590, 19592, 19594, 19596, 19598, 19600, 19602, 19604, 19606, 19608, 19610, 19612, 19614, 19616, 19618, 19620, 19622, 19624, 19626, 19629, 19631, 19633, 19635, 19638, 19640, 19642, 19644, 19646, 19648, 19650, 19653, 19655, 19657, 19659, 19662, 19664, 19666, 19668, 19671, 19673, 19675, 19682, 19685, 19687, 19691, 19693, 19695, 19698, 19700, 19702, 19704, 19707, 19709, 19711, 19716, 19718, 19720, 19722, 19725, 19727, 19729, 19734, 19736, 19738, 19740, 19743, 19745, 19747, 19754, 19756, 19759, 19762, 19765, 19769, 19771, 19773, 19775, 19778, 19780, 19782, 19785, 19788, 19790, 19793, 19795, 19798, 19801, 19803, 19805, 19807, 19809, 19811, 19813, 19815, 19817, 19819, 19824, 19826, 19828, 19830, 19833, 19835, 19837, 19840, 19842, 19844, 19846, 19848, 19850, 19852, 19854, 19856, 19861, 19863, 19867, 19870, 19872, 19875, 19878, 19880, 19884, 19886, 19889, 19891, 19895, 19898, 19900, 19902, 19904, 19906, 19908, 19910, 19913, 19915, 19917, 19919, 19922, 19928, 19933, 19935, 19937, 19939, 19941, 19943, 19945, 19948, 19953, 19955, 19957, 19959, 19961, 19963, 19965, 19967, 19973, 19979, 19982, 19985, 19989, 19992, 19994, 19997, 20000, 20002, 20005, 20007, 20009, 20011, 20013, 20015, 20017, 20019, 20021, 20023, 20025, 20032, 20035, 20037, 20039, 20041, 20043, 20046, 20048, 20050, 20052, 20058, 20060, 20062, 20065, 20067, 20070, 20072, 20074, 20076, 20078, 20080, 20082, 20084, 20086, 20088, 20090, 20092, 20094, 20096, 20098, 20100, 20102, 20106, 20108, 20110, 20112, 
20115, 20117, 20123, 20125, 20127, 20129, 20131, 20133, 20136, 20138, 20140, 20142, 20145, 20147, 20150, 20152, 20154, 20156, 20158, 20160, 20162, 20165, 20167, 20169, 20172, 20174, 20176, 20179, 20181, 20184, 20187, 20189, 20191, 20194, 20196, 20198, 20200, 20203, 20205, 20207, 20209, 20211, 20213, 20215, 20217, 20219, 20222, 20225, 20227, 20229, 20231, 7218, 7219, 17545, 16369, 16370, 16370, 7227, 7228, 20240, 20243, 20245, 20247, 20249, 20254, 20257, 20260, 20262, 20266, 20268, 20271, 18980, 18979, 20274, 20277, 7328, 7329, 16947, 7334, 7335, 16958, 7360, 7361, 17545, 16369, 20287, 16370, 16370, 7380, 7381, 18122, 7397, 7398, 16369, 16370, 7417, 7418, 17033, 7444, 7445, 17045, 17043, 20301, 20303, 16412, 16412, 7459, 7460, 19098, 19553, 19097, 19561, 19072, 19071, 19098, 19553, 19097, 19112, 19561, 17131, 7520, 7521, 17142, 7526, 7527, 20030, 20028, 20027, 19858, 20055, 18313, 17694, 16369, 17279, 18334, 18313, 18315, 20313, 20315, 18336, 18334, 19195, 19200, 17250, 17248, 17263, 17261, 19976, 19224, 19223, 18334, 17279, 20317, 20319, 20321, 20323, 20326, 20328, 20330, 20332, 20334, 20340, 20343, 19308, 19329, 20346, 20349, 20355, 20358, 20361, 7805, 7806, 7810, 7811, 17930, 7816, 7817, 19822, 19767, 20028, 18120, 17403, 19971, 19352, 19465, 19926, 19925, 19873, 19930, 19876, 18315, 18313, 19356, 19355, 19926, 19974, 18334, 18269, 20369, 20371, 20378, 20381, 20383, 20385, 7950, 7951, 7955, 7956, 17930, 7961, 7962, 19822, 19538, 19361, 19370, 19368, 19539, 19380, 19379, 8002, 8003, 8007, 8008, 17930, 8013, 8014, 19822, 20028, 19389, 8032, 8033, 8037, 8038, 17930, 8043, 8044, 19822, 19538, 19409, 20055, 19858, 18122, 17546, 19971, 19968, 19415, 19976, 19974, 19987, 19986, 17573, 17571, 20405, 20407, 16412, 16412, 19929, 19896, 19876, 8151, 8152, 17504, 19439, 19438, 20411, 19441, 19440, 16369, 19444, 19443, 20413, 20415, 19495, 19930, 19876, 18122, 17538, 19971, 19923, 19465, 19926, 19925, 19873, 19930, 19876, 17546, 17545, 19971, 19970, 19465, 19976, 19974, 
19466, 19983, 17573, 17571, 19490, 19489, 20417, 20419, 19492, 19491, 16370, 19495, 19930, 19876, 20030, 19538, 20027, 19540, 19539, 19553, 20030, 20028, 19561, 19559, 17725, 17694, 16369, 16369, 18336, 17726, 17725, 17694, 20423, 20425, 18336, 17726, 18315, 17725, 19974, 19926, 18336, 17726, 20427, 20430, 20432, 20434, 19679, 19678, 19677, 20436, 20440, 20442, 19714, 19713, 19732, 19731, 19752, 19688, 19751, 19750, 19689, 19714, 19713, 19732, 19731, 19752, 19688, 19751, 19750, 19689, 20444, 20446, 20448, 20451, 19714, 19713, 19732, 19731, 19752, 19751, 19750, 19749, 20456, 20460, 8712, 8713, 8717, 8718, 17930, 8723, 8724, 19822, 19767, 20028, 8744, 8745, 8749, 8750, 17930, 8755, 8756, 19822, 20028, 19820, 19858, 20055, 18028, 18122, 18081, 18036, 20474, 20476, 16370, 19873, 19930, 19876, 18122, 18028, 18081, 18036, 16407, 20480, 16412, 16412, 19893, 19896, 19931, 18122, 18066, 18081, 8877, 8878, 19968, 19971, 19923, 19926, 19925, 19929, 19930, 19931, 18122, 18120, 19971, 19970, 19968, 19976, 19974, 19980, 19983, 19987, 19986, 8932, 8933, 18179, 8938, 8939, 18190, 20030, 20028, 20027, 20055, 20054, 20053, 18315, 18313, 16369, 16370, 16370, 18334, 18269, 18315, 18313, 16407, 16412, 16412, 18336, 18334, 20492, 20495, 20497, 20499, 20502, 20505, 20201, 20508, 20510, 20512, 20514, 20517, 20520, 20523, 20525, 20527, 20529, 20532, 20535, 20539, 20541, 20544, 20252, 20250, 20252, 20250, 9292, 9293, 20264, 20263, 20264, 20263, 9334, 20453, 20272, 20458, 20457, 9359, 20275, 20454, 20458, 20452, 20279, 20278, 9375, 20570, 20573, 20337, 20324, 20576, 20579, 20337, 20335, 9522, 9523, 20352, 20350, 9528, 9529, 9547, 9548, 20352, 20350, 9553, 9554, 9587, 20374, 20373, 9593, 20376, 20375, 20611, 9601, 9608, 9609, 9612, 9613, 9614, 20620, 20622, 20624, 20626, 20628, 9754, 9776, 20438, 20437, 9811, 9812, 20454, 20453, 20458, 20457, 20453, 20449, 20458, 20452, 9869, 9870, 20454, 20453, 20458, 20457, 20669, 20672, 9971, 9974, 9975, 9994, 9998, 9999, 20693, 20695, 20583, 20542, 20587, 
20585, 20595, 20593, 20599, 20597, 20560, 20559, 20558, 20564, 20562, 20633, 20632, 20631, 20637, 20635, 20583, 20581, 20587, 20585, 20595, 20593, 20599, 20597, 20551, 20550, 20549, 20555, 20553, 20560, 20559, 20558, 20564, 20562, 20633, 20632, 20631, 20637, 20567, 20583, 20581, 20587, 20585, 20595, 20593, 20599, 20597, 20606, 20687, 20605, 20690, 20688, 18835, 18833, 18837, 18783, 20687, 20686, 20685, 20684, 20690, 20688, 20633, 20632, 20631, 20637, 20635, 16636, 16637, 20642, 20644, 20648, 20650, 20654, 20653, 20652, 20657, 20659, 20663, 20662, 20661, 18835, 18833, 18839, 18837, 20687, 20686, 20685, 20684, 20690, 20688, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 21191, 20854, 21002, 21194, 7220, 21261, 17551, 7222, 7225, 7226, 21267, 20775, 20737, 20736, 20740, 20739, 20738, 20742, 20741, 20743, 20745, 20744, 20747, 20746, 20751, 20750, 20749, 20748, 7291, 7292, 20754, 20753, 20752, 7330, 21285, 20757, 20756, 20755, 7336, 21288, 20759, 20758, 20761, 20760, 20764, 20763, 20762, 20767, 20766, 20765, 20769, 20768, 20771, 20770, 20948, 21136, 20949, 21125, 21139, 20954, 20781, 20780, 21117, 7362, 21291, 21145, 20944, 21202, 21201, 21200, 20782, 20784, 20783, 17551, 20773, 21134, 7376, 21210, 7378, 7379, 21298, 20774, 18163, 20786, 17055, 21214, 20948, 21123, 20949, 21125, 21139, 20954, 20781, 20780, 21127, 7396, 21130, 20944, 21202, 21201, 21200, 20782, 20784, 
20783, 18142, 21207, 21206, 7410, 21152, 7414, 21305, 20775, 18163, 20786, 17055, 20999, 21096, 20983, 21097, 20984, 21100, 20933, 20776, 20908, 20778, 20777, 20948, 20779, 20949, 21139, 21125, 20954, 21117, 20781, 20780, 7443, 21130, 20944, 20784, 20783, 20782, 7451, 7452, 21312, 7457, 7458, 21316, 20785, 18163, 20786, 17055, 21096, 21097, 20983, 21100, 21099, 20787, 20817, 20818, 20820, 20799, 20800, 20802, 7477, 7478, 7479, 21175, 20987, 20803, 20788, 20789, 20791, 7486, 7487, 7488, 20989, 20983, 20982, 20793, 20792, 20795, 20794, 20933, 20797, 20796, 20799, 20798, 20800, 20802, 7503, 7504, 7505, 21173, 20803, 20805, 20807, 20806, 20808, 7512, 7513, 20810, 20809, 20813, 20812, 20811, 7519, 20816, 20815, 20814, 7525, 20818, 20817, 20820, 20819, 21166, 20821, 7534, 7535, 7536, 20987, 20986, 21173, 21176, 21178, 7542, 7543, 20988, 20989, 20823, 20822, 20825, 20824, 20827, 20826, 17690, 20829, 20828, 20831, 20830, 20833, 20832, 20834, 20852, 20853, 7562, 7563, 21208, 21207, 21120, 21205, 7568, 7571, 7572, 20997, 17217, 20843, 20835, 20837, 20838, 20840, 20853, 20852, 7582, 7583, 21191, 20854, 21194, 21002, 20855, 17214, 21208, 21207, 21206, 21151, 21347, 7598, 7599, 20841, 17217, 20843, 20844, 7604, 20846, 7606, 20848, 20851, 20850, 20853, 20852, 7612, 7613, 20854, 21000, 21001, 21002, 20855, 7619, 7620, 21210, 21209, 7623, 7624, 7625, 17269, 20858, 17274, 20860, 7630, 7631, 20862, 20861, 20865, 20864, 20863, 20868, 20867, 20866, 20871, 20870, 20869, 20873, 20872, 20876, 20875, 20874, 20880, 20879, 20878, 20877, 20883, 20882, 20881, 20884, 20886, 20885, 20889, 20888, 20887, 20893, 20892, 20891, 20890, 7744, 20895, 20894, 20898, 20897, 20896, 20901, 20900, 20899, 7753, 20905, 20904, 20903, 20902, 20906, 21074, 21073, 21069, 21380, 21077, 21076, 21075, 21382, 21078, 21080, 21079, 7815, 21081, 20908, 21083, 20922, 21084, 21070, 21087, 21089, 21090, 21093, 21092, 7829, 7830, 7831, 21175, 21071, 21094, 21112, 21145, 21146, 21147, 20910, 17488, 21137, 20952, 21124, 21126, 
21125, 21116, 20911, 21127, 7849, 7850, 21120, 21113, 7853, 7854, 7855, 21188, 20912, 7858, 7859, 17590, 7861, 18163, 7863, 17594, 7865, 21192, 21191, 21194, 21193, 21195, 18291, 21199, 21198, 21202, 21201, 21200, 21204, 21203, 7879, 7880, 21208, 21207, 21206, 21205, 7885, 7886, 21210, 21188, 7889, 7890, 7891, 7892, 21212, 21189, 17684, 20999, 21073, 20925, 21072, 21414, 21077, 21076, 21075, 21416, 21078, 21080, 21079, 7960, 21102, 20921, 20930, 20929, 20932, 20913, 20914, 21087, 21089, 7972, 7973, 7974, 21175, 21094, 21097, 21096, 21095, 21100, 21099, 20915, 21101, 20928, 20916, 7986, 7987, 20919, 20918, 19376, 21109, 21108, 7993, 7994, 7995, 21185, 21111, 21110, 21074, 21073, 21069, 21429, 21077, 21076, 21075, 21431, 21078, 21080, 21079, 8012, 21081, 20921, 21083, 20922, 20924, 20923, 21093, 21092, 8023, 8024, 8025, 21175, 21071, 21094, 21073, 20925, 21072, 21439, 21077, 21076, 21075, 21441, 21078, 21080, 20926, 8042, 20928, 20927, 20930, 20929, 20932, 20931, 21070, 21087, 21089, 8054, 8055, 8056, 21071, 21094, 21097, 21096, 21095, 21100, 21098, 20933, 21102, 21101, 21103, 19412, 21106, 19413, 21109, 21108, 8073, 8074, 21185, 21111, 21110, 21137, 21123, 21124, 21140, 21139, 21116, 21143, 21142, 8086, 8087, 21112, 21145, 21146, 21147, 21149, 21148, 17551, 21208, 21207, 21206, 21151, 8099, 8100, 8101, 21210, 21152, 8104, 8105, 18048, 8107, 8108, 20952, 21137, 21124, 21126, 21140, 21116, 20955, 20956, 8117, 8118, 21112, 20957, 21147, 21146, 20958, 17488, 21120, 20935, 21462, 21188, 21210, 8133, 8134, 18159, 8136, 18163, 8138, 17594, 8140, 20952, 21137, 21124, 21140, 20953, 20939, 20938, 20955, 20941, 20940, 8153, 21469, 21112, 20957, 21146, 21147, 20958, 17508, 8160, 8161, 8164, 8165, 8166, 8167, 8168, 21480, 17590, 8174, 18163, 8176, 17594, 8178, 21145, 20944, 21146, 21147, 20946, 20945, 17529, 21123, 20948, 20949, 21126, 21140, 20954, 21143, 21142, 8194, 8195, 21207, 21134, 8198, 8199, 8200, 21210, 21209, 8203, 8204, 18159, 8206, 18163, 8208, 18048, 8210, 21137, 
21123, 21124, 21140, 21139, 21116, 21142, 20950, 8219, 8220, 21145, 21112, 21146, 21147, 21148, 20951, 17551, 21208, 21207, 21206, 21151, 8232, 8233, 8234, 21210, 21188, 8237, 8238, 18159, 8240, 18163, 8242, 20952, 21137, 21124, 21140, 20953, 20954, 20956, 20955, 8251, 8252, 21112, 20957, 21146, 21147, 20958, 17583, 8259, 8260, 21508, 8265, 8266, 8267, 17590, 8271, 18163, 8273, 17594, 8275, 20965, 20964, 20963, 20967, 20966, 20970, 20969, 20968, 20972, 20971, 21163, 20973, 20975, 20974, 20976, 20978, 20979, 20981, 8294, 8295, 8296, 20987, 20986, 21173, 21176, 21178, 8302, 8303, 20988, 20989, 20983, 21097, 20982, 21100, 21099, 20984, 21163, 21162, 21165, 21164, 21090, 21172, 8318, 8319, 8320, 20987, 21173, 20986, 21179, 21182, 21181, 8327, 8328, 20989, 20988, 21000, 21192, 21001, 21193, 21003, 17690, 21005, 21202, 21201, 21200, 21008, 21007, 8343, 8344, 21208, 21207, 21206, 20991, 8349, 8350, 8351, 8352, 20993, 20992, 17684, 20999, 21192, 21000, 21002, 21001, 21003, 17690, 21005, 21202, 21201, 21200, 21008, 21007, 8369, 8370, 21208, 21207, 21206, 21151, 21534, 8379, 8380, 20997, 18342, 20999, 21192, 21000, 21002, 21001, 21003, 18291, 21005, 21202, 21201, 21200, 21008, 21007, 8396, 8397, 21210, 21188, 8400, 8401, 8402, 8403, 21009, 18342, 21010, 21014, 21013, 21012, 21011, 21017, 21016, 21015, 21020, 21019, 21018, 21021, 21025, 21024, 21023, 21022, 21028, 21027, 21026, 21031, 21030, 21029, 21032, 21035, 21034, 21033, 21038, 21037, 21036, 8475, 8476, 8477, 21039, 21048, 21047, 21046, 21051, 21050, 21049, 8511, 8512, 21041, 21055, 21054, 21053, 21058, 21057, 21056, 8520, 8521, 21059, 21062, 21061, 21060, 21065, 21064, 21063, 8529, 8530, 8531, 8532, 21043, 21068, 21067, 21241, 21240, 21239, 21245, 21244, 21243, 21242, 8543, 21048, 21047, 21040, 21051, 21050, 21049, 8554, 8555, 21041, 21055, 21054, 21053, 21058, 21057, 21056, 8563, 8564, 21059, 21062, 21061, 21060, 21065, 21064, 21063, 8572, 8573, 8574, 8575, 21043, 21068, 21067, 21241, 21240, 21239, 21245, 21244, 21243, 
21242, 8586, 21042, 21043, 21044, 21249, 21248, 21253, 21252, 21222, 21255, 21254, 21256, 21044, 21249, 21248, 21253, 21252, 21222, 21255, 21254, 21256, 21048, 21047, 21046, 21051, 21050, 21049, 8652, 8653, 21052, 21055, 21054, 21053, 21058, 21057, 21056, 8661, 8662, 21059, 21062, 21061, 21060, 21065, 21064, 21063, 8670, 8671, 8672, 8673, 21066, 21068, 21067, 21074, 21073, 21069, 21586, 21077, 21076, 21075, 21588, 21078, 21080, 21079, 8722, 21101, 21081, 21083, 21082, 21084, 21070, 21087, 21089, 21090, 21093, 21092, 8736, 8737, 8738, 21071, 21094, 21074, 21073, 21072, 21596, 21077, 21076, 21075, 21598, 21080, 21079, 21078, 8754, 21101, 21081, 21083, 21082, 21084, 21086, 21087, 21089, 21090, 21093, 21092, 8768, 8769, 8770, 21175, 21094, 21097, 21096, 21095, 21100, 21099, 21098, 21102, 21101, 21103, 21176, 21106, 21179, 21109, 21108, 8787, 8788, 21185, 21111, 21110, 21137, 21136, 21124, 21140, 21126, 21116, 21129, 21128, 21117, 8801, 8802, 21145, 21112, 21146, 21147, 21118, 8808, 8809, 21120, 21113, 21612, 21188, 21210, 8818, 18159, 8822, 18163, 8824, 18048, 8826, 21123, 21137, 21124, 21140, 21126, 21116, 21129, 21128, 21117, 8836, 8837, 21145, 21144, 21147, 21131, 21118, 8843, 8844, 21121, 21120, 8849, 21188, 21210, 8852, 8853, 18043, 8855, 18163, 8857, 18048, 8859, 21137, 21123, 21124, 21126, 21125, 21141, 21129, 21128, 21127, 8869, 8870, 21130, 21144, 21147, 21131, 21132, 8876, 21207, 21134, 8881, 8882, 8883, 21188, 21210, 8886, 8887, 18089, 8889, 18163, 8891, 18167, 8893, 21137, 21136, 21138, 21140, 21139, 21141, 21143, 21142, 8902, 8903, 21145, 21144, 21147, 21146, 21149, 21148, 18142, 21208, 21207, 21206, 21151, 8915, 8916, 8917, 21152, 21210, 8920, 8921, 18159, 8923, 18163, 8925, 18167, 8927, 8928, 21158, 21157, 21156, 8934, 21653, 21161, 21160, 21159, 8940, 21656, 21163, 21162, 21165, 21164, 21166, 21169, 21168, 21171, 21170, 21172, 8951, 8952, 8953, 21175, 21174, 21173, 21176, 21178, 21179, 21182, 21181, 8962, 8963, 8964, 21185, 21184, 21183, 21192, 21191, 
21193, 21194, 21195, 18291, 21199, 21198, 21202, 21201, 21200, 21204, 21203, 8981, 8982, 21208, 21207, 21206, 21205, 8987, 21210, 21188, 8992, 8993, 8994, 8995, 21212, 21189, 18342, 21214, 21192, 21191, 21194, 21193, 21195, 18291, 21199, 21198, 21202, 21201, 21200, 21204, 21203, 9013, 9014, 21208, 21207, 21206, 21205, 9019, 21210, 21209, 9024, 9025, 9026, 9027, 21212, 21211, 18342, 21214, 21219, 21218, 21216, 21215, 21253, 21222, 21221, 21254, 21224, 21223, 21226, 21217, 21219, 21218, 21220, 21253, 21222, 21221, 21254, 21224, 21223, 21226, 21217, 21220, 21219, 21218, 21253, 21222, 21221, 21254, 21224, 21223, 21226, 21225, 21229, 21228, 21227, 21232, 21231, 21230, 21235, 21234, 21233, 21236, 21238, 21237, 21241, 21240, 21239, 21245, 21244, 21243, 21242, 9154, 21249, 21248, 21247, 21246, 21253, 21252, 21251, 21250, 21255, 21254, 21256, 21268, 21258, 21270, 21269, 21276, 21259, 21278, 21274, 21276, 21275, 21278, 21277, 21409, 21574, 21571, 21283, 21366, 21268, 21362, 9279, 9280, 21367, 21366, 21270, 21269, 21272, 21271, 9288, 9289, 21705, 21276, 21275, 21374, 21273, 9298, 9299, 21376, 21278, 21274, 21377, 20269, 21276, 21275, 21374, 9308, 9309, 21376, 21278, 21277, 20269, 21573, 21572, 9337, 9338, 21574, 9340, 9341, 21282, 21408, 9361, 9362, 21574, 9364, 9365, 21584, 21693, 21550, 21283, 9371, 9372, 21552, 21551, 21683, 21546, 21361, 21366, 21362, 21364, 21363, 9489, 9490, 21365, 21367, 21366, 21369, 21368, 9506, 9507, 21732, 21374, 21370, 9526, 9527, 21736, 21376, 21371, 20344, 21738, 21374, 21375, 9551, 9552, 21742, 21376, 21377, 20359, 21408, 21573, 21407, 9591, 9592, 9594, 9595, 21409, 21543, 21411, 21410, 21683, 21546, 21752, 21755, 21412, 21543, 21545, 21544, 21683, 21546, 21693, 21550, 9779, 9780, 21552, 21551, 21766, 9813, 9814, 21583, 9816, 9817, 21571, 21573, 21572, 9842, 9843, 21574, 9845, 9846, 21584, 21776, 9871, 9872, 21583, 9874, 9875, 21584, 21681, 21678, 21785, 21683, 21682, 21681, 21680, 21679, 21788, 21683, 21682, 21685, 21688, 21687, 21686, 21690, 
21689, 21691, 21694, 21693, 21692, 21696, 21695, 21723, 21724, 21698, 21697, 10084, 10085, 10087, 10088, 10095, 10096, 10098, 10099, 21760, 21759, 21699, 10153, 10154, 10155, 10157, 10158, 10165, 10166, 10167, 10169, 10170, 21724, 21723, 21728, 21727, 10218, 10219, 10221, 10222, 10229, 10230, 10232, 10233, 10282, 10283, 10284, 10286, 10287, 10294, 10295, 10296, 10298, 10299, 10306, 10307, 10308, 10310, 10311, 21760, 21759, 21758, 21724, 21723, 21728, 21727, 10368, 10369, 10371, 10372, 10379, 10380, 10382, 10383, 10409, 10410, 10411, 10413, 10414, 21749, 21760, 21782, 21781, 10457, 10458, 10460, 10461, 10466, 10467, 10468, 10469, 10471, 10472, 21760, 21759, 21758, 10530, 10531, 10532, 10534, 10535, 10542, 10549, 10553, 10555, 10560, 10562, 10567, 10568, 10569, 10574, 10576, 10581, 10582, 10583, 21782, 21781, 10620, 10621, 10623, 10624, 10629, 10630, 10631, 10632, 10634, 10635, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 7193, 7194, 7195, 7196, 22020, 7221, 20235, 22025, 7229, 7248, 7249, 7250, 7251, 7252, 7280, 7281, 7282, 7283, 7284, 7285, 7286, 7287, 7288, 7289, 7290, 22045, 7325, 7326, 7327, 22049, 7331, 7332, 7333, 22054, 7337, 7338, 7339, 7340, 7341, 7342, 7343, 7344, 7345, 7346, 7347, 7348, 7349, 7350, 7351, 7352, 7353, 7354, 7355, 7356, 7357, 7358, 7359, 22079, 7363, 7364, 7365, 7366, 7367, 7368, 7369, 7370, 7371, 7372, 7373, 22092, 7377, 22095, 7382, 7383, 7384, 7385, 7386, 7387, 7388, 7389, 7390, 7391, 7392, 7393, 7394, 7395, 21300, 7399, 7400, 7401, 7402, 7403, 7404, 7405, 7406, 7407, 7408, 7409, 20292, 7413, 20294, 7419, 7420, 7421, 7422, 7423, 7424, 7425, 7426, 
7427, 7428, 7429, 7430, 7431, 7432, 7433, 7434, 7435, 7436, 7437, 7438, 7439, 7440, 7441, 7442, 21307, 7446, 7447, 7448, 7449, 7450, 22158, 22161, 7461, 7462, 7463, 7464, 7465, 7466, 7467, 7468, 7469, 7470, 7471, 7472, 7473, 7474, 7475, 7476, 22180, 7480, 7481, 7482, 7483, 7484, 7485, 22189, 7489, 7490, 7491, 7492, 7493, 7494, 7495, 7496, 7497, 7498, 7499, 7500, 7501, 7502, 22206, 7506, 7507, 7508, 7509, 7510, 7511, 22215, 7514, 7515, 7516, 7517, 7518, 21329, 7522, 7523, 7524, 21332, 7528, 7529, 7530, 7531, 7532, 7533, 22233, 7537, 7538, 7539, 7540, 7541, 22241, 7544, 7545, 7546, 7547, 7548, 7549, 7550, 7551, 7552, 7553, 7554, 7555, 7556, 7557, 7558, 7559, 7560, 7561, 22261, 7564, 7565, 7566, 7567, 20310, 22268, 7573, 7574, 7575, 7576, 7577, 7578, 7579, 7580, 7581, 22279, 7584, 7585, 7586, 7587, 7588, 7589, 7590, 7591, 7592, 7593, 22292, 7600, 7601, 7602, 7603, 7605, 7607, 7608, 7609, 7610, 7611, 22306, 7614, 7615, 7616, 7617, 7618, 22313, 7621, 7622, 22317, 7626, 7627, 7628, 7629, 22324, 7632, 7633, 7651, 7652, 7653, 7654, 7655, 7656, 7657, 7658, 7659, 7660, 7661, 7662, 7663, 7664, 7665, 7666, 7667, 7668, 7669, 7670, 7671, 7672, 7735, 7736, 7737, 7738, 7739, 7740, 7741, 7742, 7743, 7745, 7746, 7747, 7748, 7749, 7750, 7751, 7752, 7766, 7767, 7768, 7769, 7770, 7802, 7803, 7804, 7807, 7808, 7809, 7812, 7813, 7814, 21384, 7818, 7819, 7820, 7821, 7822, 7823, 7824, 7825, 7826, 7827, 7828, 22397, 7832, 7833, 7834, 7835, 7836, 7837, 7838, 7839, 7840, 7841, 7842, 7843, 7844, 7845, 7846, 7847, 7848, 22417, 7851, 7852, 22421, 7856, 7857, 22426, 7860, 7862, 7864, 7866, 7867, 7868, 7869, 7870, 7871, 7872, 7873, 7874, 7875, 7876, 7877, 7878, 22447, 7881, 7882, 7883, 7884, 22453, 7887, 7888, 22457, 22459, 7893, 7894, 7895, 7896, 7947, 7948, 7949, 7952, 7953, 7954, 7957, 7958, 7959, 21418, 7963, 7964, 7965, 7966, 7967, 7968, 7969, 7970, 7971, 22486, 7975, 7976, 7977, 7978, 7979, 7980, 7981, 7982, 7983, 7984, 7985, 22500, 7988, 7989, 7990, 7991, 7992, 22507, 7996, 7997, 7998, 
7999, 8000, 8001, 8004, 8005, 8006, 8009, 8010, 8011, 21433, 8015, 8016, 8017, 8018, 8019, 8020, 8021, 8022, 22533, 8026, 8027, 8028, 8029, 8030, 8031, 8034, 8035, 8036, 8039, 8040, 8041, 21443, 8045, 8046, 8047, 8048, 8049, 8050, 8051, 8052, 8053, 22560, 8057, 8058, 8059, 8060, 8061, 8062, 8063, 8064, 8065, 8066, 8067, 8068, 8069, 8070, 8071, 8072, 22579, 8075, 8076, 8077, 8078, 8079, 8080, 8081, 8082, 8083, 8084, 8085, 22592, 8088, 8089, 8090, 8091, 8092, 8093, 8094, 8095, 8096, 8097, 8098, 22605, 8102, 8103, 22610, 8106, 22613, 8109, 8110, 8111, 8112, 8113, 8114, 8115, 8116, 22623, 8119, 8120, 8121, 8122, 8123, 8124, 8125, 8126, 8131, 8132, 22636, 8135, 8137, 8139, 8141, 8142, 8143, 8144, 8145, 8146, 8147, 8148, 8149, 8150, 22653, 8154, 8155, 8156, 8157, 8158, 8159, 22662, 22664, 22667, 8173, 8175, 8177, 8179, 8180, 8181, 8182, 8183, 8184, 8185, 8186, 8187, 8188, 8189, 8190, 8191, 8192, 8193, 22691, 8196, 8197, 22695, 8201, 8202, 22700, 8205, 8207, 8209, 8211, 8212, 8213, 8214, 8215, 8216, 8217, 8218, 22716, 8221, 8222, 8223, 8224, 8225, 8226, 8227, 8228, 8229, 8230, 8231, 22729, 8235, 8236, 22734, 8239, 8241, 8243, 8244, 8245, 8246, 8247, 8248, 8249, 8250, 22748, 8253, 8254, 8255, 8256, 8257, 8258, 22756, 22759, 20420, 8270, 8272, 8274, 8276, 8277, 8278, 8279, 8280, 8281, 8282, 8283, 8284, 8285, 8286, 8287, 8288, 8289, 8290, 8291, 8292, 8293, 22786, 8297, 8298, 8299, 8300, 8301, 22794, 8304, 8305, 8306, 8307, 8308, 8309, 8310, 8311, 8312, 8313, 8314, 8315, 8316, 8317, 22810, 8321, 8322, 8323, 8324, 8325, 8326, 22819, 8329, 8330, 8331, 8332, 8333, 8334, 8335, 8336, 8337, 8338, 8339, 8340, 8341, 8342, 22835, 8345, 8346, 8347, 8348, 22841, 22843, 8353, 8354, 8355, 8356, 8357, 8358, 8359, 8360, 8361, 8362, 8363, 8364, 8365, 8366, 8367, 8368, 22861, 8371, 8372, 8373, 8374, 22868, 8381, 8382, 8383, 8384, 8385, 8386, 8387, 8388, 8389, 8390, 8391, 8392, 8393, 8394, 8395, 22885, 8398, 8399, 22889, 22891, 8404, 8405, 8406, 8407, 8408, 8409, 8410, 8411, 8412, 8413, 8414, 
8415, 8416, 8417, 8418, 8419, 8420, 8421, 8422, 8423, 8424, 8425, 8426, 8427, 8428, 8469, 8470, 8471, 8472, 8473, 8474, 22924, 8478, 8505, 8506, 8507, 8508, 8509, 8510, 22934, 8513, 8514, 8515, 8516, 8517, 8518, 8519, 22943, 8522, 8523, 8524, 8525, 8526, 8527, 8528, 22952, 22954, 8533, 8534, 8535, 8536, 8537, 8538, 8539, 8540, 8541, 8542, 8548, 8549, 8550, 8551, 8552, 8553, 22973, 8556, 8557, 8558, 8559, 8560, 8561, 8562, 22982, 8565, 8566, 8567, 8568, 8569, 8570, 8571, 22991, 22993, 8576, 8577, 8578, 8579, 8580, 8581, 8582, 8583, 8584, 8585, 8597, 8598, 8612, 8613, 8614, 8615, 8616, 8617, 8618, 8619, 8620, 8629, 8630, 8631, 8632, 8633, 8634, 8635, 8636, 8637, 8646, 8647, 8648, 8649, 8650, 8651, 23032, 8654, 8655, 8656, 8657, 8658, 8659, 8660, 23041, 8663, 8664, 8665, 8666, 8667, 8668, 8669, 23050, 23052, 8674, 8675, 8676, 8709, 8710, 8711, 8714, 8715, 8716, 8719, 8720, 8721, 21590, 8725, 8726, 8727, 8728, 8729, 8730, 8731, 8732, 8733, 8734, 8735, 23080, 8739, 8740, 8741, 8742, 8743, 8746, 8747, 8748, 8751, 8752, 8753, 21600, 8757, 8758, 8759, 8760, 8761, 8762, 8763, 8764, 8765, 8766, 8767, 23108, 8771, 8772, 8773, 8774, 8775, 8776, 8777, 8778, 8779, 8780, 8781, 8782, 8783, 8784, 8785, 8786, 23127, 8789, 8790, 8791, 8792, 8793, 8794, 8795, 8796, 8797, 8798, 8799, 8800, 23141, 8803, 8804, 8805, 8806, 8807, 23148, 8810, 8811, 8816, 8817, 20477, 8821, 8823, 8825, 8827, 8828, 8829, 8830, 8831, 8832, 8833, 8834, 8835, 23171, 8838, 8839, 8840, 8841, 8842, 23178, 8845, 8846, 23181, 8850, 8851, 23185, 8854, 8856, 8858, 8860, 8861, 8862, 8863, 8864, 8865, 8866, 8867, 8868, 23202, 8871, 8872, 8873, 8874, 8875, 21631, 8879, 8880, 23212, 8884, 8885, 23217, 8888, 8890, 8892, 8894, 8895, 8896, 8897, 8898, 8899, 8900, 8901, 23233, 8904, 8905, 8906, 8907, 8908, 8909, 8910, 8911, 8912, 8913, 8914, 23246, 8918, 8919, 23251, 8922, 8924, 8926, 23258, 8929, 8930, 8931, 23262, 8935, 8936, 8937, 23267, 8941, 8942, 8943, 8944, 8945, 8946, 8947, 8948, 8949, 8950, 23280, 8954, 8955, 8956, 
8957, 8958, 8959, 8960, 8961, 23291, 8965, 8966, 8967, 8968, 8969, 8970, 8971, 8972, 8973, 8974, 8975, 8976, 8977, 8978, 8979, 8980, 23310, 8983, 8984, 8985, 8986, 20487, 8990, 8991, 23319, 23321, 8996, 8997, 8998, 8999, 9000, 9001, 9002, 9003, 9004, 9005, 9006, 9007, 9008, 9009, 9010, 9011, 9012, 23340, 9015, 9016, 9017, 9018, 20489, 9022, 9023, 23349, 23351, 9028, 9029, 9030, 9031, 9032, 9033, 9034, 9035, 9036, 9037, 9038, 9039, 9040, 9041, 9042, 9043, 9075, 9076, 9077, 9078, 9079, 9080, 9081, 9082, 9083, 9084, 9085, 9095, 9096, 9097, 9098, 9099, 9100, 9101, 9102, 9103, 9104, 9105, 9135, 9136, 9137, 9138, 9139, 9140, 9141, 9142, 9143, 9144, 9145, 9146, 9147, 9148, 9149, 9150, 9151, 9152, 9153, 9174, 9175, 9176, 9177, 9178, 9179, 9180, 9181, 9182, 9183, 9184, 9243, 9248, 9249, 9250, 9252, 9253, 9254, 9255, 9256, 9257, 9258, 9259, 9262, 9263, 9264, 9265, 9276, 9277, 9278, 23441, 9282, 9283, 9284, 9285, 9286, 9287, 23449, 9294, 9295, 9296, 9297, 23456, 9300, 9301, 9302, 9303, 9304, 9305, 9306, 9307, 23466, 9310, 9311, 9312, 9313, 9335, 9336, 23474, 9339, 23477, 9342, 9360, 23481, 9363, 23484, 9366, 9368, 9369, 9370, 23490, 9373, 9374, 9376, 9377, 9484, 9485, 9486, 9487, 9488, 23501, 9501, 9502, 9503, 9504, 9505, 23508, 9524, 9525, 23513, 9530, 9531, 9532, 9549, 9550, 23522, 9555, 9556, 9557, 9588, 9589, 9590, 23531, 23533, 9602, 9603, 9604, 9605, 9606, 9607, 9615, 9755, 9756, 9757, 9758, 9759, 9777, 9778, 23551, 9781, 9782, 23556, 9815, 23559, 9818, 9840, 9841, 23564, 9844, 23567, 9847, 23571, 9873, 23574, 9876, 9972, 9973, 9976, 9977, 9995, 9996, 9997, 10000, 10001, 10020, 10021, 10022, 10023, 10026, 10027, 10028, 10029, 10030, 10031, 10034, 10035, 10072, 10073, 10078, 10079, 23604, 23606, 23608, 23610, 10145, 10146, 10147, 23615, 23618, 23620, 23623, 10206, 10207, 10212, 10213, 23629, 23631, 23633, 23635, 23637, 23640, 23642, 23645, 23647, 23650, 10315, 10316, 10317, 10356, 10357, 10362, 10363, 23659, 23661, 23663, 23665, 23667, 23670, 10420, 10421, 10452, 10453, 
23676, 23678, 23680, 23682, 23684, 10522, 10523, 10524, 23689, 23692, 20696, 20698, 20665, 23700, 23705, 20665, 10615, 10616, 23710, 23712, 23714, 23716, 23718, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23809, 23811, 20236, 23818, 23820, 23823, 23826, 23828, 23830, 23832, 23835, 23839, 23843, 23845, 23847, 23850, 23853, 23855, 23857, 23860, 23863, 23867, 23869, 23872, 23876, 23886, 23889, 23892, 21301, 23896, 23898, 23901, 23905, 20293, 20295, 23915, 23918, 23921, 23923, 23925, 23928, 23931, 21308, 23935, 23937, 23946, 23949, 23952, 23954, 22181, 23959, 22190, 23967, 23969, 23971, 23974, 23976, 22207, 23984, 23988, 23990, 21330, 23994, 21333, 23998, 24000, 22234, 24005, 24011, 24013, 24015, 24017, 24020, 24022, 24024, 24027, 24030, 24032, 20311, 24043, 24046, 24048, 24052, 24054, 24063, 24065, 24068, 24070, 24074, 22318, 24082, 24084, 24087, 24090, 24093, 24095, 24098, 24100, 24102, 24106, 24108, 24111, 24113, 24115, 24117, 24120, 24123, 24125, 24128, 24131, 24134, 21385, 24138, 24140, 24147, 22398, 24150, 24153, 24155, 24159, 24162, 24165, 24168, 22422, 24171, 24177, 24179, 24183, 24185, 24188, 24191, 24193, 24196, 24200, 24204, 24207, 24210, 21419, 24214, 24216, 24218, 22487, 24224, 24226, 24229, 24232, 24236, 24239, 22508, 24242, 24245, 24248, 24251, 21434, 24255, 24257, 24259, 24261, 22534, 24264, 24267, 24270, 24273, 21444, 24277, 24279, 24281, 22561, 24287, 24289, 24292, 24295, 24301, 24304, 24307, 24310, 24313, 24316, 24318, 24320, 24323, 24325, 22606, 24328, 24333, 24336, 24339, 
24342, 24344, 24348, 24350, 24356, 24359, 24361, 24363, 24367, 24369, 24379, 24381, 24383, 24386, 24389, 24392, 24395, 22696, 24398, 24404, 24407, 24410, 24413, 24415, 24417, 24420, 24422, 22730, 24425, 24430, 24433, 24436, 24439, 24441, 20421, 24451, 24454, 24456, 24459, 24461, 24463, 22787, 24470, 24476, 24478, 24481, 24484, 24486, 22811, 24491, 24495, 24498, 24500, 24502, 24507, 24510, 24513, 24515, 24519, 24523, 24525, 24530, 24533, 24536, 24538, 24544, 24546, 24551, 24554, 24557, 24564, 24566, 24568, 24571, 24575, 24577, 24579, 24582, 24586, 24589, 22925, 24594, 24597, 24602, 24605, 24610, 24613, 24616, 24619, 24621, 24624, 24626, 24628, 24631, 24636, 24639, 24644, 24647, 24650, 24653, 24655, 24658, 24660, 24664, 24667, 24670, 24673, 24676, 24679, 24682, 24685, 24690, 24693, 24698, 24701, 24704, 24707, 24709, 24712, 24715, 21591, 24719, 24721, 24728, 23081, 24731, 24733, 24736, 24739, 21601, 24743, 24745, 24752, 23109, 24755, 24757, 24760, 24763, 24769, 24772, 24775, 24778, 24781, 24785, 24787, 24791, 24793, 20478, 24799, 24802, 24805, 24809, 24811, 24815, 24818, 24824, 24827, 24830, 24834, 24836, 21632, 24840, 23213, 24843, 24849, 24852, 24855, 24858, 24860, 24862, 24865, 24867, 23247, 24870, 24877, 24881, 24885, 24887, 24890, 24892, 23281, 24896, 24902, 23292, 24905, 24908, 24910, 24914, 24916, 24919, 24922, 24924, 20488, 24927, 24931, 24935, 24937, 24941, 24943, 24946, 24949, 24951, 20490, 24954, 24958, 24962, 24964, 24966, 24969, 24972, 24974, 24977, 24980, 24983, 24985, 24988, 24991, 24994, 24996, 24999, 25002, 25006, 25008, 25011, 25013, 25015, 25017, 25019, 25021, 25023, 25028, 25030, 25032, 25034, 25036, 23944, 23942, 22026, 25042, 25046, 25048, 25050, 25053, 25058, 25063, 25067, 23833, 24695, 25071, 25082, 25086, 25088, 24898, 23878, 23883, 23881, 22096, 23912, 23910, 22126, 24898, 23944, 23942, 22162, 23955, 23962, 23960, 23977, 23981, 24001, 24007, 24036, 24034, 24040, 24038, 24049, 24057, 24055, 24060, 24059, 24071, 24078, 24076, 25091, 25093, 
25097, 25099, 24143, 24141, 24156, 24175, 24174, 24173, 24180, 24201, 25114, 25119, 25121, 25123, 24220, 24283, 24297, 24330, 24345, 24354, 24353, 24352, 24370, 24374, 24373, 24372, 24377, 24376, 24375, 24402, 24401, 24400, 24428, 24427, 24442, 24444, 24449, 24448, 24447, 24466, 24464, 24472, 24487, 24503, 24520, 24526, 24541, 24539, 24547, 24561, 24559, 25127, 25129, 25131, 25134, 24599, 24607, 24633, 24641, 24687, 24695, 25140, 24687, 24695, 24724, 24722, 24748, 24746, 24765, 24788, 24797, 24796, 24795, 24812, 24822, 24821, 24820, 24847, 24846, 24845, 24874, 24873, 24872, 24898, 24911, 24932, 24938, 24959, 25150, 25152, 25154, 25157, 25160, 25163, 25166, 25169, 25171, 25089, 25173, 25102, 25105, 25108, 25111, 25179, 21761, 23616, 21715, 25079, 25077, 23621, 21762, 25186, 25188, 25055, 25060, 25108, 25111, 23638, 25074, 25072, 23643, 21715, 25079, 25077, 23648, 25200, 25203, 25205, 23509, 25102, 23514, 25105, 23518, 25108, 23523, 25111, 23668, 25213, 25215, 25219, 25222, 21761, 23690, 20697, 20699, 10556, 25137, 25135, 23701, 25143, 25141, 23706, 10584, 25147, 25145, 25233, 25237, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 23821, 25353, 23836, 23840, 23848, 23851, 23864, 23870, 23873, 23893, 23899, 23902, 23916, 23919, 23932, 23938, 23947, 23950, 25397, 23972, 23991, 23995, 24006, 25421, 25427, 24085, 24088, 24091, 24096, 
25441, 24103, 24109, 25446, 24118, 24121, 25451, 24129, 24132, 24135, 24151, 24186, 25475, 24205, 24208, 24211, 24227, 24230, 24243, 24246, 24249, 24252, 24265, 24268, 24271, 24274, 24290, 24293, 24305, 25525, 24364, 25557, 24452, 24457, 24471, 24479, 24482, 24492, 24508, 25588, 24531, 25595, 24552, 25602, 24569, 24572, 25606, 24580, 24583, 24587, 24590, 24595, 24598, 24603, 24606, 24611, 24614, 24622, 25622, 24629, 24632, 24637, 24640, 24645, 24648, 24656, 25633, 24665, 24668, 24674, 24677, 24683, 24686, 24691, 24694, 24699, 24702, 24710, 24713, 24716, 24734, 24737, 24740, 24758, 24761, 24773, 24782, 24806, 24831, 25702, 24878, 24882, 24897, 24906, 24917, 25722, 24944, 25732, 25737, 24967, 24970, 24975, 24978, 24981, 24986, 24989, 24992, 24997, 25000, 25003, 25009, 25755, 25757, 25759, 25344, 25384, 25383, 25476, 9273, 9274, 9275, 25043, 25771, 25054, 25059, 25064, 25068, 25350, 25349, 9317, 9319, 25646, 25760, 25611, 25646, 25760, 25083, 25356, 25360, 25710, 24888, 24900, 9387, 25363, 25362, 9394, 25368, 9396, 9397, 9398, 25370, 25369, 23907, 25376, 9406, 9407, 9408, 25381, 25710, 24888, 24900, 9415, 25384, 25383, 25476, 9424, 9425, 9426, 25391, 9429, 25393, 9431, 9432, 25395, 25399, 9436, 25401, 25402, 9439, 23986, 25408, 9444, 9446, 24009, 25413, 25415, 25418, 25416, 25419, 9454, 9455, 9456, 9457, 25423, 25424, 9460, 9462, 9463, 24061, 9465, 9466, 25429, 25430, 9469, 25432, 24080, 9472, 9473, 25456, 24145, 9563, 9564, 25461, 9567, 25464, 25463, 25465, 25468, 25466, 9573, 9574, 9575, 25469, 9577, 25473, 25476, 9582, 24198, 25115, 25482, 9624, 25484, 25485, 25489, 24237, 24234, 25498, 25500, 25508, 9642, 25510, 25511, 25515, 24299, 9648, 25519, 25518, 25520, 25521, 25523, 25527, 9657, 25529, 25528, 25530, 25531, 9662, 25534, 25533, 9665, 9666, 9667, 25536, 25535, 25539, 9672, 9673, 9674, 9675, 9676, 9677, 9678, 25541, 25543, 25545, 25544, 25546, 25549, 25547, 9686, 9687, 9688, 25551, 25550, 25552, 25553, 25555, 25559, 9696, 9697, 25561, 25560, 25562, 25563, 9702, 
24445, 9704, 9705, 9706, 9707, 25570, 9711, 9712, 9714, 24474, 25577, 9718, 24493, 24496, 25583, 9723, 25586, 9727, 24517, 25590, 9730, 25593, 9734, 9735, 25596, 9737, 25599, 25600, 9741, 9742, 25611, 25646, 25760, 9784, 9786, 25618, 9794, 9796, 25629, 25636, 25639, 25636, 25639, 9828, 9830, 25646, 25636, 25639, 9857, 9859, 25646, 25652, 24726, 9882, 9883, 25655, 25661, 24750, 9890, 9891, 25664, 25668, 24767, 9896, 25672, 25671, 25674, 9902, 25677, 25676, 9905, 9906, 9907, 25680, 25679, 25682, 9912, 25685, 25684, 9915, 9916, 9917, 25687, 25686, 25689, 24837, 25694, 25692, 9925, 9926, 9927, 25696, 25695, 25697, 25698, 25700, 25704, 9935, 9936, 9937, 25707, 25710, 24888, 24900, 9945, 25716, 9948, 25720, 25724, 9953, 24929, 25726, 9956, 25730, 25734, 9961, 24956, 25155, 25760, 25161, 25167, 10074, 25815, 25026, 25817, 23450, 10090, 23514, 10092, 23518, 10101, 23523, 10103, 25180, 10148, 25867, 10159, 10160, 10161, 10171, 25084, 25815, 25772, 10224, 10226, 10235, 10237, 21710, 10289, 10290, 10300, 10301, 10302, 25084, 25201, 21722, 25828, 25089, 25815, 25095, 25817, 10373, 10374, 10375, 10376, 10384, 10385, 10386, 10387, 25116, 25117, 21750, 25828, 21783, 23540, 23541, 21756, 21757, 25223, 10525, 25867, 21762, 25132, 10557, 10558, 20656, 10571, 10572, 10585, 10586, 21783, 23578, 23584, 21789, 21790, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 
162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 9241, 9266, 9267, 26126, 25387, 26127, 9271, 26153, 26260, 25347, 9314, 9315, 26113, 26214, 26216, 9321, 25752, 26252, 26253, 9329, 26190, 9344, 26216, 9346, 25752, 26252, 26253, 9354, 26115, 26114, 9380, 26116, 9382, 9383, 9384, 25711, 9386, 25714, 9389, 9390, 26118, 25365, 26120, 9395, 26289, 9399, 9400, 26121, 25373, 26123, 9404, 9405, 26296, 26124, 9410, 9411, 9412, 25711, 9414, 25714, 9417, 9418, 26126, 25387, 26127, 9422, 26153, 26307, 26128, 9428, 9430, 26313, 9433, 26130, 9435, 9437, 9438, 9440, 26133, 26132, 9443, 25410, 9447, 9448, 9449, 9450, 9451, 9452, 26135, 26331, 26333, 9458, 9459, 26136, 26338, 9464, 9467, 9468, 9470, 9471, 26137, 26139, 26140, 26142, 26137, 26139, 26140, 26142, 25443, 26144, 25447, 26146, 26253, 26147, 25443, 26144, 25447, 26146, 26253, 26147, 26150, 26149, 26148, 9561, 9562, 25459, 9566, 9568, 9569, 9570, 9571, 9572, 26361, 9576, 25471, 9579, 9580, 26153, 9583, 26239, 26241, 26156, 26155, 26154, 9623, 9625, 9626, 26157, 9628, 9629, 9630, 25492, 26162, 26161, 26160, 9635, 9636, 25502, 26166, 26165, 26164, 9641, 9643, 9644, 26167, 9646, 9647, 24302, 9650, 9651, 9652, 9653, 9654, 9655, 26170, 9658, 9659, 9660, 9661, 9663, 9664, 26401, 9668, 9669, 26171, 9671, 26408, 26411, 9679, 9680, 9681, 9682, 9683, 9684, 9685, 26421, 9689, 9690, 9691, 9692, 9693, 9694, 26172, 26430, 9698, 9699, 9700, 9701, 9703, 26439, 26174, 26173, 9710, 26443, 25572, 9715, 26176, 9717, 25579, 9720, 9721, 9722, 24505, 9725, 26180, 9728, 9729, 24528, 9732, 26182, 26459, 9736, 24549, 9739, 9740, 26465, 
26184, 26186, 26187, 26189, 26190, 9761, 26216, 9763, 25752, 26252, 26253, 9771, 26192, 26194, 26196, 9788, 25619, 26199, 26200, 26202, 26204, 9798, 25630, 26207, 26208, 9804, 26210, 9808, 26208, 9820, 26210, 9824, 26212, 26214, 26216, 9832, 25752, 26252, 26208, 9849, 26210, 9853, 26212, 26214, 26216, 9861, 25647, 26252, 26220, 26219, 26218, 9880, 9881, 9884, 26223, 26222, 26221, 9888, 9889, 9892, 26224, 9894, 9895, 24770, 9898, 9899, 26227, 9901, 9903, 9904, 26507, 9908, 9909, 26228, 9911, 9913, 9914, 26516, 9918, 9919, 26229, 9921, 9922, 9923, 9924, 26525, 9928, 9929, 9930, 9931, 9932, 9933, 26230, 26534, 26232, 26231, 9940, 9941, 9942, 25711, 9944, 25714, 9947, 25718, 9950, 9951, 26236, 9954, 9955, 25728, 9958, 9959, 26238, 9962, 26239, 26241, 26242, 26244, 26245, 26247, 26248, 26250, 25752, 26252, 26253, 10015, 10075, 10080, 10081, 10089, 10091, 10100, 10102, 10149, 26574, 10172, 25089, 10209, 25095, 10215, 23450, 23514, 23518, 23523, 10288, 26585, 26588, 21762, 10313, 10318, 10319, 10358, 10359, 10364, 10365, 21743, 10416, 21746, 10418, 10422, 10423, 10454, 10455, 21753, 10463, 25124, 10474, 25164, 10476, 10526, 10536, 10537, 26620, 10570, 26623, 26625, 10617, 10618, 21786, 10626, 25158, 10637, 25164, 10639, 26599, 26597, 26603, 26601, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 26882, 9268, 9269, 9270, 9272, 26261, 9290, 26891, 9316, 9318, 9320, 9322, 9323, 9328, 9343, 9345, 9347, 9348, 9353, 9378, 9379, 9381, 26914, 9385, 26283, 9388, 26919, 9391, 9392, 9393, 26923, 26290, 26926, 9401, 9402, 9403, 26931, 26297, 9409, 26936, 9413, 26302, 9416, 26941, 9419, 9420, 9421, 
9423, 26308, 9427, 9434, 26319, 9441, 9442, 9445, 26966, 9453, 9461, 26340, 26347, 9474, 9475, 9476, 9477, 9491, 9492, 9493, 9494, 9508, 9509, 9510, 9511, 9516, 9517, 9533, 9534, 9535, 9536, 9541, 9542, 9558, 9559, 9560, 26351, 9565, 27008, 27011, 26362, 9578, 9581, 27018, 9596, 9597, 9620, 9621, 9622, 27025, 9627, 27030, 9631, 9632, 9633, 9634, 9637, 9638, 9639, 9640, 27042, 9645, 26385, 9649, 27049, 9656, 27056, 27060, 26402, 27063, 9670, 26409, 26412, 27071, 27074, 26422, 27077, 9695, 27085, 26437, 26440, 9708, 9709, 9713, 9716, 9719, 9724, 9726, 27105, 9731, 9733, 9738, 9743, 9744, 9745, 9746, 9760, 9762, 9764, 9765, 9770, 9783, 9785, 9787, 9789, 9790, 9793, 9795, 9797, 9799, 9800, 9803, 9807, 9819, 9823, 9827, 9829, 9831, 9833, 9834, 9848, 9852, 9856, 9858, 9860, 9862, 9863, 9877, 9878, 9879, 26489, 9885, 9886, 9887, 26494, 9893, 26499, 9897, 27181, 9900, 27185, 26508, 27188, 9910, 27192, 26517, 27195, 9920, 27200, 26526, 27203, 9934, 26535, 9938, 9939, 27214, 9943, 26540, 9946, 9949, 9952, 27223, 9957, 9960, 27229, 9963, 9964, 9978, 9979, 9986, 9987, 10002, 10003, 10004, 10005, 10014, 26951, 26310, 26316, 26323, 26880, 26972, 26970, 26977, 27087, 27065, 27058, 27068, 27205, 26444, 27099, 27101, 27106, 27111, 27114, 26951, 26310, 26316, 26323, 26963, 26972, 26970, 26977, 10208, 10214, 10223, 10225, 10234, 10236, 27087, 27068, 27058, 27205, 27065, 26444, 27099, 27101, 27106, 27111, 27114, 10312, 26951, 26310, 26316, 26323, 26963, 26972, 26970, 26977, 27006, 27068, 27205, 27013, 10415, 10417, 27183, 27190, 27197, 27205, 27218, 27224, 10462, 10473, 10475, 27051, 27058, 27065, 27068, 27079, 27087, 26444, 27099, 27101, 27106, 27111, 27114, 27183, 27190, 27197, 27205, 27218, 27224, 10625, 10636, 10638, 26557, 27243, 27246, 27245, 27248, 27247, 26570, 26572, 26575, 27260, 26586, 27265, 27267, 27269, 10751, 10752, 10755, 10756, 27275, 27277, 26615, 27286, 25961, 27289, 25968, 27292, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 
103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 27396, 27412, 27439, 27445, 26341, 26348, 27473, 26352, 27481, 27486, 27493, 27497, 27504, 27516, 27521, 27567, 26490, 27571, 26495, 27590, 27593, 27599, 27602, 27413, 27416, 27414, 27420, 27418, 27422, 27426, 27424, 27428, 27430, 27433, 27431, 27437, 27435, 27441, 10053, 10054, 27442, 27443, 10057, 10059, 26322, 10061, 27447, 27448, 10064, 10065, 27449, 10067, 27454, 27452, 27458, 27456, 27462, 27398, 27464, 27468, 27466, 27470, 27500, 27501, 27036, 27489, 27490, 27488, 27499, 10114, 27517, 27518, 10117, 27399, 27510, 10120, 27505, 27506, 27512, 10124, 27513, 10126, 27589, 10130, 27093, 27523, 10133, 26447, 27525, 10136, 27526, 27528, 10139, 27529, 27530, 10142, 10143, 27483, 27408, 27407, 27406, 27410, 27537, 27536, 27535, 27539, 27413, 27416, 27414, 27394, 27392, 27430, 27433, 27431, 27437, 27435, 27441, 10187, 10188, 27442, 27443, 10191, 10193, 26322, 27447, 10196, 27448, 10198, 10199, 27449, 10201, 27454, 27452, 27458, 27456, 27462, 27398, 27464, 27468, 27466, 27470, 27489, 27490, 27500, 27501, 27036, 27499, 27488, 10248, 27517, 27518, 27512, 10252, 27513, 10254, 27505, 27506, 10257, 27589, 10260, 27399, 27510, 10264, 27093, 27523, 10267, 26447, 27525, 10270, 27526, 27528, 10273, 27529, 27530, 10276, 10277, 
27403, 27402, 27401, 27400, 27405, 27408, 27407, 27406, 27410, 27537, 27536, 27535, 27539, 27483, 27413, 27416, 27414, 27420, 27418, 27422, 27426, 27424, 27428, 27430, 27433, 27431, 27437, 27435, 27441, 10337, 10338, 27442, 27443, 10341, 10343, 26322, 27447, 10346, 27448, 10348, 10349, 27449, 10351, 27454, 27452, 27458, 27456, 27462, 27460, 27464, 27468, 27466, 27470, 27574, 27575, 27477, 10393, 27478, 27512, 10396, 27513, 10398, 27589, 27596, 27594, 27480, 10405, 27612, 27610, 27614, 27483, 27574, 27575, 10430, 27577, 27579, 10433, 27581, 27583, 10436, 27585, 27587, 10439, 27589, 27596, 27594, 27598, 10446, 27601, 10449, 27604, 27606, 27608, 27612, 27610, 27614, 27488, 27489, 27490, 27036, 27499, 27500, 27501, 10487, 27503, 10490, 27505, 27506, 10493, 27508, 27510, 27512, 10497, 27513, 10499, 27515, 10502, 27517, 27518, 10506, 27093, 27523, 10509, 26447, 27525, 10512, 27526, 27528, 10515, 27529, 27530, 10518, 10519, 27533, 27531, 27537, 27536, 27535, 27539, 27543, 27542, 27541, 27540, 27548, 27547, 27546, 27545, 27550, 27551, 27552, 27553, 27557, 27556, 27555, 27554, 27559, 27560, 27564, 27563, 27562, 27561, 27574, 27575, 10593, 27577, 27579, 10596, 27581, 27583, 10599, 27585, 27587, 10602, 27589, 27596, 27594, 27598, 10609, 27601, 10612, 27604, 27606, 27608, 27612, 27610, 27614, 10652, 10654, 10657, 10658, 10661, 10662, 10679, 10682, 10685, 27642, 27643, 27645, 27644, 27647, 27646, 10725, 10728, 27659, 10733, 10746, 10748, 27719, 27721, 27673, 27672, 10769, 10781, 27680, 27682, 27681, 10806, 10809, 10814, 10818, 10822, 10834, 27701, 27703, 27702, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 
230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 27474, 27487, 27494, 27498, 27568, 27572, 10036, 27905, 10038, 10039, 10040, 10041, 10042, 10043, 10044, 10045, 10046, 10047, 10048, 10049, 10050, 27906, 10052, 27943, 10055, 10056, 27907, 10060, 10062, 10063, 27953, 10066, 27908, 26978, 10070, 10071, 10076, 10077, 10082, 10083, 10086, 10093, 10094, 10097, 10104, 10105, 10107, 10108, 10109, 10111, 10113, 10115, 10116, 10118, 10119, 10121, 10122, 10123, 10125, 10127, 27923, 27918, 10131, 10132, 10134, 10135, 10137, 10138, 10140, 10141, 10144, 10150, 10151, 10152, 10156, 10162, 10163, 10164, 10168, 10173, 27905, 10175, 10176, 10177, 10178, 27904, 10180, 10181, 10182, 10183, 10184, 27906, 10186, 28022, 10189, 10190, 27907, 10194, 10195, 10197, 28032, 10200, 27908, 26978, 10204, 10205, 10210, 10211, 10216, 10217, 10220, 10227, 10228, 10231, 10238, 10239, 10240, 10241, 10243, 10245, 10247, 10249, 10250, 10251, 10253, 10255, 10256, 10258, 27923, 10261, 10262, 27918, 10265, 10266, 10268, 10269, 10271, 10272, 10274, 10275, 10278, 10279, 10280, 10281, 10285, 10291, 10292, 10293, 10297, 10303, 10304, 10305, 10309, 10314, 10320, 27905, 10322, 10323, 10324, 10325, 10326, 10327, 10328, 10329, 10330, 10331, 10332, 10333, 10334, 27906, 10336, 28110, 10339, 10340, 27907, 10344, 10345, 10347, 28120, 10350, 27908, 26978, 10354, 10355, 10360, 10361, 10366, 10367, 10370, 10377, 10378, 10381, 27911, 10390, 10391, 10392, 10394, 10395, 10397, 10399, 27923, 27924, 10402, 10403, 10404, 27912, 10407, 10408, 10412, 10419, 27920, 27922, 10428, 10429, 10431, 10432, 10434, 10435, 10437, 10438, 10440, 27923, 27924, 10443, 10444, 10445, 27925, 10448, 27926, 10451, 10456, 10459, 10464, 10465, 10470, 10478, 10479, 10480, 10482, 10484, 10485, 10486, 10488, 27916, 10491, 10492, 10494, 10495, 10496, 10498, 10500, 27917, 10503, 10504, 27918, 10507, 10508, 10510, 10511, 10513, 10514, 10516, 10517, 10520, 10521, 10527, 10528, 
10529, 10533, 10538, 10539, 10540, 10541, 10545, 10546, 10547, 10548, 10552, 10554, 10559, 10561, 10563, 10564, 10565, 10566, 10573, 10575, 10577, 10578, 10579, 10580, 27920, 27922, 10591, 10592, 10594, 10595, 10597, 10598, 10600, 10601, 10603, 27923, 27924, 10606, 10607, 10608, 27925, 10611, 27926, 10614, 10619, 10622, 10627, 10628, 10633, 28269, 28271, 10697, 10699, 10702, 10703, 10706, 10707, 10731, 10766, 10767, 10784, 10787, 10788, 10837, 10840, 10841, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 10037, 28425, 28427, 28430, 28434, 28436, 10051, 27946, 10058, 28443, 28444, 10068, 10069, 28451, 28453, 28455, 28458, 28418, 28417, 28419, 28467, 28469, 28471, 27983, 28475, 10128, 10129, 28478, 28480, 27993, 27996, 27999, 28488, 28492, 10174, 28498, 28500, 10179, 28504, 28506, 10185, 28025, 10192, 28513, 28029, 10202, 10203, 28521, 28523, 28525, 28528, 28418, 28419, 28417, 28537, 28056, 28541, 28543, 10259, 28545, 10263, 28548, 28550, 28072, 28075, 28078, 28557, 28559, 28562, 28566, 10321, 28573, 28575, 28578, 28582, 28584, 10335, 28113, 10342, 28591, 28117, 10352, 10353, 28599, 28601, 28603, 28606, 28416, 10389, 28136, 28139, 28615, 10400, 10401, 28619, 
28146, 10406, 28623, 28420, 10425, 28421, 10427, 28630, 28632, 28634, 28636, 10441, 10442, 28640, 28167, 10447, 28169, 10450, 28649, 28417, 28418, 28419, 28658, 10489, 28660, 28662, 28192, 28666, 10501, 28668, 10505, 28671, 28673, 28205, 28208, 28211, 28680, 28682, 28686, 28688, 28690, 28692, 28698, 28700, 28704, 28706, 28420, 10588, 28421, 10590, 28711, 28713, 28715, 28717, 10604, 10605, 28721, 28257, 10610, 28259, 10613, 28730, 28438, 28446, 28456, 28459, 28460, 28463, 28486, 28490, 28494, 28508, 28516, 28526, 28737, 28529, 28739, 28530, 28532, 28560, 28564, 28568, 28569, 28586, 28594, 28604, 28607, 28609, 28624, 28742, 28625, 28628, 28645, 28647, 28646, 28650, 28745, 28652, 28656, 28684, 28694, 28693, 28696, 28695, 28702, 28701, 28709, 28726, 28728, 28727, 28731, 28748, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 28928, 28939, 10106, 10110, 10112, 28489, 28493, 28962, 28973, 10242, 10244, 10246, 28995, 28563, 28567, 28998, 29009, 10388, 10424, 10426, 10477, 10481, 10483, 28683, 29062, 29064, 29066, 29068, 10587, 10589, 28931, 28930, 28432, 28933, 28936, 28440, 10647, 10649, 28938, 28941, 28942, 10655, 28943, 10659, 28944, 10663, 10667, 28951, 28952, 28948, 28949, 28950, 28479, 28954, 28959, 28958, 28957, 10678, 10680, 10683, 28964, 28502, 28967, 28970, 28510, 10692, 10694, 28972, 28975, 28976, 10700, 28977, 10704, 28978, 10709, 10712, 28985, 28987, 28984, 28982, 28983, 28549, 28988, 28993, 28992, 28991, 10723, 10726, 10729, 10732, 29001, 29000, 28580, 29003, 29006, 28588, 10741, 10743, 29008, 29011, 29012, 10749, 29013, 10753, 29014, 10757, 29019, 29018, 29017, 29021, 29023, 10764, 29025, 10768, 10770, 29033, 29032, 29031, 29030, 29035, 29039, 29037, 10780, 10782, 10783, 10785, 29041, 10791, 10792, 29047, 29052, 29049, 29045, 29048, 29050, 28672, 29053, 29058, 
29057, 29056, 29059, 10807, 10810, 10811, 10816, 10817, 10820, 10821, 10823, 29076, 29075, 29074, 29073, 29078, 29082, 29080, 10833, 10835, 10836, 10838, 29084, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29184, 10641, 10642, 10643, 10644, 10645, 10646, 29185, 10650, 10651, 10653, 10656, 10660, 29188, 29186, 29187, 10668, 10669, 10670, 10671, 10672, 10673, 10674, 10675, 10676, 10677, 29189, 29190, 29191, 10687, 10688, 10689, 10690, 10691, 29192, 10695, 10696, 10698, 10701, 10705, 29195, 29194, 29193, 10713, 10714, 10715, 10716, 10717, 10718, 10719, 10720, 10721, 10722, 29196, 29197, 29198, 29199, 10735, 10736, 10737, 10738, 10739, 10740, 29200, 10744, 10745, 10747, 10750, 10754, 29201, 10759, 10760, 10761, 10762, 10763, 10765, 29203, 29202, 10773, 10774, 10775, 10776, 10777, 10778, 10779, 29308, 10786, 29205, 29204, 29206, 29312, 10794, 10795, 10796, 10797, 10798, 10799, 10800, 10801, 10802, 10803, 10804, 10805, 29207, 29209, 29208, 29327, 29210, 29211, 29213, 29212, 10826, 10827, 10828, 10829, 10830, 10831, 10832, 29342, 10839, 29241, 29273, 29297, 29306, 29340, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 
222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 10640, 29442, 29446, 10648, 29451, 29452, 10664, 10665, 10666, 29457, 29459, 29462, 29464, 10681, 10684, 10686, 29473, 10693, 29478, 29479, 10708, 10710, 10711, 29484, 29486, 29489, 29491, 10724, 10727, 10730, 10734, 29498, 29502, 10742, 29507, 29508, 10758, 29511, 29515, 10771, 10772, 29519, 29521, 29524, 29526, 10789, 10790, 10793, 29532, 29534, 29536, 29538, 29540, 10808, 10812, 10813, 10815, 10819, 10824, 10825, 29552, 29554, 29557, 29559, 29443, 29450, 29449, 10853, 29470, 29477, 29476, 10863, 29499, 29506, 29505, 29513, 10876, 29525, 10882, 29542, 29558, 10894, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29220, 29221, 29702, 29704, 29706, 29465, 29709, 29710, 29249, 29250, 29258, 29718, 29720, 29492, 29723, 29724, 29725, 29280, 29281, 29732, 29512, 29735, 29738, 29742, 29745, 29541, 29749, 29751, 29328, 29330, 29754, 29757, 10843, 29696, 29701, 29700, 10847, 10848, 10855, 29711, 29715, 29714, 10859, 10860, 10868, 29726, 29731, 29730, 10872, 10873, 10874, 29734, 29522, 29740, 10881, 10888, 29555, 
29759, 10893, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29448, 29955, 29460, 29475, 29963, 29487, 29504, 29736, 29530, 29746, 29979, 29329, 29331, 29755, 10844, 10845, 10846, 29989, 29707, 29959, 29958, 10856, 10857, 10858, 29995, 29721, 29968, 29967, 29966, 10869, 10870, 10871, 30001, 29971, 10877, 10878, 10880, 29747, 29978, 10890, 10892, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 
203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29230, 29259, 29743, 29952, 30224, 10849, 10851, 10852, 29960, 30231, 10861, 10864, 10865, 10866, 29969, 30239, 10875, 30242, 30215, 30006, 10883, 10885, 30219, 30220, 30218, 30221, 30010, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 10842, 30225, 30464, 30471, 10854, 30232, 30465, 30475, 30477, 10867, 30240, 30480, 10879, 29774, 30466, 10886, 10887, 10889, 10891, 29777, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 
102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 29984, 10850, 29763, 29990, 10862, 30728, 29996, 30732, 10884, 30735, 30007, 30738, 30731, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 30222, 
30977, 30229, 30980, 30237, 30984, 30986, 10897, 30983, 30987, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 30737, 31236, 31233, 31234, 31232, 10901, 10902, 31235, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 
175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 31237, 10896, 10898, 10899, 10900, 10903, 31494, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 10895, 31746, 31748, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 
98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 31745, 31750, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 32001, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 32257, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 
193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 31749, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255};
bool h_Op[]= {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 
0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 
1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#define THREADS_PER_BLOCK 256
#define BLOCKS_PER_GRID 1
#define SIZE_OF_IN 11008
#define SIZE_OF_AC 22272
/**
 * Evaluate a fixed 87-gate arithmetic circuit over a per-block shared-memory
 * register file, repeated n_iter times, accumulating the final gate's lane-0
 * value into A[0].
 *
 * Layout: R holds 130 "registers" of THREADS_PER_BLOCK lanes each.
 * Registers 0..42 are loaded from A; gate k (k = 0..86) writes register
 * 43+k from operands addressed by B/C (absolute indices into R), using
 * multiply when Op[k] is set and add otherwise.
 *
 * Refactor: the original spelled out all 43 loads and 87 gates as individual
 * statements with barriers between hand-chosen groups. The loops below are
 * behaviorally equivalent; placing a barrier after EVERY gate is strictly
 * stronger ordering than the original grouping, and all threads execute the
 * same uniform path, so the barriers are legal.
 *
 * NOTE(review): 130 * THREADS_PER_BLOCK floats = 133,120 bytes of static
 * shared memory, which exceeds the per-block shared-memory capacity of most
 * GPUs — confirm this actually launches on the target architecture.
 */
__device__ void
ac(float *A, const int *B, const int *C, const bool *Op, int n_iter) {
    int i = blockDim.x * blockIdx.x + threadIdx.x;
    __shared__ float R[130*THREADS_PER_BLOCK];
    const int t = THREADS_PER_BLOCK;
    __shared__ float final;
    final = 0;  // every thread stores the same value; harmless
    // Load the 43 input registers for this lane.
    for (int k = 0; k < 43; k++) {
        R[i + k*t] = A[i + k*t];
    }
    __syncthreads();
    for (int iter = 0; iter < n_iter; iter++) {
        // Gate k reads R[B[k]] and R[C[k]] and writes register 43+k.
        for (int k = 0; k < 87; k++) {
            R[i + (43 + k)*t] = Op[i + k*t]
                ? R[B[i + k*t]] * R[C[i + k*t]]
                : R[B[i + k*t]] + R[C[i + k*t]];
            __syncthreads();  // make this wave of results visible to all lanes
        }
        // Lane 0 accumulates the last register's lane-0 value per iteration.
        if (i == 0) { final += R[129*t]; }
        __syncthreads();
    }
    if (i == 0) { A[0] = final; }
}
|
20,087 | #include <stdio.h>
#include <math.h>
#include <stdlib.h>
#include <thrust/host_vector.h>
#include <thrust/device_vector.h>
#include <iostream>
__global__ void jacobi(double *dev_A, double *dev_V, int *dev_pair, int size, int *d_cont, int tolerance);
void check (double *A, int n, double tolerance, thrust::host_vector<int> H);
/**
 * Host driver for a one-sided parallel Jacobi eigenvalue iteration on a
 * 32x32 matrix read from stdin (comma-separated rows). Repeatedly launches
 * the jacobi kernel until the off-diagonal entries converge, then prints
 * the norm of the diagonal (eigenvalue estimates).
 *
 * NOTE(review): check() receives H BY VALUE, so the flags it writes are
 * lost; H stays all-zero, thrust::reduce returns 0, and the while loop
 * exits after a single kernel launch — confirm whether multiple sweeps
 * were intended (pass H by reference at declaration and definition).
 * NOTE(review): jacobi's tolerance parameter is declared int, so 1e-12
 * truncates to 0 in the call below (the kernel never reads it, though).
 * NOTE(review): ans and d_cont are never freed; D is unused.
 */
int main (void)
{
double tolerance = 0.000000000001;
int n = 32, cont = 1;
thrust::host_vector<int> H(n*n);
for (int i = 0; i < n*n; i++)
H[i] = 0;
thrust::device_vector<int> D = H;
int *d_cont;
cudaMalloc((void**) &d_cont, sizeof(int));
cudaMemcpy(d_cont, &cont, sizeof(int), cudaMemcpyHostToDevice);
// Host buffers are sized 1024x1024 even though only n*n (=1024) entries are used.
double* A = (double*)malloc(1024*1024*sizeof(double));
double* V = (double*)malloc(1024*1024*sizeof(double));
int* pair = (int*)malloc(n*sizeof(int));
double *d_A, *d_V;
int *d_pair;
cudaMalloc( (void**) &d_A, 1024*1024*sizeof(double));
cudaMalloc( (void**) &d_V, 1024*1024*sizeof(double));
cudaMalloc( (void**) &d_pair, n*sizeof(int));
/* enter a valid matrix A*/
// NOTE(review): scanf return values are not checked; malformed input
// leaves A partially uninitialized.
int row, col, i;
for (row=0; row<n; row++)
{
for (col=0; col<n-1; col++)
scanf("%lf,", (A+row*n+col));
scanf("%lf\n", (A+row*n+n-1));
}
/*copy matrix to device*/
cudaMemcpy(d_A, A, 1024*1024*sizeof(double), cudaMemcpyHostToDevice);
/*initializing vector matrix V */
// V starts as the identity; it accumulates the applied rotations.
for (row = 0; row < n; row++)
{
for (col = 0; col < n; col++)
{
if (row == col)
{
*(V + row * n + col) = 1.0;
}
else
{
*(V + row * n + col) = 0.0;
}
}
}
/*copy matrix to device*/
cudaMemcpy(d_V, V, 1024*1024*sizeof(double), cudaMemcpyHostToDevice);
/*initializing pair matrix*/
// pair[k] and pair[k + n/2] name the (p, q) rotation pair owned by thread k.
for (i = 0; i < n; i++)
*(pair + i) = i;
//for (i = 0; i < n; i++)
//printf("%d ", *(pair + i));
/*copy matrix to device*/
cudaMemcpy(d_pair, pair, n*sizeof(int), cudaMemcpyHostToDevice);
/*launch kernel here*/
// One block of n/2 threads: each thread rotates one (p, q) pair per sweep.
dim3 grid (1, 1, 1);
dim3 block (n/2, 1, 1);
while (cont != 0)
{
jacobi<<<grid, block>>>(d_A, d_V, d_pair, n, d_cont, tolerance);
cudaMemcpy(A, d_A, 1024*1024*sizeof(double), cudaMemcpyDeviceToHost);
check(A, n, tolerance, H);
for(int i = 0; i < H.size(); i++)
std::cout << "H[" << i << "] = " << H[i] << std::endl;
// cont counts off-diagonal entries still above tolerance.
cont = thrust::reduce(H.begin(), H.end(), (int) 0, thrust::plus<int>());
printf("%d\n", cont);
}
cudaMemcpy(pair, d_pair, n*sizeof(int), cudaMemcpyDeviceToHost);
//for (int i = 0; i < n; i++)
//printf("%d\n", *(pair + n));
/*write matrix back to host*/
cudaMemcpy(A, d_A, 1024*1024*sizeof(double), cudaMemcpyDeviceToHost);
cudaMemcpy(V, d_V, 1024*1024*sizeof(double), cudaMemcpyDeviceToHost);
/*check result*/
// Collect the diagonal (eigenvalue estimates) and report its 2-norm.
double* ans = (double*) malloc(n*sizeof(double));
double norm = 0;
for (row = 0; row<n; row++){
for (col = 0; col<n; col++){
if (row==col)
{
*(ans+row) = *(A+row*n+col);
norm += (*(ans+row))*(*(ans+col));
//printf("%lf ", *(A+row*n+col));
}
//printf("%lf", *(A+row*n+col));
}
//printf("\n");
}
norm = sqrt(norm);
printf("Norm is %lf\n", norm);
free(A);
free(V);
free(pair);
cudaFree(d_A);
cudaFree(d_V);
cudaFree(d_pair);
}
/**
 * One sweep of the one-sided Jacobi eigenvalue method.
 *
 * Each thread owns one rotation pair (p, q) taken from dev_pair, computes
 * the Givens rotation (c, s) that annihilates A[p][q], applies it to A from
 * both sides and to the eigenvector accumulator V, then performs one
 * "chess tournament" rotation of the pairing so the next sweep processes
 * different pairs.
 *
 * Launch contract (from the host driver): one block of size/2 threads.
 * The __syncthreads() calls inside the loops are uniform because every
 * thread runs the same trip count n.
 *
 * NOTE(review): d_cont and tolerance are never read here (tolerance is
 * also typed int while the host value is a double) — confirm they can be
 * dropped from the signature.
 */
__global__ void jacobi(double *dev_A, double *dev_V, int *dev_pair, int size, int *d_cont, int tolerance)
{
short threadno, p, q, n, i, temp1, temp2;
double c, s;
threadno = threadIdx.x;
n = size;
// This thread's rotation pair: indices p and q.
p = *(dev_pair + threadno);
q = *(dev_pair + threadno + n/2);
/*calculate c, s value*/
// Standard stable Jacobi rotation: t = tan(theta), c = cos, s = sin.
if (*(dev_A + p * n + q) != 0)
{
double torque, t;
torque = ( *(dev_A + q * n + q) - *(dev_A + p * n + p))/(2*(*(dev_A + p * n + q)));
if (torque >= 0)
t = 1/(torque + sqrt(1+torque*torque));
else
t = -1/(-torque + sqrt(1+torque*torque));
c = 1/sqrt(1+t*t);
s = t*c;
}
else
{
// Off-diagonal element already zero: identity rotation.
c = 1;
s = 0;
}
/* A = transpose(J)*A*J */
// Row update: barrier before the writes so every thread has read the old
// values of rows p and q before any thread overwrites them.
for (i = 0; i < n; i++)
{
double Api = (*(dev_A + p * n + i))*c + (*(dev_A + q * n + i))*(-s);
double Aqi = (*(dev_A + p * n + i))*s + (*(dev_A + q * n + i))*c;
__syncthreads();
*(dev_A + p * n + i) = Api;
*(dev_A + q * n + i) = Aqi;
}
// Column update (second side of the similarity transform).
for (i = 0; i < n; i++)
{
double Aip = (*(dev_A + i * n + p))*c + (*(dev_A + i * n + q))*(-s);
double Aiq = (*(dev_A + i * n + p))*s + (*(dev_A + i * n + q))*c;
__syncthreads();
*(dev_A + i * n + p) = Aip;
*(dev_A + i * n + q) = Aiq;
}
/* V = V*J */
for (i = 0; i < n; i++)
{
double Vpi = (*(dev_V + p * n + i))*c + (*(dev_V + q * n + i))*(-s);
double Vqi = (*(dev_V + p * n + i))*s + (*(dev_V + q * n + i))*c;
__syncthreads();
*(dev_V + p * n + i) = Vpi;
*(dev_V + q * n + i) = Vqi;
}
/* chess tournament rotate*/
// Compute the next pairing into registers first, barrier, then write —
// avoids reading dev_pair entries another thread has already replaced.
if (threadno == 0)
{
temp1 = 0;
temp2 = *(dev_pair + n/2 + 1);
}
else if (threadno == 1)
{
temp1 = *(dev_pair + n/2);
temp2 = *(dev_pair + threadno + n/2 + 1);
}
else if (threadno == n/2 - 1)
{
temp1 = *(dev_pair + threadno - 1);
temp2 = *(dev_pair + n/2 - 1);
}
else
{
temp1 = *(dev_pair + threadno - 1);
temp2 = *(dev_pair + threadno + n/2 + 1);
}
__syncthreads();
*(dev_pair + threadno) = temp1;
*(dev_pair + threadno + n/2) = temp2;
}
/**
 * Mark which off-diagonal entries of the n x n matrix A still exceed the
 * convergence tolerance: H[i*n+j] = 1 when |A[i][j]| > tolerance (i != j),
 * 0 otherwise. Diagonal entries are always marked converged.
 *
 * Bug fix: the comparison now uses fabs() — the original compared the
 * signed value, so large NEGATIVE off-diagonal entries were wrongly
 * counted as converged.
 *
 * NOTE(review): H is received BY VALUE, so the caller never observes the
 * flags written here; the convergence loop in main() always sees zeros.
 * Passing by reference requires changing the forward declaration as well —
 * confirm and fix both together.
 */
void check (double *A, int n, double tolerance, thrust::host_vector<int> H)
{
    for (int i = 0; i < n; i++)
    {
        for (int j = 0; j < n; j++)
        {
            if (i != j && fabs(*(A + i * n + j)) > tolerance)
                H[i * n + j] = 1;
            else
                H[i * n + j] = 0;
        }
    }
}
|
20,088 | #include <iostream>
#include <cstdlib>
using namespace std;
/**
 * Element-wise vector addition: c[id] = a[id] + b[id] for id < size.
 *
 * Bug fix: the global index hard-coded the launch's block size (96) as
 * `blockIdx.x * 96`; it now uses blockDim.x, so the kernel is correct for
 * any launch configuration. With the existing 96-thread launch the
 * computed indices are identical.
 */
__global__ void add_vector(int* a, int* b, int* c, int size)
{
    int id = blockIdx.x * blockDim.x + threadIdx.x;
    if (id < size)  // guard the partial last block
    {
        c[id] = a[id] + b[id];
    }
}
/**
 * Interactive demo: reads two integers, fills two size-20000 vectors with
 * them, adds the vectors on the GPU, and prints the result.
 *
 * NOTE(review): the kernel launch itself is never checked — add
 * cudaGetLastError() after the <<<>>> call to catch launch failures
 * (the blocking cudaMemcpy below will surface execution errors).
 */
int main()
{
int v1, v2;
cout << "First value ? " << endl;
cin >> v1;
cout << "Second value ? " << endl;
cin >> v2;
cout << "v1 = " << v1 << endl;
cout << "v2 = " << v2 << endl;
const int size = 20000;
int *a = new int[size];
int *b = new int[size];
int *c = new int[size];
// Fill both input vectors with the user-supplied constants.
for (int i=0; i<size; ++i)
{
a[i] = v1;
b[i] = v2;
}
int *a_dev, *b_dev, *c_dev;
if (cudaSuccess != cudaMalloc((void**)&a_dev, sizeof(int) * size))
{
cerr << "error allocation a_dev" << endl;
}
if (cudaSuccess != cudaMalloc((void**)&b_dev, sizeof(int) * size))
{
cerr << "error allocation b_dev" << endl;
}
if (cudaSuccess != cudaMalloc((void**)&c_dev, sizeof(int) * size))
{
cerr << "error allocation c_dev" << endl;
}
if (cudaSuccess != cudaMemcpy(a_dev, a, sizeof(int) * size, cudaMemcpyHostToDevice))
{
cerr << "error cuda mem copy" << endl;
}
if (cudaSuccess != cudaMemcpy(b_dev, b, sizeof(int) * size, cudaMemcpyHostToDevice))
{
cerr << "error cuda mem copy" << endl;
}
// dim3 block_dim(1, 1, 1);
// dim3 threads_per_block(size, 1, 1);
// 96 threads per block; +1 block covers the remainder (size % 96 != 0).
int nb_blocks = size / 96;
add_vector<<<nb_blocks+1, 96>>>(a_dev, b_dev, c_dev, size);
// Blocking D2H copy also synchronizes with the kernel.
if (cudaSuccess != cudaMemcpy(c, c_dev, sizeof(int) * size, cudaMemcpyDeviceToHost))
{
cerr << "error cuda mem copy back" << endl;
}
for (int i=0; i<size; ++i)
{
cout << c[i] << " ";
}
cout << endl;
delete[] a;
delete[] b;
delete[] c;
cudaFree(a_dev);
cudaFree(b_dev);
cudaFree(c_dev);
return 0;
}
|
20,089 | #include <stdio.h>
#include <cuda.h>
#include <cuda_runtime.h>
#include <string.h>
// Returns 1 when c is an ASCII lowercase letter ('a'..'z', i.e. 97..122),
// 0 for everything else. Device-side stand-in for islower().
__device__ int d_islower(char c) {
    return (c >= 97 && c <= 122) ? 1 : 0;
}
/**
 * Device-side ASCII upper-casing helper.
 *
 * Bug fix: the original returned c - 32 for EVERY character that was not
 * already uppercase, corrupting digits, spaces and punctuation (e.g.
 * ' ' -> '\0', '!' -> '\1'). Now only lowercase letters are shifted and
 * every other character passes through unchanged. Callers that pre-filter
 * with d_islower() (as toUpperString does) see identical results.
 */
__device__ int d_toupper(char c) {
    if (c >= 97 && c <= 122) return c - 32;  // 'a'..'z' -> 'A'..'Z'
    return c;
}
// One block per character (launched <<<N, 1>>>): upper-case the character
// this block owns, in place.
__global__ void toUpperString(char* c) {
    const int idx = blockIdx.x;
    const char ch = c[idx];
    if (d_islower(ch)) {
        c[idx] = d_toupper(ch);
    }
}
/**
 * Upper-cases a fixed string on the GPU (one block per character) and
 * prints the result.
 *
 * NOTE(review): N = 16 exactly fits "good luck! guys" (15 chars + NUL);
 * any longer literal would overflow the buffer — derive N from strlen if
 * the string ever changes. CUDA calls are unchecked here.
 */
int main() {
const int N = 16;
char* c;
int BufferSize = N * sizeof(char);
// Host memory allocation
c = (char*)malloc(BufferSize);
// Assign value to string
strcpy(c, "good luck! guys");
char* dev_C;
// Device memory allocation
cudaMalloc((void**)&dev_C, BufferSize);
// Copy string from host to device
cudaMemcpy(dev_C, c, BufferSize, cudaMemcpyHostToDevice);
// Kernel function
toUpperString<<<N, 1>>>(dev_C);
// Copy upper string from device to host
cudaMemcpy(c, dev_C, BufferSize, cudaMemcpyDeviceToHost);
printf("%s\n", c);
// Free memory
cudaFree(dev_C);
free(c);
return 0;
}
|
20,090 | #include <cuda_runtime.h>
#include "device_launch_parameters.h"
#include <iostream>
/**
 * Hillis–Steele inclusive prefix sum over one 1024-element chunk of input,
 * double-buffered in dynamic shared memory (caller must pass
 * 2 * length * sizeof(int) as the shared-memory launch argument).
 *
 * iter      — chunk index; this block scans input[iter*1024 ...].
 * loader    — running total of all previous chunks, added to every output.
 *
 * NOTE(review): the __syncthreads() calls sit inside `if (thid < length)`;
 * that is only safe when every launched thread satisfies the guard
 * (blockDim.x <= length) — confirm the launcher guarantees this.
 * NOTE(review): input[thid + iter*1024] is read unguarded against the
 * true buffer end; the caller must not launch more threads than there are
 * elements remaining in the final chunk.
 */
__global__
void prefix_sum(int iter, int* tooffsets, const int* input, int64_t length, int loader) {
int thid = threadIdx.x;
extern __shared__ int temp[];
// pout/pin select the write/read halves of the double buffer.
int pout = 0, pin = 1;
if (thid < length) {
temp[thid] = input[thid + (iter*1024)];
__syncthreads();
for (int offset = 1; offset < length; offset *=2) {
// Swap buffers: pin becomes the buffer written last iteration.
pout = 1 - pout;
pin = 1 - pout;
if (thid >= offset)
temp[pout*length + thid] = temp[pin*length + thid - offset] + temp[pin*length + thid];
else
temp[pout*length + thid] = temp[pin*length + thid];
__syncthreads();
}
// Shift each chunk by the running total of all preceding chunks.
tooffsets[thid] = temp[pout*length + thid] + loader;
}
}
/**
 * Host driver: inclusive prefix sum of input[0..length-1] into
 * tooffsets[0..length], scanning one 1024-element chunk per kernel launch
 * and carrying the running total between chunks.
 *
 * Bug fixes:
 *  - The final partial chunk was launched with 1024 threads and
 *    length=1024, reading d_input past its end; each launch now uses the
 *    chunk's true element count, which also keeps the kernel's
 *    `thid < length` guard uniform (no divergent barrier).
 *  - When length was an exact multiple of 1024, `length/1024 + 1`
 *    produced a bogus empty extra block; the ceil-division below does not.
 */
void offload(int* tooffsets, const int* input, int64_t length) {
    if (length <= 0) return;  // nothing to scan
    int* d_tooffsets;
    int* d_input;
    cudaMalloc((void**)&d_tooffsets, 1024 * sizeof(int));
    cudaMalloc((void**)&d_input, length * sizeof(int));
    cudaMemcpy(d_input, input, length * sizeof(int), cudaMemcpyHostToDevice);
    const int block = (int)((length + 1023) / 1024);  // ceil-div chunk count
    int sums = 0;  // running total carried into each chunk
    for (int i = 0; i < block; i++) {
        const int64_t remaining = length - (int64_t)i * 1024;
        const int chunk = remaining < 1024 ? (int)remaining : 1024;
        // Shared memory holds the scan's double buffer: 2 * chunk ints.
        prefix_sum<<<1, chunk, chunk * 2 * sizeof(int)>>>(i, d_tooffsets, d_input, chunk, sums);
        cudaDeviceSynchronize();
        cudaMemcpy(tooffsets + i * 1024, d_tooffsets, chunk * sizeof(int), cudaMemcpyDeviceToHost);
        sums = tooffsets[i * 1024 + chunk - 1];
    }
    // Preserved from the original: append a final sentinel entry.
    tooffsets[length] = tooffsets[length - 1] + input[length - 1];
    cudaFree(d_tooffsets);
    cudaFree(d_input);
}
/**
 * Driver: scans 600,000 tens and prints the resulting offsets.
 *
 * Bug fix: the original declared `int tooffsets[600001], input[600000]`
 * as locals — ~4.8 MB of stack, well past the usual 1–8 MB limit and a
 * likely stack overflow. The arrays are now heap-allocated.
 */
int main() {
    const int size = 600000;
    int* tooffsets = new int[size + 1];
    int* input = new int[size];
    for (int i = 0; i < size; i++) {
        input[i] = 10;
    }
    offload(tooffsets, input, size);
    for (int i = 0; i < size + 1; i++) {
        std::cout << tooffsets[i] << "\n";
    }
    delete[] input;
    delete[] tooffsets;
    return 0;
}
|
20,091 | /**
* @brief: Dijkstra implementation with CUDA
* @author 557966
* @date 31 V 2020
*
* make sure the graph file starts with p sp!
* nvcc dijkstra2.cu -o dipa2; ./dipa2 ../resources/sampleGraph-1.gr 0 4
* dijkstraCUDA/dicu resources/ny-roads.gr 0 25906
*/
#include "cuda_runtime.h"
#include "device_launch_parameters.h"
#include <iostream>
#include <vector>
#include <set>
#include <algorithm>
#include <iterator>
#include <ctime>
#include <time.h>
#include <fstream>
#include <limits.h>
#include <list>
#include <time.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <map>
#define _GNU_SOURCE 1
/**
* custom data structures and relevant constants
*/
#define ONE 1
#define MAX_THREADS_PER_BLOCK 1024
const int max_weight = INT_MAX; //2,147,483,647
std::vector<int> matrix;
/* Adjacency-list entry: one outgoing edge of a graph node. */
struct neighbor {
int target;  // index of the node this edge points to
int weight;  // edge weight (cost)
neighbor(int arg_target, int arg_weight)
: target(arg_target), weight(arg_weight) {}
};
/**
* GPU (device) kernel function: given directed, weighted graph, compute shortest path
* \param matrix graph representation
* \param size number of graph nodes
* \param size2 number maximal edges per node
* \param min_distance contains distances for each node
* \param previous contains the predecessor for each node
* \param visited contains status whether a node already was visited
* \param debug boolean for extra output prints
*/
/**
 * Single-block Dijkstra kernel. Each outer iteration, thread 0 picks the
 * unvisited node u with the smallest tentative distance; then each thread
 * relaxes one neighbor slot of u (the sparse matrix stores size2/2
 * (target, weight) pairs per node, -1 marking empty slots).
 *
 * Launch contract (from dijkstra()): one block of size2/2 threads.
 *
 * NOTE(review): when all nodes are visited, thread 0 `break`s out of the
 * loop while every other thread keeps looping into __syncthreads() — a
 * divergent barrier (deadlock/UB per the CUDA model). Likewise the
 * `continue` statements skip a thread ahead to the next iteration's
 * barrier. This appears to "work" only by accident of hardware barrier
 * counting — confirm and restructure so all threads exit together.
 * NOTE(review): concurrent min_distance[v] updates by different threads
 * are unsynchronized; atomicMin would make the relaxation race-free.
 */
__global__
void process_graph(const int* matrix, const int size, const int size2, int* min_distance, int* previous, int* visited, const bool debug) {
__shared__ int u;
int dist, ti, wi, v, w, distance_through_u;
int tid = blockDim.x * blockIdx.x + threadIdx.x;
// printf("start device %d\n", tid);
// worst case node loop
for(int i=0; i<size; i++) {
// Thread 0 serially selects the next node u to settle.
if(tid == 0) {
u = -1;
dist = max_weight;
if(debug) {
printf("master thread started: su=%d, dist=%d\n", u, dist);
printf("\tsearching for new node to process");
}
for (int j=0; j < size; j++) {
if(visited[j] == 0 && min_distance[j] < dist) {
dist = min_distance[j];
u = j;
}
}
if(u == -1) {
if(debug) { printf("...nothing found, exit\n"); }
break; // exit loop, if no unvisited nodes
}
if(debug) { printf("...found %d\n", u); }
visited[u] = 1;
// printf("end master block, print vectors of size %d\n", size);
}
__syncthreads();
// printf("%d: dist=%d prev=%d vist=%d\n", tid, min_distance[tid], previous[tid], visited[tid]);
// Each thread relaxes one (target, weight) slot of node u.
if(tid < size2/2) {
ti = size2 * u + tid * 2; // target index
wi = size2 * u + tid * 2 + 1; // weights index
v = matrix[ti]; // neighbor fixation
w = matrix[wi]; // weight
if(debug) { printf("tid %d: t_index = %d/%d, t_value = %d/%d\n", tid, ti, wi, v, w); }
if(v == -1) {
if(debug) { printf("tid=%d: skip empty slot\n", tid); }
continue;
}
if(v == u) {
if(debug) { printf("\tcontinue FROM %d TO %d\n", u, v); }
// printf("continue: u=%d ud=%d tid=%d\n", u, min_distance[u], tid);
continue;
}
distance_through_u = w + min_distance[u];
if(debug) { printf("tid=%d: w = %d, distance_through_u = %d, min_distance[%d] = %d\n", tid, w, distance_through_u, v, min_distance[v]); }
if (distance_through_u < min_distance[v]) {
min_distance[v] = distance_through_u;
previous[v] = u;
// __syncthreads();
if(debug) { printf("\trelaxation FROM %d TO %d WITH %d + %d = %d\n", u, v, min_distance[u], w, min_distance[u] + w); }
}
}
}
}
/**
* Given directed, weighted graph, handle device calculation and data management
* \param source source vertex as path start
* \param matrix graph representation
* \param size number of graph nodes
* \param size2 number maximal edges per node
* \param min_distance contains distances for each node
* \param previous contains the predecessor for each node
* \param visited contains status whether a node already was visited
* \param debug boolean for extra output prints
*/
/**
 * Host wrapper: copies the sparse graph and working vectors to the device,
 * runs process_graph in a single block of size2/2 threads, and copies the
 * resulting distances and predecessors back into min_distance/previous.
 *
 * Fix: kernel launch errors were silently ignored — a <<<>>> launch only
 * reports bad configurations through cudaGetLastError(), so it is checked
 * now (e.g. size2/2 > 1024 would fail the launch and leave the output
 * vectors unchanged without any diagnostic).
 *
 * NOTE(review): cudaDeviceReset() here tears down the context for the
 * whole process — fine for this one-shot program, hostile to reuse.
 */
void dijkstra(const int& source, const std::vector<int>& matrix, const int& size, const int& size2, std::vector<int>& min_distance, std::vector<int>& previous, std::vector<int>& visited, const bool& debug) {
    int* matrix_dev;
    int* min_distance_dev;
    int* previous_dev;
    int* visited_dev;
    printf("allocate and copy to device");
    cudaMalloc( &matrix_dev, size * size2 * sizeof(int) );
    cudaMalloc( &min_distance_dev, size*sizeof(int) );
    cudaMalloc( &previous_dev, size*sizeof(int) );
    cudaMalloc( &visited_dev, size*sizeof(int) );
    cudaMemcpy( matrix_dev, matrix.data(), size * size2 * sizeof(int), cudaMemcpyHostToDevice );
    cudaMemcpy( min_distance_dev, min_distance.data(), size*sizeof(int), cudaMemcpyHostToDevice );
    cudaMemcpy( previous_dev, previous.data(), size*sizeof(int), cudaMemcpyHostToDevice );
    cudaMemcpy( visited_dev, visited.data(), size*sizeof(int), cudaMemcpyHostToDevice );
    printf("...OK\nstart processing nodes, call kernel with %d threads", size2/2);
    process_graph <<< ONE, size2/2 >>> (matrix_dev, size, size2, min_distance_dev, previous_dev, visited_dev, debug);
    cudaError_t launch_status = cudaGetLastError();
    if (launch_status != cudaSuccess) {
        fprintf(stderr, "kernel launch failed: %s\n", cudaGetErrorString(launch_status));
    }
    printf("...");
    printf("...OK\ncopy data back\n");
    // Blocking copies synchronize with the kernel before reading results.
    cudaMemcpy( min_distance.data(), min_distance_dev, size * sizeof(int), cudaMemcpyDeviceToHost );
    cudaMemcpy( previous.data(), previous_dev, size * sizeof(int), cudaMemcpyDeviceToHost );
    printf("copy data back...OK\n");
    if(debug) {
        printf("after dijkstra:\n");
        for(int i=0; i<size; i++) printf("%d: dist=%d, prev=%d\n", i, min_distance[i], previous[i]);
    }
    cudaFree(matrix_dev);
    cudaFree(min_distance_dev);
    cudaFree(previous_dev);
    cudaFree(visited_dev);
    cudaDeviceReset();
}
/**
* Prints graph in form of an flattened adjacency matrix
* \param matrix graph representation
* \param size number of graph nodes
*/
/**
 * Prints the graph as a flattened size x size adjacency matrix, one row
 * per line (debug helper; -1 entries mean "no edge").
 *
 * Fix: corrected the "lenght" typo in the banner message.
 */
void printAdjacencyMatrix2(const std::vector<int>& matrix, const int size) {
    printf("\nprinting flattened vector of size %d and line length %d", size*size, size);
    for(int row=0; row < size*size; row += size) { //node
        std::cout << std::endl;
        for(int col=0; col < size; col++) { //neighbors
            printf("%2d ", matrix[row+col]);
        }
    }
    std::cout << std::endl;
}
/**
* Prints graph in form of sparse matrix
* \param matrix graph representation
* \param size number of graph nodes
* \param size2 number maximal edges per node
*/
/**
 * Debug helper: prints the flattened sparse matrix, one node per line,
 * listing its size2/2 interleaved (target, weight) slots (-1 = empty).
 */
void printSparseMatrix(const std::vector<int>& matrix, const int size, const int size2) {
    printf("\nprinting flattened sparse matrix of size %d and max line length %d\n", size, size2);
    printf("structure: node -> { (edge_1, weight_1), ... (edge_size2, weight_size2) }\n");
    for(int node = 0; node < size; node++) {
        printf("node %d:", node);
        const int base = node * size2;
        for(int slot = 0; slot < size2; slot++) {
            printf(" %2d ", matrix[base + slot]);
        }
        printf("\n");
    }
}
/**
* Unfolds shortest path from target to source backwards
* \param vertex target node
* \param previous contains the predecessor for each node
* \return path path from target to source
*/
/**
 * Walks the predecessor chain backwards from `vertex` to the source
 * (marked by predecessor -1) and returns the path in forward order
 * (source first, `vertex` last).
 */
std::list <int> getShortestPathToX(int vertex, const std::vector <int>& previous) {
    std::list <int> path;
    for (int v = vertex; v != -1; v = previous[v]) {
        path.push_front(v);
    }
    return path;
}
/**
* Launcher
* \param argc program argument counter
* \param argv submitted arguments: 1=graph file path, 2=source node, 3=target node, 4=extra prints
* \return status program exit status
*/
/**
 * Launcher: parses args (graph file, source, target, verbose), reads a
 * DIMACS-style "p sp" graph into a flattened sparse matrix, runs the CUDA
 * Dijkstra, and prints the distance and the unfolded path.
 *
 * NOTE(review): if fopen fails, only a message is printed and fscanf is
 * then called on a NULL FILE* — should return early instead.
 * NOTE(review): `status` is unused; getline() is POSIX-only.
 */
int main(int argc, char** argv) {
const clock_t begin_time = clock();
FILE *fp;
const char *input_file_name;
char* line = NULL;
size_t llen = 0;
ssize_t read = 0;
int size, edges, source, target, weight, status, start, end, max_edges, size2, ti, wi, debug_int;
std::vector<int> min_distance, previous, visited;
// std::vector<int> matrix, min_distance, previous, visited;
bool debug = true;
printf("Dijkstra with CUDA\nargc=%d\n", argc);
if (argc == 5) {
input_file_name = argv[1];
start = atoi(argv[2]);
end = atoi(argv[3]);
debug_int = atoi(argv[4]);
if(debug_int == 0) { debug = false; }
printf("init from args: ");
}
else {
input_file_name = "resources/sampleGraph-1.gr";
start = 0;
end = 4;
printf("no or bad args submitted, use default values: ");
}
printf("\n\tinput file = %s\n\tsource = %d\n\ttarget = %d\n\tverbose = %d", input_file_name, start, end, debug);
fp = fopen(input_file_name,"r");
if(!fp) printf("Error Openning Graph File\n");
printf("\nReading Graph File");
printf("\n");
fscanf(fp, "p sp %d %d", &size, &edges);
printf("(first line) size=%d edges=%d\n", size, edges);
// matrix.resize(size*size, -1); //size2 = 1 159 330 980
// First pass: collect edges per node and count each node's out-degree.
std::map<int, std::vector<neighbor> > adjacency_list;
std::map<int, int> max_edges_map;
while ((read = getline(&line, &llen, fp)) != -1) {
if(debug) { printf("%3zu: %s", read, line); }
if(line[0] == 'a' && sscanf(line, "a %d %d %d", &source, &target, &weight) == 3) {
// Input is 1-based; internal node ids are 0-based.
source--;
target--;
if(debug) { printf("\tarc from %d to %d weight %d | index %d\n", source, target, weight, source*size + target); }
neighbor n = neighbor(target, weight);
adjacency_list[source].push_back(n);
max_edges_map[source]++;
}
}
// size2 = 2 * max out-degree: each node's row stores (target, weight) pairs.
max_edges = -1;
for(int i=0; i<size; i++) {
if(max_edges_map[i] > max_edges) {
max_edges = max_edges_map[i];
}
}
size2 = max_edges * 2;
printf("\n");
printf("filled temp map: max_edges = %d, size2 = %d,\n\tnew size = %d, square size = %d, diff = %d\n", max_edges, size2, size*size2, size*size, size*size - size*size2);
matrix.resize(size * size2, -1);
printf("\n");
printf("start transfer from map to sparse matrix\n");
// node
for(int i=0; i<size; i++) {
// NOTE(review): for a node with no outgoing edges, find(i) returns
// end() and this dereference is undefined behavior — guard with a
// find()/end() check before using ->second.
const std::vector <neighbor>& neighbors = adjacency_list.find(i)->second;
int temp_size = neighbors.size();
if(debug) { printf("node %d of size %d\n", i, temp_size); }
// node edges
for(int j=0; j < temp_size; j++) {
target = neighbors[j].target;
weight = neighbors[j].weight;
ti = size2 * i + j * 2;
wi = size2 * i + j * 2 + 1;
if(debug) { printf("\ttarget = %d weight = %d | t_index = %d/%d\n", target, weight, ti, wi); }
matrix[ti] = target;
matrix[wi] = weight;
}
}
if(fp) fclose(fp);
printf("Reading Graph File...OK\nempty temp map");
adjacency_list.clear();
printf("...OK\n");
// printAdjacencyMatrix2(matrix, size);
if(debug) { printSparseMatrix(matrix, size, size2); }
printf("resize output vectors and set source to zero\n");
min_distance.resize(size, max_weight);
min_distance[start] = 0;
previous.resize(size, -1);
visited.resize(size, 0);
// printf("main print vec of size %d\n", size);
// for(int i=0; i<size; i++) printf("vec[%d] = %d\n", i, min_distance[i]);
printf("init in %f sec OK\n\n", float( clock () - begin_time ) / CLOCKS_PER_SEC);
printf("start dijkstra\n");
const clock_t begin_time2 = clock();
dijkstra(start, matrix, size, size2, min_distance, previous, visited, debug);
printf("distance from start node [%d] to end node [%d] is %2d\n"
"calculation time: %f sec\n", start, end, min_distance[end], float( clock () - begin_time2 ) / CLOCKS_PER_SEC);
// Unfold and print the path from source to target with per-node distances.
std::list<int> path = getShortestPathToX(end, previous);
size = path.size();
printf("path of size %d:", size);
// std::copy(path.begin(), path.end(), std::ostream_iterator<int>(std::cout, " "));
for(int i=0; i < size; i++) {
ti = path.front();
printf(" %d/%d", ti, min_distance[ti]);
path.pop_front();
}
std::cout << std::endl;
printf("total run time is %f sec\n", float( clock () - begin_time ) / CLOCKS_PER_SEC);
return 0;
} |
20,092 | #include <stdio.h>
#include <stdlib.h>
#include <math.h>
/******************************************************************************/
#define BLOCK_SIZE_X 256
#define BLOCK_SIZE_Y 1
__constant__ int c_maxiter;
__constant__ float c_xmin;
__constant__ float c_ymin;
__constant__ float c_x_step;
__constant__ float c_y_step;
__constant__ int c_N;
__constant__ int c_width;
__constant__ int c_rowsize;
/******************************************************************************/
/**
 * Maps an escape-time result to an output color (written via rp/gp/bp).
 * Interior points (iter == c_maxiter) become black; escaped points get a
 * smooth brightness from the fractional iteration count, with blue fixed
 * at 255.
 *
 * Fix: the brightness expression mixed double literals (256., 1.75, 0.5)
 * and double-precision log2() into otherwise single-precision work,
 * forcing every escaped pixel through double math; it now uses float
 * literals and log2f throughout (log2f(0.5f) == -1.0f exactly, so the
 * formula is unchanged up to float rounding).
 */
__device__ void d_smooth_fast_element_colormap(int iter, float re2, float im2,
int *rp, int *gp, int *bp)
{
    if(iter == c_maxiter) {
        /* black */
        *rp = 0; // Red channel
        *gp = 0; // Green channel
        *bp = 0; // Blue channel
    }
    else {
        // Smooth (fractional) iteration count mapped into [0, 255].
        int brightness = 256.f*log2f(1.75f-log2f(0.5f)+iter-log2f(log2f(re2+im2)))/log2f((float)c_maxiter);
        *rp = brightness; // Red channel
        *gp = brightness; // Green channel
        *bp = 255; // Blue channel
    }
}
/**
 * Fast interior test for the Mandelbrot set: if the point (x, y) lies in
 * the main cardioid or the period-2 bulb, sets *iterp to c_maxiter so the
 * caller's escape loop is skipped entirely; otherwise leaves it untouched.
 *
 * Fix: constants are now float literals (0.25f, 1.f, 0.0625f) instead of
 * double, keeping the whole test in single precision as intended for this
 * float-based kernel.
 */
__device__ void in_cardioid_or_period2_bulb(int *iterp, float x, float y)
{
    float xdiff = x - 0.25f;
    float y2 = y * y;
    float q = xdiff*xdiff + y2;
    // Is the point in the cardioid?
    if (q * (q + xdiff) < 0.25f*y2) {
        *iterp = c_maxiter;
    }
    else if ((x+1.f)*(x+1.f) + y2 < 0.0625f) { // Is the point in the period-2 bulb?
        *iterp = c_maxiter;
    }
}
/*
 * One thread per pixel: iterate z -> z^2 + c until |z|^2 exceeds the bailout
 * radius or c_maxiter is reached, then write a smoothed BGR colour into img.
 * Expects a 1-D-style launch covering at least c_N threads (excess threads
 * exit via the offset < c_N guard).
 */
__global__ void compute_escape_time(char *img)
{
// Flatten (blockIdx, threadIdx) into a linear pixel index, then split it
// into row i and column j.
int offset = gridDim.x*blockDim.x*threadIdx.y + blockIdx.x*blockDim.x+threadIdx.x;
int i = offset / c_width;
int j = offset - i * c_width;
int iteration = 0;
// c = centre of the pixel in complex-plane coordinates.
float c_re = c_xmin + c_x_step/2 + j*c_x_step;
float c_im = c_ymin + c_y_step/2 + i*c_y_step;
float zn_re = 0.;
float zn_im = 0.;
float tmp_re;
float re2 = 0.;
float im2 = 0.;
int bailout_radius2 = 2*2;
int r, g, b;
if (offset < c_N) {
// Check if point is in cardioid or in period-2 bulb
// (sets iteration to c_maxiter so the while loop below is skipped)
in_cardioid_or_period2_bulb(&iteration, c_re, c_im);
while ((re2 + im2 < bailout_radius2) && (iteration < c_maxiter)) {
// z_{n+1} = z_n^2 + c, using the cached squares re2/im2.
tmp_re = re2 - im2 + c_re;
zn_im = zn_re * zn_im;
zn_im += zn_im; // Multiply by two
zn_im += c_im;
zn_re = tmp_re;
re2 = zn_re * zn_re;
im2 = zn_im * zn_im;
iteration++;
}
d_smooth_fast_element_colormap(iteration, re2, im2, &r, &g, &b);
offset = c_rowsize * i + 3 * j; // offset in the image array
// BMP-style byte order: blue, green, red.
img[offset++] = b;
img[offset++] = g;
img[offset] = r;
}
}
/******************************************************************************/
/*
 * Host-side driver: allocates the device image, publishes the render
 * parameters to constant memory, launches compute_escape_time over N pixels
 * and copies the finished image back into h_img.
 *
 * Fix over the original: CUDA calls were completely unchecked, so an
 * allocation failure or a bad launch configuration produced a silently
 * garbage image. Errors are now detected and reported on stderr; the
 * interface and the happy-path behavior are unchanged.
 */
extern "C" void kernel_wrapper(char *h_img, int d_img_size, int MAX_ITER,
                               float X_MIN, float Y_MIN, float h_x_step,
                               float h_y_step, int N, int WIDTH, int row_size)
{
    dim3 block_size, grid_size;
    char *d_img = NULL;
    cudaError_t err;
    // Create the grid of blocks of threads (ceil-divide N by the block size).
    block_size.x = BLOCK_SIZE_X; block_size.y = BLOCK_SIZE_Y;
    grid_size.x = N / (block_size.x*block_size.y) + (N%(block_size.x*block_size.y) == 0? 0 : 1);
    err = cudaMalloc((void **)&d_img, d_img_size);
    if (err != cudaSuccess) {
        fprintf(stderr, "kernel_wrapper: cudaMalloc failed: %s\n", cudaGetErrorString(err));
        return;
    }
    cudaMemset(d_img, 0, d_img_size);
    // Copy memory to constant memory in the device
    cudaMemcpyToSymbol(c_maxiter, &MAX_ITER, sizeof(int));
    cudaMemcpyToSymbol(c_xmin, &X_MIN, sizeof(float));
    cudaMemcpyToSymbol(c_ymin, &Y_MIN, sizeof(float));
    cudaMemcpyToSymbol(c_x_step, &h_x_step, sizeof(float));
    cudaMemcpyToSymbol(c_y_step, &h_y_step, sizeof(float));
    cudaMemcpyToSymbol(c_N, &N, sizeof(int));
    cudaMemcpyToSymbol(c_width, &WIDTH, sizeof(int));
    cudaMemcpyToSymbol(c_rowsize, &row_size, sizeof(int));
    // Call the kernel to execute on the gpu
    compute_escape_time<<<grid_size, block_size>>>(d_img);
    err = cudaGetLastError(); // catch bad launch configuration
    if (err != cudaSuccess)
        fprintf(stderr, "kernel_wrapper: launch failed: %s\n", cudaGetErrorString(err));
    // Copy the results back (synchronizes with the kernel; also surfaces
    // asynchronous execution errors)
    err = cudaMemcpy(h_img, d_img, d_img_size, cudaMemcpyDeviceToHost);
    if (err != cudaSuccess)
        fprintf(stderr, "kernel_wrapper: copy-back failed: %s\n", cudaGetErrorString(err));
    cudaFree(d_img);
}
|
20,093 | // From CUDA for Engineering
// dist_v2_cuda_unified/kernel.cu
#include <iostream>
#include <stdio.h>
#include <cuda_runtime.h>
#define N 1000000000
#define TPB 32
#define DEBUG 0
// Map index i in [0, n-1] onto the unit interval [0, 1].
float scale(int i, int n) {
  return static_cast<float>(i) / (n - 1);
}
__device__
// 1-D Euclidean distance between two scalars: sqrt((x2-x1)^2) == |x2 - x1|.
float distance(float x1, float x2)
{
    float d = x2 - x1;
    return sqrt(d * d);
}
__global__
// For each element i, write |d_in[i] - ref| into d_out[i].
// NOTE(review): there is no `if (i < n)` guard, so the launch must cover
// exactly the array length; the call site uses N/TPB blocks with N a
// multiple of TPB, which happens to match. Adding a length parameter would
// change the kernel's interface, so the assumption is only documented here.
void distanceKernel(float *d_out, float *d_in, float ref)
{
const int i = blockIdx.x * blockDim.x + threadIdx.x;
const float x = d_in[i];
d_out[i] = distance(x, ref);
}
// Driver: fill a managed array with N evenly spaced samples of [0,1],
// compute each sample's distance to ref on the GPU, then free everything.
// NOTE(review): N is 1e9, so the two managed buffers total ~8 GB and the
// cudaMallocManaged results are unchecked — on smaller GPUs/hosts the
// allocations can fail and in[i] below would dereference null. Verify N
// against available memory before running.
int main()
{
const float ref = 0.5f;
float *in = 0;
float *out = 0;
// Allocate managed memory for in/out arrays
cudaMallocManaged(&in, N* sizeof(float));
cudaMallocManaged(&out, N* sizeof(float));
for (int i = 0; i < N; i++) { in[i] = scale(i, N); }
// launch kernel (N is an exact multiple of TPB, so N/TPB blocks cover all
// elements and the unguarded kernel stays in bounds)
distanceKernel<<<N/TPB, TPB>>>(out, in, ref);
cudaDeviceSynchronize();
#if DEBUG
std::cout << "dist_v2_unified: cuda unified memory\n";
std::cout << "out: ";
for (int i = 0; i < N; i++) {
std::cout << " " << out[i];
}
std::cout << "\n";
#endif
cudaFree(in);
cudaFree(out);
}
|
20,094 | #include "includes.h"
/*
 * Seam-carving dynamic-programming pass:
 *   min_energy[i][j] = energy[i][j] + min of the three row-(i-1) neighbours.
 *
 * Launch contract: a single block; shared memory = width * sizeof(float);
 * width <= 4 * blockDim.x (each thread stages at most 4 columns in `temp` —
 * the original array was silently overrun beyond that, so it is asserted in
 * documentation here).
 *
 * Fixes over the original:
 *  - Threads with tid >= width used to `return` before __syncthreads(),
 *    leaving the remaining threads at a divergent barrier (undefined
 *    behavior). The loops' bounds now keep out-of-range threads idle while
 *    still reaching every barrier.
 *  - A __syncthreads() was missing between publishing row i into shared
 *    memory and iteration i+1 reading its neighbours — a data race. A
 *    barrier now closes each outer iteration.
 */
__global__ void computeMinEnergyMatrix(float *energy, float *min_energy, int height, int width) {
    const int tid = threadIdx.x;
    extern __shared__ float shared_row_energy[];

    // Row 0 has no upper neighbours: copy raw energies to shared memory and
    // straight into the output.
    for (int j = tid; j < width; j += blockDim.x) {
        shared_row_energy[j] = energy[j];
        min_energy[j] = energy[j];
    }
    __syncthreads();

    float temp[4]; // per-thread staging of up to 4 column results
    for (int i = 1; i < height; i++) {
        int k = 0;
        for (int j = tid; j < width; j += blockDim.x) {
            // Out-of-image neighbours get a huge sentinel so min() ignores them.
            float l = (j == 0)         ? 999999999 : shared_row_energy[j - 1];
            float m = shared_row_energy[j];
            float r = (j == width - 1) ? 999999999 : shared_row_energy[j + 1];
            temp[k++] = energy[i * width + j] + min(l, min(m, r));
        }
        __syncthreads(); // everyone finished reading row i-1 before it is overwritten

        k = 0;
        for (int j = tid; j < width; j += blockDim.x) {
            shared_row_energy[j] = temp[k];
            min_energy[i * width + j] = temp[k++];
        }
        __syncthreads(); // row i fully published before iteration i+1 reads it
    }
}
20,095 | /*
This works for all.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string>
#include <float.h>
#include <math.h>
#include <iostream>
#include <climits>
#include <vector>
#include <stack>
using namespace std;
//#include <thrust/host_vector.h>
//#include <thrust/device_vector.h>
// Directed edge stored inside its source vertex's adjacency list; flow
// starts at zero and grows toward capacity during push-relabel.
struct edge{ int flow,capacity,to; edge(int to,int capacity){ this->to=to; this->capacity=capacity; this->flow=0; } };
// Push-relabel vertex: label (height), excess flow, a segmentation color
// (0 = background after the final DFS), and its outgoing edges.
struct node{ int height,excess,color; vector<edge> edges; };
// Global flat array of vertices, allocated in main().
node* graph;
//functions
void push_relabel_GPU(int N, node * graph);
// Append a directed edge from -> to with the given capacity to the global graph.
void addEdge(int from,int to,int capacity){
graph[from].edges.push_back(edge(to,capacity));
}
/*
 * Sequential (CPU) push-relabel max-flow / min-cut on the global `graph`,
 * with vertex 0 as source and N-1 as sink. After the flow saturates, a DFS
 * over non-saturated edges from the source colors the source-side vertices
 * (color = 0, "background"). Mutates `graph` in place.
 */
void findMinCut(int N)
{
int src=0,sink=N-1; // src should be background
// compute preflow
for(int i=0;i<N;i++){ graph[i].height=0; graph[i].excess=0; } graph[src].height=N;
// Saturate every source edge and add the reverse residual edge.
for(vector<edge>::iterator e=graph[src].edges.begin();e!=graph[src].edges.end();e++){
graph[e->to].excess=e->capacity; e->flow=e->capacity;
addEdge(e->to,src,e->flow);
}
bool hasExcessNode=true;
while(hasExcessNode)
{
hasExcessNode=false;
// Scan all internal vertices; any with positive excess keeps the loop alive.
for(int i=1;i<N-1;i++) if(graph[i].excess>0)
{
// push from i to neighbours
hasExcessNode=true; bool pushed=false;
for(vector<edge>::iterator e=graph[i].edges.begin();e!=graph[i].edges.end();e++) if(graph[e->to].height<graph[i].height and e->capacity>e->flow)
{
// Push min(excess, residual capacity) along this admissible edge.
int del=min(graph[i].excess,e->capacity-e->flow);
e->flow+=del; graph[i].excess-=del; graph[e->to].excess+=del;
// update residual graph
bool edgeFound=false;
for(vector<edge>::iterator e2=graph[e->to].edges.begin();e2!=graph[e->to].edges.end();e2++)
if(e2->to==i){ e2->flow-=del; edgeFound=true; break; }
if(!edgeFound) addEdge(e->to,i,del);
pushed=true; break;
}
if(!pushed)
{
// relabel i to enable push afterwards
int minHeight = INT_MAX;
for(int j=0; j<graph[i].edges.size();j++)
{
edge e = graph[i].edges[j];
if(e.to!=sink and e.capacity>e.flow){
minHeight=min(minHeight,graph[e.to].height);
}
}
if(graph[i].height<=minHeight) graph[i].height=minHeight+1;
}
}
}
// do a dfs from src to mark background pixels
stack<int> stack; stack.push(src);
while(!stack.empty())
{
int curr=stack.top(); stack.pop();
graph[curr].color=0; // mark the pixel as background
// NOTE(review): follows edges with capacity == flow; presumably intended to
// follow *unsaturated* residual edges (capacity > flow) — verify against the
// intended min-cut semantics.
for(vector<edge>::iterator e=graph[curr].edges.begin();e!=graph[curr].edges.end();e++)
if(e->capacity==e->flow and graph[e->to].color!=0) stack.push(e->to);
}
}
// 0 , 1 , 2 , N = 3
// (0,1,2),(3,4),(5,6,7) assume num_neighbours = 2
//[2][0] gives 3 + 1*2 + 0 = 5
//[1][1] gives 3 + 0*2 + 1 = 4
//[0][2] gives
/*
 * Flat offset of neighbour slot j of vertex i in the packed adjacency
 * arrays. Vertex 0 (the source) is given the first N slots; every later
 * vertex i starts at N + (i-1)*num_neighbours. Matches the allocation
 * size (N-2)*num_neighbours + 2*N used by push_relabel_GPU.
 *
 * Example (N=3, num_neighbours=2): lists occupy (0,1,2),(3,4),(5,6,7),
 * so get_index(2,0,...)=5 and get_index(1,1,...)=4.
 */
__host__ __device__ int get_index(int i, int j, int num_neighbours, int N){
    return (i > 0) ? N + (i - 1) * num_neighbours + j : j;
}
// Build a small 6-vertex test flow network (classic CLRS-style example) and
// run the GPU push-relabel solver on it. All vertices start with color 1
// ("foreground") so a later cut/DFS can flip source-side vertices to 0.
int main()
{
int N=6;
graph=new node[N];
for(int i=0;i<N;i++) graph[i].color=1;
/*addEdge(0,1,3);
addEdge(0,2,2);
addEdge(1,2,5);
addEdge(2,3,3);
addEdge(1,3,2);*/
addEdge(0, 2, 13);
addEdge(0, 1, 16);
addEdge(1, 2, 10);
// addEdge(2, 1, 4);
addEdge(1, 3, 12);
addEdge(2, 4, 14);
addEdge(3, 2, 9);
addEdge(2, 3, 9);
addEdge(3, 5, 20);
addEdge(4, 3, 7);
addEdge(4, 5, 4);
// findMinCut(N);
// for(int i=0;i<N;i++) if(graph[i].color==0) cout<<i<<" "; cout<<endl;
// cout<<graph[N-1].excess;
push_relabel_GPU(N,graph); //Make sure findMinCut is NOT called before this as it modifies graph.
}
/*
 * One push-relabel step per vertex: thread u either pushes excess to its
 * lowest admissible neighbour or relabels itself. Launched repeatedly from
 * the host loop in push_relabel_GPU (cycle counts iterations per launch;
 * currently 1). Flow/excess updates use atomics; heights are written
 * non-atomically, which is the standard lock-free push-relabel relaxation.
 * The printf calls are debug instrumentation.
 */
__global__ void kernel(int * height_d, int * excess_d,int * adjacency_list_d,int * size_matrix_d, int * capacity_d, int * flow_d, int N, int num_neighbours, int sink){
int cycle = 1;
int u = blockIdx.x*blockDim.x+threadIdx.x;
while(cycle>0)
{
printf("Working on node/thread %d\n",u);
// Only active (excess-carrying) non-sink vertices do work.
// NOTE(review): the source is not excluded here — confirm that is intended.
if(excess_d[u]>0 && u!=sink)
{
int e_dash = excess_d[u];
int h_dash = 100000;
int v_dash = -1;
int i_dash = -1;
// Find the neighbour with the smallest height that still has residual capacity.
for(int i=0;i<size_matrix_d[u];i++)
{
int ind = get_index(u,i,num_neighbours,N);
int v = adjacency_list_d[ind];
int h_da_da = height_d[v];
if(h_da_da<h_dash && ((capacity_d[ind] - flow_d[ind])>0))
{
v_dash = v;
i_dash = i;
h_dash = h_da_da;
}
}
if(height_d[u]>h_dash)
{
// Push: send min(excess, residual capacity) to the chosen neighbour.
printf("nearest neighbour with lower height %d\n",v_dash);
int d = 0;
int x_tmp=capacity_d[get_index(u,i_dash,num_neighbours,N)]-flow_d[get_index(u,i_dash,num_neighbours,N)];
if(x_tmp<0)
{
//assert(false)
}
if(e_dash<x_tmp)
{
d = e_dash;
}
else{
d = x_tmp;
}
// Locate u inside v's adjacency list (for the reverse-edge update,
// currently commented out below).
int ind_of_u_in_v_list = 0;
for(int i=0;i<size_matrix_d[v_dash];i++)
{
if(adjacency_list_d[get_index(v_dash,i,num_neighbours,N)]==u)
{
ind_of_u_in_v_list = i;
break;
}
}
atomicAdd(&flow_d[get_index(u,i_dash,num_neighbours,N)],d);
// atomicSub(&flow_d[get_index(v_dash,ind_of_u_in_v_list,num_neighbours,N)],d);
atomicSub(&excess_d[u],d);
atomicAdd(&excess_d[v_dash],d);
}
else{
// Relabel: lift u just above its lowest reachable neighbour.
height_d[u]= h_dash +1;
}
}
cycle-=1;
}
}
/*
 * Host-side "global relabel" heuristic (currently disabled at the call
 * site): for every edge (u,v) where u sits more than one level above v,
 * saturate the edge, move the residual capacity's worth of excess from u to
 * v, and decrement the matching reverse edge's flow. Operates on the host
 * copies of the arrays; src/sink are accepted for interface symmetry but
 * not used.
 */
void global_relabel(int * height, int * excess,int * adjacency_list,int * size_matrix, int * capacity, int * flow, int N, int num_neighbours, int src, int sink)
{
for(int i=0;i<N;i++)
{
for(int j=0;j<size_matrix[i];j++)
{
int ind = get_index(i,j,num_neighbours,N);
int u = i, v = adjacency_list[ind];
int ind_of_v_in_u_list =-1;
if(height[u]>height[v]+1)
{
// Residual capacity of (u,v); the edge gets fully saturated below.
int cfuv = (capacity[get_index(u,j,num_neighbours,N)]-flow[get_index(u,j,num_neighbours,N)]);
excess[u] =excess[u] - cfuv;
excess[v] =excess[v] + cfuv;
// Find the reverse edge (v,u) so its flow can be decremented.
for(int k=0;k<size_matrix[v];k++)
{
if(adjacency_list[get_index(v,k,num_neighbours,N)]==u)
{
ind_of_v_in_u_list = k;
break;
}
}
// NOTE(review): if no reverse edge exists, ind_of_v_in_u_list stays -1
// and ind1 indexes out of range — verify every edge has its reverse.
int ind1 = get_index(v,ind_of_v_in_u_list ,num_neighbours,N);
// int cfvu = capacity[ind1] - flow[ind1];
flow[ind1] = flow[ind1] - cfuv;
flow[ind] = capacity[ind];
}
}
}
}
/*
 * Debug dump helper. When s == "excess" it prints the per-vertex excess
 * array and returns; otherwise it prints, for every stored edge, the value
 * of `flow[ind]` labelled with the string s ("flow", "capacity", ...).
 * NOTE(review): the per-edge branch hard-codes num_neighbours = 16 in the
 * get_index call — must match NUM_NEIGHBOURS in push_relabel_GPU.
 */
void print_flow(int * flow,int N, int * size_matrix, int * adjlist, string s, int * excess)
{
if(s=="excess")
{
for(int i=0;i<N;i++)
{
cout<<"Excess for "<<i<<' '<<excess[i];
}cout<<'\n';
return;
}
for(int i=0;i<N;i++)
{
for(int j=0;j<size_matrix[i];j++)
{
int ind = get_index(i,j,16,N);
cout<<s+" for "<<i<<" and "<<adjlist[ind]<<" = ";
cout<<" "<<flow[ind]<<" ";
}
cout<<'\n';
}
}
/*
 * GPU push-relabel driver. Flattens the vector-of-edges graph into packed
 * arrays (adjacency_list / capacity / flow, indexed through get_index),
 * initializes the pre-flow from the source, copies everything to the
 * device, then repeatedly launches `kernel` (one thread per vertex) until
 * the flow balances or an iteration cap is hit. Heavily instrumented with
 * debug printing; host buffers and device buffers are not freed (leaks at
 * exit).
 */
void push_relabel_GPU(int N, node * graph)
{
int src=0,sink=N-1; // src should be background
// compute preflow
// int * height_arr = malloc(sizeof(int))
size_t nsize = sizeof(int)*N;
int NUM_NEIGHBOURS = 16;
// Packed storage: source and sink get N slots each, interior vertices get
// NUM_NEIGHBOURS slots (see get_index).
size_t twonsize = sizeof(int)*((N-2)*NUM_NEIGHBOURS + 2*N);
//CPU variables
int * height = (int*)malloc(nsize);
int * excess = (int*)malloc(nsize);
int * adjacency_list = (int*)malloc(twonsize);
int * size_matrix = (int*)malloc(nsize);
int * capacity = (int*)malloc(twonsize);
int * flow = (int*)malloc(twonsize);
int * cf = (int*)malloc(twonsize);
//GPU variables
int * height_d; cudaMalloc(&height_d, nsize);
int * excess_d; cudaMalloc(&excess_d, nsize);
int * adjacency_list_d; cudaMalloc(&adjacency_list_d,twonsize);
int * size_matrix_d; cudaMalloc(&size_matrix_d, nsize);
int * capacity_d; cudaMalloc(&capacity_d,twonsize);
int * flow_d; cudaMalloc(&flow_d,twonsize);
int * cf_d = (int*)malloc(twonsize);
//Setting values for new AoS implementation
memset(height,0,nsize);
memset(excess,0,nsize);
for(int i=0;i<N;i++)
{
size_matrix[i]=0;
}
// Flatten each edge (and a same-capacity reverse slot) into the packed arrays.
for(int i=0;i<N;i++)
{
int s = graph[i].edges.size();
// size_matrix[i] = s;
for(int j=0;j<s;j++)
{
cout<<" -------------- << s = << "<<s<<'\n';
// int u = get_index(i,j,NUM_NEIGHBOURS,N);
// cout<<"i = "<<i<<" j = "<<j<< "setting "<< u<<"adj_list[ ] as "<< (graph[i].edges)[j].to<<'\n';
int v = (graph[i].edges)[j].to;
int cap = (((graph[i].edges)[j]).capacity);
adjacency_list[get_index(i,size_matrix[i],NUM_NEIGHBOURS,N)] = v;
capacity[get_index(i,size_matrix[i],NUM_NEIGHBOURS,N)] = cap;
flow[get_index(i,size_matrix[i],NUM_NEIGHBOURS,N)] = 0;
cf[get_index(i,size_matrix[i],NUM_NEIGHBOURS,N)] = capacity[get_index(i,size_matrix[i],NUM_NEIGHBOURS,N)]-flow[get_index(i,size_matrix[i],NUM_NEIGHBOURS,N)];
size_matrix[i]++;
// NOTE(review): the reverse edge is stored with capacity `cap`, not 0 —
// the commented-out line below suggests 0 was also considered; verify
// which residual-graph convention the kernel expects.
adjacency_list[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)] = i;
capacity[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)] = cap;
// capacity[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)] = 0;
flow[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)] = 0;
cf[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)] = capacity[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)]-flow[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)];
size_matrix[v]++;
// capacity[get_index(v,size_matrix[v],NUM_NEIGHBOURS,N)] = i;
// capacity[get_index(i,j,NUM_NEIGHBOURS,N)] = (((graph[i].edges)[j]).capacity);
//cout<<"i = "<<i<<" j = "<<j<< "setting capacity "<< u<<"adj_list[ ] as "<< (graph[i].edges)[j].capacity<<'\n';
}
}
//excess is the excess flow of vertex, flow is the flow on edge
//Initializing pre-flow
for(int i=0;i<N;i++){ height[i]=0; excess[i]=0; } height[src]=N;
// for(int i = 0;i<size_matrix[src];i++){
// //v is the to vertex
// int v = adjacency_list[get_index(src,i,NUM_NEIGHBOURS,N)];
// excess[v]=capacity[get_index(src,i,NUM_NEIGHBOURS,N)];
// int cap = capacity[get_index(src,i,NUM_NEIGHBOURS,N)];
// flow[get_index(src,i,NUM_NEIGHBOURS,N)]= cap;
// cout<<"set excess of "<<v<<" to "<<excess[v];
// //add-edge implementation
// //!!!!! CHECK 7
// int last_elem = size_matrix[v];
// cout<<"last elem of "<<v<<" = "<<last_elem<<'\n';
//// adjacency_list[get_index(v,last_elem,NUM_NEIGHBOURS,N)] = src;
//// cout<<" setting edge in list "<<get_index(v,last_elem,NUM_NEIGHBOURS,N)<<'\n';
//// capacity[get_index(v,last_elem,NUM_NEIGHBOURS,N)] = 0;
//// flow[get_index(v,last_elem,NUM_NEIGHBOURS,N)] = -flow[get_index(src,i,NUM_NEIGHBOURS,N)];
//// cout<<"flow = "<<flow[get_index(v,last_elem,NUM_NEIGHBOURS,N)]<<'\n';
//// size_matrix[v]++;
//// addEdge(e->to,src,e->flow);
// //add edge fn ends or include next line too
// excess[src] -= flow[get_index(src,i,NUM_NEIGHBOURS,N)];
// cout<<"Loop end---------------------------------"<<'\n';
// }
// Pre-flow: saturate every source edge; the source accumulates the
// (negative) total pushed-out flow.
for(int i=0;i<size_matrix[src];i++)
{
int index_v = get_index(src, i, NUM_NEIGHBOURS, N);
int cap = capacity[index_v];
int v = adjacency_list[index_v];
flow[index_v] = cap;
excess[src] -= cap;
excess[v] = cap;
//uncomment loop cfor correct termination
// for(int j=0;j<size_matrix[v];j++)
// {
// int ind = get_index(v,j,NUM_NEIGHBOURS,N);
// if(adjacency_list[ind]==src)
// {
// flow[ind] = -cap;
// break;
// }
// }
}
height[src] = N;
//pre-flow ends
//Copying
cudaMemcpy( excess_d, excess, nsize, cudaMemcpyHostToDevice);
cudaMemcpy(capacity_d, capacity,twonsize, cudaMemcpyHostToDevice);
cudaMemcpy(flow_d, flow,twonsize, cudaMemcpyHostToDevice);
cudaMemcpy(size_matrix_d, size_matrix,nsize, cudaMemcpyHostToDevice);
cudaMemcpy(adjacency_list_d, adjacency_list,twonsize, cudaMemcpyHostToDevice);
//Starting main loop
cout<<"graph[src].excess = "<<excess[src]<<" graph[sink].excess = "<<excess[sink]<<'\n';
int cnt =1000;
// Iterate until the sink has absorbed everything the source emitted
// (excess[src] + excess[sink] reaches 0), capped at 1000 launches.
while(excess[src]+ excess[sink]< 0 && cnt>0)
{
cout<<"graph[src].excess = "<<excess[src]<<" graph[sink].excess = "<<excess[sink]<<'\n';
cudaMemcpy(height_d, height, nsize, cudaMemcpyHostToDevice);
//call kernel here
kernel<<<1,N>>>(height_d,excess_d,adjacency_list_d,size_matrix_d,capacity_d,flow_d,N,NUM_NEIGHBOURS,sink);
cudaMemcpy(height, height_d, nsize, cudaMemcpyDeviceToHost);
cudaMemcpy( excess, excess_d, nsize, cudaMemcpyDeviceToHost);
cudaMemcpy(capacity, capacity_d,twonsize, cudaMemcpyDeviceToHost);
cudaMemcpy(flow, flow_d,twonsize, cudaMemcpyDeviceToHost);
cudaMemcpy(size_matrix, size_matrix_d,nsize, cudaMemcpyDeviceToHost);
cudaDeviceSynchronize();
cout<<"H array: --------------------------------------------\n";
for(int q=0;q<N;q++)
{
cout<<height[q]<<' ';
}
printf("FLOW:\n");
print_flow(flow,N,size_matrix, adjacency_list,"flow",excess);
printf("EXCESS:\n");
print_flow(flow,N,size_matrix, adjacency_list,"excess",excess);
printf("capacity:\n");
print_flow(capacity,N,size_matrix, adjacency_list,"capacity",excess);
cnt--;
//global_relabel(height,excess,adjacency_list,size_matrix,capacity,flow,N,NUM_NEIGHBOURS,src,sink);
//call global relabel here
}
}
|
20,096 | /* Furthest point sampling GPU implementation
* Author Zhaoyu SU
* All Rights Reserved. Sep., 2019.
* Happy Mid-Autumn Festival! :)
*/
#include <stdio.h>
#include <time.h>
#include <sys/time.h>
#include <iostream>
#define USECPSEC 1000000ULL
/*
 * For each ROI (rows distributed block-per-batch, thread-per-ROI), find the
 * ground-truth box whose (expanded) extent contains the ROI centre and copy
 * its 7 attributes [w,l,h,x,y,z,r] into `bbox`; set bbox_conf to 1 for
 * qualifying (difficulty/class-thresholded) matches, -1 for ignored matches,
 * 0 for background. gt_bbox rows are [w,l,h,x,y,z,r,cls,diff].
 *
 * NOTE(review): input_accu_list (per-batch exclusive prefix sum of
 * input_num_list) is recomputed redundantly by every thread of every block,
 * and __syncthreads() only synchronizes within a block — correctness relies
 * on all blocks writing identical values. Verify, or compute it on the host.
 */
__global__ void get_bbox_gpu_kernel(int batch_size, int npoint, int nbbox, int bbox_attr, int diff_thres, int cls_thres, float expand_ratio,
const float* roi_attrs,
const float* gt_bbox,
const int* input_num_list,
int* input_accu_list,
float* bbox,
int* bbox_conf,
int* bbox_diff) {
// Degenerate-shape guard: nothing to do for empty inputs.
if (batch_size * nbbox * bbox_attr <=0 || npoint <=0) {
// printf("Get Bbox Logits Op exited unexpectedly.\n");
return;
}
input_accu_list[0] = 0;
for (int b=1; b<batch_size; b++) {
input_accu_list[b] = input_accu_list[b-1] + input_num_list[b-1];
}
__syncthreads();
for (int b=blockIdx.x; b<batch_size; b+=gridDim.x) {
for (int i=threadIdx.x; i<input_num_list[b]; i+=blockDim.x) {
// Default output: tiny placeholder box, background confidence.
bbox[input_accu_list[b]*7 + i*7 + 0] = 0.1;
bbox[input_accu_list[b]*7 + i*7 + 1] = 0.1;
bbox[input_accu_list[b]*7 + i*7 + 2] = 0.1;
float roi_x = roi_attrs[input_accu_list[b]*7 + i*7 + 3];
float roi_y = roi_attrs[input_accu_list[b]*7 + i*7 + 4];
float roi_z = roi_attrs[input_accu_list[b]*7 + i*7 + 5];
bbox_conf[input_accu_list[b] + i] = 0;
bbox_diff[input_accu_list[b] + i] = -1;
for (int j=0; j<nbbox; j++) {
// [w, l, h, x, y, z, r, cls, diff_idx]
// 0 1 2 3 4 5 6 7 8
float bbox_w = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 0];
float bbox_l = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 1];
float bbox_h = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 2];
float bbox_x = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 3];
float bbox_y = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 4];
float bbox_z = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 5];
float bbox_r = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 6];
float bbox_cls = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 7];
float diff = gt_bbox[b*nbbox*bbox_attr + j*bbox_attr + 8];
// Zero-volume rows are padding; skip them.
if (bbox_l*bbox_h*bbox_w > 0) {
// Rotate the ROI centre into the box's local frame, then do an
// axis-aligned containment test against the expanded extents.
float rel_roi_x = roi_x - bbox_x;
float rel_roi_y = roi_y - bbox_y;
float rel_roi_z = roi_z - bbox_z;
float rot_rel_roi_x = rel_roi_x*cosf(bbox_r) + rel_roi_y*sinf(bbox_r);
float rot_rel_roi_y = -rel_roi_x*sinf(bbox_r) + rel_roi_y*cosf(bbox_r);
if (abs(rot_rel_roi_x) <= bbox_w * (1 + expand_ratio) / 2 &&
abs(rot_rel_roi_y) <= bbox_l * (1 + expand_ratio) / 2 &&
abs(rel_roi_z) <= bbox_h * (1 + expand_ratio) / 2) {
bbox[input_accu_list[b]*7 + i*7 + 0] = bbox_w;
bbox[input_accu_list[b]*7 + i*7 + 1] = bbox_l;
bbox[input_accu_list[b]*7 + i*7 + 2] = bbox_h;
bbox[input_accu_list[b]*7 + i*7 + 3] = bbox_x;
bbox[input_accu_list[b]*7 + i*7 + 4] = bbox_y;
bbox[input_accu_list[b]*7 + i*7 + 5] = bbox_z;
bbox[input_accu_list[b]*7 + i*7 + 6] = bbox_r;
// if (diff <= diff_thres && bbox_cls == 0) {
if (diff <= diff_thres && bbox_cls <= cls_thres) {
// Here we only take cars into consideration, while vans are excluded and give the foreground labels as -1 (ignored).
bbox_conf[input_accu_list[b] + i] = 1;
bbox_diff[input_accu_list[b] + i] = diff;
}
else {
bbox_conf[input_accu_list[b] + i] = -1;
bbox_diff[input_accu_list[b] + i] = -1;
}
}
}
}
}
}
}
// Microseconds of wall-clock time elapsed since `start`.
// Pass 0 to obtain the current timestamp; pass a previous return value to
// measure an interval.
long long dtime_usec(unsigned long long start){
    timeval now;
    gettimeofday(&now, 0);
    unsigned long long stamp = now.tv_sec * USECPSEC + now.tv_usec;
    return stamp - start;
}
/*
 * Host wrapper: launches get_bbox_gpu_kernel with a fixed 32x512 shape (the
 * kernel grid-strides over batches, so any positive batch_size is covered).
 * All pointer arguments are device pointers; parameters mirror the kernel.
 *
 * Fix over the original: the launch result was never inspected, so a bad
 * configuration or an earlier sticky CUDA error went unnoticed. We now
 * report launch failures on stderr; behavior is otherwise unchanged.
 */
void get_bbox_gpu_launcher(int batch_size, int npoint, int nbbox, int bbox_attr, int diff_thres, int cls_thres, float expand_ratio,
                           const float* roi_attrs,
                           const float* gt_bbox,
                           const int* input_num_list,
                           int* input_accu_list,
                           float* bbox,
                           int* bbox_conf,
                           int* bbox_diff) {
    // long long dt = dtime_usec(0);
    get_bbox_gpu_kernel<<<32,512>>>(batch_size, npoint, nbbox, bbox_attr, diff_thres, cls_thres, expand_ratio,
                                    roi_attrs,
                                    gt_bbox,
                                    input_num_list,
                                    input_accu_list,
                                    bbox,
                                    bbox_conf,
                                    bbox_diff);
    cudaError_t err = cudaGetLastError(); // catch launch-configuration errors
    if (err != cudaSuccess)
        fprintf(stderr, "get_bbox_gpu_kernel launch failed: %s\n", cudaGetErrorString(err));
    // dt = dtime_usec(dt);
    // std::cout << "Voxel Sample (forward) CUDA time: " << dt/(float)USECPSEC << "s" << std::endl;
}
|
20,097 | #include "includes.h"
/*
 * Greyscale conversion of a packed 3-byte-per-pixel image, one thread per
 * pixel. Channel order in memory is B, G, R; the integer luma approximation
 * (30R + 59G + 11B)/100 is written back into all three channels of `res`.
 */
__global__ void cinzaGPU1d( unsigned char *image1, unsigned char *res, int pixels ) {
    const int pix = threadIdx.x + blockIdx.x * blockDim.x;
    if (pix >= pixels)
        return; // grid may overshoot the image; extra threads do nothing
    const int base = 3 * pix;
    const int b = image1[ base ];
    const int g = image1[ base + 1 ];
    const int r = image1[ base + 2 ];
    const int grey = (30 * r + 59 * g + 11 * b) / 100;
    res[ base ]     = (unsigned char)grey;
    res[ base + 1 ] = (unsigned char)grey;
    res[ base + 2 ] = (unsigned char)grey;
}
20,098 | #include <algorithm>
#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <iomanip>
#include <iostream>
#include <vector>
#define C 4
#define THREADS 1024 // 2^10
#define MAX 85
#define MAX_S MAX* MAX
#define PERM_MAX (MAX * (MAX - 1) * (MAX - 2) * (MAX - 3)) / 24
#define pb push_back
#define mp make_pair
#define gpuErrChk(ans) \
{ \
gpuAssert((ans), __FILE__, __LINE__); \
}
/*
 * Error reporter behind the gpuErrChk macro: prints the CUDA error string
 * with the source location. When `abort` is set it blocks on getchar() so
 * the message stays visible (NOTE(review): it does not actually terminate
 * the process despite the parameter name).
 *
 * Fix over the original: `file` is now const char* — passing the __FILE__
 * string literal to a plain char* is an invalid conversion in ISO C++11+.
 * Callers are unaffected.
 */
inline void gpuAssert(cudaError_t code, const char* file, int line, bool abort = true)
{
    if (code != cudaSuccess) {
        fprintf(stderr, "GPUassert: %s %s %d\n", cudaGetErrorString(code), file, line);
        if (abort)
            getchar();
    }
}
using namespace std;
typedef long long int64;
typedef pair<int, int> ii;
/*
sz ---> Adjacency matrix dimension (1D)
perm ---> Number of permutations of an instance
graph ---> Adjacency matrix itself
seeds ---> Set of seeds
faces ---> Set of triangular faces for the output
*/
// Single problem instance shipped to the GPU in one cudaMemcpy:
//   sz    — number of vertices; perm — number of 4-clique seeds;
//   graph — dense weighted adjacency matrix (sz*sz, padded to MAX_S);
//   seeds — seed vertices laid out column-major (seed k of instance t at
//           seeds[t + k*perm]); F_ANS — space for an output face list.
struct Node {
int sz, perm;
int graph[MAX_S], seeds[C * PERM_MAX], F_ANS[6 * MAX];
};
/*
faces ---> Number of triangular faces
count ---> Number of remaining vertices
tmpMax ---> Max value obtained for a seed
F ---> Set of triangular faces
V ---> Set of remaining vertices
*/
// Per-permutation working state; each array holds one slot (or one strided
// row) per seed permutation, indexed by the thread id t.
struct Params {
int *faces, *count, *tmpMax;
int *F, *V;
};
/*
SIZE ---> Number of vertices
BLOCKS ---> Number of blocks
PERM ---> Number of permutations
R ---> Output graph for a possible solution
F ---> Set of triangular faces of an instance
qtd ---> Number of possible 4-cliques
*/
clock_t start, stop; // wall-clock markers for the timing report
int SIZE, BLOCKS, PERM, qtd = 0; // vertex count, launch blocks, #permutations, seeds emitted so far
int R[MAX_S], F[8 * MAX], bib[MAX]; // result adjacency, face scratch, precomputed C(i,4) table
Node* N; // the host-side instance copied to the device
// Reset the per-permutation working state for instance t before the greedy
// construction starts.
__device__ void initializeDevice(Params* devP, int sz, int t)
{
    devP->count[t] = sz - 4;  // vertices still outside the initial 4-clique
    devP->faces[t] = 0;       // no triangular faces generated yet
    devP->tmpMax[t] = -1;     // sentinel for "no weight accumulated"
}
/*
Generates a list containing the vertices which are not on the planar graph
*/
/*
 * Build the remaining-vertex list for permutation t: every vertex keeps its
 * own id except the four seed vertices, which are marked -1 (already placed
 * in the planar graph). V is laid out column-major: entry v of instance t
 * lives at V[t + v*perm].
 */
__device__ void generateList(Node* devN, Params* devP, int t)
{
    const int n = devN->sz;
    const int perm = devN->perm;
    const int s0 = devN->seeds[t];
    const int s1 = devN->seeds[t + perm];
    const int s2 = devN->seeds[t + 2 * perm];
    const int s3 = devN->seeds[t + 3 * perm];
    for (int v = 0; v < n; v++) {
        const bool isSeed = (v == s0) || (v == s1) || (v == s2) || (v == s3);
        devP->V[t + v * perm] = isSeed ? -1 : v;
    }
}
/*
Returns the weight of the planar graph so far
*/
/*
 * Seed the face list with the four triangular faces of the initial 4-clique
 * (va,vb,vc,vd) and store the clique's total edge weight in tmpMax[t].
 * F is column-major: component c of face f of instance t lives at
 * F[t + (f*3 + c)*perm]; faces[t] is post-incremented as each face is emitted.
 */
__device__ void generateTriangularFaceList(Node* devN, Params* devP, int graph[], int t)
{
int sz = devN->sz;
int va = devN->seeds[t];
int vb = devN->seeds[t + devN->perm];
int vc = devN->seeds[t + 2 * devN->perm];
int vd = devN->seeds[t + 3 * devN->perm];
//generate first triangle of the output graph
devP->F[t + (devP->faces[t] * 3) * devN->perm] = va;
devP->F[t + (devP->faces[t] * 3 + 1) * devN->perm] = vb;
devP->F[t + ((devP->faces[t]++) * 3 + 2) * devN->perm] = vc;
// Weight of the first triangle's three edges.
int resp = graph[va * sz + vb] + graph[va * sz + vc] + graph[vb * sz + vc];
//generate the next 3 possible faces
devP->F[t + (devP->faces[t] * 3) * devN->perm] = va;
devP->F[t + (devP->faces[t] * 3 + 1) * devN->perm] = vb;
devP->F[t + ((devP->faces[t]++) * 3 + 2) * devN->perm] = vd;
devP->F[t + (devP->faces[t] * 3) * devN->perm] = va;
devP->F[t + (devP->faces[t] * 3 + 1) * devN->perm] = vc;
devP->F[t + ((devP->faces[t]++) * 3 + 2) * devN->perm] = vd;
devP->F[t + (devP->faces[t] * 3) * devN->perm] = vb;
devP->F[t + (devP->faces[t] * 3 + 1) * devN->perm] = vc;
devP->F[t + ((devP->faces[t]++) * 3 + 2) * devN->perm] = vd;
// Add the three remaining clique edges (all incident to vd).
resp += graph[va * sz + vd] + graph[vb * sz + vd] + graph[vc * sz + vd];
devP->tmpMax[t] = resp;
}
/*
Insert a new vertex, 3 new triangular faces and removes face 'f' from the set
*/
/*
 * T2 operation of the TMFG construction: place new_vertex inside face f,
 * replacing f with triangle (new_vertex, va, vb) and appending the two
 * other triangles (new_vertex, va, vc) and (new_vertex, vb, vc).
 * Returns the weight gained — the sum of the three new edges from
 * new_vertex to the face's corners.
 */
__device__ int operationT2(Node* devN, Params* devP, int graph[], int new_vertex, int f, int t)
{
int sz = devN->sz, perm = devN->perm;
//remove the chosen face and insert a new one
int va = devP->F[t + (f * 3) * perm];
int vb = devP->F[t + (f * 3 + 1) * perm];
int vc = devP->F[t + (f * 3 + 2) * perm];
devP->F[t + (f * 3) * perm] = new_vertex;
devP->F[t + (f * 3 + 1) * perm] = va;
devP->F[t + (f * 3 + 2) * perm] = vb;
//and insert the other two possible faces
devP->F[t + (devP->faces[t] * 3) * perm] = new_vertex;
devP->F[t + (devP->faces[t] * 3 + 1) * perm] = va;
devP->F[t + ((devP->faces[t]++) * 3 + 2) * perm] = vc;
devP->F[t + (devP->faces[t] * 3) * perm] = new_vertex;
devP->F[t + (devP->faces[t] * 3 + 1) * perm] = vb;
devP->F[t + ((devP->faces[t]++) * 3 + 2) * perm] = vc;
// Gain: the three edges connecting new_vertex to the old face's corners.
int resp = graph[va * sz + new_vertex] + graph[vb * sz + new_vertex] + graph[vc * sz + new_vertex];
return resp;
}
/*
Return the vertex with the maximum gain inserting within a face 'f'
*/
/*
 * Exhaustively scan every (remaining vertex, existing face) pair for
 * instance t and return the vertex whose insertion yields the largest
 * weight gain; the winning face index is written through *f.
 * Returns -1 / leaves *f untouched only if no vertices remain.
 */
__device__ int maxGain(Node* devN, Params* devP, int graph[], int* f, int t)
{
int sz = devN->sz, perm = devN->perm;
int gain = -1, vertex = -1;
//iterate through the remaining vertices
for (int new_vertex = 0; new_vertex < sz; new_vertex++) {
if (devP->V[t + new_vertex * perm] == -1)
continue;
//and test which has the maximum gain with its insetion
//within all possible faces
int faces = devP->faces[t];
for (int i = 0; i < faces; i++) {
int va = devP->F[t + (i * 3) * perm], vb = devP->F[t + (i * 3 + 1) * perm], vc = devP->F[t + (i * 3 + 2) * perm];
// Candidate gain: the three edges from new_vertex to this face's corners.
int tmpGain = graph[va * sz + new_vertex] + graph[vb * sz + new_vertex] + graph[vc * sz + new_vertex];
if (tmpGain > gain) {
gain = tmpGain;
*f = i;
vertex = new_vertex;
}
}
}
return vertex;
}
// Greedy TMFG main loop for instance t: while vertices remain, pick the
// (vertex, face) pair with the maximum gain, insert it via operationT2 and
// accumulate the gained weight into tmpMax[t].
__device__ void tmfg(Node* devN, Params* devP, int graph[], int t)
{
    for (; devP->count[t]; devP->count[t]--) {
        int face = -1;
        const int best = maxGain(devN, devP, graph, &face, t);
        devP->V[t + best * devN->perm] = -1; // mark as placed
        devP->tmpMax[t] += operationT2(devN, devP, graph, best, face, t);
    }
}
/*
 * One thread per seed permutation: stage the adjacency matrix in shared
 * memory (dynamic size sz*sz ints, passed at launch), run the greedy TMFG
 * construction, then publish the best weight via atomicMax.
 *
 * Fix over the original: the shared-memory copy assigned
 * `graph[i] = devN->graph[i];` twice in the loop body — the duplicate
 * global load is removed (no behavioral change).
 *
 * NOTE(review): the *idx publication is racy — several threads can tie on
 * the maximum after the atomicMax, so *idx holds an arbitrary one of the
 * best permutations. Also, the __syncthreads() calls sit inside the
 * `x < perm` branch; safe only when the branch does not diverge within a
 * block — confirm PERM is a multiple of the block size or restructure.
 */
__global__ void tmfgParallel(Node* devN, Params devP, int* respMax, int* idx)
{
    int x = blockDim.x * blockIdx.x + threadIdx.x;
    int sz = devN->sz, perm = devN->perm;
    // Cooperative block-strided copy of the adjacency matrix to shared memory.
    extern __shared__ int graph[];
    for (int i = threadIdx.x; i < sz * sz; i += blockDim.x) {
        graph[i] = devN->graph[i];
    }
    __syncthreads();
    if (x < perm) {
        initializeDevice(&devP, sz, x);
        generateList(devN, &devP, x);
        generateTriangularFaceList(devN, &devP, graph, x);
        tmfg(devN, &devP, graph, x);
        __syncthreads();
        atomicMax(respMax, devP.tmpMax[x]);
        if (devP.tmpMax[x] == *respMax)
            *idx = x;
        __syncthreads();
    }
}
/*
 * Host orchestration: allocate device buffers for the instance (Node) and
 * the per-permutation working set (Params), launch tmfgParallel with
 * SIZE*SIZE ints of dynamic shared memory, and copy back the best weight
 * (and the index of the permutation that produced it). Returns the maximum
 * weight found. Progress couts are debug instrumentation.
 */
int tmfgPrepare()
{
int resp = 0, idx = 0, *tmpResp, *tmpIdx;
gpuErrChk(cudaMalloc((void**)&tmpResp, sizeof(int)));
gpuErrChk(cudaMalloc((void**)&tmpIdx, sizeof(int)));
gpuErrChk(cudaMemcpy(tmpResp, &resp, sizeof(int), cudaMemcpyHostToDevice));
gpuErrChk(cudaMemcpy(tmpIdx, &idx, sizeof(int), cudaMemcpyHostToDevice));
Node* devN;
Params devP;
cout << "Amount of memory: " << (3 * PERM + PERM * SIZE + 6 * SIZE * PERM * sizeof(int)) / 1000000 << "MB" << endl;
// Ship the whole instance (graph + seeds) to the device in one copy.
gpuErrChk(cudaMalloc((void**)&devN, sizeof(Node)));
gpuErrChk(cudaMemcpy(devN, N, sizeof(Node), cudaMemcpyHostToDevice));
cout << "1 done." << endl;
gpuErrChk(cudaMalloc((void**)&devP.faces, PERM * sizeof(int)));
gpuErrChk(cudaMalloc((void**)&devP.count, PERM * sizeof(int)));
gpuErrChk(cudaMalloc((void**)&devP.tmpMax, PERM * sizeof(int)));
gpuErrChk(cudaMalloc((void**)&devP.F, PERM * 6 * SIZE * sizeof(int)));
gpuErrChk(cudaMalloc((void**)&devP.V, PERM * SIZE * sizeof(int)));
cout << "2 done." << endl;
dim3 blocks(BLOCKS, 1);
dim3 threads(THREADS, 1);
cout << "Launching kernel..." << endl;
tmfgParallel<<<blocks, threads, SIZE * SIZE * sizeof(int)>>>(devN, devP, tmpResp, tmpIdx);
gpuErrChk(cudaDeviceSynchronize());
cout << "Kernel finished." << endl;
//copy back the maximum weight and the index of the graph
//which gave this result
gpuErrChk(cudaMemcpy(&resp, tmpResp, sizeof(int), cudaMemcpyDeviceToHost));
cout << "1 done." << endl;
gpuErrChk(cudaMemcpy(&idx, tmpIdx, sizeof(int), cudaMemcpyDeviceToHost));
cout << "2 done." << endl;
//gpuErrChk(cudaMemcpy(&F, devP.F[idx + ], (6*MAX)*sizeof(int), cudaMemcpyDeviceToHost));
cout << "3 done." << endl;
gpuErrChk(cudaFree(devN));
gpuErrChk(cudaFree(devP.faces));
gpuErrChk(cudaFree(devP.count));
gpuErrChk(cudaFree(devP.tmpMax));
gpuErrChk(cudaFree(devP.F));
gpuErrChk(cudaFree(devP.V));
cout << "Completed." << endl;
return resp;
}
// Print the interval between two clock() samples in seconds with
// millisecond precision.
void printElapsedTime(clock_t start, clock_t stop)
{
    const double seconds = static_cast<double>(stop - start) / CLOCKS_PER_SEC;
    cout << fixed << setprecision(3) << "Elapsed time: " << seconds << "s\n";
}
/*
C ---> Size of the combination
index ---> Current index in data[]
data[] ---> Temporary array to store a current combination
i ---> Index of current element in vertices[]
*/
// Recursive C-combination generator over {0..SIZE-1}. Each completed
// combination is written column-major into N->seeds (element j of the
// qtd-th combination at seeds[qtd + j*PERM]) and qtd is advanced.
void combineUntil(int index, vector<int>& data, int i)
{
// Current combination is ready, store it as a seed
if (index == C) {
for (int j = 0; j < C; j++) {
N->seeds[qtd + j * PERM] = data[j];
}
qtd++;
return;
}
// When there are no more elements to put in data[]
if (i >= SIZE)
return;
//current is inserted; put next at a next location
data[index] = i;
combineUntil(index + 1, data, i + 1);
//current is deleted; replace it with next
combineUntil(index, data, i + 1);
}
/*
Print all combinations of size 'C' using a temporary array 'data'
*/
void combine()
{
vector<int> data(C);
combineUntil(0, data, 0);
}
// Mark every off-diagonal entry of the reconstruction matrix R as "no edge".
void initialize()
{
    for (int row = 0; row < SIZE; row++)
        for (int col = row + 1; col < SIZE; col++)
            R[row * SIZE + col] = R[col * SIZE + row] = -1;
}
// Read the instance from stdin: first SIZE, then the strict upper triangle
// of the symmetric weight matrix. Derives PERM (= C(SIZE,4) from the bib
// table) and the launch block count, and fills the host Node.
void readInput()
{
int x;
cin >> SIZE;
PERM = bib[SIZE - 1];
BLOCKS = PERM / THREADS + 1;
N = (Node*)malloc(sizeof(Node));
N->sz = SIZE;
N->perm = PERM;
for (int i = 0; i < SIZE; i++) {
for (int j = i + 1; j < SIZE; j++) {
cin >> x;
// Mirror each weight so the matrix is symmetric.
N->graph[i * SIZE + j] = x;
N->graph[j * SIZE + i] = x;
}
}
}
/*
Define the number of permutations and blocks
*/
// Precompute bib[i-1] = i*(i-1)*(i-2)*(i-3)/24 = C(i,4) for i in [6, MAX]:
// the number of 4-clique seeds for an i-vertex instance.
void sizeDefinitions()
{
for (int i = 6; i <= MAX; i++) {
int resp = 1;
for (int j = i - 3; j <= i; j++)
resp *= j;
resp /= 24;
bib[i - 1] = resp;
}
}
// Entry point: read the dense weighted graph from stdin, generate all
// 4-clique seeds, run the GPU TMFG search and report the best total weight
// plus elapsed time. The commented block is a disabled reconstruction of
// the winning planar graph from its face list.
int main(int argv, char** argc)
{
ios::sync_with_stdio(false);
sizeDefinitions();
//read the input, which is given by a size of a graph and its weighted edges.
//the graph given is dense.
readInput();
initialize();
//generate multiple 4-clique seeds, given the number of vertices
combine();
// NOTE(review): hard-codes GPU device 3 — verify the target machine has
// at least 4 devices or make this configurable.
cudaSetDevice(3);
start = clock();
int respMax = tmfgPrepare();
stop = clock();
//reconstruct the graph given the regions of the graph
// for ( int i = 0; i < 2*SIZE; i++ ){
// int va = F[i*3], vb = F[i*3 + 1], vc = F[i*3 + 2];
// if ( va == vb && vb == vc ) continue;
// R[va*SIZE + vb] = R[vb*SIZE + va] = N->graph[va*SIZE + vb];
// R[va*SIZE + vc] = R[vc*SIZE + va] = N->graph[va*SIZE + vc];
// R[vb*SIZE + vc] = R[vc*SIZE + vb] = N->graph[vb*SIZE + vc];
// }
// cout << "Printing generated graph: " << endl;
// for ( int i = 0; i < SIZE; i++ ){
// for ( int j = i+1; j < SIZE; j++ ){
// cout << R[i*SIZE + j] << " ";
// }
// cout << endl;
// }
printElapsedTime(start, stop);
cout << "Maximum weight found: " << respMax << endl;
free(N);
gpuErrChk(cudaDeviceReset());
return 0;
}
20,099 | #include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <getopt.h>
#include <curand_kernel.h>
#include <stdlib.h>
#include <cuda.h>
#include <sys/time.h>
#include "cudaSNormalizeROIs_kernel.cu"
#include<chrono>
#include<iostream>
using namespace std;
using namespace std::chrono;
// Benchmark sweep: candidate (BLOCKX, BLOCKY) thread-block shapes to try.
int blocks_[20][2] = {{8,8},{16,16},{24,24},{32,32},{1,64},{1,128},{1,192},{1,256},{1,320},{1,384},{1,448},{1,512},{1,576},{1,640},{1,704},{1,768},{1,832},{1,896},{1,960},{1,1024}};
// Benchmark sweep: candidate (XSIZE, YSIZE) problem dimensions.
int matrices_[7][2] = {{240,240},{496,496},{784,784},{1016,1016},{1232,1232},{1680,1680},{2024,2024}};
// Auto-generated benchmark harness for cudaSNormalizeROIs_kernel.
// For each of the first argv[1] matrix sizes in matrices_ and each of the 20
// launch shapes in blocks_, it performs one cold launch, 10 warm-up launches,
// then times 1000 back-to-back launches.
// Output: one line per configuration, [elapsed_us,(BLOCKX,BLOCKY),(XSIZE,YSIZE)].
int main(int argc, char **argv) {
  cudaSetDevice(0);
  // argv[1] = number of matrices_ entries to sweep. NOTE(review): argc/argv are
  // not validated; running without an argument dereferences argv[1] == NULL.
  char* p;
  int matrix_len = strtol(argv[1], &p, 10);
  for (int matrix_looper = 0; matrix_looper < matrix_len; matrix_looper++) {
    for (int block_looper = 0; block_looper < 20; block_looper++) {
      int XSIZE = matrices_[matrix_looper][0], YSIZE = matrices_[matrix_looper][1];
      int BLOCKX = blocks_[block_looper][0], BLOCKY = blocks_[block_looper][1];
      // Scalar kernel arguments — all pinned to 1 by the harness generator, so
      // the kernel only exercises a minimal ROI regardless of XSIZE/YSIZE.
      unsigned int inputSizeX = 1;
      unsigned int inputSizeY = 1;
      unsigned int nbProposals = 1;
      unsigned int batchSize = 1;
      unsigned int scoreIdx = 1;
      unsigned int nbCls = 1;
      unsigned int maxParts = 1;
      unsigned int maxTemplates = 1;
      bool keepMax = 1;
      bool generateParts = 1;
      bool generateTemplates = 1;
      const float normX = 1;
      const float normY = 1;
      // Device buffers. NOTE(review): every allocation is XSIZE*YSIZE *bytes*
      // (no sizeof(float)), a quirk of the generator; with the all-1 scalar
      // arguments above the kernel touches only a few elements, so the
      // under-allocation is harmless here — confirm before reusing.
      const float *means = NULL;
      cudaMalloc(&means, XSIZE*YSIZE);
      const float *std = NULL;
      cudaMalloc(&std, XSIZE*YSIZE);
      const unsigned int *numPartsPerClass = NULL;
      cudaMalloc(&numPartsPerClass, XSIZE*YSIZE);
      const unsigned int *numTemplatesPerClass = NULL;
      cudaMalloc(&numTemplatesPerClass, XSIZE*YSIZE);
      const float *ROIRef = NULL;
      cudaMalloc(&ROIRef, XSIZE*YSIZE);
      const float *ROIEst = NULL;
      cudaMalloc(&ROIEst, XSIZE*YSIZE);
      const float *ValuesEst = NULL;
      cudaMalloc(&ValuesEst, XSIZE*YSIZE);
      const float *partsEst = NULL;
      cudaMalloc(&partsEst, XSIZE*YSIZE);
      const float *partsVisibilityEst = NULL;
      cudaMalloc(&partsVisibilityEst, XSIZE*YSIZE);
      const float *templatesEst = NULL;
      cudaMalloc(&templatesEst, XSIZE*YSIZE);
      float *outputs = NULL;
      cudaMalloc(&outputs, XSIZE*YSIZE);
      int *argMax = NULL;
      cudaMalloc(&argMax, XSIZE*YSIZE);
      float *partsPrediction = NULL;
      cudaMalloc(&partsPrediction, XSIZE*YSIZE);
      float *partsVisibilityPrediction = NULL;
      cudaMalloc(&partsVisibilityPrediction, XSIZE*YSIZE);
      float *templatesPrediction = NULL;
      cudaMalloc(&templatesPrediction, XSIZE*YSIZE);
      float scoreThreshold = 1;
      // Round the problem size up to a multiple of the block shape so the grid
      // covers it exactly.
      int iXSIZE = XSIZE;
      int iYSIZE = YSIZE;
      while (iXSIZE % BLOCKX != 0) {
        iXSIZE++;
      }
      while (iYSIZE % BLOCKY != 0) {
        iYSIZE++;
      }
      dim3 gridBlock(iXSIZE/BLOCKX, iYSIZE/BLOCKY);
      dim3 threadBlock(BLOCKX, BLOCKY);
      cudaFree(0); // no-op free: forces lazy CUDA context creation before timing
      // Cold launch, then synchronize so setup cost is excluded from the timing.
      cudaSNormalizeROIs_kernel<<<gridBlock,threadBlock>>>(inputSizeX,inputSizeY,nbProposals,batchSize,scoreIdx,nbCls,maxParts,maxTemplates,keepMax,generateParts,generateTemplates,normX,normY,means,std,numPartsPerClass,numTemplatesPerClass,ROIRef,ROIEst,ValuesEst,partsEst,partsVisibilityEst,templatesEst,outputs,argMax,partsPrediction,partsVisibilityPrediction,templatesPrediction,scoreThreshold);
      cudaDeviceSynchronize();
      // Warm-up launches.
      for (int loop_counter = 0; loop_counter < 10; ++loop_counter) {
        cudaSNormalizeROIs_kernel<<<gridBlock,threadBlock>>>(inputSizeX,inputSizeY,nbProposals,batchSize,scoreIdx,nbCls,maxParts,maxTemplates,keepMax,generateParts,generateTemplates,normX,normY,means,std,numPartsPerClass,numTemplatesPerClass,ROIRef,ROIEst,ValuesEst,partsEst,partsVisibilityEst,templatesEst,outputs,argMax,partsPrediction,partsVisibilityPrediction,templatesPrediction,scoreThreshold);
      }
      auto start = steady_clock::now();
      for (int loop_counter = 0; loop_counter < 1000; loop_counter++) {
        cudaSNormalizeROIs_kernel<<<gridBlock,threadBlock>>>(inputSizeX,inputSizeY,nbProposals,batchSize,scoreIdx,nbCls,maxParts,maxTemplates,keepMax,generateParts,generateTemplates,normX,normY,means,std,numPartsPerClass,numTemplatesPerClass,ROIRef,ROIEst,ValuesEst,partsEst,partsVisibilityEst,templatesEst,outputs,argMax,partsPrediction,partsVisibilityPrediction,templatesPrediction,scoreThreshold);
      }
      // Kernel launches are asynchronous: without this synchronization the
      // interval below measured only enqueue overhead, not execution time.
      cudaDeviceSynchronize();
      auto end = steady_clock::now();
      auto usecs = duration_cast<duration<float, microseconds::period> >(end - start);
      cout <<'['<<usecs.count()<<','<<'('<<BLOCKX<<','<<BLOCKY<<')' << ','<<'('<<XSIZE<<','<<YSIZE<<')'<<']' << endl;
    }
  }
}
20,100 | #include <stdio.h>
#include <stdlib.h>
#include <cuda.h>
#include <string.h>
#include <math.h>
#define TAM 8           /* number of elements in each array */
#define NITERACIONES 10 /* number of stencil passes (ping-pong iterations) */
#define TAMBLOCK 2      /* threads per block used by the launch in main() */
// 3-point averaging stencil: b[i] = (a[i-1] + a[i] + a[i+1]) / 3 for the
// interior points i in [1, TAM-2]; boundary elements b[0] and b[TAM-1] are
// left untouched. Executed by every launched thread via a grid-stride loop.
__global__ void stencil(float *a, float *b){
  // Start at 1 so a[i-1] never reads below index 0.
  int idx = blockIdx.x*blockDim.x+threadIdx.x+1;
  // Stride by the actual launch width instead of the hard-coded TAMBLOCK
  // macro: identical behavior under the existing <<<1,TAMBLOCK>>> launch,
  // but correct for any grid/block configuration.
  int stride = gridDim.x*blockDim.x;
  for(int i=idx; i<TAM-1; i+=stride)
    b[i]=(a[i-1]+a[i]+a[i+1])/3;
}
/*
 * Host driver: runs NITERACIONES smoothing passes of the 3-point stencil over
 * a TAM-element array, ping-ponging between two device buffers, then prints
 * the last two states. Output strings are kept verbatim.
 */
int main() {
  int memsize = sizeof(float)*TAM;

  // Host arrays (CPU/RAM); both filled with the same random values in [0,20).
  float *h_a,*h_b;
  h_a=(float *)malloc(memsize);
  h_b=(float *)malloc(memsize);
  for(int i=0;i<TAM;++i){
    h_a[i]=h_b[i]=(float)(rand()%20);
  }

  // Device arrays (GPU). Both start as copies of the host data, so the
  // boundary elements (never written by the kernel) keep their initial values.
  float *d_a,*d_b;
  cudaMalloc(&d_a, memsize);
  cudaMalloc(&d_b, memsize);
  cudaMemcpy(d_a, h_a, memsize, cudaMemcpyHostToDevice);
  cudaMemcpy(d_b, h_b, memsize, cudaMemcpyHostToDevice);

  dim3 block(1);
  dim3 thread(TAMBLOCK);
  printf("El numero de bloques es %d, y el numero de hilos es %d\n", block.x, thread.x);

  for(int j=0;j<NITERACIONES;++j){
    stencil <<<block,thread>>> (d_a, d_b);
    // cudaThreadSynchronize() is deprecated (removed in CUDA 12);
    // cudaDeviceSynchronize() is its direct replacement.
    cudaDeviceSynchronize();
    // Ping-pong the buffers: after the swap, d_a always holds the newest state.
    float *aux=d_b;
    d_b=d_a;
    d_a=aux;
  }

  // d_a = state after NITERACIONES passes, d_b = state one pass earlier.
  cudaMemcpy(h_a, d_a, memsize, cudaMemcpyDeviceToHost);
  cudaMemcpy(h_b, d_b, memsize, cudaMemcpyDeviceToHost);

  printf("Vector A:\n");
  for(int i=0;i<TAM;++i)
    printf("%f, ", *(h_a+i));
  printf("\n\nVector B:\n");
  for(int i=0;i<TAM;++i)
    printf("%f, ", *(h_b+i));
  printf("\n");

  free(h_a);
  free(h_b);
  cudaFree(d_a);
  cudaFree(d_b);
  return 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.