// generates random numbers using the Metropolis method
// compile: mpicc metrop.c -lm
// usage:   mpirun -n process# ./a.out trial#
// note:    process# must be the square of an integer

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <mpi.h>

// w is the Gaussian weight function
double w(double x, double y)
{
    double sigma = 0.05 ;            // sigma is the standard deviation
    return exp(-(x * x + y * y) / (2 * sigma * sigma)) ;
}

// propose a new (x, y) and accept or reject it with a Metropolis step
void generator(double *x, double *y)
{
    double xt, yt, ratio, tmp ;
    double delta = 0.05 ;

    tmp = (double) rand() / (double) RAND_MAX ;
    // update x by a step of size delta; (2 * tmp - 1) lies between -1.0 and 1.0
    xt = (*x) + delta * (2 * tmp - 1) ;

    tmp = (double) rand() / (double) RAND_MAX ;
    // update y by a step of size delta; (2 * tmp - 1) lies between -1.0 and 1.0
    yt = (*y) + delta * (2 * tmp - 1) ;

    // compare the proposed (xt, yt) with the old (x, y):
    // accept the proposal with probability min(1, w(xt, yt) / w(x, y))
    ratio = w(xt, yt) / w(*x, *y) ;
    tmp = (double) rand() / (double) RAND_MAX ;
    if (ratio > tmp) {
        *x = xt ;
        *y = yt ;
    }
}

int main(int argc, char *argv[])
{
    double x, y ;
    double *x_array ;
    double *y_array ;
    double *local_x_array ;
    double *local_y_array ;
    int i ;
    int pnumber, myrank ;            // pnumber is the total number of processes, myrank is the rank of this process

    if (argc < 2) {
        fprintf(stderr, "usage: mpirun -n process# ./a.out trial#\n") ;
        return 1 ;
    }
    int count = atoi(argv[1]) ;      // how many Metropolis trials each process will run

    MPI_Init(&argc, &argv) ;
    MPI_Comm_rank(MPI_COMM_WORLD, &myrank) ;
    MPI_Comm_size(MPI_COMM_WORLD, &pnumber) ;

    local_x_array = malloc(count * sizeof(double)) ;
    local_y_array = malloc(count * sizeof(double)) ;

    // define the boundaries of this process's sub-domain of [-1, 1] x [-1, 1]
    double x_min, x_max, y_min, y_max ;
    double interval = 2.0 / sqrt(pnumber) ;
    x_min = -1.0 + interval * (myrank % (int) sqrt(pnumber)) ;
    x_max = x_min + interval ;
    y_min = -1.0 + interval * (myrank / (int) sqrt(pnumber)) ;
    y_max = y_min + interval ;

    int counter = 0 ;                // counter holds the number of points stored in local_x_array and local_y_array
    srand(myrank) ;                  // seed each process differently so they generate different random numbers

    // pick an initial point strictly inside this process's sub-domain
    while (counter != 1) {
        x = x_min + interval * ((double) rand() / RAND_MAX) ;  // initial x between x_min and x_max
        y = y_min + interval * ((double) rand() / RAND_MAX) ;  // initial y between y_min and y_max
        if ((x > x_min) && (x < x_max) && (y > y_min) && (y < y_max)) {
            local_x_array[0] = x ;
            local_y_array[0] = y ;
            counter++ ;
        }
    }

    // run the Metropolis walk; keep only the points that stay inside the sub-domain
    for (i = 1 ; i < count ; i++) {
        generator(&x, &y) ;
        if ((x > x_min) && (x < x_max) && (y > y_min) && (y < y_max)) {
            local_x_array[counter] = x ;
            local_y_array[counter] = y ;
            counter++ ;
        }
    }

    // every process learns how many points every other process kept
    int total_number[pnumber] ;
    int master = 0 ;
    MPI_Allgather(&counter, 1, MPI_INT, total_number, 1, MPI_INT, MPI_COMM_WORLD) ;

    // compute the displacement of each process's block in the receive buffer
    int displs[pnumber] ;
    displs[0] = 0 ;
    int sum = 0 ;
    for (i = 1 ; i < pnumber ; i++) {
        sum = sum + total_number[i - 1] ;
        displs[i] = sum ;
    }
    sum = sum + total_number[pnumber - 1] ;

    x_array = malloc(sizeof(double) * sum) ;
    y_array = malloc(sizeof(double) * sum) ;

    // gather all local points on the master rank
    MPI_Gatherv(local_x_array, counter, MPI_DOUBLE, x_array, total_number, displs, MPI_DOUBLE,
                master, MPI_COMM_WORLD) ;
    MPI_Gatherv(local_y_array, counter, MPI_DOUBLE, y_array, total_number, displs, MPI_DOUBLE,
                master, MPI_COMM_WORLD) ;

    free(local_x_array) ;
    free(local_y_array) ;
    free(x_array) ;
    free(y_array) ;

    MPI_Finalize() ;
    return 0 ;
}