/* ---------------------------------------------------------------
 * pi_reduce.c
 * FILES: pi_reduce.c, dboard.c, make.pi.c
 * DESCRIPTION: MPI pi calculation example program.  C Version.
 *   This program calculates pi using a "dartboard" algorithm.  See
 *   Fox et al. (1988) Solving Problems on Concurrent Processors, vol. 1,
 *   page 207.  All processes contribute to the calculation, with the
 *   master averaging the values for pi.
 *
 *   SPMD version: Conditional statements check if the process
 *   is the master or a worker.
 *
 *   This version uses MPI_Reduce to collect results.
 *
 * AUTHOR: Roslyn Leibensperger
 * REVISED: 05/24/93 for API by Roslyn Leibensperger
 *          09/14/93 for latest API changes by Blaise Barney
 *          01/10/94 changed API to MPL by Stacy Pendell
 *          05/18/94 corrections to comments by Roslyn Leibensperger
 * CONVERTED TO MPI: 11/12/94 by Xianneng Shen
 * --------------------------------------------------------------- */
#include <stdio.h>
#include <stdlib.h>
#include "mpi.h"

double dboard(int darts);

#define DARTS  5000   /* number of throws at dartboard */
#define ROUNDS 10     /* number of times "darts" is iterated */
#define MASTER 0      /* task ID of master task */

int main(int argc, char **argv)
{
    double homepi,    /* value of pi calculated by current task */
           pisum,     /* sum of tasks' pi values */
           pi,        /* average of pi after "darts" is thrown */
           avepi;     /* average pi value for all iterations */
    int    mytid,     /* task ID - also used as seed number */
           nproc,     /* number of tasks */
           rcode,     /* return code */
           i;

    /* Obtain number of tasks and task ID */
    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &mytid);
    MPI_Comm_size(MPI_COMM_WORLD, &nproc);
    printf("MPI task ID = %d\n", mytid);

    /* Set seed for random number generator equal to task ID */
    srandom(mytid);

    avepi = 0;
    for (i = 0; i < ROUNDS; i++) {
        /* All tasks calculate pi using the dartboard algorithm */
        homepi = dboard(DARTS);

        /* Use MPI_Reduce to sum values of homepi across all tasks.
         * Master will store the accumulated value in pisum:
         * - homepi is the send buffer
         * - pisum is the receive buffer (used by the receiving task only)
         * - the message consists of 1 element of type MPI_DOUBLE
         * - MASTER is the task that will receive the result of the
         *   reduction operation
         */
        rcode = MPI_Reduce(&homepi, &pisum, 1, MPI_DOUBLE, MPI_SUM,
                           MASTER, MPI_COMM_WORLD);
        if (rcode != MPI_SUCCESS)
            printf("%d: failure on MPI_Reduce\n", mytid);

        /* Master computes average for this iteration and all iterations */
        if (mytid == MASTER) {
            pi = pisum / nproc;
            avepi = ((avepi * i) + pi) / (i + 1);
            printf("   After %3d throws, average value of pi = %10.8f\n",
                   (DARTS * (i + 1)), avepi);
        }
    }

    MPI_Finalize();
    return 0;
}
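
/* ---------------------------------------------------------------
 * NOTE: dboard() is defined in dboard.c (not shown here).  The block
 * below is a minimal sketch of the dartboard algorithm described in
 * the header comment (Fox et al. 1988); it is an assumption about
 * what dboard.c might contain, not the distributed source.  It assumes
 * random() returns values in the range [0, 2^31 - 1], per POSIX.
 * It is guarded with #if 0 so it does not collide with the real
 * definition in dboard.c at link time.
 * --------------------------------------------------------------- */
#if 0   /* illustration only; the real definition lives in dboard.c */
double dboard(int darts)
{
    double x_coord,    /* x coordinate of a dart, between -1 and 1 */
           y_coord,    /* y coordinate of a dart, between -1 and 1 */
           r;          /* random value scaled to [0, 1] */
    int    score = 0,  /* number of darts that land inside the circle */
           n;

    for (n = 1; n <= darts; n++) {
        /* throw a dart at a random point in the 2 x 2 square */
        r = (double)random() / 2147483647.0;
        x_coord = (2.0 * r) - 1.0;
        r = (double)random() / 2147483647.0;
        y_coord = (2.0 * r) - 1.0;

        /* count a hit if the dart lands inside the unit circle */
        if ((x_coord * x_coord) + (y_coord * y_coord) <= 1.0)
            score++;
    }

    /* hits/throws estimates (circle area)/(square area) = pi/4,
     * so pi is approximately 4 * score / darts */
    return 4.0 * (double)score / (double)darts;
}
#endif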