      program laplace
************************************************************
*   Parallel MPI program to solve Laplace's equation
*   (d2/dx2)u + (d2/dy2)u = 0 using the Jacobi method.
*   The grid is decomposed into strips, one per process,
*   with halo rows exchanged every iteration.
************************************************************
      implicit none
      include "mpif.h"

      integer n, m
      parameter(n=200, m=200/4)
      double precision u(0:n+1,0:m+1), unew(1:n,1:m)
      double precision error, tot_error
      integer iter, i, j
      integer ierror, rank, size
      integer send_status(MPI_STATUS_SIZE)
      integer recv_status(MPI_STATUS_SIZE)
c Uncomment for SENDRECV solution
c     integer recv_status1(MPI_STATUS_SIZE),
c    &        recv_status2(MPI_STATUS_SIZE)
      integer up, down, up_request, dn_request
      integer :: tag = 10

c Set up MPI
      call MPI_Init(ierror)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierror)
      call MPI_Comm_size(MPI_COMM_WORLD, size, ierror)

c The strip width m is hard-wired to n/4, so exactly 4 ranks
      if (size .ne. 4) then
         print *, "Can only run with 4 processes!"
         call MPI_Finalize(ierror)
         stop
      endif

c Neighbours in the strip decomposition; MPI_PROC_NULL at the
c physical boundaries turns the matching sends/receives into no-ops
      up = rank + 1
      if (up == size) up = MPI_PROC_NULL
      down = rank - 1
      if (down == -1) down = MPI_PROC_NULL

c Initialise local grid (including halos) and work array
      do j = 0,m+1
         do i = 0,n+1
            u(i,j) = 0.0d0
         enddo
      enddo
      do j = 1,m
         do i = 1,n
            unew(i,j) = 0.0d0
         enddo
      enddo

c Boundary conditions:
c
c              u=10
c            --------
c           |        |
c           |        |
c      u=0  |        |  u=0
c           |        |
c           |        |
c            --------
c              u=0

      if (rank == 0) then
         do i = 1,n
            u(i,0) = 0.0d0
         enddo
      endif
      if (rank == size-1) then
         do i = 1,n
            u(i,m+1) = 10.0d0
         enddo
      endif
      do j = 1,m
         u(0,j)   = 0.0d0
         u(n+1,j) = 0.0d0
      enddo

c Main iterative loop
      tot_error = 1000.0d0
      iter = 0
      do while ( tot_error .gt. 1.0d-6 .and. iter .lt. 1000 )

c Jacobi update: each point becomes the average of its four
c neighbours
         do j = 1,m
            do i = 1,n
               unew(i,j) = 0.25d0*( u(i+1,j) + u(i-1,j)
     .                            + u(i,j+1) + u(i,j-1) )
            enddo
         enddo

c Accumulate the local error and copy unew back into u
         error = 0.0d0
         do j = 1,m
            do i = 1,n
               error = error + abs(unew(i,j) - u(i,j))
               u(i,j) = unew(i,j)
            enddo
         enddo

c        print *, "Iteration ",iter,", error = ",error

c Halo exchange: fill in the borders of u that are internal to
c the full grid.  Non-blocking synchronous sends avoid deadlock
c because the matching MPI_Recv calls can complete while the
c sends are still pending.
c Comment out the following six calls for the SENDRECV solution
         call MPI_Issend( u(1,m), n, MPI_DOUBLE_PRECISION, up, tag,
     .        MPI_COMM_WORLD, up_request, ierror)
         call MPI_Issend( u(1,1), n, MPI_DOUBLE_PRECISION, down, tag,
     .        MPI_COMM_WORLD, dn_request, ierror)
         call MPI_Recv( u(1,0), n, MPI_DOUBLE_PRECISION, down, tag,
     .        MPI_COMM_WORLD, recv_status, ierror)
         call MPI_Recv( u(1,m+1), n, MPI_DOUBLE_PRECISION, up, tag,
     .        MPI_COMM_WORLD, recv_status, ierror)
         call MPI_Wait(up_request, send_status, ierror)
         call MPI_Wait(dn_request, send_status, ierror)
c
c Uncomment for SENDRECV solution
c     call MPI_Sendrecv( u(1,1), n, MPI_DOUBLE_PRECISION, down,
c    &      tag, u(1,m+1), n, MPI_DOUBLE_PRECISION, up, tag,
c    &      MPI_COMM_WORLD, recv_status1, ierror)
c
c     call MPI_Sendrecv( u(1,m), n, MPI_DOUBLE_PRECISION, up,
c    &      tag, u(1,0), n, MPI_DOUBLE_PRECISION, down, tag,
c    &      MPI_COMM_WORLD, recv_status2, ierror)

c Get the total error across all ranks
         call MPI_Allreduce(error, tot_error, 1, MPI_DOUBLE_PRECISION,
     .        MPI_SUM, MPI_COMM_WORLD, ierror)

         iter = iter + 1
         if ( rank == 0 .and. mod(iter,100) .eq. 0 )
     .        print *, "Iteration ",iter,", error = ",tot_error

      enddo

      call MPI_Finalize(ierror)
      stop
      end
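c ----------------------------------------------------------------
c Build/run sketch.  The wrapper and launcher names below
c (mpif90, mpirun) are assumptions: they are common but
c site-specific, so adjust for your MPI installation.
c
c   mpif90 -o laplace laplace.f
c   mpirun -np 4 ./laplace
c
c The global 200x200 grid is split into 4 strips of m = 200/4
c grid lines each, which is why the program insists on exactly
c 4 processes.
c ----------------------------------------------------------------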