From 4a834b382cbb22345ad3ed812f043125b7568222 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Wed, 31 May 2023 19:01:05 -0400 Subject: [PATCH 01/17] Implements MPI_SCATTERV in MPP_SCATTER --- mpp/include/mpp_comm.inc | 6 ++ mpp/include/mpp_scatter.fh | 216 +++++++++++++++++++------------------ 2 files changed, 117 insertions(+), 105 deletions(-) diff --git a/mpp/include/mpp_comm.inc b/mpp/include/mpp_comm.inc index 2355102ea9..bc5138bd5b 100644 --- a/mpp/include/mpp_comm.inc +++ b/mpp/include/mpp_comm.inc @@ -427,6 +427,8 @@ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ #define MPP_TYPE_ integer(i4_kind) +#undef MPI_TYPE_ +#define MPI_TYPE_ MPI_INTEGER4 #define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int4_2d #define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int4_3d #include @@ -435,6 +437,8 @@ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ #define MPP_TYPE_ real(r4_kind) +#undef MPI_TYPE_ +#define MPI_TYPE_ MPI_REAL4 #define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real4_2d #define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real4_3d #include @@ -443,6 +447,8 @@ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ #define MPP_TYPE_ real(r8_kind) +#undef MPI_TYPE_ +#define MPI_TYPE_ MPI_REAL8 #define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real8_2d #define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real8_3d #include diff --git a/mpp/include/mpp_scatter.fh b/mpp/include/mpp_scatter.fh index 4223f79c39..7e45469c34 100644 --- a/mpp/include/mpp_scatter.fh +++ b/mpp/include/mpp_scatter.fh @@ -49,111 +49,117 @@ end subroutine MPP_SCATTER_PELIST_2D_ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, is_root_pe, & - ishift, jshift) - integer, intent(in) :: is, ie, js, je, nk - integer, dimension(:), intent(in) :: pelist - MPP_TYPE_, dimension(is:ie,js:je,1:nk), intent(inout) :: array_seg - MPP_TYPE_, dimension(:,:,:), intent(in) :: data - logical, intent(in) :: is_root_pe - integer, optional, intent(in) :: ishift, jshift - - integer :: i, msgsize, root_pe, root_pe_test - integer :: i1, i2, j1, j2, ioff, joff - integer :: my_ind(4), gind(4,size(pelist)) - type array3D - MPP_TYPE_, dimension(:,:,:), allocatable :: data - endtype array3D - type(array3d), dimension(size(pelist)) :: temp - - if (.not.ANY(mpp_pe().eq.pelist(:))) return - - if (is_root_pe) then - root_pe = mpp_pe() - root_pe_test = 999 - if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, & - "fms_io(mpp_scatter_pelist): root_pe not a member of pelist") - else - root_pe = 0 - root_pe_test = -999 - endif -! need this check in case MPI-rank 0 is a member of the pelist - call mpp_max(root_pe_test, pelist) - if (root_pe_test.lt.0) call mpp_error(FATAL, & - "fms_io(mpp_scatter_pelist): root_pe not specified or not a member of the pelist") -! need to make sure only one root_pe has been specified - call mpp_sum(root_pe, pelist) - if ((is_root_pe) .and. (mpp_pe().ne.root_pe)) call mpp_error(FATAL, & - "fms_io(mpp_scatter_pelist): too many root_pes specified") - - - ioff=0 - joff=0 - if (present(ishift)) ioff=ishift - if (present(jshift)) joff=jshift - - my_ind(1) = is - my_ind(2) = ie - my_ind(3) = js - my_ind(4) = je - -! scatter indices into global index on root_pe - if (is_root_pe) then - do i = 1, size(pelist) -! 
root_pe data copy - no send to self - if (pelist(i).eq.root_pe) then - gind(:,i) = my_ind(:) - else - call mpp_recv(gind(:,i:i), 4, pelist(i), .FALSE., COMM_TAG_1) - endif - enddo - call mpp_sync_self(check=EVENT_RECV) - gind(1,:)=gind(1,:)+ioff - gind(2,:)=gind(2,:)+ioff - gind(3,:)=gind(3,:)+joff - gind(4,:)=gind(4,:)+joff -! check indices to make sure they are within the range of "data" - if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) & - call mpp_error(FATAL,"fms_io(mpp_scatter_pelist): specified indices (with shift) are outside of the & - &range of the receiving array") - else -! non root_pe's send indices to root_pe - call mpp_send(my_ind(:), 4, root_pe, COMM_TAG_1) - call mpp_sync_self(check=EVENT_SEND) - endif - -! scatter segments into data based on indices - if (is_root_pe) then - do i = 1, size(pelist) - if (pelist(i).ne.root_pe) then ! no send to self - i1 = gind(1,i) - i2 = gind(2,i) - j1 = gind(3,i) - j2 = gind(4,i) - msgsize = (i2-i1+1)*(j2-j1+1)*nk -! allocate and copy data into a contiguous memory space - allocate(temp(i)%data(i1:i2,j1:j2,1:nk)) - temp(i)%data(i1:i2,j1:j2,1:nk)=data(i1:i2,j1:j2,1:nk) - call mpp_send(temp(i)%data, msgsize, pelist(i), COMM_TAG_2) - else -! data copy - no send to self - array_seg(is:ie,js:je,1:nk) = data(is+ioff:ie+ioff,js+joff:je+joff,1:nk) - endif - enddo - call mpp_sync_self(check=EVENT_SEND) -! deallocate the temporary array used for the send - do i = 1, size(pelist) - if (allocated(temp(i)%data)) deallocate(temp(i)%data) - enddo - else -! non root_pe's recv data from root_pe - msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk - call mpp_recv(array_seg, msgsize, root_pe, .FALSE., COMM_TAG_2) - call mpp_sync_self(check=EVENT_RECV) - endif - - call mpp_sync_self() - - return + ishift, jshift) + integer, intent(in) :: is, ie, js, je, nk + integer, dimension(:), intent(in) :: pelist + MPP_TYPE_, dimension(is:ie,js:je,1:nk), intent(inout) :: array_seg + MPP_TYPE_, dimension(:,:,:), intent(in) :: data + logical, intent(in) :: is_root_pe + integer, optional, intent(in) :: ishift, jshift + + integer :: i, msgsize, root_pe, root_pe_test + integer :: i1, j1 !< Starting indices of i and j + integer :: i2, j2 !< Ending indices of i and j + integer :: ioff, joff !< Offsets to i and j + integer :: my_ind(4) !< My starting and ending indices of i and j + integer :: gind(4,size(pelist)) !< Starting and ending indices of all processes in the group + integer :: n !< Peset number + integer :: ierr !< MPI error state + MPP_TYPE_, allocatable :: send_buf(:) !< Packed data to be scattered; only relevant to the root pe + MPP_TYPE_, allocatable :: recv_buf(:) !< My chunk of data + integer :: send_count(size(pelist)) !< Stores message sizes for all processes in the group + integer :: displ(size(pelist)) !< Displacements for data segments + integer :: total_msgsize + + if (.not.ANY(mpp_pe().eq.pelist(:))) return + + ! Get peset number + n = get_peset(pelist); if( peset(n)%count.EQ.1 )return + + if (is_root_pe) then + root_pe = mpp_pe() + root_pe_test = 999 + if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, & + "fms_io(mpp_scatter_pelist): root_pe not a member of pelist") + if (root_pe /= pelist(1)) then + call mpp_error(FATAL, "fms_io(mpp_scatter_pelist): root_pe is not the first pe of pelist") + end if + else + root_pe = 0 + root_pe_test = -999 + endif + ! 
need this check in case MPI-rank 0 is a member of the pelist + call mpp_max(root_pe_test, pelist) + if (root_pe_test.lt.0) call mpp_error(FATAL, & + "fms_io(mpp_scatter_pelist): root_pe not specified or not a member of the pelist") + ! need to make sure only one root_pe has been specified + call mpp_sum(root_pe, pelist) + if ((is_root_pe) .and. (mpp_pe().ne.root_pe)) call mpp_error(FATAL, & + "fms_io(mpp_scatter_pelist): too many root_pes specified") + + send_count = 0 + total_msgsize = 0 + ioff=0 + joff=0 + if (present(ishift)) ioff=ishift + if (present(jshift)) joff=jshift + + my_ind(1) = is + my_ind(2) = ie + my_ind(3) = js + my_ind(4) = je + + ! Gather indices from all processes in the group at the root pe + call MPI_GATHER(my_ind, 4, MPI_INTEGER, gind, 4, MPI_INTEGER, root_pe, peset(n)%id, ierr) + if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_GATHER") + + ! Compute my message size + msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk + allocate(recv_buf(msgsize)) + + ! Update group indices + if (is_root_pe) then + gind(1,:)=gind(1,:)+ioff + gind(2,:)=gind(2,:)+ioff + gind(3,:)=gind(3,:)+joff + gind(4,:)=gind(4,:)+joff + ! check indices to make sure they are within the range of "data" + if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) & + call mpp_error(FATAL,"fms_io(mpp_scatter_pelist): specified indices (with shift) are outside & + of the range of the receiving array") + end if + + if (is_root_pe) then + do i = 1, size(pelist) + i1 = gind(1,i) + i2 = gind(2,i) + j1 = gind(3,i) + j2 = gind(4,i) + ! Pack message sizes + send_count(i) = (i2-i1+1)*(j2-j1+1)*nk + total_msgsize = total_msgsize + send_count(i) + ! Compute data displacements + displ(i) = total_msgsize - send_count(i) + ! Pack data segments + if (i == 1) then + send_buf = reshape(data(i1:i2,j1:j2,1:nk), (/size(data(i1:i2,j1:j2,1:nk))/)) + else + send_buf = reshape(send_buf, (/size(send_buf)+size(data(i1:i2,j1:j2,1:nk))/), data(i1:i2,j1:j2,1:nk)) + end if + enddo + end if + + ! Scatter data segments to respective processes + call MPI_SCATTERV(send_buf, send_count, displ, MPI_TYPE_, recv_buf, & + msgsize, MPI_TYPE_, root_pe, peset(n)%id, ierr) + if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_SCATTERV") + + call mpp_sync_self() + + ! Unpack received data + array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/)) + + return end subroutine MPP_SCATTER_PELIST_3D_ !> @} From a915f9c5d4cbd7ac72b0982b33f13112fa07037f Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Wed, 31 May 2023 19:57:46 -0400 Subject: [PATCH 02/17] Replaced MPI_INTEGER with MPI_TYPE_ --- mpp/include/mpp_scatter.fh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpp/include/mpp_scatter.fh b/mpp/include/mpp_scatter.fh index 7e45469c34..7240b2f91e 100644 --- a/mpp/include/mpp_scatter.fh +++ b/mpp/include/mpp_scatter.fh @@ -110,7 +110,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i my_ind(4) = je ! Gather indices from all processes in the group at the root pe - call MPI_GATHER(my_ind, 4, MPI_INTEGER, gind, 4, MPI_INTEGER, root_pe, peset(n)%id, ierr) + call MPI_GATHER(my_ind, 4, MPI_TYPE_, gind, 4, MPI_TYPE_, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_GATHER") ! 
Compute my message size From b02d5352e96502e782d75d2c600da92e7d88ea31 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Thu, 1 Jun 2023 10:48:51 -0400 Subject: [PATCH 03/17] Adds routine MPP_SCATTER_ for non-MPI version --- mpp/include/mpp_transmit_nocomm.fh | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/mpp/include/mpp_transmit_nocomm.fh b/mpp/include/mpp_transmit_nocomm.fh index ca132a4dc8..ade2cfd4ac 100644 --- a/mpp/include/mpp_transmit_nocomm.fh +++ b/mpp/include/mpp_transmit_nocomm.fh @@ -151,5 +151,26 @@ return end subroutine MPP_BROADCAST_ +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! MPP_SCATTER ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +subroutine MPP_SCATTER_(is, ie, js, je, pelist, array_seg, data, is_root_pe, ishift, jshift) + integer, intent(in) :: is, ie, js, je !< indices of segment array + integer, dimension(:), intent(in) :: pelist ! From 0258b802d1560a5b42f1cb792075830228bc27f5 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Thu, 1 Jun 2023 15:04:45 -0400 Subject: [PATCH 04/17] Adds mpp_scatter_mpi.fh and mpp_scatter_nocomm.fh --- mpp/Makefile.am | 6 +- mpp/include/mpp_comm.inc | 58 ++++---- mpp/include/mpp_comm_mpi.inc | 36 +++++ mpp/include/mpp_comm_nocomm.inc | 36 +++++ mpp/include/mpp_scatter.fh | 216 ++++++++++++++--------------- mpp/include/mpp_scatter_mpi.fh | 155 +++++++++++++++++++++ mpp/include/mpp_scatter_nocomm.fh | 35 +++++ mpp/include/mpp_transmit_nocomm.fh | 21 --- 8 files changed, 400 insertions(+), 163 deletions(-) create mode 100644 mpp/include/mpp_scatter_mpi.fh create mode 100644 mpp/include/mpp_scatter_nocomm.fh diff --git a/mpp/Makefile.am b/mpp/Makefile.am index c7e482ea13..a5656ad9c8 100644 --- a/mpp/Makefile.am +++ b/mpp/Makefile.am @@ -101,7 +101,8 @@ libmpp_la_SOURCES = \ include/mpp_read_distributed_ascii.inc \ include/mpp_reduce_mpi.fh \ include/mpp_reduce_nocomm.fh \ - include/mpp_scatter.fh \ + include/mpp_scatter_mpi.fh \ + include/mpp_scatter_nocomm.fh \ include/mpp_sum.inc \ include/mpp_sum_ad.inc \ include/mpp_sum_mpi.fh \ @@ -160,7 +161,8 @@ mpp_mod.$(FC_MODEXT): \ include/mpp_transmit_nocomm.fh \ include/mpp_type_nocomm.fh \ include/mpp_gather.fh \ - include/mpp_scatter.fh \ + include/mpp_scatter_mpi.fh \ + include/mpp_scatter_nocomm.fh \ include/system_clock.fh mpp_data_mod.$(FC_MODEXT): \ mpp_parameter_mod.$(FC_MODEXT) \ diff --git a/mpp/include/mpp_comm.inc b/mpp/include/mpp_comm.inc index bc5138bd5b..688adfda80 100644 --- a/mpp/include/mpp_comm.inc +++ b/mpp/include/mpp_comm.inc @@ -423,33 +423,33 @@ #include !################################################# -#undef MPP_SCATTER_PELIST_2D_ -#undef MPP_SCATTER_PELIST_3D_ -#undef MPP_TYPE_ -#define MPP_TYPE_ integer(i4_kind) -#undef MPI_TYPE_ -#define MPI_TYPE_ MPI_INTEGER4 -#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int4_2d -#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int4_3d -#include - -#undef MPP_SCATTER_PELIST_2D_ -#undef MPP_SCATTER_PELIST_3D_ -#undef MPP_TYPE_ -#define MPP_TYPE_ real(r4_kind) -#undef MPI_TYPE_ -#define MPI_TYPE_ MPI_REAL4 -#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real4_2d -#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real4_3d -#include - -#undef MPP_SCATTER_PELIST_2D_ -#undef MPP_SCATTER_PELIST_3D_ -#undef MPP_TYPE_ -#define MPP_TYPE_ real(r8_kind) -#undef MPI_TYPE_ -#define MPI_TYPE_ MPI_REAL8 -#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real8_2d -#define MPP_SCATTER_PELIST_3D_ 
mpp_scatter_pelist_real8_3d -#include +!#undef MPP_SCATTER_PELIST_2D_ +!#undef MPP_SCATTER_PELIST_3D_ +!#undef MPP_TYPE_ +!#define MPP_TYPE_ integer(i4_kind) +!#undef MPI_TYPE_ +!#define MPI_TYPE_ MPI_INTEGER4 +!#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int4_2d +!#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int4_3d +!#include + +!#undef MPP_SCATTER_PELIST_2D_ +!#undef MPP_SCATTER_PELIST_3D_ +!#undef MPP_TYPE_ +!#define MPP_TYPE_ real(r4_kind) +!#undef MPI_TYPE_ +!#define MPI_TYPE_ MPI_REAL4 +!#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real4_2d +!#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real4_3d +!#include + +!#undef MPP_SCATTER_PELIST_2D_ +!#undef MPP_SCATTER_PELIST_3D_ +!#undef MPP_TYPE_ +!#define MPP_TYPE_ real(r8_kind) +!#undef MPI_TYPE_ +!#define MPI_TYPE_ MPI_REAL8 +!#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real8_2d +!#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real8_3d +!#include !> @} diff --git a/mpp/include/mpp_comm_mpi.inc b/mpp/include/mpp_comm_mpi.inc index 928f9fcb92..16e9711d1c 100644 --- a/mpp/include/mpp_comm_mpi.inc +++ b/mpp/include/mpp_comm_mpi.inc @@ -1304,6 +1304,42 @@ end subroutine mpp_exit #define MPI_TYPE_ MPI_INTEGER8 #include +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! MPP_SCATTER: MPI_SCATTERV, MPI_GATHER ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +#undef MPP_SCATTER_PELIST_2D_ +#undef MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int4_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int4_3d +#define MPP_TYPE_ integer(i4_kind) +#define MPI_TYPE_ MPI_INTEGER4 +#include + +#undef MPP_SCATTER_PELIST_2D_ +#undef MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real4_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real4_3d +#define MPP_TYPE_ real(r4_kind) +#define MPI_TYPE_ MPI_REAL4 +#include + +#undef MPP_SCATTER_PELIST_2D_ +#undef MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real8_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real8_3d +#define MPP_TYPE_ real(r8_kind) +#define MPI_TYPE_ MPI_REAL8 +#include + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ! ! ! DATA TRANSFER TYPES: mpp_type_create, mpp_type_free ! diff --git a/mpp/include/mpp_comm_nocomm.inc b/mpp/include/mpp_comm_nocomm.inc index a1d849b831..d9ac19d0f2 100644 --- a/mpp/include/mpp_comm_nocomm.inc +++ b/mpp/include/mpp_comm_nocomm.inc @@ -720,6 +720,42 @@ end subroutine mpp_exit #include #undef MPP_TYPE_INIT_VALUE +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! MPP_SCATTER: MPI_SCATTERV, MPI_GATHER ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ +#undef MPP_SCATTER_PELIST_2D_ +#undef MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int4_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int4_3d +#define MPP_TYPE_ integer(i4_kind) +#define MPI_TYPE_ MPI_INTEGER4 +#include + +#undef MPP_SCATTER_PELIST_2D_ +#undef MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real4_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real4_3d +#define MPP_TYPE_ real(r4_kind) +#define MPI_TYPE_ MPI_REAL4 +#include + +#undef MPP_SCATTER_PELIST_2D_ +#undef MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real8_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real8_3d +#define MPP_TYPE_ real(r8_kind) +#define MPI_TYPE_ MPI_REAL8 +#include + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ! ! ! GLOBAL REDUCTION ROUTINES: mpp_max, mpp_sum, mpp_min ! diff --git a/mpp/include/mpp_scatter.fh b/mpp/include/mpp_scatter.fh index 7240b2f91e..4223f79c39 100644 --- a/mpp/include/mpp_scatter.fh +++ b/mpp/include/mpp_scatter.fh @@ -49,117 +49,111 @@ end subroutine MPP_SCATTER_PELIST_2D_ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, is_root_pe, & - ishift, jshift) - integer, intent(in) :: is, ie, js, je, nk - integer, dimension(:), intent(in) :: pelist - MPP_TYPE_, dimension(is:ie,js:je,1:nk), intent(inout) :: array_seg - MPP_TYPE_, dimension(:,:,:), intent(in) :: data - logical, intent(in) :: is_root_pe - integer, optional, intent(in) :: ishift, jshift - - integer :: i, msgsize, root_pe, root_pe_test - integer :: i1, j1 !< Starting indices of i and j - integer :: i2, j2 !< Ending indices of i and j - integer :: ioff, joff !< Offsets to i and j - integer :: my_ind(4) !< My starting and ending indices of i and j - integer :: gind(4,size(pelist)) !< Starting and ending indices of all processes in the group - integer :: n !< Peset number - integer :: ierr !< MPI error state - MPP_TYPE_, allocatable :: send_buf(:) !< Packed data to be scattered; only relevant to the root pe - MPP_TYPE_, allocatable :: recv_buf(:) !< My chunk of data - integer :: send_count(size(pelist)) !< Stores message sizes for all processes in the group - integer :: displ(size(pelist)) !< Displacements for data segments - integer :: total_msgsize - - if (.not.ANY(mpp_pe().eq.pelist(:))) return - - ! Get peset number - n = get_peset(pelist); if( peset(n)%count.EQ.1 )return - - if (is_root_pe) then - root_pe = mpp_pe() - root_pe_test = 999 - if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, & - "fms_io(mpp_scatter_pelist): root_pe not a member of pelist") - if (root_pe /= pelist(1)) then - call mpp_error(FATAL, "fms_io(mpp_scatter_pelist): root_pe is not the first pe of pelist") - end if - else - root_pe = 0 - root_pe_test = -999 - endif - ! need this check in case MPI-rank 0 is a member of the pelist - call mpp_max(root_pe_test, pelist) - if (root_pe_test.lt.0) call mpp_error(FATAL, & - "fms_io(mpp_scatter_pelist): root_pe not specified or not a member of the pelist") - ! need to make sure only one root_pe has been specified - call mpp_sum(root_pe, pelist) - if ((is_root_pe) .and. 
(mpp_pe().ne.root_pe)) call mpp_error(FATAL, & - "fms_io(mpp_scatter_pelist): too many root_pes specified") - - send_count = 0 - total_msgsize = 0 - ioff=0 - joff=0 - if (present(ishift)) ioff=ishift - if (present(jshift)) joff=jshift - - my_ind(1) = is - my_ind(2) = ie - my_ind(3) = js - my_ind(4) = je - - ! Gather indices from all processes in the group at the root pe - call MPI_GATHER(my_ind, 4, MPI_TYPE_, gind, 4, MPI_TYPE_, root_pe, peset(n)%id, ierr) - if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_GATHER") - - ! Compute my message size - msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk - allocate(recv_buf(msgsize)) - - ! Update group indices - if (is_root_pe) then - gind(1,:)=gind(1,:)+ioff - gind(2,:)=gind(2,:)+ioff - gind(3,:)=gind(3,:)+joff - gind(4,:)=gind(4,:)+joff - ! check indices to make sure they are within the range of "data" - if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) & - call mpp_error(FATAL,"fms_io(mpp_scatter_pelist): specified indices (with shift) are outside & - of the range of the receiving array") - end if - - if (is_root_pe) then - do i = 1, size(pelist) - i1 = gind(1,i) - i2 = gind(2,i) - j1 = gind(3,i) - j2 = gind(4,i) - ! Pack message sizes - send_count(i) = (i2-i1+1)*(j2-j1+1)*nk - total_msgsize = total_msgsize + send_count(i) - ! Compute data displacements - displ(i) = total_msgsize - send_count(i) - ! Pack data segments - if (i == 1) then - send_buf = reshape(data(i1:i2,j1:j2,1:nk), (/size(data(i1:i2,j1:j2,1:nk))/)) - else - send_buf = reshape(send_buf, (/size(send_buf)+size(data(i1:i2,j1:j2,1:nk))/), data(i1:i2,j1:j2,1:nk)) - end if - enddo - end if - - ! Scatter data segments to respective processes - call MPI_SCATTERV(send_buf, send_count, displ, MPI_TYPE_, recv_buf, & - msgsize, MPI_TYPE_, root_pe, peset(n)%id, ierr) - if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_SCATTERV") - - call mpp_sync_self() - - ! Unpack received data - array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/)) - - return + ishift, jshift) + integer, intent(in) :: is, ie, js, je, nk + integer, dimension(:), intent(in) :: pelist + MPP_TYPE_, dimension(is:ie,js:je,1:nk), intent(inout) :: array_seg + MPP_TYPE_, dimension(:,:,:), intent(in) :: data + logical, intent(in) :: is_root_pe + integer, optional, intent(in) :: ishift, jshift + + integer :: i, msgsize, root_pe, root_pe_test + integer :: i1, i2, j1, j2, ioff, joff + integer :: my_ind(4), gind(4,size(pelist)) + type array3D + MPP_TYPE_, dimension(:,:,:), allocatable :: data + endtype array3D + type(array3d), dimension(size(pelist)) :: temp + + if (.not.ANY(mpp_pe().eq.pelist(:))) return + + if (is_root_pe) then + root_pe = mpp_pe() + root_pe_test = 999 + if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, & + "fms_io(mpp_scatter_pelist): root_pe not a member of pelist") + else + root_pe = 0 + root_pe_test = -999 + endif +! need this check in case MPI-rank 0 is a member of the pelist + call mpp_max(root_pe_test, pelist) + if (root_pe_test.lt.0) call mpp_error(FATAL, & + "fms_io(mpp_scatter_pelist): root_pe not specified or not a member of the pelist") +! need to make sure only one root_pe has been specified + call mpp_sum(root_pe, pelist) + if ((is_root_pe) .and. 
(mpp_pe().ne.root_pe)) call mpp_error(FATAL, & + "fms_io(mpp_scatter_pelist): too many root_pes specified") + + + ioff=0 + joff=0 + if (present(ishift)) ioff=ishift + if (present(jshift)) joff=jshift + + my_ind(1) = is + my_ind(2) = ie + my_ind(3) = js + my_ind(4) = je + +! scatter indices into global index on root_pe + if (is_root_pe) then + do i = 1, size(pelist) +! root_pe data copy - no send to self + if (pelist(i).eq.root_pe) then + gind(:,i) = my_ind(:) + else + call mpp_recv(gind(:,i:i), 4, pelist(i), .FALSE., COMM_TAG_1) + endif + enddo + call mpp_sync_self(check=EVENT_RECV) + gind(1,:)=gind(1,:)+ioff + gind(2,:)=gind(2,:)+ioff + gind(3,:)=gind(3,:)+joff + gind(4,:)=gind(4,:)+joff +! check indices to make sure they are within the range of "data" + if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) & + call mpp_error(FATAL,"fms_io(mpp_scatter_pelist): specified indices (with shift) are outside of the & + &range of the receiving array") + else +! non root_pe's send indices to root_pe + call mpp_send(my_ind(:), 4, root_pe, COMM_TAG_1) + call mpp_sync_self(check=EVENT_SEND) + endif + +! scatter segments into data based on indices + if (is_root_pe) then + do i = 1, size(pelist) + if (pelist(i).ne.root_pe) then ! no send to self + i1 = gind(1,i) + i2 = gind(2,i) + j1 = gind(3,i) + j2 = gind(4,i) + msgsize = (i2-i1+1)*(j2-j1+1)*nk +! allocate and copy data into a contiguous memory space + allocate(temp(i)%data(i1:i2,j1:j2,1:nk)) + temp(i)%data(i1:i2,j1:j2,1:nk)=data(i1:i2,j1:j2,1:nk) + call mpp_send(temp(i)%data, msgsize, pelist(i), COMM_TAG_2) + else +! data copy - no send to self + array_seg(is:ie,js:je,1:nk) = data(is+ioff:ie+ioff,js+joff:je+joff,1:nk) + endif + enddo + call mpp_sync_self(check=EVENT_SEND) +! deallocate the temporary array used for the send + do i = 1, size(pelist) + if (allocated(temp(i)%data)) deallocate(temp(i)%data) + enddo + else +! non root_pe's recv data from root_pe + msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk + call mpp_recv(array_seg, msgsize, root_pe, .FALSE., COMM_TAG_2) + call mpp_sync_self(check=EVENT_RECV) + endif + + call mpp_sync_self() + + return end subroutine MPP_SCATTER_PELIST_3D_ !> @} diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh new file mode 100644 index 0000000000..e234dde7b3 --- /dev/null +++ b/mpp/include/mpp_scatter_mpi.fh @@ -0,0 +1,155 @@ +!*********************************************************************** +!* GNU Lesser General Public License +!* +!* This file is part of the GFDL Flexible Modeling System (FMS). +!* +!* FMS is free software: you can redistribute it and/or modify it under +!* the terms of the GNU Lesser General Public License as published by +!* the Free Software Foundation, either version 3 of the License, or (at +!* your option) any later version. +!* +!* FMS is distributed in the hope that it will be useful, but WITHOUT +!* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +!* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +!* for more details. +!* +!* You should have received a copy of the GNU Lesser General Public +!* License along with FMS. If not, see . +!*********************************************************************** + +!> addtogroup mpp_mod +!> @{ + +!> @brief Scatter data from one pe to the specified pes. +!! +!> Scatter (ie - is) * (je - js) contiguous elements of array data from the designated root pe +!! 
into contigous members of array segment in each pe that is included in the pelist argument. +subroutine MPP_SCATTER_PELIST_2D_(is, ie, js, je, pelist, array_seg, data, is_root_pe, & + ishift, jshift) + integer, intent(in) :: is, ie, js, je !< indices of segment array + integer, dimension(:), intent(in) :: pelist ! @} diff --git a/mpp/include/mpp_scatter_nocomm.fh b/mpp/include/mpp_scatter_nocomm.fh new file mode 100644 index 0000000000..3772b575c0 --- /dev/null +++ b/mpp/include/mpp_scatter_nocomm.fh @@ -0,0 +1,35 @@ +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! MPP_SCATTER ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +subroutine MPP_SCATTER_PELIST_2D_(is, ie, js, je, pelist, array_seg, data, is_root_pe, ishift, jshift) + integer, intent(in) :: is, ie, js, je !< indices of segment array + integer, dimension(:), intent(in) :: pelist ! From 7ce5ea3ed4fa4e1e62ee8e2ad65b0e1484854956 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Thu, 1 Jun 2023 22:53:07 -0400 Subject: [PATCH 05/17] Deleted mpp_scatter.fh --- mpp/include/mpp_scatter.fh | 159 ------------------------------------- 1 file changed, 159 deletions(-) delete mode 100644 mpp/include/mpp_scatter.fh diff --git a/mpp/include/mpp_scatter.fh b/mpp/include/mpp_scatter.fh deleted file mode 100644 index 4223f79c39..0000000000 --- a/mpp/include/mpp_scatter.fh +++ /dev/null @@ -1,159 +0,0 @@ -!*********************************************************************** -!* GNU Lesser General Public License -!* -!* This file is part of the GFDL Flexible Modeling System (FMS). -!* -!* FMS is free software: you can redistribute it and/or modify it under -!* the terms of the GNU Lesser General Public License as published by -!* the Free Software Foundation, either version 3 of the License, or (at -!* your option) any later version. -!* -!* FMS is distributed in the hope that it will be useful, but WITHOUT -!* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -!* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -!* for more details. -!* -!* You should have received a copy of the GNU Lesser General Public -!* License along with FMS. If not, see . -!*********************************************************************** - -!> addtogroup mpp_mod -!> @{ - -!> @brief Scatter data from one pe to the specified pes. -!! -!> Scatter (ie - is) * (je - js) contiguous elements of array data from the designated root pe -!! into contigous members of array segment in each pe that is included in the pelist argument. -subroutine MPP_SCATTER_PELIST_2D_(is, ie, js, je, pelist, array_seg, data, is_root_pe, & - ishift, jshift) - integer, intent(in) :: is, ie, js, je !< indices of segment array - integer, dimension(:), intent(in) :: pelist ! @} From b503bcad7b63f46e56459a1164748162ffb6c767 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Fri, 2 Jun 2023 11:43:15 -0400 Subject: [PATCH 06/17] Updates --- mpp/include/mpp_scatter_mpi.fh | 8 ++------ mpp/include/mpp_scatter_nocomm.fh | 16 ++++++++-------- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index e234dde7b3..9a241aa249 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -21,11 +21,8 @@ !> @{ !> @brief Scatter data from one pe to the specified pes. -!! -!> Scatter (ie - is) * (je - js) contiguous elements of array data from the designated root pe -!! 
into contigous members of array segment in each pe that is included in the pelist argument. subroutine MPP_SCATTER_PELIST_2D_(is, ie, js, je, pelist, array_seg, data, is_root_pe, & - ishift, jshift) + ishift, jshift) integer, intent(in) :: is, ie, js, je !< indices of segment array integer, dimension(:), intent(in) :: pelist ! Date: Wed, 7 Jun 2023 11:53:07 -0400 Subject: [PATCH 07/17] Update mpp_scatter_mpi.fh --- mpp/include/mpp_scatter_mpi.fh | 2 -- 1 file changed, 2 deletions(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 9a241aa249..6baedc4daf 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -140,8 +140,6 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i msgsize, MPI_TYPE_, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_SCATTERV") - call mpp_sync_self() - ! Unpack received data array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/)) From a5db07d1769cee364a10c9ec02bb18491d7183c1 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Fri, 9 Jun 2023 13:00:30 -0400 Subject: [PATCH 08/17] Update packing method --- mpp/include/mpp_comm_mpi.inc | 6 +++++ mpp/include/mpp_scatter_mpi.fh | 42 ++++++++++++++++++++++++---------- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/mpp/include/mpp_comm_mpi.inc b/mpp/include/mpp_comm_mpi.inc index 16e9711d1c..cffd8ff624 100644 --- a/mpp/include/mpp_comm_mpi.inc +++ b/mpp/include/mpp_comm_mpi.inc @@ -1313,30 +1313,36 @@ end subroutine mpp_exit #undef MPP_SCATTER_PELIST_2D_ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ +#undef MPP_TYPE_BYTELEN_ #undef MPI_TYPE_ #define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int4_2d #define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int4_3d #define MPP_TYPE_ integer(i4_kind) +#define MPP_TYPE_BYTELEN_ 4 #define MPI_TYPE_ MPI_INTEGER4 #include #undef MPP_SCATTER_PELIST_2D_ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ +#undef MPP_TYPE_BYTELEN_ #undef MPI_TYPE_ #define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real4_2d #define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real4_3d #define MPP_TYPE_ real(r4_kind) +#define MPP_TYPE_BYTELEN_ 4 #define MPI_TYPE_ MPI_REAL4 #include #undef MPP_SCATTER_PELIST_2D_ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ +#undef MPP_TYPE_BYTELEN_ #undef MPI_TYPE_ #define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_real8_2d #define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_real8_3d #define MPP_TYPE_ real(r8_kind) +#define MPP_TYPE_BYTELEN_ 8 #define MPI_TYPE_ MPI_REAL8 #include diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 6baedc4daf..056b6c4ba7 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -66,11 +66,13 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i integer :: send_count(size(pelist)) !< Stores message sizes for all processes in the group integer :: displ(size(pelist)) !< Displacements for data segments integer :: total_msgsize + integer :: stdout_unit if (.not.ANY(mpp_pe().eq.pelist(:))) return ! 
Get peset number - n = get_peset(pelist); if( peset(n)%count.EQ.1 )return + n = get_peset(pelist) + if( peset(n)%count.EQ.1 )return if (is_root_pe) then root_pe = mpp_pe() @@ -78,11 +80,17 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i "fms_io(mpp_scatter_pelist): root_pe not a member of pelist") if (root_pe /= pelist(1)) then call mpp_error(FATAL, "fms_io(mpp_scatter_pelist): root_pe is not the first pe of pelist") - else - print *, 'XXXX', root_pe, pelist(1) end if endif + if( debug ) then + call SYSTEM_CLOCK(tick) + write( stdout_unit,'(a,i18,a,i6,a,2i6)' )& + 'T=',tick, ' PE=',pe, ' MPP_SCATTER begin: from_pe, length=', root_pe + end if + + if( debug .and. (current_clock.NE.0) ) call SYSTEM_CLOCK(start_tick) + send_count = 0 total_msgsize = 0 ioff=0 @@ -103,8 +111,8 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk allocate(recv_buf(msgsize)) - ! Update group indices if (is_root_pe) then + ! Update group indices gind(1,:)=gind(1,:)+ioff gind(2,:)=gind(2,:)+ioff gind(3,:)=gind(3,:)+joff @@ -113,9 +121,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) & call mpp_error(FATAL,"fms_io(mpp_scatter_pelist): specified indices (with shift) are outside & of the range of the receiving array") - end if - if (is_root_pe) then do i = 1, size(pelist) i1 = gind(1,i) i2 = gind(2,i) @@ -123,15 +129,25 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i j2 = gind(4,i) ! Pack message sizes send_count(i) = (i2-i1+1)*(j2-j1+1)*nk + ! Compute total message size to scatter total_msgsize = total_msgsize + send_count(i) ! Compute data displacements displ(i) = total_msgsize - send_count(i) + enddo + + ! Allocate send buffer + allocate(send_buf(total_msgsize)) + + total_msgsize = 0 + + do i = 1, size(pelist) + i1 = gind(1,i) + i2 = gind(2,i) + j1 = gind(3,i) + j2 = gind(4,i) + total_msgsize = total_msgsize + send_count(i) ! Pack data segments - if (i == 1) then - send_buf = reshape(data(i1:i2,j1:j2,1:nk), (/size(data(i1:i2,j1:j2,1:nk))/)) - else - send_buf = reshape(send_buf, (/size(send_buf)+size(data(i1:i2,j1:j2,1:nk))/), data(i1:i2,j1:j2,1:nk)) - end if + send_buf(displ(i)+1:total_msgsize) = reshape(data(i1:i2,j1:j2,1:nk), (/size(data(i1:i2,j1:j2,1:nk))/)) enddo end if @@ -143,7 +159,9 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i ! Unpack received data array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/)) - return + if( debug .and. 
(current_clock.NE.0) ) & + call increment_current_clock( EVENT_BROADCAST, total_msgsize * MPP_TYPE_BYTELEN_ ) + return end subroutine MPP_SCATTER_PELIST_3D_ !> @} From 548758f969745032b4969289b7e035bc3f98052e Mon Sep 17 00:00:00 2001 From: ganganoaa <121043264+ganganoaa@users.noreply.github.com> Date: Fri, 9 Jun 2023 13:29:04 -0400 Subject: [PATCH 09/17] Update mpp_scatter_mpi.fh --- mpp/include/mpp_scatter_mpi.fh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 056b6c4ba7..3ff104b049 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -83,6 +83,9 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i end if endif + if( .NOT.ANY(mpp_pe().EQ.peset(n)%list) ) & + call mpp_error( FATAL, 'MPP_SCATTER: scattering from invalid PE.' ) + if( debug ) then call SYSTEM_CLOCK(tick) write( stdout_unit,'(a,i18,a,i6,a,2i6)' )& From a7a5a234c6bd0b0675485226da5827b5cf5404ad Mon Sep 17 00:00:00 2001 From: ganganoaa <121043264+ganganoaa@users.noreply.github.com> Date: Fri, 9 Jun 2023 15:28:53 -0400 Subject: [PATCH 10/17] Update mpp_scatter_mpi.fh --- mpp/include/mpp_scatter_mpi.fh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 3ff104b049..e3640d8229 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -58,7 +58,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i integer :: i2, j2 !< Ending indices of i and j integer :: ioff, joff !< Offsets to i and j integer :: my_ind(4) !< My starting and ending indices of i and j - integer :: gind(4,size(pelist)) !< Starting and ending indices of all processes in the group + integer, allocatable :: gind(:,:) !< Starting and ending indices of all processes in the group integer :: n !< Peset number integer :: ierr !< MPI error state MPP_TYPE_, allocatable :: send_buf(:) !< Packed data to be scattered; only relevant to the root pe @@ -67,6 +67,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i integer :: displ(size(pelist)) !< Displacements for data segments integer :: total_msgsize integer :: stdout_unit + integer :: gindx1D(4 * size(pelist)) !< Packed version of gind if (.not.ANY(mpp_pe().eq.pelist(:))) return @@ -107,9 +108,12 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i my_ind(4) = je ! Gather indices from all processes in the group at the root pe - call MPI_GATHER(my_ind, 4, MPI_TYPE_, gind, 4, MPI_TYPE_, root_pe, peset(n)%id, ierr) + call MPI_GATHER(my_ind, 4, MPI_TYPE_, gindx1D, 4, MPI_TYPE_, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_GATHER") + ! Unpack gindx1D(:) to gind(:,:) + gind = reshape(gindx1D, (/4, size(pelist)/)) + ! 
Compute my message size msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk allocate(recv_buf(msgsize)) From b680f288890e6b5003f1a297a6e4d1c86172aa6e Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Fri, 9 Jun 2023 16:40:00 -0400 Subject: [PATCH 11/17] Different method of unpacking gindx1D --- mpp/include/mpp_scatter_mpi.fh | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index e3640d8229..5b5dcc6509 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -112,7 +112,16 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_GATHER") ! Unpack gindx1D(:) to gind(:,:) - gind = reshape(gindx1D, (/4, size(pelist)/)) + !gind = reshape(gindx1D, (/4, size(pelist)/)) + if (is_root_pe) then + allocate(gind(4, size(pelist))) + do i = 1, size(pelist) + gind(1, i) = gindx1D((i-1)*4 + 1) + gind(2, i) = gindx1D((i-1)*4 + 2) + gind(3, i) = gindx1D((i-1)*4 + 3) + gind(4, i) = gindx1D((i-1)*4 + 4) + enddo + end if ! Compute my message size msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk From 91c2ca09233a050658e2cd79da1ad3973d970f2e Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Tue, 13 Jun 2023 09:01:06 -0400 Subject: [PATCH 12/17] Update mpp_scatter_mpi.fh --- mpp/include/mpp_scatter_mpi.fh | 35 ++++++++++++---------------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 5b5dcc6509..5fea89897d 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -77,26 +77,15 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i if (is_root_pe) then root_pe = mpp_pe() - if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, & - "fms_io(mpp_scatter_pelist): root_pe not a member of pelist") - if (root_pe /= pelist(1)) then - call mpp_error(FATAL, "fms_io(mpp_scatter_pelist): root_pe is not the first pe of pelist") - end if + if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe not a member of pelist") + if (root_pe .ne. pelist(1)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe is not the first pe of pelist") endif - if( .NOT.ANY(mpp_pe().EQ.peset(n)%list) ) & - call mpp_error( FATAL, 'MPP_SCATTER: scattering from invalid PE.' ) - - if( debug ) then - call SYSTEM_CLOCK(tick) - write( stdout_unit,'(a,i18,a,i6,a,2i6)' )& - 'T=',tick, ' PE=',pe, ' MPP_SCATTER begin: from_pe, length=', root_pe - end if - - if( debug .and. (current_clock.NE.0) ) call SYSTEM_CLOCK(start_tick) + root_pe = pelist(1) send_count = 0 total_msgsize = 0 + displ = 0 ioff=0 joff=0 if (present(ishift)) ioff=ishift @@ -109,7 +98,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i ! Gather indices from all processes in the group at the root pe call MPI_GATHER(my_ind, 4, MPI_TYPE_, gindx1D, 4, MPI_TYPE_, root_pe, peset(n)%id, ierr) - if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_GATHER") + if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_mpi::MPI_GATHER something is wrong") ! 
Unpack gindx1D(:) to gind(:,:) !gind = reshape(gindx1D, (/4, size(pelist)/)) @@ -127,6 +116,8 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk allocate(recv_buf(msgsize)) + print "('mpp_scatter_mpi: message size of PE(', i4,'): ', i6)", mpp_pe(), msgsize + if (is_root_pe) then ! Update group indices gind(1,:)=gind(1,:)+ioff @@ -134,10 +125,11 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i gind(3,:)=gind(3,:)+joff gind(4,:)=gind(4,:)+joff ! check indices to make sure they are within the range of "data" - if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) & - call mpp_error(FATAL,"fms_io(mpp_scatter_pelist): specified indices (with shift) are outside & + if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) then + print "('mpp_scatter_mpi:min-max ', 3i6)", minval(gind), maxval(gind(1:2,:)), maxval(gind(3:4,:)) + call mpp_error(FATAL,"mpp_scatter_mpi:: specified indices (with shift) are outside & of the range of the receiving array") - + end if do i = 1, size(pelist) i1 = gind(1,i) i2 = gind(2,i) @@ -170,14 +162,11 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i ! Scatter data segments to respective processes call MPI_SCATTERV(send_buf, send_count, displ, MPI_TYPE_, recv_buf, & msgsize, MPI_TYPE_, root_pe, peset(n)%id, ierr) - if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_pelist:MPI_SCATTERV") + if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_mpi::MPI_SCATTERV something is wrong") ! Unpack received data array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/)) - if( debug .and. (current_clock.NE.0) ) & - call increment_current_clock( EVENT_BROADCAST, total_msgsize * MPP_TYPE_BYTELEN_ ) - return end subroutine MPP_SCATTER_PELIST_3D_ !> @} From e3ae5ec97b415f11ba19328bd902115b92d21582 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Wed, 21 Jun 2023 09:32:57 -0400 Subject: [PATCH 13/17] Update mpp_scatter_mpi.fh --- mpp/include/mpp_scatter_mpi.fh | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 5fea89897d..8bd0d3a1b6 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -77,11 +77,16 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i if (is_root_pe) then root_pe = mpp_pe() + print "('mpp_scatter_mpi: number of PEs=', i6, ' size of pelist=', i6)", mpp_npes(), size(pelist) + print "('mpp_scatter_mpi: size of list=', i6)", size(peset(n)%list) + !print "('mpp_scatter_mpi: first two PEs of pelist:', 2i6)", pelist(1), pelist(2) if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe not a member of pelist") if (root_pe .ne. 
pelist(1)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe is not the first pe of pelist") endif root_pe = pelist(1) + print "('mpp_scatter_mpi: I am PE', i6, ' first two PEs of pelist:', 2i6, ' my root PE is', i6)", & + mpp_pe(), pelist(1), pelist(2), root_pe send_count = 0 total_msgsize = 0 @@ -100,6 +105,11 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i call MPI_GATHER(my_ind, 4, MPI_TYPE_, gindx1D, 4, MPI_TYPE_, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_mpi::MPI_GATHER something is wrong") + if (is_root_pe) then + print "('mpp_scatter_mpi: first and last two gindx1D are', 4i6)", gindx1D(1), gindx1D(2), & + gindx1D(4*size(pelist)-1), gindx1D(4*size(pelist)) + end if + ! Unpack gindx1D(:) to gind(:,:) !gind = reshape(gindx1D, (/4, size(pelist)/)) if (is_root_pe) then @@ -116,8 +126,6 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i msgsize = (my_ind(2)-my_ind(1)+1) * (my_ind(4)-my_ind(3)+1) * nk allocate(recv_buf(msgsize)) - print "('mpp_scatter_mpi: message size of PE(', i4,'): ', i6)", mpp_pe(), msgsize - if (is_root_pe) then ! Update group indices gind(1,:)=gind(1,:)+ioff From 5d3e8149087a5f40952efebacbc5b42fb3a81568 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Tue, 27 Jun 2023 11:32:14 -0400 Subject: [PATCH 14/17] Update to mpp_scatter_mpi.fh --- mpp/include/mpp_scatter_mpi.fh | 44 ++++++++++++++++------------------ mpp/mpp.F90 | 2 +- mpp/mpp_parameter.F90 | 4 ++-- 3 files changed, 23 insertions(+), 27 deletions(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 8bd0d3a1b6..3b352a6697 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -58,8 +58,8 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i integer :: i2, j2 !< Ending indices of i and j integer :: ioff, joff !< Offsets to i and j integer :: my_ind(4) !< My starting and ending indices of i and j - integer, allocatable :: gind(:,:) !< Starting and ending indices of all processes in the group - integer :: n !< Peset number + integer :: gind(4,size(pelist)) !< Starting and ending indices of all processes in the group + integer :: n !< Peset id integer :: ierr !< MPI error state MPP_TYPE_, allocatable :: send_buf(:) !< Packed data to be scattered; only relevant to the root pe MPP_TYPE_, allocatable :: recv_buf(:) !< My chunk of data @@ -67,27 +67,27 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i integer :: displ(size(pelist)) !< Displacements for data segments integer :: total_msgsize integer :: stdout_unit - integer :: gindx1D(4 * size(pelist)) !< Packed version of gind + integer :: gindx1D(4*size(pelist)) !< Packed version of gind if (.not.ANY(mpp_pe().eq.pelist(:))) return ! 
Get peset number n = get_peset(pelist) - if( peset(n)%count.EQ.1 )return + if( peset(n)%count.EQ.1 ) return + + if( debug )then + call SYSTEM_CLOCK(tick) + write( stdout_unit,'(a,i18,a,i6,a,i6)' )& + 'T=',tick, ' PE=', pe, ' MPP_SCATTER begin: from_pe, length=', mpp_pe() + end if + + root_pe = pelist(1) if (is_root_pe) then - root_pe = mpp_pe() - print "('mpp_scatter_mpi: number of PEs=', i6, ' size of pelist=', i6)", mpp_npes(), size(pelist) - print "('mpp_scatter_mpi: size of list=', i6)", size(peset(n)%list) - !print "('mpp_scatter_mpi: first two PEs of pelist:', 2i6)", pelist(1), pelist(2) if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe not a member of pelist") if (root_pe .ne. pelist(1)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe is not the first pe of pelist") endif - root_pe = pelist(1) - print "('mpp_scatter_mpi: I am PE', i6, ' first two PEs of pelist:', 2i6, ' my root PE is', i6)", & - mpp_pe(), pelist(1), pelist(2), root_pe - send_count = 0 total_msgsize = 0 displ = 0 @@ -102,18 +102,11 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i my_ind(4) = je ! Gather indices from all processes in the group at the root pe - call MPI_GATHER(my_ind, 4, MPI_TYPE_, gindx1D, 4, MPI_TYPE_, root_pe, peset(n)%id, ierr) + if (mpp_npes() .gt. 1) call MPI_GATHER(my_ind, 4, MPI_INTEGER4, gindx1D, 4, MPI_INTEGER4, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_mpi::MPI_GATHER something is wrong") - if (is_root_pe) then - print "('mpp_scatter_mpi: first and last two gindx1D are', 4i6)", gindx1D(1), gindx1D(2), & - gindx1D(4*size(pelist)-1), gindx1D(4*size(pelist)) - end if - ! Unpack gindx1D(:) to gind(:,:) - !gind = reshape(gindx1D, (/4, size(pelist)/)) if (is_root_pe) then - allocate(gind(4, size(pelist))) do i = 1, size(pelist) gind(1, i) = gindx1D((i-1)*4 + 1) gind(2, i) = gindx1D((i-1)*4 + 2) @@ -134,7 +127,6 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i gind(4,:)=gind(4,:)+joff ! check indices to make sure they are within the range of "data" if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) then - print "('mpp_scatter_mpi:min-max ', 3i6)", minval(gind), maxval(gind(1:2,:)), maxval(gind(3:4,:)) call mpp_error(FATAL,"mpp_scatter_mpi:: specified indices (with shift) are outside & of the range of the receiving array") end if @@ -155,7 +147,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i allocate(send_buf(total_msgsize)) total_msgsize = 0 - + ! Fill up send_buf with corresponding data chunk for each PE do i = 1, size(pelist) i1 = gind(1,i) i2 = gind(2,i) @@ -167,14 +159,18 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i enddo end if - ! Scatter data segments to respective processes - call MPI_SCATTERV(send_buf, send_count, displ, MPI_TYPE_, recv_buf, & + ! Scatter data chunks to respective PEs + if (mpp_npes() .gt. 1) call MPI_SCATTERV(send_buf, send_count, displ, MPI_TYPE_, recv_buf, & msgsize, MPI_TYPE_, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_mpi::MPI_SCATTERV something is wrong") ! Unpack received data array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/)) + if( debug .and. 
(current_clock.NE.0) ) then + call increment_current_clock( EVENT_SCATTER, msgsize*MPP_TYPE_BYTELEN_ ) + end if + return end subroutine MPP_SCATTER_PELIST_3D_ !> @} diff --git a/mpp/mpp.F90 b/mpp/mpp.F90 index d429abb3f9..45f58abefd 100644 --- a/mpp/mpp.F90 +++ b/mpp/mpp.F90 @@ -167,7 +167,7 @@ module mpp_mod use mpp_parameter_mod, only : CLOCK_MODULE, CLOCK_ROUTINE, CLOCK_LOOP, CLOCK_INFRA use mpp_parameter_mod, only : MAX_EVENTS, MAX_BINS, MAX_EVENT_TYPES, MAX_CLOCKS use mpp_parameter_mod, only : MAXPES, EVENT_WAIT, EVENT_ALLREDUCE, EVENT_BROADCAST - use mpp_parameter_mod, only : EVENT_ALLTOALL + use mpp_parameter_mod, only : EVENT_ALLTOALL, EVENT_SCATTER use mpp_parameter_mod, only : EVENT_TYPE_CREATE, EVENT_TYPE_FREE use mpp_parameter_mod, only : EVENT_RECV, EVENT_SEND, MPP_READY, MPP_WAIT use mpp_parameter_mod, only : mpp_parameter_version=>version diff --git a/mpp/mpp_parameter.F90 b/mpp/mpp_parameter.F90 index 182e3f2a7d..5e57e19133 100644 --- a/mpp/mpp_parameter.F90 +++ b/mpp/mpp_parameter.F90 @@ -40,7 +40,7 @@ module mpp_parameter_mod public :: MPP_CLOCK_DETAILED, CLOCK_COMPONENT, CLOCK_SUBCOMPONENT, CLOCK_MODULE_DRIVER public :: CLOCK_MODULE, CLOCK_ROUTINE, CLOCK_LOOP, CLOCK_INFRA, MAX_BINS public :: EVENT_ALLREDUCE, EVENT_BROADCAST, EVENT_RECV, EVENT_SEND, EVENT_WAIT - public :: EVENT_ALLTOALL, EVENT_TYPE_CREATE, EVENT_TYPE_FREE + public :: EVENT_ALLTOALL, EVENT_TYPE_CREATE, EVENT_TYPE_FREE, EVENT_SCATTER public :: DEFAULT_TAG public :: COMM_TAG_1, COMM_TAG_2, COMM_TAG_3, COMM_TAG_4 public :: COMM_TAG_5, COMM_TAG_6, COMM_TAG_7, COMM_TAG_8 @@ -75,7 +75,7 @@ module mpp_parameter_mod integer, parameter :: NOTE=0, WARNING=1, FATAL=2 integer, parameter :: MAX_CLOCKS=400, MAX_EVENT_TYPES=5, MAX_EVENTS=40000 integer, parameter :: EVENT_ALLREDUCE=1, EVENT_BROADCAST=2, EVENT_RECV=3, EVENT_SEND=4, EVENT_WAIT=5 - integer, parameter :: EVENT_ALLTOALL=6 + integer, parameter :: EVENT_ALLTOALL=6, EVENT_SCATTER=7 integer, parameter :: EVENT_TYPE_CREATE=7, EVENT_TYPE_FREE=8 integer, parameter :: MPP_CLOCK_SYNC=1, MPP_CLOCK_DETAILED=2 integer :: DEFAULT_TAG = 1 From a07f9c62c2e617a001c49659a6d76c8de79906b4 Mon Sep 17 00:00:00 2001 From: ganganoaa <121043264+ganganoaa@users.noreply.github.com> Date: Tue, 27 Jun 2023 12:03:45 -0400 Subject: [PATCH 15/17] Update mpp.F90 --- mpp/mpp.F90 | 2 -- 1 file changed, 2 deletions(-) diff --git a/mpp/mpp.F90 b/mpp/mpp.F90 index 8c4e060abe..6c7f7bfbbd 100644 --- a/mpp/mpp.F90 +++ b/mpp/mpp.F90 @@ -738,8 +738,6 @@ module mpp_mod interface mpp_scatter module procedure mpp_scatter_pelist_int4_2d module procedure mpp_scatter_pelist_int4_3d - module procedure mpp_scatter_pelist_int8_2d - module procedure mpp_scatter_pelist_int8_3d module procedure mpp_scatter_pelist_real4_2d module procedure mpp_scatter_pelist_real4_3d module procedure mpp_scatter_pelist_real8_2d From f9e0a371f45fdc00ff17c7fa59b4a166cd22851a Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Wed, 28 Jun 2023 15:24:18 -0400 Subject: [PATCH 16/17] Adds interfaces int8_2d int8_3d for mpp_scatter --- mpp/include/mpp_comm_mpi.inc | 12 ++++++++++++ mpp/include/mpp_comm_nocomm.inc | 10 ++++++++++ mpp/include/mpp_scatter_mpi.fh | 2 ++ mpp/mpp.F90 | 2 ++ 4 files changed, 26 insertions(+) diff --git a/mpp/include/mpp_comm_mpi.inc b/mpp/include/mpp_comm_mpi.inc index cffd8ff624..5cef537e4f 100644 --- a/mpp/include/mpp_comm_mpi.inc +++ b/mpp/include/mpp_comm_mpi.inc @@ -1322,6 +1322,18 @@ end subroutine mpp_exit #define MPI_TYPE_ MPI_INTEGER4 #include +#undef MPP_SCATTER_PELIST_2D_ +#undef 
MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPP_TYPE_BYTELEN_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int8_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int8_3d +#define MPP_TYPE_ integer(i8_kind) +#define MPP_TYPE_BYTELEN_ 8 +#define MPI_TYPE_ MPI_INTEGER8 +#include + #undef MPP_SCATTER_PELIST_2D_ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ diff --git a/mpp/include/mpp_comm_nocomm.inc b/mpp/include/mpp_comm_nocomm.inc index d9ac19d0f2..e464356dfc 100644 --- a/mpp/include/mpp_comm_nocomm.inc +++ b/mpp/include/mpp_comm_nocomm.inc @@ -736,6 +736,16 @@ end subroutine mpp_exit #define MPI_TYPE_ MPI_INTEGER4 #include +#undef MPP_SCATTER_PELIST_2D_ +#undef MPP_SCATTER_PELIST_3D_ +#undef MPP_TYPE_ +#undef MPI_TYPE_ +#define MPP_SCATTER_PELIST_2D_ mpp_scatter_pelist_int8_2d +#define MPP_SCATTER_PELIST_3D_ mpp_scatter_pelist_int8_3d +#define MPP_TYPE_ integer(i8_kind) +#define MPI_TYPE_ MPI_INTEGER8 +#include + #undef MPP_SCATTER_PELIST_2D_ #undef MPP_SCATTER_PELIST_3D_ #undef MPP_TYPE_ diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index 3b352a6697..a1c7557663 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -75,6 +75,8 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i n = get_peset(pelist) if( peset(n)%count.EQ.1 ) return + if (any(peset(n)%list .ne. pelist)) call mpp_error(FATAL, "mpp_scatter_mpi: two pelists don't match") + if( debug )then call SYSTEM_CLOCK(tick) write( stdout_unit,'(a,i18,a,i6,a,i6)' )& diff --git a/mpp/mpp.F90 b/mpp/mpp.F90 index 6c7f7bfbbd..8c4e060abe 100644 --- a/mpp/mpp.F90 +++ b/mpp/mpp.F90 @@ -738,6 +738,8 @@ module mpp_mod interface mpp_scatter module procedure mpp_scatter_pelist_int4_2d module procedure mpp_scatter_pelist_int4_3d + module procedure mpp_scatter_pelist_int8_2d + module procedure mpp_scatter_pelist_int8_3d module procedure mpp_scatter_pelist_real4_2d module procedure mpp_scatter_pelist_real4_3d module procedure mpp_scatter_pelist_real8_2d From cdf6e609a52456fac337b1bcc8d186e9abcdc209 Mon Sep 17 00:00:00 2001 From: Ganga P Purja Pun Date: Thu, 13 Jul 2023 14:27:21 -0400 Subject: [PATCH 17/17] Update mpp_scatter_mpi.fh --- mpp/include/mpp_scatter_mpi.fh | 83 ++++++++++++++++++++++++++++------ 1 file changed, 69 insertions(+), 14 deletions(-) diff --git a/mpp/include/mpp_scatter_mpi.fh b/mpp/include/mpp_scatter_mpi.fh index a1c7557663..32d351839e 100644 --- a/mpp/include/mpp_scatter_mpi.fh +++ b/mpp/include/mpp_scatter_mpi.fh @@ -68,6 +68,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i integer :: total_msgsize integer :: stdout_unit integer :: gindx1D(4*size(pelist)) !< Packed version of gind + integer :: ii, jj, k, m if (.not.ANY(mpp_pe().eq.pelist(:))) return @@ -75,7 +76,14 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i n = get_peset(pelist) if( peset(n)%count.EQ.1 ) return - if (any(peset(n)%list .ne. pelist)) call mpp_error(FATAL, "mpp_scatter_mpi: two pelists don't match") + do i = 1, mpp_npes() + if(peset(n)%list(i) == pelist(1)) then + root_pe = i - 1 + exit + endif + enddo + + if (any(peset(n)%list(:) .ne. 
pelist(:))) call mpp_error(FATAL, "mpp_scatter_mpi: two pelists don't match") if( debug )then call SYSTEM_CLOCK(tick) @@ -83,8 +91,6 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i 'T=',tick, ' PE=', pe, ' MPP_SCATTER begin: from_pe, length=', mpp_pe() end if - root_pe = pelist(1) - if (is_root_pe) then if (.not.ANY(pelist(:).eq.root_pe)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe not a member of pelist") if (root_pe .ne. pelist(1)) call mpp_error(FATAL, "mpp_scatter_mpi: root_pe is not the first pe of pelist") @@ -107,14 +113,14 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i if (mpp_npes() .gt. 1) call MPI_GATHER(my_ind, 4, MPI_INTEGER4, gindx1D, 4, MPI_INTEGER4, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_mpi::MPI_GATHER something is wrong") - ! Unpack gindx1D(:) to gind(:,:) - if (is_root_pe) then - do i = 1, size(pelist) - gind(1, i) = gindx1D((i-1)*4 + 1) - gind(2, i) = gindx1D((i-1)*4 + 2) - gind(3, i) = gindx1D((i-1)*4 + 3) - gind(4, i) = gindx1D((i-1)*4 + 4) - enddo + if (any(mpp_pe() .eq. pelist(:))) then + print "('mpp_scatter_mpi:my_ind ', 'PE ', i4, 4i4)", mpp_pe(), my_ind(1:4) + end if + if (mpp_pe() .eq. pelist(1)) then + !print *, 'mpp_scatter_mpi:pelist', size(pelist), pelist(:) + !print *, 'mpp_scatter_mpi:list', size(peset(n)%list), peset(n)%list(:) + !print "('mpp_scatter_mpi:gindx1D ', 'root PE=', i4, 4i4)", root_pe, gindx1D((i-1)*4 + 1:(i-1)*4 + 4) + print *, 'mpp_scatter_mpi:gindx1D root PE', root_pe, gindx1D(:) end if ! Compute my message size @@ -122,6 +128,13 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i allocate(recv_buf(msgsize)) if (is_root_pe) then + ! Unpack gindx1D(:) to gind(:,:) + do i = 1, size(pelist) + gind(1, i) = gindx1D((i-1)*4 + 1) + gind(2, i) = gindx1D((i-1)*4 + 2) + gind(3, i) = gindx1D((i-1)*4 + 3) + gind(4, i) = gindx1D((i-1)*4 + 4) + end do ! Update group indices gind(1,:)=gind(1,:)+ioff gind(2,:)=gind(2,:)+ioff @@ -129,6 +142,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i gind(4,:)=gind(4,:)+joff ! check indices to make sure they are within the range of "data" if ((minval(gind).lt.1) .OR. (maxval(gind(1:2,:)).gt.size(data,1)) .OR. (maxval(gind(3:4,:)).gt.size(data,2))) then + print "('mpp_scatter_mpi:min-max ', 3i6)", minval(gind), maxval(gind(1:2,:)), maxval(gind(3:4,:)) call mpp_error(FATAL,"mpp_scatter_mpi:: specified indices (with shift) are outside & of the range of the receiving array") end if @@ -143,6 +157,7 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i total_msgsize = total_msgsize + send_count(i) ! Compute data displacements displ(i) = total_msgsize - send_count(i) + !print "('mpp_scatter_mpi:', 2i6)", displ(i), send_count(i) enddo ! Allocate send buffer @@ -157,17 +172,57 @@ subroutine MPP_SCATTER_PELIST_3D_(is, ie, js, je, nk, pelist, array_seg, data, i j2 = gind(4,i) total_msgsize = total_msgsize + send_count(i) ! Pack data segments - send_buf(displ(i)+1:total_msgsize) = reshape(data(i1:i2,j1:j2,1:nk), (/size(data(i1:i2,j1:j2,1:nk))/)) + m = displ(i) + 1 + do k = 1, nk + do jj = j1, j2 + do ii = i1, i2 + send_buf(m) = data(ii, jj, k) + m = m + 1 + end do + end do + end do + !send_buf(displ(i)+1:total_msgsize) = reshape(data(i1:i2,j1:j2,1:nk), (/size(data(i1:i2,j1:j2,1:nk))/), & + ! 
data(i1:i2,j1:j2,1:nk)) + !print *, 'mpp_scatter_mpi:send_buf', i, send_buf(displ(i)+1:displ(i)+4) + !print *, 'mpp_scatter_mpi:data', i, data(i1:i2,j1:j2,1:nk) enddo end if ! Scatter data chunks to respective PEs if (mpp_npes() .gt. 1) call MPI_SCATTERV(send_buf, send_count, displ, MPI_TYPE_, recv_buf, & - msgsize, MPI_TYPE_, root_pe, peset(n)%id, ierr) + msgsize, MPI_TYPE_, root_pe, peset(n)%id, ierr) if (ierr /= MPI_SUCCESS) call mpp_error(FATAL, "mpp_scatter_mpi::MPI_SCATTERV something is wrong") ! Unpack received data - array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/)) + !if (is_root_pe) then + !array_seg(is:ie,js:je,1:nk) = reshape(send_buf(1:send_count(1)), (/shape(array_seg(is:ie,js:je,1:nk))/), & + !send_buf(1:send_count(1))) + !else + m = 1 + do k = 1, nk + do jj = js, je + do ii = is, ie + array_seg(ii,jj,k) = recv_buf(m) + m = m + 1 + end do + end do + end do + !array_seg(is:ie,js:je,1:nk) = reshape(recv_buf, (/shape(array_seg(is:ie,js:je,1:nk))/), recv_buf) + !end if + + i = 1 + if (i .le. size(pelist)) then + if (mpp_pe() .eq. pelist(i)) then + !print *, 'mpp_scatter_mpi:array_seg', array_seg(ie-3:ie,js:js,1:1) + end if + if (is_root_pe) then + !if (any(array_seg(is:ie,js:je,1:nk) .ne. data(gind(1,1):gind(2,1),gind(3,1):gind(4,1),1:nk))) then + !print *, 'mpp_scatter_mpi: data did not match!' + !end if + !print *, 'mpp_scatter_mpi:data', data(gind(2,i)-3:gind(2,i),gind(3,i):gind(3,i),1:1) + !print *, 'mpp_scatter_mpi:array_seg', array_seg(ie:ie,je:je,1:1) + end if + end if if( debug .and. (current_clock.NE.0) ) then call increment_current_clock( EVENT_SCATTER, msgsize*MPP_TYPE_BYTELEN_ )
 end if
 
 return
 end subroutine MPP_SCATTER_PELIST_3D_
 !> @}
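
A note on the collective pattern implemented above. The series replaces the
original point-to-point scatter (one mpp_send/mpp_recv pair per PE) with two
collectives: an MPI_GATHER that tells the root every PE's segment extents,
followed by an MPI_SCATTERV driven by per-PE counts and displacements into a
buffer packed at the root. The standalone sketch below shows just that
pattern; the names (myext, extents, counts, displs, chunk) are local to the
sketch and do not exist in FMS, and error checking is omitted for brevity.

program scatterv_pattern
  use mpi
  implicit none
  integer :: ierr, rank, npes, i
  integer :: myext(1)                  ! number of elements this rank receives
  integer, allocatable :: extents(:)   ! every rank's extent, gathered at root
  integer, allocatable :: counts(:)    ! scatterv send counts (root only)
  integer, allocatable :: displs(:)    ! scatterv displacements, in elements
  real, allocatable :: send_buf(:), chunk(:)

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, npes, ierr)

  ! Each rank owns a different-sized chunk (rank+1 elements here), standing
  ! in for the per-PE (is:ie,js:je,1:nk) extents carried by my_ind above.
  myext(1) = rank + 1
  allocate(chunk(myext(1)))
  allocate(extents(npes), counts(npes), displs(npes))

  ! Step 1: the root learns every rank's extent (the MPI_GATHER of my_ind).
  call MPI_Gather(myext, 1, MPI_INTEGER, extents, 1, MPI_INTEGER, &
                  0, MPI_COMM_WORLD, ierr)

  ! Step 2: the root builds counts, displacements, and a packed send buffer
  ! (the send_count/displ/send_buf construction in mpp_scatter_mpi.fh).
  if (rank == 0) then
    counts(:) = extents(:)
    displs(1) = 0
    do i = 2, npes
      displs(i) = displs(i-1) + counts(i-1)
    end do
    allocate(send_buf(sum(counts)))
    do i = 1, sum(counts)
      send_buf(i) = real(i)            ! stand-in for the packed data segments
    end do
  else
    allocate(send_buf(1))              ! contents significant only at the root
  end if

  ! Step 3: one collective replaces the per-PE send/recv loop. Note that the
  ! counts and displacements are in elements of MPI_REAL, not bytes.
  call MPI_Scatterv(send_buf, counts, displs, MPI_REAL, chunk, &
                    myext(1), MPI_REAL, 0, MPI_COMM_WORLD, ierr)

  print '(a,i0,a,i0,a)', 'rank ', rank, ' received ', myext(1), ' elements'

  call MPI_Finalize(ierr)
end program scatterv_pattern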